Commit c786e90b authored by Linus Torvalds

Merge branch 'core-urgent-for-linus' of git://git.kernel.org/pub/scm/linux/kernel/git/tip/tip

Pull gcc "asm goto" miscompilation workaround from Ingo Molnar:
 "This is the fix for the GCC miscompilation discussed in the following
  lkml thread:

    [x86] BUG: unable to handle kernel paging request at 00740060

  The bug in GCC has been fixed by Jakub and the fix will be part of the
  GCC 4.8.2 release expected to be released next week - so the quirk's
  version test checks for <= 4.8.1.

  The quirk is only added to compiler-gcc4.h and not to the higher level
  compiler.h because all asm goto uses are behind a feature check"

* 'core-urgent-for-linus' of git://git.kernel.org/pub/scm/linux/kernel/git/tip/tip:
  compiler/gcc4: Add quirk for 'asm goto' miscompilation bug
parents 71ac3d19 3f0116c3
...@@ -16,7 +16,7 @@ ...@@ -16,7 +16,7 @@
static __always_inline bool arch_static_branch(struct static_key *key) static __always_inline bool arch_static_branch(struct static_key *key)
{ {
asm goto("1:\n\t" asm_volatile_goto("1:\n\t"
JUMP_LABEL_NOP "\n\t" JUMP_LABEL_NOP "\n\t"
".pushsection __jump_table, \"aw\"\n\t" ".pushsection __jump_table, \"aw\"\n\t"
".word 1b, %l[l_yes], %c0\n\t" ".word 1b, %l[l_yes], %c0\n\t"
......
...@@ -22,7 +22,7 @@ ...@@ -22,7 +22,7 @@
static __always_inline bool arch_static_branch(struct static_key *key) static __always_inline bool arch_static_branch(struct static_key *key)
{ {
asm goto("1:\tnop\n\t" asm_volatile_goto("1:\tnop\n\t"
"nop\n\t" "nop\n\t"
".pushsection __jump_table, \"aw\"\n\t" ".pushsection __jump_table, \"aw\"\n\t"
WORD_INSN " 1b, %l[l_yes], %0\n\t" WORD_INSN " 1b, %l[l_yes], %0\n\t"
......
...@@ -19,7 +19,7 @@ ...@@ -19,7 +19,7 @@
static __always_inline bool arch_static_branch(struct static_key *key) static __always_inline bool arch_static_branch(struct static_key *key)
{ {
asm goto("1:\n\t" asm_volatile_goto("1:\n\t"
"nop\n\t" "nop\n\t"
".pushsection __jump_table, \"aw\"\n\t" ".pushsection __jump_table, \"aw\"\n\t"
JUMP_ENTRY_TYPE "1b, %l[l_yes], %c0\n\t" JUMP_ENTRY_TYPE "1b, %l[l_yes], %c0\n\t"
......
...@@ -15,7 +15,7 @@ ...@@ -15,7 +15,7 @@
static __always_inline bool arch_static_branch(struct static_key *key) static __always_inline bool arch_static_branch(struct static_key *key)
{ {
asm goto("0: brcl 0,0\n" asm_volatile_goto("0: brcl 0,0\n"
".pushsection __jump_table, \"aw\"\n" ".pushsection __jump_table, \"aw\"\n"
ASM_ALIGN "\n" ASM_ALIGN "\n"
ASM_PTR " 0b, %l[label], %0\n" ASM_PTR " 0b, %l[label], %0\n"
......
...@@ -9,7 +9,7 @@ ...@@ -9,7 +9,7 @@
static __always_inline bool arch_static_branch(struct static_key *key) static __always_inline bool arch_static_branch(struct static_key *key)
{ {
asm goto("1:\n\t" asm_volatile_goto("1:\n\t"
"nop\n\t" "nop\n\t"
"nop\n\t" "nop\n\t"
".pushsection __jump_table, \"aw\"\n\t" ".pushsection __jump_table, \"aw\"\n\t"
......
...@@ -374,7 +374,7 @@ static __always_inline __pure bool __static_cpu_has(u16 bit) ...@@ -374,7 +374,7 @@ static __always_inline __pure bool __static_cpu_has(u16 bit)
* Catch too early usage of this before alternatives * Catch too early usage of this before alternatives
* have run. * have run.
*/ */
asm goto("1: jmp %l[t_warn]\n" asm_volatile_goto("1: jmp %l[t_warn]\n"
"2:\n" "2:\n"
".section .altinstructions,\"a\"\n" ".section .altinstructions,\"a\"\n"
" .long 1b - .\n" " .long 1b - .\n"
...@@ -388,7 +388,7 @@ static __always_inline __pure bool __static_cpu_has(u16 bit) ...@@ -388,7 +388,7 @@ static __always_inline __pure bool __static_cpu_has(u16 bit)
#endif #endif
asm goto("1: jmp %l[t_no]\n" asm_volatile_goto("1: jmp %l[t_no]\n"
"2:\n" "2:\n"
".section .altinstructions,\"a\"\n" ".section .altinstructions,\"a\"\n"
" .long 1b - .\n" " .long 1b - .\n"
...@@ -453,7 +453,7 @@ static __always_inline __pure bool _static_cpu_has_safe(u16 bit) ...@@ -453,7 +453,7 @@ static __always_inline __pure bool _static_cpu_has_safe(u16 bit)
* have. Thus, we force the jump to the widest, 4-byte, signed relative * have. Thus, we force the jump to the widest, 4-byte, signed relative
* offset even though the last would often fit in less bytes. * offset even though the last would often fit in less bytes.
*/ */
asm goto("1: .byte 0xe9\n .long %l[t_dynamic] - 2f\n" asm_volatile_goto("1: .byte 0xe9\n .long %l[t_dynamic] - 2f\n"
"2:\n" "2:\n"
".section .altinstructions,\"a\"\n" ".section .altinstructions,\"a\"\n"
" .long 1b - .\n" /* src offset */ " .long 1b - .\n" /* src offset */
......
...@@ -18,7 +18,7 @@ ...@@ -18,7 +18,7 @@
static __always_inline bool arch_static_branch(struct static_key *key) static __always_inline bool arch_static_branch(struct static_key *key)
{ {
asm goto("1:" asm_volatile_goto("1:"
".byte " __stringify(STATIC_KEY_INIT_NOP) "\n\t" ".byte " __stringify(STATIC_KEY_INIT_NOP) "\n\t"
".pushsection __jump_table, \"aw\" \n\t" ".pushsection __jump_table, \"aw\" \n\t"
_ASM_ALIGN "\n\t" _ASM_ALIGN "\n\t"
......
...@@ -20,7 +20,7 @@ ...@@ -20,7 +20,7 @@
static inline void __mutex_fastpath_lock(atomic_t *v, static inline void __mutex_fastpath_lock(atomic_t *v,
void (*fail_fn)(atomic_t *)) void (*fail_fn)(atomic_t *))
{ {
asm volatile goto(LOCK_PREFIX " decl %0\n" asm_volatile_goto(LOCK_PREFIX " decl %0\n"
" jns %l[exit]\n" " jns %l[exit]\n"
: : "m" (v->counter) : : "m" (v->counter)
: "memory", "cc" : "memory", "cc"
...@@ -75,7 +75,7 @@ static inline int __mutex_fastpath_lock_retval(atomic_t *count) ...@@ -75,7 +75,7 @@ static inline int __mutex_fastpath_lock_retval(atomic_t *count)
static inline void __mutex_fastpath_unlock(atomic_t *v, static inline void __mutex_fastpath_unlock(atomic_t *v,
void (*fail_fn)(atomic_t *)) void (*fail_fn)(atomic_t *))
{ {
asm volatile goto(LOCK_PREFIX " incl %0\n" asm_volatile_goto(LOCK_PREFIX " incl %0\n"
" jg %l[exit]\n" " jg %l[exit]\n"
: : "m" (v->counter) : : "m" (v->counter)
: "memory", "cc" : "memory", "cc"
......
...@@ -65,6 +65,21 @@ ...@@ -65,6 +65,21 @@
#define __visible __attribute__((externally_visible)) #define __visible __attribute__((externally_visible))
#endif #endif
/*
* GCC 'asm goto' miscompiles certain code sequences:
*
* http://gcc.gnu.org/bugzilla/show_bug.cgi?id=58670
*
* Work it around via a compiler barrier quirk suggested by Jakub Jelinek.
* Fixed in GCC 4.8.2 and later versions.
*
* (asm goto is automatically volatile - the naming reflects this.)
*/
#if GCC_VERSION <= 40801
# define asm_volatile_goto(x...) do { asm goto(x); asm (""); } while (0)
#else
# define asm_volatile_goto(x...) do { asm goto(x); } while (0)
#endif
#ifdef CONFIG_ARCH_USE_BUILTIN_BSWAP #ifdef CONFIG_ARCH_USE_BUILTIN_BSWAP
#if GCC_VERSION >= 40400 #if GCC_VERSION >= 40400
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment