Commit cd8730c3 authored by Marco Elver, committed by Paul E. McKenney

x86/barriers, kcsan: Use generic instrumentation for non-smp barriers

Prefix all barriers with __, now that asm-generic/barriers.h supports
defining the final instrumented version of these barriers. The change is
limited to barriers used by x86-64.
Signed-off-by: Marco Elver <elver@google.com>
Signed-off-by: Paul E. McKenney <paulmck@kernel.org>
parent 04def1b9
@@ -19,9 +19,9 @@
 #define wmb() asm volatile(ALTERNATIVE("lock; addl $0,-4(%%esp)", "sfence", \
 				       X86_FEATURE_XMM2) ::: "memory", "cc")
 #else
-#define mb() asm volatile("mfence":::"memory")
-#define rmb() asm volatile("lfence":::"memory")
-#define wmb() asm volatile("sfence" ::: "memory")
+#define __mb() asm volatile("mfence":::"memory")
+#define __rmb() asm volatile("lfence":::"memory")
+#define __wmb() asm volatile("sfence" ::: "memory")
 #endif
 /**
@@ -51,8 +51,8 @@ static inline unsigned long array_index_mask_nospec(unsigned long index,
 /* Prevent speculative execution past this barrier. */
 #define barrier_nospec() alternative("", "lfence", X86_FEATURE_LFENCE_RDTSC)
-#define dma_rmb() barrier()
-#define dma_wmb() barrier()
+#define __dma_rmb() barrier()
+#define __dma_wmb() barrier()
 #define __smp_mb() asm volatile("lock; addl $0,-4(%%" _ASM_SP ")" ::: "memory", "cc")
...
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment