Commit 3c1d3f09 authored by Paul Burton

MIPS: futex: Emit Loongson3 sync workarounds within asm

Generate the sync instructions required to work around Loongson3 LL/SC
errata within inline asm blocks. This feels a little safer than doing it
from C, where strictly speaking the compiler would be well within its
rights to insert a memory access between the separate asm statements we
previously had, which contained sync & ll instructions respectively.
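
To illustrate the hazard (a minimal sketch, not the kernel's exact code;
mips_ll_read_unsafe()/mips_ll_read_safe() are hypothetical helpers and the
"m" constraint stands in for the kernel's GCC_OFF_SMALL_ASM()):

	/* Before: barrier & LL in separate asm statements. Nothing stops
	 * the compiler from scheduling a load or store between them,
	 * re-opening the window the Loongson3 workaround must close. */
	static inline unsigned int mips_ll_read_unsafe(unsigned int *ptr)
	{
		unsigned int val;

		__asm__ __volatile__("	sync	" : : : "memory");
		/* a compiler-generated memory access may land here */
		__asm__ __volatile__("	ll	%0, %1	"
				     : "=r" (val) : "m" (*ptr));
		return val;
	}

	/* After: sync & ll share one asm block, so there is no
	 * compiler-visible gap between the two instructions. */
	static inline unsigned int mips_ll_read_safe(unsigned int *ptr)
	{
		unsigned int val;

		__asm__ __volatile__(
		"	sync			\n"
		"	ll	%0, %1		\n"
		: "=r" (val)
		: "m" (*ptr));
		return val;
	}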
Signed-off-by: Paul Burton <paul.burton@mips.com>
Cc: linux-mips@vger.kernel.org
Cc: Huacai Chen <chenhc@lemote.com>
Cc: Jiaxun Yang <jiaxun.yang@flygoat.com>
Cc: linux-kernel@vger.kernel.org
parent a91f2a1d
--- a/arch/mips/include/asm/barrier.h
+++ b/arch/mips/include/asm/barrier.h
@@ -95,13 +95,14 @@ static inline void wmb(void)
* ordering will be done by smp_llsc_mb() and friends.
*/
#if defined(CONFIG_WEAK_REORDERING_BEYOND_LLSC) && defined(CONFIG_SMP)
-#define __WEAK_LLSC_MB " sync \n"
-#define smp_llsc_mb() __asm__ __volatile__(__WEAK_LLSC_MB : : :"memory")
-#define __LLSC_CLOBBER
+# define __WEAK_LLSC_MB sync
+# define smp_llsc_mb() \
+	__asm__ __volatile__(__stringify(__WEAK_LLSC_MB) : : :"memory")
+# define __LLSC_CLOBBER
#else
-#define __WEAK_LLSC_MB " \n"
-#define smp_llsc_mb() do { } while (0)
-#define __LLSC_CLOBBER "memory"
+# define __WEAK_LLSC_MB
+# define smp_llsc_mb() do { } while (0)
+# define __LLSC_CLOBBER "memory"
#endif
#ifdef CONFIG_CPU_CAVIUM_OCTEON
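
The barrier.h side of this works because __WEAK_LLSC_MB is now a bare
token (sync, or nothing) instead of a string literal. A minimal sketch of
the stringification idiom it relies on, as provided by <linux/stringify.h>:

	#define __stringify_1(x...)	#x
	#define __stringify(x...)	__stringify_1(x)	/* expand, then stringify */

	/* With "# define __WEAK_LLSC_MB sync" this expands to "sync";
	 * with the macro defined empty it expands to "", so a single asm
	 * template serves both configurations. */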
--- a/arch/mips/include/asm/futex.h
+++ b/arch/mips/include/asm/futex.h
@@ -16,6 +16,7 @@
#include <asm/barrier.h>
#include <asm/compiler.h>
#include <asm/errno.h>
+#include <asm/sync.h>
#include <asm/war.h>
#define __futex_atomic_op(insn, ret, oldval, uaddr, oparg) \
@@ -32,7 +33,7 @@
" .set arch=r4000 \n" \
"2: sc $1, %2 \n" \
" beqzl $1, 1b \n" \
-__WEAK_LLSC_MB \
+__stringify(__WEAK_LLSC_MB) \
"3: \n" \
" .insn \n" \
" .set pop \n" \
@@ -50,19 +51,19 @@
"i" (-EFAULT) \
: "memory"); \
} else if (cpu_has_llsc) { \
-loongson_llsc_mb(); \
__asm__ __volatile__( \
" .set push \n" \
" .set noat \n" \
" .set push \n" \
" .set "MIPS_ISA_ARCH_LEVEL" \n" \
+" " __SYNC(full, loongson3_war) " \n" \
"1: "user_ll("%1", "%4")" # __futex_atomic_op\n" \
" .set pop \n" \
" " insn " \n" \
" .set "MIPS_ISA_ARCH_LEVEL" \n" \
"2: "user_sc("$1", "%2")" \n" \
" beqz $1, 1b \n" \
-__WEAK_LLSC_MB \
+__stringify(__WEAK_LLSC_MB) \
"3: \n" \
" .insn \n" \
" .set pop \n" \
@@ -147,7 +148,7 @@ futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
" .set arch=r4000 \n"
"2: sc $1, %2 \n"
" beqzl $1, 1b \n"
-__WEAK_LLSC_MB
+__stringify(__WEAK_LLSC_MB)
"3: \n"
" .insn \n"
" .set pop \n"
@@ -164,13 +165,13 @@ futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
"i" (-EFAULT)
: "memory");
} else if (cpu_has_llsc) {
-loongson_llsc_mb();
__asm__ __volatile__(
"# futex_atomic_cmpxchg_inatomic \n"
" .set push \n"
" .set noat \n"
" .set push \n"
" .set "MIPS_ISA_ARCH_LEVEL" \n"
+" " __SYNC(full, loongson3_war) " \n"
"1: "user_ll("%1", "%3")" \n"
" bne %1, %z4, 3f \n"
" .set pop \n"
@@ -178,8 +179,7 @@ futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
" .set "MIPS_ISA_ARCH_LEVEL" \n"
"2: "user_sc("$1", "%2")" \n"
" beqz $1, 1b \n"
-__WEAK_LLSC_MB
-"3: \n"
+"3: " __SYNC_ELSE(full, loongson3_war, __WEAK_LLSC_MB) "\n"
" .insn \n"
" .set pop \n"
" .section .fixup,\"ax\" \n"
@@ -194,7 +194,6 @@ futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
: GCC_OFF_SMALL_ASM() (*uaddr), "Jr" (oldval), "Jr" (newval),
"i" (-EFAULT)
: "memory");
-loongson_llsc_mb();
} else
return -ENOSYS;
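
For reference, the <asm/sync.h> selectors used above emit a barrier only
when the named workaround applies. A simplified preprocessor model,
assuming CONFIG_CPU_LOONGSON3_WORKAROUNDS gates the errata handling (the
real header implements this with assembler macros, not the preprocessor):

	#ifdef CONFIG_CPU_LOONGSON3_WORKAROUNDS
	# define __SYNC(type, reason)			"	sync	\n"
	# define __SYNC_ELSE(type, reason, els)	"	sync	\n"
	#else
	# define __SYNC(type, reason)			""
	# define __SYNC_ELSE(type, reason, els)	__stringify(els) "\n"
	#endif

	/* "3: " __SYNC_ELSE(full, loongson3_war, __WEAK_LLSC_MB) "\n" thus
	 * places a full sync at the exit label on workaround kernels and
	 * falls back to the plain __WEAK_LLSC_MB barrier elsewhere. */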