Commit 5154f3b4 authored by Paul Burton, committed by Ralf Baechle

MIPS: cmpxchg: Pull xchg() asm into a macro

Use a macro to generate the 32 & 64 bit variants of the backing code for
xchg(), much as is already done for cmpxchg(). This removes the
duplication that could previously be found in __xchg_u32() &
__xchg_u64().
Signed-off-by: Paul Burton <paul.burton@imgtec.com>
Cc: linux-mips@linux-mips.org
Patchwork: https://patchwork.linux-mips.org/patch/16349/
Signed-off-by: Ralf Baechle <ralf@linux-mips.org>
parent 6b1e7629
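
For illustration, here is the pattern this patch applies, reduced to a minimal compilable C sketch with hypothetical names (the real macro below substitutes the "ll"/"sc" vs. "lld"/"scd" mnemonics into a shared inline-assembly body):

/*
 * Sketch only, not the kernel code: a single statement-expression
 * macro holds the shared body, and each width-specific helper
 * becomes a one-line wrapper around it.  Plain (non-atomic) C
 * stands in here for the LL/SC assembly.
 */
#define __xchg_sketch(m, val)					\
({								\
	__typeof(*(m)) __ret = *(m);				\
	*(m) = (val);						\
	__ret;							\
})

static inline unsigned int xchg_u32_sketch(volatile unsigned int *m,
					   unsigned int val)
{
	return __xchg_sketch(m, val);	/* kernel: __xchg_asm("ll", "sc", ...) */
}

static inline unsigned long long xchg_u64_sketch(volatile unsigned long long *m,
						 unsigned long long val)
{
	return __xchg_sketch(m, val);	/* kernel: __xchg_asm("lld", "scd", ...) */
}
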
@@ -24,36 +24,43 @@
 # define __scbeqz "beqz"
 #endif
 
+#define __xchg_asm(ld, st, m, val)				\
+({								\
+	__typeof(*(m)) __ret;					\
+								\
+	if (kernel_uses_llsc) {					\
+		__asm__ __volatile__(				\
+		"	.set	push			\n"	\
+		"	.set	noat			\n"	\
+		"	.set	" MIPS_ISA_ARCH_LEVEL "	\n"	\
+		"1:	" ld "	%0, %2	# __xchg_asm	\n"	\
+		"	.set	mips0			\n"	\
+		"	move	$1, %z3			\n"	\
+		"	.set	" MIPS_ISA_ARCH_LEVEL "	\n"	\
+		"	" st "	$1, %1			\n"	\
+		"\t" __scbeqz "	$1, 1b			\n"	\
+		"	.set	pop			\n"	\
+		: "=&r" (__ret), "=" GCC_OFF_SMALL_ASM() (*m)	\
+		: GCC_OFF_SMALL_ASM() (*m), "Jr" (val)		\
+		: "memory");					\
+	} else {						\
+		unsigned long __flags;				\
+								\
+		raw_local_irq_save(__flags);			\
+		__ret = *m;					\
+		*m = val;					\
+		raw_local_irq_restore(__flags);			\
+	}							\
+								\
+	__ret;							\
+})
+
 static inline unsigned long __xchg_u32(volatile int * m, unsigned int val)
 {
 	__u32 retval;
 
 	smp_mb__before_llsc();
-
-	if (kernel_uses_llsc) {
-		unsigned long dummy;
-
-		__asm__ __volatile__(
-		"	.set	" MIPS_ISA_ARCH_LEVEL "	\n"
-		"1:	ll	%0, %3	# xchg_u32	\n"
-		"	.set	mips0			\n"
-		"	move	%2, %z4			\n"
-		"	.set	" MIPS_ISA_ARCH_LEVEL "	\n"
-		"	sc	%2, %1			\n"
-		"\t" __scbeqz "	%2, 1b			\n"
-		"	.set	mips0			\n"
-		: "=&r" (retval), "=" GCC_OFF_SMALL_ASM() (*m), "=&r" (dummy)
-		: GCC_OFF_SMALL_ASM() (*m), "Jr" (val)
-		: "memory");
-	} else {
-		unsigned long flags;
-
-		raw_local_irq_save(flags);
-		retval = *m;
-		*m = val;
-		raw_local_irq_restore(flags);	/* implies memory barrier  */
-	}
-
+	retval = __xchg_asm("ll", "sc", m, val);
 	smp_llsc_mb();
 
 	return retval;
@@ -65,29 +72,7 @@ static inline __u64 __xchg_u64(volatile __u64 * m, __u64 val)
 	__u64 retval;
 
 	smp_mb__before_llsc();
-
-	if (kernel_uses_llsc) {
-		unsigned long dummy;
-
-		__asm__ __volatile__(
-		"	.set	" MIPS_ISA_ARCH_LEVEL "	\n"
-		"1:	lld	%0, %3	# xchg_u64	\n"
-		"	move	%2, %z4			\n"
-		"	scd	%2, %1			\n"
-		"\t" __scbeqz "	%2, 1b			\n"
-		"	.set	mips0			\n"
-		: "=&r" (retval), "=" GCC_OFF_SMALL_ASM() (*m), "=&r" (dummy)
-		: GCC_OFF_SMALL_ASM() (*m), "Jr" (val)
-		: "memory");
-	} else {
-		unsigned long flags;
-
-		raw_local_irq_save(flags);
-		retval = *m;
-		*m = val;
-		raw_local_irq_restore(flags);	/* implies memory barrier  */
-	}
-
+	retval = __xchg_asm("lld", "scd", m, val);
 	smp_llsc_mb();
 
 	return retval;
...
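
As background, a rough C11 analogue of what the macro's LL/SC loop does (a sketch under stated assumptions, not the kernel implementation: it emulates the retry with a compare-and-swap loop, whereas the hardware ll/sc pair and the `__scbeqz` branch back to label 1 implement the retry directly):

#include <stdatomic.h>

/* Sketch: atomically swap in a new value and return the old one.
 * The kernel's "1: ll ... sc ... __scbeqz ..., 1b" sequence has the
 * same shape: load-linked reads the old value, store-conditional
 * fails if another CPU touched the location in between, and the
 * branch retries from label 1. */
static unsigned int xchg_u32_analogue(_Atomic unsigned int *m, unsigned int val)
{
	unsigned int old = atomic_load_explicit(m, memory_order_relaxed);

	while (!atomic_compare_exchange_weak_explicit(m, &old, val,
						      memory_order_seq_cst,
						      memory_order_relaxed))
		;	/* store failed: another writer intervened, retry */

	return old;
}

(C11's atomic_exchange_explicit() performs the same operation in a single call; the loop form is shown only to mirror the assembly.)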