Commit 94bfb75a authored by Markos Chandras's avatar Markos Chandras

MIPS: asm: Rename GCC_OFF12_ASM to GCC_OFF_SMALL_ASM

The GCC_OFF12_ASM macro is used for 12-bit immediate constraints
but we will also use it for 9-bit constraints on MIPS R6 so we
rename it to something more appropriate.

Cc: Maciej W. Rozycki <macro@linux-mips.org>
Signed-off-by: Markos Chandras <markos.chandras@imgtec.com>
parent a7e07b1a
...@@ -54,7 +54,7 @@ static __inline__ void atomic_##op(int i, atomic_t * v) \ ...@@ -54,7 +54,7 @@ static __inline__ void atomic_##op(int i, atomic_t * v) \
" sc %0, %1 \n" \ " sc %0, %1 \n" \
" beqzl %0, 1b \n" \ " beqzl %0, 1b \n" \
" .set mips0 \n" \ " .set mips0 \n" \
: "=&r" (temp), "+" GCC_OFF12_ASM() (v->counter) \ : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter) \
: "Ir" (i)); \ : "Ir" (i)); \
} else if (kernel_uses_llsc) { \ } else if (kernel_uses_llsc) { \
int temp; \ int temp; \
...@@ -66,7 +66,7 @@ static __inline__ void atomic_##op(int i, atomic_t * v) \ ...@@ -66,7 +66,7 @@ static __inline__ void atomic_##op(int i, atomic_t * v) \
" " #asm_op " %0, %2 \n" \ " " #asm_op " %0, %2 \n" \
" sc %0, %1 \n" \ " sc %0, %1 \n" \
" .set mips0 \n" \ " .set mips0 \n" \
: "=&r" (temp), "+" GCC_OFF12_ASM() (v->counter) \ : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter) \
: "Ir" (i)); \ : "Ir" (i)); \
} while (unlikely(!temp)); \ } while (unlikely(!temp)); \
} else { \ } else { \
...@@ -97,7 +97,7 @@ static __inline__ int atomic_##op##_return(int i, atomic_t * v) \ ...@@ -97,7 +97,7 @@ static __inline__ int atomic_##op##_return(int i, atomic_t * v) \
" " #asm_op " %0, %1, %3 \n" \ " " #asm_op " %0, %1, %3 \n" \
" .set mips0 \n" \ " .set mips0 \n" \
: "=&r" (result), "=&r" (temp), \ : "=&r" (result), "=&r" (temp), \
"+" GCC_OFF12_ASM() (v->counter) \ "+" GCC_OFF_SMALL_ASM() (v->counter) \
: "Ir" (i)); \ : "Ir" (i)); \
} else if (kernel_uses_llsc) { \ } else if (kernel_uses_llsc) { \
int temp; \ int temp; \
...@@ -110,7 +110,7 @@ static __inline__ int atomic_##op##_return(int i, atomic_t * v) \ ...@@ -110,7 +110,7 @@ static __inline__ int atomic_##op##_return(int i, atomic_t * v) \
" sc %0, %2 \n" \ " sc %0, %2 \n" \
" .set mips0 \n" \ " .set mips0 \n" \
: "=&r" (result), "=&r" (temp), \ : "=&r" (result), "=&r" (temp), \
"+" GCC_OFF12_ASM() (v->counter) \ "+" GCC_OFF_SMALL_ASM() (v->counter) \
: "Ir" (i)); \ : "Ir" (i)); \
} while (unlikely(!result)); \ } while (unlikely(!result)); \
\ \
...@@ -171,8 +171,8 @@ static __inline__ int atomic_sub_if_positive(int i, atomic_t * v) ...@@ -171,8 +171,8 @@ static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
"1: \n" "1: \n"
" .set mips0 \n" " .set mips0 \n"
: "=&r" (result), "=&r" (temp), : "=&r" (result), "=&r" (temp),
"+" GCC_OFF12_ASM() (v->counter) "+" GCC_OFF_SMALL_ASM() (v->counter)
: "Ir" (i), GCC_OFF12_ASM() (v->counter) : "Ir" (i), GCC_OFF_SMALL_ASM() (v->counter)
: "memory"); : "memory");
} else if (kernel_uses_llsc) { } else if (kernel_uses_llsc) {
int temp; int temp;
...@@ -190,7 +190,7 @@ static __inline__ int atomic_sub_if_positive(int i, atomic_t * v) ...@@ -190,7 +190,7 @@ static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
"1: \n" "1: \n"
" .set mips0 \n" " .set mips0 \n"
: "=&r" (result), "=&r" (temp), : "=&r" (result), "=&r" (temp),
"+" GCC_OFF12_ASM() (v->counter) "+" GCC_OFF_SMALL_ASM() (v->counter)
: "Ir" (i)); : "Ir" (i));
} else { } else {
unsigned long flags; unsigned long flags;
...@@ -333,7 +333,7 @@ static __inline__ void atomic64_##op(long i, atomic64_t * v) \ ...@@ -333,7 +333,7 @@ static __inline__ void atomic64_##op(long i, atomic64_t * v) \
" scd %0, %1 \n" \ " scd %0, %1 \n" \
" beqzl %0, 1b \n" \ " beqzl %0, 1b \n" \
" .set mips0 \n" \ " .set mips0 \n" \
: "=&r" (temp), "+" GCC_OFF12_ASM() (v->counter) \ : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter) \
: "Ir" (i)); \ : "Ir" (i)); \
} else if (kernel_uses_llsc) { \ } else if (kernel_uses_llsc) { \
long temp; \ long temp; \
...@@ -345,7 +345,7 @@ static __inline__ void atomic64_##op(long i, atomic64_t * v) \ ...@@ -345,7 +345,7 @@ static __inline__ void atomic64_##op(long i, atomic64_t * v) \
" " #asm_op " %0, %2 \n" \ " " #asm_op " %0, %2 \n" \
" scd %0, %1 \n" \ " scd %0, %1 \n" \
" .set mips0 \n" \ " .set mips0 \n" \
: "=&r" (temp), "+" GCC_OFF12_ASM() (v->counter) \ : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter) \
: "Ir" (i)); \ : "Ir" (i)); \
} while (unlikely(!temp)); \ } while (unlikely(!temp)); \
} else { \ } else { \
...@@ -376,7 +376,7 @@ static __inline__ long atomic64_##op##_return(long i, atomic64_t * v) \ ...@@ -376,7 +376,7 @@ static __inline__ long atomic64_##op##_return(long i, atomic64_t * v) \
" " #asm_op " %0, %1, %3 \n" \ " " #asm_op " %0, %1, %3 \n" \
" .set mips0 \n" \ " .set mips0 \n" \
: "=&r" (result), "=&r" (temp), \ : "=&r" (result), "=&r" (temp), \
"+" GCC_OFF12_ASM() (v->counter) \ "+" GCC_OFF_SMALL_ASM() (v->counter) \
: "Ir" (i)); \ : "Ir" (i)); \
} else if (kernel_uses_llsc) { \ } else if (kernel_uses_llsc) { \
long temp; \ long temp; \
...@@ -389,8 +389,8 @@ static __inline__ long atomic64_##op##_return(long i, atomic64_t * v) \ ...@@ -389,8 +389,8 @@ static __inline__ long atomic64_##op##_return(long i, atomic64_t * v) \
" scd %0, %2 \n" \ " scd %0, %2 \n" \
" .set mips0 \n" \ " .set mips0 \n" \
: "=&r" (result), "=&r" (temp), \ : "=&r" (result), "=&r" (temp), \
"=" GCC_OFF12_ASM() (v->counter) \ "=" GCC_OFF_SMALL_ASM() (v->counter) \
: "Ir" (i), GCC_OFF12_ASM() (v->counter) \ : "Ir" (i), GCC_OFF_SMALL_ASM() (v->counter) \
: "memory"); \ : "memory"); \
} while (unlikely(!result)); \ } while (unlikely(!result)); \
\ \
...@@ -452,8 +452,8 @@ static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v) ...@@ -452,8 +452,8 @@ static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
"1: \n" "1: \n"
" .set mips0 \n" " .set mips0 \n"
: "=&r" (result), "=&r" (temp), : "=&r" (result), "=&r" (temp),
"=" GCC_OFF12_ASM() (v->counter) "=" GCC_OFF_SMALL_ASM() (v->counter)
: "Ir" (i), GCC_OFF12_ASM() (v->counter) : "Ir" (i), GCC_OFF_SMALL_ASM() (v->counter)
: "memory"); : "memory");
} else if (kernel_uses_llsc) { } else if (kernel_uses_llsc) {
long temp; long temp;
...@@ -471,7 +471,7 @@ static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v) ...@@ -471,7 +471,7 @@ static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
"1: \n" "1: \n"
" .set mips0 \n" " .set mips0 \n"
: "=&r" (result), "=&r" (temp), : "=&r" (result), "=&r" (temp),
"+" GCC_OFF12_ASM() (v->counter) "+" GCC_OFF_SMALL_ASM() (v->counter)
: "Ir" (i)); : "Ir" (i));
} else { } else {
unsigned long flags; unsigned long flags;
......
...@@ -79,8 +79,8 @@ static inline void set_bit(unsigned long nr, volatile unsigned long *addr) ...@@ -79,8 +79,8 @@ static inline void set_bit(unsigned long nr, volatile unsigned long *addr)
" " __SC "%0, %1 \n" " " __SC "%0, %1 \n"
" beqzl %0, 1b \n" " beqzl %0, 1b \n"
" .set mips0 \n" " .set mips0 \n"
: "=&r" (temp), "=" GCC_OFF12_ASM() (*m) : "=&r" (temp), "=" GCC_OFF_SMALL_ASM() (*m)
: "ir" (1UL << bit), GCC_OFF12_ASM() (*m)); : "ir" (1UL << bit), GCC_OFF_SMALL_ASM() (*m));
#ifdef CONFIG_CPU_MIPSR2 #ifdef CONFIG_CPU_MIPSR2
} else if (kernel_uses_llsc && __builtin_constant_p(bit)) { } else if (kernel_uses_llsc && __builtin_constant_p(bit)) {
do { do {
...@@ -88,7 +88,7 @@ static inline void set_bit(unsigned long nr, volatile unsigned long *addr) ...@@ -88,7 +88,7 @@ static inline void set_bit(unsigned long nr, volatile unsigned long *addr)
" " __LL "%0, %1 # set_bit \n" " " __LL "%0, %1 # set_bit \n"
" " __INS "%0, %3, %2, 1 \n" " " __INS "%0, %3, %2, 1 \n"
" " __SC "%0, %1 \n" " " __SC "%0, %1 \n"
: "=&r" (temp), "+" GCC_OFF12_ASM() (*m) : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
: "ir" (bit), "r" (~0)); : "ir" (bit), "r" (~0));
} while (unlikely(!temp)); } while (unlikely(!temp));
#endif /* CONFIG_CPU_MIPSR2 */ #endif /* CONFIG_CPU_MIPSR2 */
...@@ -100,7 +100,7 @@ static inline void set_bit(unsigned long nr, volatile unsigned long *addr) ...@@ -100,7 +100,7 @@ static inline void set_bit(unsigned long nr, volatile unsigned long *addr)
" or %0, %2 \n" " or %0, %2 \n"
" " __SC "%0, %1 \n" " " __SC "%0, %1 \n"
" .set mips0 \n" " .set mips0 \n"
: "=&r" (temp), "+" GCC_OFF12_ASM() (*m) : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
: "ir" (1UL << bit)); : "ir" (1UL << bit));
} while (unlikely(!temp)); } while (unlikely(!temp));
} else } else
...@@ -131,7 +131,7 @@ static inline void clear_bit(unsigned long nr, volatile unsigned long *addr) ...@@ -131,7 +131,7 @@ static inline void clear_bit(unsigned long nr, volatile unsigned long *addr)
" " __SC "%0, %1 \n" " " __SC "%0, %1 \n"
" beqzl %0, 1b \n" " beqzl %0, 1b \n"
" .set mips0 \n" " .set mips0 \n"
: "=&r" (temp), "+" GCC_OFF12_ASM() (*m) : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
: "ir" (~(1UL << bit))); : "ir" (~(1UL << bit)));
#ifdef CONFIG_CPU_MIPSR2 #ifdef CONFIG_CPU_MIPSR2
} else if (kernel_uses_llsc && __builtin_constant_p(bit)) { } else if (kernel_uses_llsc && __builtin_constant_p(bit)) {
...@@ -140,7 +140,7 @@ static inline void clear_bit(unsigned long nr, volatile unsigned long *addr) ...@@ -140,7 +140,7 @@ static inline void clear_bit(unsigned long nr, volatile unsigned long *addr)
" " __LL "%0, %1 # clear_bit \n" " " __LL "%0, %1 # clear_bit \n"
" " __INS "%0, $0, %2, 1 \n" " " __INS "%0, $0, %2, 1 \n"
" " __SC "%0, %1 \n" " " __SC "%0, %1 \n"
: "=&r" (temp), "+" GCC_OFF12_ASM() (*m) : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
: "ir" (bit)); : "ir" (bit));
} while (unlikely(!temp)); } while (unlikely(!temp));
#endif /* CONFIG_CPU_MIPSR2 */ #endif /* CONFIG_CPU_MIPSR2 */
...@@ -152,7 +152,7 @@ static inline void clear_bit(unsigned long nr, volatile unsigned long *addr) ...@@ -152,7 +152,7 @@ static inline void clear_bit(unsigned long nr, volatile unsigned long *addr)
" and %0, %2 \n" " and %0, %2 \n"
" " __SC "%0, %1 \n" " " __SC "%0, %1 \n"
" .set mips0 \n" " .set mips0 \n"
: "=&r" (temp), "+" GCC_OFF12_ASM() (*m) : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
: "ir" (~(1UL << bit))); : "ir" (~(1UL << bit)));
} while (unlikely(!temp)); } while (unlikely(!temp));
} else } else
...@@ -197,7 +197,7 @@ static inline void change_bit(unsigned long nr, volatile unsigned long *addr) ...@@ -197,7 +197,7 @@ static inline void change_bit(unsigned long nr, volatile unsigned long *addr)
" " __SC "%0, %1 \n" " " __SC "%0, %1 \n"
" beqzl %0, 1b \n" " beqzl %0, 1b \n"
" .set mips0 \n" " .set mips0 \n"
: "=&r" (temp), "+" GCC_OFF12_ASM() (*m) : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
: "ir" (1UL << bit)); : "ir" (1UL << bit));
} else if (kernel_uses_llsc) { } else if (kernel_uses_llsc) {
unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG); unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
...@@ -210,7 +210,7 @@ static inline void change_bit(unsigned long nr, volatile unsigned long *addr) ...@@ -210,7 +210,7 @@ static inline void change_bit(unsigned long nr, volatile unsigned long *addr)
" xor %0, %2 \n" " xor %0, %2 \n"
" " __SC "%0, %1 \n" " " __SC "%0, %1 \n"
" .set mips0 \n" " .set mips0 \n"
: "=&r" (temp), "+" GCC_OFF12_ASM() (*m) : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
: "ir" (1UL << bit)); : "ir" (1UL << bit));
} while (unlikely(!temp)); } while (unlikely(!temp));
} else } else
...@@ -245,7 +245,7 @@ static inline int test_and_set_bit(unsigned long nr, ...@@ -245,7 +245,7 @@ static inline int test_and_set_bit(unsigned long nr,
" beqzl %2, 1b \n" " beqzl %2, 1b \n"
" and %2, %0, %3 \n" " and %2, %0, %3 \n"
" .set mips0 \n" " .set mips0 \n"
: "=&r" (temp), "+" GCC_OFF12_ASM() (*m), "=&r" (res) : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
: "r" (1UL << bit) : "r" (1UL << bit)
: "memory"); : "memory");
} else if (kernel_uses_llsc) { } else if (kernel_uses_llsc) {
...@@ -259,7 +259,7 @@ static inline int test_and_set_bit(unsigned long nr, ...@@ -259,7 +259,7 @@ static inline int test_and_set_bit(unsigned long nr,
" or %2, %0, %3 \n" " or %2, %0, %3 \n"
" " __SC "%2, %1 \n" " " __SC "%2, %1 \n"
" .set mips0 \n" " .set mips0 \n"
: "=&r" (temp), "+" GCC_OFF12_ASM() (*m), "=&r" (res) : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
: "r" (1UL << bit) : "r" (1UL << bit)
: "memory"); : "memory");
} while (unlikely(!res)); } while (unlikely(!res));
...@@ -313,7 +313,7 @@ static inline int test_and_set_bit_lock(unsigned long nr, ...@@ -313,7 +313,7 @@ static inline int test_and_set_bit_lock(unsigned long nr,
" or %2, %0, %3 \n" " or %2, %0, %3 \n"
" " __SC "%2, %1 \n" " " __SC "%2, %1 \n"
" .set mips0 \n" " .set mips0 \n"
: "=&r" (temp), "+" GCC_OFF12_ASM() (*m), "=&r" (res) : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
: "r" (1UL << bit) : "r" (1UL << bit)
: "memory"); : "memory");
} while (unlikely(!res)); } while (unlikely(!res));
...@@ -355,7 +355,7 @@ static inline int test_and_clear_bit(unsigned long nr, ...@@ -355,7 +355,7 @@ static inline int test_and_clear_bit(unsigned long nr,
" beqzl %2, 1b \n" " beqzl %2, 1b \n"
" and %2, %0, %3 \n" " and %2, %0, %3 \n"
" .set mips0 \n" " .set mips0 \n"
: "=&r" (temp), "+" GCC_OFF12_ASM() (*m), "=&r" (res) : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
: "r" (1UL << bit) : "r" (1UL << bit)
: "memory"); : "memory");
#ifdef CONFIG_CPU_MIPSR2 #ifdef CONFIG_CPU_MIPSR2
...@@ -369,7 +369,7 @@ static inline int test_and_clear_bit(unsigned long nr, ...@@ -369,7 +369,7 @@ static inline int test_and_clear_bit(unsigned long nr,
" " __EXT "%2, %0, %3, 1 \n" " " __EXT "%2, %0, %3, 1 \n"
" " __INS "%0, $0, %3, 1 \n" " " __INS "%0, $0, %3, 1 \n"
" " __SC "%0, %1 \n" " " __SC "%0, %1 \n"
: "=&r" (temp), "+" GCC_OFF12_ASM() (*m), "=&r" (res) : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
: "ir" (bit) : "ir" (bit)
: "memory"); : "memory");
} while (unlikely(!temp)); } while (unlikely(!temp));
...@@ -386,7 +386,7 @@ static inline int test_and_clear_bit(unsigned long nr, ...@@ -386,7 +386,7 @@ static inline int test_and_clear_bit(unsigned long nr,
" xor %2, %3 \n" " xor %2, %3 \n"
" " __SC "%2, %1 \n" " " __SC "%2, %1 \n"
" .set mips0 \n" " .set mips0 \n"
: "=&r" (temp), "+" GCC_OFF12_ASM() (*m), "=&r" (res) : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
: "r" (1UL << bit) : "r" (1UL << bit)
: "memory"); : "memory");
} while (unlikely(!res)); } while (unlikely(!res));
...@@ -428,7 +428,7 @@ static inline int test_and_change_bit(unsigned long nr, ...@@ -428,7 +428,7 @@ static inline int test_and_change_bit(unsigned long nr,
" beqzl %2, 1b \n" " beqzl %2, 1b \n"
" and %2, %0, %3 \n" " and %2, %0, %3 \n"
" .set mips0 \n" " .set mips0 \n"
: "=&r" (temp), "+" GCC_OFF12_ASM() (*m), "=&r" (res) : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
: "r" (1UL << bit) : "r" (1UL << bit)
: "memory"); : "memory");
} else if (kernel_uses_llsc) { } else if (kernel_uses_llsc) {
...@@ -442,7 +442,7 @@ static inline int test_and_change_bit(unsigned long nr, ...@@ -442,7 +442,7 @@ static inline int test_and_change_bit(unsigned long nr,
" xor %2, %0, %3 \n" " xor %2, %0, %3 \n"
" " __SC "\t%2, %1 \n" " " __SC "\t%2, %1 \n"
" .set mips0 \n" " .set mips0 \n"
: "=&r" (temp), "+" GCC_OFF12_ASM() (*m), "=&r" (res) : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
: "r" (1UL << bit) : "r" (1UL << bit)
: "memory"); : "memory");
} while (unlikely(!res)); } while (unlikely(!res));
......
...@@ -31,8 +31,8 @@ static inline unsigned long __xchg_u32(volatile int * m, unsigned int val) ...@@ -31,8 +31,8 @@ static inline unsigned long __xchg_u32(volatile int * m, unsigned int val)
" sc %2, %1 \n" " sc %2, %1 \n"
" beqzl %2, 1b \n" " beqzl %2, 1b \n"
" .set mips0 \n" " .set mips0 \n"
: "=&r" (retval), "=" GCC_OFF12_ASM() (*m), "=&r" (dummy) : "=&r" (retval), "=" GCC_OFF_SMALL_ASM() (*m), "=&r" (dummy)
: GCC_OFF12_ASM() (*m), "Jr" (val) : GCC_OFF_SMALL_ASM() (*m), "Jr" (val)
: "memory"); : "memory");
} else if (kernel_uses_llsc) { } else if (kernel_uses_llsc) {
unsigned long dummy; unsigned long dummy;
...@@ -46,9 +46,9 @@ static inline unsigned long __xchg_u32(volatile int * m, unsigned int val) ...@@ -46,9 +46,9 @@ static inline unsigned long __xchg_u32(volatile int * m, unsigned int val)
" .set arch=r4000 \n" " .set arch=r4000 \n"
" sc %2, %1 \n" " sc %2, %1 \n"
" .set mips0 \n" " .set mips0 \n"
: "=&r" (retval), "=" GCC_OFF12_ASM() (*m), : "=&r" (retval), "=" GCC_OFF_SMALL_ASM() (*m),
"=&r" (dummy) "=&r" (dummy)
: GCC_OFF12_ASM() (*m), "Jr" (val) : GCC_OFF_SMALL_ASM() (*m), "Jr" (val)
: "memory"); : "memory");
} while (unlikely(!dummy)); } while (unlikely(!dummy));
} else { } else {
...@@ -82,8 +82,8 @@ static inline __u64 __xchg_u64(volatile __u64 * m, __u64 val) ...@@ -82,8 +82,8 @@ static inline __u64 __xchg_u64(volatile __u64 * m, __u64 val)
" scd %2, %1 \n" " scd %2, %1 \n"
" beqzl %2, 1b \n" " beqzl %2, 1b \n"
" .set mips0 \n" " .set mips0 \n"
: "=&r" (retval), "=" GCC_OFF12_ASM() (*m), "=&r" (dummy) : "=&r" (retval), "=" GCC_OFF_SMALL_ASM() (*m), "=&r" (dummy)
: GCC_OFF12_ASM() (*m), "Jr" (val) : GCC_OFF_SMALL_ASM() (*m), "Jr" (val)
: "memory"); : "memory");
} else if (kernel_uses_llsc) { } else if (kernel_uses_llsc) {
unsigned long dummy; unsigned long dummy;
...@@ -95,9 +95,9 @@ static inline __u64 __xchg_u64(volatile __u64 * m, __u64 val) ...@@ -95,9 +95,9 @@ static inline __u64 __xchg_u64(volatile __u64 * m, __u64 val)
" move %2, %z4 \n" " move %2, %z4 \n"
" scd %2, %1 \n" " scd %2, %1 \n"
" .set mips0 \n" " .set mips0 \n"
: "=&r" (retval), "=" GCC_OFF12_ASM() (*m), : "=&r" (retval), "=" GCC_OFF_SMALL_ASM() (*m),
"=&r" (dummy) "=&r" (dummy)
: GCC_OFF12_ASM() (*m), "Jr" (val) : GCC_OFF_SMALL_ASM() (*m), "Jr" (val)
: "memory"); : "memory");
} while (unlikely(!dummy)); } while (unlikely(!dummy));
} else { } else {
...@@ -158,8 +158,8 @@ static inline unsigned long __xchg(unsigned long x, volatile void * ptr, int siz ...@@ -158,8 +158,8 @@ static inline unsigned long __xchg(unsigned long x, volatile void * ptr, int siz
" beqzl $1, 1b \n" \ " beqzl $1, 1b \n" \
"2: \n" \ "2: \n" \
" .set pop \n" \ " .set pop \n" \
: "=&r" (__ret), "=" GCC_OFF12_ASM() (*m) \ : "=&r" (__ret), "=" GCC_OFF_SMALL_ASM() (*m) \
: GCC_OFF12_ASM() (*m), "Jr" (old), "Jr" (new) \ : GCC_OFF_SMALL_ASM() (*m), "Jr" (old), "Jr" (new) \
: "memory"); \ : "memory"); \
} else if (kernel_uses_llsc) { \ } else if (kernel_uses_llsc) { \
__asm__ __volatile__( \ __asm__ __volatile__( \
...@@ -175,8 +175,8 @@ static inline unsigned long __xchg(unsigned long x, volatile void * ptr, int siz ...@@ -175,8 +175,8 @@ static inline unsigned long __xchg(unsigned long x, volatile void * ptr, int siz
" beqz $1, 1b \n" \ " beqz $1, 1b \n" \
" .set pop \n" \ " .set pop \n" \
"2: \n" \ "2: \n" \
: "=&r" (__ret), "=" GCC_OFF12_ASM() (*m) \ : "=&r" (__ret), "=" GCC_OFF_SMALL_ASM() (*m) \
: GCC_OFF12_ASM() (*m), "Jr" (old), "Jr" (new) \ : GCC_OFF_SMALL_ASM() (*m), "Jr" (old), "Jr" (new) \
: "memory"); \ : "memory"); \
} else { \ } else { \
unsigned long __flags; \ unsigned long __flags; \
......
...@@ -17,9 +17,9 @@ ...@@ -17,9 +17,9 @@
#endif #endif
#ifndef CONFIG_CPU_MICROMIPS #ifndef CONFIG_CPU_MICROMIPS
#define GCC_OFF12_ASM() "R" #define GCC_OFF_SMALL_ASM() "R"
#elif __GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 9) #elif __GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 9)
#define GCC_OFF12_ASM() "ZC" #define GCC_OFF_SMALL_ASM() "ZC"
#else #else
#error "microMIPS compilation unsupported with GCC older than 4.9" #error "microMIPS compilation unsupported with GCC older than 4.9"
#endif #endif
......
...@@ -26,8 +26,8 @@ static inline void atomic_scrub(void *va, u32 size) ...@@ -26,8 +26,8 @@ static inline void atomic_scrub(void *va, u32 size)
" sc %0, %1 \n" " sc %0, %1 \n"
" beqz %0, 1b \n" " beqz %0, 1b \n"
" .set mips0 \n" " .set mips0 \n"
: "=&r" (temp), "=" GCC_OFF12_ASM() (*virt_addr) : "=&r" (temp), "=" GCC_OFF_SMALL_ASM() (*virt_addr)
: GCC_OFF12_ASM() (*virt_addr)); : GCC_OFF_SMALL_ASM() (*virt_addr));
virt_addr++; virt_addr++;
} }
......
...@@ -45,8 +45,8 @@ ...@@ -45,8 +45,8 @@
" "__UA_ADDR "\t2b, 4b \n" \ " "__UA_ADDR "\t2b, 4b \n" \
" .previous \n" \ " .previous \n" \
: "=r" (ret), "=&r" (oldval), \ : "=r" (ret), "=&r" (oldval), \
"=" GCC_OFF12_ASM() (*uaddr) \ "=" GCC_OFF_SMALL_ASM() (*uaddr) \
: "0" (0), GCC_OFF12_ASM() (*uaddr), "Jr" (oparg), \ : "0" (0), GCC_OFF_SMALL_ASM() (*uaddr), "Jr" (oparg), \
"i" (-EFAULT) \ "i" (-EFAULT) \
: "memory"); \ : "memory"); \
} else if (cpu_has_llsc) { \ } else if (cpu_has_llsc) { \
...@@ -74,8 +74,8 @@ ...@@ -74,8 +74,8 @@
" "__UA_ADDR "\t2b, 4b \n" \ " "__UA_ADDR "\t2b, 4b \n" \
" .previous \n" \ " .previous \n" \
: "=r" (ret), "=&r" (oldval), \ : "=r" (ret), "=&r" (oldval), \
"=" GCC_OFF12_ASM() (*uaddr) \ "=" GCC_OFF_SMALL_ASM() (*uaddr) \
: "0" (0), GCC_OFF12_ASM() (*uaddr), "Jr" (oparg), \ : "0" (0), GCC_OFF_SMALL_ASM() (*uaddr), "Jr" (oparg), \
"i" (-EFAULT) \ "i" (-EFAULT) \
: "memory"); \ : "memory"); \
} else \ } else \
...@@ -174,8 +174,8 @@ futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr, ...@@ -174,8 +174,8 @@ futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
" "__UA_ADDR "\t1b, 4b \n" " "__UA_ADDR "\t1b, 4b \n"
" "__UA_ADDR "\t2b, 4b \n" " "__UA_ADDR "\t2b, 4b \n"
" .previous \n" " .previous \n"
: "+r" (ret), "=&r" (val), "=" GCC_OFF12_ASM() (*uaddr) : "+r" (ret), "=&r" (val), "=" GCC_OFF_SMALL_ASM() (*uaddr)
: GCC_OFF12_ASM() (*uaddr), "Jr" (oldval), "Jr" (newval), : GCC_OFF_SMALL_ASM() (*uaddr), "Jr" (oldval), "Jr" (newval),
"i" (-EFAULT) "i" (-EFAULT)
: "memory"); : "memory");
} else if (cpu_has_llsc) { } else if (cpu_has_llsc) {
...@@ -203,8 +203,8 @@ futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr, ...@@ -203,8 +203,8 @@ futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
" "__UA_ADDR "\t1b, 4b \n" " "__UA_ADDR "\t1b, 4b \n"
" "__UA_ADDR "\t2b, 4b \n" " "__UA_ADDR "\t2b, 4b \n"
" .previous \n" " .previous \n"
: "+r" (ret), "=&r" (val), "=" GCC_OFF12_ASM() (*uaddr) : "+r" (ret), "=&r" (val), "=" GCC_OFF_SMALL_ASM() (*uaddr)
: GCC_OFF12_ASM() (*uaddr), "Jr" (oldval), "Jr" (newval), : GCC_OFF_SMALL_ASM() (*uaddr), "Jr" (oldval), "Jr" (newval),
"i" (-EFAULT) "i" (-EFAULT)
: "memory"); : "memory");
} else } else
......
...@@ -85,8 +85,8 @@ static inline void set_value_reg32(volatile u32 *const addr, ...@@ -85,8 +85,8 @@ static inline void set_value_reg32(volatile u32 *const addr,
" "__beqz"%0, 1b \n" " "__beqz"%0, 1b \n"
" nop \n" " nop \n"
" .set pop \n" " .set pop \n"
: "=&r" (temp), "=" GCC_OFF12_ASM() (*addr) : "=&r" (temp), "=" GCC_OFF_SMALL_ASM() (*addr)
: "ir" (~mask), "ir" (value), GCC_OFF12_ASM() (*addr)); : "ir" (~mask), "ir" (value), GCC_OFF_SMALL_ASM() (*addr));
} }
/* /*
...@@ -106,8 +106,8 @@ static inline void set_reg32(volatile u32 *const addr, ...@@ -106,8 +106,8 @@ static inline void set_reg32(volatile u32 *const addr,
" "__beqz"%0, 1b \n" " "__beqz"%0, 1b \n"
" nop \n" " nop \n"
" .set pop \n" " .set pop \n"
: "=&r" (temp), "=" GCC_OFF12_ASM() (*addr) : "=&r" (temp), "=" GCC_OFF_SMALL_ASM() (*addr)
: "ir" (mask), GCC_OFF12_ASM() (*addr)); : "ir" (mask), GCC_OFF_SMALL_ASM() (*addr));
} }
/* /*
...@@ -127,8 +127,8 @@ static inline void clear_reg32(volatile u32 *const addr, ...@@ -127,8 +127,8 @@ static inline void clear_reg32(volatile u32 *const addr,
" "__beqz"%0, 1b \n" " "__beqz"%0, 1b \n"
" nop \n" " nop \n"
" .set pop \n" " .set pop \n"
: "=&r" (temp), "=" GCC_OFF12_ASM() (*addr) : "=&r" (temp), "=" GCC_OFF_SMALL_ASM() (*addr)
: "ir" (~mask), GCC_OFF12_ASM() (*addr)); : "ir" (~mask), GCC_OFF_SMALL_ASM() (*addr));
} }
/* /*
...@@ -148,8 +148,8 @@ static inline void toggle_reg32(volatile u32 *const addr, ...@@ -148,8 +148,8 @@ static inline void toggle_reg32(volatile u32 *const addr,
" "__beqz"%0, 1b \n" " "__beqz"%0, 1b \n"
" nop \n" " nop \n"
" .set pop \n" " .set pop \n"
: "=&r" (temp), "=" GCC_OFF12_ASM() (*addr) : "=&r" (temp), "=" GCC_OFF_SMALL_ASM() (*addr)
: "ir" (mask), GCC_OFF12_ASM() (*addr)); : "ir" (mask), GCC_OFF_SMALL_ASM() (*addr));
} }
/* /*
...@@ -220,8 +220,8 @@ static inline u32 blocking_read_reg32(volatile u32 *const addr) ...@@ -220,8 +220,8 @@ static inline u32 blocking_read_reg32(volatile u32 *const addr)
" .set arch=r4000 \n" \ " .set arch=r4000 \n" \
"1: ll %0, %1 #custom_read_reg32 \n" \ "1: ll %0, %1 #custom_read_reg32 \n" \
" .set pop \n" \ " .set pop \n" \
: "=r" (tmp), "=" GCC_OFF12_ASM() (*address) \ : "=r" (tmp), "=" GCC_OFF_SMALL_ASM() (*address) \
: GCC_OFF12_ASM() (*address)) : GCC_OFF_SMALL_ASM() (*address))
#define custom_write_reg32(address, tmp) \ #define custom_write_reg32(address, tmp) \
__asm__ __volatile__( \ __asm__ __volatile__( \
...@@ -231,7 +231,7 @@ static inline u32 blocking_read_reg32(volatile u32 *const addr) ...@@ -231,7 +231,7 @@ static inline u32 blocking_read_reg32(volatile u32 *const addr)
" "__beqz"%0, 1b \n" \ " "__beqz"%0, 1b \n" \
" nop \n" \ " nop \n" \
" .set pop \n" \ " .set pop \n" \
: "=&r" (tmp), "=" GCC_OFF12_ASM() (*address) \ : "=&r" (tmp), "=" GCC_OFF_SMALL_ASM() (*address) \
: "0" (tmp), GCC_OFF12_ASM() (*address)) : "0" (tmp), GCC_OFF_SMALL_ASM() (*address))
#endif /* __ASM_REGOPS_H__ */ #endif /* __ASM_REGOPS_H__ */
...@@ -275,7 +275,7 @@ static inline void __cvmx_cmd_queue_lock(cvmx_cmd_queue_id_t queue_id, ...@@ -275,7 +275,7 @@ static inline void __cvmx_cmd_queue_lock(cvmx_cmd_queue_id_t queue_id,
" lbu %[ticket], %[now_serving]\n" " lbu %[ticket], %[now_serving]\n"
"4:\n" "4:\n"
".set pop\n" : ".set pop\n" :
[ticket_ptr] "=" GCC_OFF12_ASM()(__cvmx_cmd_queue_state_ptr->ticket[__cvmx_cmd_queue_get_index(queue_id)]), [ticket_ptr] "=" GCC_OFF_SMALL_ASM()(__cvmx_cmd_queue_state_ptr->ticket[__cvmx_cmd_queue_get_index(queue_id)]),
[now_serving] "=m"(qptr->now_serving), [ticket] "=r"(tmp), [now_serving] "=m"(qptr->now_serving), [ticket] "=r"(tmp),
[my_ticket] "=r"(my_ticket) [my_ticket] "=r"(my_ticket)
); );
......
...@@ -89,7 +89,7 @@ static inline void arch_spin_lock(arch_spinlock_t *lock) ...@@ -89,7 +89,7 @@ static inline void arch_spin_lock(arch_spinlock_t *lock)
" subu %[ticket], %[ticket], 1 \n" " subu %[ticket], %[ticket], 1 \n"
" .previous \n" " .previous \n"
" .set pop \n" " .set pop \n"
: [ticket_ptr] "+" GCC_OFF12_ASM() (lock->lock), : [ticket_ptr] "+" GCC_OFF_SMALL_ASM() (lock->lock),
[serving_now_ptr] "+m" (lock->h.serving_now), [serving_now_ptr] "+m" (lock->h.serving_now),
[ticket] "=&r" (tmp), [ticket] "=&r" (tmp),
[my_ticket] "=&r" (my_ticket) [my_ticket] "=&r" (my_ticket)
...@@ -122,7 +122,7 @@ static inline void arch_spin_lock(arch_spinlock_t *lock) ...@@ -122,7 +122,7 @@ static inline void arch_spin_lock(arch_spinlock_t *lock)
" subu %[ticket], %[ticket], 1 \n" " subu %[ticket], %[ticket], 1 \n"
" .previous \n" " .previous \n"
" .set pop \n" " .set pop \n"
: [ticket_ptr] "+" GCC_OFF12_ASM() (lock->lock), : [ticket_ptr] "+" GCC_OFF_SMALL_ASM() (lock->lock),
[serving_now_ptr] "+m" (lock->h.serving_now), [serving_now_ptr] "+m" (lock->h.serving_now),
[ticket] "=&r" (tmp), [ticket] "=&r" (tmp),
[my_ticket] "=&r" (my_ticket) [my_ticket] "=&r" (my_ticket)
...@@ -164,7 +164,7 @@ static inline unsigned int arch_spin_trylock(arch_spinlock_t *lock) ...@@ -164,7 +164,7 @@ static inline unsigned int arch_spin_trylock(arch_spinlock_t *lock)
" li %[ticket], 0 \n" " li %[ticket], 0 \n"
" .previous \n" " .previous \n"
" .set pop \n" " .set pop \n"
: [ticket_ptr] "+" GCC_OFF12_ASM() (lock->lock), : [ticket_ptr] "+" GCC_OFF_SMALL_ASM() (lock->lock),
[ticket] "=&r" (tmp), [ticket] "=&r" (tmp),
[my_ticket] "=&r" (tmp2), [my_ticket] "=&r" (tmp2),
[now_serving] "=&r" (tmp3) [now_serving] "=&r" (tmp3)
...@@ -188,7 +188,7 @@ static inline unsigned int arch_spin_trylock(arch_spinlock_t *lock) ...@@ -188,7 +188,7 @@ static inline unsigned int arch_spin_trylock(arch_spinlock_t *lock)
" li %[ticket], 0 \n" " li %[ticket], 0 \n"
" .previous \n" " .previous \n"
" .set pop \n" " .set pop \n"
: [ticket_ptr] "+" GCC_OFF12_ASM() (lock->lock), : [ticket_ptr] "+" GCC_OFF_SMALL_ASM() (lock->lock),
[ticket] "=&r" (tmp), [ticket] "=&r" (tmp),
[my_ticket] "=&r" (tmp2), [my_ticket] "=&r" (tmp2),
[now_serving] "=&r" (tmp3) [now_serving] "=&r" (tmp3)
...@@ -235,8 +235,8 @@ static inline void arch_read_lock(arch_rwlock_t *rw) ...@@ -235,8 +235,8 @@ static inline void arch_read_lock(arch_rwlock_t *rw)
" beqzl %1, 1b \n" " beqzl %1, 1b \n"
" nop \n" " nop \n"
" .set reorder \n" " .set reorder \n"
: "=" GCC_OFF12_ASM() (rw->lock), "=&r" (tmp) : "=" GCC_OFF_SMALL_ASM() (rw->lock), "=&r" (tmp)
: GCC_OFF12_ASM() (rw->lock) : GCC_OFF_SMALL_ASM() (rw->lock)
: "memory"); : "memory");
} else { } else {
do { do {
...@@ -245,8 +245,8 @@ static inline void arch_read_lock(arch_rwlock_t *rw) ...@@ -245,8 +245,8 @@ static inline void arch_read_lock(arch_rwlock_t *rw)
" bltz %1, 1b \n" " bltz %1, 1b \n"
" addu %1, 1 \n" " addu %1, 1 \n"
"2: sc %1, %0 \n" "2: sc %1, %0 \n"
: "=" GCC_OFF12_ASM() (rw->lock), "=&r" (tmp) : "=" GCC_OFF_SMALL_ASM() (rw->lock), "=&r" (tmp)
: GCC_OFF12_ASM() (rw->lock) : GCC_OFF_SMALL_ASM() (rw->lock)
: "memory"); : "memory");
} while (unlikely(!tmp)); } while (unlikely(!tmp));
} }
...@@ -269,8 +269,8 @@ static inline void arch_read_unlock(arch_rwlock_t *rw) ...@@ -269,8 +269,8 @@ static inline void arch_read_unlock(arch_rwlock_t *rw)
" sub %1, 1 \n" " sub %1, 1 \n"
" sc %1, %0 \n" " sc %1, %0 \n"
" beqzl %1, 1b \n" " beqzl %1, 1b \n"
: "=" GCC_OFF12_ASM() (rw->lock), "=&r" (tmp) : "=" GCC_OFF_SMALL_ASM() (rw->lock), "=&r" (tmp)
: GCC_OFF12_ASM() (rw->lock) : GCC_OFF_SMALL_ASM() (rw->lock)
: "memory"); : "memory");
} else { } else {
do { do {
...@@ -278,8 +278,8 @@ static inline void arch_read_unlock(arch_rwlock_t *rw) ...@@ -278,8 +278,8 @@ static inline void arch_read_unlock(arch_rwlock_t *rw)
"1: ll %1, %2 # arch_read_unlock \n" "1: ll %1, %2 # arch_read_unlock \n"
" sub %1, 1 \n" " sub %1, 1 \n"
" sc %1, %0 \n" " sc %1, %0 \n"
: "=" GCC_OFF12_ASM() (rw->lock), "=&r" (tmp) : "=" GCC_OFF_SMALL_ASM() (rw->lock), "=&r" (tmp)
: GCC_OFF12_ASM() (rw->lock) : GCC_OFF_SMALL_ASM() (rw->lock)
: "memory"); : "memory");
} while (unlikely(!tmp)); } while (unlikely(!tmp));
} }
...@@ -299,8 +299,8 @@ static inline void arch_write_lock(arch_rwlock_t *rw) ...@@ -299,8 +299,8 @@ static inline void arch_write_lock(arch_rwlock_t *rw)
" beqzl %1, 1b \n" " beqzl %1, 1b \n"
" nop \n" " nop \n"
" .set reorder \n" " .set reorder \n"
: "=" GCC_OFF12_ASM() (rw->lock), "=&r" (tmp) : "=" GCC_OFF_SMALL_ASM() (rw->lock), "=&r" (tmp)
: GCC_OFF12_ASM() (rw->lock) : GCC_OFF_SMALL_ASM() (rw->lock)
: "memory"); : "memory");
} else { } else {
do { do {
...@@ -309,8 +309,8 @@ static inline void arch_write_lock(arch_rwlock_t *rw) ...@@ -309,8 +309,8 @@ static inline void arch_write_lock(arch_rwlock_t *rw)
" bnez %1, 1b \n" " bnez %1, 1b \n"
" lui %1, 0x8000 \n" " lui %1, 0x8000 \n"
"2: sc %1, %0 \n" "2: sc %1, %0 \n"
: "=" GCC_OFF12_ASM() (rw->lock), "=&r" (tmp) : "=" GCC_OFF_SMALL_ASM() (rw->lock), "=&r" (tmp)
: GCC_OFF12_ASM() (rw->lock) : GCC_OFF_SMALL_ASM() (rw->lock)
: "memory"); : "memory");
} while (unlikely(!tmp)); } while (unlikely(!tmp));
} }
...@@ -349,8 +349,8 @@ static inline int arch_read_trylock(arch_rwlock_t *rw) ...@@ -349,8 +349,8 @@ static inline int arch_read_trylock(arch_rwlock_t *rw)
__WEAK_LLSC_MB __WEAK_LLSC_MB
" li %2, 1 \n" " li %2, 1 \n"
"2: \n" "2: \n"
: "=" GCC_OFF12_ASM() (rw->lock), "=&r" (tmp), "=&r" (ret) : "=" GCC_OFF_SMALL_ASM() (rw->lock), "=&r" (tmp), "=&r" (ret)
: GCC_OFF12_ASM() (rw->lock) : GCC_OFF_SMALL_ASM() (rw->lock)
: "memory"); : "memory");
} else { } else {
__asm__ __volatile__( __asm__ __volatile__(
...@@ -366,8 +366,8 @@ static inline int arch_read_trylock(arch_rwlock_t *rw) ...@@ -366,8 +366,8 @@ static inline int arch_read_trylock(arch_rwlock_t *rw)
__WEAK_LLSC_MB __WEAK_LLSC_MB
" li %2, 1 \n" " li %2, 1 \n"
"2: \n" "2: \n"
: "=" GCC_OFF12_ASM() (rw->lock), "=&r" (tmp), "=&r" (ret) : "=" GCC_OFF_SMALL_ASM() (rw->lock), "=&r" (tmp), "=&r" (ret)
: GCC_OFF12_ASM() (rw->lock) : GCC_OFF_SMALL_ASM() (rw->lock)
: "memory"); : "memory");
} }
...@@ -393,8 +393,8 @@ static inline int arch_write_trylock(arch_rwlock_t *rw) ...@@ -393,8 +393,8 @@ static inline int arch_write_trylock(arch_rwlock_t *rw)
" li %2, 1 \n" " li %2, 1 \n"
" .set reorder \n" " .set reorder \n"
"2: \n" "2: \n"
: "=" GCC_OFF12_ASM() (rw->lock), "=&r" (tmp), "=&r" (ret) : "=" GCC_OFF_SMALL_ASM() (rw->lock), "=&r" (tmp), "=&r" (ret)
: GCC_OFF12_ASM() (rw->lock) : GCC_OFF_SMALL_ASM() (rw->lock)
: "memory"); : "memory");
} else { } else {
do { do {
...@@ -406,9 +406,9 @@ static inline int arch_write_trylock(arch_rwlock_t *rw) ...@@ -406,9 +406,9 @@ static inline int arch_write_trylock(arch_rwlock_t *rw)
" sc %1, %0 \n" " sc %1, %0 \n"
" li %2, 1 \n" " li %2, 1 \n"
"2: \n" "2: \n"
: "=" GCC_OFF12_ASM() (rw->lock), "=&r" (tmp), : "=" GCC_OFF_SMALL_ASM() (rw->lock), "=&r" (tmp),
"=&r" (ret) "=&r" (ret)
: GCC_OFF12_ASM() (rw->lock) : GCC_OFF_SMALL_ASM() (rw->lock)
: "memory"); : "memory");
} while (unlikely(!tmp)); } while (unlikely(!tmp));
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment