Commit e0f6dec3 authored by H. Peter Anvin's avatar H. Peter Anvin

x86, bitops: Correct the assembly constraints to testing bitops

In checkin:

0c44c2d0 x86: Use asm goto to implement better modify_and_test() functions

the various functions which do modify and test were unified and
optimized using "asm goto".  However, this change missed the detail
that the bitops require an "Ir" constraint rather than an "er"
constraint ("I" = integer constant from 0-31, "e" = signed 32-bit
integer constant).  This would cause code to miscompile if these
functions were used on constant bit positions 32-255 and the build to
fail if used on constant bit positions above 255.

Add the constraints as a parameter to the GEN_BINARY_RMWcc() macro to
avoid this problem.
Reported-by: Jesse Brandeburg <jesse.brandeburg@intel.com>
Signed-off-by: H. Peter Anvin <hpa@linux.intel.com>
Cc: Peter Zijlstra <peterz@infradead.org>
Link: http://lkml.kernel.org/r/529E8719.4070202@zytor.com
parent 5551a34e
...@@ -77,7 +77,7 @@ static inline void atomic_sub(int i, atomic_t *v) ...@@ -77,7 +77,7 @@ static inline void atomic_sub(int i, atomic_t *v)
*/ */
/*
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns true if the result is
 * zero, or false otherwise.  The "er" constraint is correct here: @i is
 * a full 32-bit operand of subl, so any signed 32-bit immediate ("e")
 * or register ("r") is acceptable.
 */
static inline int atomic_sub_and_test(int i, atomic_t *v)
{
	GEN_BINARY_RMWcc(LOCK_PREFIX "subl", v->counter, "er", i, "%0", "e");
}
/** /**
...@@ -141,7 +141,7 @@ static inline int atomic_inc_and_test(atomic_t *v) ...@@ -141,7 +141,7 @@ static inline int atomic_inc_and_test(atomic_t *v)
*/ */
/*
 * atomic_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns true if the result is negative,
 * or false when the result is greater than or equal to zero.  "er" is
 * correct for a full 32-bit addl operand (signed immediate or register).
 */
static inline int atomic_add_negative(int i, atomic_t *v)
{
	GEN_BINARY_RMWcc(LOCK_PREFIX "addl", v->counter, "er", i, "%0", "s");
}
/** /**
......
...@@ -72,7 +72,7 @@ static inline void atomic64_sub(long i, atomic64_t *v) ...@@ -72,7 +72,7 @@ static inline void atomic64_sub(long i, atomic64_t *v)
*/ */
/*
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer to type atomic64_t
 *
 * Atomically subtracts @i from @v and returns true if the result is
 * zero, or false otherwise.  subq only takes a sign-extended 32-bit
 * immediate, hence "e" (not "i") alongside "r".
 */
static inline int atomic64_sub_and_test(long i, atomic64_t *v)
{
	GEN_BINARY_RMWcc(LOCK_PREFIX "subq", v->counter, "er", i, "%0", "e");
}
/** /**
...@@ -138,7 +138,7 @@ static inline int atomic64_inc_and_test(atomic64_t *v) ...@@ -138,7 +138,7 @@ static inline int atomic64_inc_and_test(atomic64_t *v)
*/ */
/*
 * atomic64_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v and returns true if the result is negative,
 * or false when the result is greater than or equal to zero.  addq only
 * takes a sign-extended 32-bit immediate, hence "e" alongside "r".
 */
static inline int atomic64_add_negative(long i, atomic64_t *v)
{
	GEN_BINARY_RMWcc(LOCK_PREFIX "addq", v->counter, "er", i, "%0", "s");
}
/** /**
......
...@@ -205,7 +205,7 @@ static inline void change_bit(long nr, volatile unsigned long *addr) ...@@ -205,7 +205,7 @@ static inline void change_bit(long nr, volatile unsigned long *addr)
*/ */
static inline int test_and_set_bit(long nr, volatile unsigned long *addr) static inline int test_and_set_bit(long nr, volatile unsigned long *addr)
{ {
GEN_BINARY_RMWcc(LOCK_PREFIX "bts", *addr, nr, "%0", "c"); GEN_BINARY_RMWcc(LOCK_PREFIX "bts", *addr, "Ir", nr, "%0", "c");
} }
/** /**
...@@ -251,7 +251,7 @@ static inline int __test_and_set_bit(long nr, volatile unsigned long *addr) ...@@ -251,7 +251,7 @@ static inline int __test_and_set_bit(long nr, volatile unsigned long *addr)
*/ */
static inline int test_and_clear_bit(long nr, volatile unsigned long *addr) static inline int test_and_clear_bit(long nr, volatile unsigned long *addr)
{ {
GEN_BINARY_RMWcc(LOCK_PREFIX "btr", *addr, nr, "%0", "c"); GEN_BINARY_RMWcc(LOCK_PREFIX "btr", *addr, "Ir", nr, "%0", "c");
} }
/** /**
...@@ -304,7 +304,7 @@ static inline int __test_and_change_bit(long nr, volatile unsigned long *addr) ...@@ -304,7 +304,7 @@ static inline int __test_and_change_bit(long nr, volatile unsigned long *addr)
*/ */
static inline int test_and_change_bit(long nr, volatile unsigned long *addr) static inline int test_and_change_bit(long nr, volatile unsigned long *addr)
{ {
GEN_BINARY_RMWcc(LOCK_PREFIX "btc", *addr, nr, "%0", "c"); GEN_BINARY_RMWcc(LOCK_PREFIX "btc", *addr, "Ir", nr, "%0", "c");
} }
static __always_inline int constant_test_bit(long nr, const volatile unsigned long *addr) static __always_inline int constant_test_bit(long nr, const volatile unsigned long *addr)
......
...@@ -52,7 +52,7 @@ static inline void local_sub(long i, local_t *l) ...@@ -52,7 +52,7 @@ static inline void local_sub(long i, local_t *l)
*/ */
/*
 * local_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @l: pointer to type local_t
 *
 * Subtracts @i from @l (CPU-local, no LOCK prefix) and returns true if
 * the result is zero, or false otherwise.  "er" is correct: @i is a
 * full-width sub operand.
 */
static inline int local_sub_and_test(long i, local_t *l)
{
	GEN_BINARY_RMWcc(_ASM_SUB, l->a.counter, "er", i, "%0", "e");
}
/** /**
...@@ -92,7 +92,7 @@ static inline int local_inc_and_test(local_t *l) ...@@ -92,7 +92,7 @@ static inline int local_inc_and_test(local_t *l)
*/ */
/*
 * local_add_negative - add and test if negative
 * @i: integer value to add
 * @l: pointer to type local_t
 *
 * Adds @i to @l (CPU-local, no LOCK prefix) and returns true if the
 * result is negative, or false when the result is greater than or equal
 * to zero.  "er" is correct: @i is a full-width add operand.
 */
static inline int local_add_negative(long i, local_t *l)
{
	GEN_BINARY_RMWcc(_ASM_ADD, l->a.counter, "er", i, "%0", "s");
}
/** /**
......
...@@ -16,8 +16,8 @@ cc_label: \ ...@@ -16,8 +16,8 @@ cc_label: \
/*
 * asm-goto variants.  GEN_BINARY_RMWcc now takes the input constraint
 * string @vcon as an explicit argument instead of hard-coding "er":
 * arithmetic users pass "er", but the bitops need "Ir" ("I" = constant
 * 0-31), and baking in "er" miscompiled constant bit numbers >= 32.
 */
#define GEN_UNARY_RMWcc(op, var, arg0, cc)				\
	__GEN_RMWcc(op " " arg0, var, cc)

#define GEN_BINARY_RMWcc(op, var, vcon, val, arg0, cc)			\
	__GEN_RMWcc(op " %1, " arg0, var, cc, vcon (val))
#else /* !CC_HAVE_ASM_GOTO */ #else /* !CC_HAVE_ASM_GOTO */
...@@ -33,8 +33,8 @@ do { \ ...@@ -33,8 +33,8 @@ do { \
/*
 * Fallback (no asm goto) variants.  Same @vcon parameterization as the
 * asm-goto versions above; operand %2 here because the setcc output
 * occupies an extra operand slot in this expansion.
 */
#define GEN_UNARY_RMWcc(op, var, arg0, cc)				\
	__GEN_RMWcc(op " " arg0, var, cc)

#define GEN_BINARY_RMWcc(op, var, vcon, val, arg0, cc)			\
	__GEN_RMWcc(op " %2, " arg0, var, cc, vcon (val))
#endif /* CC_HAVE_ASM_GOTO */ #endif /* CC_HAVE_ASM_GOTO */
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment