Commit b387dc04 authored by Max Filippov

xtensa: use macros to generate *_bit and test_and_*_bit functions

Parameterize macros with function name, opcode and inversion pattern.
This reduces code duplication by removing two thirds of the definitions.
Signed-off-by: Max Filippov <jcmvbkbc@gmail.com>
parent cbc6e287
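
The patch relies on two preprocessor features: ## token pasting to build each function name, and splicing an empty or ~ argument in front of the mask. As a quick illustration, here is a hedged, user-space-only sketch of the same technique, with a plain C operator standing in for the atomic assembly loop (all names and the c_op parameter are invented for this example; this is not the kernel code):

    #include <stdio.h>

    /* Sketch only: one template generates op##_bit() functions.
     * `c_op` is a C operator passed as a macro argument, and `inv`
     * is pasted in front of the mask, so passing `~` inverts it.
     * The plain store is NOT atomic; it just mirrors the shape. */
    #define BIT_OP(op, c_op, inv)                                   \
    static inline void op##_bit(unsigned int bit, unsigned long *p) \
    {                                                               \
            unsigned long mask = 1UL << (bit & 31);                 \
            p += bit >> 5;                                          \
            *p = *p c_op (inv mask);                                \
    }

    BIT_OP(set,    |,  )    /* generates set_bit()    */
    BIT_OP(clear,  &, ~)    /* generates clear_bit()  */
    BIT_OP(change, ^,  )    /* generates change_bit() */

    int main(void)
    {
            unsigned long w[2] = { 0, 0 };

            set_bit(35, w);                         /* word 1, bit 3 */
            printf("%#lx %#lx\n", w[0], w[1]);      /* prints 0 0x8  */
            change_bit(35, w);
            printf("%#lx %#lx\n", w[0], w[1]);      /* prints 0 0    */
            return 0;
    }

Passing the operation and the optional ~ as macro arguments is what lets one template stamp out all six functions in the real patch below.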
@@ -98,247 +98,110 @@ static inline unsigned long __fls(unsigned long word)
 
 #if XCHAL_HAVE_EXCLUSIVE
 
-static inline void set_bit(unsigned int bit, volatile unsigned long *p)
-{
-	unsigned long tmp;
-	unsigned long mask = 1UL << (bit & 31);
-
-	p += bit >> 5;
-
-	__asm__ __volatile__(
-			"1:     l32ex   %0, %2\n"
-			"       or      %0, %0, %1\n"
-			"       s32ex   %0, %2\n"
-			"       getex   %0\n"
-			"       beqz    %0, 1b\n"
-			: "=&a" (tmp)
-			: "a" (mask), "a" (p)
-			: "memory");
-}
-
-static inline void clear_bit(unsigned int bit, volatile unsigned long *p)
-{
-	unsigned long tmp;
-	unsigned long mask = 1UL << (bit & 31);
-
-	p += bit >> 5;
-
-	__asm__ __volatile__(
-			"1:     l32ex   %0, %2\n"
-			"       and     %0, %0, %1\n"
-			"       s32ex   %0, %2\n"
-			"       getex   %0\n"
-			"       beqz    %0, 1b\n"
-			: "=&a" (tmp)
-			: "a" (~mask), "a" (p)
-			: "memory");
-}
-
-static inline void change_bit(unsigned int bit, volatile unsigned long *p)
-{
-	unsigned long tmp;
-	unsigned long mask = 1UL << (bit & 31);
-
-	p += bit >> 5;
-
-	__asm__ __volatile__(
-			"1:     l32ex   %0, %2\n"
-			"       xor     %0, %0, %1\n"
-			"       s32ex   %0, %2\n"
-			"       getex   %0\n"
-			"       beqz    %0, 1b\n"
-			: "=&a" (tmp)
-			: "a" (mask), "a" (p)
-			: "memory");
-}
-
-static inline int
-test_and_set_bit(unsigned int bit, volatile unsigned long *p)
-{
-	unsigned long tmp, value;
-	unsigned long mask = 1UL << (bit & 31);
-
-	p += bit >> 5;
-
-	__asm__ __volatile__(
-			"1:     l32ex   %1, %3\n"
-			"       or      %0, %1, %2\n"
-			"       s32ex   %0, %3\n"
-			"       getex   %0\n"
-			"       beqz    %0, 1b\n"
-			: "=&a" (tmp), "=&a" (value)
-			: "a" (mask), "a" (p)
-			: "memory");
-
-	return value & mask;
-}
-
-static inline int
-test_and_clear_bit(unsigned int bit, volatile unsigned long *p)
-{
-	unsigned long tmp, value;
-	unsigned long mask = 1UL << (bit & 31);
-
-	p += bit >> 5;
-
-	__asm__ __volatile__(
-			"1:     l32ex   %1, %3\n"
-			"       and     %0, %1, %2\n"
-			"       s32ex   %0, %3\n"
-			"       getex   %0\n"
-			"       beqz    %0, 1b\n"
-			: "=&a" (tmp), "=&a" (value)
-			: "a" (~mask), "a" (p)
-			: "memory");
-
-	return value & mask;
-}
-
-static inline int
-test_and_change_bit(unsigned int bit, volatile unsigned long *p)
-{
-	unsigned long tmp, value;
-	unsigned long mask = 1UL << (bit & 31);
-
-	p += bit >> 5;
-
-	__asm__ __volatile__(
-			"1:     l32ex   %1, %3\n"
-			"       xor     %0, %1, %2\n"
-			"       s32ex   %0, %3\n"
-			"       getex   %0\n"
-			"       beqz    %0, 1b\n"
-			: "=&a" (tmp), "=&a" (value)
-			: "a" (mask), "a" (p)
-			: "memory");
-
-	return value & mask;
-}
+#define BIT_OP(op, insn, inv)  \
+static inline void op##_bit(unsigned int bit, volatile unsigned long *p)  \
+{  \
+	unsigned long tmp;  \
+	unsigned long mask = 1UL << (bit & 31);  \
+	\
+	p += bit >> 5;  \
+	\
+	__asm__ __volatile__(  \
+			"1:     l32ex   %0, %2\n"  \
+			"      "insn"   %0, %0, %1\n"  \
+			"       s32ex   %0, %2\n"  \
+			"       getex   %0\n"  \
+			"       beqz    %0, 1b\n"  \
+			: "=&a" (tmp)  \
+			: "a" (inv mask), "a" (p)  \
+			: "memory");  \
+}
+
+#define TEST_AND_BIT_OP(op, insn, inv)  \
+static inline int  \
+test_and_##op##_bit(unsigned int bit, volatile unsigned long *p)  \
+{  \
+	unsigned long tmp, value;  \
+	unsigned long mask = 1UL << (bit & 31);  \
+	\
+	p += bit >> 5;  \
+	\
+	__asm__ __volatile__(  \
+			"1:     l32ex   %1, %3\n"  \
+			"      "insn"   %0, %1, %2\n"  \
+			"       s32ex   %0, %3\n"  \
+			"       getex   %0\n"  \
+			"       beqz    %0, 1b\n"  \
+			: "=&a" (tmp), "=&a" (value)  \
+			: "a" (inv mask), "a" (p)  \
+			: "memory");  \
+	\
+	return value & mask;  \
+}
 
 #elif XCHAL_HAVE_S32C1I
 
-static inline void set_bit(unsigned int bit, volatile unsigned long *p)
-{
-	unsigned long tmp, value;
-	unsigned long mask = 1UL << (bit & 31);
-
-	p += bit >> 5;
-
-	__asm__ __volatile__(
-			"1:     l32i    %1, %3, 0\n"
-			"       wsr     %1, scompare1\n"
-			"       or      %0, %1, %2\n"
-			"       s32c1i  %0, %3, 0\n"
-			"       bne     %0, %1, 1b\n"
-			: "=&a" (tmp), "=&a" (value)
-			: "a" (mask), "a" (p)
-			: "memory");
-}
-
-static inline void clear_bit(unsigned int bit, volatile unsigned long *p)
-{
-	unsigned long tmp, value;
-	unsigned long mask = 1UL << (bit & 31);
-
-	p += bit >> 5;
-
-	__asm__ __volatile__(
-			"1:     l32i    %1, %3, 0\n"
-			"       wsr     %1, scompare1\n"
-			"       and     %0, %1, %2\n"
-			"       s32c1i  %0, %3, 0\n"
-			"       bne     %0, %1, 1b\n"
-			: "=&a" (tmp), "=&a" (value)
-			: "a" (~mask), "a" (p)
-			: "memory");
-}
-
-static inline void change_bit(unsigned int bit, volatile unsigned long *p)
-{
-	unsigned long tmp, value;
-	unsigned long mask = 1UL << (bit & 31);
-
-	p += bit >> 5;
-
-	__asm__ __volatile__(
-			"1:     l32i    %1, %3, 0\n"
-			"       wsr     %1, scompare1\n"
-			"       xor     %0, %1, %2\n"
-			"       s32c1i  %0, %3, 0\n"
-			"       bne     %0, %1, 1b\n"
-			: "=&a" (tmp), "=&a" (value)
-			: "a" (mask), "a" (p)
-			: "memory");
-}
-
-static inline int
-test_and_set_bit(unsigned int bit, volatile unsigned long *p)
-{
-	unsigned long tmp, value;
-	unsigned long mask = 1UL << (bit & 31);
-
-	p += bit >> 5;
-
-	__asm__ __volatile__(
-			"1:     l32i    %1, %3, 0\n"
-			"       wsr     %1, scompare1\n"
-			"       or      %0, %1, %2\n"
-			"       s32c1i  %0, %3, 0\n"
-			"       bne     %0, %1, 1b\n"
-			: "=&a" (tmp), "=&a" (value)
-			: "a" (mask), "a" (p)
-			: "memory");
-
-	return tmp & mask;
-}
-
-static inline int
-test_and_clear_bit(unsigned int bit, volatile unsigned long *p)
-{
-	unsigned long tmp, value;
-	unsigned long mask = 1UL << (bit & 31);
-
-	p += bit >> 5;
-
-	__asm__ __volatile__(
-			"1:     l32i    %1, %3, 0\n"
-			"       wsr     %1, scompare1\n"
-			"       and     %0, %1, %2\n"
-			"       s32c1i  %0, %3, 0\n"
-			"       bne     %0, %1, 1b\n"
-			: "=&a" (tmp), "=&a" (value)
-			: "a" (~mask), "a" (p)
-			: "memory");
-
-	return tmp & mask;
-}
-
-static inline int
-test_and_change_bit(unsigned int bit, volatile unsigned long *p)
-{
-	unsigned long tmp, value;
-	unsigned long mask = 1UL << (bit & 31);
-
-	p += bit >> 5;
-
-	__asm__ __volatile__(
-			"1:     l32i    %1, %3, 0\n"
-			"       wsr     %1, scompare1\n"
-			"       xor     %0, %1, %2\n"
-			"       s32c1i  %0, %3, 0\n"
-			"       bne     %0, %1, 1b\n"
-			: "=&a" (tmp), "=&a" (value)
-			: "a" (mask), "a" (p)
-			: "memory");
-
-	return tmp & mask;
-}
+#define BIT_OP(op, insn, inv)  \
+static inline void op##_bit(unsigned int bit, volatile unsigned long *p)  \
+{  \
+	unsigned long tmp, value;  \
+	unsigned long mask = 1UL << (bit & 31);  \
+	\
+	p += bit >> 5;  \
+	\
+	__asm__ __volatile__(  \
+			"1:     l32i    %1, %3, 0\n"  \
+			"       wsr     %1, scompare1\n"  \
+			"      "insn"   %0, %1, %2\n"  \
+			"       s32c1i  %0, %3, 0\n"  \
+			"       bne     %0, %1, 1b\n"  \
+			: "=&a" (tmp), "=&a" (value)  \
+			: "a" (inv mask), "a" (p)  \
+			: "memory");  \
+}
+
+#define TEST_AND_BIT_OP(op, insn, inv)  \
+static inline int  \
+test_and_##op##_bit(unsigned int bit, volatile unsigned long *p)  \
+{  \
+	unsigned long tmp, value;  \
+	unsigned long mask = 1UL << (bit & 31);  \
+	\
+	p += bit >> 5;  \
+	\
+	__asm__ __volatile__(  \
+			"1:     l32i    %1, %3, 0\n"  \
+			"       wsr     %1, scompare1\n"  \
+			"      "insn"   %0, %1, %2\n"  \
+			"       s32c1i  %0, %3, 0\n"  \
+			"       bne     %0, %1, 1b\n"  \
+			: "=&a" (tmp), "=&a" (value)  \
+			: "a" (inv mask), "a" (p)  \
+			: "memory");  \
+	\
+	return tmp & mask;  \
+}
 
 #else
 
+#define BIT_OP(op, insn, inv)
+#define TEST_AND_BIT_OP(op, insn, inv)
+
 #include <asm-generic/bitops/atomic.h>
 
 #endif /* XCHAL_HAVE_S32C1I */
 
+#define BIT_OPS(op, insn, inv)  \
+	BIT_OP(op, insn, inv)  \
+	TEST_AND_BIT_OP(op, insn, inv)
+
+BIT_OPS(set, "or", )
+BIT_OPS(clear, "and", ~)
+BIT_OPS(change, "xor", )
+
+#undef BIT_OPS
+#undef BIT_OP
+#undef TEST_AND_BIT_OP
+
 #include <asm-generic/bitops/find.h>
 #include <asm-generic/bitops/le.h>
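
For context, the generated test_and_*_bit() helpers return the previous value of the bit (the "return value & mask" / "return tmp & mask" lines above), which is what makes them usable as primitive try-locks. A minimal hypothetical caller, with the flag word and helper names invented for illustration:

    /* Assumes the generated bitops from the header above are in scope. */
    static unsigned long my_flags;          /* hypothetical flag word */

    #define MY_BUSY_BIT 0                   /* hypothetical bit index */

    static int try_claim(void)
    {
            /* Old bit was 0 -> we set it and own the resource. */
            return !test_and_set_bit(MY_BUSY_BIT, &my_flags);
    }

    static void release(void)
    {
            clear_bit(MY_BUSY_BIT, &my_flags);
    }

Because both the exclusive (l32ex/s32ex) and compare-and-swap (s32c1i) variants retry until the store succeeds, the read of the old value and the update are atomic with respect to other CPUs.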