Commit 1f51dee7 authored by Peter Zijlstra, committed by Ingo Molnar

locking/atomic, arch/alpha: Implement atomic{,64}_fetch_{add,sub,and,andnot,or,xor}()

Implement the FETCH-OP atomic primitives. These are very similar to the
OP-RETURN primitives we already have, except that they return the value
of the atomic variable _before_ modification.

This is especially useful for irreversible operations such as bitops,
where the value _after_ the operation is not enough to reconstruct the
state prior to it.
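
As an illustration only (not part of this patch), a minimal sketch of
why the FETCH form matters: a hypothetical caller that test-and-clears
a flag bit. atomic_fetch_andnot() hands back the value as it was before
the bits were cleared, so the old state of the flag is still visible;
an OP-RETURN style primitive could not provide that, because the andnot
has already destroyed it.

	/* hypothetical helper, for illustration only */
	static bool test_and_clear_flag(atomic_t *flags, int bit)
	{
		int old = atomic_fetch_andnot(1 << bit, flags);

		return old & (1 << bit);	/* was the flag set before we cleared it? */
	}
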
Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Cc: Andrew Morton <akpm@linux-foundation.org>
Cc: Ivan Kokshaysky <ink@jurassic.park.msu.ru>
Cc: Linus Torvalds <torvalds@linux-foundation.org>
Cc: Matt Turner <mattst88@gmail.com>
Cc: Paul E. McKenney <paulmck@linux.vnet.ibm.com>
Cc: Peter Zijlstra <peterz@infradead.org>
Cc: Richard Henderson <rth@twiddle.net>
Cc: Thomas Gleixner <tglx@linutronix.de>
Cc: linux-alpha@vger.kernel.org
Cc: linux-arch@vger.kernel.org
Cc: linux-kernel@vger.kernel.org
Signed-off-by: Ingo Molnar <mingo@kernel.org>
parent b316ff78
@@ -65,6 +65,25 @@ static inline int atomic_##op##_return(int i, atomic_t *v) \
 	return result; \
 }
 
+#define ATOMIC_FETCH_OP(op, asm_op) \
+static inline int atomic_fetch_##op(int i, atomic_t *v) \
+{ \
+	long temp, result; \
+	smp_mb(); \
+	__asm__ __volatile__( \
+	"1: ldl_l %2,%1\n" \
+	" " #asm_op " %2,%3,%0\n" \
+	" stl_c %0,%1\n" \
+	" beq %0,2f\n" \
+	".subsection 2\n" \
+	"2: br 1b\n" \
+	".previous" \
+	:"=&r" (temp), "=m" (v->counter), "=&r" (result) \
+	:"Ir" (i), "m" (v->counter) : "memory"); \
+	smp_mb(); \
+	return result; \
+}
+
 #define ATOMIC64_OP(op, asm_op) \
 static __inline__ void atomic64_##op(long i, atomic64_t * v) \
 { \
@@ -101,11 +120,32 @@ static __inline__ long atomic64_##op##_return(long i, atomic64_t * v) \
 	return result; \
 }
 
+#define ATOMIC64_FETCH_OP(op, asm_op) \
+static __inline__ long atomic64_fetch_##op(long i, atomic64_t * v) \
+{ \
+	long temp, result; \
+	smp_mb(); \
+	__asm__ __volatile__( \
+	"1: ldq_l %2,%1\n" \
+	" " #asm_op " %2,%3,%0\n" \
+	" stq_c %0,%1\n" \
+	" beq %0,2f\n" \
+	".subsection 2\n" \
+	"2: br 1b\n" \
+	".previous" \
+	:"=&r" (temp), "=m" (v->counter), "=&r" (result) \
+	:"Ir" (i), "m" (v->counter) : "memory"); \
+	smp_mb(); \
+	return result; \
+}
+
 #define ATOMIC_OPS(op) \
 	ATOMIC_OP(op, op##l) \
 	ATOMIC_OP_RETURN(op, op##l) \
+	ATOMIC_FETCH_OP(op, op##l) \
 	ATOMIC64_OP(op, op##q) \
-	ATOMIC64_OP_RETURN(op, op##q)
+	ATOMIC64_OP_RETURN(op, op##q) \
+	ATOMIC64_FETCH_OP(op, op##q)
 
 ATOMIC_OPS(add)
 ATOMIC_OPS(sub)
@@ -113,18 +153,25 @@ ATOMIC_OPS(sub)
 #define atomic_andnot atomic_andnot
 #define atomic64_andnot atomic64_andnot
 
-ATOMIC_OP(and, and)
-ATOMIC_OP(andnot, bic)
-ATOMIC_OP(or, bis)
-ATOMIC_OP(xor, xor)
-ATOMIC64_OP(and, and)
-ATOMIC64_OP(andnot, bic)
-ATOMIC64_OP(or, bis)
-ATOMIC64_OP(xor, xor)
+#define atomic_fetch_or atomic_fetch_or
+
+#undef ATOMIC_OPS
+#define ATOMIC_OPS(op, asm) \
+	ATOMIC_OP(op, asm) \
+	ATOMIC_FETCH_OP(op, asm) \
+	ATOMIC64_OP(op, asm) \
+	ATOMIC64_FETCH_OP(op, asm)
+
+ATOMIC_OPS(and, and)
+ATOMIC_OPS(andnot, bic)
+ATOMIC_OPS(or, bis)
+ATOMIC_OPS(xor, xor)
 
 #undef ATOMIC_OPS
+#undef ATOMIC64_FETCH_OP
 #undef ATOMIC64_OP_RETURN
 #undef ATOMIC64_OP
+#undef ATOMIC_FETCH_OP
 #undef ATOMIC_OP_RETURN
 #undef ATOMIC_OP
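
For reference, a rough hand-expansion of ATOMIC_FETCH_OP(add, addl)
from the first hunk (macro arguments substituted, line continuations
dropped), annotated with comments that are not part of the patch. The
ll/sc loop keeps the pre-modification value in 'result', which is what
the FETCH form returns:

	static inline int atomic_fetch_add(int i, atomic_t *v)
	{
		long temp, result;
		smp_mb();			/* full barrier before the update */
		__asm__ __volatile__(
		"1: ldl_l %2,%1\n"		/* result = load-locked v->counter (old value) */
		" addl %2,%3,%0\n"		/* temp = result + i */
		" stl_c %0,%1\n"		/* store-conditional temp; %0 = 1 on success, 0 on failure */
		" beq %0,2f\n"			/* store failed -> retry from 1: */
		".subsection 2\n"
		"2: br 1b\n"
		".previous"
		:"=&r" (temp), "=m" (v->counter), "=&r" (result)
		:"Ir" (i), "m" (v->counter) : "memory");
		smp_mb();			/* full barrier after the update */
		return result;			/* value of v *before* the add */
	}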