Commit 6e5609a9 authored by Joe Perches, committed by Ingo Molnar

include/asm-x86/rwsem.h: checkpatch cleanups - formatting only

Signed-off-by: Joe Perches <joe@perches.com>
Signed-off-by: Ingo Molnar <mingo@elte.hu>
parent 0f4fc8c1
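
Background for the diff below: this header provides the x86 fast paths behind the generic rw_semaphore API. As a hedged illustration of how the primitives being reformatted here are consumed by ordinary kernel code (my_sem is a made-up name; down_read() and friends are the standard <linux/rwsem.h> entry points), a sketch:

	#include <linux/rwsem.h>

	static DECLARE_RWSEM(my_sem);	/* expands to __RWSEM_INITIALIZER */

	static void reader(void)
	{
		down_read(&my_sem);	/* fast path: LOCK incl, jns */
		/* ... access shared data ... */
		up_read(&my_sem);	/* fast path: LOCK xadd of -1 */
	}

	static void writer(void)
	{
		down_write(&my_sem);	/* LOCK xadd of RWSEM_ACTIVE_WRITE_BIAS */
		/* ... modify shared data ... */
		downgrade_write(&my_sem); /* become a reader without dropping the lock */
		up_read(&my_sem);
	}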
--- a/include/asm-x86/rwsem.h
+++ b/include/asm-x86/rwsem.h
@@ -56,14 +56,16 @@ extern asmregparm struct rw_semaphore *
 /*
  * the semaphore definition
  */
-struct rw_semaphore {
-	signed long		count;
+
 #define RWSEM_UNLOCKED_VALUE		0x00000000
 #define RWSEM_ACTIVE_BIAS		0x00000001
 #define RWSEM_ACTIVE_MASK		0x0000ffff
 #define RWSEM_WAITING_BIAS		(-0x00010000)
 #define RWSEM_ACTIVE_READ_BIAS		RWSEM_ACTIVE_BIAS
 #define RWSEM_ACTIVE_WRITE_BIAS	(RWSEM_WAITING_BIAS + RWSEM_ACTIVE_BIAS)
+
+struct rw_semaphore {
+	signed long		count;
 	spinlock_t		wait_lock;
 	struct list_head	wait_list;
 #ifdef CONFIG_DEBUG_LOCK_ALLOC
@@ -79,8 +81,10 @@ struct rw_semaphore {
 #define __RWSEM_INITIALIZER(name)				\
-{ RWSEM_UNLOCKED_VALUE, __SPIN_LOCK_UNLOCKED((name).wait_lock), \
-  LIST_HEAD_INIT((name).wait_list) __RWSEM_DEP_MAP_INIT(name) }
+{								\
+	RWSEM_UNLOCKED_VALUE, __SPIN_LOCK_UNLOCKED((name).wait_lock), \
+	LIST_HEAD_INIT((name).wait_list) __RWSEM_DEP_MAP_INIT(name) \
+}
 
 #define DECLARE_RWSEM(name)					\
 	struct rw_semaphore name = __RWSEM_INITIALIZER(name)
@@ -100,9 +104,9 @@ do { \
  */
 static inline void __down_read(struct rw_semaphore *sem)
 {
-	__asm__ __volatile__(
-		"# beginning down_read\n\t"
-LOCK_PREFIX	"  incl      (%%eax)\n\t" /* adds 0x00000001, returns the old value */
+	asm volatile("# beginning down_read\n\t"
+		     LOCK_PREFIX "  incl      (%%eax)\n\t"
+		     /* adds 0x00000001, returns the old value */
 		"  jns        1f\n"
 		"  call call_rwsem_down_read_failed\n"
 		"1:\n\t"
@@ -118,21 +122,20 @@ LOCK_PREFIX	"  incl      (%%eax)\n\t" /* adds 0x00000001, returns the old value
 static inline int __down_read_trylock(struct rw_semaphore *sem)
 {
 	__s32 result, tmp;
-	__asm__ __volatile__(
-		"# beginning __down_read_trylock\n\t"
+	asm volatile("# beginning __down_read_trylock\n\t"
 		"  movl      %0,%1\n\t"
 		"1:\n\t"
 		"  movl      %1,%2\n\t"
 		"  addl      %3,%2\n\t"
 		"  jle       2f\n\t"
 LOCK_PREFIX	"  cmpxchgl  %2,%0\n\t"
 		"  jnz       1b\n\t"
 		"2:\n\t"
 		"# ending __down_read_trylock\n\t"
 		: "+m" (sem->count), "=&a" (result), "=&r" (tmp)
 		: "i" (RWSEM_ACTIVE_READ_BIAS)
 		: "memory", "cc");
-	return result>=0 ? 1 : 0;
+	return result >= 0 ? 1 : 0;
 }
 
 /*
@@ -143,10 +146,11 @@ static inline void __down_write_nested(struct rw_semaphore *sem, int subclass)
 	int tmp;
 
 	tmp = RWSEM_ACTIVE_WRITE_BIAS;
-	__asm__ __volatile__(
-		"# beginning down_write\n\t"
-LOCK_PREFIX	"  xadd      %%edx,(%%eax)\n\t" /* subtract 0x0000ffff, returns the old value */
-		"  testl     %%edx,%%edx\n\t" /* was the count 0 before? */
+	asm volatile("# beginning down_write\n\t"
+		     LOCK_PREFIX "  xadd      %%edx,(%%eax)\n\t"
+		     /* subtract 0x0000ffff, returns the old value */
+		     "  testl     %%edx,%%edx\n\t"
+		     /* was the count 0 before? */
 		"  jz        1f\n"
 		"  call call_rwsem_down_write_failed\n"
 		"1:\n"
@@ -180,9 +184,9 @@ static inline int __down_write_trylock(struct rw_semaphore *sem)
 static inline void __up_read(struct rw_semaphore *sem)
 {
 	__s32 tmp = -RWSEM_ACTIVE_READ_BIAS;
-	__asm__ __volatile__(
-		"# beginning __up_read\n\t"
-LOCK_PREFIX	"  xadd      %%edx,(%%eax)\n\t" /* subtracts 1, returns the old value */
+	asm volatile("# beginning __up_read\n\t"
+		     LOCK_PREFIX "  xadd      %%edx,(%%eax)\n\t"
+		     /* subtracts 1, returns the old value */
 		"  jns        1f\n\t"
 		"  call call_rwsem_wake\n"
 		"1:\n"
@@ -197,10 +201,11 @@ LOCK_PREFIX	"  xadd      %%edx,(%%eax)\n\t" /* subtracts 1, returns the old value
  */
 static inline void __up_write(struct rw_semaphore *sem)
 {
-	__asm__ __volatile__(
-		"# beginning __up_write\n\t"
+	asm volatile("# beginning __up_write\n\t"
 		"  movl      %2,%%edx\n\t"
-LOCK_PREFIX	"  xaddl     %%edx,(%%eax)\n\t" /* tries to transition 0xffff0001 -> 0x00000000 */
+		     LOCK_PREFIX "  xaddl     %%edx,(%%eax)\n\t"
+		     /* tries to transition
+			0xffff0001 -> 0x00000000 */
 		"  jz       1f\n"
 		"  call call_rwsem_wake\n"
 		"1:\n\t"
@@ -215,9 +220,9 @@ LOCK_PREFIX	"  xaddl     %%edx,(%%eax)\n\t" /* tries to transition 0xffff0001 -> 0x00000000 */
  */
 static inline void __downgrade_write(struct rw_semaphore *sem)
 {
-	__asm__ __volatile__(
-		"# beginning __downgrade_write\n\t"
-LOCK_PREFIX	"  addl      %2,(%%eax)\n\t" /* transitions 0xZZZZ0001 -> 0xYYYY0001 */
+	asm volatile("# beginning __downgrade_write\n\t"
+		     LOCK_PREFIX "  addl      %2,(%%eax)\n\t"
+		     /* transitions 0xZZZZ0001 -> 0xYYYY0001 */
 		"  jns       1f\n\t"
 		"  call call_rwsem_downgrade_wake\n"
 		"1:\n\t"
@@ -232,8 +237,7 @@ LOCK_PREFIX	"  addl      %2,(%%eax)\n\t" /* transitions 0xZZZZ0001 -> 0xYYYY0001 */
  */
 static inline void rwsem_atomic_add(int delta, struct rw_semaphore *sem)
 {
-	__asm__ __volatile__(
-LOCK_PREFIX	"addl %1,%0"
+	asm volatile(LOCK_PREFIX "addl %1,%0"
 		: "+m" (sem->count)
 		: "ir" (delta));
 }
@@ -245,12 +249,11 @@ static inline int rwsem_atomic_update(int delta, struct rw_semaphore *sem)
 {
 	int tmp = delta;
 
-	__asm__ __volatile__(
-LOCK_PREFIX	"xadd %0,%1"
+	asm volatile(LOCK_PREFIX "xadd %0,%1"
 		: "+r" (tmp), "+m" (sem->count)
 		: : "memory");
 
-	return tmp+delta;
+	return tmp + delta;
 }
 
 static inline int rwsem_is_locked(struct rw_semaphore *sem)
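
To make the magic numbers in the asm comments concrete: a reader adds 0x00000001, a writer adds RWSEM_ACTIVE_WRITE_BIAS = 0xffff0001 (i.e. subtracts 0x0000ffff), and a downgrade adds back 0x00010000 so the writer's 0xffff0001 becomes a single reader's 0x00000001. A minimal standalone userspace sketch of these transitions (an illustration of the count encoding only, not kernel code and not atomic):

	#include <stdio.h>

	#define RWSEM_UNLOCKED_VALUE	0x00000000
	#define RWSEM_ACTIVE_BIAS	0x00000001
	#define RWSEM_WAITING_BIAS	(-0x00010000)
	#define RWSEM_ACTIVE_READ_BIAS	RWSEM_ACTIVE_BIAS
	#define RWSEM_ACTIVE_WRITE_BIAS	(RWSEM_WAITING_BIAS + RWSEM_ACTIVE_BIAS)

	int main(void)
	{
		long count = RWSEM_UNLOCKED_VALUE;
		long old;

		count += RWSEM_ACTIVE_READ_BIAS;	/* __down_read: LOCK incl */
		printf("reader:    0x%08x  sign %s => %s\n", (unsigned int)count,
		       count >= 0 ? "clear" : "set",
		       count >= 0 ? "jns taken (fast path)" : "slow path");

		count -= RWSEM_ACTIVE_READ_BIAS;	/* __up_read: xadd of -1 */

		old = count;
		count += RWSEM_ACTIVE_WRITE_BIAS;	/* __down_write: xadd 0xffff0001 */
		printf("writer:    0x%08x  old value %s zero => %s\n",
		       (unsigned int)count, old == 0 ? "was" : "was not",
		       old == 0 ? "jz taken (fast path)" : "slow path");

		count -= RWSEM_WAITING_BIAS;		/* __downgrade_write: addl 0x10000 */
		printf("downgrade: 0x%08x  (one active reader)\n", (unsigned int)count);
		return 0;
	}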