Commit 8edee057 authored by Ingo Molnar, committed by Linus Torvalds

[PATCH] rename 'lock' to 'slock' in asm-i386/spinlock.h

This renames the x86 implementation of the spinlock_t's 'lock' field to
'slock', to protect against spinlock_t/rwlock_t type mismatches.

This way, if you use a spinlock where a rwlock is expected (or vice
versa), you'll get an obvious compile failure.

build- and boot-tested on x86 SMP+PREEMPT and SMP+!PREEMPT.
Signed-off-by: Ingo Molnar <mingo@elte.hu>
Signed-off-by: Linus Torvalds <torvalds@osdl.org>
parent ab9dd2b7
...@@ -15,7 +15,7 @@ asmlinkage int printk(const char * fmt, ...) ...@@ -15,7 +15,7 @@ asmlinkage int printk(const char * fmt, ...)
*/ */
typedef struct { typedef struct {
volatile unsigned int lock; volatile unsigned int slock;
#ifdef CONFIG_DEBUG_SPINLOCK #ifdef CONFIG_DEBUG_SPINLOCK
unsigned magic; unsigned magic;
#endif #endif
...@@ -43,7 +43,7 @@ typedef struct { ...@@ -43,7 +43,7 @@ typedef struct {
* We make no fairness assumptions. They have a cost. * We make no fairness assumptions. They have a cost.
*/ */
#define spin_is_locked(x) (*(volatile signed char *)(&(x)->lock) <= 0) #define spin_is_locked(x) (*(volatile signed char *)(&(x)->slock) <= 0)
#define spin_unlock_wait(x) do { barrier(); } while(spin_is_locked(x)) #define spin_unlock_wait(x) do { barrier(); } while(spin_is_locked(x))
#define spin_lock_string \ #define spin_lock_string \
...@@ -83,7 +83,7 @@ typedef struct { ...@@ -83,7 +83,7 @@ typedef struct {
#define spin_unlock_string \ #define spin_unlock_string \
"movb $1,%0" \ "movb $1,%0" \
:"=m" (lock->lock) : : "memory" :"=m" (lock->slock) : : "memory"
static inline void _raw_spin_unlock(spinlock_t *lock) static inline void _raw_spin_unlock(spinlock_t *lock)
...@@ -101,7 +101,7 @@ static inline void _raw_spin_unlock(spinlock_t *lock) ...@@ -101,7 +101,7 @@ static inline void _raw_spin_unlock(spinlock_t *lock)
#define spin_unlock_string \ #define spin_unlock_string \
"xchgb %b0, %1" \ "xchgb %b0, %1" \
:"=q" (oldval), "=m" (lock->lock) \ :"=q" (oldval), "=m" (lock->slock) \
:"0" (oldval) : "memory" :"0" (oldval) : "memory"
static inline void _raw_spin_unlock(spinlock_t *lock) static inline void _raw_spin_unlock(spinlock_t *lock)
...@@ -123,7 +123,7 @@ static inline int _raw_spin_trylock(spinlock_t *lock) ...@@ -123,7 +123,7 @@ static inline int _raw_spin_trylock(spinlock_t *lock)
char oldval; char oldval;
__asm__ __volatile__( __asm__ __volatile__(
"xchgb %b0,%1" "xchgb %b0,%1"
:"=q" (oldval), "=m" (lock->lock) :"=q" (oldval), "=m" (lock->slock)
:"0" (0) : "memory"); :"0" (0) : "memory");
return oldval > 0; return oldval > 0;
} }
...@@ -138,7 +138,7 @@ static inline void _raw_spin_lock(spinlock_t *lock) ...@@ -138,7 +138,7 @@ static inline void _raw_spin_lock(spinlock_t *lock)
#endif #endif
__asm__ __volatile__( __asm__ __volatile__(
spin_lock_string spin_lock_string
:"=m" (lock->lock) : : "memory"); :"=m" (lock->slock) : : "memory");
} }
static inline void _raw_spin_lock_flags (spinlock_t *lock, unsigned long flags) static inline void _raw_spin_lock_flags (spinlock_t *lock, unsigned long flags)
...@@ -151,7 +151,7 @@ static inline void _raw_spin_lock_flags (spinlock_t *lock, unsigned long flags) ...@@ -151,7 +151,7 @@ static inline void _raw_spin_lock_flags (spinlock_t *lock, unsigned long flags)
#endif #endif
__asm__ __volatile__( __asm__ __volatile__(
spin_lock_string_flags spin_lock_string_flags
:"=m" (lock->lock) : "r" (flags) : "memory"); :"=m" (lock->slock) : "r" (flags) : "memory");
} }
/* /*
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment