Commit 49edd098 authored by Ralf Baechle

[MIPS] Lockdep: Fix recursion bug.

 trace_hardirqs_off -> atomic_inc -> local_irq_restore -> trace_hardirqs_off
Signed-off-by: Ralf Baechle <ralf@linux-mips.org>
parent 46230aa6
...@@ -79,9 +79,9 @@ static __inline__ void atomic_add(int i, atomic_t * v) ...@@ -79,9 +79,9 @@ static __inline__ void atomic_add(int i, atomic_t * v)
} else { } else {
unsigned long flags; unsigned long flags;
local_irq_save(flags); raw_local_irq_save(flags);
v->counter += i; v->counter += i;
local_irq_restore(flags); raw_local_irq_restore(flags);
} }
} }
...@@ -124,9 +124,9 @@ static __inline__ void atomic_sub(int i, atomic_t * v) ...@@ -124,9 +124,9 @@ static __inline__ void atomic_sub(int i, atomic_t * v)
} else { } else {
unsigned long flags; unsigned long flags;
local_irq_save(flags); raw_local_irq_save(flags);
v->counter -= i; v->counter -= i;
local_irq_restore(flags); raw_local_irq_restore(flags);
} }
} }
...@@ -173,11 +173,11 @@ static __inline__ int atomic_add_return(int i, atomic_t * v) ...@@ -173,11 +173,11 @@ static __inline__ int atomic_add_return(int i, atomic_t * v)
} else { } else {
unsigned long flags; unsigned long flags;
local_irq_save(flags); raw_local_irq_save(flags);
result = v->counter; result = v->counter;
result += i; result += i;
v->counter = result; v->counter = result;
local_irq_restore(flags); raw_local_irq_restore(flags);
} }
smp_mb(); smp_mb();
...@@ -225,11 +225,11 @@ static __inline__ int atomic_sub_return(int i, atomic_t * v) ...@@ -225,11 +225,11 @@ static __inline__ int atomic_sub_return(int i, atomic_t * v)
} else { } else {
unsigned long flags; unsigned long flags;
local_irq_save(flags); raw_local_irq_save(flags);
result = v->counter; result = v->counter;
result -= i; result -= i;
v->counter = result; v->counter = result;
local_irq_restore(flags); raw_local_irq_restore(flags);
} }
smp_mb(); smp_mb();
...@@ -293,12 +293,12 @@ static __inline__ int atomic_sub_if_positive(int i, atomic_t * v) ...@@ -293,12 +293,12 @@ static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
} else { } else {
unsigned long flags; unsigned long flags;
local_irq_save(flags); raw_local_irq_save(flags);
result = v->counter; result = v->counter;
result -= i; result -= i;
if (result >= 0) if (result >= 0)
v->counter = result; v->counter = result;
local_irq_restore(flags); raw_local_irq_restore(flags);
} }
smp_mb(); smp_mb();
...@@ -454,9 +454,9 @@ static __inline__ void atomic64_add(long i, atomic64_t * v) ...@@ -454,9 +454,9 @@ static __inline__ void atomic64_add(long i, atomic64_t * v)
} else { } else {
unsigned long flags; unsigned long flags;
local_irq_save(flags); raw_local_irq_save(flags);
v->counter += i; v->counter += i;
local_irq_restore(flags); raw_local_irq_restore(flags);
} }
} }
...@@ -499,9 +499,9 @@ static __inline__ void atomic64_sub(long i, atomic64_t * v) ...@@ -499,9 +499,9 @@ static __inline__ void atomic64_sub(long i, atomic64_t * v)
} else { } else {
unsigned long flags; unsigned long flags;
local_irq_save(flags); raw_local_irq_save(flags);
v->counter -= i; v->counter -= i;
local_irq_restore(flags); raw_local_irq_restore(flags);
} }
} }
...@@ -548,11 +548,11 @@ static __inline__ long atomic64_add_return(long i, atomic64_t * v) ...@@ -548,11 +548,11 @@ static __inline__ long atomic64_add_return(long i, atomic64_t * v)
} else { } else {
unsigned long flags; unsigned long flags;
local_irq_save(flags); raw_local_irq_save(flags);
result = v->counter; result = v->counter;
result += i; result += i;
v->counter = result; v->counter = result;
local_irq_restore(flags); raw_local_irq_restore(flags);
} }
smp_mb(); smp_mb();
...@@ -600,11 +600,11 @@ static __inline__ long atomic64_sub_return(long i, atomic64_t * v) ...@@ -600,11 +600,11 @@ static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
} else { } else {
unsigned long flags; unsigned long flags;
local_irq_save(flags); raw_local_irq_save(flags);
result = v->counter; result = v->counter;
result -= i; result -= i;
v->counter = result; v->counter = result;
local_irq_restore(flags); raw_local_irq_restore(flags);
} }
smp_mb(); smp_mb();
...@@ -668,12 +668,12 @@ static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v) ...@@ -668,12 +668,12 @@ static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
} else { } else {
unsigned long flags; unsigned long flags;
local_irq_save(flags); raw_local_irq_save(flags);
result = v->counter; result = v->counter;
result -= i; result -= i;
if (result >= 0) if (result >= 0)
v->counter = result; v->counter = result;
local_irq_restore(flags); raw_local_irq_restore(flags);
} }
smp_mb(); smp_mb();
......
...@@ -100,9 +100,9 @@ static inline void set_bit(unsigned long nr, volatile unsigned long *addr) ...@@ -100,9 +100,9 @@ static inline void set_bit(unsigned long nr, volatile unsigned long *addr)
a += nr >> SZLONG_LOG; a += nr >> SZLONG_LOG;
mask = 1UL << bit; mask = 1UL << bit;
local_irq_save(flags); raw_local_irq_save(flags);
*a |= mask; *a |= mask;
local_irq_restore(flags); raw_local_irq_restore(flags);
} }
} }
...@@ -165,9 +165,9 @@ static inline void clear_bit(unsigned long nr, volatile unsigned long *addr) ...@@ -165,9 +165,9 @@ static inline void clear_bit(unsigned long nr, volatile unsigned long *addr)
a += nr >> SZLONG_LOG; a += nr >> SZLONG_LOG;
mask = 1UL << bit; mask = 1UL << bit;
local_irq_save(flags); raw_local_irq_save(flags);
*a &= ~mask; *a &= ~mask;
local_irq_restore(flags); raw_local_irq_restore(flags);
} }
} }
...@@ -220,9 +220,9 @@ static inline void change_bit(unsigned long nr, volatile unsigned long *addr) ...@@ -220,9 +220,9 @@ static inline void change_bit(unsigned long nr, volatile unsigned long *addr)
a += nr >> SZLONG_LOG; a += nr >> SZLONG_LOG;
mask = 1UL << bit; mask = 1UL << bit;
local_irq_save(flags); raw_local_irq_save(flags);
*a ^= mask; *a ^= mask;
local_irq_restore(flags); raw_local_irq_restore(flags);
} }
} }
...@@ -287,10 +287,10 @@ static inline int test_and_set_bit(unsigned long nr, ...@@ -287,10 +287,10 @@ static inline int test_and_set_bit(unsigned long nr,
a += nr >> SZLONG_LOG; a += nr >> SZLONG_LOG;
mask = 1UL << bit; mask = 1UL << bit;
local_irq_save(flags); raw_local_irq_save(flags);
retval = (mask & *a) != 0; retval = (mask & *a) != 0;
*a |= mask; *a |= mask;
local_irq_restore(flags); raw_local_irq_restore(flags);
return retval; return retval;
} }
...@@ -381,10 +381,10 @@ static inline int test_and_clear_bit(unsigned long nr, ...@@ -381,10 +381,10 @@ static inline int test_and_clear_bit(unsigned long nr,
a += nr >> SZLONG_LOG; a += nr >> SZLONG_LOG;
mask = 1UL << bit; mask = 1UL << bit;
local_irq_save(flags); raw_local_irq_save(flags);
retval = (mask & *a) != 0; retval = (mask & *a) != 0;
*a &= ~mask; *a &= ~mask;
local_irq_restore(flags); raw_local_irq_restore(flags);
return retval; return retval;
} }
...@@ -452,10 +452,10 @@ static inline int test_and_change_bit(unsigned long nr, ...@@ -452,10 +452,10 @@ static inline int test_and_change_bit(unsigned long nr,
a += nr >> SZLONG_LOG; a += nr >> SZLONG_LOG;
mask = 1UL << bit; mask = 1UL << bit;
local_irq_save(flags); raw_local_irq_save(flags);
retval = (mask & *a) != 0; retval = (mask & *a) != 0;
*a ^= mask; *a ^= mask;
local_irq_restore(flags); raw_local_irq_restore(flags);
return retval; return retval;
} }
......
...@@ -121,10 +121,10 @@ static inline unsigned long __xchg_u32(volatile int * m, unsigned int val) ...@@ -121,10 +121,10 @@ static inline unsigned long __xchg_u32(volatile int * m, unsigned int val)
} else { } else {
unsigned long flags; unsigned long flags;
local_irq_save(flags); raw_local_irq_save(flags);
retval = *m; retval = *m;
*m = val; *m = val;
local_irq_restore(flags); /* implies memory barrier */ raw_local_irq_restore(flags); /* implies memory barrier */
} }
smp_mb(); smp_mb();
...@@ -169,10 +169,10 @@ static inline __u64 __xchg_u64(volatile __u64 * m, __u64 val) ...@@ -169,10 +169,10 @@ static inline __u64 __xchg_u64(volatile __u64 * m, __u64 val)
} else { } else {
unsigned long flags; unsigned long flags;
local_irq_save(flags); raw_local_irq_save(flags);
retval = *m; retval = *m;
*m = val; *m = val;
local_irq_restore(flags); /* implies memory barrier */ raw_local_irq_restore(flags); /* implies memory barrier */
} }
smp_mb(); smp_mb();
...@@ -250,11 +250,11 @@ static inline unsigned long __cmpxchg_u32(volatile int * m, unsigned long old, ...@@ -250,11 +250,11 @@ static inline unsigned long __cmpxchg_u32(volatile int * m, unsigned long old,
} else { } else {
unsigned long flags; unsigned long flags;
local_irq_save(flags); raw_local_irq_save(flags);
retval = *m; retval = *m;
if (retval == old) if (retval == old)
*m = new; *m = new;
local_irq_restore(flags); /* implies memory barrier */ raw_local_irq_restore(flags); /* implies memory barrier */
} }
smp_mb(); smp_mb();
...@@ -304,11 +304,11 @@ static inline unsigned long __cmpxchg_u64(volatile int * m, unsigned long old, ...@@ -304,11 +304,11 @@ static inline unsigned long __cmpxchg_u64(volatile int * m, unsigned long old,
} else { } else {
unsigned long flags; unsigned long flags;
local_irq_save(flags); raw_local_irq_save(flags);
retval = *m; retval = *m;
if (retval == old) if (retval == old)
*m = new; *m = new;
local_irq_restore(flags); /* implies memory barrier */ raw_local_irq_restore(flags); /* implies memory barrier */
} }
smp_mb(); smp_mb();
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment