Commit 0f5e4816 authored by Tejun Heo

percpu: remove some sparse warnings

Make the following changes to remove some sparse warnings.

* Make DEFINE_PER_CPU_SECTION() declare __pcpu_unique_* before
  defining it.

* Annotate pcpu_extend_area_map() that it is entered with pcpu_lock
  held, releases it and then reacquires it.

* Make percpu related macros use unique nested variable names.

* While at it, add pcpu prefix to __size_call[_return]() macros as
  to-be-implemented sparse annotations will add percpu specific stuff
  to these macros.
Signed-off-by: Tejun Heo <tj@kernel.org>
Reviewed-by: Christoph Lameter <cl@linux-foundation.org>
Cc: Rusty Russell <rusty@rustcorp.com.au>
parent 64ef291f
...@@ -74,31 +74,31 @@ extern void __bad_percpu_size(void); ...@@ -74,31 +74,31 @@ extern void __bad_percpu_size(void);
#define percpu_to_op(op, var, val) \ #define percpu_to_op(op, var, val) \
do { \ do { \
typedef typeof(var) T__; \ typedef typeof(var) pto_T__; \
if (0) { \ if (0) { \
T__ tmp__; \ pto_T__ pto_tmp__; \
tmp__ = (val); \ pto_tmp__ = (val); \
} \ } \
switch (sizeof(var)) { \ switch (sizeof(var)) { \
case 1: \ case 1: \
asm(op "b %1,"__percpu_arg(0) \ asm(op "b %1,"__percpu_arg(0) \
: "+m" (var) \ : "+m" (var) \
: "qi" ((T__)(val))); \ : "qi" ((pto_T__)(val))); \
break; \ break; \
case 2: \ case 2: \
asm(op "w %1,"__percpu_arg(0) \ asm(op "w %1,"__percpu_arg(0) \
: "+m" (var) \ : "+m" (var) \
: "ri" ((T__)(val))); \ : "ri" ((pto_T__)(val))); \
break; \ break; \
case 4: \ case 4: \
asm(op "l %1,"__percpu_arg(0) \ asm(op "l %1,"__percpu_arg(0) \
: "+m" (var) \ : "+m" (var) \
: "ri" ((T__)(val))); \ : "ri" ((pto_T__)(val))); \
break; \ break; \
case 8: \ case 8: \
asm(op "q %1,"__percpu_arg(0) \ asm(op "q %1,"__percpu_arg(0) \
: "+m" (var) \ : "+m" (var) \
: "re" ((T__)(val))); \ : "re" ((pto_T__)(val))); \
break; \ break; \
default: __bad_percpu_size(); \ default: __bad_percpu_size(); \
} \ } \
...@@ -106,31 +106,31 @@ do { \ ...@@ -106,31 +106,31 @@ do { \
/*
 * Read a 1/2/4/8-byte percpu variable with a single size-suffixed
 * instruction.  The result variable carries the pfo_ (percpu_from_op)
 * prefix so that nesting inside other percpu macros cannot shadow a
 * caller's identifier — the sparse cleanup this change is about.
 * Sizes other than 1/2/4/8 fail at link time via __bad_percpu_size().
 */
#define percpu_from_op(op, var, constraint)		\
({							\
	typeof(var) pfo_ret__;				\
	switch (sizeof(var)) {				\
	case 1:						\
		asm(op "b "__percpu_arg(1)",%0"		\
		    : "=q" (pfo_ret__)			\
		    : constraint);			\
		break;					\
	case 2:						\
		asm(op "w "__percpu_arg(1)",%0"		\
		    : "=r" (pfo_ret__)			\
		    : constraint);			\
		break;					\
	case 4:						\
		asm(op "l "__percpu_arg(1)",%0"		\
		    : "=r" (pfo_ret__)			\
		    : constraint);			\
		break;					\
	case 8:						\
		asm(op "q "__percpu_arg(1)",%0"		\
		    : "=r" (pfo_ret__)			\
		    : constraint);			\
		break;					\
	default: __bad_percpu_size();			\
	}						\
	pfo_ret__;					\
})
/* /*
......
...@@ -60,6 +60,7 @@ ...@@ -60,6 +60,7 @@
/*
 * Define a per-CPU variable @name of @type placed in section @sec.
 * __pcpu_unique_##name is declared extern immediately before its
 * definition so sparse does not warn about a global symbol defined
 * without a prior declaration.
 */
#define DEFINE_PER_CPU_SECTION(type, name, sec)				\
	__PCPU_DUMMY_ATTRS char __pcpu_scope_##name;			\
	extern __PCPU_DUMMY_ATTRS char __pcpu_unique_##name;		\
	__PCPU_DUMMY_ATTRS char __pcpu_unique_##name;			\
	__PCPU_ATTRS(sec) PER_CPU_DEF_ATTRIBUTES __weak			\
	__typeof__(type) per_cpu__##name
......
...@@ -226,20 +226,20 @@ do { \ ...@@ -226,20 +226,20 @@ do { \
extern void __bad_size_call_parameter(void); extern void __bad_size_call_parameter(void);
/*
 * Dispatch to stem##1/2/4/8(variable) according to sizeof(variable) and
 * yield the result.  The pcpu-prefixed macro name and the pscr_-prefixed
 * temporary keep these identifiers unique when nested inside other percpu
 * macros (avoids sparse shadowing warnings).  Unsupported sizes fail at
 * link time via __bad_size_call_parameter().
 */
#define __pcpu_size_call_return(stem, variable)				\
({	typeof(variable) pscr_ret__;					\
	switch(sizeof(variable)) {					\
	case 1: pscr_ret__ = stem##1(variable);break;			\
	case 2: pscr_ret__ = stem##2(variable);break;			\
	case 4: pscr_ret__ = stem##4(variable);break;			\
	case 8: pscr_ret__ = stem##8(variable);break;			\
	default:							\
		__bad_size_call_parameter();break;			\
	}								\
	pscr_ret__;							\
})
#define __size_call(stem, variable, ...) \ #define __pcpu_size_call(stem, variable, ...) \
do { \ do { \
switch(sizeof(variable)) { \ switch(sizeof(variable)) { \
case 1: stem##1(variable, __VA_ARGS__);break; \ case 1: stem##1(variable, __VA_ARGS__);break; \
...@@ -299,7 +299,7 @@ do { \ ...@@ -299,7 +299,7 @@ do { \
# ifndef this_cpu_read_8 # ifndef this_cpu_read_8
# define this_cpu_read_8(pcp) _this_cpu_generic_read(pcp) # define this_cpu_read_8(pcp) _this_cpu_generic_read(pcp)
# endif # endif
# define this_cpu_read(pcp) __size_call_return(this_cpu_read_, (pcp)) # define this_cpu_read(pcp) __pcpu_size_call_return(this_cpu_read_, (pcp))
#endif #endif
#define _this_cpu_generic_to_op(pcp, val, op) \ #define _this_cpu_generic_to_op(pcp, val, op) \
...@@ -322,7 +322,7 @@ do { \ ...@@ -322,7 +322,7 @@ do { \
# ifndef this_cpu_write_8 # ifndef this_cpu_write_8
# define this_cpu_write_8(pcp, val) _this_cpu_generic_to_op((pcp), (val), =) # define this_cpu_write_8(pcp, val) _this_cpu_generic_to_op((pcp), (val), =)
# endif # endif
# define this_cpu_write(pcp, val) __size_call(this_cpu_write_, (pcp), (val)) # define this_cpu_write(pcp, val) __pcpu_size_call(this_cpu_write_, (pcp), (val))
#endif #endif
#ifndef this_cpu_add #ifndef this_cpu_add
...@@ -338,7 +338,7 @@ do { \ ...@@ -338,7 +338,7 @@ do { \
# ifndef this_cpu_add_8 # ifndef this_cpu_add_8
# define this_cpu_add_8(pcp, val) _this_cpu_generic_to_op((pcp), (val), +=) # define this_cpu_add_8(pcp, val) _this_cpu_generic_to_op((pcp), (val), +=)
# endif # endif
# define this_cpu_add(pcp, val) __size_call(this_cpu_add_, (pcp), (val)) # define this_cpu_add(pcp, val) __pcpu_size_call(this_cpu_add_, (pcp), (val))
#endif #endif
#ifndef this_cpu_sub #ifndef this_cpu_sub
...@@ -366,7 +366,7 @@ do { \ ...@@ -366,7 +366,7 @@ do { \
# ifndef this_cpu_and_8 # ifndef this_cpu_and_8
# define this_cpu_and_8(pcp, val) _this_cpu_generic_to_op((pcp), (val), &=) # define this_cpu_and_8(pcp, val) _this_cpu_generic_to_op((pcp), (val), &=)
# endif # endif
# define this_cpu_and(pcp, val) __size_call(this_cpu_and_, (pcp), (val)) # define this_cpu_and(pcp, val) __pcpu_size_call(this_cpu_and_, (pcp), (val))
#endif #endif
#ifndef this_cpu_or #ifndef this_cpu_or
...@@ -382,7 +382,7 @@ do { \ ...@@ -382,7 +382,7 @@ do { \
# ifndef this_cpu_or_8 # ifndef this_cpu_or_8
# define this_cpu_or_8(pcp, val) _this_cpu_generic_to_op((pcp), (val), |=) # define this_cpu_or_8(pcp, val) _this_cpu_generic_to_op((pcp), (val), |=)
# endif # endif
# define this_cpu_or(pcp, val) __size_call(this_cpu_or_, (pcp), (val)) # define this_cpu_or(pcp, val) __pcpu_size_call(this_cpu_or_, (pcp), (val))
#endif #endif
#ifndef this_cpu_xor #ifndef this_cpu_xor
...@@ -398,7 +398,7 @@ do { \ ...@@ -398,7 +398,7 @@ do { \
# ifndef this_cpu_xor_8
#  define this_cpu_xor_8(pcp, val)	_this_cpu_generic_to_op((pcp), (val), ^=)
# endif
/* FIX: the stem was this_cpu_or_, silently turning this_cpu_xor() into OR */
# define this_cpu_xor(pcp, val)	__pcpu_size_call(this_cpu_xor_, (pcp), (val))
#endif
/* /*
...@@ -428,7 +428,7 @@ do { \ ...@@ -428,7 +428,7 @@ do { \
# ifndef __this_cpu_read_8 # ifndef __this_cpu_read_8
# define __this_cpu_read_8(pcp) (*__this_cpu_ptr(&(pcp))) # define __this_cpu_read_8(pcp) (*__this_cpu_ptr(&(pcp)))
# endif # endif
# define __this_cpu_read(pcp) __size_call_return(__this_cpu_read_, (pcp)) # define __this_cpu_read(pcp) __pcpu_size_call_return(__this_cpu_read_, (pcp))
#endif #endif
#define __this_cpu_generic_to_op(pcp, val, op) \ #define __this_cpu_generic_to_op(pcp, val, op) \
...@@ -449,7 +449,7 @@ do { \ ...@@ -449,7 +449,7 @@ do { \
# ifndef __this_cpu_write_8 # ifndef __this_cpu_write_8
# define __this_cpu_write_8(pcp, val) __this_cpu_generic_to_op((pcp), (val), =) # define __this_cpu_write_8(pcp, val) __this_cpu_generic_to_op((pcp), (val), =)
# endif # endif
# define __this_cpu_write(pcp, val) __size_call(__this_cpu_write_, (pcp), (val)) # define __this_cpu_write(pcp, val) __pcpu_size_call(__this_cpu_write_, (pcp), (val))
#endif #endif
#ifndef __this_cpu_add #ifndef __this_cpu_add
...@@ -465,7 +465,7 @@ do { \ ...@@ -465,7 +465,7 @@ do { \
# ifndef __this_cpu_add_8 # ifndef __this_cpu_add_8
# define __this_cpu_add_8(pcp, val) __this_cpu_generic_to_op((pcp), (val), +=) # define __this_cpu_add_8(pcp, val) __this_cpu_generic_to_op((pcp), (val), +=)
# endif # endif
# define __this_cpu_add(pcp, val) __size_call(__this_cpu_add_, (pcp), (val)) # define __this_cpu_add(pcp, val) __pcpu_size_call(__this_cpu_add_, (pcp), (val))
#endif #endif
#ifndef __this_cpu_sub #ifndef __this_cpu_sub
...@@ -493,7 +493,7 @@ do { \ ...@@ -493,7 +493,7 @@ do { \
# ifndef __this_cpu_and_8 # ifndef __this_cpu_and_8
# define __this_cpu_and_8(pcp, val) __this_cpu_generic_to_op((pcp), (val), &=) # define __this_cpu_and_8(pcp, val) __this_cpu_generic_to_op((pcp), (val), &=)
# endif # endif
# define __this_cpu_and(pcp, val) __size_call(__this_cpu_and_, (pcp), (val)) # define __this_cpu_and(pcp, val) __pcpu_size_call(__this_cpu_and_, (pcp), (val))
#endif #endif
#ifndef __this_cpu_or #ifndef __this_cpu_or
...@@ -509,7 +509,7 @@ do { \ ...@@ -509,7 +509,7 @@ do { \
# ifndef __this_cpu_or_8 # ifndef __this_cpu_or_8
# define __this_cpu_or_8(pcp, val) __this_cpu_generic_to_op((pcp), (val), |=) # define __this_cpu_or_8(pcp, val) __this_cpu_generic_to_op((pcp), (val), |=)
# endif # endif
# define __this_cpu_or(pcp, val) __size_call(__this_cpu_or_, (pcp), (val)) # define __this_cpu_or(pcp, val) __pcpu_size_call(__this_cpu_or_, (pcp), (val))
#endif #endif
#ifndef __this_cpu_xor #ifndef __this_cpu_xor
...@@ -525,7 +525,7 @@ do { \ ...@@ -525,7 +525,7 @@ do { \
# ifndef __this_cpu_xor_8 # ifndef __this_cpu_xor_8
# define __this_cpu_xor_8(pcp, val) __this_cpu_generic_to_op((pcp), (val), ^=) # define __this_cpu_xor_8(pcp, val) __this_cpu_generic_to_op((pcp), (val), ^=)
# endif # endif
# define __this_cpu_xor(pcp, val) __size_call(__this_cpu_xor_, (pcp), (val)) # define __this_cpu_xor(pcp, val) __pcpu_size_call(__this_cpu_xor_, (pcp), (val))
#endif #endif
/* /*
...@@ -556,7 +556,7 @@ do { \ ...@@ -556,7 +556,7 @@ do { \
# ifndef irqsafe_cpu_add_8 # ifndef irqsafe_cpu_add_8
# define irqsafe_cpu_add_8(pcp, val) irqsafe_cpu_generic_to_op((pcp), (val), +=) # define irqsafe_cpu_add_8(pcp, val) irqsafe_cpu_generic_to_op((pcp), (val), +=)
# endif # endif
# define irqsafe_cpu_add(pcp, val) __size_call(irqsafe_cpu_add_, (pcp), (val)) # define irqsafe_cpu_add(pcp, val) __pcpu_size_call(irqsafe_cpu_add_, (pcp), (val))
#endif #endif
#ifndef irqsafe_cpu_sub #ifndef irqsafe_cpu_sub
...@@ -584,7 +584,7 @@ do { \ ...@@ -584,7 +584,7 @@ do { \
# ifndef irqsafe_cpu_and_8
#  define irqsafe_cpu_and_8(pcp, val) irqsafe_cpu_generic_to_op((pcp), (val), &=)
# endif
/* FIX: (pcp) was missing from the __pcpu_size_call() argument list */
# define irqsafe_cpu_and(pcp, val) __pcpu_size_call(irqsafe_cpu_and_, (pcp), (val))
#endif
#ifndef irqsafe_cpu_or #ifndef irqsafe_cpu_or
...@@ -600,7 +600,7 @@ do { \ ...@@ -600,7 +600,7 @@ do { \
# ifndef irqsafe_cpu_or_8
#  define irqsafe_cpu_or_8(pcp, val) irqsafe_cpu_generic_to_op((pcp), (val), |=)
# endif
/* FIX: (pcp) was missing from the __pcpu_size_call() argument list */
# define irqsafe_cpu_or(pcp, val) __pcpu_size_call(irqsafe_cpu_or_, (pcp), (val))
#endif
#ifndef irqsafe_cpu_xor #ifndef irqsafe_cpu_xor
...@@ -616,7 +616,7 @@ do { \ ...@@ -616,7 +616,7 @@ do { \
# ifndef irqsafe_cpu_xor_8
#  define irqsafe_cpu_xor_8(pcp, val) irqsafe_cpu_generic_to_op((pcp), (val), ^=)
# endif
/* FIX: (pcp) was missing from the __pcpu_size_call() argument list */
# define irqsafe_cpu_xor(pcp, val) __pcpu_size_call(irqsafe_cpu_xor_, (pcp), (val))
#endif
#endif /* __LINUX_PERCPU_H */ #endif /* __LINUX_PERCPU_H */
...@@ -365,6 +365,7 @@ static struct pcpu_chunk *pcpu_chunk_addr_search(void *addr) ...@@ -365,6 +365,7 @@ static struct pcpu_chunk *pcpu_chunk_addr_search(void *addr)
* 0 if noop, 1 if successfully extended, -errno on failure. * 0 if noop, 1 if successfully extended, -errno on failure.
*/ */
static int pcpu_extend_area_map(struct pcpu_chunk *chunk) static int pcpu_extend_area_map(struct pcpu_chunk *chunk)
__releases(lock) __acquires(lock)
{ {
int new_alloc; int new_alloc;
int *new; int *new;
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment