Commit 2fe1e67e authored by Nathan Chancellor, committed by Dave Hansen

x86/csum: Fix clang -Wuninitialized in csum_partial()

Clang warns:

  arch/x86/lib/csum-partial_64.c:74:20: error: variable 'result' is uninitialized when used here [-Werror,-Wuninitialized]
                  return csum_tail(result, temp64, odd);
                                   ^~~~~~
  arch/x86/lib/csum-partial_64.c:48:22: note: initialize the variable 'result' to silence this warning
          unsigned odd, result;
                              ^
                               = 0
  1 error generated.

The only initialization and uses of result in csum_partial() were moved
into csum_tail() but result is still being passed by value to
csum_tail() (clang's -Wuninitialized does not do interprocedural
analysis to realize that result is always assigned in csum_tail()
however). Sink the declaration of result into csum_tail() to clear up
the warning.

Closes: https://lore.kernel.org/202305262039.3HUYjWJk-lkp@intel.com/
Fixes: 688eb819 ("x86/csum: Improve performance of `csum_partial`")
Reported-by: kernel test robot <lkp@intel.com>
Signed-off-by: Nathan Chancellor <nathan@kernel.org>
Signed-off-by: Dave Hansen <dave.hansen@linux.intel.com>
Link: https://lore.kernel.org/all/20230526-csum_partial-wuninitialized-v1-1-ebc0108dcec1%40kernel.org
parent 688eb819
@@ -21,8 +21,10 @@ static inline unsigned short from32to16(unsigned a)
 	return b;
 }
 
-static inline __wsum csum_tail(unsigned int result, u64 temp64, int odd)
+static inline __wsum csum_tail(u64 temp64, int odd)
 {
+	unsigned int result;
+
 	result = add32_with_carry(temp64 >> 32, temp64 & 0xffffffff);
 	if (unlikely(odd)) {
 		result = from32to16(result);
@@ -45,7 +47,7 @@ static inline __wsum csum_tail(unsigned int result, u64 temp64, int odd)
 __wsum csum_partial(const void *buff, int len, __wsum sum)
 {
 	u64 temp64 = (__force u64)sum;
-	unsigned odd, result;
+	unsigned odd;
 
 	odd = 1 & (unsigned long) buff;
 	if (unlikely(odd)) {
@@ -71,7 +73,7 @@ __wsum csum_partial(const void *buff, int len, __wsum sum)
 			    "adcq $0,%[res]"
 			    : [res] "+r"(temp64)
 			    : [src] "r"(buff), "m"(*(const char(*)[40])buff));
-		return csum_tail(result, temp64, odd);
+		return csum_tail(temp64, odd);
 	}
 	if (unlikely(len >= 64)) {
 		/*
@@ -141,7 +143,7 @@ __wsum csum_partial(const void *buff, int len, __wsum sum)
 			    : [res] "+r"(temp64)
 			    : [trail] "r"(trail));
 	}
-	return csum_tail(result, temp64, odd);
+	return csum_tail(temp64, odd);
 }
 EXPORT_SYMBOL(csum_partial);
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment