author     Nathan Chancellor <nathan@kernel.org>          2023-05-26 17:47:40 +0200
committer  Dave Hansen <dave.hansen@linux.intel.com>      2023-05-29 15:52:32 +0200
commit     2fe1e67e6987b6f05329740da79c8150a2205b0d (patch)
tree       5dba4fe5eefe76b240c385bfe7a36c1242cc184a /arch/x86/lib
parent     x86/csum: Improve performance of `csum_partial` (diff)
x86/csum: Fix clang -Wuninitialized in csum_partial()
Clang warns:

  arch/x86/lib/csum-partial_64.c:74:20: error: variable 'result' is uninitialized when used here [-Werror,-Wuninitialized]
                  return csum_tail(result, temp64, odd);
                                   ^~~~~~
  arch/x86/lib/csum-partial_64.c:48:22: note: initialize the variable 'result' to silence this warning
          unsigned odd, result;
                              ^
                               = 0
  1 error generated.

The only initialization and uses of result in csum_partial() were moved
into csum_tail(), but result is still being passed by value to
csum_tail(). Clang's -Wuninitialized does not do interprocedural
analysis, so it cannot see that result is always assigned in csum_tail()
before it is read.

Sink the declaration of result into csum_tail() to clear up the warning.

Fixes: 688eb8191b47 ("x86/csum: Improve performance of `csum_partial`")
Reported-by: kernel test robot <lkp@intel.com>
Closes: https://lore.kernel.org/202305262039.3HUYjWJk-lkp@intel.com/
Signed-off-by: Nathan Chancellor <nathan@kernel.org>
Signed-off-by: Dave Hansen <dave.hansen@linux.intel.com>
Link: https://lore.kernel.org/all/20230526-csum_partial-wuninitialized-v1-1-ebc0108dcec1%40kernel.org
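As context for the change below, a minimal standalone sketch of the pattern
clang objects to. The names (tail_by_value, tail_local, sum_buggy, sum_fixed)
are illustrative only and are not part of the kernel sources.

/*
 * Hypothetical reproducer of the -Wuninitialized pattern fixed by this
 * patch. Build with: clang -Wuninitialized -c repro.c
 */

/* The incoming value of 'result' is dead: it is overwritten before use. */
static inline unsigned int tail_by_value(unsigned int result, unsigned long long temp64)
{
	result = (unsigned int)(temp64 >> 32) + (unsigned int)temp64;
	return result;
}

unsigned int sum_buggy(unsigned long long temp64)
{
	unsigned int result;	/* never assigned in this function */

	/*
	 * clang warns here: it does not look inside tail_by_value() to
	 * see that the passed-in value is never actually read.
	 */
	return tail_by_value(result, temp64);
}

/*
 * The fix mirrors the patch: drop the parameter and declare the
 * variable in the helper that assigns it.
 */
static inline unsigned int tail_local(unsigned long long temp64)
{
	unsigned int result;

	result = (unsigned int)(temp64 >> 32) + (unsigned int)temp64;
	return result;
}

unsigned int sum_fixed(unsigned long long temp64)
{
	return tail_local(temp64);	/* no warning */
}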
Diffstat (limited to 'arch/x86/lib')
 arch/x86/lib/csum-partial_64.c | 10 ++++++----
 1 file changed, 6 insertions(+), 4 deletions(-)
diff --git a/arch/x86/lib/csum-partial_64.c b/arch/x86/lib/csum-partial_64.c
index fe5861951b15..cea25ca8b8cf 100644
--- a/arch/x86/lib/csum-partial_64.c
+++ b/arch/x86/lib/csum-partial_64.c
@@ -21,8 +21,10 @@ static inline unsigned short from32to16(unsigned a)
return b;
}
-static inline __wsum csum_tail(unsigned int result, u64 temp64, int odd)
+static inline __wsum csum_tail(u64 temp64, int odd)
{
+ unsigned int result;
+
result = add32_with_carry(temp64 >> 32, temp64 & 0xffffffff);
if (unlikely(odd)) {
result = from32to16(result);
@@ -45,7 +47,7 @@ static inline __wsum csum_tail(unsigned int result, u64 temp64, int odd)
__wsum csum_partial(const void *buff, int len, __wsum sum)
{
u64 temp64 = (__force u64)sum;
- unsigned odd, result;
+ unsigned odd;
odd = 1 & (unsigned long) buff;
if (unlikely(odd)) {
@@ -71,7 +73,7 @@ __wsum csum_partial(const void *buff, int len, __wsum sum)
"adcq $0,%[res]"
: [res] "+r"(temp64)
: [src] "r"(buff), "m"(*(const char(*)[40])buff));
- return csum_tail(result, temp64, odd);
+ return csum_tail(temp64, odd);
}
if (unlikely(len >= 64)) {
/*
@@ -141,7 +143,7 @@ __wsum csum_partial(const void *buff, int len, __wsum sum)
: [res] "+r"(temp64)
: [trail] "r"(trail));
}
- return csum_tail(result, temp64, odd);
+ return csum_tail(temp64, odd);
}
EXPORT_SYMBOL(csum_partial);
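For reference, csum_tail() as it reads with this patch applied, reconstructed
from the hunks above. The odd-byte swap and the final cast are not visible in
the truncated hunk and are sketched here as assumptions based on the function's
__wsum return type and the odd-buffer handling in csum_partial().

static inline __wsum csum_tail(u64 temp64, int odd)
{
	unsigned int result;

	result = add32_with_carry(temp64 >> 32, temp64 & 0xffffffff);
	if (unlikely(odd)) {
		result = from32to16(result);
		/* Assumed (not shown in the hunk): undo the byte rotation
		 * applied for an odd-aligned buffer. */
		result = ((result >> 8) & 0xff) | ((result & 0xff) << 8);
	}
	/* Assumed final cast, matching the __wsum return type. */
	return (__force __wsum)result;
}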