author     Peter Zijlstra <peterz@infradead.org>  2020-06-02 14:24:47 +0200
committer  Peter Zijlstra <peterz@infradead.org>  2020-06-15 14:10:08 +0200
commit     e82587336695f14283987c9aa0bfd775b520856d (patch)
tree       1941a1ca0345af03f5ac508289f8377395676715 /arch/x86/include/asm/bitops.h
parent     Linux 5.8-rc1 (diff)
x86, kcsan: Remove __no_kcsan_or_inline usage
Now that KCSAN relies on -tsan-distinguish-volatile we no longer need
the annotation for constant_test_bit(). Remove it.

Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org>
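Context for the change (not part of the commit): constant_test_bit() is the path taken by the x86 test_bit() machinery when the bit number is a compile-time constant, and its read goes through a volatile-qualified pointer, which is exactly what -tsan-distinguish-volatile lets the instrumentation recognize. The user-space sketch below paraphrases that dispatch so the function's place is visible; the simplified test_bit() macro and variable_test_bit() stand-in are illustrative, not the kernel's exact definitions, and the example assumes a GCC/Clang 64-bit build.

#include <stdbool.h>
#include <stdio.h>

#define BITS_PER_LONG		64	/* assumes a 64-bit build */
#define _BITOPS_LONG_SHIFT	6	/* log2(BITS_PER_LONG) */

/*
 * Same body as in the diff below: a plain C read through a
 * volatile-qualified pointer.  With -tsan-distinguish-volatile the
 * compiler-side instrumentation can already tell this access apart,
 * so the function no longer needs a "don't instrument me" annotation.
 */
static inline bool constant_test_bit(long nr, const volatile unsigned long *addr)
{
	return ((1UL << (nr & (BITS_PER_LONG-1))) &
		(addr[nr >> _BITOPS_LONG_SHIFT])) != 0;
}

/* Stand-in for the asm-based variable_test_bit() in the real header. */
static inline bool variable_test_bit(long nr, const volatile unsigned long *addr)
{
	return ((1UL << (nr & (BITS_PER_LONG-1))) &
		(addr[nr >> _BITOPS_LONG_SHIFT])) != 0;
}

/* Constant bit numbers are routed to constant_test_bit() at compile time. */
#define test_bit(nr, addr)				\
	(__builtin_constant_p((nr))			\
	 ? constant_test_bit((nr), (addr))		\
	 : variable_test_bit((nr), (addr)))

int main(void)
{
	volatile unsigned long word = 1UL << 5;

	printf("bit 5 set: %d\n", test_bit(5, &word));	/* constant path */
	return 0;
}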
Diffstat (limited to 'arch/x86/include/asm/bitops.h')
-rw-r--r--  arch/x86/include/asm/bitops.h | 6
1 file changed, 1 insertion(+), 5 deletions(-)
diff --git a/arch/x86/include/asm/bitops.h b/arch/x86/include/asm/bitops.h
index 35460fef39b8..0367efdc5b7a 100644
--- a/arch/x86/include/asm/bitops.h
+++ b/arch/x86/include/asm/bitops.h
@@ -201,12 +201,8 @@ arch_test_and_change_bit(long nr, volatile unsigned long *addr)
 	return GEN_BINARY_RMWcc(LOCK_PREFIX __ASM_SIZE(btc), *addr, c, "Ir", nr);
 }
 
-static __no_kcsan_or_inline bool constant_test_bit(long nr, const volatile unsigned long *addr)
+static __always_inline bool constant_test_bit(long nr, const volatile unsigned long *addr)
 {
-	/*
-	 * Because this is a plain access, we need to disable KCSAN here to
-	 * avoid double instrumentation via instrumented bitops.
-	 */
 	return ((1UL << (nr & (BITS_PER_LONG-1))) &
 		(addr[nr >> _BITOPS_LONG_SHIFT])) != 0;
 }
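A note on the attribute being dropped: under a KCSAN-instrumented build, __no_kcsan_or_inline made constant_test_bit() an uninlined, uninstrumented function, while outside KCSAN builds it was plain __always_inline. The sketch below is a from-memory approximation of how include/linux/compiler_types.h spelled this around this kernel version; treat the exact attribute list as an assumption and check the tree for the authoritative definition.

/*
 * Approximate, from memory -- not quoted verbatim from
 * include/linux/compiler_types.h.
 */
#ifdef __SANITIZE_THREAD__
/* KCSAN build: mark the function uninstrumented instead of forcing inlining
 * (the real definition adds a couple more attributes). */
# define __no_kcsan_or_inline	__no_kcsan __maybe_unused
#else
/* Regular build: behave like any other always-inlined bitops helper. */
# define __no_kcsan_or_inline	__always_inline
#endif

With -tsan-distinguish-volatile doing the filtering in the compiler, the patch can make constant_test_bit() unconditionally __always_inline again, restoring inlining in KCSAN builds.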