| author | Will Deacon <will@kernel.org> | 2019-12-19 18:11:43 +0100 |
|---|---|---|
| committer | Will Deacon <will@kernel.org> | 2020-04-16 13:28:35 +0200 |
| commit | 10223c5286f7389c022e9e91f12c49918790cf36 | |
| tree | 354fe951052df505cbc10520dbd538596f57389e /arch/arm64/include/asm/barrier.h | |
| parent | locking/barriers: Use '__unqual_scalar_typeof' for load-acquire macros | |
| download | linux-10223c5286f7389c022e9e91f12c49918790cf36.tar.xz, linux-10223c5286f7389c022e9e91f12c49918790cf36.zip | |
arm64: barrier: Use '__unqual_scalar_typeof' for acquire/release macros
Passing volatile-qualified pointers to the arm64 implementations of the
load-acquire/store-release macros results in a re-load from the stack
and a bunch of associated stack-protector churn due to the temporary
result variable inheriting the volatile semantics thanks to the use of
'typeof()'.
Define these temporary variables using '__unqual_scalar_typeof' to drop
the volatile qualifier when they are scalar types.
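To make the effect concrete, here is a minimal userspace sketch (not kernel code; 'unqual_int_typeof' and the variable names are illustrative): a temporary declared with 'typeof()' of a volatile lvalue is itself volatile, so every use of it is a mandatory memory access, whereas the '_Generic'-based mapping yields an unqualified type the compiler can keep in a register.

```c
/*
 * Illustrative only: a cut-down, single-type version of the
 * __unqual_scalar_typeof() idea. _Generic performs lvalue conversion on
 * its controlling expression, so a 'volatile int' matches the plain
 * 'int' case and typeof() of the selected expression is unqualified.
 */
#include <stdio.h>

#define unqual_int_typeof(x) \
	typeof(_Generic((x), int: (int)0, default: (x)))

static volatile int shared = 7;

int main(void)
{
	typeof(shared) qual_tmp = shared;	/* type: volatile int */
	unqual_int_typeof(shared) tmp = shared;	/* type: plain int    */

	/*
	 * Every read of 'qual_tmp' is a volatile access the compiler must
	 * re-issue; reads of 'tmp' can stay in a register.
	 */
	printf("%d %d\n", qual_tmp + qual_tmp, tmp + tmp);
	return 0;
}
```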
Cc: Linus Torvalds <torvalds@linux-foundation.org>
Cc: Peter Zijlstra <peterz@infradead.org>
Cc: Arnd Bergmann <arnd@arndb.de>
Acked-by: Mark Rutland <mark.rutland@arm.com>
Signed-off-by: Will Deacon <will@kernel.org>
Diffstat (limited to 'arch/arm64/include/asm/barrier.h')
-rw-r--r-- | arch/arm64/include/asm/barrier.h | 16 |
1 file changed, 8 insertions, 8 deletions
diff --git a/arch/arm64/include/asm/barrier.h b/arch/arm64/include/asm/barrier.h
index 7d9cc5ec4971..fb4c27506ef4 100644
--- a/arch/arm64/include/asm/barrier.h
+++ b/arch/arm64/include/asm/barrier.h
@@ -76,8 +76,8 @@ static inline unsigned long array_index_mask_nospec(unsigned long idx,
 #define __smp_store_release(p, v) \
 do { \
         typeof(p) __p = (p); \
-        union { typeof(*p) __val; char __c[1]; } __u = \
-                { .__val = (__force typeof(*p)) (v) }; \
+        union { __unqual_scalar_typeof(*p) __val; char __c[1]; } __u = \
+                { .__val = (__force __unqual_scalar_typeof(*p)) (v) }; \
         compiletime_assert_atomic_type(*p); \
         kasan_check_write(__p, sizeof(*p)); \
         switch (sizeof(*p)) { \
@@ -110,7 +110,7 @@ do { \
 
 #define __smp_load_acquire(p) \
 ({ \
-        union { typeof(*p) __val; char __c[1]; } __u; \
+        union { __unqual_scalar_typeof(*p) __val; char __c[1]; } __u; \
         typeof(p) __p = (p); \
         compiletime_assert_atomic_type(*p); \
         kasan_check_read(__p, sizeof(*p)); \
@@ -136,33 +136,33 @@ do { \
                 : "Q" (*__p) : "memory"); \
                 break; \
         } \
-        __u.__val; \
+        (typeof(*p))__u.__val; \
 })
 
 #define smp_cond_load_relaxed(ptr, cond_expr) \
 ({ \
         typeof(ptr) __PTR = (ptr); \
-        typeof(*ptr) VAL; \
+        __unqual_scalar_typeof(*ptr) VAL; \
         for (;;) { \
                 VAL = READ_ONCE(*__PTR); \
                 if (cond_expr) \
                         break; \
                 __cmpwait_relaxed(__PTR, VAL); \
         } \
-        VAL; \
+        (typeof(*ptr))VAL; \
 })
 
 #define smp_cond_load_acquire(ptr, cond_expr) \
 ({ \
         typeof(ptr) __PTR = (ptr); \
-        typeof(*ptr) VAL; \
+        __unqual_scalar_typeof(*ptr) VAL; \
         for (;;) { \
                 VAL = smp_load_acquire(__PTR); \
                 if (cond_expr) \
                         break; \
                 __cmpwait_relaxed(__PTR, VAL); \
         } \
-        VAL; \
+        (typeof(*ptr))VAL; \
 })
 
 #include <asm-generic/barrier.h>
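For readers who want to poke at the resulting macro shape outside the kernel, here is a compilable userspace model of the pattern above (simplified and hypothetical: no barriers, no KASAN, and the _Generic case list is abbreviated). It shows the unqualified temporary inside the statement expression and the final cast back to 'typeof(*p)' that keeps the caller-visible type unchanged.

```c
/* Userspace model of the pattern, for illustration only. */
#include <stdio.h>

#define __unqual_scalar_typeof(x)			\
	typeof(_Generic((x),				\
		int:		(int)0,			\
		unsigned int:	(unsigned int)0,	\
		long:		(long)0,		\
		unsigned long:	(unsigned long)0,	\
		default:	(x)))

#define load_acquire_model(p)						\
({									\
	/* Temporary has the unqualified scalar type, not volatile. */	\
	union { __unqual_scalar_typeof(*(p)) __val; char __c[1]; } __u;	\
	__u.__val = *(p);	/* stands in for the LDAR inline asm */	\
	(typeof(*(p)))__u.__val; /* caller still sees typeof(*p) */	\
})

static volatile unsigned long flag = 42;

int main(void)
{
	unsigned long v = load_acquire_model(&flag);

	printf("%lu\n", v);
	return 0;
}
```

The trade-off the patch makes is visible in the last two lines of the macro body: the temporary drops the qualifier so the compiler can generate better code, while the cast on the result expression keeps the type that callers of the macro expect.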