author     Peter Zijlstra <peterz@infradead.org>    2023-05-31 15:08:39 +0200
committer  Peter Zijlstra <peterz@infradead.org>    2023-06-05 09:36:37 +0200
commit     6d12c8d308e68b9b0fa98ca2df4f83db4b4c965d
tree       442e57e61e0b36dd4b9bc4028450812344d1e198 /include/asm-generic/percpu.h
parent     percpu: Add {raw,this}_cpu_try_cmpxchg()
percpu: Wire up cmpxchg128
In order to replace cmpxchg_double() with the newly minted
cmpxchg128() family of functions, wire it up in this_cpu_cmpxchg().
Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Reviewed-by: Mark Rutland <mark.rutland@arm.com>
Tested-by: Mark Rutland <mark.rutland@arm.com>
Link: https://lore.kernel.org/r/20230531132323.654945124@infradead.org
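
For a sense of how the new entry points are meant to be called, here is a minimal usage sketch. It is purely illustrative and not part of this patch: `demo_slot` and `demo_publish()` are hypothetical names, and it assumes an architecture where `u128` is available (CONFIG_ARCH_SUPPORTS_INT128); where no native cmpxchg128 exists, the generic fallbacks added below kick in.

```c
/*
 * Hypothetical illustration only: demo_slot and demo_publish() do not
 * exist in the kernel tree. Shows the calling convention of the newly
 * wired-up 128-bit per-CPU ops.
 */
#include <linux/percpu.h>
#include <linux/types.h>

/* A 16-byte per-CPU record, naturally aligned for cmpxchg128. */
static DEFINE_PER_CPU(u128, demo_slot);

/* Install 'nval' iff this CPU's slot still holds 'expected'. */
static bool demo_publish(u128 expected, u128 nval)
{
	/*
	 * Atomic on the local CPU and preemption-safe. On failure the
	 * observed value is written back through the second argument,
	 * which is how try_cmpxchg-style loops avoid an extra load.
	 */
	return this_cpu_try_cmpxchg128(demo_slot, &expected, nval);
}
```

Unlike the cmpxchg_double() interface being replaced, which operated on two adjacent machine words and returned only a success flag, the cmpxchg128() form works on a single u128 value, so the failure path can hand the observed value back through the pointer argument.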
Diffstat (limited to 'include/asm-generic/percpu.h')
-rw-r--r-- | include/asm-generic/percpu.h | 56
1 file changed, 56 insertions, 0 deletions
```diff
diff --git a/include/asm-generic/percpu.h b/include/asm-generic/percpu.h
index 96af32c283b2..5c66e4496289 100644
--- a/include/asm-generic/percpu.h
+++ b/include/asm-generic/percpu.h
@@ -350,6 +350,25 @@ do { \
 #endif
 #endif
 
+#ifndef raw_cpu_try_cmpxchg64
+#ifdef raw_cpu_cmpxchg64
+#define raw_cpu_try_cmpxchg64(pcp, ovalp, nval) \
+	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, raw_cpu_cmpxchg64)
+#else
+#define raw_cpu_try_cmpxchg64(pcp, ovalp, nval) \
+	raw_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
+#endif
+#endif
+#ifndef raw_cpu_try_cmpxchg128
+#ifdef raw_cpu_cmpxchg128
+#define raw_cpu_try_cmpxchg128(pcp, ovalp, nval) \
+	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, raw_cpu_cmpxchg128)
+#else
+#define raw_cpu_try_cmpxchg128(pcp, ovalp, nval) \
+	raw_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
+#endif
+#endif
+
 #ifndef raw_cpu_cmpxchg_1
 #define raw_cpu_cmpxchg_1(pcp, oval, nval) \
 	raw_cpu_generic_cmpxchg(pcp, oval, nval)
@@ -367,6 +386,15 @@ do { \
 	raw_cpu_generic_cmpxchg(pcp, oval, nval)
 #endif
 
+#ifndef raw_cpu_cmpxchg64
+#define raw_cpu_cmpxchg64(pcp, oval, nval) \
+	raw_cpu_generic_cmpxchg(pcp, oval, nval)
+#endif
+#ifndef raw_cpu_cmpxchg128
+#define raw_cpu_cmpxchg128(pcp, oval, nval) \
+	raw_cpu_generic_cmpxchg(pcp, oval, nval)
+#endif
+
 #ifndef raw_cpu_cmpxchg_double_1
 #define raw_cpu_cmpxchg_double_1(pcp1, pcp2, oval1, oval2, nval1, nval2) \
 	raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
@@ -512,6 +540,25 @@ do { \
 #endif
 #endif
 
+#ifndef this_cpu_try_cmpxchg64
+#ifdef this_cpu_cmpxchg64
+#define this_cpu_try_cmpxchg64(pcp, ovalp, nval) \
+	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, this_cpu_cmpxchg64)
+#else
+#define this_cpu_try_cmpxchg64(pcp, ovalp, nval) \
+	this_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
+#endif
+#endif
+#ifndef this_cpu_try_cmpxchg128
+#ifdef this_cpu_cmpxchg128
+#define this_cpu_try_cmpxchg128(pcp, ovalp, nval) \
+	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, this_cpu_cmpxchg128)
+#else
+#define this_cpu_try_cmpxchg128(pcp, ovalp, nval) \
+	this_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
+#endif
+#endif
+
 #ifndef this_cpu_cmpxchg_1
 #define this_cpu_cmpxchg_1(pcp, oval, nval) \
 	this_cpu_generic_cmpxchg(pcp, oval, nval)
@@ -529,6 +576,15 @@ do { \
 	this_cpu_generic_cmpxchg(pcp, oval, nval)
 #endif
 
+#ifndef this_cpu_cmpxchg64
+#define this_cpu_cmpxchg64(pcp, oval, nval) \
+	this_cpu_generic_cmpxchg(pcp, oval, nval)
+#endif
+#ifndef this_cpu_cmpxchg128
+#define this_cpu_cmpxchg128(pcp, oval, nval) \
+	this_cpu_generic_cmpxchg(pcp, oval, nval)
+#endif
+
 #ifndef this_cpu_cmpxchg_double_1
 #define this_cpu_cmpxchg_double_1(pcp1, pcp2, oval1, oval2, nval1, nval2) \
 	this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
```
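
The two fallback paths above do different amounts of work. Where an architecture supplies a native {raw,this}_cpu_cmpxchg64/128, the try_ variant is synthesized from it by __cpu_fallback_try_cmpxchg(); otherwise raw_cpu_generic_try_cmpxchg() open-codes the compare-and-swap through the per-CPU address. Both helpers come from the parent commit, "percpu: Add {raw,this}_cpu_try_cmpxchg()"; the sketch below reproduces their shape rather than their exact text, so consult that commit for the authoritative version.

```c
/* Build a try_cmpxchg from a plain cmpxchg: on failure, write the
 * observed value back through 'ovalp' so the caller can retry. */
#define __cpu_fallback_try_cmpxchg(pcp, ovalp, nval, _cmpxchg)		\
({									\
	typeof(pcp) __val, __old = *(ovalp);				\
	__val = _cmpxchg(pcp, __old, nval);				\
	if (__val != __old)						\
		*(ovalp) = __val;					\
	__val == __old;							\
})

/* Fully generic version: plain load/compare/store on the per-CPU
 * address; the raw_ form leaves any required exclusion to the caller. */
#define raw_cpu_generic_try_cmpxchg(pcp, ovalp, nval)			\
({									\
	typeof(pcp) *__p = raw_cpu_ptr(&(pcp));				\
	typeof(pcp) __val = *__p, __old = *(ovalp);			\
	bool __ret;							\
	if (__val == __old) {						\
		*__p = nval;						\
		__ret = true;						\
	} else {							\
		*(ovalp) = __val;					\
		__ret = false;						\
	}								\
	__ret;								\
})
```

The write-back on failure is what lets callers express compare-and-swap loops without re-reading the per-CPU slot on every iteration, which is the main ergonomic gain of the try_cmpxchg interface over the plain cmpxchg one.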