author | Peter Zijlstra <peterz@infradead.org> | 2023-05-31 15:08:38 +0200 |
---|---|---|
committer | Peter Zijlstra <peterz@infradead.org> | 2023-06-05 09:36:36 +0200 |
commit | c5c0ba953b8c969c5d51bf1c57f239866a97c47c (patch) | |
tree | bb1da4bcc993aeacb1383da1e379ee14834ead38 /include/asm-generic/percpu.h | |
parent | instrumentation: Wire up cmpxchg128() (diff) | |
download | linux-c5c0ba953b8c969c5d51bf1c57f239866a97c47c.tar.xz linux-c5c0ba953b8c969c5d51bf1c57f239866a97c47c.zip |
percpu: Add {raw,this}_cpu_try_cmpxchg()
Add the try_cmpxchg() form to the per-cpu ops.
Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Reviewed-by: Mark Rutland <mark.rutland@arm.com>
Tested-by: Mark Rutland <mark.rutland@arm.com>
Link: https://lore.kernel.org/r/20230531132323.587480729@infradead.org
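For illustration only (not part of the patch): a minimal usage sketch of the new op. The try form returns a bool and, on failure, writes the value it actually observed back through the old-value pointer, so a retry loop does not need to re-read the per-cpu variable as it would with this_cpu_cmpxchg(). The demo_count variable and demo_inc() helper are hypothetical names.

```c
/*
 * Hypothetical example (not from the patch): an optimistic per-cpu
 * increment built on the new this_cpu_try_cmpxchg() op.  On failure
 * the op refreshes 'old' with the value it observed, so the loop
 * simply retries with old + 1.
 */
static DEFINE_PER_CPU(unsigned long, demo_count);

static void demo_inc(void)
{
	unsigned long old = this_cpu_read(demo_count);

	do {
		/* retry until the compare-and-exchange succeeds */
	} while (!this_cpu_try_cmpxchg(demo_count, &old, old + 1));
}
```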
Diffstat (limited to 'include/asm-generic/percpu.h')
-rw-r--r-- | include/asm-generic/percpu.h | 113 |
1 file changed, 109 insertions, 4 deletions
diff --git a/include/asm-generic/percpu.h b/include/asm-generic/percpu.h
index 6432a7fade91..96af32c283b2 100644
--- a/include/asm-generic/percpu.h
+++ b/include/asm-generic/percpu.h
@@ -89,16 +89,37 @@ do { \
 	__ret;							\
 })

-#define raw_cpu_generic_cmpxchg(pcp, oval, nval)		\
+#define __cpu_fallback_try_cmpxchg(pcp, ovalp, nval, _cmpxchg)	\
+({								\
+	typeof(pcp) __val, __old = *(ovalp);			\
+	__val = _cmpxchg(pcp, __old, nval);			\
+	if (__val != __old)					\
+		*(ovalp) = __val;				\
+	__val == __old;						\
+})
+
+#define raw_cpu_generic_try_cmpxchg(pcp, ovalp, nval)		\
 ({								\
 	typeof(pcp) *__p = raw_cpu_ptr(&(pcp));			\
-	typeof(pcp) __ret;					\
-	__ret = *__p;						\
-	if (__ret == (oval))					\
+	typeof(pcp) __val = *__p, __old = *(ovalp);		\
+	bool __ret;						\
+	if (__val == __old) {					\
 		*__p = nval;					\
+		__ret = true;					\
+	} else {						\
+		*(ovalp) = __val;				\
+		__ret = false;					\
+	}							\
 	__ret;							\
 })

+#define raw_cpu_generic_cmpxchg(pcp, oval, nval)		\
+({								\
+	typeof(pcp) __old = (oval);				\
+	raw_cpu_generic_try_cmpxchg(pcp, &__old, nval);		\
+	__old;							\
+})
+
 #define raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2) \
 ({								\
 	typeof(pcp1) *__p1 = raw_cpu_ptr(&(pcp1));		\
@@ -170,6 +191,16 @@ do { \
 	__ret;							\
 })

+#define this_cpu_generic_try_cmpxchg(pcp, ovalp, nval)		\
+({								\
+	bool __ret;						\
+	unsigned long __flags;					\
+	raw_local_irq_save(__flags);				\
+	__ret = raw_cpu_generic_try_cmpxchg(pcp, ovalp, nval);	\
+	raw_local_irq_restore(__flags);				\
+	__ret;							\
+})
+
 #define this_cpu_generic_cmpxchg(pcp, oval, nval)		\
 ({								\
 	typeof(pcp) __ret;					\
@@ -282,6 +313,43 @@ do { \
 #define raw_cpu_xchg_8(pcp, nval)	raw_cpu_generic_xchg(pcp, nval)
 #endif

+#ifndef raw_cpu_try_cmpxchg_1
+#ifdef raw_cpu_cmpxchg_1
+#define raw_cpu_try_cmpxchg_1(pcp, ovalp, nval) \
+	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, raw_cpu_cmpxchg_1)
+#else
+#define raw_cpu_try_cmpxchg_1(pcp, ovalp, nval) \
+	raw_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
+#endif
+#endif
+#ifndef raw_cpu_try_cmpxchg_2
+#ifdef raw_cpu_cmpxchg_2
+#define raw_cpu_try_cmpxchg_2(pcp, ovalp, nval) \
+	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, raw_cpu_cmpxchg_2)
+#else
+#define raw_cpu_try_cmpxchg_2(pcp, ovalp, nval) \
+	raw_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
+#endif
+#endif
+#ifndef raw_cpu_try_cmpxchg_4
+#ifdef raw_cpu_cmpxchg_4
+#define raw_cpu_try_cmpxchg_4(pcp, ovalp, nval) \
+	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, raw_cpu_cmpxchg_4)
+#else
+#define raw_cpu_try_cmpxchg_4(pcp, ovalp, nval) \
+	raw_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
+#endif
+#endif
+#ifndef raw_cpu_try_cmpxchg_8
+#ifdef raw_cpu_cmpxchg_8
+#define raw_cpu_try_cmpxchg_8(pcp, ovalp, nval) \
+	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, raw_cpu_cmpxchg_8)
+#else
+#define raw_cpu_try_cmpxchg_8(pcp, ovalp, nval) \
+	raw_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
+#endif
+#endif
+
 #ifndef raw_cpu_cmpxchg_1
 #define raw_cpu_cmpxchg_1(pcp, oval, nval) \
 	raw_cpu_generic_cmpxchg(pcp, oval, nval)
@@ -407,6 +475,43 @@ do { \
 #define this_cpu_xchg_8(pcp, nval)	this_cpu_generic_xchg(pcp, nval)
 #endif

+#ifndef this_cpu_try_cmpxchg_1
+#ifdef this_cpu_cmpxchg_1
+#define this_cpu_try_cmpxchg_1(pcp, ovalp, nval) \
+	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, this_cpu_cmpxchg_1)
+#else
+#define this_cpu_try_cmpxchg_1(pcp, ovalp, nval) \
+	this_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
+#endif
+#endif
+#ifndef this_cpu_try_cmpxchg_2
+#ifdef this_cpu_cmpxchg_2
+#define this_cpu_try_cmpxchg_2(pcp, ovalp, nval) \
+	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, this_cpu_cmpxchg_2)
+#else
+#define this_cpu_try_cmpxchg_2(pcp, ovalp, nval) \
+	this_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
+#endif
+#endif
+#ifndef this_cpu_try_cmpxchg_4
+#ifdef this_cpu_cmpxchg_4
+#define this_cpu_try_cmpxchg_4(pcp, ovalp, nval) \
+	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, this_cpu_cmpxchg_4)
+#else
+#define this_cpu_try_cmpxchg_4(pcp, ovalp, nval) \
+	this_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
+#endif
+#endif
+#ifndef this_cpu_try_cmpxchg_8
+#ifdef this_cpu_cmpxchg_8
+#define this_cpu_try_cmpxchg_8(pcp, ovalp, nval) \
+	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, this_cpu_cmpxchg_8)
+#else
+#define this_cpu_try_cmpxchg_8(pcp, ovalp, nval) \
+	this_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
+#endif
+#endif
+
 #ifndef this_cpu_cmpxchg_1
 #define this_cpu_cmpxchg_1(pcp, oval, nval) \
 	this_cpu_generic_cmpxchg(pcp, oval, nval)
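As a plain-C illustration of the semantics that raw_cpu_generic_try_cmpxchg() (and the __cpu_fallback_try_cmpxchg() wrapper over an existing cmpxchg op) implement, a standalone sketch follows. The function name is made up for this example; the real macros operate on per-cpu data with the appropriate IRQ or preemption protection.

```c
#include <stdbool.h>

/*
 * Standalone sketch (illustrative only) of try_cmpxchg() semantics as
 * implemented by the generic macros above: compare *ptr against *old,
 * store 'new' on a match, and on a mismatch report the observed value
 * back through 'old' so the caller can retry without re-reading.
 */
static bool try_cmpxchg_like(unsigned long *ptr, unsigned long *old,
                             unsigned long new)
{
	unsigned long val = *ptr;

	if (val == *old) {
		*ptr = new;
		return true;
	}
	*old = val;	/* tell the caller what was actually there */
	return false;
}
```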