author | Mark Rutland <mark.rutland@arm.com> | 2018-06-21 14:13:08 +0200
committer | Ingo Molnar <mingo@kernel.org> | 2018-06-21 14:22:33 +0200
commit | bef828204a1bc7a0fd3a24551c4265e9c2ab95ed
tree | bb35ac3bc791547c1345aaa737406f9b315a9e63 /arch
parent | atomics: Make conditional ops return 'bool'
atomics/treewide: Make atomic64_inc_not_zero() optional
We define a trivial fallback for atomic_inc_not_zero(), but don't do
the same for atomic64_inc_not_zero(), leading most architectures to
define the same boilerplate.
Let's add a fallback in <linux/atomic.h>, and remove the redundant
implementations. Note that atomic64_add_unless() is always defined in
<linux/atomic.h>, and promotes its arguments to the requisite types, so
we need not do this explicitly.
There should be no functional change as a result of this patch.
Signed-off-by: Mark Rutland <mark.rutland@arm.com>
Reviewed-by: Will Deacon <will.deacon@arm.com>
Acked-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Acked-by: Palmer Dabbelt <palmer@sifive.com>
Cc: Boqun Feng <boqun.feng@gmail.com>
Cc: Linus Torvalds <torvalds@linux-foundation.org>
Cc: Thomas Gleixner <tglx@linutronix.de>
Link: https://lore.kernel.org/lkml/20180621121321.4761-6-mark.rutland@arm.com
Signed-off-by: Ingo Molnar <mingo@kernel.org>
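The generic fallback described above lands in <linux/atomic.h>, which is outside this arch-limited diffstat, so its hunk is not shown below. Assuming it mirrors the per-architecture definitions being removed, a minimal sketch would be:

```c
/*
 * Sketch of the <linux/atomic.h> fallback (the actual hunk is not part of
 * this arch-limited view): an architecture that already provides
 * atomic64_inc_not_zero() leaves the macro defined, so the #ifndef test
 * skips this generic definition.
 */
#ifndef atomic64_inc_not_zero
#define atomic64_inc_not_zero(v)	atomic64_add_unless((v), 1, 0)
#endif
```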
Diffstat (limited to 'arch')
-rw-r--r-- | arch/alpha/include/asm/atomic.h | 2
-rw-r--r-- | arch/arc/include/asm/atomic.h | 1
-rw-r--r-- | arch/arm/include/asm/atomic.h | 1
-rw-r--r-- | arch/arm64/include/asm/atomic.h | 2
-rw-r--r-- | arch/ia64/include/asm/atomic.h | 2
-rw-r--r-- | arch/mips/include/asm/atomic.h | 2
-rw-r--r-- | arch/parisc/include/asm/atomic.h | 2
-rw-r--r-- | arch/powerpc/include/asm/atomic.h | 1
-rw-r--r-- | arch/riscv/include/asm/atomic.h | 7
-rw-r--r-- | arch/s390/include/asm/atomic.h | 1
-rw-r--r-- | arch/sparc/include/asm/atomic_64.h | 2
-rw-r--r-- | arch/x86/include/asm/atomic64_32.h | 2
-rw-r--r-- | arch/x86/include/asm/atomic64_64.h | 2
13 files changed, 2 insertions, 25 deletions
diff --git a/arch/alpha/include/asm/atomic.h b/arch/alpha/include/asm/atomic.h
index 392b15a4dd4f..eb0f25e4c5dd 100644
--- a/arch/alpha/include/asm/atomic.h
+++ b/arch/alpha/include/asm/atomic.h
@@ -296,8 +296,6 @@ static inline long atomic64_dec_if_positive(atomic64_t *v)
 	return old - 1;
 }
 
-#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
-
 #define atomic_add_negative(a, v) (atomic_add_return((a), (v)) < 0)
 #define atomic64_add_negative(a, v) (atomic64_add_return((a), (v)) < 0)
 
diff --git a/arch/arc/include/asm/atomic.h b/arch/arc/include/asm/atomic.h
index cecdf3403caf..1406825b5e7d 100644
--- a/arch/arc/include/asm/atomic.h
+++ b/arch/arc/include/asm/atomic.h
@@ -603,7 +603,6 @@ static inline int atomic64_add_unless(atomic64_t *v, long long a, long long u)
 #define atomic64_dec(v) atomic64_sub(1LL, (v))
 #define atomic64_dec_return(v) atomic64_sub_return(1LL, (v))
 #define atomic64_dec_and_test(v) (atomic64_dec_return((v)) == 0)
-#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1LL, 0LL)
 
 #endif /* !CONFIG_GENERIC_ATOMIC64 */
 
diff --git a/arch/arm/include/asm/atomic.h b/arch/arm/include/asm/atomic.h
index 9d56d0727c9b..02f3894faa48 100644
--- a/arch/arm/include/asm/atomic.h
+++ b/arch/arm/include/asm/atomic.h
@@ -534,7 +534,6 @@ static inline int atomic64_add_unless(atomic64_t *v, long long a, long long u)
 #define atomic64_dec(v) atomic64_sub(1LL, (v))
 #define atomic64_dec_return_relaxed(v) atomic64_sub_return_relaxed(1LL, (v))
 #define atomic64_dec_and_test(v) (atomic64_dec_return((v)) == 0)
-#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1LL, 0LL)
 
 #endif /* !CONFIG_GENERIC_ATOMIC64 */
 #endif
diff --git a/arch/arm64/include/asm/atomic.h b/arch/arm64/include/asm/atomic.h
index 264d20339f74..ad50412889c5 100644
--- a/arch/arm64/include/asm/atomic.h
+++ b/arch/arm64/include/asm/atomic.h
@@ -204,7 +204,5 @@
 #define atomic64_add_unless(v, a, u) (___atomic_add_unless(v, a, u, 64) != u)
 #define atomic64_andnot atomic64_andnot
 
-#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
-
 #endif
 #endif
diff --git a/arch/ia64/include/asm/atomic.h b/arch/ia64/include/asm/atomic.h
index 9d2ddde5f9d5..93d48b823220 100644
--- a/arch/ia64/include/asm/atomic.h
+++ b/arch/ia64/include/asm/atomic.h
@@ -246,8 +246,6 @@ static __inline__ long atomic64_add_unless(atomic64_t *v, long a, long u)
 	return c != (u);
 }
 
-#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
-
 static __inline__ long atomic64_dec_if_positive(atomic64_t *v)
 {
 	long c, old, dec;
diff --git a/arch/mips/include/asm/atomic.h b/arch/mips/include/asm/atomic.h
index 02fc1553cf9b..502e691c6393 100644
--- a/arch/mips/include/asm/atomic.h
+++ b/arch/mips/include/asm/atomic.h
@@ -644,8 +644,6 @@ static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
 	return c != (u);
 }
 
-#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
-
 #define atomic64_dec_return(v) atomic64_sub_return(1, (v))
 #define atomic64_inc_return(v) atomic64_add_return(1, (v))
 
diff --git a/arch/parisc/include/asm/atomic.h b/arch/parisc/include/asm/atomic.h
index 7748abced766..3fd0243bf405 100644
--- a/arch/parisc/include/asm/atomic.h
+++ b/arch/parisc/include/asm/atomic.h
@@ -305,8 +305,6 @@ static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
 	return c != (u);
 }
 
-#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
-
 /*
  * atomic64_dec_if_positive - decrement by 1 if old value positive
  * @v: pointer of type atomic_t
diff --git a/arch/powerpc/include/asm/atomic.h b/arch/powerpc/include/asm/atomic.h
index 1483261080a1..e59620ee4f6b 100644
--- a/arch/powerpc/include/asm/atomic.h
+++ b/arch/powerpc/include/asm/atomic.h
@@ -582,6 +582,7 @@ static __inline__ int atomic64_inc_not_zero(atomic64_t *v)
 
 	return t1 != 0;
 }
+#define atomic64_inc_not_zero(v) atomic64_inc_not_zero((v))
 
 #endif /* __powerpc64__ */
 
diff --git a/arch/riscv/include/asm/atomic.h b/arch/riscv/include/asm/atomic.h
index 0e27e050ba14..18259e90f57e 100644
--- a/arch/riscv/include/asm/atomic.h
+++ b/arch/riscv/include/asm/atomic.h
@@ -375,13 +375,6 @@ static __always_inline int atomic64_add_unless(atomic64_t *v, long a, long u)
 }
 #endif
 
-#ifndef CONFIG_GENERIC_ATOMIC64
-static __always_inline long atomic64_inc_not_zero(atomic64_t *v)
-{
-	return atomic64_add_unless(v, 1, 0);
-}
-#endif
-
 /*
  * atomic_{cmp,}xchg is required to have exactly the same ordering semantics as
  * {cmp,}xchg and the operations that return, so they need a full barrier.
diff --git a/arch/s390/include/asm/atomic.h b/arch/s390/include/asm/atomic.h
index c2858cdd8c29..66dac30a4fe1 100644
--- a/arch/s390/include/asm/atomic.h
+++ b/arch/s390/include/asm/atomic.h
@@ -212,6 +212,5 @@ static inline long atomic64_dec_if_positive(atomic64_t *v)
 #define atomic64_dec(_v) atomic64_sub(1, _v)
 #define atomic64_dec_return(_v) atomic64_sub_return(1, _v)
 #define atomic64_dec_and_test(_v) (atomic64_sub_return(1, _v) == 0)
-#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
 
 #endif /* __ARCH_S390_ATOMIC__ */
diff --git a/arch/sparc/include/asm/atomic_64.h b/arch/sparc/include/asm/atomic_64.h
index f416fd3d2708..07830a316464 100644
--- a/arch/sparc/include/asm/atomic_64.h
+++ b/arch/sparc/include/asm/atomic_64.h
@@ -123,8 +123,6 @@ static inline long atomic64_add_unless(atomic64_t *v, long a, long u)
 	return c != (u);
 }
 
-#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
-
 long atomic64_dec_if_positive(atomic64_t *v);
 
 #endif /* !(__ARCH_SPARC64_ATOMIC__) */
diff --git a/arch/x86/include/asm/atomic64_32.h b/arch/x86/include/asm/atomic64_32.h
index 92212bf0484f..2a33cc17801b 100644
--- a/arch/x86/include/asm/atomic64_32.h
+++ b/arch/x86/include/asm/atomic64_32.h
@@ -295,7 +295,7 @@ static inline int arch_atomic64_add_unless(atomic64_t *v, long long a,
 	return (int)a;
 }
 
-
+#define arch_atomic64_inc_not_zero arch_atomic64_inc_not_zero
 static inline int arch_atomic64_inc_not_zero(atomic64_t *v)
 {
 	int r;
diff --git a/arch/x86/include/asm/atomic64_64.h b/arch/x86/include/asm/atomic64_64.h
index 6106b59d3260..6f95023894b7 100644
--- a/arch/x86/include/asm/atomic64_64.h
+++ b/arch/x86/include/asm/atomic64_64.h
@@ -207,8 +207,6 @@ static inline bool arch_atomic64_add_unless(atomic64_t *v, long a, long u)
 	return true;
 }
 
-#define arch_atomic64_inc_not_zero(v) arch_atomic64_add_unless((v), 1, 0)
-
 /*
  * arch_atomic64_dec_if_positive - decrement by 1 if old value positive
  * @v: pointer of type atomic_t
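Note how the two architectures that keep a native implementation opt out of the new fallback: powerpc adds `#define atomic64_inc_not_zero(v) atomic64_inc_not_zero((v))` and 32-bit x86 adds `#define arch_atomic64_inc_not_zero arch_atomic64_inc_not_zero`, so an `#ifndef` test in the generic header sees the operation as already provided. A minimal sketch of that idiom from an architecture's side (the surrounding header context is hypothetical, not taken from this patch):

```c
/*
 * Sketch of the opt-out idiom (hypothetical arch header, not from this
 * patch): provide the operation, then define the macro to itself so the
 * #ifndef-guarded fallback in <linux/atomic.h> is never used here.
 */
static inline int atomic64_inc_not_zero(atomic64_t *v)
{
	/* an arch-optimized implementation would live here */
	return atomic64_add_unless(v, 1, 0);
}
#define atomic64_inc_not_zero atomic64_inc_not_zero
```

Either the object-like form above or the function-like form powerpc uses works, since the `#ifndef` check only asks whether the name is defined as a macro.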