author		| Paul Burton <paul.burton@imgtec.com>	| 2017-06-10 02:26:37 +0200
committer	| Ralf Baechle <ralf@linux-mips.org>	| 2017-06-29 02:42:25 +0200
commit		| 62c6081dca75d6bec1198ed5a1ae50968b323a8c (patch)
tree		| 71800a5277c34d533ee726e0947d121c00848043 /arch/mips/include/asm/cmpxchg.h
parent		| MIPS: cmpxchg: Error out on unsupported xchg() calls (diff)
MIPS: cmpxchg: Drop __xchg_u{32,64} functions
The __xchg_u32() & __xchg_u64() functions now add very little value.
This patch therefore removes them, by:
- Moving memory barriers out of them & into xchg(), which also removes
  the duplication & readies us to support xchg_relaxed() if we wish to
  (see the sketch after the sign-off tags).
- Calling __xchg_asm() directly from __xchg().
- Performing the check for CONFIG_64BIT being enabled in the size=8
case of __xchg().
Signed-off-by: Paul Burton <paul.burton@imgtec.com>
Cc: linux-mips@linux-mips.org
Patchwork: https://patchwork.linux-mips.org/patch/16352/
Signed-off-by: Ralf Baechle <ralf@linux-mips.org>
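
With the barriers hoisted into xchg() itself, a relaxed variant would only
need to omit them. The following is a minimal illustrative sketch of what
such an xchg_relaxed() could look like on top of this patch; it is not part
of the change itself.

/*
 * Illustrative sketch only, not part of this patch: with the barriers
 * now issued by xchg(), a relaxed variant could simply call __xchg()
 * without them.
 */
#define xchg_relaxed(ptr, x)						\
({									\
	__typeof__(*(ptr)) __res;					\
									\
	BUILD_BUG_ON(sizeof(*(ptr)) & ~0xc);				\
									\
	__res = (__typeof__(*(ptr)))					\
		__xchg((unsigned long)(x), (ptr), sizeof(*(ptr)));	\
									\
	__res;								\
})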
Diffstat (limited to 'arch/mips/include/asm/cmpxchg.h')
-rw-r--r--	| arch/mips/include/asm/cmpxchg.h	| 48
1 file changed, 17 insertions(+), 31 deletions(-)
diff --git a/arch/mips/include/asm/cmpxchg.h b/arch/mips/include/asm/cmpxchg.h
index fe652c3e5d8c..e9c1e97bc29d 100644
--- a/arch/mips/include/asm/cmpxchg.h
+++ b/arch/mips/include/asm/cmpxchg.h
@@ -70,40 +70,18 @@ extern unsigned long __xchg_called_with_bad_pointer(void)
 	__ret;								\
 })
 
-static inline unsigned long __xchg_u32(volatile int * m, unsigned int val)
-{
-	__u32 retval;
-
-	smp_mb__before_llsc();
-	retval = __xchg_asm("ll", "sc", m, val);
-	smp_llsc_mb();
-
-	return retval;
-}
-
-#ifdef CONFIG_64BIT
-static inline __u64 __xchg_u64(volatile __u64 * m, __u64 val)
-{
-	__u64 retval;
-
-	smp_mb__before_llsc();
-	retval = __xchg_asm("lld", "scd", m, val);
-	smp_llsc_mb();
-
-	return retval;
-}
-#else
-extern __u64 __xchg_u64_unsupported_on_32bit_kernels(volatile __u64 * m, __u64 val);
-#define __xchg_u64 __xchg_u64_unsupported_on_32bit_kernels
-#endif
-
 static inline unsigned long __xchg(unsigned long x, volatile void * ptr, int size)
 {
 	switch (size) {
 	case 4:
-		return __xchg_u32(ptr, x);
+		return __xchg_asm("ll", "sc", (volatile u32 *)ptr, x);
+
 	case 8:
-		return __xchg_u64(ptr, x);
+		if (!IS_ENABLED(CONFIG_64BIT))
+			return __xchg_called_with_bad_pointer();
+
+		return __xchg_asm("lld", "scd", (volatile u64 *)ptr, x);
+
 	default:
 		return __xchg_called_with_bad_pointer();
 	}
@@ -111,10 +89,18 @@ static inline unsigned long __xchg(unsigned long x, volatile void * ptr, int siz
 
 #define xchg(ptr, x)							\
 ({									\
+	__typeof__(*(ptr)) __res;					\
+									\
 	BUILD_BUG_ON(sizeof(*(ptr)) & ~0xc);				\
 									\
-	((__typeof__(*(ptr)))						\
-		__xchg((unsigned long)(x), (ptr), sizeof(*(ptr))));	\
+	smp_mb__before_llsc();						\
+									\
+	__res = (__typeof__(*(ptr)))					\
+		__xchg((unsigned long)(x), (ptr), sizeof(*(ptr)));	\
+									\
+	smp_llsc_mb();							\
+									\
+	__res;								\
 })
 
 #define __cmpxchg_asm(ld, st, m, old, new)				\
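
For reference, the reworked xchg() keeps its full-barrier semantics:
smp_mb__before_llsc() is issued before the exchange and smp_llsc_mb() after
it, now in one place for both the 32-bit and the (64-bit kernel only) 64-bit
cases. A minimal usage sketch follows; the variable and function names are
made up for illustration.

/* Usage sketch (kernel context); 'owner' and 'try_claim' are illustrative. */
#include <linux/atomic.h>	/* pulls in the arch cmpxchg.h definitions */

static u32 owner;

static bool try_claim(void)
{
	u32 prev;

	/* Atomically store 1 and fetch the previous value; xchg() acts
	 * as a full memory barrier around the exchange. */
	prev = xchg(&owner, 1);

	/* A u8 or u16 operand would fail the
	 * BUILD_BUG_ON(sizeof(*(ptr)) & ~0xc) check at compile time,
	 * since only 4- and 8-byte operands are handled here. */
	return prev == 0;
}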