author    | Michael Ellerman <mpe@ellerman.id.au> | 2016-11-24 07:08:11 +0100
committer | Michael Ellerman <mpe@ellerman.id.au> | 2016-11-25 04:07:50 +0100
commit    | da58b23cb976ab83a80d358102e139afe94f0c56
tree      | ee8cfaeacbd6a305928e44c568308e91d83e7a94 /arch/powerpc
parent    | Merge branch 'topic/ppc-kvm' into next
powerpc: Fix __cmpxchg() to take a volatile ptr again
In commit d0563a1297e2 ("powerpc: Implement {cmp}xchg for u8 and u16")
we removed the volatile from __cmpxchg().
This is leading to warnings such as:
  drivers/gpu/drm/drm_lock.c: In function ‘drm_lock_take’:
  arch/powerpc/include/asm/cmpxchg.h:484:37: warning: passing argument 1 of ‘__cmpxchg’ discards ‘volatile’ qualifier from pointer target type
    (__typeof__(*(ptr))) __cmpxchg((ptr), (unsigned long)_o_, \
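To see why the warning fires, here is a minimal user-space sketch; fake_cmpxchg is a hypothetical stand-in for the post-d0563a1297e2 prototype, not the kernel's implementation. Passing a pointer to volatile data where a plain void * is expected discards the qualifier:

```c
#include <stdio.h>

/* Hypothetical stand-in for the prototype without volatile, mirroring
 * the shape __cmpxchg() had after commit d0563a1297e2. */
static unsigned long fake_cmpxchg(void *ptr, unsigned long old,
				  unsigned long new)
{
	/* Single-threaded stand-in for the atomic compare-and-swap. */
	unsigned long *p = ptr;
	unsigned long prev = *p;

	if (prev == old)
		*p = new;
	return prev;
}

int main(void)
{
	/* Callers such as drm_lock_take() hand in volatile-qualified
	 * data: a lock word that other contexts may change. */
	volatile unsigned long lock = 0;

	/* gcc: warning: passing argument 1 of ‘fake_cmpxchg’ discards
	 * ‘volatile’ qualifier from pointer target type */
	fake_cmpxchg(&lock, 0, 1);

	printf("lock = %lu\n", lock);	/* prints: lock = 1 */
	return 0;
}
```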
There doesn't seem to be consensus across architectures on whether the
argument should be volatile or not, so at least for now put the volatile back.
Fixes: d0563a1297e2 ("powerpc: Implement {cmp}xchg for u8 and u16")
Signed-off-by: Michael Ellerman <mpe@ellerman.id.au>
Diffstat (limited to 'arch/powerpc')
arch/powerpc/include/asm/cmpxchg.h | 6 +++---
1 file changed, 3 insertions(+), 3 deletions(-)
```diff
diff --git a/arch/powerpc/include/asm/cmpxchg.h b/arch/powerpc/include/asm/cmpxchg.h
index c12f110261b2..fc46b664c49e 100644
--- a/arch/powerpc/include/asm/cmpxchg.h
+++ b/arch/powerpc/include/asm/cmpxchg.h
@@ -14,7 +14,7 @@
 #endif
 
 #define XCHG_GEN(type, sfx, cl)					\
-static inline u32 __xchg_##type##sfx(void *p, u32 val)		\
+static inline u32 __xchg_##type##sfx(volatile void *p, u32 val)	\
 {								\
 	unsigned int prev, prev_mask, tmp, bitoff, off;		\
 								\
@@ -40,7 +40,7 @@ static inline u32 __xchg_##type##sfx(void *p, u32 val)		\
 
 #define CMPXCHG_GEN(type, sfx, br, br2, cl)			\
 static inline							\
-u32 __cmpxchg_##type##sfx(void *p, u32 old, u32 new)		\
+u32 __cmpxchg_##type##sfx(volatile void *p, u32 old, u32 new)	\
 {								\
 	unsigned int prev, prev_mask, tmp, bitoff, off;		\
 								\
@@ -399,7 +399,7 @@ __cmpxchg_u64_acquire(u64 *p, unsigned long old, unsigned long new)
 #endif
 
 static __always_inline unsigned long
-__cmpxchg(void *ptr, unsigned long old, unsigned long new,
+__cmpxchg(volatile void *ptr, unsigned long old, unsigned long new,
 	  unsigned int size)
 {
 	switch (size) {
```
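With the volatile restored, callers that operate on volatile-qualified data pass the qualifier through cleanly. A sketch of the fixed shape, under the same assumptions as above (fixed_cmpxchg and try_take_lock are illustrative single-threaded stand-ins, not the kernel's atomic implementation or the DRM code):

```c
#include <stdbool.h>
#include <stdio.h>

/* Stand-in with the restored signature: the pointer parameter is
 * volatile void *, so volatile-qualified arguments are accepted. */
static unsigned long fixed_cmpxchg(volatile void *ptr, unsigned long old,
				   unsigned long new)
{
	/* Single-threaded stand-in for the atomic compare-and-swap. */
	volatile unsigned long *p = ptr;
	unsigned long prev = *p;

	if (prev == old)
		*p = new;
	return prev;
}

/* Illustrative caller in the style of drm_lock_take(): the lock word
 * is volatile because other contexts may modify it underneath us. */
static bool try_take_lock(volatile unsigned long *lock)
{
	/* No qualifier is discarded, so no warning is emitted. */
	return fixed_cmpxchg(lock, 0, 1) == 0;
}

int main(void)
{
	volatile unsigned long lock = 0;

	printf("took lock: %d\n", try_take_lock(&lock));	/* 1 */
	printf("took lock: %d\n", try_take_lock(&lock));	/* 0 */
	return 0;
}
```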