summaryrefslogtreecommitdiffstats
path: root/arch/powerpc/include
diff options
context:
space:
mode:
authorMichael Ellerman <mpe@ellerman.id.au>2016-11-24 17:08:11 +1100
committerMichael Ellerman <mpe@ellerman.id.au>2016-11-25 14:07:50 +1100
commitda58b23cb976ab83a80d358102e139afe94f0c56 (patch)
treeee8cfaeacbd6a305928e44c568308e91d83e7a94 /arch/powerpc/include
parentddbefe7e77603cc9645a571a8bf680e193caac97 (diff)
downloadlinux-da58b23cb976ab83a80d358102e139afe94f0c56.tar.bz2
powerpc: Fix __cmpxchg() to take a volatile ptr again
In commit d0563a1297e2 ("powerpc: Implement {cmp}xchg for u8 and u16") we removed the volatile from __cmpxchg(). This is leading to warnings such as:

  drivers/gpu/drm/drm_lock.c: In function ‘drm_lock_take’:
  arch/powerpc/include/asm/cmpxchg.h:484:37: warning: passing argument 1 of ‘__cmpxchg’ discards ‘volatile’ qualifier from pointer target
    (__typeof__(*(ptr))) __cmpxchg((ptr), (unsigned long)_o_, \

There doesn't seem to be consensus across architectures whether the argument is volatile or not, so at least for now put the volatile back.

Fixes: d0563a1297e2 ("powerpc: Implement {cmp}xchg for u8 and u16")
Signed-off-by: Michael Ellerman <mpe@ellerman.id.au>
Diffstat (limited to 'arch/powerpc/include')
-rw-r--r--arch/powerpc/include/asm/cmpxchg.h6
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/arch/powerpc/include/asm/cmpxchg.h b/arch/powerpc/include/asm/cmpxchg.h
index c12f110261b2..fc46b664c49e 100644
--- a/arch/powerpc/include/asm/cmpxchg.h
+++ b/arch/powerpc/include/asm/cmpxchg.h
@@ -14,7 +14,7 @@
#endif
#define XCHG_GEN(type, sfx, cl) \
-static inline u32 __xchg_##type##sfx(void *p, u32 val) \
+static inline u32 __xchg_##type##sfx(volatile void *p, u32 val) \
{ \
unsigned int prev, prev_mask, tmp, bitoff, off; \
\
@@ -40,7 +40,7 @@ static inline u32 __xchg_##type##sfx(void *p, u32 val) \
#define CMPXCHG_GEN(type, sfx, br, br2, cl) \
static inline \
-u32 __cmpxchg_##type##sfx(void *p, u32 old, u32 new) \
+u32 __cmpxchg_##type##sfx(volatile void *p, u32 old, u32 new) \
{ \
unsigned int prev, prev_mask, tmp, bitoff, off; \
\
@@ -399,7 +399,7 @@ __cmpxchg_u64_acquire(u64 *p, unsigned long old, unsigned long new)
#endif
static __always_inline unsigned long
-__cmpxchg(void *ptr, unsigned long old, unsigned long new,
+__cmpxchg(volatile void *ptr, unsigned long old, unsigned long new,
unsigned int size)
{
switch (size) {