author		Sarbojit Ganguly <ganguly.s@samsung.com>	2015-10-09 12:10:02 +0100
committer	Russell King <rmk+kernel@arm.linux.org.uk>	2015-10-09 16:22:54 +0100
commit		e8973a889e69cb86cac08bec2863c878c0d27af9 (patch)
tree		e38d278665d5998a1f4cc2df2eee8a5608440b32 /arch/arm/include/asm/cmpxchg.h
parent		e1b8c05dccc7d3de8c49c92c41b2b5ac7d8275b2 (diff)
ARM: 8443/1: Adding support for atomic half word exchange
Half-word atomic exchange was not supported, and qspinlock on ARM requires it, so __xchg() is extended to handle the 2-byte case as well. ARMv6 and lower do not support ldrex{b,h}, so guard code is added to prevent build breaks on those architectures.

Signed-off-by: Sarbojit Ganguly <ganguly.s@samsung.com>
Signed-off-by: Russell King <rmk+kernel@arm.linux.org.uk>
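For readers who prefer plain C over inline assembly, the sketch below restates what the new 2-byte path provides, using the GCC/Clang __atomic_exchange_n builtin instead of the ldrexh/strexh retry loop. The function name half_word_xchg is purely illustrative and is not part of this patch, and the seq-cst ordering is an assumption chosen to mirror the fully ordered kernel xchg().

#include <stdint.h>

/* Illustrative only: atomically store 'new' into *ptr and return the value
 * that was there before -- the behaviour the ldrexh/strexh loop gives for
 * size == 2. */
static inline uint16_t half_word_xchg(volatile uint16_t *ptr, uint16_t new)
{
	return __atomic_exchange_n(ptr, new, __ATOMIC_SEQ_CST);
}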
Diffstat (limited to 'arch/arm/include/asm/cmpxchg.h')
-rw-r--r--	arch/arm/include/asm/cmpxchg.h	12
1 file changed, 12 insertions(+), 0 deletions(-)
diff --git a/arch/arm/include/asm/cmpxchg.h b/arch/arm/include/asm/cmpxchg.h
index 916a2744d5c6..97882f9bad12 100644
--- a/arch/arm/include/asm/cmpxchg.h
+++ b/arch/arm/include/asm/cmpxchg.h
@@ -39,6 +39,7 @@ static inline unsigned long __xchg(unsigned long x, volatile void *ptr, int size
 	switch (size) {
 #if __LINUX_ARM_ARCH__ >= 6
+#ifndef CONFIG_CPU_V6 /* MIN ARCH >= V6K */
 	case 1:
 		asm volatile("@ __xchg1\n"
 		"1:	ldrexb	%0, [%3]\n"
@@ -49,6 +50,17 @@ static inline unsigned long __xchg(unsigned long x, volatile void *ptr, int size
 			: "r" (x), "r" (ptr)
 			: "memory", "cc");
 		break;
+	case 2:
+		asm volatile("@ __xchg2\n"
+		"1:	ldrexh	%0, [%3]\n"
+		"	strexh	%1, %2, [%3]\n"
+		"	teq	%1, #0\n"
+		"	bne	1b"
+			: "=&r" (ret), "=&r" (tmp)
+			: "r" (x), "r" (ptr)
+			: "memory", "cc");
+		break;
+#endif
 	case 4:
 		asm volatile("@ __xchg4\n"
 		"1:	ldrex	%0, [%3]\n"