Diffstat (limited to 'arch/mips/include/asm/cmpxchg.h')
 -rw-r--r--	arch/mips/include/asm/cmpxchg.h	280
 1 file changed, 121 insertions(+), 159 deletions(-)
diff --git a/arch/mips/include/asm/cmpxchg.h b/arch/mips/include/asm/cmpxchg.h
index b71ab4a5fd50..903f3bf48419 100644
--- a/arch/mips/include/asm/cmpxchg.h
+++ b/arch/mips/include/asm/cmpxchg.h
@@ -13,153 +13,107 @@
#include <asm/compiler.h>
#include <asm/war.h>
-static inline unsigned long __xchg_u32(volatile int * m, unsigned int val)
-{
- __u32 retval;
-
- smp_mb__before_llsc();
-
- if (kernel_uses_llsc && R10000_LLSC_WAR) {
- unsigned long dummy;
-
- __asm__ __volatile__(
- " .set arch=r4000 \n"
- "1: ll %0, %3 # xchg_u32 \n"
- " .set mips0 \n"
- " move %2, %z4 \n"
- " .set arch=r4000 \n"
- " sc %2, %1 \n"
- " beqzl %2, 1b \n"
- " .set mips0 \n"
- : "=&r" (retval), "=" GCC_OFF_SMALL_ASM() (*m), "=&r" (dummy)
- : GCC_OFF_SMALL_ASM() (*m), "Jr" (val)
- : "memory");
- } else if (kernel_uses_llsc) {
- unsigned long dummy;
-
- do {
- __asm__ __volatile__(
- " .set "MIPS_ISA_ARCH_LEVEL" \n"
- " ll %0, %3 # xchg_u32 \n"
- " .set mips0 \n"
- " move %2, %z4 \n"
- " .set "MIPS_ISA_ARCH_LEVEL" \n"
- " sc %2, %1 \n"
- " .set mips0 \n"
- : "=&r" (retval), "=" GCC_OFF_SMALL_ASM() (*m),
- "=&r" (dummy)
- : GCC_OFF_SMALL_ASM() (*m), "Jr" (val)
- : "memory");
- } while (unlikely(!dummy));
- } else {
- unsigned long flags;
-
- raw_local_irq_save(flags);
- retval = *m;
- *m = val;
- raw_local_irq_restore(flags); /* implies memory barrier */
- }
-
- smp_llsc_mb();
-
- return retval;
-}
+/*
+ * Using a branch-likely instruction to check the result of an sc instruction
+ * works around a bug present in R10000 CPUs prior to revision 3.0 that could
+ * cause ll-sc sequences to execute non-atomically.
+ */
+#if R10000_LLSC_WAR
+# define __scbeqz "beqzl"
+#else
+# define __scbeqz "beqz"
+#endif
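
For illustration (a sketch, not part of the patch): the opcode selected
here is spliced into every ll/sc retry loop below, so on affected
R10000s the sequences close with a branch-likely:

	/*
	 * Effective tail of the ll/sc loops with R10000_LLSC_WAR set:
	 *
	 *	sc	$1, (m)		# attempt the store-conditional
	 *	beqzl	$1, 1b		# branch-likely: retry if sc failed
	 *
	 * Kernels without the workaround use an ordinary "beqz" here.
	 */
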
-#ifdef CONFIG_64BIT
-static inline __u64 __xchg_u64(volatile __u64 * m, __u64 val)
-{
- __u64 retval;
-
- smp_mb__before_llsc();
-
- if (kernel_uses_llsc && R10000_LLSC_WAR) {
- unsigned long dummy;
-
- __asm__ __volatile__(
- " .set arch=r4000 \n"
- "1: lld %0, %3 # xchg_u64 \n"
- " move %2, %z4 \n"
- " scd %2, %1 \n"
- " beqzl %2, 1b \n"
- " .set mips0 \n"
- : "=&r" (retval), "=" GCC_OFF_SMALL_ASM() (*m), "=&r" (dummy)
- : GCC_OFF_SMALL_ASM() (*m), "Jr" (val)
- : "memory");
- } else if (kernel_uses_llsc) {
- unsigned long dummy;
-
- do {
- __asm__ __volatile__(
- " .set "MIPS_ISA_ARCH_LEVEL" \n"
- " lld %0, %3 # xchg_u64 \n"
- " move %2, %z4 \n"
- " scd %2, %1 \n"
- " .set mips0 \n"
- : "=&r" (retval), "=" GCC_OFF_SMALL_ASM() (*m),
- "=&r" (dummy)
- : GCC_OFF_SMALL_ASM() (*m), "Jr" (val)
- : "memory");
- } while (unlikely(!dummy));
- } else {
- unsigned long flags;
-
- raw_local_irq_save(flags);
- retval = *m;
- *m = val;
- raw_local_irq_restore(flags); /* implies memory barrier */
- }
+/*
+ * These functions don't exist, so if they are called you'll either:
+ *
+ * - Get an error at compile time due to __compiletime_error, if supported
+ *   by your compiler.
+ *
+ * or:
+ *
+ * - Get an error at link time due to the call to the missing function.
+ */
+extern unsigned long __cmpxchg_called_with_bad_pointer(void)
+ __compiletime_error("Bad argument size for cmpxchg");
+extern unsigned long __xchg_called_with_bad_pointer(void)
+ __compiletime_error("Bad argument size for xchg");
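
A minimal sketch of the mechanism these stubs rely on (the standalone
names below are illustrative, not the kernel's):

	/* GCC's error attribute, which __compiletime_error wraps: any
	 * call to bad_size() that survives dead-code elimination
	 * becomes a hard compile-time error. */
	extern unsigned long bad_size(void)
		__attribute__((error("Bad argument size")));

	static inline unsigned long demo_size_check(unsigned long size)
	{
		switch (size) {
		case 4:
		case 8:
			return size;		/* supported sizes */
		default:
			return bad_size();	/* folded away when size
						   is a constant 4 or 8 */
		}
	}

Called as demo_size_check(sizeof(*ptr)), the bad case is eliminated for
supported sizes when optimising; on compilers without the attribute the
unresolved reference instead fails at link time, matching the either/or
described above.
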
- smp_llsc_mb();
+#define __xchg_asm(ld, st, m, val) \
+({ \
+ __typeof(*(m)) __ret; \
+ \
+ if (kernel_uses_llsc) { \
+ __asm__ __volatile__( \
+ " .set push \n" \
+ " .set noat \n" \
+ " .set " MIPS_ISA_ARCH_LEVEL " \n" \
+ "1: " ld " %0, %2 # __xchg_asm \n" \
+ " .set mips0 \n" \
+ " move $1, %z3 \n" \
+ " .set " MIPS_ISA_ARCH_LEVEL " \n" \
+ " " st " $1, %1 \n" \
+ "\t" __scbeqz " $1, 1b \n" \
+ " .set pop \n" \
+ : "=&r" (__ret), "=" GCC_OFF_SMALL_ASM() (*m) \
+ : GCC_OFF_SMALL_ASM() (*m), "Jr" (val) \
+ : "memory"); \
+ } else { \
+ unsigned long __flags; \
+ \
+ raw_local_irq_save(__flags); \
+ __ret = *m; \
+ *m = val; \
+ raw_local_irq_restore(__flags); \
+ } \
+ \
+ __ret; \
+})
- return retval;
-}
-#else
-extern __u64 __xchg_u64_unsupported_on_32bit_kernels(volatile __u64 * m, __u64 val);
-#define __xchg_u64 __xchg_u64_unsupported_on_32bit_kernels
-#endif
+extern unsigned long __xchg_small(volatile void *ptr, unsigned long val,
+ unsigned int size);
-static inline unsigned long __xchg(unsigned long x, volatile void * ptr, int size)
+static inline unsigned long __xchg(volatile void *ptr, unsigned long x,
+ int size)
{
switch (size) {
+ case 1:
+ case 2:
+ return __xchg_small(ptr, x, size);
+
case 4:
- return __xchg_u32(ptr, x);
+ return __xchg_asm("ll", "sc", (volatile u32 *)ptr, x);
+
case 8:
- return __xchg_u64(ptr, x);
- }
+ if (!IS_ENABLED(CONFIG_64BIT))
+ return __xchg_called_with_bad_pointer();
+
+ return __xchg_asm("lld", "scd", (volatile u64 *)ptr, x);
- return x;
+ default:
+ return __xchg_called_with_bad_pointer();
+ }
}
#define xchg(ptr, x) \
({ \
- BUILD_BUG_ON(sizeof(*(ptr)) & ~0xc); \
+ __typeof__(*(ptr)) __res; \
\
- ((__typeof__(*(ptr))) \
- __xchg((unsigned long)(x), (ptr), sizeof(*(ptr)))); \
+ smp_mb__before_llsc(); \
+ \
+ __res = (__typeof__(*(ptr))) \
+ __xchg((ptr), (unsigned long)(x), sizeof(*(ptr))); \
+ \
+ smp_llsc_mb(); \
+ \
+ __res; \
})
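
A usage sketch (hypothetical caller, not from the patch) of the
semantics this barrier placement gives callers: xchg() returns the old
value and acts as a full barrier on SMP:

	struct work;

	/* Hypothetical: publish a new work item, learning whether a
	 * previous one was still pending. Fully ordered on SMP by the
	 * smp_mb__before_llsc()/smp_llsc_mb() pair inside xchg(). */
	static struct work *install_work(struct work **slot,
					 struct work *new_work)
	{
		return xchg(slot, new_work);
	}
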
#define __cmpxchg_asm(ld, st, m, old, new) \
({ \
__typeof(*(m)) __ret; \
\
- if (kernel_uses_llsc && R10000_LLSC_WAR) { \
- __asm__ __volatile__( \
- " .set push \n" \
- " .set noat \n" \
- " .set arch=r4000 \n" \
- "1: " ld " %0, %2 # __cmpxchg_asm \n" \
- " bne %0, %z3, 2f \n" \
- " .set mips0 \n" \
- " move $1, %z4 \n" \
- " .set arch=r4000 \n" \
- " " st " $1, %1 \n" \
- " beqzl $1, 1b \n" \
- "2: \n" \
- " .set pop \n" \
- : "=&r" (__ret), "=" GCC_OFF_SMALL_ASM() (*m) \
- : GCC_OFF_SMALL_ASM() (*m), "Jr" (old), "Jr" (new) \
- : "memory"); \
- } else if (kernel_uses_llsc) { \
+ if (kernel_uses_llsc) { \
__asm__ __volatile__( \
" .set push \n" \
" .set noat \n" \
@@ -170,7 +124,7 @@ static inline unsigned long __xchg(unsigned long x, volatile void * ptr, int siz
" move $1, %z4 \n" \
" .set "MIPS_ISA_ARCH_LEVEL" \n" \
" " st " $1, %1 \n" \
- " beqz $1, 1b \n" \
+ "\t" __scbeqz " $1, 1b \n" \
" .set pop \n" \
"2: \n" \
: "=&r" (__ret), "=" GCC_OFF_SMALL_ASM() (*m) \
@@ -189,44 +143,50 @@ static inline unsigned long __xchg(unsigned long x, volatile void * ptr, int siz
__ret; \
})
-/*
- * This function doesn't exist, so you'll get a linker error
- * if something tries to do an invalid cmpxchg().
- */
-extern void __cmpxchg_called_with_bad_pointer(void);
+extern unsigned long __cmpxchg_small(volatile void *ptr, unsigned long old,
+ unsigned long new, unsigned int size);
+
+static inline unsigned long __cmpxchg(volatile void *ptr, unsigned long old,
+ unsigned long new, unsigned int size)
+{
+ switch (size) {
+ case 1:
+ case 2:
+ return __cmpxchg_small(ptr, old, new, size);
+
+ case 4:
+ return __cmpxchg_asm("ll", "sc", (volatile u32 *)ptr, old, new);
+
+ case 8:
+ /* lld/scd are only available for MIPS64 */
+ if (!IS_ENABLED(CONFIG_64BIT))
+ return __cmpxchg_called_with_bad_pointer();
+
+ return __cmpxchg_asm("lld", "scd", (volatile u64 *)ptr, old, new);
-#define __cmpxchg(ptr, old, new, pre_barrier, post_barrier) \
+ default:
+ return __cmpxchg_called_with_bad_pointer();
+ }
+}
+
+#define cmpxchg_local(ptr, old, new) \
+ ((__typeof__(*(ptr))) \
+ __cmpxchg((ptr), \
+ (unsigned long)(__typeof__(*(ptr)))(old), \
+ (unsigned long)(__typeof__(*(ptr)))(new), \
+ sizeof(*(ptr))))
+
+#define cmpxchg(ptr, old, new) \
({ \
- __typeof__(ptr) __ptr = (ptr); \
- __typeof__(*(ptr)) __old = (old); \
- __typeof__(*(ptr)) __new = (new); \
- __typeof__(*(ptr)) __res = 0; \
+ __typeof__(*(ptr)) __res; \
\
- pre_barrier; \
- \
- switch (sizeof(*(__ptr))) { \
- case 4: \
- __res = __cmpxchg_asm("ll", "sc", __ptr, __old, __new); \
- break; \
- case 8: \
- if (sizeof(long) == 8) { \
- __res = __cmpxchg_asm("lld", "scd", __ptr, \
- __old, __new); \
- break; \
- } \
- default: \
- __cmpxchg_called_with_bad_pointer(); \
- break; \
- } \
- \
- post_barrier; \
+ smp_mb__before_llsc(); \
+ __res = cmpxchg_local((ptr), (old), (new)); \
+ smp_llsc_mb(); \
\
__res; \
})
-#define cmpxchg(ptr, old, new) __cmpxchg(ptr, old, new, smp_mb__before_llsc(), smp_llsc_mb())
-#define cmpxchg_local(ptr, old, new) __cmpxchg(ptr, old, new, , )
-
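
A usage sketch (hypothetical helper, not from the patch): the classic
compare-and-swap retry loop that the full-barrier cmpxchg() enables:

	#include <linux/compiler.h>	/* READ_ONCE() */

	/* Hypothetical lock-free increment: retry until no other CPU
	 * updated the word between our load and the swap. */
	static unsigned int lockfree_inc(unsigned int *p)
	{
		unsigned int old;

		do {
			old = READ_ONCE(*p);
		} while (cmpxchg(p, old, old + 1) != old);

		return old + 1;
	}
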
#ifdef CONFIG_64BIT
#define cmpxchg64_local(ptr, o, n) \
({ \
@@ -245,4 +205,6 @@ extern void __cmpxchg_called_with_bad_pointer(void);
#define cmpxchg64(ptr, o, n) cmpxchg64_local((ptr), (o), (n))
#endif
+#undef __scbeqz
+
#endif /* __ASM_CMPXCHG_H */