Diffstat (limited to 'arch')
-rw-r--r--  arch/blackfin/include/asm/atomic.h |  8 ++++++++
-rw-r--r--  arch/blackfin/kernel/bfin_ksyms.c  |  1 +
-rw-r--r--  arch/blackfin/mach-bf561/atomic.S  | 43 +++++++++++++++++++-----------
3 files changed, 40 insertions(+), 12 deletions(-)
diff --git a/arch/blackfin/include/asm/atomic.h b/arch/blackfin/include/asm/atomic.h
index 1c1c42330c99..63c7deceeeb6 100644
--- a/arch/blackfin/include/asm/atomic.h
+++ b/arch/blackfin/include/asm/atomic.h
@@ -17,6 +17,7 @@
asmlinkage int __raw_uncached_fetch_asm(const volatile int *ptr);
asmlinkage int __raw_atomic_add_asm(volatile int *ptr, int value);
+asmlinkage int __raw_atomic_xadd_asm(volatile int *ptr, int value);
asmlinkage int __raw_atomic_and_asm(volatile int *ptr, int value);
asmlinkage int __raw_atomic_or_asm(volatile int *ptr, int value);
@@ -28,10 +29,17 @@ asmlinkage int __raw_atomic_test_asm(const volatile int *ptr, int value);
#define atomic_add_return(i, v) __raw_atomic_add_asm(&(v)->counter, i)
#define atomic_sub_return(i, v) __raw_atomic_add_asm(&(v)->counter, -(i))
+#define atomic_fetch_add(i, v) __raw_atomic_xadd_asm(&(v)->counter, i)
+#define atomic_fetch_sub(i, v) __raw_atomic_xadd_asm(&(v)->counter, -(i))
+
#define atomic_or(i, v) (void)__raw_atomic_or_asm(&(v)->counter, i)
#define atomic_and(i, v) (void)__raw_atomic_and_asm(&(v)->counter, i)
#define atomic_xor(i, v) (void)__raw_atomic_xor_asm(&(v)->counter, i)
+#define atomic_fetch_or(i, v) __raw_atomic_or_asm(&(v)->counter, i)
+#define atomic_fetch_and(i, v) __raw_atomic_and_asm(&(v)->counter, i)
+#define atomic_fetch_xor(i, v) __raw_atomic_xor_asm(&(v)->counter, i)
+
#endif
#include <asm-generic/atomic.h>
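Editor's note: the header change above hinges on a return-value distinction. atomic_add_return()/atomic_sub_return() hand back the value after the operation, while the new atomic_fetch_add()/atomic_fetch_sub() must hand back the value before it, which is why a separate __raw_atomic_xadd_asm helper is introduced; the fetch_{and,or,xor} variants simply reuse __raw_atomic_{and,or,xor}_asm, whose comment blocks in atomic.S already document that they return the old 32-bit value. A minimal C sketch of the two contracts, assuming only <linux/atomic.h> (the function and the sample values are illustrative, not part of the patch):

#include <linux/atomic.h>

static void fetch_vs_return_example(atomic_t *v)
{
	int new, old;

	atomic_set(v, 5);
	new = atomic_add_return(3, v);	/* new == 8: value after the add  */
	old = atomic_fetch_add(3, v);	/* old == 8: value before the add */
	/* the counter now holds 11; both calls performed the add */
	(void)new;
	(void)old;
}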
diff --git a/arch/blackfin/kernel/bfin_ksyms.c b/arch/blackfin/kernel/bfin_ksyms.c
index a401c27b69b4..68096e8f787f 100644
--- a/arch/blackfin/kernel/bfin_ksyms.c
+++ b/arch/blackfin/kernel/bfin_ksyms.c
@@ -84,6 +84,7 @@ EXPORT_SYMBOL(insl_16);
#ifdef CONFIG_SMP
EXPORT_SYMBOL(__raw_atomic_add_asm);
+EXPORT_SYMBOL(__raw_atomic_xadd_asm);
EXPORT_SYMBOL(__raw_atomic_and_asm);
EXPORT_SYMBOL(__raw_atomic_or_asm);
EXPORT_SYMBOL(__raw_atomic_xor_asm);
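Editor's note: the new export matters because, under CONFIG_SMP (see the #ifdef block above and the header hunk), atomic_fetch_add()/atomic_fetch_sub() expand to calls into the out-of-line __raw_atomic_xadd_asm routine, so a module using those atomics could not resolve the symbol without it. A hypothetical module fragment illustrating such a caller (names and messages invented for the sketch):

#include <linux/module.h>
#include <linux/atomic.h>

static atomic_t demo_counter = ATOMIC_INIT(0);

static int __init demo_init(void)
{
	/* old is the counter value seen before this increment (0 here) */
	int old = atomic_fetch_add(1, &demo_counter);

	pr_info("demo: took ticket %d, counter is now %d\n",
		old, atomic_read(&demo_counter));
	return 0;
}

static void __exit demo_exit(void)
{
}

module_init(demo_init);
module_exit(demo_exit);
MODULE_LICENSE("GPL");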
diff --git a/arch/blackfin/mach-bf561/atomic.S b/arch/blackfin/mach-bf561/atomic.S
index 26fccb5568b9..1e2989c5d6b2 100644
--- a/arch/blackfin/mach-bf561/atomic.S
+++ b/arch/blackfin/mach-bf561/atomic.S
@@ -607,6 +607,28 @@ ENDPROC(___raw_atomic_add_asm)
/*
* r0 = ptr
+ * r1 = value
+ *
+ * ADD a signed value to a 32bit word and return the old value atomically.
+ * Clobbers: r3:0, p1:0
+ */
+ENTRY(___raw_atomic_xadd_asm)
+ p1 = r0;
+ r3 = r1;
+ [--sp] = rets;
+ call _get_core_lock;
+ r3 = [p1];
+ r2 = r3 + r2;
+ [p1] = r2;
+ r1 = p1;
+ call _put_core_lock;
+ r0 = r3;
+ rets = [sp++];
+ rts;
+ENDPROC(___raw_atomic_xadd_asm)
+
+/*
+ * r0 = ptr
* r1 = mask
*
* AND the mask bits from a 32bit word and return the old 32bit value
@@ -618,10 +640,9 @@ ENTRY(___raw_atomic_and_asm)
r3 = r1;
[--sp] = rets;
call _get_core_lock;
- r2 = [p1];
- r3 = r2 & r3;
- [p1] = r3;
- r3 = r2;
+ r3 = [p1];
+ r2 = r2 & r3;
+ [p1] = r2;
r1 = p1;
call _put_core_lock;
r0 = r3;
@@ -642,10 +663,9 @@ ENTRY(___raw_atomic_or_asm)
r3 = r1;
[--sp] = rets;
call _get_core_lock;
- r2 = [p1];
- r3 = r2 | r3;
- [p1] = r3;
- r3 = r2;
+ r3 = [p1];
+ r2 = r2 | r3;
+ [p1] = r2;
r1 = p1;
call _put_core_lock;
r0 = r3;
@@ -666,10 +686,9 @@ ENTRY(___raw_atomic_xor_asm)
r3 = r1;
[--sp] = rets;
call _get_core_lock;
- r2 = [p1];
- r3 = r2 ^ r3;
- [p1] = r3;
- r3 = r2;
+ r3 = [p1];
+ r2 = r2 ^ r3;
+ [p1] = r2;
r1 = p1;
call _put_core_lock;
r0 = r3;
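Editor's note: for readability, here is the read-modify-write pattern the BF561 routines above implement, expressed as a C sketch. It is not part of the patch: a raw spinlock with interrupts disabled stands in for the _get_core_lock/_put_core_lock pair called by the assembly, and the register notes mirror the r0 = ptr / r1 = value convention documented in the comment blocks. The essential contract is that the value loaded before the update is what gets returned.

#include <linux/spinlock.h>

static DEFINE_RAW_SPINLOCK(core_lock_sketch);

static int fetch_add_sketch(volatile int *ptr, int value)	/* r0 = ptr, r1 = value */
{
	unsigned long flags;
	int old;

	raw_spin_lock_irqsave(&core_lock_sketch, flags);	/* call _get_core_lock */
	old = *ptr;						/* load the old value  */
	*ptr = old + value;					/* store old + value   */
	raw_spin_unlock_irqrestore(&core_lock_sketch, flags);	/* call _put_core_lock */

	return old;	/* fetch_* primitives return the pre-update value */
}

The and/or/xor variants differ only in the operator applied before the store.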