Diffstat (limited to 'arch/x86/include/asm/atomic64_32.h')
-rw-r--r--  arch/x86/include/asm/atomic64_32.h  |  9
1 file changed, 9 insertions, 0 deletions
diff --git a/arch/x86/include/asm/atomic64_32.h b/arch/x86/include/asm/atomic64_32.h
index 52cfaecb13f9..5efd01b548d1 100644
--- a/arch/x86/include/asm/atomic64_32.h
+++ b/arch/x86/include/asm/atomic64_32.h
@@ -75,6 +75,7 @@ static inline s64 arch_atomic64_cmpxchg(atomic64_t *v, s64 o, s64 n)
 {
 	return arch_cmpxchg64(&v->counter, o, n);
 }
+#define arch_atomic64_cmpxchg arch_atomic64_cmpxchg
 
 /**
  * arch_atomic64_xchg - xchg atomic64 variable
@@ -94,6 +95,7 @@ static inline s64 arch_atomic64_xchg(atomic64_t *v, s64 n)
 			     : "memory");
 	return o;
 }
+#define arch_atomic64_xchg arch_atomic64_xchg
 
 /**
  * arch_atomic64_set - set atomic64 variable
@@ -138,6 +140,7 @@ static inline s64 arch_atomic64_add_return(s64 i, atomic64_t *v)
 			     ASM_NO_INPUT_CLOBBER("memory"));
 	return i;
 }
+#define arch_atomic64_add_return arch_atomic64_add_return
 
 /*
  * Other variants with different arithmetic operators:
@@ -149,6 +152,7 @@ static inline s64 arch_atomic64_sub_return(s64 i, atomic64_t *v)
 			     ASM_NO_INPUT_CLOBBER("memory"));
 	return i;
 }
+#define arch_atomic64_sub_return arch_atomic64_sub_return
 
 static inline s64 arch_atomic64_inc_return(atomic64_t *v)
 {
@@ -242,6 +246,7 @@ static inline int arch_atomic64_add_unless(atomic64_t *v, s64 a, s64 u)
 			     "S" (v) : "memory");
 	return (int)a;
 }
+#define arch_atomic64_add_unless arch_atomic64_add_unless
 
 static inline int arch_atomic64_inc_not_zero(atomic64_t *v)
 {
@@ -281,6 +286,7 @@ static inline s64 arch_atomic64_fetch_and(s64 i, atomic64_t *v)
 
 	return old;
 }
+#define arch_atomic64_fetch_and arch_atomic64_fetch_and
 
 static inline void arch_atomic64_or(s64 i, atomic64_t *v)
 {
@@ -299,6 +305,7 @@ static inline s64 arch_atomic64_fetch_or(s64 i, atomic64_t *v)
 
 	return old;
 }
+#define arch_atomic64_fetch_or arch_atomic64_fetch_or
 
 static inline void arch_atomic64_xor(s64 i, atomic64_t *v)
 {
@@ -317,6 +324,7 @@ static inline s64 arch_atomic64_fetch_xor(s64 i, atomic64_t *v)
 
 	return old;
 }
+#define arch_atomic64_fetch_xor arch_atomic64_fetch_xor
 
 static inline s64 arch_atomic64_fetch_add(s64 i, atomic64_t *v)
 {
@@ -327,6 +335,7 @@ static inline s64 arch_atomic64_fetch_add(s64 i, atomic64_t *v)
 
 	return old;
 }
+#define arch_atomic64_fetch_add arch_atomic64_fetch_add
 
 #define arch_atomic64_fetch_sub(i, v)	arch_atomic64_fetch_add(-(i), (v))
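
The one-line "#define arch_atomic64_<op> arch_atomic64_<op>" additions make each architecture-provided operation visible to the preprocessor, so generic code can probe with #ifdef/#ifndef which operations the arch implements and generate fallbacks only for the missing ones. Below is a minimal, self-contained sketch of that idiom; the types, the main() harness, and the non-atomic function bodies are illustrative stand-ins, not kernel code.

/*
 * Minimal sketch of the "#define op op" detection idiom (not kernel code):
 * defining a macro with the same name as the function lets a generic layer
 * test with #ifndef whether the "arch" already provided an operation and
 * add a fallback only when it did not. Bodies here are plain C stand-ins;
 * the real x86-32 functions use LOCK'd asm / cmpxchg8b.
 */
#include <stdint.h>
#include <stdio.h>

typedef struct { int64_t counter; } atomic64_t;

/* "arch" side: the implementation plus the self-referential define */
static inline int64_t arch_atomic64_add_return(int64_t i, atomic64_t *v)
{
	v->counter += i;	/* stand-in for the real atomic sequence */
	return v->counter;
}
#define arch_atomic64_add_return arch_atomic64_add_return

/* "generic" side: supply sub_return only if the arch did not define it */
#ifndef arch_atomic64_sub_return
static inline int64_t arch_atomic64_sub_return(int64_t i, atomic64_t *v)
{
	return arch_atomic64_add_return(-i, v);	/* generic fallback */
}
#define arch_atomic64_sub_return arch_atomic64_sub_return
#endif

int main(void)
{
	atomic64_t v = { .counter = 40 };

	printf("%lld\n", (long long)arch_atomic64_add_return(2, &v)); /* 42 */
	printf("%lld\n", (long long)arch_atomic64_sub_return(2, &v)); /* 40 */
	return 0;
}

In this sketch the "arch" layer never defines arch_atomic64_sub_return, so the #ifndef branch compiles the fallback in; the defines added by the patch above serve the same role for x86-32, telling the generic atomic headers which 64-bit operations are already implemented so duplicate fallbacks are not generated for them.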