author		Catalin Marinas <catalin.marinas@arm.com>	2020-07-31 18:09:57 +0100
committer	Catalin Marinas <catalin.marinas@arm.com>	2020-07-31 18:09:57 +0100
commit		0e4cd9f2654915be8d09a1bd1b405ce5426e64c4 (patch)
tree		0aeb119873c87509ff54cbc9c6946b271f3780bf /arch/alpha/include/asm/atomic.h
parent		18aa3bd58b1428d1927fe11f85ad444423d4fc59 (diff)
parent		5f1f7f6c205a2e7f1d92229ac358254bd2826c2d (diff)
download	linux-0e4cd9f2654915be8d09a1bd1b405ce5426e64c4.tar.bz2
Merge branch 'for-next/read-barrier-depends' into for-next/core
* for-next/read-barrier-depends:
  : Allow architectures to override __READ_ONCE()
  arm64: Reduce the number of header files pulled into vmlinux.lds.S
  compiler.h: Move compiletime_assert() macros into compiler_types.h
  checkpatch: Remove checks relating to [smp_]read_barrier_depends()
  include/linux: Remove smp_read_barrier_depends() from comments
  tools/memory-model: Remove smp_read_barrier_depends() from informal doc
  Documentation/barriers/kokr: Remove references to [smp_]read_barrier_depends()
  Documentation/barriers: Remove references to [smp_]read_barrier_depends()
  locking/barriers: Remove definitions for [smp_]read_barrier_depends()
  alpha: Replace smp_read_barrier_depends() usage with smp_[r]mb()
  vhost: Remove redundant use of read_barrier_depends() barrier
  asm/rwonce: Don't pull <asm/barrier.h> into 'asm-generic/rwonce.h'
  asm/rwonce: Remove smp_read_barrier_depends() invocation
  alpha: Override READ_ONCE() with barriered implementation
  asm/rwonce: Allow __READ_ONCE to be overridden by the architecture
  compiler.h: Split {READ,WRITE}_ONCE definitions out into rwonce.h
  tools: bpf: Use local copy of headers including uapi/linux/filter.h
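For reference, the shape of the Alpha READ_ONCE() override that this series introduces (a sketch of the arch/alpha/include/asm/rwonce.h added by "alpha: Override READ_ONCE() with barriered implementation", reconstructed for illustration rather than quoted from this page):

/*
 * Sketch, not patch text: Alpha can reorder address-dependent loads,
 * so the override issues a full barrier after the volatile load.
 */
#include <asm/barrier.h>

#define __READ_ONCE(x)							\
({									\
	__unqual_scalar_typeof(x) __x =					\
		(*(volatile typeof(__x) *)&(x));			\
	mb();								\
	(typeof(x))__x;							\
})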
Diffstat (limited to 'arch/alpha/include/asm/atomic.h')
-rw-r--r--	arch/alpha/include/asm/atomic.h | 16 ++++++++--------
1 file changed, 8 insertions(+), 8 deletions(-)
diff --git a/arch/alpha/include/asm/atomic.h b/arch/alpha/include/asm/atomic.h
index 2144530d1428..2f8f7e54792f 100644
--- a/arch/alpha/include/asm/atomic.h
+++ b/arch/alpha/include/asm/atomic.h
@@ -16,10 +16,10 @@
 /*
  * To ensure dependency ordering is preserved for the _relaxed and
- * _release atomics, an smp_read_barrier_depends() is unconditionally
- * inserted into the _relaxed variants, which are used to build the
- * barriered versions. Avoid redundant back-to-back fences in the
- * _acquire and _fence versions.
+ * _release atomics, an smp_mb() is unconditionally inserted into the
+ * _relaxed variants, which are used to build the barriered versions.
+ * Avoid redundant back-to-back fences in the _acquire and _fence
+ * versions.
  */
 #define __atomic_acquire_fence()
 #define __atomic_post_full_fence()
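The empty __atomic_acquire_fence()/__atomic_post_full_fence() hooks above are what "avoid redundant back-to-back fences": the generic fallbacks compose each barriered variant from the _relaxed one plus these hooks, roughly as below (a sketch of the include/linux/atomic-fallback.h pattern, not part of this patch). Because Alpha's _relaxed ops now end in smp_mb(), a non-empty acquire fence here would emit a second, redundant barrier.

static __always_inline int
atomic_add_return_acquire(int i, atomic_t *v)
{
	/* The relaxed op already ends in smp_mb() on Alpha ... */
	int ret = atomic_add_return_relaxed(i, v);

	/* ... so this expands to nothing and no extra fence is emitted. */
	__atomic_acquire_fence();
	return ret;
}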
@@ -70,7 +70,7 @@ static inline int atomic_##op##_return_relaxed(int i, atomic_t *v) \
".previous" \
:"=&r" (temp), "=m" (v->counter), "=&r" (result) \
:"Ir" (i), "m" (v->counter) : "memory"); \
- smp_read_barrier_depends(); \
+ smp_mb(); \
return result; \
}
@@ -88,7 +88,7 @@ static inline int atomic_fetch_##op##_relaxed(int i, atomic_t *v) \
".previous" \
:"=&r" (temp), "=m" (v->counter), "=&r" (result) \
:"Ir" (i), "m" (v->counter) : "memory"); \
- smp_read_barrier_depends(); \
+ smp_mb(); \
return result; \
}
@@ -123,7 +123,7 @@ static __inline__ s64 atomic64_##op##_return_relaxed(s64 i, atomic64_t * v) \
".previous" \
:"=&r" (temp), "=m" (v->counter), "=&r" (result) \
:"Ir" (i), "m" (v->counter) : "memory"); \
- smp_read_barrier_depends(); \
+ smp_mb(); \
return result; \
}
@@ -141,7 +141,7 @@ static __inline__ s64 atomic64_fetch_##op##_relaxed(s64 i, atomic64_t * v) \
".previous" \
:"=&r" (temp), "=m" (v->counter), "=&r" (result) \
:"Ir" (i), "m" (v->counter) : "memory"); \
- smp_read_barrier_depends(); \
+ smp_mb(); \
return result; \
}
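For context on why a full barrier is needed at all: Alpha can let an address-dependent load observe stale data unless the pointer load itself is followed by a barrier. A minimal, hypothetical publish/consume pair (names invented for illustration; not from this patch):

struct foo {
	int data;
};

static struct foo *gp;	/* hypothetical shared pointer */

/* Writer: initialise the object, then publish it with release ordering. */
static void publish(struct foo *p)
{
	p->data = 42;
	smp_store_release(&gp, p);
}

/*
 * Reader: with this series, READ_ONCE() on Alpha implies a full
 * barrier, so the dependent load of p->data cannot return stale data.
 */
static int consume(void)
{
	struct foo *p = READ_ONCE(gp);

	return p ? p->data : -1;
}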