| | | |
|---|---|---|
| author | Mark Rutland <mark.rutland@arm.com> | 2023-06-05 08:01:01 +0100 |
| committer | Peter Zijlstra <peterz@infradead.org> | 2023-06-05 09:57:14 +0200 |
| commit | d12157efc8e083c77d054675fcdd594f54cc7e2b (patch) | |
| tree | 9be23f46b4b8db9d3e36a8b551d8961d8d41822d /arch/arm64/include/asm/atomic.h | |
| parent | a7bafa7969da1c0e9c342c792d8224078d1c491c (diff) | |
locking/atomic: make atomic*_{cmp,}xchg optional
Most architectures define the atomic/atomic64 xchg and cmpxchg
operations in terms of arch_xchg and arch_cmpxchg respectively.
Add fallbacks for these cases and remove the trivial cases from arch
code. On some architectures the existing definitions are kept as these
are used to build other arch_atomic*() operations.
Signed-off-by: Mark Rutland <mark.rutland@arm.com>
Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Reviewed-by: Kees Cook <keescook@chromium.org>
Link: https://lore.kernel.org/r/20230605070124.3741859-5-mark.rutland@arm.com
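For context, the generic fallbacks this series adds (generated into include/linux/atomic/atomic-arch-fallback.h by the scripts/atomic/ tooling) take roughly the following shape. This is a simplified sketch, not the exact generated code:

```c
/*
 * Simplified sketch of the fallback shape: when an architecture does not
 * provide arch_atomic_xchg()/arch_atomic_cmpxchg() itself, the generated
 * fallbacks build them from arch_xchg()/arch_cmpxchg() on the counter
 * member, just as the removed arm64 macros did by hand.
 */
#ifndef arch_atomic_xchg
static __always_inline int
arch_atomic_xchg(atomic_t *v, int new)
{
	return arch_xchg(&v->counter, new);
}
#define arch_atomic_xchg arch_atomic_xchg
#endif

#ifndef arch_atomic_cmpxchg
static __always_inline int
arch_atomic_cmpxchg(atomic_t *v, int old, int new)
{
	return arch_cmpxchg(&v->counter, old, new);
}
#define arch_atomic_cmpxchg arch_atomic_cmpxchg
#endif
```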
Diffstat (limited to 'arch/arm64/include/asm/atomic.h')
-rw-r--r-- | arch/arm64/include/asm/atomic.h | 28 |
1 file changed, 0 insertions, 28 deletions
```diff
diff --git a/arch/arm64/include/asm/atomic.h b/arch/arm64/include/asm/atomic.h
index c9979273d389..400d279e0f8d 100644
--- a/arch/arm64/include/asm/atomic.h
+++ b/arch/arm64/include/asm/atomic.h
@@ -142,24 +142,6 @@ static __always_inline long arch_atomic64_dec_if_positive(atomic64_t *v)
 #define arch_atomic_fetch_xor_release	arch_atomic_fetch_xor_release
 #define arch_atomic_fetch_xor		arch_atomic_fetch_xor
 
-#define arch_atomic_xchg_relaxed(v, new) \
-	arch_xchg_relaxed(&((v)->counter), (new))
-#define arch_atomic_xchg_acquire(v, new) \
-	arch_xchg_acquire(&((v)->counter), (new))
-#define arch_atomic_xchg_release(v, new) \
-	arch_xchg_release(&((v)->counter), (new))
-#define arch_atomic_xchg(v, new) \
-	arch_xchg(&((v)->counter), (new))
-
-#define arch_atomic_cmpxchg_relaxed(v, old, new) \
-	arch_cmpxchg_relaxed(&((v)->counter), (old), (new))
-#define arch_atomic_cmpxchg_acquire(v, old, new) \
-	arch_cmpxchg_acquire(&((v)->counter), (old), (new))
-#define arch_atomic_cmpxchg_release(v, old, new) \
-	arch_cmpxchg_release(&((v)->counter), (old), (new))
-#define arch_atomic_cmpxchg(v, old, new) \
-	arch_cmpxchg(&((v)->counter), (old), (new))
-
 #define arch_atomic_andnot		arch_atomic_andnot
 
 /*
@@ -209,16 +191,6 @@ static __always_inline long arch_atomic64_dec_if_positive(atomic64_t *v)
 #define arch_atomic64_fetch_xor_release	arch_atomic64_fetch_xor_release
 #define arch_atomic64_fetch_xor	arch_atomic64_fetch_xor
 
-#define arch_atomic64_xchg_relaxed	arch_atomic_xchg_relaxed
-#define arch_atomic64_xchg_acquire	arch_atomic_xchg_acquire
-#define arch_atomic64_xchg_release	arch_atomic_xchg_release
-#define arch_atomic64_xchg		arch_atomic_xchg
-
-#define arch_atomic64_cmpxchg_relaxed	arch_atomic_cmpxchg_relaxed
-#define arch_atomic64_cmpxchg_acquire	arch_atomic_cmpxchg_acquire
-#define arch_atomic64_cmpxchg_release	arch_atomic_cmpxchg_release
-#define arch_atomic64_cmpxchg		arch_atomic_cmpxchg
-
 #define arch_atomic64_andnot		arch_atomic64_andnot
 
 #define arch_atomic64_dec_if_positive	arch_atomic64_dec_if_positive
```
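Callers are unaffected by the removal: the atomic_*() wrappers still reach arch_xchg()/arch_cmpxchg() on v->counter, only now via the generic fallbacks. As a purely illustrative usage example (atomic_add_clamped() is a hypothetical helper, not kernel code), a typical cmpxchg-based retry loop on top of these operations looks like this:

```c
#include <linux/atomic.h>
#include <linux/minmax.h>

/*
 * Hypothetical helper, for illustration only: add @a to @v without letting
 * the value exceed @max, using a standard try_cmpxchg() retry loop. On
 * failure, atomic_try_cmpxchg() refreshes @old with the current value, so
 * the loop recomputes @new and retries until the swap succeeds.
 */
static inline void atomic_add_clamped(atomic_t *v, int a, int max)
{
	int old = atomic_read(v);
	int new;

	do {
		new = min(old + a, max);
	} while (!atomic_try_cmpxchg(v, &old, new));
}
```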