From 0aa7be05d83cc584da0782405e8007e351dfb6cc Mon Sep 17 00:00:00 2001
From: Uros Bizjak <ubizjak@gmail.com>
Date: Sun, 15 May 2022 20:42:03 +0200
Subject: [PATCH] locking/atomic: Add generic try_cmpxchg64 support

Add generic support for try_cmpxchg64{,_acquire,_release,_relaxed}
and their fallbacks involving cmpxchg64.

Signed-off-by: Uros Bizjak <ubizjak@gmail.com>
Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Link: https://lkml.kernel.org/r/20220515184205.103089-2-ubizjak@gmail.com
---
 include/linux/atomic/atomic-arch-fallback.h | 72 ++++++++++++++++++++-
 include/linux/atomic/atomic-instrumented.h  | 40 +++++++++++-
 scripts/atomic/gen-atomic-fallback.sh       | 31 +++++----
 scripts/atomic/gen-atomic-instrumented.sh   |  2 +-
 4 files changed, 129 insertions(+), 16 deletions(-)

diff --git a/include/linux/atomic/atomic-arch-fallback.h b/include/linux/atomic/atomic-arch-fallback.h
index 6db58d180866..77bc5522e61c 100644
--- a/include/linux/atomic/atomic-arch-fallback.h
+++ b/include/linux/atomic/atomic-arch-fallback.h
@@ -147,6 +147,76 @@
 
 #endif /* arch_try_cmpxchg_relaxed */
 
+#ifndef arch_try_cmpxchg64_relaxed
+#ifdef arch_try_cmpxchg64
+#define arch_try_cmpxchg64_acquire arch_try_cmpxchg64
+#define arch_try_cmpxchg64_release arch_try_cmpxchg64
+#define arch_try_cmpxchg64_relaxed arch_try_cmpxchg64
+#endif /* arch_try_cmpxchg64 */
+
+#ifndef arch_try_cmpxchg64
+#define arch_try_cmpxchg64(_ptr, _oldp, _new) \
+({ \
+	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
+	___r = arch_cmpxchg64((_ptr), ___o, (_new)); \
+	if (unlikely(___r != ___o)) \
+		*___op = ___r; \
+	likely(___r == ___o); \
+})
+#endif /* arch_try_cmpxchg64 */
+
+#ifndef arch_try_cmpxchg64_acquire
+#define arch_try_cmpxchg64_acquire(_ptr, _oldp, _new) \
+({ \
+	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
+	___r = arch_cmpxchg64_acquire((_ptr), ___o, (_new)); \
+	if (unlikely(___r != ___o)) \
+		*___op = ___r; \
+	likely(___r == ___o); \
+})
+#endif /* arch_try_cmpxchg64_acquire */
+
+#ifndef arch_try_cmpxchg64_release
+#define arch_try_cmpxchg64_release(_ptr, _oldp, _new) \
+({ \
+	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
+	___r = arch_cmpxchg64_release((_ptr), ___o, (_new)); \
+	if (unlikely(___r != ___o)) \
+		*___op = ___r; \
+	likely(___r == ___o); \
+})
+#endif /* arch_try_cmpxchg64_release */
+
+#ifndef arch_try_cmpxchg64_relaxed
+#define arch_try_cmpxchg64_relaxed(_ptr, _oldp, _new) \
+({ \
+	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
+	___r = arch_cmpxchg64_relaxed((_ptr), ___o, (_new)); \
+	if (unlikely(___r != ___o)) \
+		*___op = ___r; \
+	likely(___r == ___o); \
+})
+#endif /* arch_try_cmpxchg64_relaxed */
+
+#else /* arch_try_cmpxchg64_relaxed */
+
+#ifndef arch_try_cmpxchg64_acquire
+#define arch_try_cmpxchg64_acquire(...) \
+	__atomic_op_acquire(arch_try_cmpxchg64, __VA_ARGS__)
+#endif
+
+#ifndef arch_try_cmpxchg64_release
+#define arch_try_cmpxchg64_release(...) \
+	__atomic_op_release(arch_try_cmpxchg64, __VA_ARGS__)
+#endif
+
+#ifndef arch_try_cmpxchg64
+#define arch_try_cmpxchg64(...) \
+	__atomic_op_fence(arch_try_cmpxchg64, __VA_ARGS__)
+#endif
+
+#endif /* arch_try_cmpxchg64_relaxed */
+
 #ifndef arch_atomic_read_acquire
 static __always_inline int
 arch_atomic_read_acquire(const atomic_t *v)
@@ -2386,4 +2456,4 @@ arch_atomic64_dec_if_positive(atomic64_t *v)
 #endif
 
 #endif /* _LINUX_ATOMIC_FALLBACK_H */
-// 8e2cc06bc0d2c0967d2f8424762bd48555ee40ae
+// b5e87bdd5ede61470c29f7a7e4de781af3770f09
diff --git a/include/linux/atomic/atomic-instrumented.h b/include/linux/atomic/atomic-instrumented.h
index 5d69b143c28e..7a139ec030b0 100644
--- a/include/linux/atomic/atomic-instrumented.h
+++ b/include/linux/atomic/atomic-instrumented.h
@@ -2006,6 +2006,44 @@ atomic_long_dec_if_positive(atomic_long_t *v)
 	arch_try_cmpxchg_relaxed(__ai_ptr, __ai_oldp, __VA_ARGS__); \
 })
 
+#define try_cmpxchg64(ptr, oldp, ...) \
+({ \
+	typeof(ptr) __ai_ptr = (ptr); \
+	typeof(oldp) __ai_oldp = (oldp); \
+	kcsan_mb(); \
+	instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr)); \
+	instrument_atomic_write(__ai_oldp, sizeof(*__ai_oldp)); \
+	arch_try_cmpxchg64(__ai_ptr, __ai_oldp, __VA_ARGS__); \
+})
+
+#define try_cmpxchg64_acquire(ptr, oldp, ...) \
+({ \
+	typeof(ptr) __ai_ptr = (ptr); \
+	typeof(oldp) __ai_oldp = (oldp); \
+	instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr)); \
+	instrument_atomic_write(__ai_oldp, sizeof(*__ai_oldp)); \
+	arch_try_cmpxchg64_acquire(__ai_ptr, __ai_oldp, __VA_ARGS__); \
+})
+
+#define try_cmpxchg64_release(ptr, oldp, ...) \
+({ \
+	typeof(ptr) __ai_ptr = (ptr); \
+	typeof(oldp) __ai_oldp = (oldp); \
+	kcsan_release(); \
+	instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr)); \
+	instrument_atomic_write(__ai_oldp, sizeof(*__ai_oldp)); \
+	arch_try_cmpxchg64_release(__ai_ptr, __ai_oldp, __VA_ARGS__); \
+})
+
+#define try_cmpxchg64_relaxed(ptr, oldp, ...) \
+({ \
+	typeof(ptr) __ai_ptr = (ptr); \
+	typeof(oldp) __ai_oldp = (oldp); \
+	instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr)); \
+	instrument_atomic_write(__ai_oldp, sizeof(*__ai_oldp)); \
+	arch_try_cmpxchg64_relaxed(__ai_ptr, __ai_oldp, __VA_ARGS__); \
+})
+
 #define cmpxchg_local(ptr, ...) \
 ({ \
 	typeof(ptr) __ai_ptr = (ptr); \
@@ -2045,4 +2083,4 @@ atomic_long_dec_if_positive(atomic_long_t *v)
 })
 
 #endif /* _LINUX_ATOMIC_INSTRUMENTED_H */
-// 87c974b93032afd42143613434d1a7788fa598f9
+// 764f741eb77a7ad565dc8d99ce2837d5542e8aee
diff --git a/scripts/atomic/gen-atomic-fallback.sh b/scripts/atomic/gen-atomic-fallback.sh
index 8e2da71f1d5f..3a07695e3c89 100755
--- a/scripts/atomic/gen-atomic-fallback.sh
+++ b/scripts/atomic/gen-atomic-fallback.sh
@@ -164,41 +164,44 @@ gen_xchg_fallbacks()
 
 gen_try_cmpxchg_fallback()
 {
+	local cmpxchg="$1"; shift;
 	local order="$1"; shift;
 
 cat <
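
As a usage note (not part of the commit): per the fallback above, try_cmpxchg64() returns true on a successful compare-and-exchange, and on failure returns false after storing the observed value back through the second argument, so a retry loop needs no explicit re-read. The sketch below is illustrative only; seq_advance() and its variables are hypothetical names, not anything introduced by this patch.

	#include <linux/atomic.h>	/* try_cmpxchg64() */
	#include <linux/types.h>	/* u64 */

	/* Hypothetical example: atomically advance a 64-bit sequence counter. */
	static u64 seq_advance(u64 *seq)
	{
		u64 old = READ_ONCE(*seq);

		do {
			/* on failure, 'old' has already been updated in place */
		} while (!try_cmpxchg64(seq, &old, old + 1));

		return old + 1;
	}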