From a82e62382fcbbf5c3348e802af73583e0cac39c0 Mon Sep 17 00:00:00 2001
From: Will Deacon
Date: Thu, 4 Jun 2015 16:41:36 +0100
Subject: [PATCH] arm64: atomics: tidy up common atomic{,64}_* macros

The common (i.e. identical for ll/sc and lse) atomic macros in atomic.h
are needlessly different for atomic_t and atomic64_t.

This patch tidies up the definitions to make them consistent across the
two atomic types and factors out common code such as the add_unless
implementation based on cmpxchg.

Reviewed-by: Steve Capper
Reviewed-by: Catalin Marinas
Signed-off-by: Will Deacon
---
 arch/arm64/include/asm/atomic.h | 99 +++++++++++++++++------------------------
 1 file changed, 40 insertions(+), 59 deletions(-)

diff --git a/arch/arm64/include/asm/atomic.h b/arch/arm64/include/asm/atomic.h
index 1fe8f209aeb4..0b26da365f3b 100644
--- a/arch/arm64/include/asm/atomic.h
+++ b/arch/arm64/include/asm/atomic.h
@@ -26,8 +26,6 @@
 #include <asm/barrier.h>
 #include <asm/lse.h>
 
-#define ATOMIC_INIT(i)	{ (i) }
-
 #ifdef __KERNEL__
 
 #define __ARM64_IN_ATOMIC_IMPL
@@ -42,71 +40,54 @@
 
 #include <asm/cmpxchg.h>
 
-/*
- * On ARM, ordinary assignment (str instruction) doesn't clear the local
- * strex/ldrex monitor on some implementations. The reason we can use it for
- * atomic_set() is the clrex or dummy strex done on every exception return.
- */
-#define atomic_read(v)	ACCESS_ONCE((v)->counter)
-#define atomic_set(v,i)	(((v)->counter) = (i))
-
-#define atomic_xchg(v, new)	(xchg(&((v)->counter), new))
-
-static inline int __atomic_add_unless(atomic_t *v, int a, int u)
-{
-	int c, old;
-
-	c = atomic_read(v);
-	while (c != u && (old = atomic_cmpxchg((v), c, c + a)) != c)
-		c = old;
-	return c;
-}
-
-#define atomic_inc(v)		atomic_add(1, v)
-#define atomic_dec(v)		atomic_sub(1, v)
+#define ___atomic_add_unless(v, a, u, sfx)				\
+({									\
+	typeof((v)->counter) c, old;					\
+									\
+	c = atomic##sfx##_read(v);					\
+	while (c != (u) &&						\
+	      (old = atomic##sfx##_cmpxchg((v), c, c + (a))) != c)	\
+		c = old;						\
+	c;								\
+ })
 
-#define atomic_inc_and_test(v)	(atomic_add_return(1, v) == 0)
-#define atomic_dec_and_test(v)	(atomic_sub_return(1, v) == 0)
-#define atomic_inc_return(v)	(atomic_add_return(1, v))
-#define atomic_dec_return(v)	(atomic_sub_return(1, v))
-#define atomic_sub_and_test(i, v) (atomic_sub_return(i, v) == 0)
-
-#define atomic_add_negative(i,v) (atomic_add_return(i, v) < 0)
+#define ATOMIC_INIT(i)	{ (i) }
 
-#define atomic_andnot atomic_andnot
+#define atomic_read(v)			READ_ONCE((v)->counter)
+#define atomic_set(v, i)		(((v)->counter) = (i))
+#define atomic_xchg(v, new)		xchg(&((v)->counter), (new))
+
+#define atomic_inc(v)			atomic_add(1, (v))
+#define atomic_dec(v)			atomic_sub(1, (v))
+#define atomic_inc_return(v)		atomic_add_return(1, (v))
+#define atomic_dec_return(v)		atomic_sub_return(1, (v))
+#define atomic_inc_and_test(v)		(atomic_inc_return(v) == 0)
+#define atomic_dec_and_test(v)		(atomic_dec_return(v) == 0)
+#define atomic_sub_and_test(i, v)	(atomic_sub_return((i), (v)) == 0)
+#define atomic_add_negative(i, v)	(atomic_add_return((i), (v)) < 0)
+#define __atomic_add_unless(v, a, u)	___atomic_add_unless(v, a, u,)
+#define atomic_andnot			atomic_andnot
 
 /*
  * 64-bit atomic operations.
 */
-#define ATOMIC64_INIT(i) { (i) }
-
-#define atomic64_read(v)	ACCESS_ONCE((v)->counter)
-#define atomic64_set(v,i)	(((v)->counter) = (i))
-
-#define atomic64_xchg(v, new)	(xchg(&((v)->counter), new))
-
-static inline int atomic64_add_unless(atomic64_t *v, long a, long u)
-{
-	long c, old;
-
-	c = atomic64_read(v);
-	while (c != u && (old = atomic64_cmpxchg((v), c, c + a)) != c)
-		c = old;
-
-	return c != u;
-}
-
-#define atomic64_add_negative(a, v)	(atomic64_add_return((a), (v)) < 0)
-#define atomic64_inc(v)			atomic64_add(1LL, (v))
-#define atomic64_inc_return(v)		atomic64_add_return(1LL, (v))
+#define ATOMIC64_INIT			ATOMIC_INIT
+#define atomic64_read			atomic_read
+#define atomic64_set			atomic_set
+#define atomic64_xchg			atomic_xchg
+
+#define atomic64_inc(v)			atomic64_add(1, (v))
+#define atomic64_dec(v)			atomic64_sub(1, (v))
+#define atomic64_inc_return(v)		atomic64_add_return(1, (v))
+#define atomic64_dec_return(v)		atomic64_sub_return(1, (v))
 #define atomic64_inc_and_test(v)	(atomic64_inc_return(v) == 0)
-#define atomic64_sub_and_test(a, v)	(atomic64_sub_return((a), (v)) == 0)
-#define atomic64_dec(v)			atomic64_sub(1LL, (v))
-#define atomic64_dec_return(v)		atomic64_sub_return(1LL, (v))
-#define atomic64_dec_and_test(v)	(atomic64_dec_return((v)) == 0)
-#define atomic64_inc_not_zero(v)	atomic64_add_unless((v), 1LL, 0LL)
+#define atomic64_dec_and_test(v)	(atomic64_dec_return(v) == 0)
+#define atomic64_sub_and_test(i, v)	(atomic64_sub_return((i), (v)) == 0)
+#define atomic64_add_negative(i, v)	(atomic64_add_return((i), (v)) < 0)
+#define atomic64_add_unless(v, a, u)	(___atomic_add_unless(v, a, u, 64) != u)
+#define atomic64_andnot			atomic64_andnot
 
-#define atomic64_andnot atomic64_andnot
+#define atomic64_inc_not_zero(v)	atomic64_add_unless((v), 1, 0)
 
 #endif
 #endif
-- 
2.11.0
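
A minimal userspace sketch of the add_unless-on-cmpxchg pattern that the new
___atomic_add_unless() macro factors out. This is not kernel code: it uses
GCC's __sync_val_compare_and_swap() builtin as a stand-in for atomic_cmpxchg(),
and the add_unless() helper name is invented purely for illustration.

	#include <stdio.h>

	/*
	 * Add 'a' to *counter unless it currently equals 'u'; return the value
	 * observed before any update, as __atomic_add_unless() does.
	 */
	static int add_unless(int *counter, int a, int u)
	{
		int c, old;

		c = *counter;
		while (c != u &&
		       (old = __sync_val_compare_and_swap(counter, c, c + a)) != c)
			c = old;	/* lost a race: retry with the fresh value */
		return c;
	}

	int main(void)
	{
		int v = 1;

		add_unless(&v, 1, 0);	/* v != 0, so the add happens: v == 2 */
		add_unless(&v, 1, 2);	/* v == 2 matches 'u', so v stays 2 */
		printf("%d\n", v);
		return 0;
	}

The 64-bit wrapper in the patch, atomic64_add_unless(), compares the returned
old value against 'u' so that it reports whether the addition took place,
preserving the semantics of the removed inline function.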