atomics/treewide: Make atomic64_fetch_add_unless() optional
Architectures with atomic64_fetch_add_unless() provide a preprocessor symbol if they do so, and all other architectures have trivial C implementations of atomic64_add_unless() which are near-identical.

Let's unify the trivial definitions of atomic64_fetch_add_unless() in <linux/atomic.h>, so that we always have both atomic64_fetch_add_unless() and atomic64_add_unless() with less boilerplate code.

This means that atomic64_add_unless() is always implemented in core code, and the instrumented atomics are updated accordingly.

There should be no functional change as a result of this patch.

Signed-off-by: Mark Rutland <mark.rutland@arm.com>
Reviewed-by: Will Deacon <will.deacon@arm.com>
Acked-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Cc: Boqun Feng <boqun.feng@gmail.com>
Cc: Linus Torvalds <torvalds@linux-foundation.org>
Cc: Thomas Gleixner <tglx@linutronix.de>
Link: https://lore.kernel.org/lkml/20180621121321.4761-15-mark.rutland@arm.com
Signed-off-by: Ingo Molnar <mingo@kernel.org>
parent 2b523f170e
commit 356701329f
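Before the per-architecture hunks below, here is a minimal standalone sketch of the pattern this patch centralizes: a fetch_add_unless() built as a compare-exchange loop, with the boolean add_unless() derived from it in one place. This is illustrative only; it uses C11 <stdatomic.h> and plain long long rather than the kernel's atomic64_t and try_cmpxchg(), and the function names are not the kernel's.

/*
 * Sketch of the fetch_add_unless()/add_unless() split, using C11 atomics.
 * Not kernel code; names and types here are illustrative only.
 */
#include <stdatomic.h>
#include <stdbool.h>
#include <stdio.h>

/* Add @a to @v unless it equals @u; return the value observed beforehand. */
static long long fetch_add_unless(_Atomic long long *v, long long a, long long u)
{
	long long c = atomic_load(v);

	do {
		if (c == u)
			break;
	} while (!atomic_compare_exchange_weak(v, &c, c + a));

	return c;
}

/* The boolean variant is then a one-liner derived from the fetch_* form. */
static bool add_unless(_Atomic long long *v, long long a, long long u)
{
	return fetch_add_unless(v, a, u) != u;
}

int main(void)
{
	_Atomic long long v = 1;

	printf("%d\n", add_unless(&v, 1, 0));	/* prints 1: v != 0, so v becomes 2 */
	printf("%d\n", add_unless(&v, 1, 2));	/* prints 0: v == 2, nothing added */
	return 0;
}

Keeping only the fetch_* form per architecture and deriving the boolean form once in core code is what removes the near-identical boilerplate that the hunks below delete.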
@@ -40,17 +40,6 @@
 
 #include <asm/cmpxchg.h>
 
-#define ___atomic_add_unless(v, a, u, sfx)				\
-({									\
-	typeof((v)->counter) c, old;					\
-									\
-	c = atomic##sfx##_read(v);					\
-	while (c != (u) &&						\
-	       (old = atomic##sfx##_cmpxchg((v), c, c + (a))) != c)	\
-		c = old;						\
-	c;								\
-})
-
 #define ATOMIC_INIT(i)	{ (i) }
 
 #define atomic_read(v)	READ_ONCE((v)->counter)
@@ -200,7 +189,6 @@
 #define atomic64_dec_and_test(v)	(atomic64_dec_return(v) == 0)
 #define atomic64_sub_and_test(i, v)	(atomic64_sub_return((i), (v)) == 0)
 #define atomic64_add_negative(i, v)	(atomic64_add_return((i), (v)) < 0)
-#define atomic64_add_unless(v, a, u)	(___atomic_add_unless(v, a, u, 64) != u)
 #define atomic64_andnot			atomic64_andnot
 
 #endif
@@ -215,21 +215,6 @@ ATOMIC64_FETCH_OP(xor, ^)
 	(cmpxchg(&((v)->counter), old, new))
 #define atomic64_xchg(v, new) (xchg(&((v)->counter), new))
 
-static __inline__ long atomic64_add_unless(atomic64_t *v, long a, long u)
-{
-	long c, old;
-	c = atomic64_read(v);
-	for (;;) {
-		if (unlikely(c == (u)))
-			break;
-		old = atomic64_cmpxchg((v), c, c + (a));
-		if (likely(old == c))
-			break;
-		c = old;
-	}
-	return c != (u);
-}
-
 static __inline__ long atomic64_dec_if_positive(atomic64_t *v)
 {
 	long c, old, dec;
@@ -596,30 +596,6 @@ static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
 	((__typeof__((v)->counter))cmpxchg(&((v)->counter), (o), (n)))
 #define atomic64_xchg(v, new) (xchg(&((v)->counter), (new)))
 
-/**
- * atomic64_add_unless - add unless the number is a given value
- * @v: pointer of type atomic64_t
- * @a: the amount to add to v...
- * @u: ...unless v is equal to u.
- *
- * Atomically adds @a to @v, so long as it was not @u.
- * Returns true iff @v was not @u.
- */
-static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
-{
-	long c, old;
-	c = atomic64_read(v);
-	for (;;) {
-		if (unlikely(c == (u)))
-			break;
-		old = atomic64_cmpxchg((v), c, c + (a));
-		if (likely(old == c))
-			break;
-		c = old;
-	}
-	return c != (u);
-}
-
 #define atomic64_dec_return(v) atomic64_sub_return(1, (v))
 #define atomic64_inc_return(v) atomic64_add_return(1, (v))
 
@@ -257,30 +257,6 @@ atomic64_read(const atomic64_t *v)
 	((__typeof__((v)->counter))cmpxchg(&((v)->counter), (o), (n)))
 #define atomic64_xchg(v, new) (xchg(&((v)->counter), new))
 
-/**
- * atomic64_add_unless - add unless the number is a given value
- * @v: pointer of type atomic64_t
- * @a: the amount to add to v...
- * @u: ...unless v is equal to u.
- *
- * Atomically adds @a to @v, so long as it was not @u.
- * Returns the old value of @v.
- */
-static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
-{
-	long c, old;
-	c = atomic64_read(v);
-	for (;;) {
-		if (unlikely(c == (u)))
-			break;
-		old = atomic64_cmpxchg((v), c, c + (a));
-		if (likely(old == c))
-			break;
-		c = old;
-	}
-	return c != (u);
-}
-
 /*
  * atomic64_dec_if_positive - decrement by 1 if old value positive
  * @v: pointer of type atomic_t
@@ -153,22 +153,6 @@ ATOMIC64_OPS(xor)
 
 #undef ATOMIC64_OPS
 
-static inline int atomic64_add_unless(atomic64_t *v, long i, long u)
-{
-	long c, old;
-
-	c = atomic64_read(v);
-	for (;;) {
-		if (unlikely(c == u))
-			break;
-		old = atomic64_cmpxchg(v, c, c + i);
-		if (likely(old == c))
-			break;
-		c = old;
-	}
-	return c != u;
-}
-
 static inline long atomic64_dec_if_positive(atomic64_t *v)
 {
 	long c, old, dec;
@@ -93,21 +93,6 @@ static inline int atomic_xchg(atomic_t *v, int new)
 	((__typeof__((v)->counter))cmpxchg(&((v)->counter), (o), (n)))
 #define atomic64_xchg(v, new) (xchg(&((v)->counter), new))
 
-static inline long atomic64_add_unless(atomic64_t *v, long a, long u)
-{
-	long c, old;
-	c = atomic64_read(v);
-	for (;;) {
-		if (unlikely(c == (u)))
-			break;
-		old = atomic64_cmpxchg((v), c, c + (a));
-		if (likely(old == c))
-			break;
-		c = old;
-	}
-	return c != (u);
-}
-
 long atomic64_dec_if_positive(atomic64_t *v);
 
 #endif /* !(__ARCH_SPARC64_ATOMIC__) */
@@ -188,25 +188,6 @@ static inline long arch_atomic64_xchg(atomic64_t *v, long new)
 	return xchg(&v->counter, new);
 }
 
-/**
- * arch_atomic64_add_unless - add unless the number is a given value
- * @v: pointer of type atomic64_t
- * @a: the amount to add to v...
- * @u: ...unless v is equal to u.
- *
- * Atomically adds @a to @v, so long as it was not @u.
- * Returns the old value of @v.
- */
-static inline bool arch_atomic64_add_unless(atomic64_t *v, long a, long u)
-{
-	s64 c = arch_atomic64_read(v);
-	do {
-		if (unlikely(c == u))
-			return false;
-	} while (!arch_atomic64_try_cmpxchg(v, &c, c + a));
-	return true;
-}
-
 /*
  * arch_atomic64_dec_if_positive - decrement by 1 if old value positive
  * @v: pointer of type atomic_t
@@ -100,12 +100,6 @@ static __always_inline s64 atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u
 	kasan_check_write(v, sizeof(*v));
 	return arch_atomic64_fetch_add_unless(v, a, u);
 }
-#else
-static __always_inline bool atomic64_add_unless(atomic64_t *v, s64 a, s64 u)
-{
-	kasan_check_write(v, sizeof(*v));
-	return arch_atomic64_add_unless(v, a, u);
-}
 #endif
 
 static __always_inline void atomic_inc(atomic_t *v)
@@ -1042,6 +1042,30 @@ static inline int atomic_dec_if_positive(atomic_t *v)
 #define atomic64_try_cmpxchg_release	atomic64_try_cmpxchg
 #endif /* atomic64_try_cmpxchg */
 
+/**
+ * atomic64_fetch_add_unless - add unless the number is already a given value
+ * @v: pointer of type atomic64_t
+ * @a: the amount to add to v...
+ * @u: ...unless v is equal to u.
+ *
+ * Atomically adds @a to @v, if @v was not already @u.
+ * Returns the original value of @v.
+ */
+#ifndef atomic64_fetch_add_unless
+static inline long long atomic64_fetch_add_unless(atomic64_t *v, long long a,
+						  long long u)
+{
+	long long c = atomic64_read(v);
+
+	do {
+		if (unlikely(c == u))
+			break;
+	} while (!atomic64_try_cmpxchg(v, &c, c + a));
+
+	return c;
+}
+#endif
+
 /**
  * atomic64_add_unless - add unless the number is already a given value
  * @v: pointer of type atomic_t
@@ -1051,12 +1075,10 @@
  * Atomically adds @a to @v, if @v was not already @u.
  * Returns true if the addition was done.
  */
-#ifdef atomic64_fetch_add_unless
 static inline bool atomic64_add_unless(atomic64_t *v, long long a, long long u)
 {
 	return atomic64_fetch_add_unless(v, a, u) != u;
 }
-#endif
 
 /**
  * atomic64_inc_not_zero - increment unless the number is zero