
atomics/arc: Define atomic64_fetch_add_unless()

As a step towards unifying the atomic/atomic64/atomic_long APIs, this
patch converts the arch/arc implementation of atomic64_add_unless() into
an implementation of atomic64_fetch_add_unless().

A wrapper in <linux/atomic.h> will build atomic64_add_unless() atop of
this, provided it is given a preprocessor definition.
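
For illustration, a minimal sketch of what such a wrapper can look
like, assuming the architecture has provided the preprocessor
definition (this shows the approach, not the verbatim generic code):

	#ifdef atomic64_fetch_add_unless
	static inline bool atomic64_add_unless(atomic64_t *v, long long a,
					       long long u)
	{
		/* The add happened iff the old value was not @u. */
		return atomic64_fetch_add_unless(v, a, u) != u;
	}
	#endif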

No functional change is intended as a result of this patch.

Signed-off-by: Mark Rutland <mark.rutland@arm.com>
Reviewed-by: Will Deacon <will.deacon@arm.com>
Acked-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Cc: Boqun Feng <boqun.feng@gmail.com>
Cc: Linus Torvalds <torvalds@linux-foundation.org>
Cc: Thomas Gleixner <tglx@linutronix.de>
Cc: Vineet Gupta <vgupta@synopsys.com>
Link: https://lore.kernel.org/lkml/20180621121321.4761-11-mark.rutland@arm.com
Signed-off-by: Ingo Molnar <mingo@kernel.org>
Mark Rutland 2018-06-21 13:13:13 +01:00 committed by Ingo Molnar
parent 434b6acc31
commit ab0b910490
1 changed file with 12 additions and 13 deletions


@@ -531,41 +531,40 @@ static inline long long atomic64_dec_if_positive(atomic64_t *v)
 }
 
 /**
- * atomic64_add_unless - add unless the number is a given value
+ * atomic64_fetch_add_unless - add unless the number is a given value
  * @v: pointer of type atomic64_t
  * @a: the amount to add to v...
  * @u: ...unless v is equal to u.
  *
- * if (v != u) { v += a; ret = 1} else {ret = 0}
- * Returns 1 iff @v was not @u (i.e. if add actually happened)
+ * Atomically adds @a to @v, if it was not @u.
+ * Returns the old value of @v
  */
-static inline int atomic64_add_unless(atomic64_t *v, long long a, long long u)
+static inline long long atomic64_fetch_add_unless(atomic64_t *v, long long a,
+						  long long u)
 {
-	long long val;
-	int op_done;
+	long long old, temp;
 
 	smp_mb();
 
 	__asm__ __volatile__(
 	"1:	llockd  %0, [%2]	\n"
-	"	mov	%1, 1		\n"
 	"	brne	%L0, %L4, 2f	# continue to add since v != u \n"
 	"	breq.d	%H0, %H4, 3f	# return since v == u \n"
-	"	mov	%1, 0		\n"
 	"2:				\n"
-	"	add.f   %L0, %L0, %L3	\n"
-	"	adc     %H0, %H0, %H3	\n"
-	"	scondd  %0, [%2]	\n"
+	"	add.f   %L1, %L0, %L3	\n"
+	"	adc     %H1, %H0, %H3	\n"
+	"	scondd  %1, [%2]	\n"
 	"	bnz     1b		\n"
 	"3:				\n"
-	: "=&r"(val), "=&r" (op_done)
+	: "=&r"(old), "=&r" (temp)
 	: "r"(&v->counter), "r"(a), "r"(u)
 	: "cc");	/* memory clobber comes from smp_mb() */
 
 	smp_mb();
 
-	return op_done;
+	return old;
 }
+#define atomic64_fetch_add_unless atomic64_fetch_add_unless
 
 #define atomic64_add_negative(a, v)	(atomic64_add_return((a), (v)) < 0)
 #define atomic64_inc(v)			atomic64_add(1LL, (v))
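
For context, a hypothetical caller (not part of this patch) showing why
the fetch form is useful: the primitive returns the old value rather
than a success flag, so the caller compares against @u itself.

	/* Hypothetical helper: take a reference unless the count is zero. */
	static inline bool get_ref(atomic64_t *refcnt)
	{
		return atomic64_fetch_add_unless(refcnt, 1LL, 0LL) != 0LL;
	}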