locking,arch,arc: Fold atomic_ops
Many of the atomic op implementations are the same except for one instruction; fold the lot into a few CPP macros and reduce LoC. This also prepares for easy addition of new ops.

Signed-off-by: Peter Zijlstra <peterz@infradead.org>
Cc: Linus Torvalds <torvalds@linux-foundation.org>
Cc: Paul E. McKenney <paulmck@linux.vnet.ibm.com>
Cc: Vineet Gupta <vgupta@synopsys.com>
Link: http://lkml.kernel.org/r/20140508135851.886055622@infradead.org
Signed-off-by: Ingo Molnar <mingo@kernel.org>
commit f7d11e93ee
parent b93c7b8c5b
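For context before reading the diff: the point of the patch is that a single ATOMIC_OP/ATOMIC_OP_RETURN macro pair generates the whole family of atomic operations, so a new op becomes a one-line ATOMIC_OPS() invocation instead of another hand-written function. Below is a minimal, host-compilable sketch of that folding pattern only; the pthread mutex and the my_atomic_t, ops_lock and my_atomic_* names are illustrative stand-ins, not the kernel's API (the real patch generates the LL/SC and spinlock variants shown in the diff).

#include <pthread.h>
#include <stdio.h>

/*
 * Minimal sketch of the folding pattern, using a pthread mutex in place of
 * atomic_ops_lock().  my_atomic_t, ops_lock and the my_atomic_* names are
 * made up for this example.
 */
typedef struct { int counter; } my_atomic_t;

static pthread_mutex_t ops_lock = PTHREAD_MUTEX_INITIALIZER;

/* Generate "void my_atomic_<op>(int, my_atomic_t *)" for a given C operator. */
#define ATOMIC_OP(op, c_op)                                             \
static inline void my_atomic_##op(int i, my_atomic_t *v)               \
{                                                                       \
        pthread_mutex_lock(&ops_lock);                                  \
        v->counter c_op i;                                              \
        pthread_mutex_unlock(&ops_lock);                                \
}

/* Generate "int my_atomic_<op>_return(int, my_atomic_t *)" as well. */
#define ATOMIC_OP_RETURN(op, c_op)                                      \
static inline int my_atomic_##op##_return(int i, my_atomic_t *v)       \
{                                                                       \
        int temp;                                                       \
                                                                        \
        pthread_mutex_lock(&ops_lock);                                  \
        temp = v->counter;                                              \
        temp c_op i;                                                    \
        v->counter = temp;                                              \
        pthread_mutex_unlock(&ops_lock);                                \
                                                                        \
        return temp;                                                    \
}

#define ATOMIC_OPS(op, c_op)                                            \
        ATOMIC_OP(op, c_op)                                             \
        ATOMIC_OP_RETURN(op, c_op)

/* One line per operation instead of one hand-written function per op. */
ATOMIC_OPS(add, +=)
ATOMIC_OPS(sub, -=)
ATOMIC_OP(and, &=)

int main(void)
{
        my_atomic_t v = { 10 };

        my_atomic_add(5, &v);                           /* v.counter == 15 */
        printf("%d\n", my_atomic_sub_return(3, &v));    /* prints 12 */
        my_atomic_and(~4, &v);                          /* clears bit 2: 12 & ~4 == 8 */
        printf("%d\n", v.counter);                      /* prints 8 */
        return 0;
}

Build with something like gcc -pthread; the two ATOMIC_OPS() lines expand into four functions, which is exactly the LoC reduction the commit message refers to.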
@@ -25,79 +25,36 @@
 
 #define atomic_set(v, i) (((v)->counter) = (i))
 
-static inline void atomic_add(int i, atomic_t *v)
-{
-        unsigned int temp;
-
-        __asm__ __volatile__(
-        "1:     llock   %0, [%1]        \n"
-        "       add     %0, %0, %2      \n"
-        "       scond   %0, [%1]        \n"
-        "       bnz     1b              \n"
-        : "=&r"(temp)   /* Early clobber, to prevent reg reuse */
-        : "r"(&v->counter), "ir"(i)
-        : "cc");
-}
-
-static inline void atomic_sub(int i, atomic_t *v)
-{
-        unsigned int temp;
-
-        __asm__ __volatile__(
-        "1:     llock   %0, [%1]        \n"
-        "       sub     %0, %0, %2      \n"
-        "       scond   %0, [%1]        \n"
-        "       bnz     1b              \n"
-        : "=&r"(temp)
-        : "r"(&v->counter), "ir"(i)
-        : "cc");
-}
-
-/* add and also return the new value */
-static inline int atomic_add_return(int i, atomic_t *v)
-{
-        unsigned int temp;
-
-        __asm__ __volatile__(
-        "1:     llock   %0, [%1]        \n"
-        "       add     %0, %0, %2      \n"
-        "       scond   %0, [%1]        \n"
-        "       bnz     1b              \n"
-        : "=&r"(temp)
-        : "r"(&v->counter), "ir"(i)
-        : "cc");
-
-        return temp;
-}
-
-static inline int atomic_sub_return(int i, atomic_t *v)
-{
-        unsigned int temp;
-
-        __asm__ __volatile__(
-        "1:     llock   %0, [%1]        \n"
-        "       sub     %0, %0, %2      \n"
-        "       scond   %0, [%1]        \n"
-        "       bnz     1b              \n"
-        : "=&r"(temp)
-        : "r"(&v->counter), "ir"(i)
-        : "cc");
-
-        return temp;
-}
-
-static inline void atomic_clear_mask(unsigned long mask, unsigned long *addr)
-{
-        unsigned int temp;
-
-        __asm__ __volatile__(
-        "1:     llock   %0, [%1]        \n"
-        "       bic     %0, %0, %2      \n"
-        "       scond   %0, [%1]        \n"
-        "       bnz     1b              \n"
-        : "=&r"(temp)
-        : "r"(addr), "ir"(mask)
-        : "cc");
-}
+#define ATOMIC_OP(op, c_op, asm_op)                                     \
+static inline void atomic_##op(int i, atomic_t *v)                      \
+{                                                                       \
+        unsigned int temp;                                              \
+                                                                        \
+        __asm__ __volatile__(                                           \
+        "1:     llock   %0, [%1]        \n"                             \
+        "       " #asm_op " %0, %0, %2  \n"                             \
+        "       scond   %0, [%1]        \n"                             \
+        "       bnz     1b              \n"                             \
+        : "=&r"(temp)   /* Early clobber, to prevent reg reuse */       \
+        : "r"(&v->counter), "ir"(i)                                     \
+        : "cc");                                                        \
+}                                                                       \
+
+#define ATOMIC_OP_RETURN(op, c_op, asm_op)                              \
+static inline int atomic_##op##_return(int i, atomic_t *v)              \
+{                                                                       \
+        unsigned int temp;                                              \
+                                                                        \
+        __asm__ __volatile__(                                           \
+        "1:     llock   %0, [%1]        \n"                             \
+        "       " #asm_op " %0, %0, %2  \n"                             \
+        "       scond   %0, [%1]        \n"                             \
+        "       bnz     1b              \n"                             \
+        : "=&r"(temp)                                                   \
+        : "r"(&v->counter), "ir"(i)                                     \
+        : "cc");                                                        \
+                                                                        \
+        return temp;                                                    \
+}
 
 #else   /* !CONFIG_ARC_HAS_LLSC */
 
@@ -126,6 +83,7 @@ static inline void atomic_set(atomic_t *v, int i)
         v->counter = i;
         atomic_ops_unlock(flags);
 }
+
 #endif
 
 /*
@@ -133,63 +91,47 @@ static inline void atomic_set(atomic_t *v, int i)
  * Locking would change to irq-disabling only (UP) and spinlocks (SMP)
  */
 
-static inline void atomic_add(int i, atomic_t *v)
-{
-        unsigned long flags;
-
-        atomic_ops_lock(flags);
-        v->counter += i;
-        atomic_ops_unlock(flags);
-}
-
-static inline void atomic_sub(int i, atomic_t *v)
-{
-        unsigned long flags;
-
-        atomic_ops_lock(flags);
-        v->counter -= i;
-        atomic_ops_unlock(flags);
-}
-
-static inline int atomic_add_return(int i, atomic_t *v)
-{
-        unsigned long flags;
-        unsigned long temp;
-
-        atomic_ops_lock(flags);
-        temp = v->counter;
-        temp += i;
-        v->counter = temp;
-        atomic_ops_unlock(flags);
-
-        return temp;
-}
-
-static inline int atomic_sub_return(int i, atomic_t *v)
-{
-        unsigned long flags;
-        unsigned long temp;
-
-        atomic_ops_lock(flags);
-        temp = v->counter;
-        temp -= i;
-        v->counter = temp;
-        atomic_ops_unlock(flags);
-
-        return temp;
-}
-
-static inline void atomic_clear_mask(unsigned long mask, unsigned long *addr)
-{
-        unsigned long flags;
-
-        atomic_ops_lock(flags);
-        *addr &= ~mask;
-        atomic_ops_unlock(flags);
-}
+#define ATOMIC_OP(op, c_op, asm_op)                                     \
+static inline void atomic_##op(int i, atomic_t *v)                      \
+{                                                                       \
+        unsigned long flags;                                            \
+                                                                        \
+        atomic_ops_lock(flags);                                         \
+        v->counter c_op i;                                              \
+        atomic_ops_unlock(flags);                                       \
+}
+
+#define ATOMIC_OP_RETURN(op, c_op)                                      \
+static inline int atomic_##op##_return(int i, atomic_t *v)              \
+{                                                                       \
+        unsigned long flags;                                            \
+        unsigned long temp;                                             \
+                                                                        \
+        atomic_ops_lock(flags);                                         \
+        temp = v->counter;                                              \
+        temp c_op i;                                                    \
+        v->counter = temp;                                              \
+        atomic_ops_unlock(flags);                                       \
+                                                                        \
+        return temp;                                                    \
+}
 
 #endif /* !CONFIG_ARC_HAS_LLSC */
 
+#define ATOMIC_OPS(op, c_op, asm_op)                                    \
+        ATOMIC_OP(op, c_op, asm_op)                                     \
+        ATOMIC_OP_RETURN(op, c_op, asm_op)
+
+ATOMIC_OPS(add, +=, add)
+ATOMIC_OPS(sub, -=, sub)
+ATOMIC_OP(and, &=, and)
+
+#define atomic_clear_mask(mask, v) atomic_and(~(mask), (v))
+
+#undef ATOMIC_OPS
+#undef ATOMIC_OP_RETURN
+#undef ATOMIC_OP
+
 /**
  * __atomic_add_unless - add unless the number is a given value
  * @v: pointer of type atomic_t
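One detail of the LL/SC variant above worth calling out: the only per-operation difference is the middle instruction, which ATOMIC_OP splices into the asm template by stringizing the asm_op argument (#asm_op) and relying on adjacent string-literal concatenation. A small stand-alone illustration of just that preprocessor mechanism, assuming nothing beyond standard C (ASM_TEMPLATE is a made-up name; no ARC instructions are executed, the program only prints the template string that would be handed to __asm__):

#include <stdio.h>

/*
 * #asm_op turns the macro argument into a string literal; the compiler then
 * concatenates it with the neighbouring literals, exactly as
 * "  " #asm_op " %0, %0, %2  \n" does inside the patch's ATOMIC_OP.
 */
#define ASM_TEMPLATE(asm_op)                            \
        "1:     llock   %0, [%1]        \n"             \
        "       " #asm_op " %0, %0, %2  \n"             \
        "       scond   %0, [%1]        \n"             \
        "       bnz     1b              \n"

int main(void)
{
        puts(ASM_TEMPLATE(add));        /* template ATOMIC_OP(add, +=, add) would emit */
        puts(ASM_TEMPLATE(sub));        /* same skeleton, only the middle op changes */
        return 0;
}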