sparc32: Implement xchg and atomic_xchg using ATOMIC_HASH locks
Atomicity between xchg and cmpxchg cannot be guaranteed when xchg is implemented with a swap and cmpxchg is implemented with locks. Without this, e.g. mcs_spin_lock and mcs_spin_unlock are broken.

Signed-off-by: Andreas Larsson <andreas@gaisler.com>
Signed-off-by: David S. Miller <davem@davemloft.net>
commit 1a17fdc4f4
parent ab5c780913
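To make the problem concrete: before this change, sparc32 built xchg() on the hardware swap instruction while cmpxchg() took an ATOMIC_HASH spinlock, so a swap could land between cmpxchg's read and its write and be silently overwritten (a lost update). The user-space sketch below models that pre-patch situation; the helper names and the pthread mutex are illustrative stand-ins, not kernel code.

#include <pthread.h>
#include <stdio.h>

static pthread_mutex_t hash_lock = PTHREAD_MUTEX_INITIALIZER;
static int word;

/* Models the lock-based cmpxchg: a read-modify-write under hash_lock. */
static int cmpxchg_locked(int *p, int old, int new)
{
	int prev;

	pthread_mutex_lock(&hash_lock);
	prev = *p;
	if (prev == old)
		*p = new;
	pthread_mutex_unlock(&hash_lock);
	return prev;
}

/* Models the old swap-based xchg: atomic in hardware, but it never takes
 * hash_lock, so on another CPU it can slip in between cmpxchg_locked()'s
 * read and write, and the value it stores is then overwritten. */
static int swap_unlocked(int *p, int new)
{
	return __atomic_exchange_n(p, new, __ATOMIC_SEQ_CST);
}

int main(void)
{
	int prev;

	/* Single-threaded run just exercises the two operations; the lost
	 * update needs the calls to overlap on two CPUs. */
	cmpxchg_locked(&word, 0, 1);	/* word: 0 -> 1 */
	prev = swap_unlocked(&word, 2);	/* word: 1 -> 2, prev == 1 */
	printf("prev=%d word=%d\n", prev, word);
	return 0;
}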
@@ -22,7 +22,7 @@
 
 int atomic_add_return(int, atomic_t *);
 int atomic_cmpxchg(atomic_t *, int, int);
-#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
+int atomic_xchg(atomic_t *, int);
 int __atomic_add_unless(atomic_t *, int, int);
 void atomic_set(atomic_t *, int);
 
@@ -11,22 +11,14 @@
 #ifndef __ARCH_SPARC_CMPXCHG__
 #define __ARCH_SPARC_CMPXCHG__
 
-static inline unsigned long xchg_u32(__volatile__ unsigned long *m, unsigned long val)
-{
-	__asm__ __volatile__("swap [%2], %0"
-			     : "=&r" (val)
-			     : "0" (val), "r" (m)
-			     : "memory");
-	return val;
-}
-
+unsigned long __xchg_u32(volatile u32 *m, u32 new);
 void __xchg_called_with_bad_pointer(void);
 
 static inline unsigned long __xchg(unsigned long x, __volatile__ void * ptr, int size)
 {
 	switch (size) {
 	case 4:
-		return xchg_u32(ptr, x);
+		return __xchg_u32(ptr, x);
 	}
 	__xchg_called_with_bad_pointer();
 	return x;
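For context, callers never invoke __xchg_u32() directly; they go through an xchg() macro that dispatches on sizeof(*(ptr)) into __xchg() above. The wrapper below is a sketch of that usual shape, not the verbatim macro from cmpxchg_32.h.

/* Sketch of the size-dispatching wrapper callers actually use (illustrative,
 * not the exact kernel macro): sizeof(*(ptr)) routes a 4-byte operand
 * through __xchg() to __xchg_u32(). */
#define xchg(ptr, x)							\
({									\
	__typeof__(*(ptr)) __ret;					\
	__ret = (__typeof__(*(ptr)))					\
		__xchg((unsigned long)(x), (ptr), sizeof(*(ptr)));	\
	__ret;								\
})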
@@ -45,6 +45,19 @@ ATOMIC_OP(add, +=)
 
 #undef ATOMIC_OP
 
+int atomic_xchg(atomic_t *v, int new)
+{
+	int ret;
+	unsigned long flags;
+
+	spin_lock_irqsave(ATOMIC_HASH(v), flags);
+	ret = v->counter;
+	v->counter = new;
+	spin_unlock_irqrestore(ATOMIC_HASH(v), flags);
+	return ret;
+}
+EXPORT_SYMBOL(atomic_xchg);
+
 int atomic_cmpxchg(atomic_t *v, int old, int new)
 {
 	int ret;
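The new atomic_xchg() reuses ATOMIC_HASH(), the existing scheme in arch/sparc/lib/atomic32.c that maps the operand's address to one spinlock out of a small array, so every lock-based atomic on the same word serializes on the same lock. A simplified sketch of that idea (the real table size and hash shift may differ):

#include <linux/spinlock.h>

/* Hashed-spinlock scheme behind the sparc32 atomic helpers: the operand's
 * address picks one lock out of a small array.  The size and shift here
 * are illustrative, not necessarily the kernel's exact values. */
#define ATOMIC_HASH_SIZE	4

static spinlock_t __atomic_hash[ATOMIC_HASH_SIZE] = {
	[0 ... ATOMIC_HASH_SIZE - 1] = __SPIN_LOCK_UNLOCKED(__atomic_hash)
};

#define ATOMIC_HASH(a)							\
	(&__atomic_hash[(((unsigned long)(a)) >> 8) & (ATOMIC_HASH_SIZE - 1)])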
@@ -137,3 +150,17 @@ unsigned long __cmpxchg_u32(volatile u32 *ptr, u32 old, u32 new)
 	return (unsigned long)prev;
 }
 EXPORT_SYMBOL(__cmpxchg_u32);
+
+unsigned long __xchg_u32(volatile u32 *ptr, u32 new)
+{
+	unsigned long flags;
+	u32 prev;
+
+	spin_lock_irqsave(ATOMIC_HASH(ptr), flags);
+	prev = *ptr;
+	*ptr = new;
+	spin_unlock_irqrestore(ATOMIC_HASH(ptr), flags);
+
+	return (unsigned long)prev;
+}
+EXPORT_SYMBOL(__xchg_u32);
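The mcs_spin_lock/mcs_spin_unlock breakage named in the commit message comes down to the pattern below: the locker publishes its queue node with xchg() while the unlocker clears the tail with cmpxchg() on the same word. If xchg() bypasses the hash lock that cmpxchg() takes, the two read-modify-writes can interleave and a queued node can be lost; routing both through ATOMIC_HASH closes that window. The snippet is a simplified paraphrase of the generic MCS code in kernel/locking/mcs_spinlock.h, not a verbatim copy.

/* Simplified MCS-style lock/unlock showing xchg() and cmpxchg() operating
 * on the same tail word (paraphrased, not the kernel's exact code). */
struct mcs_node {
	struct mcs_node *next;
	int locked;
};

static void mcs_lock(struct mcs_node **tail, struct mcs_node *node)
{
	struct mcs_node *prev;

	node->next = NULL;
	node->locked = 0;
	prev = xchg(tail, node);		/* publish ourselves as the new tail */
	if (!prev)
		return;				/* lock was free */
	WRITE_ONCE(prev->next, node);
	while (!READ_ONCE(node->locked))	/* wait for predecessor's handoff */
		cpu_relax();
}

static void mcs_unlock(struct mcs_node **tail, struct mcs_node *node)
{
	struct mcs_node *next = READ_ONCE(node->next);

	if (!next) {
		/* No visible successor: try to reset the tail.  This cmpxchg()
		 * must be atomic with respect to a concurrent xchg() in
		 * mcs_lock(), which is exactly what this commit guarantees. */
		if (cmpxchg(tail, node, NULL) == node)
			return;
		while (!(next = READ_ONCE(node->next)))
			cpu_relax();
	}
	WRITE_ONCE(next->locked, 1);
}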