sh: move xchg_cmpxchg to a header by itself
Looks like future sh variants will support a 4-byte cas, which will be
used to implement 1- and 2-byte xchg. This is exactly what we do for
llsc now; move the portable part of the code into a separate header so
it's easy to reuse.

Suggested-by: Rich Felker <dalias@libc.org>
Signed-off-by: Michael S. Tsirkin <mst@redhat.com>
Acked-by: Peter Zijlstra (Intel) <peterz@infradead.org>
commit 9e3f84ce41
parent 3226aad81a
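The technique being factored out is the usual one: locate the aligned
32-bit word containing the 1- or 2-byte target, splice the new value
into that word at the right bit offset, and retry a 4-byte
compare-and-swap until it succeeds without interference. A minimal
userspace sketch of the same idea, using GCC/Clang __atomic builtins
in place of the kernel's READ_ONCE()/__cmpxchg_u32(); the xchg_small()
name and demo values are illustrative, not part of the patch, and the
big-endian shift here is computed from the operand size:

/* Sketch: 1/2-byte xchg built on a 4-byte CAS. Assumes the target
 * lies within a naturally aligned 32-bit word.
 */
#include <stdint.h>
#include <stdio.h>

static uint32_t xchg_small(volatile void *ptr, uint32_t x, int size)
{
	uintptr_t off = (uintptr_t)ptr % sizeof(uint32_t);
	volatile uint32_t *p = (volatile uint32_t *)((uintptr_t)ptr - off);
#if __BYTE_ORDER__ == __ORDER_BIG_ENDIAN__
	int bitoff = (int)(sizeof(uint32_t) - size - off) * 8;
#else
	int bitoff = (int)off * 8;
#endif
	uint32_t mask = ((1u << (size * 8)) - 1) << bitoff;
	uint32_t oldv, newv;

	do {
		oldv = __atomic_load_n(p, __ATOMIC_RELAXED);
		/* splice the small operand into the containing word */
		newv = (oldv & ~mask) | ((x << bitoff) & mask);
	} while (!__atomic_compare_exchange_n(p, &oldv, newv, 0,
					      __ATOMIC_SEQ_CST,
					      __ATOMIC_RELAXED));
	return (oldv & mask) >> bitoff;
}

int main(void)
{
	static _Alignas(uint32_t) uint16_t v[2] = { 0x1111, 0x2222 };
	uint32_t old = xchg_small(&v[1], 0xbeef, sizeof(v[1]));

	printf("old=%#x new=%#x\n", old, (unsigned)v[1]);
	/* prints: old=0x2222 new=0xbeef */
	return 0;
}

The retry loop is what makes mixed-size access safe: a concurrent write
to a neighboring byte changes the containing word, the cas fails, and
the splice is recomputed.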
diff --git a/arch/sh/include/asm/cmpxchg-llsc.h b/arch/sh/include/asm/cmpxchg-llsc.h
--- a/arch/sh/include/asm/cmpxchg-llsc.h
+++ b/arch/sh/include/asm/cmpxchg-llsc.h
@@ -1,9 +1,6 @@
 #ifndef __ASM_SH_CMPXCHG_LLSC_H
 #define __ASM_SH_CMPXCHG_LLSC_H
 
-#include <linux/bitops.h>
-#include <asm/byteorder.h>
-
 static inline unsigned long xchg_u32(volatile u32 *m, unsigned long val)
 {
 	unsigned long retval;
@@ -50,36 +47,6 @@ __cmpxchg_u32(volatile u32 *m, unsigned long old, unsigned long new)
 	return retval;
 }
 
-static inline u32 __xchg_cmpxchg(volatile void *ptr, u32 x, int size)
-{
-	int off = (unsigned long)ptr % sizeof(u32);
-	volatile u32 *p = ptr - off;
-#ifdef __BIG_ENDIAN
-	int bitoff = (sizeof(u32) - 1 - off) * BITS_PER_BYTE;
-#else
-	int bitoff = off * BITS_PER_BYTE;
-#endif
-	u32 bitmask = ((0x1 << size * BITS_PER_BYTE) - 1) << bitoff;
-	u32 oldv, newv;
-	u32 ret;
-
-	do {
-		oldv = READ_ONCE(*p);
-		ret = (oldv & bitmask) >> bitoff;
-		newv = (oldv & ~bitmask) | (x << bitoff);
-	} while (__cmpxchg_u32(p, oldv, newv) != oldv);
-
-	return ret;
-}
-
-static inline unsigned long xchg_u16(volatile u16 *m, unsigned long val)
-{
-	return __xchg_cmpxchg(m, val, sizeof *m);
-}
-
-static inline unsigned long xchg_u8(volatile u8 *m, unsigned long val)
-{
-	return __xchg_cmpxchg(m, val, sizeof *m);
-}
-
+#include <asm/cmpxchg-xchg.h>
+
 #endif /* __ASM_SH_CMPXCHG_LLSC_H */
diff --git a/arch/sh/include/asm/cmpxchg-xchg.h b/arch/sh/include/asm/cmpxchg-xchg.h
new file mode 100644
--- /dev/null
+++ b/arch/sh/include/asm/cmpxchg-xchg.h
@@ -0,0 +1,51 @@
+#ifndef __ASM_SH_CMPXCHG_XCHG_H
+#define __ASM_SH_CMPXCHG_XCHG_H
+
+/*
+ * Copyright (C) 2016 Red Hat, Inc.
+ * Author: Michael S. Tsirkin <mst@redhat.com>
+ *
+ * This work is licensed under the terms of the GNU GPL, version 2. See the
+ * file "COPYING" in the main directory of this archive for more details.
+ */
+#include <linux/bitops.h>
+#include <asm/byteorder.h>
+
+/*
+ * Portable implementations of 1 and 2 byte xchg using a 4 byte cmpxchg.
+ * Note: this header isn't self-contained: before including it, __cmpxchg_u32
+ * must be defined first.
+ */
+static inline u32 __xchg_cmpxchg(volatile void *ptr, u32 x, int size)
+{
+	int off = (unsigned long)ptr % sizeof(u32);
+	volatile u32 *p = ptr - off;
+#ifdef __BIG_ENDIAN
+	int bitoff = (sizeof(u32) - 1 - off) * BITS_PER_BYTE;
+#else
+	int bitoff = off * BITS_PER_BYTE;
+#endif
+	u32 bitmask = ((0x1 << size * BITS_PER_BYTE) - 1) << bitoff;
+	u32 oldv, newv;
+	u32 ret;
+
+	do {
+		oldv = READ_ONCE(*p);
+		ret = (oldv & bitmask) >> bitoff;
+		newv = (oldv & ~bitmask) | (x << bitoff);
+	} while (__cmpxchg_u32(p, oldv, newv) != oldv);
+
+	return ret;
+}
+
+static inline unsigned long xchg_u16(volatile u16 *m, unsigned long val)
+{
+	return __xchg_cmpxchg(m, val, sizeof *m);
+}
+
+static inline unsigned long xchg_u8(volatile u8 *m, unsigned long val)
+{
+	return __xchg_cmpxchg(m, val, sizeof *m);
+}
+
+#endif /* __ASM_SH_CMPXCHG_XCHG_H */
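The llsc hunk above shows the contract the new header's comment spells
out: define __cmpxchg_u32() first, then include the portable header. A
future cas-based sh variant would follow the same pattern. A
hypothetical sketch, assuming J2-style cas.l hardware; the file name,
asm, and comments below are illustrative assumptions, not part of this
patch:

/* arch/sh/include/asm/cmpxchg-cas.h -- hypothetical, not in this patch.
 * Satisfy the __cmpxchg_u32() contract with a hardware 4-byte cas,
 * then reuse the portable header for the 1/2-byte operations.
 */
static inline unsigned long
__cmpxchg_u32(volatile u32 *m, unsigned long old, unsigned long new)
{
	/* cas.l Rm,Rn,@R0: if (*R0 == Rm) swap; Rn gets the old value */
	__asm__ __volatile__("cas.l %1, %0, @r0"
			     : "+r" (new)
			     : "r" (old), "z" (m)
			     : "t", "memory");
	return new;
}

#include <asm/cmpxchg-xchg.h>	/* now provides xchg_u8()/xchg_u16() */

Only the code above the include is per-variant; the 1- and 2-byte
operations then come from the shared header unchanged.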