#ifndef _ASM_X86_CMPXCHG_64_H
#define _ASM_X86_CMPXCHG_64_H

#include <asm/alternative.h> /* Provides LOCK_PREFIX */

#define __xg(x) ((volatile long *)(x))

static inline void set_64bit(volatile unsigned long *ptr, unsigned long val)
{
	*ptr = val;
}

#define _set_64bit set_64bit

extern void __xchg_wrong_size(void);
extern void __cmpxchg_wrong_size(void);

/*
 * Note: no "lock" prefix even on SMP: xchg always implies lock anyway.
 * Note 2: xchg has a side effect, so the volatile attribute is necessary,
 *	   but in general the primitive is invalid, *ptr is an output
 *	   argument. --ANK
 */
#define __xchg(x, ptr, size)						\
({									\
	__typeof(*(ptr)) __x = (x);					\
	switch (size) {							\
	case 1:								\
		asm volatile("xchgb %b0,%1"				\
			     : "=q" (__x)				\
			     : "m" (*__xg(ptr)), "0" (__x)		\
			     : "memory");				\
		break;							\
	case 2:								\
		asm volatile("xchgw %w0,%1"				\
			     : "=r" (__x)				\
			     : "m" (*__xg(ptr)), "0" (__x)		\
			     : "memory");				\
		break;							\
	case 4:								\
		asm volatile("xchgl %k0,%1"				\
			     : "=r" (__x)				\
			     : "m" (*__xg(ptr)), "0" (__x)		\
			     : "memory");				\
		break;							\
	case 8:								\
		asm volatile("xchgq %0,%1"				\
			     : "=r" (__x)				\
			     : "m" (*__xg(ptr)), "0" (__x)		\
			     : "memory");				\
		break;							\
	default:							\
		__xchg_wrong_size();					\
	}								\
	__x;								\
})

#define xchg(ptr, v)							\
	__xchg((v), (ptr), sizeof(*ptr))
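
/*
 * Illustrative sketch only, not part of the original header: xchg()
 * unconditionally swaps a new value into memory and hands back the
 * previous contents.  The flag word and helper name below are
 * hypothetical.
 */
static inline int __example_try_claim(unsigned long *flag)
{
	/* a zero old value means the flag was clear and we just claimed it */
	return xchg(flag, 1UL) == 0;
}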

#define __HAVE_ARCH_CMPXCHG 1

/*
 * Atomic compare and exchange.  Compare OLD with MEM, if identical,
 * store NEW in MEM.  Return the initial value in MEM.  Success is
 * indicated by comparing RETURN with OLD.
 */
#define __raw_cmpxchg(ptr, old, new, size, lock)			\
({									\
	__typeof__(*(ptr)) __ret;					\
	__typeof__(*(ptr)) __old = (old);				\
	__typeof__(*(ptr)) __new = (new);				\
	switch (size) {							\
	case 1:								\
		asm volatile(lock "cmpxchgb %b1,%2"			\
			     : "=a" (__ret)				\
			     : "q" (__new), "m" (*__xg(ptr)), "0" (__old) \
			     : "memory");				\
		break;							\
	case 2:								\
		asm volatile(lock "cmpxchgw %w1,%2"			\
			     : "=a" (__ret)				\
			     : "r" (__new), "m" (*__xg(ptr)), "0" (__old) \
			     : "memory");				\
		break;							\
	case 4:								\
		asm volatile(lock "cmpxchgl %k1,%2"			\
			     : "=a" (__ret)				\
			     : "r" (__new), "m" (*__xg(ptr)), "0" (__old) \
			     : "memory");				\
		break;							\
	case 8:								\
		asm volatile(lock "cmpxchgq %1,%2"			\
			     : "=a" (__ret)				\
			     : "r" (__new), "m" (*__xg(ptr)), "0" (__old) \
			     : "memory");				\
		break;							\
	default:							\
		__cmpxchg_wrong_size();					\
	}								\
	__ret;								\
})

#define __cmpxchg(ptr, old, new, size)					\
	__raw_cmpxchg((ptr), (old), (new), (size), LOCK_PREFIX)

#define __sync_cmpxchg(ptr, old, new, size)				\
	__raw_cmpxchg((ptr), (old), (new), (size), "lock; ")

#define __cmpxchg_local(ptr, old, new, size)				\
	__raw_cmpxchg((ptr), (old), (new), (size), "")

#define cmpxchg(ptr, old, new)						\
	__cmpxchg((ptr), (old), (new), sizeof(*ptr))

#define sync_cmpxchg(ptr, old, new)					\
	__sync_cmpxchg((ptr), (old), (new), sizeof(*ptr))

#define cmpxchg_local(ptr, old, new)					\
	__cmpxchg_local((ptr), (old), (new), sizeof(*ptr))
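
/*
 * Illustrative sketch only, not part of the original header: the usual
 * compare-and-swap retry loop built on cmpxchg().  The counter pointer
 * and helper name are hypothetical.
 */
static inline void __example_atomic_add(unsigned long *counter, unsigned long delta)
{
	unsigned long old, new;

	do {
		old = *counter;		/* snapshot the current value */
		new = old + delta;	/* value we would like to install */
		/* retry if another CPU changed *counter since the snapshot */
	} while (cmpxchg(counter, old, new) != old);
}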

#define cmpxchg64(ptr, o, n)						\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	cmpxchg((ptr), (o), (n));					\
})

#define cmpxchg64_local(ptr, o, n)					\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	cmpxchg_local((ptr), (o), (n));					\
})
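
/*
 * Illustrative note, not part of the original header: on x86-64 the
 * cmpxchg64*() forms only check at build time that the operand is 8 bytes
 * wide and then forward to cmpxchg()/cmpxchg_local().  With a hypothetical
 * 64-bit sequence counter:
 *
 *	u64 prev = cmpxchg64(&seq, old_seq, old_seq + 1);
 *
 * prev == old_seq indicates the update was installed.
 */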

#endif /* _ASM_X86_CMPXCHG_64_H */