#ifndef __ARCH_S390_ATOMIC__
#define __ARCH_S390_ATOMIC__

#include <linux/compiler.h>

/*
 *  include/asm-s390/atomic.h
 *
 *  S390 version
 *    Copyright (C) 1999-2005 IBM Deutschland Entwicklung GmbH, IBM Corporation
 *    Author(s): Martin Schwidefsky (schwidefsky@de.ibm.com),
 *               Denis Joseph Barrow,
 *               Arnd Bergmann (arndb@de.ibm.com)
 *
 *  Derived from "include/asm-i386/bitops.h"
 *    Copyright (C) 1992, Linus Torvalds
 *
 */

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc.
 * S390 uses 'Compare And Swap' (CS) for atomicity in an SMP environment.
 */

typedef struct {
	volatile int counter;
} __attribute__ ((aligned (4))) atomic_t;
#define ATOMIC_INIT(i)	{ (i) }
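
/*
 * Illustrative usage sketch, not part of the original header (the
 * variable name is hypothetical):
 *
 *	static atomic_t example_count = ATOMIC_INIT(0);
 *
 *	atomic_inc(&example_count);
 *	if (atomic_read(&example_count) > 0)
 *		atomic_dec(&example_count);
 */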

#ifdef __KERNEL__

#if __GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ > 2)

#define __CS_LOOP(ptr, op_val, op_string) ({			\
	typeof(ptr->counter) old_val, new_val;			\
	asm volatile(						\
		"	l	%0,%2\n"			\
		"0:	lr	%1,%0\n"			\
		op_string "	%1,%3\n"			\
		"	cs	%0,%1,%2\n"			\
		"	jl	0b"				\
		: "=&d" (old_val), "=&d" (new_val),		\
		  "=Q" (((atomic_t *)(ptr))->counter)		\
		: "d" (op_val), "Q" (((atomic_t *)(ptr))->counter) \
		: "cc", "memory");				\
	new_val;						\
})

#else /* __GNUC__ */

#define __CS_LOOP(ptr, op_val, op_string) ({			\
	typeof(ptr->counter) old_val, new_val;			\
	asm volatile(						\
		"	l	%0,0(%3)\n"			\
		"0:	lr	%1,%0\n"			\
		op_string "	%1,%4\n"			\
		"	cs	%0,%1,0(%3)\n"			\
		"	jl	0b"				\
		: "=&d" (old_val), "=&d" (new_val),		\
		  "=m" (((atomic_t *)(ptr))->counter)		\
		: "a" (ptr), "d" (op_val),			\
		  "m" (((atomic_t *)(ptr))->counter)		\
		: "cc", "memory");				\
	new_val;						\
})

#endif /* __GNUC__ */
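
/*
 * Sketch of what __CS_LOOP() implements, not part of the original
 * header. In pseudo-C, with cs() standing in for the COMPARE AND
 * SWAP instruction:
 *
 *	old_val = ptr->counter;
 *	do {
 *		new_val = old_val <op> op_val;
 *	} while (!cs(&ptr->counter, &old_val, new_val));
 *	return new_val;
 *
 * CS stores new_val only if the counter still equals old_val;
 * otherwise it reloads old_val and the loop retries, so concurrent
 * updates from other CPUs are never lost.
 */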

#define atomic_read(v)		((v)->counter)
#define atomic_set(v,i)		(((v)->counter) = (i))

static __inline__ int atomic_add_return(int i, atomic_t *v)
{
	return __CS_LOOP(v, i, "ar");
}
#define atomic_add(_i, _v)		atomic_add_return(_i, _v)
#define atomic_add_negative(_i, _v)	(atomic_add_return(_i, _v) < 0)
#define atomic_inc(_v)			atomic_add_return(1, _v)
#define atomic_inc_return(_v)		atomic_add_return(1, _v)
#define atomic_inc_and_test(_v)		(atomic_add_return(1, _v) == 0)

static __inline__ int atomic_sub_return(int i, atomic_t *v)
{
	return __CS_LOOP(v, i, "sr");
}
#define atomic_sub(_i, _v)		atomic_sub_return(_i, _v)
#define atomic_sub_and_test(_i, _v)	(atomic_sub_return(_i, _v) == 0)
#define atomic_dec(_v)			atomic_sub_return(1, _v)
#define atomic_dec_return(_v)		atomic_sub_return(1, _v)
#define atomic_dec_and_test(_v)		(atomic_sub_return(1, _v) == 0)
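
/*
 * Illustrative sketch, not part of the original header: the classic
 * last-reference pattern built on atomic_dec_and_test(). The names
 * are hypothetical.
 *
 *	struct example_obj {
 *		atomic_t refcnt;
 *	};
 *
 *	static void example_put(struct example_obj *obj)
 *	{
 *		if (atomic_dec_and_test(&obj->refcnt))
 *			kfree(obj);
 *	}
 *
 * Because the decrement and the zero test are one atomic operation,
 * exactly one caller sees the count hit zero and frees the object.
 */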

static __inline__ void atomic_clear_mask(unsigned long mask, atomic_t *v)
{
	__CS_LOOP(v, ~mask, "nr");
}

static __inline__ void atomic_set_mask(unsigned long mask, atomic_t *v)
{
	__CS_LOOP(v, mask, "or");
}

#define atomic_xchg(v, new)	(xchg(&((v)->counter), new))
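
/*
 * Illustrative sketch, not part of the original header: atomic_xchg()
 * unconditionally installs a new value and returns the previous one,
 * e.g. to claim a one-shot flag (both names are hypothetical):
 *
 *	if (atomic_xchg(&example_flag, 1) == 0)
 *		do_first_time_setup();
 *
 * Exactly one caller observes the old value 0 and performs the setup.
 */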
static __inline__ int atomic_cmpxchg(atomic_t *v, int old, int new)
{
#if __GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ > 2)
	asm volatile(
		"	cs	%0,%2,%1"
		: "+d" (old), "=Q" (v->counter)
		: "d" (new), "Q" (v->counter)
		: "cc", "memory");
#else /* __GNUC__ */
	asm volatile(
		"	cs	%0,%3,0(%2)"
		: "+d" (old), "=m" (v->counter)
		: "a" (v), "d" (new), "m" (v->counter)
		: "cc", "memory");
#endif /* __GNUC__ */
	return old;
}
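
/*
 * Illustrative sketch, not part of the original header: building a
 * bounded (saturating) increment on top of atomic_cmpxchg(). The
 * function name and the limit parameter are hypothetical.
 *
 *	static int example_inc_below(atomic_t *v, int limit)
 *	{
 *		int c = atomic_read(v);
 *
 *		while (c < limit) {
 *			int old = atomic_cmpxchg(v, c, c + 1);
 *			if (old == c)
 *				return 1;
 *			c = old;
 *		}
 *		return 0;
 *	}
 *
 * If another CPU raced in between, cmpxchg returns the value it found
 * instead of c, and the loop re-checks the limit with that value.
 */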
static __inline__ int atomic_add_unless(atomic_t *v, int a, int u)
{
	int c, old;

	c = atomic_read(v);
	for (;;) {
		if (unlikely(c == u))
			break;
		old = atomic_cmpxchg(v, c, c + a);
		if (likely(old == c))
			break;
		c = old;
	}
	return c != u;
}

#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
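
/*
 * Illustrative sketch, not part of the original header: taking a
 * reference only while the object is still live, a common use of
 * atomic_inc_not_zero(). The names are hypothetical.
 *
 *	static int example_try_get(struct example_obj *obj)
 *	{
 *		return atomic_inc_not_zero(&obj->refcnt);
 *	}
 *
 * If the count already dropped to zero (the object is being torn
 * down), the increment is refused and the caller must not use the
 * object.
 */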

#undef __CS_LOOP

#ifdef __s390x__
typedef struct {
	volatile long long counter;
} __attribute__ ((aligned (8))) atomic64_t;
#define ATOMIC64_INIT(i)	{ (i) }

#if __GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ > 2)

#define __CSG_LOOP(ptr, op_val, op_string) ({			\
	typeof(ptr->counter) old_val, new_val;			\
	asm volatile(						\
		"	lg	%0,%2\n"			\
		"0:	lgr	%1,%0\n"			\
		op_string "	%1,%3\n"			\
		"	csg	%0,%1,%2\n"			\
		"	jl	0b"				\
		: "=&d" (old_val), "=&d" (new_val),		\
		  "=Q" (((atomic64_t *)(ptr))->counter)		\
		: "d" (op_val), "Q" (((atomic64_t *)(ptr))->counter) \
		: "cc", "memory");				\
	new_val;						\
})

#else /* __GNUC__ */

#define __CSG_LOOP(ptr, op_val, op_string) ({			\
	typeof(ptr->counter) old_val, new_val;			\
	asm volatile(						\
		"	lg	%0,0(%3)\n"			\
		"0:	lgr	%1,%0\n"			\
		op_string "	%1,%4\n"			\
		"	csg	%0,%1,0(%3)\n"			\
		"	jl	0b"				\
		: "=&d" (old_val), "=&d" (new_val),		\
		  "=m" (((atomic64_t *)(ptr))->counter)		\
		: "a" (ptr), "d" (op_val),			\
		  "m" (((atomic64_t *)(ptr))->counter)		\
		: "cc", "memory");				\
	new_val;						\
})

#endif /* __GNUC__ */

#define atomic64_read(v)	((v)->counter)
#define atomic64_set(v,i)	(((v)->counter) = (i))

static __inline__ long long atomic64_add_return(long long i, atomic64_t *v)
{
	return __CSG_LOOP(v, i, "agr");
}
#define atomic64_add(_i, _v)		atomic64_add_return(_i, _v)
#define atomic64_add_negative(_i, _v)	(atomic64_add_return(_i, _v) < 0)
#define atomic64_inc(_v)		atomic64_add_return(1, _v)
#define atomic64_inc_return(_v)		atomic64_add_return(1, _v)
#define atomic64_inc_and_test(_v)	(atomic64_add_return(1, _v) == 0)

static __inline__ long long atomic64_sub_return(long long i, atomic64_t *v)
{
	return __CSG_LOOP(v, i, "sgr");
}
#define atomic64_sub(_i, _v)		atomic64_sub_return(_i, _v)
#define atomic64_sub_and_test(_i, _v)	(atomic64_sub_return(_i, _v) == 0)
#define atomic64_dec(_v)		atomic64_sub_return(1, _v)
#define atomic64_dec_return(_v)		atomic64_sub_return(1, _v)
#define atomic64_dec_and_test(_v)	(atomic64_sub_return(1, _v) == 0)

static __inline__ void atomic64_clear_mask(unsigned long mask, atomic64_t *v)
{
	__CSG_LOOP(v, ~mask, "ngr");
}

static __inline__ void atomic64_set_mask(unsigned long mask, atomic64_t *v)
{
	__CSG_LOOP(v, mask, "ogr");
}

static __inline__ long long atomic64_cmpxchg(atomic64_t *v,
					     long long old, long long new)
{
#if __GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ > 2)
	asm volatile(
		"	csg	%0,%2,%1"
		: "+d" (old), "=Q" (v->counter)
		: "d" (new), "Q" (v->counter)
		: "cc", "memory");
#else /* __GNUC__ */
	asm volatile(
		"	csg	%0,%3,0(%2)"
		: "+d" (old), "=m" (v->counter)
		: "a" (v), "d" (new), "m" (v->counter)
		: "cc", "memory");
#endif /* __GNUC__ */
	return old;
}

static __inline__ int atomic64_add_unless(atomic64_t *v,
					  long long a, long long u)
{
	long long c, old;

	c = atomic64_read(v);
	for (;;) {
		if (unlikely(c == u))
			break;
		old = atomic64_cmpxchg(v, c, c + a);
		if (likely(old == c))
			break;
		c = old;
	}
	return c != u;
}

#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)

#undef __CSG_LOOP
#endif /* __s390x__ */

#define smp_mb__before_atomic_dec()	smp_mb()
#define smp_mb__after_atomic_dec()	smp_mb()
#define smp_mb__before_atomic_inc()	smp_mb()
#define smp_mb__after_atomic_inc()	smp_mb()
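
/*
 * Illustrative sketch, not part of the original header: the barrier
 * macros order an atomic_inc()/atomic_dec() against surrounding
 * memory accesses, e.g. when publishing data before dropping a
 * count (the field names are hypothetical):
 *
 *	obj->ready = 1;
 *	smp_mb__before_atomic_dec();
 *	atomic_dec(&obj->pending);
 *
 * The barrier makes the store to obj->ready visible before the
 * decrement; a reader that sees the new count (with its own read
 * barrier) also sees obj->ready set.
 */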

#include <asm-generic/atomic.h>

#endif /* __KERNEL__ */
#endif /* __ARCH_S390_ATOMIC__ */