#ifndef __ARCH_S390_ATOMIC__
#define __ARCH_S390_ATOMIC__

/*
 * Copyright 1999,2009 IBM Corp.
 * Author(s): Martin Schwidefsky <schwidefsky@de.ibm.com>,
 *	      Denis Joseph Barrow,
 *	      Arnd Bergmann <arndb@de.ibm.com>,
 *
 * Atomic operations that C can't guarantee us.
 * Useful for resource counting etc.
 * s390 uses 'Compare And Swap' for atomicity in SMP environment.
 *
 */

#include <linux/compiler.h>
#include <linux/types.h>
#include <asm/system.h>

#define ATOMIC_INIT(i)  { (i) }
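
/*
 * __CS_LOOP implements a read-modify-write as a Compare And Swap retry
 * loop: load the old value, apply op_string to a copy in a second
 * register, then try to CS the result back into the counter.  If another
 * CPU changed the counter in the meantime, CS fails (condition code
 * "low") and the loop retries with the updated old value.
 */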
#define __CS_LOOP(ptr, op_val, op_string) ({				\
	int old_val, new_val;						\
	asm volatile(							\
		"	l	%0,%2\n"				\
		"0:	lr	%1,%0\n"				\
		op_string "	%1,%3\n"				\
		"	cs	%0,%1,%2\n"				\
		"	jl	0b"					\
		: "=&d" (old_val), "=&d" (new_val),			\
		  "=Q" (((atomic_t *)(ptr))->counter)			\
		: "d" (op_val), "Q" (((atomic_t *)(ptr))->counter)	\
		: "cc", "memory");					\
	new_val;							\
})
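
/*
 * Aligned 32-bit loads and stores are atomic on s390; the inline asm in
 * atomic_read()/atomic_set() pins the access to a single L/ST instruction
 * so the compiler cannot tear the access or reuse a stale value.
 */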
static inline int atomic_read(const atomic_t *v)
{
	int c;

	asm volatile(
		"	l	%0,%1\n"
		: "=d" (c) : "Q" (v->counter));
	return c;
}

static inline void atomic_set(atomic_t *v, int i)
{
	asm volatile(
		"	st	%1,%0\n"
		: "=Q" (v->counter) : "d" (i));
}
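
/*
 * atomic_add_return() adds i inside the CS loop ("ar") and returns the
 * resulting value; the add/inc/dec helpers below are thin wrappers around
 * the two *_return primitives.
 */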
static inline int atomic_add_return(int i, atomic_t *v)
{
	return __CS_LOOP(v, i, "ar");
}
#define atomic_add(_i, _v)		atomic_add_return(_i, _v)
#define atomic_add_negative(_i, _v)	(atomic_add_return(_i, _v) < 0)
#define atomic_inc(_v)			atomic_add_return(1, _v)
#define atomic_inc_return(_v)		atomic_add_return(1, _v)
#define atomic_inc_and_test(_v)		(atomic_add_return(1, _v) == 0)

static inline int atomic_sub_return(int i, atomic_t *v)
{
	return __CS_LOOP(v, i, "sr");
}
#define atomic_sub(_i, _v)		atomic_sub_return(_i, _v)
#define atomic_sub_and_test(_i, _v)	(atomic_sub_return(_i, _v) == 0)
#define atomic_dec(_v)			atomic_sub_return(1, _v)
#define atomic_dec_return(_v)		atomic_sub_return(1, _v)
#define atomic_dec_and_test(_v)		(atomic_sub_return(1, _v) == 0)

static inline void atomic_clear_mask(unsigned long mask, atomic_t *v)
{
	__CS_LOOP(v, ~mask, "nr");
}

static inline void atomic_set_mask(unsigned long mask, atomic_t *v)
{
	__CS_LOOP(v, mask, "or");
}

#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
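
/*
 * atomic_cmpxchg() maps onto a single CS instruction: old serves both as
 * the expected value and, on return, as the value actually found in
 * memory, so the caller can tell from the return value whether the swap
 * took place.
 */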
static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
{
	asm volatile(
		"	cs	%0,%2,%1"
		: "+d" (old), "=Q" (v->counter)
		: "d" (new), "Q" (v->counter)
		: "cc", "memory");
	return old;
}
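
/**
 * atomic_add_unless - add to the counter unless it has a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to @v...
 * @u: ...unless @v is equal to @u
 *
 * Atomically adds @a to @v, so long as @v was not @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */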
static inline int atomic_add_unless(atomic_t *v, int a, int u)
{
	int c, old;

	c = atomic_read(v);
	for (;;) {
		if (unlikely(c == u))
			break;
		old = atomic_cmpxchg(v, c, c + a);
		if (likely(old == c))
			break;
		c = old;
	}
	return c != u;
}

#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)

#undef __CS_LOOP

#define ATOMIC64_INIT(i)  { (i) }

#ifdef CONFIG_64BIT
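
/*
 * 64-bit variant of the CS loop: LG/LGR/CSG operate on an 8-byte counter,
 * but the retry structure is identical to __CS_LOOP above.
 */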
2005-04-16 15:20:36 -07:00
# define __CSG_LOOP(ptr, op_val, op_string) ({ \
2009-12-07 12:52:05 +01:00
long long old_val , new_val ; \
2006-09-28 16:56:43 +02:00
asm volatile ( \
" lg %0,%2 \n " \
" 0: lgr %1,%0 \n " \
op_string " %1,%3 \n " \
" csg %0,%1,%2 \n " \
" jl 0b " \
: " =&d " ( old_val ) , " =&d " ( new_val ) , \
" =Q " ( ( ( atomic_t * ) ( ptr ) ) - > counter ) \
: " d " ( op_val ) , " Q " ( ( ( atomic_t * ) ( ptr ) ) - > counter ) \
2009-09-11 10:28:35 +02:00
: " cc " , " memory " ) ; \
2005-04-16 15:20:36 -07:00
new_val ; \
} )

static inline long long atomic64_read(const atomic64_t *v)
{
	long long c;

	asm volatile(
		"	lg	%0,%1\n"
		: "=d" (c) : "Q" (v->counter));
	return c;
}

static inline void atomic64_set(atomic64_t *v, long long i)
{
	asm volatile(
		"	stg	%1,%0\n"
		: "=Q" (v->counter) : "d" (i));
}

static inline long long atomic64_add_return(long long i, atomic64_t *v)
{
	return __CSG_LOOP(v, i, "agr");
}

static inline long long atomic64_sub_return(long long i, atomic64_t *v)
{
	return __CSG_LOOP(v, i, "sgr");
}

static inline void atomic64_clear_mask(unsigned long mask, atomic64_t *v)
{
	__CSG_LOOP(v, ~mask, "ngr");
}

static inline void atomic64_set_mask(unsigned long mask, atomic64_t *v)
{
	__CSG_LOOP(v, mask, "ogr");
}

#define atomic64_xchg(v, new) (xchg(&((v)->counter), new))

static inline long long atomic64_cmpxchg(atomic64_t *v,
					 long long old, long long new)
{
	asm volatile(
		"	csg	%0,%2,%1"
		: "+d" (old), "=Q" (v->counter)
		: "d" (new), "Q" (v->counter)
		: "cc", "memory");
	return old;
}

#undef __CSG_LOOP

#else /* CONFIG_64BIT */

typedef struct {
	long long counter;
} atomic64_t;
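
/*
 * A 31-bit kernel cannot rely on 64-bit general purpose registers, so a
 * 64-bit value lives in an even/odd register pair (register_pair): LM and
 * STM move the pair between registers and memory, and CDS (Compare Double
 * and Swap) provides the 64-bit atomic update.
 */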

static inline long long atomic64_read(const atomic64_t *v)
{
	register_pair rp;

	asm volatile(
		"	lm	%0,%N0,%1"
		: "=&d" (rp) : "Q" (v->counter));
	return rp.pair;
}

static inline void atomic64_set(atomic64_t *v, long long i)
{
	register_pair rp = {.pair = i};

	asm volatile(
		"	stm	%1,%N1,%0"
		: "=Q" (v->counter) : "d" (rp));
}

static inline long long atomic64_xchg(atomic64_t *v, long long new)
{
	register_pair rp_new = {.pair = new};
	register_pair rp_old;

	asm volatile(
		"	lm	%0,%N0,%1\n"
		"0:	cds	%0,%2,%1\n"
		"	jl	0b\n"
		: "=&d" (rp_old), "=Q" (v->counter)
		: "d" (rp_new), "Q" (v->counter)
		: "cc");
	return rp_old.pair;
}

static inline long long atomic64_cmpxchg(atomic64_t *v,
					 long long old, long long new)
{
	register_pair rp_old = {.pair = old};
	register_pair rp_new = {.pair = new};

	asm volatile(
		"	cds	%0,%2,%1"
		: "+&d" (rp_old), "=Q" (v->counter)
		: "d" (rp_new), "Q" (v->counter)
		: "cc");
	return rp_old.pair;
}
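
/*
 * Without CSG, the remaining 64-bit operations are built as generic
 * cmpxchg loops: read the old value, compute the new one, and retry the
 * CDS-based cmpxchg until no other CPU modified the counter in between.
 */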
static inline long long atomic64_add_return(long long i, atomic64_t *v)
{
	long long old, new;

	do {
		old = atomic64_read(v);
		new = old + i;
	} while (atomic64_cmpxchg(v, old, new) != old);
	return new;
}

static inline long long atomic64_sub_return(long long i, atomic64_t *v)
{
	long long old, new;

	do {
		old = atomic64_read(v);
		new = old - i;
	} while (atomic64_cmpxchg(v, old, new) != old);
	return new;
}

static inline void atomic64_set_mask(unsigned long long mask, atomic64_t *v)
{
	long long old, new;

	do {
		old = atomic64_read(v);
		new = old | mask;
	} while (atomic64_cmpxchg(v, old, new) != old);
}

static inline void atomic64_clear_mask(unsigned long long mask, atomic64_t *v)
{
	long long old, new;

	do {
		old = atomic64_read(v);
		new = old & mask;
	} while (atomic64_cmpxchg(v, old, new) != old);
}

#endif /* CONFIG_64BIT */
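
/**
 * atomic64_add_unless - add to the counter unless it has a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to @v...
 * @u: ...unless @v is equal to @u
 *
 * Atomically adds @a to @v, so long as @v was not @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */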
static inline int atomic64_add_unless(atomic64_t *v, long long a, long long u)
{
	long long c, old;

	c = atomic64_read(v);
	for (;;) {
		if (unlikely(c == u))
			break;
		old = atomic64_cmpxchg(v, c, c + a);
		if (likely(old == c))
			break;
		c = old;
	}
	return c != u;
}
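
/*
 * atomic64_dec_if_positive() only stores the decremented value while the
 * result is still non-negative; it returns the (possibly negative) result
 * of the decrement either way, so the caller can tell whether the store
 * happened.
 */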
static inline long long atomic64_dec_if_positive(atomic64_t *v)
{
	long long c, old, dec;

	c = atomic64_read(v);
	for (;;) {
		dec = c - 1;
		if (unlikely(dec < 0))
			break;
		old = atomic64_cmpxchg((v), c, dec);
		if (likely(old == c))
			break;
		c = old;
	}
	return dec;
}

#define atomic64_add(_i, _v)		atomic64_add_return(_i, _v)
#define atomic64_add_negative(_i, _v)	(atomic64_add_return(_i, _v) < 0)
#define atomic64_inc(_v)		atomic64_add_return(1, _v)
#define atomic64_inc_return(_v)		atomic64_add_return(1, _v)
#define atomic64_inc_and_test(_v)	(atomic64_add_return(1, _v) == 0)
#define atomic64_sub(_i, _v)		atomic64_sub_return(_i, _v)
#define atomic64_sub_and_test(_i, _v)	(atomic64_sub_return(_i, _v) == 0)
#define atomic64_dec(_v)		atomic64_sub_return(1, _v)
#define atomic64_dec_return(_v)		atomic64_sub_return(1, _v)
#define atomic64_dec_and_test(_v)	(atomic64_sub_return(1, _v) == 0)
#define atomic64_inc_not_zero(v)	atomic64_add_unless((v), 1, 0)
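
/*
 * The CS/CSG based operations above already serialize, but the generic
 * atomic API still expects these barrier hooks; defining them as smp_mb()
 * is the conservative choice.
 */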
#define smp_mb__before_atomic_dec()	smp_mb()
#define smp_mb__after_atomic_dec()	smp_mb()
#define smp_mb__before_atomic_inc()	smp_mb()
#define smp_mb__after_atomic_inc()	smp_mb()

#include <asm-generic/atomic-long.h>

#endif /* __ARCH_S390_ATOMIC__ */