2010-10-27 17:28:52 +01:00
/* MN10300 Atomic counter operations
 *
 * Copyright (C) 2007 Red Hat, Inc. All Rights Reserved.
 * Written by David Howells (dhowells@redhat.com)
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public Licence
 * as published by the Free Software Foundation; either version
 * 2 of the Licence, or (at your option) any later version.
 */
# ifndef _ASM_ATOMIC_H
# define _ASM_ATOMIC_H
# include <asm/irqflags.h>
2012-03-28 18:30:02 +01:00
# include <asm/cmpxchg.h>
2014-03-13 19:00:36 +01:00
# include <asm/barrier.h>
2010-10-27 17:28:52 +01:00
# ifndef CONFIG_SMP
2010-05-24 14:32:56 -07:00
# include <asm-generic/atomic.h>
2010-10-27 17:28:52 +01:00
# else
/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc..
 */
# define ATOMIC_INIT(i) { (i) }
# ifdef __KERNEL__
/**
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.  Note that the guaranteed
 * useful range of an atomic_t is only 24 bits.
 */
2015-09-18 11:13:10 +02:00
# define atomic_read(v) READ_ONCE((v)->counter)
2010-10-27 17:28:52 +01:00
/**
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.  Note that the guaranteed
 * useful range of an atomic_t is only 24 bits.
 */
2015-09-18 11:13:10 +02:00
# define atomic_set(v, i) WRITE_ONCE(((v)->counter), (i))
2010-10-27 17:28:52 +01:00
2014-03-26 17:59:04 +01:00
/*
 * ATOMIC_OP() - generate atomic_<op>() (no return value).
 *
 * Performs v->counter = v->counter <op> i atomically using the MN10300
 * atomic-operations unit: writing the target address to _AAR latches it,
 * _ADR then reads/writes the guarded data, and _ASR reports whether the
 * read-modify-write sequence was disturbed.  A non-zero status makes the
 * "bne 1b" retry the whole sequence until it completes atomically.
 * NOTE(review): _AAR/_ADR/_ASR semantics are taken from the access
 * pattern here; confirm against the MN10300 hardware manual.
 */
#define ATOMIC_OP(op)							\
static inline void atomic_##op(int i, atomic_t *v)			\
{									\
	int retval, status;						\
									\
	asm volatile(							\
		"1:	mov	%4,(_AAR,%3)	\n"			\
		"	mov	(_ADR,%3),%1	\n"			\
		"	" #op "	%5,%1		\n"			\
		"	mov	%1,(_ADR,%3)	\n"			\
		"	mov	(_ADR,%3),%0	\n"	/* flush */	\
		"	mov	(_ASR,%3),%0	\n"			\
		"	or	%0,%0		\n"			\
		"	bne	1b		\n"			\
		: "=&r"(status), "=&r"(retval), "=m"(v->counter)	\
		: "a"(ATOMIC_OPS_BASE_ADDR), "r"(&v->counter), "r"(i)	\
		: "memory", "cc");					\
}
2010-10-27 17:28:52 +01:00
2014-03-26 17:59:04 +01:00
/*
 * ATOMIC_OP_RETURN() - generate atomic_<op>_return().
 *
 * Same retry loop as ATOMIC_OP(), but the post-operation value is kept
 * in %1 (retval) and returned to the caller.
 */
#define ATOMIC_OP_RETURN(op)						\
static inline int atomic_##op##_return(int i, atomic_t *v)		\
{									\
	int retval, status;						\
									\
	asm volatile(							\
		"1:	mov	%4,(_AAR,%3)	\n"			\
		"	mov	(_ADR,%3),%1	\n"			\
		"	" #op "	%5,%1		\n"			\
		"	mov	%1,(_ADR,%3)	\n"			\
		"	mov	(_ADR,%3),%0	\n"	/* flush */	\
		"	mov	(_ASR,%3),%0	\n"			\
		"	or	%0,%0		\n"			\
		"	bne	1b		\n"			\
		: "=&r"(status), "=&r"(retval), "=m"(v->counter)	\
		: "a"(ATOMIC_OPS_BASE_ADDR), "r"(&v->counter), "r"(i)	\
		: "memory", "cc");					\
	return retval;							\
}
2016-04-18 01:16:05 +02:00
/*
 * ATOMIC_FETCH_OP() - generate atomic_fetch_<op>().
 *
 * Same retry loop as ATOMIC_OP(), but the pre-operation value is
 * preserved in %1 (retval) while the new value is computed in %0 and
 * written back; the OLD value is returned to the caller.
 */
#define ATOMIC_FETCH_OP(op)						\
static inline int atomic_fetch_##op(int i, atomic_t *v)			\
{									\
	int retval, status;						\
									\
	asm volatile(							\
		"1:	mov	%4,(_AAR,%3)	\n"			\
		"	mov	(_ADR,%3),%1	\n"			\
		"	mov	%1,%0		\n"			\
		"	" #op "	%5,%0		\n"			\
		"	mov	%0,(_ADR,%3)	\n"			\
		"	mov	(_ADR,%3),%0	\n"	/* flush */	\
		"	mov	(_ASR,%3),%0	\n"			\
		"	or	%0,%0		\n"			\
		"	bne	1b		\n"			\
		: "=&r"(status), "=&r"(retval), "=m"(v->counter)	\
		: "a"(ATOMIC_OPS_BASE_ADDR), "r"(&v->counter), "r"(i)	\
		: "memory", "cc");					\
	return retval;							\
}
/* add/sub get all three forms: void, *_return and fetch_*. */
#define ATOMIC_OPS(op) ATOMIC_OP(op) ATOMIC_OP_RETURN(op) ATOMIC_FETCH_OP(op)

ATOMIC_OPS(add)
ATOMIC_OPS(sub)

/* The bitwise ops have no *_return variant, only the fetch form. */
#undef ATOMIC_OPS
#define ATOMIC_OPS(op) ATOMIC_OP(op) ATOMIC_FETCH_OP(op)

ATOMIC_OPS(and)
ATOMIC_OPS(or)
ATOMIC_OPS(xor)

/* The generator macros are private to this header. */
#undef ATOMIC_OPS
#undef ATOMIC_FETCH_OP
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP
2010-10-27 17:28:52 +01:00
/**
 * atomic_add_negative - add and test if the result is negative
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns true if the result is
 * negative, false otherwise.
 */
static inline int atomic_add_negative(int i, atomic_t *v)
{
	int sum = atomic_add_return(i, v);

	return sum < 0;
}
/**
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1; the updated value is discarded.
 */
static inline void atomic_inc(atomic_t *v)
{
	(void)atomic_add_return(1, v);
}
/**
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1; the updated value is discarded.
 */
static inline void atomic_dec(atomic_t *v)
{
	(void)atomic_sub_return(1, v);
}
# define atomic_dec_return(v) atomic_sub_return(1, (v))
# define atomic_inc_return(v) atomic_add_return(1, (v))
# define atomic_sub_and_test(i, v) (atomic_sub_return((i), (v)) == 0)
# define atomic_dec_and_test(v) (atomic_sub_return(1, (v)) == 0)
# define atomic_inc_and_test(v) (atomic_add_return(1, (v)) == 0)
2011-07-26 16:09:07 -07:00
/**
 * __atomic_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as @v was not already @u.
 * Returns the old value of @v (the statement-expression yields c).
 * The cmpxchg loop retries with the freshly observed value whenever
 * another CPU changed the counter between the read and the exchange.
 */
#define __atomic_add_unless(v, a, u)				\
({								\
	int c, old;						\
	c = atomic_read(v);					\
	while (c != (u) && (old = atomic_cmpxchg((v), c, c + (a))) != c) \
		c = old;					\
	c;							\
})
2012-03-28 18:30:02 +01:00
# define atomic_xchg(ptr, v) (xchg(&(ptr)->counter, (v)))
# define atomic_cmpxchg(v, old, new) (cmpxchg(&((v)->counter), (old), (new)))
2010-10-27 17:28:52 +01:00
# endif /* __KERNEL__ */
# endif /* CONFIG_SMP */
# endif /* _ASM_ATOMIC_H */