#ifndef __ASM_SH_BITOPS_LLSC_H
#define __ASM_SH_BITOPS_LLSC_H
/*
 * set_bit - atomically set bit @nr in the bitmap at @addr.
 *
 * Locates the 32-bit word holding the bit (addr + nr/32), then runs an
 * LL/SC retry loop: movli.l loads the word and opens a reservation,
 * movco.l conditionally stores the OR-ed result; if the store fails the
 * T flag is clear and "bf 1b" retries.  No return value, no barrier.
 */
static inline void set_bit(int nr, volatile void *addr)
{
	volatile unsigned int *word = (volatile unsigned int *)addr + (nr >> 5);
	int bit = 1 << (nr & 0x1f);	/* mask for the bit within the word */
	unsigned long scratch;

	__asm__ __volatile__ (
		"1:						\n\t"
		"movli.l	@%1, %0	! set_bit	\n\t"
		"or		%2, %0			\n\t"
		"movco.l	%0, @%1			\n\t"
		"bf		1b			\n\t"
		: "=&z" (scratch)		/* movli.l/movco.l require R0 */
		: "r" (word), "r" (bit)
		: "t", "memory"
	);
}
/*
 * clear_bit - atomically clear bit @nr in the bitmap at @addr.
 *
 * Same LL/SC retry loop as set_bit, but the word is AND-ed with the
 * inverted mask (the inversion is done in C and fed in as operand %2),
 * so only the target bit is dropped.  No return value, no barrier.
 */
static inline void clear_bit(int nr, volatile void *addr)
{
	volatile unsigned int *word = (volatile unsigned int *)addr + (nr >> 5);
	int bit = 1 << (nr & 0x1f);	/* mask for the bit within the word */
	unsigned long scratch;

	__asm__ __volatile__ (
		"1:						\n\t"
		"movli.l	@%1, %0	! clear_bit	\n\t"
		"and		%2, %0			\n\t"
		"movco.l	%0, @%1			\n\t"
		"bf		1b			\n\t"
		: "=&z" (scratch)		/* movli.l/movco.l require R0 */
		: "r" (word), "r" (~bit)	/* pre-inverted mask */
		: "t", "memory"
	);
}
/*
 * change_bit - atomically toggle bit @nr in the bitmap at @addr.
 *
 * Same LL/SC retry loop as set_bit/clear_bit, with XOR as the update
 * operation so the target bit is flipped.  No return value, no barrier.
 */
static inline void change_bit(int nr, volatile void *addr)
{
	volatile unsigned int *word = (volatile unsigned int *)addr + (nr >> 5);
	int bit = 1 << (nr & 0x1f);	/* mask for the bit within the word */
	unsigned long scratch;

	__asm__ __volatile__ (
		"1:						\n\t"
		"movli.l	@%1, %0	! change_bit	\n\t"
		"xor		%2, %0			\n\t"
		"movco.l	%0, @%1			\n\t"
		"bf		1b			\n\t"
		: "=&z" (scratch)		/* movli.l/movco.l require R0 */
		: "r" (word), "r" (bit)
		: "t", "memory"
	);
}
/*
 * test_and_set_bit - atomically set bit @nr and return its previous value.
 *
 * Inside the LL/SC loop the loaded word is first copied aside (%1), then
 * OR-ed with the mask and conditionally stored.  After the loop succeeds,
 * the saved copy is AND-ed with the mask so the return reduces to the old
 * bit value.  Returns nonzero iff the bit was already set.
 *
 * NOTE(review): unlike test_and_clear_bit/test_and_change_bit below,
 * there is no trailing "synco" barrier here — confirm that asymmetry is
 * intentional.
 */
static inline int test_and_set_bit(int nr, volatile void *addr)
{
	volatile unsigned int *word = (volatile unsigned int *)addr + (nr >> 5);
	int bit = 1 << (nr & 0x1f);	/* mask for the bit within the word */
	int old;
	unsigned long scratch;

	__asm__ __volatile__ (
		"1:							\n\t"
		"movli.l	@%2, %0	! test_and_set_bit	\n\t"
		"mov		%0, %1				\n\t"
		"or		%3, %0				\n\t"
		"movco.l	%0, @%2				\n\t"
		"bf		1b				\n\t"
		"and		%3, %1				\n\t"
		: "=&z" (scratch), "=&r" (old)	/* R0 scratch + old copy */
		: "r" (word), "r" (bit)
		: "t", "memory"
	);

	return old != 0;
}
/*
 * test_and_clear_bit - atomically clear bit @nr and return its previous
 * value.
 *
 * The loaded word is copied aside (%1), AND-ed with the inverted mask
 * (%4, computed in C) and conditionally stored.  The saved copy is then
 * AND-ed with the plain mask (%3) to isolate the old bit, and a "synco"
 * barrier is issued before returning.  Returns nonzero iff the bit was
 * previously set.
 */
static inline int test_and_clear_bit(int nr, volatile void *addr)
{
	volatile unsigned int *word = (volatile unsigned int *)addr + (nr >> 5);
	int bit = 1 << (nr & 0x1f);	/* mask for the bit within the word */
	int old;
	unsigned long scratch;

	__asm__ __volatile__ (
		"1:							\n\t"
		"movli.l	@%2, %0	! test_and_clear_bit	\n\t"
		"mov		%0, %1				\n\t"
		"and		%4, %0				\n\t"
		"movco.l	%0, @%2				\n\t"
		"bf		1b				\n\t"
		"and		%3, %1				\n\t"
		"synco						\n\t"
		: "=&z" (scratch), "=&r" (old)	/* R0 scratch + old copy */
		: "r" (word), "r" (bit), "r" (~bit)
		: "t", "memory"
	);

	return old != 0;
}
/*
 * test_and_change_bit - atomically toggle bit @nr and return its previous
 * value.
 *
 * The loaded word is copied aside (%1), XOR-ed with the mask and
 * conditionally stored.  The saved copy is AND-ed with the mask to
 * isolate the old bit, and a "synco" barrier is issued before returning.
 * Returns nonzero iff the bit was previously set.
 */
static inline int test_and_change_bit(int nr, volatile void *addr)
{
	volatile unsigned int *word = (volatile unsigned int *)addr + (nr >> 5);
	int bit = 1 << (nr & 0x1f);	/* mask for the bit within the word */
	int old;
	unsigned long scratch;

	__asm__ __volatile__ (
		"1:							\n\t"
		"movli.l	@%2, %0	! test_and_change_bit	\n\t"
		"mov		%0, %1				\n\t"
		"xor		%3, %0				\n\t"
		"movco.l	%0, @%2				\n\t"
		"bf		1b				\n\t"
		"and		%3, %1				\n\t"
		"synco						\n\t"
		: "=&z" (scratch), "=&r" (old)	/* R0 scratch + old copy */
		: "r" (word), "r" (bit)
		: "t", "memory"
	);

	return old != 0;
}
#include <asm-generic/bitops/non-atomic.h>
#endif /* __ASM_SH_BITOPS_LLSC_H */