#ifndef __ASM_SH_CMPXCHG_LLSC_H
#define __ASM_SH_CMPXCHG_LLSC_H
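
/*
 * xchg/cmpxchg primitives built on the SH-4A LL/SC instruction pair:
 * movli.l (load-linked) and movco.l (store-conditional).
 */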
static inline unsigned long xchg_u32(volatile u32 *m, unsigned long val)
{
	unsigned long retval;
	unsigned long tmp;

	/*
	 * LL/SC retry loop: movli.l loads *m and opens the link,
	 * movco.l stores the new value conditionally.  The T bit is
	 * clear if the store failed, so "bf 1b" retries until it
	 * succeeds; synco then orders the access.
	 */
	__asm__ __volatile__ (
		"1:				\n\t"
		"movli.l	@%2, %0	! xchg_u32\n\t"
		"mov		%0, %1		\n\t"
		"mov		%3, %0		\n\t"
		"movco.l	%0, @%2		\n\t"
		"bf		1b		\n\t"
		"synco				\n\t"
		: "=&z" (tmp), "=&r" (retval)
		: "r" (m), "r" (val)
		: "t", "memory"
	);

	return retval;
}
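
/*
 * Illustrative sketch, not part of the original header: a minimal
 * test-and-set loop built on xchg_u32.  The function name and the
 * 0 = unlocked / 1 = locked protocol are assumptions for the example.
 */
#if 0
static inline void example_spin_lock(volatile u32 *lock)
{
	/* Swap in 1; a returned 0 means the lock was free and is now ours. */
	while (xchg_u32(lock, 1) != 0)
		; /* spin until the holder stores 0 */
}
#endif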
static inline unsigned long xchg_u8(volatile u8 *m, unsigned long val)
{
	unsigned long retval;
	unsigned long tmp;

	/* Same LL/SC loop; the new value is masked to 8 bits in the
	 * input operand before the store. */
	__asm__ __volatile__ (
		"1:				\n\t"
		"movli.l	@%2, %0	! xchg_u8\n\t"
		"mov		%0, %1		\n\t"
		"mov		%3, %0		\n\t"
		"movco.l	%0, @%2		\n\t"
		"bf		1b		\n\t"
		"synco				\n\t"
		: "=&z" (tmp), "=&r" (retval)
		: "r" (m), "r" (val & 0xff)
		: "t", "memory"
	);

	return retval;
}
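
/*
 * Note: movli.l/movco.l are longword operations, so xchg_u8 as written
 * performs a 32-bit LL/SC access at the given address and stores
 * (val & 0xff) back as a full word.
 */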
static inline unsigned long
__cmpxchg_u32(volatile int *m, unsigned long old, unsigned long new)
{
	unsigned long retval;
	unsigned long tmp;

	/*
	 * Load *m under the link and compare it with the expected
	 * value.  On a match, "new" replaces the loaded value; on a
	 * mismatch, the branch to 2f leaves the loaded value in the
	 * register, so the conditional store writes it back unchanged
	 * and completes the LL/SC pair.  Either way, a failed movco.l
	 * restarts the whole sequence.
	 */
	__asm__ __volatile__ (
		"1:				\n\t"
		"movli.l	@%2, %0	! __cmpxchg_u32\n\t"
		"mov		%0, %1		\n\t"
		"cmp/eq		%1, %3		\n\t"
		"bf		2f		\n\t"
		"mov		%4, %0		\n\t"
		"2:				\n\t"
		"movco.l	%0, @%2		\n\t"
		"bf		1b		\n\t"
		"synco				\n\t"
		: "=&z" (tmp), "=&r" (retval)
		: "r" (m), "r" (old), "r" (new)
		: "t", "memory"
	);

	return retval;
}
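
/*
 * Illustrative sketch, not part of the original header: the classic
 * compare-and-swap retry loop, here an atomic increment built on
 * __cmpxchg_u32.  The function name is an assumption for the example.
 */
#if 0
static inline unsigned long example_atomic_inc(volatile int *counter)
{
	unsigned long old, prev;

	do {
		old = *counter;
		/* __cmpxchg_u32 returns the value it observed; a match
		 * with "old" means old + 1 was successfully stored. */
		prev = __cmpxchg_u32(counter, old, old + 1);
	} while (prev != old);

	return old + 1;
}
#endif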
#endif /* __ASM_SH_CMPXCHG_LLSC_H */