/* x86 futex atomic operations (kernel-internal asm/futex.h). */
# ifndef _ASM_X86_FUTEX_H
# define _ASM_X86_FUTEX_H
# ifdef __KERNEL__
# include <linux/futex.h>
# include <asm/asm.h>
# include <asm/errno.h>
# include <asm/processor.h>
# include <asm/system.h>
# include <asm/uaccess.h>
/*
 * Perform a single-instruction atomic futex operation on the user word
 * *uaddr.  "insn" must be one x86 instruction that reads/writes %0 and
 * operates on the memory operand %2 (e.g. xchgl, lock; xaddl).
 *
 * Outputs: oldval <- previous/result value from the instruction (%0),
 *          ret    <- 0 on success, -EFAULT if the user access faulted.
 * A fault at label 1 is redirected via the exception table to the
 * .fixup stub at label 3, which stores -EFAULT into ret and resumes
 * after the instruction at label 2.
 */
#define __futex_atomic_op1(insn, ret, oldval, uaddr, oparg)	\
	asm volatile("1:\t" insn "\n"				\
		     "2:\t.section .fixup,\"ax\"\n"		\
		     "3:\tmov\t%3, %1\n"			\
		     "\tjmp\t2b\n"				\
		     "\t.previous\n"				\
		     _ASM_EXTABLE(1b, 3b)			\
		     : "=r" (oldval), "=r" (ret), "+m" (*uaddr)	\
		     : "i" (-EFAULT), "0" (oparg), "1" (0))
/*
 * Perform a read-modify-write futex operation that needs two steps:
 * load the old value, apply "insn" to a temporary, then commit with
 * lock; cmpxchgl, retrying from the load if another CPU changed the
 * word in between.
 *
 * NOTE: this macro is not hygienic — it refers directly to a caller
 * local named "tem" for the scratch register (%3).
 *
 * Outputs: oldval <- value observed before the update (%0, in %eax for
 *                    cmpxchgl), ret <- 0 on success, -EFAULT on fault.
 * Faults at either the load (label 1) or the cmpxchg (label 2) jump to
 * the .fixup stub at label 4, which sets ret = -EFAULT and exits the
 * retry loop at label 3.
 */
#define __futex_atomic_op2(insn, ret, oldval, uaddr, oparg)	\
	asm volatile("1:\tmovl %2, %0\n"			\
		     "\tmovl\t%0, %3\n"				\
		     "\t" insn "\n"				\
		     "2:\tlock; cmpxchgl %3, %2\n"		\
		     "\tjnz\t1b\n"				\
		     "3:\t.section .fixup,\"ax\"\n"		\
		     "4:\tmov\t%5, %1\n"			\
		     "\tjmp\t3b\n"				\
		     "\t.previous\n"				\
		     _ASM_EXTABLE(1b, 4b)			\
		     _ASM_EXTABLE(2b, 4b)			\
		     : "=&a" (oldval), "=&r" (ret),		\
		       "+m" (*uaddr), "=&r" (tem)		\
		     : "r" (oparg), "i" (-EFAULT), "1" (0))
/*
 * Decode and execute a FUTEX_WAKE_OP operation word against the user
 * word *uaddr.
 *
 * encoded_op packs: op (bits 28-31, incl. FUTEX_OP_OPARG_SHIFT flag),
 * cmp (bits 24-27), a 12-bit signed operand and a 12-bit signed
 * comparison argument.
 *
 * Returns a negative errno on failure, otherwise the boolean result of
 * comparing the word's previous value against the comparison argument.
 */
static inline int futex_atomic_op_inuser(int encoded_op, int __user *uaddr)
{
	int opcode = (encoded_op >> 28) & 7;
	int cmp_type = (encoded_op >> 24) & 15;
	/* Shift-left/shift-right pairs sign-extend the 12-bit fields. */
	int arg = (encoded_op << 8) >> 20;
	int cmp_arg = (encoded_op << 20) >> 20;
	int old = 0, err, tem;	/* "tem" is required by __futex_atomic_op2 */

	/* With the SHIFT flag, the operand is a shift count for 1 << n. */
	if (encoded_op & (FUTEX_OP_OPARG_SHIFT << 28))
		arg = 1 << arg;

	if (!access_ok(VERIFY_WRITE, uaddr, sizeof(int)))
		return -EFAULT;

#if defined(CONFIG_X86_32) && !defined(CONFIG_X86_BSWAP)
	/* Real i386 machines can only support FUTEX_OP_SET */
	if (opcode != FUTEX_OP_SET && boot_cpu_data.x86 == 3)
		return -ENOSYS;
#endif

	/* Faults inside the asm must not sleep; fixup returns -EFAULT. */
	pagefault_disable();

	switch (opcode) {
	case FUTEX_OP_SET:
		__futex_atomic_op1("xchgl %0, %2", err, old, uaddr, arg);
		break;
	case FUTEX_OP_ADD:
		__futex_atomic_op1("lock; xaddl %0, %2", err, old,
				   uaddr, arg);
		break;
	case FUTEX_OP_OR:
		__futex_atomic_op2("orl %4, %3", err, old, uaddr, arg);
		break;
	case FUTEX_OP_ANDN:
		__futex_atomic_op2("andl %4, %3", err, old, uaddr, ~arg);
		break;
	case FUTEX_OP_XOR:
		__futex_atomic_op2("xorl %4, %3", err, old, uaddr, arg);
		break;
	default:
		err = -ENOSYS;
	}

	pagefault_enable();

	if (err)
		return err;

	/* Update succeeded: report the requested comparison on the old value. */
	switch (cmp_type) {
	case FUTEX_OP_CMP_EQ:
		return old == cmp_arg;
	case FUTEX_OP_CMP_NE:
		return old != cmp_arg;
	case FUTEX_OP_CMP_LT:
		return old < cmp_arg;
	case FUTEX_OP_CMP_GE:
		return old >= cmp_arg;
	case FUTEX_OP_CMP_LE:
		return old <= cmp_arg;
	case FUTEX_OP_CMP_GT:
		return old > cmp_arg;
	default:
		return -ENOSYS;
	}
}
/*
 * Atomically compare-and-exchange the user word *uaddr: if it equals
 * oldval, store newval.  Callable in atomic context (page faults are
 * caught by the exception-table fixup rather than handled).
 *
 * Returns the value previously found at *uaddr (equal to oldval on a
 * successful exchange), or -EFAULT if the user access faulted.
 */
static inline int futex_atomic_cmpxchg_inatomic(int __user *uaddr, int oldval,
						int newval)
{
#if defined(CONFIG_X86_32) && !defined(CONFIG_X86_BSWAP)
	/* Real i386 machines have no cmpxchg instruction */
	if (boot_cpu_data.x86 == 3)
		return -ENOSYS;
#endif

	if (!access_ok(VERIFY_WRITE, uaddr, sizeof(int)))
		return -EFAULT;

	/* oldval is tied to %eax ("0"/"=a"), as cmpxchgl requires; a
	 * fault at label 1 jumps to the fixup at 3, which overwrites the
	 * result with -EFAULT and resumes at 2. */
	asm volatile("1:\tlock; cmpxchgl %3, %1\n"
		     "2:\t.section .fixup,\"ax\"\n"
		     "3:\tmov     %2, %0\n"
		     "\tjmp     2b\n"
		     "\t.previous\n"
		     _ASM_EXTABLE(1b, 3b)
		     : "=a" (oldval), "+m" (*uaddr)
		     : "i" (-EFAULT), "r" (newval), "0" (oldval)
		     : "memory"
	);

	return oldval;
}
# endif
# endif