#ifndef _ASM_X86_UACCESS_64_H
#define _ASM_X86_UACCESS_64_H

/*
 * User space memory access functions
 */
#include <linux/compiler.h>
#include <linux/errno.h>
#include <linux/lockdep.h>
#include <asm/alternative.h>
#include <asm/cpufeature.h>
#include <asm/page.h>

/*
 * Copy To/From Userspace
 */

/* Handles exceptions in both to and from, but doesn't do access_ok */
__must_check unsigned long
copy_user_generic_string(void *to, const void *from, unsigned len);
__must_check unsigned long
copy_user_generic_unrolled(void *to, const void *from, unsigned len);

static __always_inline __must_check unsigned long
copy_user_generic(void *to, const void *from, unsigned len)
{
	unsigned ret;

	alternative_call(copy_user_generic_unrolled,
			 copy_user_generic_string,
			 X86_FEATURE_REP_GOOD,
			 ASM_OUTPUT2("=a" (ret), "=D" (to), "=S" (from),
				     "=d" (len)),
			 "1" (to), "2" (from), "3" (len)
			 : "memory", "rcx", "r8", "r9", "r10", "r11");
	return ret;
}
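
/*
 * Note: alternative_call() patches the call target at boot. On CPUs that
 * advertise X86_FEATURE_REP_GOOD (fast "rep movs"), calls to
 * copy_user_generic_unrolled are rewritten to go to
 * copy_user_generic_string instead; the register constraints above keep
 * the calling convention identical for both variants.
 */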

__must_check unsigned long
_copy_to_user(void __user *to, const void *from, unsigned len);
__must_check unsigned long
_copy_from_user(void *to, const void __user *from, unsigned len);
__must_check unsigned long
copy_in_user(void __user *to, const void __user *from, unsigned len);

static inline unsigned long __must_check copy_from_user(void *to,
					  const void __user *from,
					  unsigned long n)
{
	int sz = __compiletime_object_size(to);

	might_fault();
	/* sz == -1 means the object size is unknown at compile time */
	if (likely(sz == -1 || sz >= n))
		n = _copy_from_user(to, from, n);
#ifdef CONFIG_DEBUG_VM
	else
		WARN(1, "Buffer overflow detected!\n");
#endif
	return n;
}

static __always_inline __must_check
int copy_to_user(void __user *dst, const void *src, unsigned size)
{
	might_fault();

	return _copy_to_user(dst, src, size);
}
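
/*
 * Illustrative only (not part of this header): a typical caller copies a
 * struct in, works on the kernel copy, and copies the result back. Both
 * helpers return the number of bytes that could NOT be copied, so any
 * nonzero return is normally turned into -EFAULT. The names "uarg" and
 * "karg" below are hypothetical.
 *
 *	struct foo karg;
 *
 *	if (copy_from_user(&karg, uarg, sizeof(karg)))
 *		return -EFAULT;
 *	karg.flags |= FOO_SEEN;
 *	if (copy_to_user(uarg, &karg, sizeof(karg)))
 *		return -EFAULT;
 */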

static __always_inline __must_check
int __copy_from_user(void *dst, const void __user *src, unsigned size)
{
	int ret = 0;

	might_fault();
	if (!__builtin_constant_p(size))
		return copy_user_generic(dst, (__force void *)src, size);
	/* open-code small constant sizes as one or two direct moves */
	switch (size) {
	case 1: __get_user_asm(*(u8 *)dst, (u8 __user *)src,
			       ret, "b", "b", "=q", 1);
		return ret;
	case 2: __get_user_asm(*(u16 *)dst, (u16 __user *)src,
			       ret, "w", "w", "=r", 2);
		return ret;
	case 4: __get_user_asm(*(u32 *)dst, (u32 __user *)src,
			       ret, "l", "k", "=r", 4);
		return ret;
	case 8: __get_user_asm(*(u64 *)dst, (u64 __user *)src,
			       ret, "q", "", "=r", 8);
		return ret;
	case 10:
		__get_user_asm(*(u64 *)dst, (u64 __user *)src,
			       ret, "q", "", "=r", 10);
		if (unlikely(ret))
			return ret;
		__get_user_asm(*(u16 *)(8 + (char *)dst),
			       (u16 __user *)(8 + (char __user *)src),
			       ret, "w", "w", "=r", 2);
		return ret;
	case 16:
		__get_user_asm(*(u64 *)dst, (u64 __user *)src,
			       ret, "q", "", "=r", 16);
		if (unlikely(ret))
			return ret;
		__get_user_asm(*(u64 *)(8 + (char *)dst),
			       (u64 __user *)(8 + (char __user *)src),
			       ret, "q", "", "=r", 8);
		return ret;
	default:
		return copy_user_generic(dst, (__force void *)src, size);
	}
}
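
/*
 * Illustrative only: the __-prefixed variants skip the access_ok() range
 * check, so they are for callers that have already validated the user
 * pointer themselves, e.g. (sketch, with hypothetical names):
 *
 *	if (!access_ok(VERIFY_READ, usrc, len))
 *		return -EFAULT;
 *	if (__copy_from_user(kbuf, usrc, len))
 *		return -EFAULT;
 */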

static __always_inline __must_check
int __copy_to_user(void __user *dst, const void *src, unsigned size)
{
	int ret = 0;

	might_fault();
	if (!__builtin_constant_p(size))
		return copy_user_generic((__force void *)dst, src, size);
	switch (size) {
	case 1: __put_user_asm(*(u8 *)src, (u8 __user *)dst,
			       ret, "b", "b", "iq", 1);
		return ret;
	case 2: __put_user_asm(*(u16 *)src, (u16 __user *)dst,
			       ret, "w", "w", "ir", 2);
		return ret;
	case 4: __put_user_asm(*(u32 *)src, (u32 __user *)dst,
			       ret, "l", "k", "ir", 4);
		return ret;
	case 8: __put_user_asm(*(u64 *)src, (u64 __user *)dst,
			       ret, "q", "", "er", 8);
		return ret;
	case 10:
		__put_user_asm(*(u64 *)src, (u64 __user *)dst,
			       ret, "q", "", "er", 10);
		if (unlikely(ret))
			return ret;
		/* compiler barrier between the two halves of the store */
		asm("" : : : "memory");
		__put_user_asm(*(u16 *)(8 + (char *)src),
			       (u16 __user *)(8 + (char __user *)dst),
			       ret, "w", "w", "ir", 2);
		return ret;
	case 16:
		__put_user_asm(*(u64 *)src, (u64 __user *)dst,
			       ret, "q", "", "er", 16);
		if (unlikely(ret))
			return ret;
		asm("" : : : "memory");
		__put_user_asm(*(u64 *)(8 + (char *)src),
			       (u64 __user *)(8 + (char __user *)dst),
			       ret, "q", "", "er", 8);
		return ret;
	default:
		return copy_user_generic((__force void *)dst, src, size);
	}
}

static __always_inline __must_check
int __copy_in_user(void __user *dst, const void __user *src, unsigned size)
{
	int ret = 0;

	might_fault();
	if (!__builtin_constant_p(size))
		return copy_user_generic((__force void *)dst,
					 (__force void *)src, size);
	switch (size) {
	case 1: {
		u8 tmp;
		__get_user_asm(tmp, (u8 __user *)src,
			       ret, "b", "b", "=q", 1);
		if (likely(!ret))
			__put_user_asm(tmp, (u8 __user *)dst,
				       ret, "b", "b", "iq", 1);
		return ret;
	}
	case 2: {
		u16 tmp;
		__get_user_asm(tmp, (u16 __user *)src,
			       ret, "w", "w", "=r", 2);
		if (likely(!ret))
			__put_user_asm(tmp, (u16 __user *)dst,
				       ret, "w", "w", "ir", 2);
		return ret;
	}
	case 4: {
		u32 tmp;
		__get_user_asm(tmp, (u32 __user *)src,
			       ret, "l", "k", "=r", 4);
		if (likely(!ret))
			__put_user_asm(tmp, (u32 __user *)dst,
				       ret, "l", "k", "ir", 4);
		return ret;
	}
	case 8: {
		u64 tmp;
		__get_user_asm(tmp, (u64 __user *)src,
			       ret, "q", "", "=r", 8);
		if (likely(!ret))
			__put_user_asm(tmp, (u64 __user *)dst,
				       ret, "q", "", "er", 8);
		return ret;
	}
	default:
		return copy_user_generic((__force void *)dst,
					 (__force void *)src, size);
	}
}
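
/*
 * Note: __copy_in_user() moves data between two user-space buffers; each
 * constant-size case above bounces the value through a kernel register so
 * that a fault on either side is caught by the exception tables.
 */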

__must_check long
strncpy_from_user(char *dst, const char __user *src, long count);
__must_check long
__strncpy_from_user(char *dst, const char __user *src, long count);
__must_check long strnlen_user(const char __user *str, long n);
__must_check long __strnlen_user(const char __user *str, long n);
__must_check long strlen_user(const char __user *str);
__must_check unsigned long clear_user(void __user *mem, unsigned long len);
__must_check unsigned long __clear_user(void __user *mem, unsigned long len);

static __must_check __always_inline int
__copy_from_user_inatomic(void *dst, const void __user *src, unsigned size)
{
	return copy_user_generic(dst, (__force const void *)src, size);
}

static __must_check __always_inline int
__copy_to_user_inatomic(void __user *dst, const void *src, unsigned size)
{
	return copy_user_generic((__force void *)dst, src, size);
}
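
/*
 * Illustrative only: the _inatomic variants omit might_fault() and may be
 * used where sleeping is not allowed, typically bracketed by
 * pagefault_disable()/pagefault_enable(), so that a fault makes the copy
 * fail instead of sleeping (sketch, with hypothetical names):
 *
 *	pagefault_disable();
 *	left = __copy_from_user_inatomic(kbuf, usrc, len);
 *	pagefault_enable();
 *	if (left)
 *		return -EFAULT;	// or retry with a sleeping copy
 */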

extern long __copy_user_nocache(void *dst, const void __user *src,
				unsigned size, int zerorest);

static inline int
__copy_from_user_nocache(void *dst, const void __user *src, unsigned size)
{
	might_sleep();
	return __copy_user_nocache(dst, src, size, 1);
}

static inline int
__copy_from_user_inatomic_nocache(void *dst, const void __user *src,
				  unsigned size)
{
	return __copy_user_nocache(dst, src, size, 0);
}
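
/*
 * Note: the _nocache variants use non-temporal stores, so large copies
 * whose destination will not be read again soon do not evict useful
 * cache lines. The zerorest argument selects whether the remainder of
 * the destination is zeroed after a partial (faulting) copy: 1 for the
 * plain variant above, 0 for the inatomic one.
 */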

/*
 * Handle the tail of a partially failed copy: salvage what it can byte
 * by byte and, if zerorest is set, zero whatever remains of the buffer.
 */
unsigned long
copy_user_handle_tail(char *to, char *from, unsigned len, unsigned zerorest);

#endif /* _ASM_X86_UACCESS_64_H */