#ifndef __X86_64_UACCESS_H
#define __X86_64_UACCESS_H

/*
 * User space memory access functions
 */
#include <linux/compiler.h>
#include <linux/errno.h>
#include <linux/prefetch.h>
#include <asm/page.h>
/*
 * Copy To / From Userspace
 */

/* Handles exceptions in both to and from, but doesn't do access_ok */
__must_check unsigned long
copy_user_generic(void *to, const void *from, unsigned len);

__must_check unsigned long
copy_to_user(void __user *to, const void *from, unsigned len);
__must_check unsigned long
copy_from_user(void *to, const void __user *from, unsigned len);
__must_check unsigned long
copy_in_user(void __user *to, const void __user *from, unsigned len);
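/*
 * Usage sketch (a hypothetical read() handler, not part of this header):
 * copy_to_user()/copy_from_user() return the number of bytes that could
 * NOT be copied, so zero means success.
 *
 *	static ssize_t my_read(struct file *file, char __user *buf,
 *			       size_t len, loff_t *ppos)
 *	{
 *		char kbuf[64] = "hello\n";
 *
 *		if (len > sizeof(kbuf))
 *			len = sizeof(kbuf);
 *		if (copy_to_user(buf, kbuf, len))
 *			return -EFAULT;
 *		return len;
 *	}
 */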
static __always_inline __must_check
int __copy_from_user(void *dst, const void __user *src, unsigned size)
{
	int ret = 0;

	if (!__builtin_constant_p(size))
		return copy_user_generic(dst, (__force void *)src, size);
	switch (size) {
	case 1:
		__get_user_asm(*(u8 *)dst, (u8 __user *)src,
			       ret, "b", "b", "=q", 1);
		return ret;
	case 2:
		__get_user_asm(*(u16 *)dst, (u16 __user *)src,
			       ret, "w", "w", "=r", 2);
		return ret;
	case 4:
		__get_user_asm(*(u32 *)dst, (u32 __user *)src,
			       ret, "l", "k", "=r", 4);
		return ret;
	case 8:
		__get_user_asm(*(u64 *)dst, (u64 __user *)src,
			       ret, "q", "", "=r", 8);
		return ret;
	case 10:
		__get_user_asm(*(u64 *)dst, (u64 __user *)src,
			       ret, "q", "", "=r", 10);
		if (unlikely(ret))
			return ret;
		__get_user_asm(*(u16 *)(8 + (char *)dst),
			       (u16 __user *)(8 + (char __user *)src),
			       ret, "w", "w", "=r", 2);
		return ret;
	case 16:
		__get_user_asm(*(u64 *)dst, (u64 __user *)src,
			       ret, "q", "", "=r", 16);
		if (unlikely(ret))
			return ret;
		__get_user_asm(*(u64 *)(8 + (char *)dst),
			       (u64 __user *)(8 + (char __user *)src),
			       ret, "q", "", "=r", 8);
		return ret;
	default:
		return copy_user_generic(dst, (__force void *)src, size);
	}
}
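/*
 * Note: when size is a compile-time constant, the switch above lets the
 * compiler emit one or two exception-handled moves instead of a call to
 * copy_user_generic(), e.g. with a hypothetical 8-byte struct:
 *
 *	struct req { u32 cmd; u32 arg; };
 *	struct req r;
 *
 *	if (__copy_from_user(&r, ubuf, sizeof(r)))	// hits the u64 case
 *		return -EFAULT;
 */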
static __always_inline __must_check
int __copy_to_user(void __user *dst, const void *src, unsigned size)
{
	int ret = 0;

	if (!__builtin_constant_p(size))
		return copy_user_generic((__force void *)dst, src, size);
	switch (size) {
	case 1:
		__put_user_asm(*(u8 *)src, (u8 __user *)dst,
			       ret, "b", "b", "iq", 1);
		return ret;
	case 2:
		__put_user_asm(*(u16 *)src, (u16 __user *)dst,
			       ret, "w", "w", "ir", 2);
		return ret;
	case 4:
		__put_user_asm(*(u32 *)src, (u32 __user *)dst,
			       ret, "l", "k", "ir", 4);
		return ret;
	case 8:
		__put_user_asm(*(u64 *)src, (u64 __user *)dst,
			       ret, "q", "", "ir", 8);
		return ret;
	case 10:
		__put_user_asm(*(u64 *)src, (u64 __user *)dst,
			       ret, "q", "", "ir", 10);
		if (unlikely(ret))
			return ret;
		asm("" : : : "memory");
		__put_user_asm(4[(u16 *)src], 4 + (u16 __user *)dst,
			       ret, "w", "w", "ir", 2);
		return ret;
	case 16:
		__put_user_asm(*(u64 *)src, (u64 __user *)dst,
			       ret, "q", "", "ir", 16);
		if (unlikely(ret))
			return ret;
		asm("" : : : "memory");
		__put_user_asm(1[(u64 *)src], 1 + (u64 __user *)dst,
			       ret, "q", "", "ir", 8);
		return ret;
	default:
		return copy_user_generic((__force void *)dst, src, size);
	}
}
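/*
 * The double-underscore variants above do no access_ok() check; a caller
 * is expected to have validated the range first. A minimal sketch,
 * assuming the access_ok(type, addr, size) interface of this era:
 *
 *	if (!access_ok(VERIFY_WRITE, ubuf, sizeof(val)))
 *		return -EFAULT;
 *	if (__copy_to_user(ubuf, &val, sizeof(val)))
 *		return -EFAULT;
 */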
static __always_inline __must_check
int __copy_in_user(void __user *dst, const void __user *src, unsigned size)
{
	int ret = 0;

	if (!__builtin_constant_p(size))
		return copy_user_generic((__force void *)dst,
					 (__force void *)src, size);
	switch (size) {
	case 1: {
		u8 tmp;
		__get_user_asm(tmp, (u8 __user *)src,
			       ret, "b", "b", "=q", 1);
		if (likely(!ret))
			__put_user_asm(tmp, (u8 __user *)dst,
				       ret, "b", "b", "iq", 1);
		return ret;
	}
	case 2: {
		u16 tmp;
		__get_user_asm(tmp, (u16 __user *)src,
			       ret, "w", "w", "=r", 2);
		if (likely(!ret))
			__put_user_asm(tmp, (u16 __user *)dst,
				       ret, "w", "w", "ir", 2);
		return ret;
	}
	case 4: {
		u32 tmp;
		__get_user_asm(tmp, (u32 __user *)src,
			       ret, "l", "k", "=r", 4);
		if (likely(!ret))
			__put_user_asm(tmp, (u32 __user *)dst,
				       ret, "l", "k", "ir", 4);
		return ret;
	}
	case 8: {
		u64 tmp;
		__get_user_asm(tmp, (u64 __user *)src,
			       ret, "q", "", "=r", 8);
		if (likely(!ret))
			__put_user_asm(tmp, (u64 __user *)dst,
				       ret, "q", "", "ir", 8);
		return ret;
	}
	default:
		return copy_user_generic((__force void *)dst,
					 (__force void *)src, size);
	}
}
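/*
 * __copy_in_user() moves data between two *user* buffers, bouncing each
 * word through a kernel temporary; a typical caller is compat code that
 * repacks a 32-bit structure in user memory (sketch with hypothetical
 * old/new pointers):
 *
 *	if (copy_in_user(&new->flags, &old->flags, sizeof(u32)))
 *		return -EFAULT;
 */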
__must_check long
strncpy_from_user(char *dst, const char __user *src, long count);
__must_check long
__strncpy_from_user(char *dst, const char __user *src, long count);
__must_check long strnlen_user(const char __user *str, long n);
__must_check long __strnlen_user(const char __user *str, long n);
__must_check long strlen_user(const char __user *str);
__must_check unsigned long clear_user(void __user *mem, unsigned long len);
__must_check unsigned long __clear_user(void __user *mem, unsigned long len);
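/*
 * Unlike the copy routines, the string helpers return lengths rather
 * than not-copied counts (hypothetical use, names invented here):
 *
 *	char name[32];
 *	long n = strncpy_from_user(name, uname, sizeof(name));
 *
 *	if (n < 0)			// -EFAULT on a bad pointer
 *		return n;
 *	if (n == sizeof(name))		// source didn't fit; no NUL
 *		return -ENAMETOOLONG;
 */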
__must_check long __copy_from_user_inatomic(void *dst, const void __user *src,
					    unsigned size);

static __must_check __always_inline int
__copy_to_user_inatomic(void __user *dst, const void *src, unsigned size)
{
	return copy_user_generic((__force void *)dst, src, size);
}
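/*
 * The _inatomic variants are intended for contexts that cannot sleep,
 * e.g. with pagefaults disabled; a nonzero return means some bytes were
 * not copied and the caller should fall back to a sleeping copy
 * (a sketch, not a required calling convention):
 *
 *	pagefault_disable();
 *	left = __copy_from_user_inatomic(dst, src, size);
 *	pagefault_enable();
 *	if (left)
 *		return -EFAULT;		// or retry with copy_from_user()
 */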
extern long __copy_user_nocache(void *dst, const void __user *src,
				unsigned size, int zerorest);

static inline int __copy_from_user_nocache(void *dst, const void __user *src,
					   unsigned size)
{
	might_sleep();
	return __copy_user_nocache(dst, src, size, 1);
}

static inline int __copy_from_user_inatomic_nocache(void *dst,
						    const void __user *src,
						    unsigned size)
{
	return __copy_user_nocache(dst, src, size, 0);
}
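/*
 * The nocache variants copy with non-temporal stores so that large
 * one-shot copies don't pollute the CPU cache; when zerorest is set,
 * __copy_user_nocache() zeroes the remaining destination bytes after a
 * fault, matching the usual __copy_from_user() contract.
 */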
unsigned long
copy_user_handle_tail(char *to, char *from, unsigned len, unsigned zerorest);
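/*
 * copy_user_handle_tail() is the C fixup run after a faulting copy: it
 * retries the remaining bytes one at a time, zeroes the destination
 * tail when zerorest is set, and returns how many bytes could not be
 * copied.
 */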
#endif /* __X86_64_UACCESS_H */