#ifndef _ASM_X86_UACCESS_64_H
#define _ASM_X86_UACCESS_64_H

/*
 * User space memory access functions
 */
#include <linux/compiler.h>
#include <linux/errno.h>
#include <linux/lockdep.h>
#include <asm/alternative.h>
#include <asm/cpufeature.h>
#include <asm/page.h>

/*
 * Copy To / From Userspace
 */

/* Handles exceptions in both to and from, but doesn't do access_ok */
__must_check unsigned long
copy_user_enhanced_fast_string(void *to, const void *from, unsigned len);
__must_check unsigned long
copy_user_generic_string(void *to, const void *from, unsigned len);
__must_check unsigned long
copy_user_generic_unrolled(void *to, const void *from, unsigned len);
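
/*
 * The three routines above are implemented in assembly
 * (arch/x86/lib/copy_user_64.S): the "unrolled" variant open-codes the
 * moves, the "string" variant uses rep movsq (X86_FEATURE_REP_GOOD), and
 * the "enhanced fast string" variant uses rep movsb (X86_FEATURE_ERMS).
 * They take the destination in %rdi, the source in %rsi and the length
 * in %edx, return the number of bytes left uncopied in %eax, and may
 * clobber %rcx and %r8-%r11 -- which is what the output constraints and
 * clobber list in copy_user_generic() below spell out.
 */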
static __always_inline __must_check unsigned long
copy_user_generic(void *to, const void *from, unsigned len)
{
	unsigned ret;

	/*
	 * If CPU has ERMS feature, use copy_user_enhanced_fast_string.
	 * Otherwise, if CPU has rep_good feature, use copy_user_generic_string.
	 * Otherwise, use copy_user_generic_unrolled.
	 */
	alternative_call_2(copy_user_generic_unrolled,
			 copy_user_generic_string,
			 X86_FEATURE_REP_GOOD,
			 copy_user_enhanced_fast_string,
			 X86_FEATURE_ERMS,
			 ASM_OUTPUT2("=a" (ret), "=D" (to), "=S" (from),
				     "=d" (len)),
			 "1" (to), "2" (from), "3" (len)
			 : "memory", "rcx", "r8", "r9", "r10", "r11");
	return ret;
}

__must_check unsigned long
copy_in_user(void __user *to, const void __user *from, unsigned len);
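
/*
 * __copy_from_user_nocheck() is the bare copy: there is no access_ok()
 * and no might_fault() check, so the caller is responsible for having
 * validated the user pointer.  Small constant sizes are inlined as one
 * or two mov instructions via __get_user_asm(); any other size falls
 * back to copy_user_generic().  The return value is the number of bytes
 * that could not be copied (0 on success).
 */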
static __always_inline __must_check
int __copy_from_user_nocheck(void *dst, const void __user *src, unsigned size)
{
	int ret = 0;

	if (!__builtin_constant_p(size))
		return copy_user_generic(dst, (__force void *)src, size);
	switch (size) {
	case 1:
		__get_user_asm(*(u8 *)dst, (u8 __user *)src,
			       ret, "b", "b", "=q", 1);
		return ret;
	case 2:
		__get_user_asm(*(u16 *)dst, (u16 __user *)src,
			       ret, "w", "w", "=r", 2);
		return ret;
	case 4:
		__get_user_asm(*(u32 *)dst, (u32 __user *)src,
			       ret, "l", "k", "=r", 4);
		return ret;
	case 8:
		__get_user_asm(*(u64 *)dst, (u64 __user *)src,
			       ret, "q", "", "=r", 8);
		return ret;
	case 10:
		__get_user_asm(*(u64 *)dst, (u64 __user *)src,
			       ret, "q", "", "=r", 10);
		if (unlikely(ret))
			return ret;
		__get_user_asm(*(u16 *)(8 + (char *)dst),
			       (u16 __user *)(8 + (char __user *)src),
			       ret, "w", "w", "=r", 2);
		return ret;
	case 16:
		__get_user_asm(*(u64 *)dst, (u64 __user *)src,
			       ret, "q", "", "=r", 16);
		if (unlikely(ret))
			return ret;
		__get_user_asm(*(u64 *)(8 + (char *)dst),
			       (u64 __user *)(8 + (char __user *)src),
			       ret, "q", "", "=r", 8);
		return ret;
	default:
		return copy_user_generic(dst, (__force void *)src, size);
	}
}

static __always_inline __must_check
int __copy_from_user(void *dst, const void __user *src, unsigned size)
{
	might_fault();
	return __copy_from_user_nocheck(dst, src, size);
}
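
/*
 * __copy_to_user_nocheck() mirrors __copy_from_user_nocheck() in the
 * other direction: no access_ok() or might_fault(), small constant
 * sizes are inlined via __put_user_asm(), everything else goes through
 * copy_user_generic(), and the return value is the number of bytes not
 * copied.
 */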
static __always_inline __must_check
int __copy_to_user_nocheck(void __user *dst, const void *src, unsigned size)
{
	int ret = 0;

	if (!__builtin_constant_p(size))
		return copy_user_generic((__force void *)dst, src, size);
	switch (size) {
	case 1:
		__put_user_asm(*(u8 *)src, (u8 __user *)dst,
			       ret, "b", "b", "iq", 1);
		return ret;
	case 2:
		__put_user_asm(*(u16 *)src, (u16 __user *)dst,
			       ret, "w", "w", "ir", 2);
		return ret;
	case 4:
		__put_user_asm(*(u32 *)src, (u32 __user *)dst,
			       ret, "l", "k", "ir", 4);
		return ret;
	case 8:
		__put_user_asm(*(u64 *)src, (u64 __user *)dst,
			       ret, "q", "", "er", 8);
		return ret;
	case 10:
		__put_user_asm(*(u64 *)src, (u64 __user *)dst,
			       ret, "q", "", "er", 10);
		if (unlikely(ret))
			return ret;
		asm("" : : : "memory");
		__put_user_asm(((u16 *)src)[4], 4 + (u16 __user *)dst,
			       ret, "w", "w", "ir", 2);
		return ret;
	case 16:
		__put_user_asm(*(u64 *)src, (u64 __user *)dst,
			       ret, "q", "", "er", 16);
		if (unlikely(ret))
			return ret;
		asm("" : : : "memory");
		__put_user_asm(((u64 *)src)[1], 1 + (u64 __user *)dst,
			       ret, "q", "", "er", 8);
		return ret;
	default:
		return copy_user_generic((__force void *)dst, src, size);
	}
}

static __always_inline __must_check
int __copy_to_user(void __user *dst, const void *src, unsigned size)
{
	might_fault();
	return __copy_to_user_nocheck(dst, src, size);
}
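
/*
 * __copy_in_user() copies from one userspace buffer to another.  For a
 * small constant size it bounces the data through a kernel temporary
 * with a __get_user_asm()/__put_user_asm() pair; otherwise both pointers
 * are handed to copy_user_generic().  As with the other __-prefixed
 * helpers, access_ok() is the caller's job.
 */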
static __always_inline __must_check
int __copy_in_user(void __user *dst, const void __user *src, unsigned size)
{
	int ret = 0;

	might_fault();
	if (!__builtin_constant_p(size))
		return copy_user_generic((__force void *)dst,
					 (__force void *)src, size);
	switch (size) {
	case 1: {
		u8 tmp;
		__get_user_asm(tmp, (u8 __user *)src,
			       ret, "b", "b", "=q", 1);
		if (likely(!ret))
			__put_user_asm(tmp, (u8 __user *)dst,
				       ret, "b", "b", "iq", 1);
		return ret;
	}
	case 2: {
		u16 tmp;
		__get_user_asm(tmp, (u16 __user *)src,
			       ret, "w", "w", "=r", 2);
		if (likely(!ret))
			__put_user_asm(tmp, (u16 __user *)dst,
				       ret, "w", "w", "ir", 2);
		return ret;
	}
	case 4: {
		u32 tmp;
		__get_user_asm(tmp, (u32 __user *)src,
			       ret, "l", "k", "=r", 4);
		if (likely(!ret))
			__put_user_asm(tmp, (u32 __user *)dst,
				       ret, "l", "k", "ir", 4);
		return ret;
	}
	case 8: {
		u64 tmp;
		__get_user_asm(tmp, (u64 __user *)src,
			       ret, "q", "", "=r", 8);
		if (likely(!ret))
			__put_user_asm(tmp, (u64 __user *)dst,
				       ret, "q", "", "er", 8);
		return ret;
	}
	default:
		return copy_user_generic((__force void *)dst,
					 (__force void *)src, size);
	}
}
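
/*
 * The *_inatomic variants skip the might_fault() annotation so they can
 * be called with page faults disabled (e.g. from atomic context); in
 * that case a faulting access is not paged in but simply reported via
 * the non-zero "bytes not copied" return value.
 */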
static __must_check __always_inline int
__copy_from_user_inatomic(void *dst, const void __user *src, unsigned size)
{
	return __copy_from_user_nocheck(dst, src, size);
}

static __must_check __always_inline int
__copy_to_user_inatomic(void __user *dst, const void *src, unsigned size)
{
	return __copy_to_user_nocheck(dst, src, size);
}
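
/*
 * The nocache variants use non-temporal stores so that a large copy does
 * not push the current working set out of the CPU caches.  The zerorest
 * argument of __copy_user_nocache() indicates whether the remaining
 * destination bytes should be zeroed if the copy faults part-way
 * through.
 */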
extern long __copy_user_nocache(void *dst, const void __user *src,
				unsigned size, int zerorest);

static inline int
__copy_from_user_nocache(void *dst, const void __user *src, unsigned size)
{
	might_fault();
	return __copy_user_nocache(dst, src, size, 1);
}

static inline int
__copy_from_user_inatomic_nocache(void *dst, const void __user *src,
				  unsigned size)
{
	return __copy_user_nocache(dst, src, size, 0);
}
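
/*
 * copy_user_handle_tail() is the common fixup path reached by the
 * assembly copy routines when they take a fault: it retries the
 * remaining bytes one at a time and returns how many could not be
 * copied.
 */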
unsigned long
copy_user_handle_tail(char *to, char *from, unsigned len);

#endif /* _ASM_X86_UACCESS_64_H */