/* Copyright 2002 Andi Kleen, SuSE Labs.
 * Subject to the GNU Public License v2.
 *
 * Functions to copy from and to user space.
 */

#define FIX_ALIGNMENT 1

#include <asm/current.h>
#include <asm/asm-offsets.h>
#include <asm/thread_info.h>
#include <asm/cpufeature.h>

/* Standard copy_to_user with segment limit checking */
	.globl copy_to_user
	.p2align 4
copy_to_user:
	GET_THREAD_INFO(%rax)
	movq %rdi,%rcx
	addq %rdx,%rcx
	jc  bad_to_user
	cmpq threadinfo_addr_limit(%rax),%rcx
	jae bad_to_user
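
	/*
	 * The jump below is patched at boot by the alternatives code:
	 * on CPUs with X86_FEATURE_REP_GOOD it is redirected to
	 * copy_user_generic_c (rep movsq), otherwise it goes to the
	 * unrolled copy at .Lcug.  Each .altinstructions entry lists
	 * original address, replacement address, feature bit, original
	 * length and replacement length (both 5 bytes here).
	 */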
2:
	.byte 0xe9	/* 32bit jump */
	.long .Lcug-1f
1:

	.section .altinstr_replacement,"ax"
3:	.byte 0xe9	/* replacement jmp with 32bit immediate */
	.long copy_user_generic_c-1b	/* offset */
	.previous
	.section .altinstructions,"a"
	.align 8
	.quad 2b
	.quad 3b
	.byte X86_FEATURE_REP_GOOD
	.byte 5
	.byte 5
	.previous

/* Standard copy_from_user with segment limit checking */
	.globl copy_from_user
	.p2align 4
copy_from_user:
	GET_THREAD_INFO(%rax)
	movq %rsi,%rcx
	addq %rdx,%rcx
	jc  bad_from_user
	cmpq threadinfo_addr_limit(%rax),%rcx
	jae bad_from_user
	/* FALL THROUGH to copy_user_generic */

	.section .fixup,"ax"
	/* must zero dest */
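	/*
	 * If the range check fails nothing is copied, so clear the
	 * whole destination to avoid leaking stale kernel data to
	 * callers that do not check the return value, then report
	 * the full count as uncopied.
	 */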
bad_from_user:
	movl %edx,%ecx
	xorl %eax,%eax
	rep
	stosb
bad_to_user:
	movl %edx,%eax
	ret
	.previous

/*
 * copy_user_generic - memory copy with exception handling.
 *
 * Input:
 * rdi destination
 * rsi source
 * rdx count
 *
 * Output:
 * eax uncopied bytes or 0 if successful.
 */
	.globl copy_user_generic
	.p2align 4
copy_user_generic:
	.byte 0x66,0x66,0x90	/* 5 byte nop for replacement jump */
	.byte 0x66,0x90
1:
	.section .altinstr_replacement,"ax"
2:	.byte 0xe9	/* near jump with 32bit immediate */
	.long copy_user_generic_c-1b	/* offset */
	.previous
	.section .altinstructions,"a"
	.align 8
	.quad copy_user_generic
	.quad 2b
	.byte X86_FEATURE_REP_GOOD
	.byte 5
	.byte 5
	.previous
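
	/*
	 * Unrolled copy: eight quadword loads and stores move 64 bytes
	 * per iteration.  Every load and store carries an exception
	 * table entry so a fault can be turned into an uncopied byte
	 * count.
	 */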
.Lcug:
	pushq %rbx
	xorl %eax,%eax	/* zero for the exception handler */

#ifdef FIX_ALIGNMENT
	/* check for bad alignment of destination */
	movl %edi,%ecx
	andl $7,%ecx
	jnz .Lbad_alignment
.Lafter_bad_alignment:
#endif
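
	/*
	 * rcx keeps the original byte count for the tail/fixup code,
	 * rdx becomes the number of 64 byte blocks minus one and
	 * ebx holds the block size for the exception handler.
	 */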
	movq %rdx,%rcx

	movl $64,%ebx
	shrq $6,%rdx
	decq %rdx
	js .Lhandle_tail

	.p2align 4
.Lloop:
.Ls1:	movq (%rsi),%r11
.Ls2:	movq 1*8(%rsi),%r8
.Ls3:	movq 2*8(%rsi),%r9
.Ls4:	movq 3*8(%rsi),%r10
.Ld1:	movq %r11,(%rdi)
.Ld2:	movq %r8,1*8(%rdi)
.Ld3:	movq %r9,2*8(%rdi)
.Ld4:	movq %r10,3*8(%rdi)

.Ls5:	movq 4*8(%rsi),%r11
.Ls6:	movq 5*8(%rsi),%r8
.Ls7:	movq 6*8(%rsi),%r9
.Ls8:	movq 7*8(%rsi),%r10
.Ld5:	movq %r11,4*8(%rdi)
.Ld6:	movq %r8,5*8(%rdi)
.Ld7:	movq %r9,6*8(%rdi)
.Ld8:	movq %r10,7*8(%rdi)

	decq %rdx

	leaq 64(%rsi),%rsi
	leaq 64(%rdi),%rdi

	jns .Lloop

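	/*
	 * Tail: copy the remaining 0-63 bytes, first as quadwords,
	 * then byte by byte.  ecx still holds the original count.
	 */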
	.p2align 4
.Lhandle_tail:
	movl %ecx,%edx
	andl $63,%ecx
	shrl $3,%ecx
	jz .Lhandle_7
	movl $8,%ebx
	.p2align 4
.Lloop_8:
.Ls9:	movq (%rsi),%r8
.Ld9:	movq %r8,(%rdi)
	decl %ecx
	leaq 8(%rdi),%rdi
	leaq 8(%rsi),%rsi
	jnz .Lloop_8

.Lhandle_7:
	movl %edx,%ecx
	andl $7,%ecx
	jz .Lende
	.p2align 4
.Lloop_1:
.Ls10:	movb (%rsi),%bl
.Ld10:	movb %bl,(%rdi)
	incq %rdi
	incq %rsi
	decl %ecx
	jnz .Lloop_1

.Lende:
	popq %rbx
	ret

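	/*
	 * Destination was not 8 byte aligned: copy single bytes until
	 * it is, then rejoin the main path.  Copies too short to reach
	 * alignment are finished byte by byte at .Lhandle_7.
	 */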
#ifdef FIX_ALIGNMENT
	/* align destination */
	.p2align 4
.Lbad_alignment:
	movl $8,%r9d
	subl %ecx,%r9d
	movl %r9d,%ecx
	cmpq %r9,%rdx
	jz .Lhandle_7
	js .Lhandle_7
.Lalign_1:
.Ls11:	movb (%rsi),%bl
.Ld11:	movb %bl,(%rdi)
	incq %rsi
	incq %rdi
	decl %ecx
	jnz .Lalign_1
	subq %r9,%rdx
	jmp .Lafter_bad_alignment
#endif

/* table sorted by exception address */
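	/*
	 * Each entry pairs a possibly faulting instruction with its
	 * fixup.  The stores reuse the fixup of the matching load, so
	 * a fault anywhere in a 64 byte block is accounted for on the
	 * pessimistic side.
	 */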
	.section __ex_table,"a"
	.align 8
	.quad .Ls1,.Ls1e
	.quad .Ls2,.Ls2e
	.quad .Ls3,.Ls3e
	.quad .Ls4,.Ls4e
	.quad .Ld1,.Ls1e
	.quad .Ld2,.Ls2e
	.quad .Ld3,.Ls3e
	.quad .Ld4,.Ls4e
	.quad .Ls5,.Ls5e
	.quad .Ls6,.Ls6e
	.quad .Ls7,.Ls7e
	.quad .Ls8,.Ls8e
	.quad .Ld5,.Ls5e
	.quad .Ld6,.Ls6e
	.quad .Ld7,.Ls7e
	.quad .Ld8,.Ls8e
	.quad .Ls9,.Le_quad
	.quad .Ld9,.Le_quad
	.quad .Ls10,.Le_byte
	.quad .Ld10,.Le_byte
#ifdef FIX_ALIGNMENT
	.quad .Ls11,.Lzero_rest
	.quad .Ld11,.Lzero_rest
#endif
	.quad .Le5,.Le_zero
	.previous

	/* compute 64-offset for main loop. 8 bytes accuracy with error on the
	   pessimistic side. this is gross. it would be better to fix the
	   interface. */
/* eax: zero, ebx: 64 */
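	/*
	 * A fault at .LsN/.LdN enters at .LsNe and falls through the
	 * remaining addl $8,%eax lines, counting the quadwords of the
	 * current 64 byte block that are treated as not copied.
	 */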
.Ls1e:	addl $8,%eax
.Ls2e:	addl $8,%eax
.Ls3e:	addl $8,%eax
.Ls4e:	addl $8,%eax
.Ls5e:	addl $8,%eax
.Ls6e:	addl $8,%eax
.Ls7e:	addl $8,%eax
.Ls8e:	addl $8,%eax
	addq %rbx,%rdi	/* +64 */
	subq %rax,%rdi	/* correct destination with computed offset */

	shlq $6,%rdx	/* loop counter * 64 (stride length) */
	addq %rax,%rdx	/* add offset to loopcnt */
	andl $63,%ecx	/* remaining bytes */
	addq %rcx,%rdx	/* add them */
	jmp .Lzero_rest

	/* exception on quad word loop in tail handling */
	/* ecx: loopcnt/8, %edx: length, rdi: correct */
.Le_quad:
	shll $3,%ecx
	andl $7,%edx
	addl %ecx,%edx
	/* edx: bytes to zero, rdi: dest, eax: zero */
.Lzero_rest:
	movq %rdx,%rcx
.Le_byte:
	xorl %eax,%eax
.Le5:	rep
	stosb
	/* when there is another exception while zeroing the rest just return */
.Le_zero:
	movq %rdx,%rax
	jmp .Lende

	/* Some CPUs run faster using the string copy instructions.
	   This is also a lot simpler. Use them when possible.
	   Patch in jmps to this code instead of copying it fully
	   to avoid unwanted aliasing in the exception tables. */

/* rdi	destination
 * rsi	source
 * rdx	count
 *
 * Output:
 * eax	uncopied bytes or 0 if successful.
 *
 * Only 4GB of copy is supported. This shouldn't be a problem
 * because the kernel normally only writes from/to page sized chunks
 * even if user space passed a longer buffer.
 * And more would be dangerous because both Intel and AMD have
 * errata with rep movsq > 4GB. If someone feels the need to fix
 * this please consider this.
 */
copy_user_generic_c:
	movl %edx,%ecx
	shrl $3,%ecx
	andl $7,%edx
1:	rep
	movsq
	movl %edx,%ecx
2:	rep
	movsb
4:	movl %ecx,%eax
	ret
3:	lea (%rdx,%rcx,8),%rax
	ret

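	/*
	 * Fixups for the string copy: a fault in rep movsq (1b) lands
	 * at 3b and returns rdx + rcx*8 uncopied bytes (tail bytes plus
	 * unfinished quadwords); a fault in rep movsb (2b) lands at 4b
	 * and returns the remaining byte count from ecx.
	 */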
	.section __ex_table,"a"
	.quad 1b,3b
	.quad 2b,4b
	.previous