/*
 * Copyright 2011 Tilera Corporation. All Rights Reserved.
 *
 *   This program is free software; you can redistribute it and/or
 *   modify it under the terms of the GNU General Public License
 *   as published by the Free Software Foundation, version 2.
 *
 *   This program is distributed in the hope that it will be useful, but
 *   WITHOUT ANY WARRANTY; without even the implied warranty of
 *   MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE, GOOD TITLE or
 *   NON INFRINGEMENT.  See the GNU General Public License for
 *   more details.
 */
#include <linux/linkage.h>
#include <asm/errno.h>
#include <asm/cache.h>
#include <arch/chip.h>
/* Access user memory, but use MMU to avoid propagating kernel exceptions. */
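/*
 * Each routine below records fault/fixup address pairs in the
 * __ex_table section: if a load or store to user memory faults, the
 * exception handler resumes at the paired fixup label, which returns
 * the number of bytes still outstanding in r0 instead of oopsing.
 */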
/*
 * clear_user_asm takes the user target address in r0 and the
 * number of bytes to zero in r1.
 * It returns the number of uncopiable bytes (hopefully zero) in r0.
 * Note that we don't use a separate .fixup section here since we fall
 * through into the "fixup" code as the last straight-line bundle anyway.
 */
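/*
 * From C this is assumed to be declared roughly as (a sketch, not a
 * verbatim copy of the uaccess headers):
 *   extern unsigned long clear_user_asm(void __user *mem, unsigned long n);
 * so a caller treats a nonzero return as "that many bytes were not cleared".
 */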
STD_ENTRY(clear_user_asm)
	{ beqz r1, 2f; or r2, r0, r1 }
	andi r2, r2, 7
	beqzt r2, .Lclear_aligned_user_asm
1:	{ st1 r0, zero; addi r0, r0, 1; addi r1, r1, -1 }
	bnezt r1, 1b
2:	{ move r0, r1; jrp lr }
	.pushsection __ex_table,"a"
.align 8
	.quad 1b, 2b
.popsection
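/*
 * Word-at-a-time variant: reached only when the OR of the address and
 * the count has no low bits set, i.e. both are 8-byte aligned, so each
 * iteration can store a full 8-byte zero word.
 */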
.Lclear_aligned_user_asm:
1:	{ st r0, zero; addi r0, r0, 8; addi r1, r1, -8 }
	bnezt r1, 1b
2:	{ move r0, r1; jrp lr }
	STD_ENDPROC(clear_user_asm)
	.pushsection __ex_table,"a"
.align 8
	.quad 1b, 2b
.popsection
/*
 * flush_user_asm takes the user target address in r0 and the
 * number of bytes to flush in r1.
 * It returns the number of unflushable bytes (hopefully zero) in r0.
 */
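/*
 * The start address is rounded down and the end address rounded up to
 * an L2_CACHE_BYTES boundary (r2 holds -L2_CACHE_BYTES as the mask),
 * so the loop walks whole cache lines, issuing one flush and advancing
 * by CHIP_FLUSH_STRIDE() per iteration.
 */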
STD_ENTRY(flush_user_asm)
	beqz r1, 2f
	{ movei r2, L2_CACHE_BYTES; add r1, r0, r1 }
	{ sub r2, zero, r2; addi r1, r1, L2_CACHE_BYTES-1 }
	{ and r0, r0, r2; and r1, r1, r2 }
	{ sub r1, r1, r0 }
1:	{ flush r0; addi r1, r1, -CHIP_FLUSH_STRIDE() }
	{ addi r0, r0, CHIP_FLUSH_STRIDE(); bnezt r1, 1b }
2:	{ move r0, r1; jrp lr }
	STD_ENDPROC(flush_user_asm)
	.pushsection __ex_table,"a"
.align 8
	.quad 1b, 2b
.popsection
/*
 * finv_user_asm takes the user target address in r0 and the
 * number of bytes to flush-invalidate in r1.
 * It returns the number of not finv'able bytes (hopefully zero) in r0.
 */
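/*
 * Same structure as flush_user_asm above, but each pass issues a finv
 * (flush-and-invalidate) and advances by CHIP_FINV_STRIDE() instead.
 */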
STD_ENTRY(finv_user_asm)
	beqz r1, 2f
	{ movei r2, L2_CACHE_BYTES; add r1, r0, r1 }
	{ sub r2, zero, r2; addi r1, r1, L2_CACHE_BYTES-1 }
	{ and r0, r0, r2; and r1, r1, r2 }
	{ sub r1, r1, r0 }
1:	{ finv r0; addi r1, r1, -CHIP_FINV_STRIDE() }
	{ addi r0, r0, CHIP_FINV_STRIDE(); bnezt r1, 1b }
2:	{ move r0, r1; jrp lr }
	STD_ENDPROC(finv_user_asm)
	.pushsection __ex_table,"a"
.align 8
	.quad 1b, 2b
.popsection