/*
 * arch/ia64/kvm/optvfault.S
 * optimize virtualization fault handler
 *
 * Copyright (C) 2006 Intel Co
 *	Xuefei Xu (Anthony Xu) <anthony.xu@intel.com>
 * Copyright (C) 2008 Intel Co
 *      Add the support for Tukwila processors.
 *	Xiantao Zhang <xiantao.zhang@intel.com>
 */
#include <asm/asmmacro.h>
#include <asm/processor.h>
#include <asm/kvm_host.h>

#include "vti.h"
#include "asm-offsets.h"
/* Each ACCE_* switch enables the corresponding fast-path handler below;
 * undefine one to fall back to the C emulation via
 * kvm_virtualization_fault_back. */
#define ACCE_MOV_FROM_AR
#define ACCE_MOV_FROM_RR
#define ACCE_MOV_TO_RR
#define ACCE_RSM
#define ACCE_SSM
#define ACCE_MOV_TO_PSR
#define ACCE_THASH
/*
 * VMX_VPS_SYNC_READ: sync guest state from the VPD before touching it.
 * Saves b0/r24/r25/r31 into r17-r20, loads vcpu->arch.vpd (r21 = vcpu),
 * sets r24 = ip + 0x20 (the bundle after the branch) as the return
 * address, and calls kvm_vps_sync_read with r25 = vpd; on return the
 * saved registers are restored.  Clobbers r16; leaves r16 = vpd base.
 */
#define VMX_VPS_SYNC_READ			\
	add r16=VMM_VPD_BASE_OFFSET,r21;	\
	mov r17 = b0;				\
	mov r18 = r24;				\
	mov r19 = r25;				\
	mov r20 = r31;				\
	;;					\
{.mii;						\
	ld8 r16 = [r16];			\
	nop 0x0;				\
	mov r24 = ip;				\
	;;					\
};						\
{.mmb;						\
	add r24 = 0x20, r24;			\
	mov r25 = r16;				\
	br.sptk.many kvm_vps_sync_read;		\
};						\
	mov b0 = r17;				\
	mov r24 = r18;				\
	mov r25 = r19;				\
	mov r31 = r20
/*
 * kvm_vps_entry: tail-call a PAL VPS service.
 * In: r21 = vcpu, r30 = offset of the service inside the VSA.
 * Loads vcpu->arch.vsa_base and branches to vsa_base + r30.
 */
ENTRY(kvm_vps_entry)
	adds r29 = VMM_VCPU_VSA_BASE_OFFSET,r21
	;;
	ld8 r29 = [r29]			// r29 = vcpu->arch.vsa_base
	;;
	add r29 = r29, r30		// service entry = vsa_base + offset
	;;
	mov b0 = r29
	br.sptk.many b0
END(kvm_vps_entry)
/*
 *	Inputs:
 *	r24 : return address
 *	r25 : vpd
 *	r29 : scratch
 *
 */
GLOBAL_ENTRY(kvm_vps_sync_read)
	movl r30 = PAL_VPS_SYNC_READ	// VSA offset of the SYNC_READ service
	;;
	br.sptk.many kvm_vps_entry
END(kvm_vps_sync_read)
/*
 *	Inputs:
 *	r24 : return address
 *	r25 : vpd
 *	r29 : scratch
 *
 */
GLOBAL_ENTRY(kvm_vps_sync_write)
	movl r30 = PAL_VPS_SYNC_WRITE	// VSA offset of the SYNC_WRITE service
	;;
	br.sptk.many kvm_vps_entry
END(kvm_vps_sync_write)
/*
 *	Inputs:
 *	r23 : pr
 *	r24 : guest b0
 *	r25 : vpd
 *
 */
GLOBAL_ENTRY(kvm_vps_resume_normal)
	movl r30 = PAL_VPS_RESUME_NORMAL
	;;
	mov pr=r23,-2			// restore guest predicates (all but p0)
	br.sptk.many kvm_vps_entry
END(kvm_vps_resume_normal)
/*
 *	Inputs:
 *	r23 : pr
 *	r24 : guest b0
 *	r25 : vpd
 *	r17 : isr
 */
GLOBAL_ENTRY(kvm_vps_resume_handler)
	movl r30 = PAL_VPS_RESUME_HANDLER
	;;
	ld8 r26=[r25]			// first word of the vpd
	shr r17=r17,IA64_ISR_IR_BIT	// isolate isr.ir in bit 0
	;;
	dep r26=r17,r26,63,1		// bit 63 of r26 indicate whether enable CFLE
	mov pr=r23,-2			// restore guest predicates (all but p0)
	br.sptk.many kvm_vps_entry
END(kvm_vps_resume_handler)
//mov r1=ar3
/*
 * Fast path for "mov rX=ar.itc": emulated guest ITC is
 * host ar.itc + vcpu->arch.itc_offset.  The result is also recorded in
 * vcpu->arch.last_itc, then control dispatches through the
 * asm_mov_to_reg jump table (entry = target reg * 16) with r19 = value
 * and r30 = kvm_resume_to_guest as the continuation.
 */
GLOBAL_ENTRY(kvm_asm_mov_from_ar)
#ifndef ACCE_MOV_FROM_AR
	br.many kvm_virtualization_fault_back
#endif
	add r18=VMM_VCPU_ITC_OFS_OFFSET, r21
	add r16=VMM_VCPU_LAST_ITC_OFFSET,r21
	extr.u r17=r25,6,7		// destination reg number from opcode
	;;
	ld8 r18=[r18]			// r18 = itc_offset
	mov r19=ar.itc			// host ITC
	mov r24=b0
	;;
	add r19=r19,r18			// guest ITC value
	addl r20=@gprel(asm_mov_to_reg),gp
	;;
	st8 [r16] = r19			// update last_itc
	adds r30=kvm_resume_to_guest-asm_mov_to_reg,r20
	shladd r17=r17,4,r20		// table entry = base + reg*16
	;;
	mov b0=r17
	br.sptk.few b0
	;;
END(kvm_asm_mov_from_ar)
/*
 * Special SGI SN2 optimized version of mov_from_ar using the SN2 RTC
 * clock as it's source for emulating the ITC. This version will be
 * copied on top of the original version if the host is determined to
 * be an SN2.
 */
GLOBAL_ENTRY(kvm_asm_mov_from_ar_sn2)
	add r18=VMM_VCPU_ITC_OFS_OFFSET, r21
	// r19 = address just past the VMM area; presumably where the SN2
	// RTC value is mapped -- TODO confirm against the VMM setup code
	movl r19 = (KVM_VMM_BASE+(1<<KVM_VMM_SHIFT))

	add r16=VMM_VCPU_LAST_ITC_OFFSET,r21
	extr.u r17=r25,6,7		// destination reg number from opcode
	mov r24=b0
	;;
	ld8 r18=[r18]			// r18 = itc_offset
	ld8 r19=[r19]			// r19 = RTC-based time source
	addl r20=@gprel(asm_mov_to_reg),gp
	;;
	add r19=r19,r18			// guest ITC value
	shladd r17=r17,4,r20		// table entry = base + reg*16
	;;
	adds r30=kvm_resume_to_guest-asm_mov_to_reg,r20
	st8 [r16] = r19			// update last_itc
	mov b0=r17
	br.sptk.few b0
	;;
END(kvm_asm_mov_from_ar_sn2)
//mov r1=rr[r3]
/*
 * Fast path for "mov rX=rr[rY]": read rY via the asm_mov_from_reg
 * table (result in r19), index vcpu->arch.vrr[] by the region number
 * (top 3 bits of the address), then write the virtual RR value to rX
 * via the asm_mov_to_reg table and resume the guest.
 */
GLOBAL_ENTRY(kvm_asm_mov_from_rr)
#ifndef ACCE_MOV_FROM_RR
	br.many kvm_virtualization_fault_back
#endif
	extr.u r16=r25,20,7		// source reg (r3) from opcode
	extr.u r17=r25,6,7		// destination reg (r1) from opcode
	addl r20=@gprel(asm_mov_from_reg),gp
	;;
	adds r30=kvm_asm_mov_from_rr_back_1-asm_mov_from_reg,r20
	shladd r16=r16,4,r20
	mov r24=b0
	;;
	add r27=VMM_VCPU_VRR0_OFFSET,r21
	mov b0=r16
	br.many b0			// r19 = value of r3 on return
	;;
kvm_asm_mov_from_rr_back_1:
	adds r30=kvm_resume_to_guest-asm_mov_from_reg,r20
	adds r22=asm_mov_to_reg-asm_mov_from_reg,r20
	shr.u r26=r19,61		// region number of the address
	;;
	shladd r17=r17,4,r22
	shladd r27=r26,3,r27		// &vcpu->arch.vrr[region]
	;;
	ld8 r19=[r27]			// virtual RR value to return
	mov b0=r17
	br.many b0
END(kvm_asm_mov_from_rr)
//mov rr[r3]=r2
/*
 * Fast path for "mov rr[r3]=r2": fetch r3 (address, region number in
 * r23) and r2 (new RR value) via the mov_from_reg table, store the
 * value into vcpu->arch.vrr[region], build the physical RR contents
 * (rid<<8 | ps | ve=1, ps clamped to 14) and, depending on region and
 * vcpu mode flags, either save it as a metaphysical RR0 image or load
 * the hardware region register directly.  rr6 is not handled here.
 */
GLOBAL_ENTRY(kvm_asm_mov_to_rr)
#ifndef ACCE_MOV_TO_RR
	br.many kvm_virtualization_fault_back
#endif
	extr.u r16=r25,20,7		// r3 reg number from opcode
	extr.u r17=r25,13,7		// r2 reg number from opcode
	addl r20=@gprel(asm_mov_from_reg),gp
	;;
	adds r30=kvm_asm_mov_to_rr_back_1-asm_mov_from_reg,r20
	shladd r16=r16,4,r20
	mov r22=b0
	;;
	add r27=VMM_VCPU_VRR0_OFFSET,r21
	mov b0=r16
	br.many b0			// r19 = value of r3 on return
	;;
kvm_asm_mov_to_rr_back_1:
	adds r30=kvm_asm_mov_to_rr_back_2-asm_mov_from_reg,r20
	shr.u r23=r19,61		// region number
	shladd r17=r17,4,r20
	;;
	//if rr6, go back
	cmp.eq p6,p0=6,r23
	mov b0=r22
	(p6) br.cond.dpnt.many kvm_virtualization_fault_back
	;;
	mov r28=r19			// keep the rr address
	mov b0=r17
	br.many b0			// r19 = value of r2 on return
kvm_asm_mov_to_rr_back_2:
	adds r30=kvm_resume_to_guest-asm_mov_from_reg,r20
	shladd r27=r23,3,r27		// &vcpu->arch.vrr[region]
	;; // vrr.rid<<4 |0xe
	st8 [r27]=r19			// record guest's virtual RR value
	mov b0=r30
	;;
	extr.u r16=r19,8,26		// vrr.rid
	extr.u r18 =r19,2,6		// vrr.ps
	mov r17=0xe
	;;
	shladd r16 = r16, 4, r17	// rid<<4 | 0xe
	extr.u r19 =r19,0,8		// low byte (ps/ve bits) of vrr
	;;
	shl r16 = r16,8
	;;
	add r19 = r19, r16		// assemble physical rr image
	;;  //set ve 1
	dep r19=-1,r19,0,1
	cmp.lt p6,p0=14,r18		// clamp page size to 14
	;;
	(p6) mov r18=14
	;;
	(p6) dep r19=r18,r19,2,6
	;;
	cmp.eq p6,p0=0,r23		// region 0 or 4: metaphysical RRs
	;;
	cmp.eq.or p6,p0=4,r23
	;;
	adds r16=VMM_VCPU_MODE_FLAGS_OFFSET,r21
	(p6) adds r17=VMM_VCPU_META_SAVED_RR0_OFFSET,r21
	;;
	ld4 r16=[r16]
	cmp.eq p7,p0=r0,r0		// p7 defaults to true
	(p6) shladd r17=r23,1,r17	// slot 0 (region 0) or 8 (region 4)
	;;
	(p6) st8 [r17]=r19		// save metaphysical RR image
	(p6) tbit.nz p6,p7=r16,0	// p7 = not in metaphysical mode
	;;
	(p7) mov rr[r28]=r19		// load hardware RR when safe
	mov r24=r22
	br.many b0
END(kvm_asm_mov_to_rr)
//rsm
/*
 * Fast path for the guest "rsm imm24" instruction: reassemble the
 * imm24 mask from the opcode (r25), clear those bits in vpsr (in the
 * VPD, r16 from VMX_VPS_SYNC_READ) and the corresponding allowed bits
 * in host cr.ipsr.  If the guest turned off psr.dt while running in
 * virtual mode, switch to the metaphysical RR0/RR4 images and mark the
 * mode flag before resuming.
 */
GLOBAL_ENTRY(kvm_asm_rsm)
#ifndef ACCE_RSM
	br.many kvm_virtualization_fault_back
#endif
	VMX_VPS_SYNC_READ
	;;
	extr.u r26=r25,6,21		// imm bits from opcode...
	extr.u r27=r25,31,2
	;;
	extr.u r28=r25,36,1
	dep r26=r27,r26,21,2
	;;
	add r17=VPD_VPSR_START_OFFSET,r16
	add r22=VMM_VCPU_MODE_FLAGS_OFFSET,r21
	//r26 is imm24
	dep r26=r28,r26,23,1
	;;
	ld8 r18=[r17]			// current vpsr
	movl r28=IA64_PSR_IC+IA64_PSR_I+IA64_PSR_DT+IA64_PSR_SI
	ld4 r23=[r22]			// vcpu mode flags
	sub r27=-1,r26			// ~imm24
	mov r24=b0
	;;
	mov r20=cr.ipsr
	or r28=r27,r28			// keep mask for ipsr
	and r19=r18,r27			// new vpsr = vpsr & ~imm24
	;;
	st8 [r17]=r19
	and r20=r20,r28
	/* Comment it out due to short of fp lazy alorgithm support
	adds r27=IA64_VCPU_FP_PSR_OFFSET,r21
	;;
	ld8 r27=[r27]
	;;
	tbit.nz p8,p0= r27,IA64_PSR_DFH_BIT
	;;
	(p8) dep r20=-1,r20,IA64_PSR_DFH_BIT,1
	*/
	;;
	mov cr.ipsr=r20
	tbit.nz p6,p0=r23,0		// already metaphysical?
	;;
	tbit.z.or p6,p0=r26,IA64_PSR_DT_BIT	// or dt not being cleared?
	(p6) br.dptk kvm_resume_to_guest_with_sync
	;;
	// switch to metaphysical mode: load meta RR0/RR4 images
	add r26=VMM_VCPU_META_RR0_OFFSET,r21
	add r27=VMM_VCPU_META_RR0_OFFSET+8,r21
	dep r23=-1,r23,0,1		// set metaphysical flag
	;;
	ld8 r26=[r26]
	ld8 r27=[r27]
	st4 [r22]=r23
	dep.z r28=4,61,3		// address with region number 4
	;;
	mov rr[r0]=r26
	;;
	mov rr[r28]=r27
	;;
	srlz.d
	br.many kvm_resume_to_guest_with_sync
END(kvm_asm_rsm)
//ssm
/*
 * Fast path for the guest "ssm imm24" instruction: reassemble imm24
 * from the opcode, set those bits in vpsr (VPD, r16 from
 * VMX_VPS_SYNC_READ) and in host cr.ipsr.  If dt/rt/it all became set
 * while in metaphysical mode, switch back to the saved virtual-mode
 * RR0/RR4 images.  Finally, if vpsr.i just became set and an external
 * interrupt is pending above the TPR mask (vhpi > mic:mmi), dispatch
 * the virtual external interrupt.
 */
GLOBAL_ENTRY(kvm_asm_ssm)
#ifndef ACCE_SSM
	br.many kvm_virtualization_fault_back
#endif
	VMX_VPS_SYNC_READ
	;;
	extr.u r26=r25,6,21		// imm bits from opcode...
	extr.u r27=r25,31,2
	;;
	extr.u r28=r25,36,1
	dep r26=r27,r26,21,2
	;;  //r26 is imm24
	add r27=VPD_VPSR_START_OFFSET,r16
	dep r26=r28,r26,23,1
	;;  //r19 vpsr
	ld8 r29=[r27]			// old vpsr
	mov r24=b0
	;;
	add r22=VMM_VCPU_MODE_FLAGS_OFFSET,r21
	mov r20=cr.ipsr
	or r19=r29,r26			// new vpsr = vpsr | imm24
	;;
	ld4 r23=[r22]			// vcpu mode flags
	st8 [r27]=r19
	or r20=r20,r26
	;;
	mov cr.ipsr=r20
	movl r28=IA64_PSR_DT+IA64_PSR_RT+IA64_PSR_IT
	;;
	and r19=r28,r19
	tbit.z p6,p0=r23,0		// not in metaphysical mode?
	;;
	cmp.ne.or p6,p0=r28,r19		// or not all of dt/rt/it set?
	(p6) br.dptk kvm_asm_ssm_1
	;;
	// leave metaphysical mode: restore saved virtual RR0/RR4 images
	add r26=VMM_VCPU_META_SAVED_RR0_OFFSET,r21
	add r27=VMM_VCPU_META_SAVED_RR0_OFFSET+8,r21
	dep r23=0,r23,0,1		// clear metaphysical flag
	;;
	ld8 r26=[r26]
	ld8 r27=[r27]
	st4 [r22]=r23
	dep.z r28=4,61,3		// address with region number 4
	;;
	mov rr[r0]=r26
	;;
	mov rr[r28]=r27
	;;
	srlz.d
	;;
kvm_asm_ssm_1:
	tbit.nz p6,p0=r29,IA64_PSR_I_BIT	// i was already set?
	;;
	tbit.z.or p6,p0=r19,IA64_PSR_I_BIT	// or i still clear?
	(p6) br.dptk kvm_resume_to_guest_with_sync
	;;
	add r29=VPD_VTPR_START_OFFSET,r16
	add r30=VPD_VHPI_START_OFFSET,r16
	;;
	ld8 r29=[r29]			// vtpr
	ld8 r30=[r30]			// vhpi
	;;
	extr.u r17=r29,4,4		// vtpr.mic
	extr.u r18=r29,16,1		// vtpr.mmi
	;;
	dep r17=r18,r17,4,1		// mask level = mmi:mic
	;;
	cmp.gt p6,p0=r30,r17		// pending interrupt above mask?
	(p6) br.dpnt.few kvm_asm_dispatch_vexirq
	br.many kvm_resume_to_guest_with_sync
END(kvm_asm_ssm)
//mov psr.l=r2
/*
 * Fast path for "mov psr.l=r2": fetch r2 via the mov_from_reg table
 * (r19), splice its low 32 bits into vpsr (keeping vpsr's upper half),
 * and switch between metaphysical and virtual mode when the dt/rt/it
 * group changes: all set -> restore saved virtual RRs; any cleared ->
 * load metaphysical RRs.  Host cr.ipsr's low 32 bits are rebuilt from
 * the new value plus the always-on bits.  Finally, if vpsr.i went from
 * 0 to 1 and vhpi exceeds the TPR mask, dispatch the virtual external
 * interrupt.
 */
GLOBAL_ENTRY(kvm_asm_mov_to_psr)
#ifndef ACCE_MOV_TO_PSR
	br.many kvm_virtualization_fault_back
#endif
	VMX_VPS_SYNC_READ
	;;
	extr.u r26=r25,13,7		//r2
	addl r20=@gprel(asm_mov_from_reg),gp
	;;
	adds r30=kvm_asm_mov_to_psr_back-asm_mov_from_reg,r20
	shladd r26=r26,4,r20
	mov r24=b0
	;;
	add r27=VPD_VPSR_START_OFFSET,r16
	mov b0=r26
	br.many b0			// r19 = value of r2 on return
	;;
kvm_asm_mov_to_psr_back:
	ld8 r17=[r27]			// old vpsr
	add r22=VMM_VCPU_MODE_FLAGS_OFFSET,r21
	dep r19=0,r19,32,32		// keep only psr.l (low 32 bits)
	;;
	ld4 r23=[r22]			// vcpu mode flags
	dep r18=0,r17,0,32		// old vpsr upper half
	;;
	add r30=r18,r19			// new vpsr
	movl r28=IA64_PSR_DT+IA64_PSR_RT+IA64_PSR_IT
	;;
	st8 [r27]=r30
	and r27=r28,r30			// dt/rt/it in new vpsr
	and r29=r28,r17			// dt/rt/it in old vpsr
	;;
	cmp.eq p5,p0=r29,r27		// unchanged -> no mode switch
	cmp.eq p6,p7=r28,r27		// p6: all set; p7: some cleared
	(p5) br.many kvm_asm_mov_to_psr_1
	;;
	//virtual to physical
	(p7) add r26=VMM_VCPU_META_RR0_OFFSET,r21
	(p7) add r27=VMM_VCPU_META_RR0_OFFSET+8,r21
	(p7) dep r23=-1,r23,0,1
	;;
	//physical to virtual
	(p6) add r26=VMM_VCPU_META_SAVED_RR0_OFFSET,r21
	(p6) add r27=VMM_VCPU_META_SAVED_RR0_OFFSET+8,r21
	(p6) dep r23=0,r23,0,1
	;;
	ld8 r26=[r26]
	ld8 r27=[r27]
	st4 [r22]=r23
	dep.z r28=4,61,3		// address with region number 4
	;;
	mov rr[r0]=r26
	;;
	mov rr[r28]=r27
	;;
	srlz.d
	;;
kvm_asm_mov_to_psr_1:
	mov r20=cr.ipsr
	movl r28=IA64_PSR_IC+IA64_PSR_I+IA64_PSR_DT+IA64_PSR_SI+IA64_PSR_RT
	;;
	or r19=r19,r28			// requested low bits + forced-on bits
	dep r20=0,r20,0,32
	;;
	add r20=r19,r20			// rebuild host ipsr low half
	mov b0=r24
	;;
	/* Comment it out due to short of fp lazy algorithm support
	adds r27=IA64_VCPU_FP_PSR_OFFSET,r21
	;;
	ld8 r27=[r27]
	;;
	tbit.nz p8,p0=r27,IA64_PSR_DFH_BIT
	;;
	(p8) dep r20=-1,r20,IA64_PSR_DFH_BIT,1
	;;
	*/
	mov cr.ipsr=r20
	cmp.ne p6,p0=r0,r0		// p6 = false
	;;
	tbit.nz.or p6,p0=r17,IA64_PSR_I_BIT	// i was already set?
	tbit.z.or p6,p0=r30,IA64_PSR_I_BIT	// or i still clear?
	(p6) br.dpnt.few kvm_resume_to_guest_with_sync
	;;
	add r29=VPD_VTPR_START_OFFSET,r16
	add r30=VPD_VHPI_START_OFFSET,r16
	;;
	ld8 r29=[r29]			// vtpr
	ld8 r30=[r30]			// vhpi
	;;
	extr.u r17=r29,4,4		// vtpr.mic
	extr.u r18=r29,16,1		// vtpr.mmi
	;;
	dep r17=r18,r17,4,1		// mask level = mmi:mic
	;;
	cmp.gt p6,p0=r30,r17		// pending interrupt above mask?
	(p6) br.dpnt.few kvm_asm_dispatch_vexirq
	br.many kvm_resume_to_guest_with_sync
END(kvm_asm_mov_to_psr)
/*
 * Dispatch a virtual external interrupt: sync guest state back to the
 * VPD, advance the guest instruction pointer past the faulting
 * instruction (cr.ipsr.ri slot, bumping cr.iip by one bundle when the
 * last slot was reached) and branch to the C handler
 * kvm_dispatch_vexirq with r19 = 37 and r30 = 1 -- presumably the
 * vector/flag arguments expected by the handler; confirm at the callee.
 */
ENTRY(kvm_asm_dispatch_vexirq)
//increment iip
	mov r17 = b0
	mov r18 = r31
{.mii
	add r25=VMM_VPD_BASE_OFFSET,r21
	nop 0x0
	mov r24 = ip
	;;
}
{.mmb
	add r24 = 0x20, r24		// return to the bundle after the branch
	ld8 r25 = [r25]			// r25 = vpd
	br.sptk.many kvm_vps_sync_write
}
	mov b0 =r17
	mov r16=cr.ipsr
	mov r31 = r18
	mov r19 = 37
	;;
	extr.u r17=r16,IA64_PSR_RI_BIT,2	// current slot
	tbit.nz p6,p7=r16,IA64_PSR_RI_BIT+1	// p6: slot == 2 (last)
	;;
	(p6) mov r18=cr.iip
	(p6) mov r17=r0			// wrap slot to 0...
	(p7) add r17=1,r17		// ...else next slot
	;;
	(p6) add r18=0x10,r18		// and advance iip one bundle
	dep r16=r17,r16,IA64_PSR_RI_BIT,2
	;;
	(p6) mov cr.iip=r18
	mov cr.ipsr=r16
	mov r30 =1
	br.many kvm_dispatch_vexirq
END(kvm_asm_dispatch_vexirq)
//thash
// TODO: add support when pta.vf = 1
/*
 * Fast path for the guest "thash" instruction (long-format VHPT hash):
 * fetch the source address via the mov_from_reg table, then compute
 *   pval = (vaddr & VRN_MASK)
 *        | (((pta << 3) >> (pta.size + 3)) << pta.size)
 *        | (((vaddr >> rr.ps) << 3) & ((1UL << pta.size) - 1))
 * using the guest's vpta and vrr[region], and write the result to the
 * destination register.  Falls back to the C emulator when pta.vf = 1.
 */
GLOBAL_ENTRY(kvm_asm_thash)
#ifndef ACCE_THASH
	br.many kvm_virtualization_fault_back
#endif
	extr.u r17=r25,20,7		// get r3 from opcode in r25
	extr.u r18=r25,6,7		// get r1 from opcode in r25
	addl r20=@gprel(asm_mov_from_reg),gp
	;;
	adds r30=kvm_asm_thash_back1-asm_mov_from_reg,r20
	shladd r17=r17,4,r20		// get addr of MOVE_FROM_REG(r17)
	adds r16=VMM_VPD_BASE_OFFSET,r21	// get vcpu.arch.priveregs
	;;
	mov r24=b0
	;;
	ld8 r16=[r16]			// get VPD addr
	mov b0=r17
	br.many b0			// r19 return value
	;;
kvm_asm_thash_back1:
	shr.u r23=r19,61		// get RR number
	adds r28=VMM_VCPU_VRR0_OFFSET,r21	// get vcpu->arch.vrr[0]'s addr
	adds r16=VMM_VPD_VPTA_OFFSET,r16	// get vpta
	;;
	shladd r27=r23,3,r28		// get vcpu->arch.vrr[r23]'s addr
	ld8 r17=[r16]			// get PTA
	mov r26=1
	;;
	extr.u r29=r17,2,6		// get pta.size
	ld8 r28=[r27]			// get vcpu->arch.vrr[r23]'s value
	;;
	mov b0=r24
	//Fallback to C if pta.vf is set
	tbit.nz p6,p0=r17, 8
	;;
	(p6) mov r24=EVENT_THASH
	(p6) br.cond.dpnt.many kvm_virtualization_fault_back
	extr.u r28=r28,2,6		// get rr.ps
	shl r22=r26,r29			// 1UL << pta.size
	;;
	shr.u r23=r19,r28		// vaddr >> rr.ps
	adds r26=3,r29			// pta.size + 3
	shl r27=r17,3			// pta << 3
	;;
	shl r23=r23,3			// (vaddr >> rr.ps) << 3
	shr.u r27=r27,r26		// (pta << 3) >> (pta.size+3)
	movl r16=7<<61
	;;
	adds r22=-1,r22			// (1UL << pta.size) - 1
	shl r27=r27,r29			// ((pta<<3)>>(pta.size+3))<<pta.size
	and r19=r19,r16			// vaddr & VRN_MASK
	;;
	and r22=r22,r23			// vhpt_offset
	or r19=r19,r27	// (vadr&VRN_MASK) |(((pta<<3)>>(pta.size + 3))<<pta.size)
	adds r26=asm_mov_to_reg-asm_mov_from_reg,r20
	;;
	or r19=r19,r22			// calc pval
	shladd r17=r18,4,r26
	adds r30=kvm_resume_to_guest-asm_mov_from_reg,r20
	;;
	mov b0=r17
	br.many b0
END(kvm_asm_thash)
/*
 * Table slot for "mov r0=...": r0 is read-only, so the slot is a
 * single bundle of nops (same 16-byte size as every other MOV_TO_REG
 * entry, keeping the jump-table arithmetic uniform).
 */
#define MOV_TO_REG0	\
{;			\
	nop.b 0x0;		\
	nop.b 0x0;		\
	nop.b 0x0;		\
	;;			\
};
/*
 * One 16-byte jump-table slot: write r19 into r<n>, then continue at
 * the address the caller placed in r30.
 */
#define MOV_TO_REG(n)	\
{;			\
	mov r##n##=r19;	\
	mov b0=r30;	\
	br.sptk.many b0;	\
	;;			\
};
/*
 * One 16-byte jump-table slot: read r<n> into r19, then continue at
 * the address the caller placed in r30.
 */
#define MOV_FROM_REG(n)	\
{;			\
	mov r19=r##n##;	\
	mov b0=r30;	\
	br.sptk.many b0;	\
	;;			\
};
/*
 * Write r19 into the bank-1 image of banked register r<n> (16-31):
 * stash r2 (not banked), copy the value through r2 across a bsw.1 /
 * bsw.0 pair so the store lands in the other register bank, then
 * restore r2 and continue at r30.
 */
#define MOV_TO_BANK0_REG(n)			\
ENTRY_MIN_ALIGN(asm_mov_to_bank0_reg##n##);	\
{;						\
	mov r26=r2;				\
	mov r2=r19;				\
	bsw.1;					\
	;;					\
};						\
{;						\
	mov r##n##=r2;				\
	nop.b 0x0;				\
	bsw.0;					\
	;;					\
};						\
{;						\
	mov r2=r26;				\
	mov b0=r30;				\
	br.sptk.many b0;			\
	;;					\
};						\
END(asm_mov_to_bank0_reg##n##)
/*
 * Read the bank-1 image of banked register r<n> (16-31) into r19:
 * stash r2 (not banked), switch to bank 1 to copy r<n> into r2,
 * switch back, move it to r19, restore r2 and continue at r30.
 */
#define MOV_FROM_BANK0_REG(n)			\
ENTRY_MIN_ALIGN(asm_mov_from_bank0_reg##n##);	\
{;						\
	mov r26=r2;				\
	nop.b 0x0;				\
	bsw.1;					\
	;;					\
};						\
{;						\
	mov r2=r##n##;				\
	nop.b 0x0;				\
	bsw.0;					\
	;;					\
};						\
{;						\
	mov r19=r2;				\
	mov r2=r26;				\
	mov b0=r30;				\
};						\
{;						\
	nop.b 0x0;				\
	nop.b 0x0;				\
	br.sptk.many b0;			\
	;;					\
};						\
END(asm_mov_from_bank0_reg##n##)
/*
 * 16-byte jump-table slot that forwards banked registers (16-31) to
 * the out-of-line asm_mov_to_bank0_reg<n> handler.
 */
#define JMP_TO_MOV_TO_BANK0_REG(n)		\
{;						\
	nop.b 0x0;				\
	nop.b 0x0;				\
	br.sptk.many asm_mov_to_bank0_reg##n##;	\
	;;					\
}
/*
 * 16-byte jump-table slot that forwards banked registers (16-31) to
 * the out-of-line asm_mov_from_bank0_reg<n> handler.
 */
#define JMP_TO_MOV_FROM_BANK0_REG(n)		\
{;						\
	nop.b 0x0;				\
	nop.b 0x0;				\
	br.sptk.many asm_mov_from_bank0_reg##n##;	\
	;;					\
}
/* Out-of-line readers for the bank-1 images of r16-r31, used by the
 * JMP_TO_MOV_FROM_BANK0_REG slots in the asm_mov_from_reg table. */
MOV_FROM_BANK0_REG(16)
MOV_FROM_BANK0_REG(17)
MOV_FROM_BANK0_REG(18)
MOV_FROM_BANK0_REG(19)
MOV_FROM_BANK0_REG(20)
MOV_FROM_BANK0_REG(21)
MOV_FROM_BANK0_REG(22)
MOV_FROM_BANK0_REG(23)
MOV_FROM_BANK0_REG(24)
MOV_FROM_BANK0_REG(25)
MOV_FROM_BANK0_REG(26)
MOV_FROM_BANK0_REG(27)
MOV_FROM_BANK0_REG(28)
MOV_FROM_BANK0_REG(29)
MOV_FROM_BANK0_REG(30)
MOV_FROM_BANK0_REG(31)
//mov from reg table
/*
 * Jump table: slot i (one 16-byte bundle) copies r<i> into r19 and
 * branches to the continuation in r30.  Callers compute the entry as
 * asm_mov_from_reg + reg * 16 (see the shladd ...,4,... sites above).
 * Banked registers r16-r31 are forwarded to asm_mov_from_bank0_reg<i>
 * so the guest's bank-1 image is read.
 */
ENTRY(asm_mov_from_reg)
	MOV_FROM_REG(0)
	MOV_FROM_REG(1)
	MOV_FROM_REG(2)
	MOV_FROM_REG(3)
	MOV_FROM_REG(4)
	MOV_FROM_REG(5)
	MOV_FROM_REG(6)
	MOV_FROM_REG(7)
	MOV_FROM_REG(8)
	MOV_FROM_REG(9)
	MOV_FROM_REG(10)
	MOV_FROM_REG(11)
	MOV_FROM_REG(12)
	MOV_FROM_REG(13)
	MOV_FROM_REG(14)
	MOV_FROM_REG(15)
	JMP_TO_MOV_FROM_BANK0_REG(16)
	JMP_TO_MOV_FROM_BANK0_REG(17)
	JMP_TO_MOV_FROM_BANK0_REG(18)
	JMP_TO_MOV_FROM_BANK0_REG(19)
	JMP_TO_MOV_FROM_BANK0_REG(20)
	JMP_TO_MOV_FROM_BANK0_REG(21)
	JMP_TO_MOV_FROM_BANK0_REG(22)
	JMP_TO_MOV_FROM_BANK0_REG(23)
	JMP_TO_MOV_FROM_BANK0_REG(24)
	JMP_TO_MOV_FROM_BANK0_REG(25)
	JMP_TO_MOV_FROM_BANK0_REG(26)
	JMP_TO_MOV_FROM_BANK0_REG(27)
	JMP_TO_MOV_FROM_BANK0_REG(28)
	JMP_TO_MOV_FROM_BANK0_REG(29)
	JMP_TO_MOV_FROM_BANK0_REG(30)
	JMP_TO_MOV_FROM_BANK0_REG(31)
	MOV_FROM_REG(32)
	MOV_FROM_REG(33)
	MOV_FROM_REG(34)
	MOV_FROM_REG(35)
	MOV_FROM_REG(36)
	MOV_FROM_REG(37)
	MOV_FROM_REG(38)
	MOV_FROM_REG(39)
	MOV_FROM_REG(40)
	MOV_FROM_REG(41)
	MOV_FROM_REG(42)
	MOV_FROM_REG(43)
	MOV_FROM_REG(44)
	MOV_FROM_REG(45)
	MOV_FROM_REG(46)
	MOV_FROM_REG(47)
	MOV_FROM_REG(48)
	MOV_FROM_REG(49)
	MOV_FROM_REG(50)
	MOV_FROM_REG(51)
	MOV_FROM_REG(52)
	MOV_FROM_REG(53)
	MOV_FROM_REG(54)
	MOV_FROM_REG(55)
	MOV_FROM_REG(56)
	MOV_FROM_REG(57)
	MOV_FROM_REG(58)
	MOV_FROM_REG(59)
	MOV_FROM_REG(60)
	MOV_FROM_REG(61)
	MOV_FROM_REG(62)
	MOV_FROM_REG(63)
	MOV_FROM_REG(64)
	MOV_FROM_REG(65)
	MOV_FROM_REG(66)
	MOV_FROM_REG(67)
	MOV_FROM_REG(68)
	MOV_FROM_REG(69)
	MOV_FROM_REG(70)
	MOV_FROM_REG(71)
	MOV_FROM_REG(72)
	MOV_FROM_REG(73)
	MOV_FROM_REG(74)
	MOV_FROM_REG(75)
	MOV_FROM_REG(76)
	MOV_FROM_REG(77)
	MOV_FROM_REG(78)
	MOV_FROM_REG(79)
	MOV_FROM_REG(80)
	MOV_FROM_REG(81)
	MOV_FROM_REG(82)
	MOV_FROM_REG(83)
	MOV_FROM_REG(84)
	MOV_FROM_REG(85)
	MOV_FROM_REG(86)
	MOV_FROM_REG(87)
	MOV_FROM_REG(88)
	MOV_FROM_REG(89)
	MOV_FROM_REG(90)
	MOV_FROM_REG(91)
	MOV_FROM_REG(92)
	MOV_FROM_REG(93)
	MOV_FROM_REG(94)
	MOV_FROM_REG(95)
	MOV_FROM_REG(96)
	MOV_FROM_REG(97)
	MOV_FROM_REG(98)
	MOV_FROM_REG(99)
	MOV_FROM_REG(100)
	MOV_FROM_REG(101)
	MOV_FROM_REG(102)
	MOV_FROM_REG(103)
	MOV_FROM_REG(104)
	MOV_FROM_REG(105)
	MOV_FROM_REG(106)
	MOV_FROM_REG(107)
	MOV_FROM_REG(108)
	MOV_FROM_REG(109)
	MOV_FROM_REG(110)
	MOV_FROM_REG(111)
	MOV_FROM_REG(112)
	MOV_FROM_REG(113)
	MOV_FROM_REG(114)
	MOV_FROM_REG(115)
	MOV_FROM_REG(116)
	MOV_FROM_REG(117)
	MOV_FROM_REG(118)
	MOV_FROM_REG(119)
	MOV_FROM_REG(120)
	MOV_FROM_REG(121)
	MOV_FROM_REG(122)
	MOV_FROM_REG(123)
	MOV_FROM_REG(124)
	MOV_FROM_REG(125)
	MOV_FROM_REG(126)
	MOV_FROM_REG(127)
END(asm_mov_from_reg)
/* must be in bank 0
 * parameter:
 * r31: pr
 * r24: b0
 *
 * Syncs guest state back to the VPD (kvm_vps_sync_write, return
 * address = ip + 0x20, i.e. the bundle after the branch) while
 * preserving r24/r31 in r16/r17, then falls through to
 * kvm_resume_to_guest.
 */
ENTRY(kvm_resume_to_guest_with_sync)
	adds r19=VMM_VPD_BASE_OFFSET,r21
	mov r16 = r31
	mov r17 = r24
	;;
{.mii
	ld8 r25 =[r19]			// r25 = vpd
	nop 0x0
	mov r24 = ip
	;;
}
{.mmb
	add r24 =0x20, r24
	nop 0x0
	br.sptk.many kvm_vps_sync_write
}
	mov r31 = r16
	mov r24 =r17
	;;
	br.sptk.many kvm_resume_to_guest
END(kvm_resume_to_guest_with_sync)
/*
 * Return to the guest after a handled virtualization fault:
 * restore the saved gp, advance cr.iip/ipsr.ri past the emulated
 * instruction (bump iip one bundle when the last slot was reached),
 * restore predicates from r31 and tail-call PAL_VPS_RESUME_NORMAL
 * when vpsr.ic is set, or PAL_VPS_RESUME_HANDLER (with the CFLE bit
 * derived from isr.ir deposited into bit 63 of r26) when it is not.
 */
ENTRY(kvm_resume_to_guest)
	adds r16 = VMM_VCPU_SAVED_GP_OFFSET,r21
	;;
	ld8 r1 =[r16]			// restore gp
	adds r20 = VMM_VCPU_VSA_BASE_OFFSET,r21
	;;
	mov r16=cr.ipsr
	;;
	ld8 r20 = [r20]			// vsa_base
	adds r19=VMM_VPD_BASE_OFFSET,r21
	;;
	ld8 r25=[r19]			// r25 = vpd
	extr.u r17=r16,IA64_PSR_RI_BIT,2	// current slot
	tbit.nz p6,p7=r16,IA64_PSR_RI_BIT+1	// p6: slot == 2 (last)
	;;
	(p6) mov r18=cr.iip
	(p6) mov r17=r0			// wrap slot to 0...
	;;
	(p6) add r18=0x10,r18		// ...and advance iip one bundle
	(p7) add r17=1,r17		// else next slot
	;;
	(p6) mov cr.iip=r18
	dep r16=r17,r16,IA64_PSR_RI_BIT,2
	;;
	mov cr.ipsr=r16
	adds r19= VPD_VPSR_START_OFFSET,r25
	add r28=PAL_VPS_RESUME_NORMAL,r20
	add r29=PAL_VPS_RESUME_HANDLER,r20
	;;
	ld8 r19=[r19]			// vpsr
	mov b0=r29			// default: RESUME_HANDLER
	mov r27=cr.isr
	;;
	tbit.z p6,p7 = r19,IA64_PSR_IC_BIT	// p7=vpsr.ic
	shr r27=r27,IA64_ISR_IR_BIT		// isolate isr.ir in bit 0
	;;
	(p6) ld8 r26=[r25]		// first word of vpd
	(p7) mov b0=r28			// vpsr.ic set: RESUME_NORMAL
	;;
	(p6) dep r26=r27,r26,63,1	// bit 63 of r26: enable CFLE
	mov pr=r31,-2			// restore predicates (all but p0)
	br.sptk.many b0			// call pal service
	;;
END(kvm_resume_to_guest)
/* Out-of-line writers for the bank-1 images of r16-r31, used by the
 * JMP_TO_MOV_TO_BANK0_REG slots in the asm_mov_to_reg table. */
MOV_TO_BANK0_REG(16)
MOV_TO_BANK0_REG(17)
MOV_TO_BANK0_REG(18)
MOV_TO_BANK0_REG(19)
MOV_TO_BANK0_REG(20)
MOV_TO_BANK0_REG(21)
MOV_TO_BANK0_REG(22)
MOV_TO_BANK0_REG(23)
MOV_TO_BANK0_REG(24)
MOV_TO_BANK0_REG(25)
MOV_TO_BANK0_REG(26)
MOV_TO_BANK0_REG(27)
MOV_TO_BANK0_REG(28)
MOV_TO_BANK0_REG(29)
MOV_TO_BANK0_REG(30)
MOV_TO_BANK0_REG(31)
//mov to reg table
/*
 * Jump table: slot i (one 16-byte bundle) writes r19 into r<i> and
 * branches to the continuation in r30.  Callers compute the entry as
 * asm_mov_to_reg + reg * 16.  Slot 0 is a nop bundle (r0 is read-only)
 * and banked registers r16-r31 are forwarded to
 * asm_mov_to_bank0_reg<i> so the guest's bank-1 image is written.
 */
ENTRY(asm_mov_to_reg)
	MOV_TO_REG0
	MOV_TO_REG(1)
	MOV_TO_REG(2)
	MOV_TO_REG(3)
	MOV_TO_REG(4)
	MOV_TO_REG(5)
	MOV_TO_REG(6)
	MOV_TO_REG(7)
	MOV_TO_REG(8)
	MOV_TO_REG(9)
	MOV_TO_REG(10)
	MOV_TO_REG(11)
	MOV_TO_REG(12)
	MOV_TO_REG(13)
	MOV_TO_REG(14)
	MOV_TO_REG(15)
	JMP_TO_MOV_TO_BANK0_REG(16)
	JMP_TO_MOV_TO_BANK0_REG(17)
	JMP_TO_MOV_TO_BANK0_REG(18)
	JMP_TO_MOV_TO_BANK0_REG(19)
	JMP_TO_MOV_TO_BANK0_REG(20)
	JMP_TO_MOV_TO_BANK0_REG(21)
	JMP_TO_MOV_TO_BANK0_REG(22)
	JMP_TO_MOV_TO_BANK0_REG(23)
	JMP_TO_MOV_TO_BANK0_REG(24)
	JMP_TO_MOV_TO_BANK0_REG(25)
	JMP_TO_MOV_TO_BANK0_REG(26)
	JMP_TO_MOV_TO_BANK0_REG(27)
	JMP_TO_MOV_TO_BANK0_REG(28)
	JMP_TO_MOV_TO_BANK0_REG(29)
	JMP_TO_MOV_TO_BANK0_REG(30)
	JMP_TO_MOV_TO_BANK0_REG(31)
	MOV_TO_REG(32)
	MOV_TO_REG(33)
	MOV_TO_REG(34)
	MOV_TO_REG(35)
	MOV_TO_REG(36)
	MOV_TO_REG(37)
	MOV_TO_REG(38)
	MOV_TO_REG(39)
	MOV_TO_REG(40)
	MOV_TO_REG(41)
	MOV_TO_REG(42)
	MOV_TO_REG(43)
	MOV_TO_REG(44)
	MOV_TO_REG(45)
	MOV_TO_REG(46)
	MOV_TO_REG(47)
	MOV_TO_REG(48)
	MOV_TO_REG(49)
	MOV_TO_REG(50)
	MOV_TO_REG(51)
	MOV_TO_REG(52)
	MOV_TO_REG(53)
	MOV_TO_REG(54)
	MOV_TO_REG(55)
	MOV_TO_REG(56)
	MOV_TO_REG(57)
	MOV_TO_REG(58)
	MOV_TO_REG(59)
	MOV_TO_REG(60)
	MOV_TO_REG(61)
	MOV_TO_REG(62)
	MOV_TO_REG(63)
	MOV_TO_REG(64)
	MOV_TO_REG(65)
	MOV_TO_REG(66)
	MOV_TO_REG(67)
	MOV_TO_REG(68)
	MOV_TO_REG(69)
	MOV_TO_REG(70)
	MOV_TO_REG(71)
	MOV_TO_REG(72)
	MOV_TO_REG(73)
	MOV_TO_REG(74)
	MOV_TO_REG(75)
	MOV_TO_REG(76)
	MOV_TO_REG(77)
	MOV_TO_REG(78)
	MOV_TO_REG(79)
	MOV_TO_REG(80)
	MOV_TO_REG(81)
	MOV_TO_REG(82)
	MOV_TO_REG(83)
	MOV_TO_REG(84)
	MOV_TO_REG(85)
	MOV_TO_REG(86)
	MOV_TO_REG(87)
	MOV_TO_REG(88)
	MOV_TO_REG(89)
	MOV_TO_REG(90)
	MOV_TO_REG(91)
	MOV_TO_REG(92)
	MOV_TO_REG(93)
	MOV_TO_REG(94)
	MOV_TO_REG(95)
	MOV_TO_REG(96)
	MOV_TO_REG(97)
	MOV_TO_REG(98)
	MOV_TO_REG(99)
	MOV_TO_REG(100)
	MOV_TO_REG(101)
	MOV_TO_REG(102)
	MOV_TO_REG(103)
	MOV_TO_REG(104)
	MOV_TO_REG(105)
	MOV_TO_REG(106)
	MOV_TO_REG(107)
	MOV_TO_REG(108)
	MOV_TO_REG(109)
	MOV_TO_REG(110)
	MOV_TO_REG(111)
	MOV_TO_REG(112)
	MOV_TO_REG(113)
	MOV_TO_REG(114)
	MOV_TO_REG(115)
	MOV_TO_REG(116)
	MOV_TO_REG(117)
	MOV_TO_REG(118)
	MOV_TO_REG(119)
	MOV_TO_REG(120)
	MOV_TO_REG(121)
	MOV_TO_REG(122)
	MOV_TO_REG(123)
	MOV_TO_REG(124)
	MOV_TO_REG(125)
	MOV_TO_REG(126)
	MOV_TO_REG(127)
END(asm_mov_to_reg)