/*
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License, version 2, as
 * published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA.
 *
 * Copyright IBM Corp. 2007
 *
 * Authors: Hollis Blanchard <hollisb@us.ibm.com>
 */
#include <asm/ppc_asm.h>
#include <asm/kvm_asm.h>
#include <asm/reg.h>
#include <asm/mmu-44x.h>
#include <asm/page.h>
#include <asm/asm-offsets.h>

/* MSR bits we force on when entering the guest via SRR1/rfi. */
#define KVMPPC_MSR_MASK (MSR_CE|MSR_EE|MSR_PR|MSR_DE|MSR_ME|MSR_IS|MSR_DS)

/* Byte offset of guest GPR n within the vcpu's GPR save area (32-bit regs). */
#define VCPU_GPR(n)     (VCPU_GPRS + (n * 4))

/* The host stack layout: */
#define HOST_R1         0 /* Implied by stwu. */
#define HOST_CALLEE_LR  4
#define HOST_RUN        8
/* r2 is special: it holds 'current', and it is made nonvolatile in the
 * kernel with the -ffixed-r2 gcc option. */
#define HOST_R2         12
#define HOST_NV_GPRS    16
#define HOST_NV_GPR(n)  (HOST_NV_GPRS + ((n - 14) * 4))
#define HOST_MIN_STACK_SIZE (HOST_NV_GPR(31) + 4)
#define HOST_STACK_SIZE (((HOST_MIN_STACK_SIZE + 15) / 16) * 16) /* Align. */
#define HOST_STACK_LR   (HOST_STACK_SIZE + 4) /* In caller stack frame. */

/* Exit types for which the faulting guest instruction must be fetched
 * so it can be emulated. */
#define NEED_INST_MASK ((1<<BOOKE_INTERRUPT_PROGRAM) | \
                        (1<<BOOKE_INTERRUPT_DTLB_MISS) | \
                        (1<<BOOKE_INTERRUPT_DEBUG))

/* Exit types for which DEAR must be captured before the host clobbers it. */
#define NEED_DEAR_MASK ((1<<BOOKE_INTERRUPT_DATA_STORAGE) | \
                        (1<<BOOKE_INTERRUPT_DTLB_MISS))

/* Exit types for which ESR must be captured before the host clobbers it. */
#define NEED_ESR_MASK  ((1<<BOOKE_INTERRUPT_DATA_STORAGE) | \
                        (1<<BOOKE_INTERRUPT_INST_STORAGE) | \
                        (1<<BOOKE_INTERRUPT_PROGRAM) | \
                        (1<<BOOKE_INTERRUPT_DTLB_MISS))
/* Per-exception entry stub, instantiated once for each IVOR below.
 * On entry (guest context): SPRG1 holds the vcpu pointer (stashed at
 * guest entry).  We free up r4/r5/r6, record which exception fired,
 * and branch via CTR to the common exit path — a direct branch can't
 * reach kvmppc_resume_host from the exception vectors. */
.macro KVM_HANDLER ivor_nr
_GLOBAL(kvmppc_handler_\ivor_nr)
	/* Get pointer to vcpu and record exit number. */
	mtspr	SPRN_SPRG0, r4          /* Guest r4 parked in SPRG0. */
	mfspr	r4, SPRN_SPRG1          /* r4 = vcpu. */
	stw	r5, VCPU_GPR(r5)(r4)
	stw	r6, VCPU_GPR(r6)(r4)
	mfctr	r5
	lis	r6, kvmppc_resume_host@h
	stw	r5, VCPU_CTR(r4)        /* Guest CTR saved before we reuse it. */
	li	r5, \ivor_nr            /* r5 = KVM exit number. */
	ori	r6, r6, kvmppc_resume_host@l
	mtctr	r6
	bctr
.endm
/* The stub table copied to the guest's IVPR area; all stubs are the
 * same length, so kvmppc_handler_len lets C code index into the copy. */
_GLOBAL(kvmppc_handlers_start)
KVM_HANDLER BOOKE_INTERRUPT_CRITICAL
KVM_HANDLER BOOKE_INTERRUPT_MACHINE_CHECK
KVM_HANDLER BOOKE_INTERRUPT_DATA_STORAGE
KVM_HANDLER BOOKE_INTERRUPT_INST_STORAGE
KVM_HANDLER BOOKE_INTERRUPT_EXTERNAL
KVM_HANDLER BOOKE_INTERRUPT_ALIGNMENT
KVM_HANDLER BOOKE_INTERRUPT_PROGRAM
KVM_HANDLER BOOKE_INTERRUPT_FP_UNAVAIL
KVM_HANDLER BOOKE_INTERRUPT_SYSCALL
KVM_HANDLER BOOKE_INTERRUPT_AP_UNAVAIL
KVM_HANDLER BOOKE_INTERRUPT_DECREMENTER
KVM_HANDLER BOOKE_INTERRUPT_FIT
KVM_HANDLER BOOKE_INTERRUPT_WATCHDOG
KVM_HANDLER BOOKE_INTERRUPT_DTLB_MISS
KVM_HANDLER BOOKE_INTERRUPT_ITLB_MISS
KVM_HANDLER BOOKE_INTERRUPT_DEBUG

/* Size of one handler stub (stubs 0 and 1 are adjacent and identical). */
_GLOBAL(kvmppc_handler_len)
.long kvmppc_handler_1 - kvmppc_handler_0
/* Registers:
 *  SPRG0: guest r4
 *  r4: vcpu pointer
 *  r5: KVM exit number
 */
_GLOBAL(kvmppc_resume_host)
	stw	r3, VCPU_GPR(r3)(r4)
	mfcr	r3
	stw	r3, VCPU_CR(r4)
	stw	r7, VCPU_GPR(r7)(r4)
	stw	r8, VCPU_GPR(r8)(r4)
	stw	r9, VCPU_GPR(r9)(r4)

	/* r6 = (1 << exit number), for testing against the NEED_* masks. */
	li	r6, 1
	slw	r6, r6, r5

	/* Save the faulting instruction and all GPRs for emulation. */
	andi.	r7, r6, NEED_INST_MASK
	beq	..skip_inst_copy
	mfspr	r9, SPRN_SRR0           /* Guest PC of the faulting insn. */
	mfmsr	r8
	/* Temporarily set MSR[DS] so the load below uses the guest's data
	 * address space translation when fetching the instruction. */
	ori	r7, r8, MSR_DS
	mtmsr	r7
	isync
	lwz	r9, 0(r9)
	mtmsr	r8
	isync
	stw	r9, VCPU_LAST_INST(r4)

	/* Emulation may need any GPR, so save the nonvolatiles too. */
	stw	r15, VCPU_GPR(r15)(r4)
	stw	r16, VCPU_GPR(r16)(r4)
	stw	r17, VCPU_GPR(r17)(r4)
	stw	r18, VCPU_GPR(r18)(r4)
	stw	r19, VCPU_GPR(r19)(r4)
	stw	r20, VCPU_GPR(r20)(r4)
	stw	r21, VCPU_GPR(r21)(r4)
	stw	r22, VCPU_GPR(r22)(r4)
	stw	r23, VCPU_GPR(r23)(r4)
	stw	r24, VCPU_GPR(r24)(r4)
	stw	r25, VCPU_GPR(r25)(r4)
	stw	r26, VCPU_GPR(r26)(r4)
	stw	r27, VCPU_GPR(r27)(r4)
	stw	r28, VCPU_GPR(r28)(r4)
	stw	r29, VCPU_GPR(r29)(r4)
	stw	r30, VCPU_GPR(r30)(r4)
	stw	r31, VCPU_GPR(r31)(r4)
..skip_inst_copy:

	/* Also grab DEAR and ESR before the host can clobber them. */
	andi.	r7, r6, NEED_DEAR_MASK
	beq	..skip_dear
	mfspr	r9, SPRN_DEAR
	stw	r9, VCPU_FAULT_DEAR(r4)
..skip_dear:

	andi.	r7, r6, NEED_ESR_MASK
	beq	..skip_esr
	mfspr	r9, SPRN_ESR
	stw	r9, VCPU_FAULT_ESR(r4)
..skip_esr:

	/* Save remaining volatile guest register state to vcpu. */
	stw	r0, VCPU_GPR(r0)(r4)
	stw	r1, VCPU_GPR(r1)(r4)
	stw	r2, VCPU_GPR(r2)(r4)
	stw	r10, VCPU_GPR(r10)(r4)
	stw	r11, VCPU_GPR(r11)(r4)
	stw	r12, VCPU_GPR(r12)(r4)
	stw	r13, VCPU_GPR(r13)(r4)
	stw	r14, VCPU_GPR(r14)(r4) /* We need a NV GPR below. */
	mflr	r3
	stw	r3, VCPU_LR(r4)
	mfxer	r3
	stw	r3, VCPU_XER(r4)
	mfspr	r3, SPRN_SPRG0          /* Guest r4, parked by the stub. */
	stw	r3, VCPU_GPR(r4)(r4)
	mfspr	r3, SPRN_SRR0           /* Guest PC at exit. */
	stw	r3, VCPU_PC(r4)

	/* Restore host stack pointer and PID before IVPR, since the host
	 * exception handlers use them. */
	lwz	r1, VCPU_HOST_STACK(r4)
	lwz	r3, VCPU_HOST_PID(r4)
	mtspr	SPRN_PID, r3

	/* Restore host IVPR before re-enabling interrupts. We cheat and know
	 * that Linux IVPR is always 0xc0000000. */
	lis	r3, 0xc000
	mtspr	SPRN_IVPR, r3

	/* Switch to kernel stack and jump to handler. */
	LOAD_REG_ADDR(r3, kvmppc_handle_exit)
	mtctr	r3
	lwz	r3, HOST_RUN(r1)        /* arg0: kvm_run pointer. */
	lwz	r2, HOST_R2(r1)         /* Restore 'current'. */
	mr	r14, r4                 /* Save vcpu pointer. */

	bctrl	/* kvmppc_handle_exit() */

	/* Restore vcpu pointer and the nonvolatiles we used. */
	mr	r4, r14
	lwz	r14, VCPU_GPR(r14)(r4)

	/* Sometimes instruction emulation must restore complete GPR state. */
	andi.	r5, r3, RESUME_FLAG_NV
	beq	..skip_nv_load
	lwz	r15, VCPU_GPR(r15)(r4)
	lwz	r16, VCPU_GPR(r16)(r4)
	lwz	r17, VCPU_GPR(r17)(r4)
	lwz	r18, VCPU_GPR(r18)(r4)
	lwz	r19, VCPU_GPR(r19)(r4)
	lwz	r20, VCPU_GPR(r20)(r4)
	lwz	r21, VCPU_GPR(r21)(r4)
	lwz	r22, VCPU_GPR(r22)(r4)
	lwz	r23, VCPU_GPR(r23)(r4)
	lwz	r24, VCPU_GPR(r24)(r4)
	lwz	r25, VCPU_GPR(r25)(r4)
	lwz	r26, VCPU_GPR(r26)(r4)
	lwz	r27, VCPU_GPR(r27)(r4)
	lwz	r28, VCPU_GPR(r28)(r4)
	lwz	r29, VCPU_GPR(r29)(r4)
	lwz	r30, VCPU_GPR(r30)(r4)
	lwz	r31, VCPU_GPR(r31)(r4)
..skip_nv_load:

	/* Should we return to the guest? */
	andi.	r5, r3, RESUME_FLAG_HOST
	beq	lightweight_exit

	srawi	r3, r3, 2 /* Shift -ERR back down. */
heavyweight_exit:
	/* Not returning to guest: unwind the whole guest context and pop
	 * the __kvmppc_vcpu_run() stack frame. */

	/* We already saved guest volatile register state; now save the
	 * non-volatiles. */
	stw	r15, VCPU_GPR(r15)(r4)
	stw	r16, VCPU_GPR(r16)(r4)
	stw	r17, VCPU_GPR(r17)(r4)
	stw	r18, VCPU_GPR(r18)(r4)
	stw	r19, VCPU_GPR(r19)(r4)
	stw	r20, VCPU_GPR(r20)(r4)
	stw	r21, VCPU_GPR(r21)(r4)
	stw	r22, VCPU_GPR(r22)(r4)
	stw	r23, VCPU_GPR(r23)(r4)
	stw	r24, VCPU_GPR(r24)(r4)
	stw	r25, VCPU_GPR(r25)(r4)
	stw	r26, VCPU_GPR(r26)(r4)
	stw	r27, VCPU_GPR(r27)(r4)
	stw	r28, VCPU_GPR(r28)(r4)
	stw	r29, VCPU_GPR(r29)(r4)
	stw	r30, VCPU_GPR(r30)(r4)
	stw	r31, VCPU_GPR(r31)(r4)

	/* Load host non-volatile register state from host stack. */
	lwz	r14, HOST_NV_GPR(r14)(r1)
	lwz	r15, HOST_NV_GPR(r15)(r1)
	lwz	r16, HOST_NV_GPR(r16)(r1)
	lwz	r17, HOST_NV_GPR(r17)(r1)
	lwz	r18, HOST_NV_GPR(r18)(r1)
	lwz	r19, HOST_NV_GPR(r19)(r1)
	lwz	r20, HOST_NV_GPR(r20)(r1)
	lwz	r21, HOST_NV_GPR(r21)(r1)
	lwz	r22, HOST_NV_GPR(r22)(r1)
	lwz	r23, HOST_NV_GPR(r23)(r1)
	lwz	r24, HOST_NV_GPR(r24)(r1)
	lwz	r25, HOST_NV_GPR(r25)(r1)
	lwz	r26, HOST_NV_GPR(r26)(r1)
	lwz	r27, HOST_NV_GPR(r27)(r1)
	lwz	r28, HOST_NV_GPR(r28)(r1)
	lwz	r29, HOST_NV_GPR(r29)(r1)
	lwz	r30, HOST_NV_GPR(r30)(r1)
	lwz	r31, HOST_NV_GPR(r31)(r1)

	/* Return to kvm_vcpu_run(). */
	lwz	r4, HOST_STACK_LR(r1)   /* LR saved in the caller's frame. */
	addi	r1, r1, HOST_STACK_SIZE
	mtlr	r4
	/* r3 still contains the return code from kvmppc_handle_exit(). */
	blr
/* Registers:
 *  r3: kvm_run pointer
 *  r4: vcpu pointer
 */
_GLOBAL(__kvmppc_vcpu_run)
	stwu	r1, -HOST_STACK_SIZE(r1)
	stw	r1, VCPU_HOST_STACK(r4)	/* Save stack pointer to vcpu. */

	/* Save host state to stack. */
	stw	r3, HOST_RUN(r1)
	mflr	r3
	stw	r3, HOST_STACK_LR(r1)

	/* Save host non-volatile register state to stack. */
	stw	r14, HOST_NV_GPR(r14)(r1)
	stw	r15, HOST_NV_GPR(r15)(r1)
	stw	r16, HOST_NV_GPR(r16)(r1)
	stw	r17, HOST_NV_GPR(r17)(r1)
	stw	r18, HOST_NV_GPR(r18)(r1)
	stw	r19, HOST_NV_GPR(r19)(r1)
	stw	r20, HOST_NV_GPR(r20)(r1)
	stw	r21, HOST_NV_GPR(r21)(r1)
	stw	r22, HOST_NV_GPR(r22)(r1)
	stw	r23, HOST_NV_GPR(r23)(r1)
	stw	r24, HOST_NV_GPR(r24)(r1)
	stw	r25, HOST_NV_GPR(r25)(r1)
	stw	r26, HOST_NV_GPR(r26)(r1)
	stw	r27, HOST_NV_GPR(r27)(r1)
	stw	r28, HOST_NV_GPR(r28)(r1)
	stw	r29, HOST_NV_GPR(r29)(r1)
	stw	r30, HOST_NV_GPR(r30)(r1)
	stw	r31, HOST_NV_GPR(r31)(r1)

	/* Load guest non-volatiles, then fall through to lightweight_exit
	 * to load the volatiles and enter the guest. */
	lwz	r14, VCPU_GPR(r14)(r4)
	lwz	r15, VCPU_GPR(r15)(r4)
	lwz	r16, VCPU_GPR(r16)(r4)
	lwz	r17, VCPU_GPR(r17)(r4)
	lwz	r18, VCPU_GPR(r18)(r4)
	lwz	r19, VCPU_GPR(r19)(r4)
	lwz	r20, VCPU_GPR(r20)(r4)
	lwz	r21, VCPU_GPR(r21)(r4)
	lwz	r22, VCPU_GPR(r22)(r4)
	lwz	r23, VCPU_GPR(r23)(r4)
	lwz	r24, VCPU_GPR(r24)(r4)
	lwz	r25, VCPU_GPR(r25)(r4)
	lwz	r26, VCPU_GPR(r26)(r4)
	lwz	r27, VCPU_GPR(r27)(r4)
	lwz	r28, VCPU_GPR(r28)(r4)
	lwz	r29, VCPU_GPR(r29)(r4)
	lwz	r30, VCPU_GPR(r30)(r4)
	lwz	r31, VCPU_GPR(r31)(r4)
lightweight_exit:
	stw	r2, HOST_R2(r1)         /* Stash 'current' for the exit path. */

	/* Swap host PID for the guest's shadow PID. */
	mfspr	r3, SPRN_PID
	stw	r3, VCPU_HOST_PID(r4)
	lwz	r3, VCPU_SHADOW_PID(r4)
	mtspr	SPRN_PID, r3

	iccci	0, 0 /* XXX hack */

	/* Load some guest volatiles. */
	lwz	r0, VCPU_GPR(r0)(r4)
	lwz	r2, VCPU_GPR(r2)(r4)
	lwz	r9, VCPU_GPR(r9)(r4)
	lwz	r10, VCPU_GPR(r10)(r4)
	lwz	r11, VCPU_GPR(r11)(r4)
	lwz	r12, VCPU_GPR(r12)(r4)
	lwz	r13, VCPU_GPR(r13)(r4)
	lwz	r3, VCPU_LR(r4)
	mtlr	r3
	lwz	r3, VCPU_XER(r4)
	mtxer	r3

	/* Switch the IVPR. XXX If we take a TLB miss after this we're screwed,
	 * so how do we make sure vcpu won't fault? */
	lis	r8, kvmppc_booke_handlers@ha
	lwz	r8, kvmppc_booke_handlers@l(r8)
	mtspr	SPRN_IVPR, r8

	/* Save vcpu pointer for the exception handlers. */
	mtspr	SPRN_SPRG1, r4

	/* Can't switch the stack pointer until after IVPR is switched,
	 * because host interrupt handlers would get confused. */
	lwz	r1, VCPU_GPR(r1)(r4)

	/* XXX handle USPRG0 */
	/* Host interrupt handlers may have clobbered these guest-readable
	 * SPRGs, so we need to reload them here with the guest's values. */
	lwz	r3, VCPU_SPRG4(r4)
	mtspr	SPRN_SPRG4, r3
	lwz	r3, VCPU_SPRG5(r4)
	mtspr	SPRN_SPRG5, r3
	lwz	r3, VCPU_SPRG6(r4)
	mtspr	SPRN_SPRG6, r3
	lwz	r3, VCPU_SPRG7(r4)
	mtspr	SPRN_SPRG7, r3

	/* Finish loading guest volatiles and jump to guest. */
	lwz	r3, VCPU_CTR(r4)
	mtctr	r3
	lwz	r3, VCPU_CR(r4)
	mtcr	r3
	lwz	r5, VCPU_GPR(r5)(r4)
	lwz	r6, VCPU_GPR(r6)(r4)
	lwz	r7, VCPU_GPR(r7)(r4)
	lwz	r8, VCPU_GPR(r8)(r4)
	lwz	r3, VCPU_PC(r4)
	mtsrr0	r3                      /* rfi target = guest PC. */
	lwz	r3, VCPU_MSR(r4)
	oris	r3, r3, KVMPPC_MSR_MASK@h
	ori	r3, r3, KVMPPC_MSR_MASK@l
	mtsrr1	r3

	/* Clear any debug events which occurred since we disabled MSR[DE].
	 * XXX This gives us a 3-instruction window in which a breakpoint
	 * intended for guest context could fire in the host instead. */
	lis	r3, 0xffff
	ori	r3, r3, 0xffff
	mtspr	SPRN_DBSR, r3

	lwz	r3, VCPU_GPR(r3)(r4)
	lwz	r4, VCPU_GPR(r4)(r4)    /* r4 last: it was the vcpu pointer. */
	rfi