/*
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License, version 2, as
 * published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
 *
 * Copyright SUSE Linux Products GmbH 2009
 *
 * Authors: Alexander Graf <agraf@suse.de>
 */
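
/*
 * Each entry in the SLB shadow save area is 16 bytes: the ESID dword at
 * offset 0x0 and the VSID dword at offset 0x8. These helpers compute the
 * byte offset of either dword for entry 'num'.
 */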
#define SHADOW_SLB_ESID(num)	(SLBSHADOW_SAVEAREA + (num * 0x10))
#define SHADOW_SLB_VSID(num)	(SLBSHADOW_SAVEAREA + (num * 0x10) + 0x8)
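
/*
 * UNBOLT_SLB_ENTRY(num): if shadow entry 'num' is valid, clear its
 * SLB_ESID_V bit in the save area; entries that are already invalid
 * are skipped.
 */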
#define UNBOLT_SLB_ENTRY(num) \
	ld	r9, SHADOW_SLB_ESID(num)(r12); \
	/* Invalid? Skip. */ ; \
	rldicl. r0, r9, 37, 63; \
	beq	slb_entry_skip_ ## num; \
	xoris	r9, r9, SLB_ESID_V@h; \
	std	r9, SHADOW_SLB_ESID(num)(r12); \
slb_entry_skip_ ## num:
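
/*
 * REBOLT_SLB_ENTRY(num): if shadow entry 'num' holds an ESID, set its
 * SLB_ESID_V bit again, write the entry back into the SLB with slbmte
 * and update the save area; empty entries are skipped.
 */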
#define REBOLT_SLB_ENTRY(num) \
	ld	r10, SHADOW_SLB_ESID(num)(r11); \
	cmpdi	r10, 0; \
	beq	slb_exit_skip_ ## num; \
	oris	r10, r10, SLB_ESID_V@h; \
	ld	r9, SHADOW_SLB_VSID(num)(r11); \
	slbmte	r9, r10; \
	std	r10, SHADOW_SLB_ESID(num)(r11); \
slb_exit_skip_ ## num:
/******************************************************************************
 *                                                                            *
 *                               Entry code                                   *
 *                                                                            *
 *****************************************************************************/
.macro LOAD_GUEST_SEGMENTS
	/* Required state:
	 *
	 * MSR = ~IR|DR
	 * R13 = PACA
	 * R1 = host R1
	 * R2 = host R2
	 * R3 = shadow vcpu
	 * all other volatile GPRS = free
	 * SVCPU[CR]  = guest CR
	 * SVCPU[XER] = guest XER
	 * SVCPU[CTR] = guest CTR
	 * SVCPU[LR]  = guest LR
	 */
/* Remove LPAR shadow entries */
#if SLB_NUM_BOLTED == 3

	ld	r12, PACA_SLBSHADOWPTR(r13)
/* Save off the first entry so we can slbie it later */
	ld	r10, SHADOW_SLB_ESID(0)(r12)
	ld	r11, SHADOW_SLB_VSID(0)(r12)
/* Remove bolted entries */
	UNBOLT_SLB_ENTRY(0)
	UNBOLT_SLB_ENTRY(1)
	UNBOLT_SLB_ENTRY(2)

#else
#error unknown number of bolted entries
#endif
/* Flush SLB */
slbia
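
	/*
	 * slbia leaves SLB entry 0 in place, so the first bolted entry
	 * (saved above in r10/r11) is dropped with an explicit slbie;
	 * the class bit slbie needs is taken from the saved VSID.
	 */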
	/* r10 = esid & ESID_MASK */
	rldicr	r10, r10, 0, 35
	/* r10 |= CLASS_BIT(VSID) */
	rldic	r12, r11, 56 - 36, 36
	or	r10, r10, r12
	slbie	r10
isync
/* Fill SLB with our shadow */
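
	/* r12 = past-the-end pointer of the shadow SLB array: r3 + SVCPU_SLB + slb_max * 16 */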
	lbz	r12, SVCPU_SLB_MAX(r3)
	mulli	r12, r12, 16
	addi	r12, r12, SVCPU_SLB
	add	r12, r12, r3
/* for (r11 = kvm_slb; r11 < kvm_slb + kvm_slb_size; r11+=slb_entry) */
	li	r11, SVCPU_SLB
	add	r11, r11, r3
slb_loop_enter:

	ld	r10, 0(r11)

	rldicl. r0, r10, 37, 63
	beq	slb_loop_enter_skip

	ld	r9, 8(r11)
	slbmte	r9, r10

slb_loop_enter_skip:
	addi	r11, r11, 16
	cmpd	cr0, r11, r12
	blt	slb_loop_enter

slb_do_enter:
.endm
/******************************************************************************
 *                                                                            *
 *                               Exit code                                    *
 *                                                                            *
 *****************************************************************************/
.macro LOAD_HOST_SEGMENTS
	/* Register usage at this point:
	 *
	 * R1         = host R1
	 * R2         = host R2
	 * R12        = exit handler id
	 * R13        = shadow vcpu - SHADOW_VCPU_OFF [=PACA on PPC64]
	 * SVCPU.*    = guest *
	 * SVCPU[CR]  = guest CR
	 * SVCPU[XER] = guest XER
	 * SVCPU[CTR] = guest CTR
	 * SVCPU[LR]  = guest LR
	 *
	 */
/* Restore bolted entries from the shadow and fix it along the way */
/* We don't store anything in entry 0, so we don't need to take care of it */
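
	/* Flush the guest SLB entries before re-installing the host's bolted entries below */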
slbia
isync
#if SLB_NUM_BOLTED == 3

	ld	r11, PACA_SLBSHADOWPTR(r13)

	REBOLT_SLB_ENTRY(0)
	REBOLT_SLB_ENTRY(1)
	REBOLT_SLB_ENTRY(2)

#else
#error unknown number of bolted entries
#endif

slb_do_exit:
.endm