/*
 * linux/arch/arm/mach-omap2/sleep.S
 *
 * (C) Copyright 2007
 * Texas Instruments
 * Karthik Dasu <karthik-dp@ti.com>
 *
 * (C) Copyright 2004
 * Texas Instruments, <www.ti.com>
 * Richard Woodruff <r-woodruff2@ti.com>
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License as
 * published by the Free Software Foundation; either version 2 of
 * the License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston,
 * MA 02111-1307 USA
 */
#include <linux/linkage.h>
#include <asm/assembler.h>
#include <mach/io.h>
#include <plat/control.h>

#include "cm.h"
#include "prm.h"
#include "sdrc.h"

#define PM_PREPWSTST_CORE_V	OMAP34XX_PRM_REGADDR(CORE_MOD, \
				OMAP3430_PM_PREPWSTST)
#define PM_PREPWSTST_CORE_P	0x48306AE8
#define PM_PREPWSTST_MPU_V	OMAP34XX_PRM_REGADDR(MPU_MOD, \
				OMAP3430_PM_PREPWSTST)
#define PM_PWSTCTRL_MPU_P	OMAP3430_PRM_BASE + MPU_MOD + PM_PWSTCTRL
#define CM_IDLEST1_CORE_V	OMAP34XX_CM_REGADDR(CORE_MOD, CM_IDLEST1)
#define SRAM_BASE_P		0x40200000
#define CONTROL_STAT		0x480022F0
#define SCRATCHPAD_MEM_OFFS	0x310	/* Move this as correct place is
					 * available */
#define SCRATCHPAD_BASE_P	(OMAP343X_CTRL_BASE + OMAP343X_CONTROL_MEM_WKUP \
				+ SCRATCHPAD_MEM_OFFS)
#define SDRC_POWER_V		OMAP34XX_SDRC_REGADDR(SDRC_POWER)
#define SDRC_SYSCONFIG_P	(OMAP343X_SDRC_BASE + SDRC_SYSCONFIG)
#define SDRC_MR_0_P		(OMAP343X_SDRC_BASE + SDRC_MR_0)
#define SDRC_EMR2_0_P		(OMAP343X_SDRC_BASE + SDRC_EMR2_0)
#define SDRC_MANUAL_0_P		(OMAP343X_SDRC_BASE + SDRC_MANUAL_0)
#define SDRC_MR_1_P		(OMAP343X_SDRC_BASE + SDRC_MR_1)
#define SDRC_EMR2_1_P		(OMAP343X_SDRC_BASE + SDRC_EMR2_1)
#define SDRC_MANUAL_1_P		(OMAP343X_SDRC_BASE + SDRC_MANUAL_1)
#define SDRC_DLLA_STATUS_V	OMAP34XX_SDRC_REGADDR(SDRC_DLLA_STATUS)
#define SDRC_DLLA_CTRL_V	OMAP34XX_SDRC_REGADDR(SDRC_DLLA_CTRL)

	.text
/* Function call to get the restore pointer for resume from OFF */
ENTRY(get_restore_pointer)
	stmfd	sp!, {lr}	@ save registers on stack
	adr	r0, restore
	ldmfd	sp!, {pc}	@ restore regs and return
ENTRY(get_restore_pointer_sz)
	.word	. - get_restore_pointer

	.text
/* Function call to get the restore pointer for ES3 to resume from OFF */
ENTRY(get_es3_restore_pointer)
	stmfd	sp!, {lr}	@ save registers on stack
	adr	r0, restore_es3
	ldmfd	sp!, {pc}	@ restore regs and return
ENTRY(get_es3_restore_pointer_sz)
	.word	. - get_es3_restore_pointer
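
/*
 * es3_sdrc_fix: SDRC fix-up executed after a wakeup in which the CORE
 * domain previously hit OFF.  restore_es3 below copies this routine into
 * SRAM and branches to it.  It clears the "part access blocked" bit in
 * SDRC_SYSCONFIG, rewrites the MR/EMR2 registers of both chip-selects and
 * issues an autorefresh command to each, before returning to the caller.
 */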
ENTRY(es3_sdrc_fix)
	ldr	r4, sdrc_syscfg		@ get config addr
	ldr	r5, [r4]		@ get value
	tst	r5, #0x100		@ is part access blocked
	it	eq
	biceq	r5, r5, #0x100		@ clear bit if set
	str	r5, [r4]		@ write back change
	ldr	r4, sdrc_mr_0		@ get config addr
	ldr	r5, [r4]		@ get value
	str	r5, [r4]		@ write back change
	ldr	r4, sdrc_emr2_0		@ get config addr
	ldr	r5, [r4]		@ get value
	str	r5, [r4]		@ write back change
	ldr	r4, sdrc_manual_0	@ get config addr
	mov	r5, #0x2		@ autorefresh command
	str	r5, [r4]		@ kick off refreshes
	ldr	r4, sdrc_mr_1		@ get config addr
	ldr	r5, [r4]		@ get value
	str	r5, [r4]		@ write back change
	ldr	r4, sdrc_emr2_1		@ get config addr
	ldr	r5, [r4]		@ get value
	str	r5, [r4]		@ write back change
	ldr	r4, sdrc_manual_1	@ get config addr
	mov	r5, #0x2		@ autorefresh command
	str	r5, [r4]		@ kick off refreshes
	bx	lr
sdrc_syscfg:
	.word	SDRC_SYSCONFIG_P
sdrc_mr_0:
	.word	SDRC_MR_0_P
sdrc_emr2_0:
	.word	SDRC_EMR2_0_P
sdrc_manual_0:
	.word	SDRC_MANUAL_0_P
sdrc_mr_1:
	.word	SDRC_MR_1_P
sdrc_emr2_1:
	.word	SDRC_EMR2_1_P
sdrc_manual_1:
	.word	SDRC_MANUAL_1_P
ENTRY(es3_sdrc_fix_sz)
	.word	. - es3_sdrc_fix
/* Function to call rom code to save secure ram context */
ENTRY(save_secure_ram_context)
	stmfd	sp!, {r1-r12, lr}	@ save registers on stack
save_secure_ram_debug:
	/* b save_secure_ram_debug */	@ enable to debug save code
	adr	r3, api_params		@ r3 points to parameters
	str	r0, [r3,#0x4]		@ r0 has sdram address
	ldr	r12, high_mask
	and	r3, r3, r12
	ldr	r12, sram_phy_addr_mask
	orr	r3, r3, r12
	mov	r0, #25			@ set service ID for PPA
	mov	r12, r0			@ copy secure service ID in r12
	mov	r1, #0			@ set task id for ROM code in r1
	mov	r2, #4			@ set some flags in r2, r6
	mov	r6, #0xff
	mcr	p15, 0, r0, c7, c10, 4	@ data write barrier
	mcr	p15, 0, r0, c7, c10, 5	@ data memory barrier
	.word	0xE1600071		@ call SMI monitor (smi #1)
	nop
	nop
	nop
	nop
	ldmfd	sp!, {r1-r12, pc}
sram_phy_addr_mask:
	.word	SRAM_BASE_P
high_mask:
	.word	0xffff
api_params:
	.word	0x4, 0x0, 0x0, 0x1, 0x1
ENTRY(save_secure_ram_context_sz)
	.word	. - save_secure_ram_context
/*
 * Forces OMAP into idle state
 *
 * omap34xx_cpu_suspend() - This bit of code just executes the WFI
 * for normal idles.
 *
 * Note: This code gets copied to internal SRAM at boot. When the OMAP
 * wakes up it continues execution at the point it went to sleep.
 */
ENTRY(omap34xx_cpu_suspend)
	stmfd	sp!, {r0-r12, lr}	@ save registers on stack
loop:
	/*b	loop*/		@ Enable to debug by stepping through code
	/* r0 contains restore pointer in sdram */
	/* r1 contains information about saving context */
	ldr	r4, sdrc_power		@ read the SDRC_POWER register
	ldr	r5, [r4]		@ read the contents of SDRC_POWER
	orr	r5, r5, #0x40		@ enable self refresh on idle req
	str	r5, [r4]		@ write back to SDRC_POWER register
	cmp	r1, #0x0
	/* If context save is required, do that and execute wfi */
	bne	save_context_wfi
	/* Data memory barrier and Data sync barrier */
	mov	r1, #0
	mcr	p15, 0, r1, c7, c10, 4
	mcr	p15, 0, r1, c7, c10, 5
	wfi				@ wait for interrupt
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	bl	wait_sdrc_ok
	ldmfd	sp!, {r0-r12, pc}	@ restore regs and return
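
/*
 * restore_es3: ES3.x wakeup path.  Checks the previous power state of the
 * CORE domain; if CORE hit OFF, es3_sdrc_fix is first copied word by word
 * into SRAM (at sram_base) and executed from there, after which execution
 * falls through to the common restore path below.
 */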
restore_es3:
	/*b restore_es3*/	@ Enable to debug restore code
	ldr	r5, pm_prepwstst_core_p
	ldr	r4, [r5]
	and	r4, r4, #0x3
	cmp	r4, #0x0	@ Check if previous power state of CORE is OFF
	bne	restore
	adr	r0, es3_sdrc_fix
	ldr	r1, sram_base
	ldr	r2, es3_sdrc_fix_sz
	mov	r2, r2, ror #2	@ size in bytes -> words (size is a multiple of 4)
copy_to_sram:
	ldmia	r0!, {r3}	@ val = *src
	stmia	r1!, {r3}	@ *dst = val
	subs	r2, r2, #0x1	@ num_words--
	bne	copy_to_sram
	ldr	r1, sram_base
	blx	r1
restore:
	/* b restore*/		@ Enable to debug restore code
	/* Check what was the reason for mpu reset and store the reason in r9 */
	/* 1 - Only L1 and logic lost */
	/* 2 - Only L2 lost - In this case, we won't be here */
	/* 3 - Both L1 and L2 lost */
	ldr	r1, pm_pwstctrl_mpu
	ldr	r2, [r1]
	and	r2, r2, #0x3
	cmp	r2, #0x0	@ Check if target power state was OFF or RET
	moveq	r9, #0x3	@ MPU OFF => L1 and L2 lost
	movne	r9, #0x1	@ Only L1 and logic lost => avoid L2 invalidation
	bne	logic_l1_restore
	ldr	r0, control_stat
	ldr	r1, [r0]
	and	r1, #0x700
	cmp	r1, #0x300
	beq	l2_inv_gp
	mov	r0, #40			@ set service ID for PPA
	mov	r12, r0			@ copy secure Service ID in r12
	mov	r1, #0			@ set task id for ROM code in r1
	mov	r2, #4			@ set some flags in r2, r6
	mov	r6, #0xff
	adr	r3, l2_inv_api_params	@ r3 points to dummy parameters
	mcr	p15, 0, r0, c7, c10, 4	@ data write barrier
	mcr	p15, 0, r0, c7, c10, 5	@ data memory barrier
	.word	0xE1600071		@ call SMI monitor (smi #1)
	/* Write to Aux control register to set some bits */
	mov	r0, #42			@ set service ID for PPA
	mov	r12, r0			@ copy secure Service ID in r12
	mov	r1, #0			@ set task id for ROM code in r1
	mov	r2, #4			@ set some flags in r2, r6
	mov	r6, #0xff
	adr	r3, write_aux_control_params	@ r3 points to parameters
	mcr	p15, 0, r0, c7, c10, 4	@ data write barrier
	mcr	p15, 0, r0, c7, c10, 5	@ data memory barrier
	.word	0xE1600071		@ call SMI monitor (smi #1)
	b	logic_l1_restore
l2_inv_api_params:
	.word	0x1, 0x00
write_aux_control_params:
	.word	0x1, 0x72
l2_inv_gp:
	/* Execute smi to invalidate L2 cache */
	mov	r12, #0x1		@ set up to invalidate L2
smi:	.word	0xE1600070		@ Call SMI monitor (smieq)
	/* Write to Aux control register to set some bits */
	mov	r0, #0x72
	mov	r12, #0x3
	.word	0xE1600070		@ Call SMI monitor (smieq)
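/*
 * logic_l1_restore: reload the CP15/MMU context saved by save_context_wfi.
 * The pointer to the saved area is read from the scratchpad (+0xBC) and the
 * registers are restored in the same order they were saved.  A temporary
 * 1MB section mapping covering the currently executing code is then patched
 * into the translation table (the original entry and its address are kept
 * in the scratchpad at +0xC0/+0xC4, to be restored once the MMU is back on).
 */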
logic_l1_restore:
	mov	r1, #0
	/* Invalidate all instruction caches to PoU
	 * and flush branch target cache */
	mcr	p15, 0, r1, c7, c5, 0
	ldr	r4, scratchpad_base
	ldr	r3, [r4,#0xBC]
	ldmia	r3!, {r4-r6}
	mov	sp, r4
	msr	spsr_cxsf, r5
	mov	lr, r6
	ldmia	r3!, {r4-r9}
	/* Coprocessor access Control Register */
	mcr	p15, 0, r4, c1, c0, 2
	/* TTBR0 */
	MCR	p15, 0, r5, c2, c0, 0
	/* TTBR1 */
	MCR	p15, 0, r6, c2, c0, 1
	/* Translation table base control register */
	MCR	p15, 0, r7, c2, c0, 2
	/* Domain access Control Register */
	MCR	p15, 0, r8, c3, c0, 0
	/* Data fault status Register */
	MCR	p15, 0, r9, c5, c0, 0
	ldmia	r3!, {r4-r8}
	/* Instruction fault status Register */
	MCR	p15, 0, r4, c5, c0, 1
	/* Data Auxiliary Fault Status Register */
	MCR	p15, 0, r5, c5, c1, 0
	/* Instruction Auxiliary Fault Status Register */
	MCR	p15, 0, r6, c5, c1, 1
	/* Data Fault Address Register */
	MCR	p15, 0, r7, c6, c0, 0
	/* Instruction Fault Address Register */
	MCR	p15, 0, r8, c6, c0, 2
	ldmia	r3!, {r4-r7}
	/* User r/w thread and process ID */
	MCR	p15, 0, r4, c13, c0, 2
	/* User r/o thread and process ID */
	MCR	p15, 0, r5, c13, c0, 3
	/* Privileged only thread and process ID */
	MCR	p15, 0, r6, c13, c0, 4
	/* Cache size selection */
	MCR	p15, 2, r7, c0, c0, 0
	ldmia	r3!, {r4-r8}
	/* Data TLB lockdown registers */
	MCR	p15, 0, r4, c10, c0, 0
	/* Instruction TLB lockdown registers */
	MCR	p15, 0, r5, c10, c0, 1
	/* Secure or Nonsecure Vector Base Address */
	MCR	p15, 0, r6, c12, c0, 0
	/* FCSE PID */
	MCR	p15, 0, r7, c13, c0, 0
	/* Context PID */
	MCR	p15, 0, r8, c13, c0, 1
	ldmia	r3!, {r4-r5}
	/* Primary memory remap register */
	MCR	p15, 0, r4, c10, c2, 0
	/* Normal memory remap register */
	MCR	p15, 0, r5, c10, c2, 1
	/* Restore cpsr */
	ldmia	r3!, {r4}	/* load CPSR from SDRAM */
	msr	cpsr, r4	/* store cpsr */
	/* Enabling MMU here */
	mrc	p15, 0, r7, c2, c0, 2	/* Read TTBR control */
	/* Extract N (0:2) bits and decide whether to use TTBR0 or TTBR1 */
	and	r7, #0x7
	cmp	r7, #0x0
	beq	usettbr0
ttbr_error:
	/* More work needs to be done to support N[0:2] value other than 0
	 * So looping here so that the error can be detected
	 */
	b	ttbr_error
usettbr0:
	mrc	p15, 0, r2, c2, c0, 0
	ldr	r5, ttbrbit_mask
	and	r2, r5
	mov	r4, pc
	ldr	r5, table_index_mask
	and	r4, r5		/* r4 = 31 to 20 bits of pc */
	/* Extract the value to be written to the table entry */
	ldr	r1, table_entry
	add	r1, r1, r4	/* r1 has the value to be written to the table entry */
	/* Get the address of the table entry to modify */
	lsr	r4, #18
	add	r2, r4		/* r2 has the location which needs to be modified */
	/* Store previous entry of location being modified */
	ldr	r5, scratchpad_base
	ldr	r4, [r2]
	str	r4, [r5, #0xC0]
	/* Modify the table entry */
	str	r1, [r2]
	/* Store address of entry being modified
	 * - will be restored after enabling MMU */
	ldr	r5, scratchpad_base
	str	r2, [r5, #0xC4]
	mov	r0, #0
	mcr	p15, 0, r0, c7, c5, 4	@ Flush prefetch buffer
	mcr	p15, 0, r0, c7, c5, 6	@ Invalidate branch predictor array
	mcr	p15, 0, r0, c8, c5, 0	@ Invalidate instruction TLB
	mcr	p15, 0, r0, c8, c6, 0	@ Invalidate data TLB
	/* Restore control register, but don't enable caches here */
	/* Caches will be enabled after restoring MMU table entry */
	ldmia	r3!, {r4}
	/* Store previous value of control register in scratchpad */
	str	r4, [r5, #0xC8]
	ldr	r2, cache_pred_disable_mask
	and	r4, r2
	mcr	p15, 0, r4, c1, c0, 0
	ldmfd	sp!, {r0-r12, pc}	@ restore regs and return
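
/*
 * save_context_wfi: save the ARM/CP15 context to the SDRAM buffer passed in
 * r0 (skipped when only L2 is being lost), clean the caches as required by
 * the target sleep state in r1, then execute WFI.
 */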
save_context_wfi:
	/*b	save_context_wfi*/	@ enable to debug save code
	mov	r8, r0	/* Store SDRAM address in r8 */
	/* Check what the target sleep state is: stored in r1 */
	/* 1 - Only L1 and logic lost */
	/* 2 - Only L2 lost */
	/* 3 - Both L1 and L2 lost */
	cmp	r1, #0x2	/* Only L2 lost */
	beq	clean_l2
	cmp	r1, #0x1	/* L2 retained */
	/* r9 stores whether to clean L2 or not */
	moveq	r9, #0x0	/* Don't clean L2 */
	movne	r9, #0x1	/* Clean L2 */
l1_logic_lost:
	/* Store sp and spsr to SDRAM */
	mov	r4, sp
	mrs	r5, spsr
	mov	r6, lr
	stmia	r8!, {r4-r6}
	/* Save all ARM registers */
	/* Coprocessor access control register */
	mrc	p15, 0, r6, c1, c0, 2
	stmia	r8!, {r6}
	/* TTBR0, TTBR1 and Translation table base control */
	mrc	p15, 0, r4, c2, c0, 0
	mrc	p15, 0, r5, c2, c0, 1
	mrc	p15, 0, r6, c2, c0, 2
	stmia	r8!, {r4-r6}
	/* Domain access control register, data fault status register,
	 * and instruction fault status register */
	mrc	p15, 0, r4, c3, c0, 0
	mrc	p15, 0, r5, c5, c0, 0
	mrc	p15, 0, r6, c5, c0, 1
	stmia	r8!, {r4-r6}
	/* Data aux fault status register, instruction aux fault status,
	 * data fault address register and instruction fault address register */
	mrc	p15, 0, r4, c5, c1, 0
	mrc	p15, 0, r5, c5, c1, 1
	mrc	p15, 0, r6, c6, c0, 0
	mrc	p15, 0, r7, c6, c0, 2
	stmia	r8!, {r4-r7}
	/* User r/w thread and process ID, user r/o thread and process ID,
	 * priv only thread and process ID, cache size selection */
	mrc	p15, 0, r4, c13, c0, 2
	mrc	p15, 0, r5, c13, c0, 3
	mrc	p15, 0, r6, c13, c0, 4
	mrc	p15, 2, r7, c0, c0, 0
	stmia	r8!, {r4-r7}
	/* Data TLB lockdown, instruction TLB lockdown registers */
	mrc	p15, 0, r5, c10, c0, 0
	mrc	p15, 0, r6, c10, c0, 1
	stmia	r8!, {r5-r6}
	/* Secure or non secure vector base address, FCSE PID, Context PID */
	mrc	p15, 0, r4, c12, c0, 0
	mrc	p15, 0, r5, c13, c0, 0
	mrc	p15, 0, r6, c13, c0, 1
	stmia	r8!, {r4-r6}
	/* Primary remap, normal remap registers */
	mrc	p15, 0, r4, c10, c2, 0
	mrc	p15, 0, r5, c10, c2, 1
	stmia	r8!, {r4-r5}
	/* Store current cpsr */
	mrs	r2, cpsr
	stmia	r8!, {r2}
	mrc	p15, 0, r4, c1, c0, 0
	/* Save control register */
	stmia	r8!, {r4}
clean_caches:
	/* Clean Data or unified cache to POU */
	/* How to invalidate only L1 cache???? - #FIX_ME# */
	/* mcr	p15, 0, r11, c7, c11, 1 */
	cmp	r9, #1	/* Check whether L2 inval is required or not */
	bne	skip_l2_inval
clean_l2:
	/* Read clidr */
	mrc	p15, 1, r0, c0, c0, 1
	/* Extract loc from clidr */
	ands	r3, r0, #0x7000000
	/* Left align loc bit field */
	mov	r3, r3, lsr #23
	/* If loc is 0, then no need to clean */
	beq	finished
	/* Start clean at cache level 0 */
	mov	r10, #0
loop1:
	/* Work out 3x current cache level */
	add	r2, r10, r10, lsr #1
	/* Extract cache type bits from clidr */
	mov	r1, r0, lsr r2
	/* Mask the bits for current cache only */
	and	r1, r1, #7
	/* See what cache we have at this level */
	cmp	r1, #2
	/* Skip if no cache, or just i-cache */
	blt	skip
	/* Select current cache level in cssr */
	mcr	p15, 2, r10, c0, c0, 0
	/* isb to sync the new cssr & csidr */
	isb
	/* Read the new csidr */
	mrc	p15, 1, r1, c0, c0, 0
	/* Extract the length of the cache lines */
	and	r2, r1, #7
	/* Add 4 (line length offset) */
	add	r2, r2, #4
	ldr	r4, assoc_mask
	/* Find maximum number on the way size */
	ands	r4, r4, r1, lsr #3
	/* Find bit position of way size increment */
	clz	r5, r4
	ldr	r7, numset_mask
	/* Extract max number of the index size */
	ands	r7, r7, r1, lsr #13
loop2:
	mov	r9, r4
	/* Create working copy of max way size */
loop3:
	/* Factor way and cache number into r11 */
	orr	r11, r10, r9, lsl r5
	/* Factor index number into r11 */
	orr	r11, r11, r7, lsl r2
	/* Clean & invalidate by set/way */
	mcr	p15, 0, r11, c7, c10, 2
	/* Decrement the way */
	subs	r9, r9, #1
	bge	loop3
	/* Decrement the index */
	subs	r7, r7, #1
	bge	loop2
skip:
	add	r10, r10, #2
	/* Increment cache number */
	cmp	r3, r10
	bgt	loop1
finished:
	/* Switch back to cache level 0 */
	mov	r10, #0
	/* Select current cache level in cssr */
	mcr	p15, 2, r10, c0, c0, 0
	isb
skip_l2_inval:
	/* Data memory barrier and Data sync barrier */
	mov	r1, #0
	mcr	p15, 0, r1, c7, c10, 4
	mcr	p15, 0, r1, c7, c10, 5
	wfi				@ wait for interrupt
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	bl	wait_sdrc_ok
	/* restore regs and return */
	ldmfd	sp!, {r0-r12, pc}
/* Make sure SDRC accesses are ok */
wait_sdrc_ok:
	ldr	r4, cm_idlest1_core
	ldr	r5, [r4]
	and	r5, r5, #0x2
	cmp	r5, #0
	bne	wait_sdrc_ok
	ldr	r4, sdrc_power
	ldr	r5, [r4]
	bic	r5, r5, #0x40
	str	r5, [r4]
wait_dll_lock:
	/* Is dll in lock mode? */
	ldr	r4, sdrc_dlla_ctrl
	ldr	r5, [r4]
	tst	r5, #0x4
	bxne	lr
	/* Wait till dll locks */
	ldr	r4, sdrc_dlla_status
	ldr	r5, [r4]
	and	r5, r5, #0x4
	cmp	r5, #0x4
	bne	wait_dll_lock
	bx	lr
cm_idlest1_core:
	.word	CM_IDLEST1_CORE_V
sdrc_dlla_status:
	.word	SDRC_DLLA_STATUS_V
sdrc_dlla_ctrl:
	.word	SDRC_DLLA_CTRL_V
pm_prepwstst_core:
	.word	PM_PREPWSTST_CORE_V
pm_prepwstst_core_p:
	.word	PM_PREPWSTST_CORE_P
pm_prepwstst_mpu:
	.word	PM_PREPWSTST_MPU_V
pm_pwstctrl_mpu:
	.word	PM_PWSTCTRL_MPU_P
scratchpad_base:
	.word	SCRATCHPAD_BASE_P
sram_base:
	.word	SRAM_BASE_P + 0x8000
sdrc_power:
	.word	SDRC_POWER_V
clk_stabilize_delay:
	.word	0x000001FF
assoc_mask:
	.word	0x3ff
numset_mask:
	.word	0x7fff
ttbrbit_mask:
	.word	0xFFFFC000
table_index_mask:
	.word	0xFFF00000
table_entry:
	.word	0x00000C02
cache_pred_disable_mask:
	.word	0xFFFFE7FB
control_stat:
	.word	CONTROL_STAT
ENTRY(omap34xx_cpu_suspend_sz)
	.word	. - omap34xx_cpu_suspend