/*
 *  linux/arch/arm/kernel/entry-armv.S
 *
 *  Copyright (C) 1996,1997,1998 Russell King.
 *  ARM700 fix by Matthew Godbolt (linux-user@willothewisp.demon.co.uk)
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 *  Low-level vector interface routines
 *
 *  Note:  there is a StrongARM bug in the STMIA rn, {regs}^ instruction
 *  that causes it to save wrong values...  Be aware!
 */
#include <linux/config.h>
#include <linux/init.h>

#include <asm/thread_info.h>
#include <asm/glue.h>
#include <asm/ptrace.h>
#include <asm/vfpmacros.h>

#include "entry-header.S"
/*
 * Invalid mode handlers
 *
 * inv_entry builds the start of a pt_regs frame for an exception taken
 * from a mode we cannot handle (e.g. FIQ_32), then each entry point
 * branches to the shared tail which completes the frame and calls
 * bad_mode().
 */
	.macro	inv_entry, sym, reason
	sub	sp, sp, #S_FRAME_SIZE		@ Allocate frame size in one go
	stmia	sp, {r0 - lr}			@ Save XXX r0 - lr
	ldr	r4, .LC\sym			@ r4 = &__temp_<sym> (saved lr/spsr/old_r0)
	mov	r1, #\reason			@ r1 = BAD_* reason code for bad_mode()
	.endm

__pabt_invalid:
	inv_entry abt, BAD_PREFETCH
	b	1f

__dabt_invalid:
	inv_entry abt, BAD_DATA
	b	1f

__irq_invalid:
	inv_entry irq, BAD_IRQ
	b	1f

__und_invalid:
	inv_entry und, BAD_UNDEFINSTR

	@ Common tail: copy the saved pc/cpsr/old_r0 into the frame and
	@ report the impossible entry mode.
1:	zero_fp
	ldmia	r4, {r5 - r7}			@ Get XXX pc, cpsr, old_r0
	add	r4, sp, #S_PC
	stmia	r4, {r5 - r7}			@ Save XXX pc, cpsr, old_r0
	mov	r0, sp				@ r0 = pt_regs for bad_mode()
	and	r2, r6, #31			@ int mode (low 5 bits of saved cpsr)
	b	bad_mode
/*
 * SVC mode handlers
 *
 * svc_entry allocates a pt_regs frame on the SVC stack and saves the
 * interrupted context.  On exit from the macro:
 *   r2 = lr_<exception> (fixed-up pc), r3 = spsr_<exception>,
 *   r4 = orig_r0, and the full frame is complete on the stack.
 */
	.macro	svc_entry, sym
	sub	sp, sp, #S_FRAME_SIZE
	stmia	sp, {r0 - r12}			@ save r0 - r12
	ldr	r2, .LC\sym			@ r2 = &__temp_<sym>
	add	r0, sp, #S_FRAME_SIZE		@ r0 = sp_svc at time of exception
	ldmia	r2, {r2 - r4}			@ get pc, cpsr
	add	r5, sp, #S_SP			@ r5 = &frame->ARM_sp
	mov	r1, lr

	@
	@ We are now ready to fill in the remaining blanks on the stack:
	@
	@  r0 - sp_svc
	@  r1 - lr_svc
	@  r2 - lr_<exception>, already fixed up for correct return/restart
	@  r3 - spsr_<exception>
	@  r4 - orig_r0 (see pt_regs definition in ptrace.h)
	@
	stmia	r5, {r0 - r4}
	.endm
.align 5
__dabt_svc :
svc_ e n t r y a b t
@
@ get ready to re-enable interrupts if appropriate
@
mrs r9 , c p s r
tst r3 , #P S R _ I _ B I T
biceq r9 , r9 , #P S R _ I _ B I T
@
@ Call the processor-specific abort handler:
@
@ r2 - aborted context pc
@ r3 - aborted context cpsr
@
@ The abort handler must return the aborted address in r0, and
@ the fault status register in r1. r9 must be preserved.
@
# ifdef M U L T I _ A B O R T
ldr r4 , . L C p r o c f n s
mov l r , p c
ldr p c , [ r4 ]
# else
bl C P U _ A B O R T _ H A N D L E R
# endif
@
@ set desired IRQ state, then call main handler
@
msr c p s r _ c , r9
mov r2 , s p
bl d o _ D a t a A b o r t
@
@ IRQs off again before pulling preserved data off the stack
@
disable_ i r q r0
@
@ restore SPSR and restart the instruction
@
ldr r0 , [ s p , #S _ P S R ]
msr s p s r _ c x s f , r0
ldmia s p , { r0 - p c } ^ @ load r0 - pc, cpsr
.align 5
__irq_svc :
svc_ e n t r y i r q
# ifdef C O N F I G _ P R E E M P T
get_ t h r e a d _ i n f o r8
ldr r9 , [ r8 , #T I _ P R E E M P T ] @ g e t p r e e m p t c o u n t
add r7 , r9 , #1 @ increment it
str r7 , [ r8 , #T I _ P R E E M P T ]
# endif
1 : get_ i r q n r _ a n d _ b a s e r0 , r6 , r5 , l r
movne r1 , s p
@
@ routine called with r0 = irq number, r1 = struct pt_regs *
@
adrne l r , 1 b
bne a s m _ d o _ I R Q
# ifdef C O N F I G _ P R E E M P T
ldr r0 , [ r8 , #T I _ F L A G S ] @ g e t f l a g s
tst r0 , #_ T I F _ N E E D _ R E S C H E D
blne s v c _ p r e e m p t
preempt_return :
ldr r0 , [ r8 , #T I _ P R E E M P T ] @ r e a d p r e e m p t v a l u e
teq r0 , r7
str r9 , [ r8 , #T I _ P R E E M P T ] @ r e s t o r e p r e e m p t c o u n t
strne r0 , [ r0 , - r0 ] @ bug()
# endif
ldr r0 , [ s p , #S _ P S R ] @ i r q s a r e a l r e a d y d i s a b l e d
msr s p s r _ c x s f , r0
ldmia s p , { r0 - p c } ^ @ load r0 - pc, cpsr
.ltorg
#ifdef CONFIG_PREEMPT
/*
 * Kernel preemption helper, called from __irq_svc when TIF_NEED_RESCHED
 * is set.  Only preempts if the entry preempt count (r9) was zero and no
 * local IRQ/BH is in progress.  Returns to preempt_return when done.
 */
svc_preempt:
	teq	r9, #0				@ was preempt count = 0
	ldreq	r6, .LCirq_stat
	movne	pc, lr				@ no
	ldr	r0, [r6, #4]			@ local_irq_count
	ldr	r1, [r6, #8]			@ local_bh_count
	adds	r0, r0, r1
	movne	pc, lr				@ busy in irq/bh: don't preempt
	mov	r7, #0				@ preempt_schedule_irq
	str	r7, [r8, #TI_PREEMPT]		@ expects preempt_count == 0
1:	bl	preempt_schedule_irq		@ irq en/disable is done inside
	ldr	r0, [r8, #TI_FLAGS]		@ get new tasks TI_FLAGS
	tst	r0, #_TIF_NEED_RESCHED
	beq	preempt_return			@ go again
	b	1b
#endif
.align 5
__und_svc :
svc_ e n t r y u n d
@
@ call emulation code, which returns using r9 if it has emulated
@ the instruction, or the more conventional lr if we are to treat
@ this as a real undefined instruction
@
@ r0 - instruction
@
ldr r0 , [ r2 , #- 4 ]
adr r9 , 1 f
bl c a l l _ f p e
mov r0 , s p @ struct pt_regs *regs
bl d o _ u n d e f i n s t r
@
@ IRQs off again before pulling preserved data off the stack
@
1 : disable_ i r q r0
@
@ restore SPSR and restart the instruction
@
ldr l r , [ s p , #S _ P S R ] @ G e t S V C c p s r
msr s p s r _ c x s f , l r
ldmia s p , { r0 - p c } ^ @ Restore SVC registers
.align 5
__pabt_svc :
svc_ e n t r y a b t
@
@ re-enable interrupts if appropriate
@
mrs r9 , c p s r
tst r3 , #P S R _ I _ B I T
biceq r9 , r9 , #P S R _ I _ B I T
msr c p s r _ c , r9
@
@ set args, then call main handler
@
@ r0 - address of faulting instruction
@ r1 - pointer to registers on stack
@
mov r0 , r2 @ address (pc)
mov r1 , s p @ regs
bl d o _ P r e f e t c h A b o r t @ call abort handler
@
@ IRQs off again before pulling preserved data off the stack
@
disable_ i r q r0
@
@ restore SPSR and restart the instruction
@
ldr r0 , [ s p , #S _ P S R ]
msr s p s r _ c x s f , r0
ldmia s p , { r0 - p c } ^ @ load r0 - pc, cpsr
.align 5
.LCirq :
.word __temp_irq
.LCund :
.word __temp_und
.LCabt :
.word __temp_abt
# ifdef M U L T I _ A B O R T
.LCprocfns :
.word processor
# endif
.LCfp :
.word fp_enter
# ifdef C O N F I G _ P R E E M P T
.LCirq_stat :
.word irq_stat
# endif
/*
 * User mode handlers
 *
 * usr_entry builds a complete pt_regs frame on the SVC stack for an
 * exception taken from user mode, saving sp_usr/lr_usr separately via
 * the user-register form of stmdb.
 */
	.macro	usr_entry, sym
	sub	sp, sp, #S_FRAME_SIZE		@ Allocate frame size in one go
	stmia	sp, {r0 - r12}			@ save r0 - r12
	ldr	r7, .LC\sym			@ r7 = &__temp_<sym>
	add	r5, sp, #S_PC			@ r5 = &frame->ARM_pc
	ldmia	r7, {r2 - r4}			@ Get USR pc, cpsr

	@
	@ We are now ready to fill in the remaining blanks on the stack:
	@
	@  r2 - lr_<exception>, already fixed up for correct return/restart
	@  r3 - spsr_<exception>
	@  r4 - orig_r0 (see pt_regs definition in ptrace.h)
	@
	@ Also, separately save sp_usr and lr_usr
	@
	stmia	r5, {r2 - r4}
	stmdb	r5, {sp, lr}^			@ '^' = user-mode banked sp/lr

	@
	@ Enable the alignment trap while in kernel mode
	@
	alignment_trap r7, r0, __temp_\sym

	@
	@ Clear FP to mark the first stack frame
	@
	zero_fp
	.endm
.align 5
__dabt_usr :
usr_ e n t r y a b t
@
@ Call the processor-specific abort handler:
@
@ r2 - aborted context pc
@ r3 - aborted context cpsr
@
@ The abort handler must return the aborted address in r0, and
@ the fault status register in r1.
@
# ifdef M U L T I _ A B O R T
ldr r4 , . L C p r o c f n s
mov l r , p c
ldr p c , [ r4 ]
# else
bl C P U _ A B O R T _ H A N D L E R
# endif
@
@ IRQs on, then call the main handler
@
enable_ i r q r2
mov r2 , s p
adr l r , r e t _ f r o m _ e x c e p t i o n
b d o _ D a t a A b o r t
.align 5
__irq_usr :
usr_ e n t r y i r q
# ifdef C O N F I G _ P R E E M P T
get_ t h r e a d _ i n f o r8
ldr r9 , [ r8 , #T I _ P R E E M P T ] @ g e t p r e e m p t c o u n t
add r7 , r9 , #1 @ increment it
str r7 , [ r8 , #T I _ P R E E M P T ]
# endif
1 : get_ i r q n r _ a n d _ b a s e r0 , r6 , r5 , l r
movne r1 , s p
adrne l r , 1 b
@
@ routine called with r0 = irq number, r1 = struct pt_regs *
@
bne a s m _ d o _ I R Q
# ifdef C O N F I G _ P R E E M P T
ldr r0 , [ r8 , #T I _ P R E E M P T ]
teq r0 , r7
str r9 , [ r8 , #T I _ P R E E M P T ]
strne r0 , [ r0 , - r0 ]
mov t s k , r8
# else
get_ t h r e a d _ i n f o t s k
# endif
mov w h y , #0
b r e t _ t o _ u s e r
.ltorg
.align 5
__und_usr :
usr_ e n t r y u n d
tst r3 , #P S R _ T _ B I T @ T h u m b m o d e ?
bne f p u n d e f i n s t r @ ignore FP
sub r4 , r2 , #4
@
@ fall through to the emulation code, which returns using r9 if
@ it has emulated the instruction, or the more conventional lr
@ if we are to treat this as a real undefined instruction
@
@ r0 - instruction
@
1 : ldrt r0 , [ r4 ]
adr r9 , r e t _ f r o m _ e x c e p t i o n
adr l r , f p u n d e f i n s t r
@
@ fallthrough to call_fpe
@
/ *
* The o u t o f l i n e f i x u p f o r t h e l d r t a b o v e .
* /
.section .fixup , " ax"
2 : mov p c , r9
.previous
.section _ _ ex_ t a b l e ," a "
.long 1 b, 2 b
.previous
/*
 * Check whether the instruction is a co-processor instruction.
 * If yes, we need to call the relevant co-processor handler.
 *
 * Note that we don't do a full check here for the co-processor
 * instructions; all instructions with bit 27 set are well
 * defined.  The only instructions that should fault are the
 * co-processor instructions.  However, we have to watch out
 * for the ARM6/ARM7 SWI bug.
 *
 * Emulators may wish to make use of the following registers:
 *  r0  = instruction opcode.
 *  r2  = PC+4
 *  r10 = this threads thread_info structure.
 */
call_fpe:
	tst	r0, #0x08000000			@ only CDP/CPRT/LDC/STC have bit 27
#if defined(CONFIG_CPU_ARM610) || defined(CONFIG_CPU_ARM710)
	and	r8, r0, #0x0f000000		@ mask out op-code bits
	teqne	r8, #0x0f000000			@ SWI (ARM6/7 bug)?
#endif
	moveq	pc, lr				@ not a coprocessor insn: bail
	get_thread_info r10			@ get current thread
	and	r8, r0, #0x00000f00		@ mask out CP number
	mov	r7, #1
	add	r6, r10, #TI_USED_CP
	strb	r7, [r6, r8, lsr #8]		@ set appropriate used_cp[]
#ifdef CONFIG_IWMMXT
	@ Test if we need to give access to iWMMXt coprocessors
	ldr	r5, [r10, #TI_FLAGS]
	rsbs	r7, r8, #(1 << 8)		@ CP 0 or 1 only
	movcss	r7, r5, lsr #(TIF_USING_IWMMXT + 1)
	bcs	iwmmxt_task_enable
#endif
	enable_irq r7
	@ Jump table indexed by CP number: pc+8 prefetch means the add
	@ lands on the entry below; the 'mov r0, r0' pads CP#0's slot.
	add	pc, pc, r8, lsr #6
	mov	r0, r0

	mov	pc, lr				@ CP#0
	b	do_fpe				@ CP#1 (FPE)
	b	do_fpe				@ CP#2 (FPE)
	mov	pc, lr				@ CP#3
	mov	pc, lr				@ CP#4
	mov	pc, lr				@ CP#5
	mov	pc, lr				@ CP#6
	mov	pc, lr				@ CP#7
	mov	pc, lr				@ CP#8
	mov	pc, lr				@ CP#9
#ifdef CONFIG_VFP
	b	do_vfp				@ CP#10 (VFP)
	b	do_vfp				@ CP#11 (VFP)
#else
	mov	pc, lr				@ CP#10 (VFP)
	mov	pc, lr				@ CP#11 (VFP)
#endif
	mov	pc, lr				@ CP#12
	mov	pc, lr				@ CP#13
	mov	pc, lr				@ CP#14 (Debug)
	mov	pc, lr				@ CP#15 (Control)
do_fpe:
	ldr	r4, .LCfp
	add	r10, r10, #TI_FPSTATE		@ r10 = workspace
	ldr	pc, [r4]			@ Call FP module USR entry point

/*
 * The FP module is called with these registers set:
 *  r0  = instruction
 *  r2  = PC+4
 *  r9  = normal "successful" return address
 *  r10 = FP workspace
 *  lr  = unrecognised FP instruction return address
 */

	@ fp_enter is patched by the FP emulator at load time; until then
	@ it points at fpundefinstr so FP instructions trap as undefined.
	.data
ENTRY(fp_enter)
	.word	fpundefinstr
	.text

fpundefinstr:
	mov	r0, sp
	adr	lr, ret_from_exception
	b	do_undefinstr
.align 5
__pabt_usr :
usr_ e n t r y a b t
enable_ i r q r0 @ Enable interrupts
mov r0 , r2 @ address (pc)
mov r1 , s p @ regs
bl d o _ P r e f e t c h A b o r t @ call abort handler
/* fall through */
/ *
* This i s t h e r e t u r n c o d e t o u s e r m o d e f o r a b o r t h a n d l e r s
* /
ENTRY( r e t _ f r o m _ e x c e p t i o n )
get_ t h r e a d _ i n f o t s k
mov w h y , #0
b r e t _ t o _ u s e r
/*
 * Register switch for ARMv3 and ARMv4 processors
 * r0 = previous task_struct, r1 = previous thread_info, r2 = next thread_info
 * previous and next are guaranteed not to be the same.
 */
ENTRY(__switch_to)
	add	ip, r1, #TI_CPU_SAVE
	ldr	r3, [r2, #TI_TP_VALUE]
	stmia	ip!, {r4 - sl, fp, sp, lr}	@ Store most regs on stack
	ldr	r6, [r2, #TI_CPU_DOMAIN]!
#if defined(CONFIG_CPU_XSCALE) && !defined(CONFIG_IWMMXT)
	mra	r4, r5, acc0			@ save XScale accumulator too
	stmia	ip, {r4, r5}
#endif
	mov	r4, #0xffff0fff
	str	r3, [r4, #-3]			@ Set TLS ptr (0xffff0ffc helper slot)
	mcr	p15, 0, r6, c3, c0, 0		@ Set domain register
#ifdef CONFIG_VFP
	@ Always disable VFP so we can lazily save/restore the old
	@ state. This occurs in the context of the previous thread.
	VFPFMRX	r4, FPEXC
	bic	r4, r4, #FPEXC_ENABLE
	VFPFMXR	FPEXC, r4
#endif
#if defined(CONFIG_IWMMXT)
	bl	iwmmxt_task_switch
#elif defined(CONFIG_CPU_XSCALE)
	add	r4, r2, #40			@ cpu_context_save->extra
	ldmib	r4, {r4, r5}
	mar	acc0, r4, r5
#endif
	ldmib	r2, {r4 - sl, fp, sp, pc}	@ Load all regs saved previously
_ _ INIT
/ *
* Vector s t u b s .
*
2005-04-26 15:17:42 +01:00
* This c o d e i s c o p i e d t o 0 x f f f f02 0 0 s o w e c a n u s e b r a n c h e s i n t h e
* vectors, r a t h e r t h a n l d r ' s . N o t e t h a t t h i s c o d e m u s t n o t
* exceed 0 x30 0 b y t e s .
2005-04-16 15:20:36 -07:00
*
* Common s t u b e n t r y m a c r o :
* Enter i n I R Q m o d e , s p s r = S V C / U S R C P S R , l r = S V C / U S R P C
* /
.macro vector_ s t u b , n a m e , s y m , c o r r e c t i o n =0
.align 5
vector_ \ n a m e :
ldr r13 , . L C s \ s y m
.if \ correction
sub l r , l r , #\ c o r r e c t i o n
.endif
str l r , [ r13 ] @ save lr_IRQ
mrs l r , s p s r
str l r , [ r13 , #4 ] @ save spsr_IRQ
@
@ now branch to the relevant MODE handling routine
@
mrs r13 , c p s r
bic r13 , r13 , #M O D E _ M A S K
orr r13 , r13 , #M O D E _ S V C
msr s p s r _ c x s f , r13 @ switch to SVC_32 mode
and l r , l r , #15
ldr l r , [ p c , l r , l s l #2 ]
movs p c , l r @ Changes mode and branches
.endm
2005-04-26 15:17:42 +01:00
.globl __stubs_start
2005-04-16 15:20:36 -07:00
__stubs_start :
/ *
* Interrupt d i s p a t c h e r
* /
vector_ s t u b i r q , i r q , 4
.long __irq_usr @ 0 (USR_26 / USR_32)
.long __irq_invalid @ 1 (FIQ_26 / FIQ_32)
.long __irq_invalid @ 2 (IRQ_26 / IRQ_32)
.long __irq_svc @ 3 (SVC_26 / SVC_32)
.long __irq_invalid @ 4
.long __irq_invalid @ 5
.long __irq_invalid @ 6
.long __irq_invalid @ 7
.long __irq_invalid @ 8
.long __irq_invalid @ 9
.long __irq_invalid @ a
.long __irq_invalid @ b
.long __irq_invalid @ c
.long __irq_invalid @ d
.long __irq_invalid @ e
.long __irq_invalid @ f
/ *
* Data a b o r t d i s p a t c h e r
* Enter i n A B T m o d e , s p s r = U S R C P S R , l r = U S R P C
* /
vector_ s t u b d a b t , a b t , 8
.long __dabt_usr @ 0 (USR_26 / USR_32)
.long __dabt_invalid @ 1 (FIQ_26 / FIQ_32)
.long __dabt_invalid @ 2 (IRQ_26 / IRQ_32)
.long __dabt_svc @ 3 (SVC_26 / SVC_32)
.long __dabt_invalid @ 4
.long __dabt_invalid @ 5
.long __dabt_invalid @ 6
.long __dabt_invalid @ 7
.long __dabt_invalid @ 8
.long __dabt_invalid @ 9
.long __dabt_invalid @ a
.long __dabt_invalid @ b
.long __dabt_invalid @ c
.long __dabt_invalid @ d
.long __dabt_invalid @ e
.long __dabt_invalid @ f
/ *
* Prefetch a b o r t d i s p a t c h e r
* Enter i n A B T m o d e , s p s r = U S R C P S R , l r = U S R P C
* /
vector_ s t u b p a b t , a b t , 4
.long __pabt_usr @ 0 (USR_26 / USR_32)
.long __pabt_invalid @ 1 (FIQ_26 / FIQ_32)
.long __pabt_invalid @ 2 (IRQ_26 / IRQ_32)
.long __pabt_svc @ 3 (SVC_26 / SVC_32)
.long __pabt_invalid @ 4
.long __pabt_invalid @ 5
.long __pabt_invalid @ 6
.long __pabt_invalid @ 7
.long __pabt_invalid @ 8
.long __pabt_invalid @ 9
.long __pabt_invalid @ a
.long __pabt_invalid @ b
.long __pabt_invalid @ c
.long __pabt_invalid @ d
.long __pabt_invalid @ e
.long __pabt_invalid @ f
/ *
* Undef i n s t r e n t r y d i s p a t c h e r
* Enter i n U N D m o d e , s p s r = S V C / U S R C P S R , l r = S V C / U S R P C
* /
vector_ s t u b u n d , u n d
.long __und_usr @ 0 (USR_26 / USR_32)
.long __und_invalid @ 1 (FIQ_26 / FIQ_32)
.long __und_invalid @ 2 (IRQ_26 / IRQ_32)
.long __und_svc @ 3 (SVC_26 / SVC_32)
.long __und_invalid @ 4
.long __und_invalid @ 5
.long __und_invalid @ 6
.long __und_invalid @ 7
.long __und_invalid @ 8
.long __und_invalid @ 9
.long __und_invalid @ a
.long __und_invalid @ b
.long __und_invalid @ c
.long __und_invalid @ d
.long __und_invalid @ e
.long __und_invalid @ f
.align 5
/ * = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
* Undefined F I Q s
* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
* Enter i n F I Q m o d e , s p s r = A N Y C P S R , l r = A N Y P C
* MUST P R E S E R V E S V C S P S R , b u t n e e d t o s w i t c h t o S V C m o d e t o s h o w o u r m s g .
* Basically t o s w i t c h m o d e s , w e * H A V E * t o c l o b b e r o n e r e g i s t e r . . . b r a i n
* damage a l e r t ! I d o n ' t t h i n k t h a t w e c a n e x e c u t e a n y c o d e i n h e r e i n a n y
* other m o d e t h a n F I Q . . . O k y o u c a n s w i t c h t o a n o t h e r m o d e , b u t y o u c a n ' t
* get o u t o f t h a t m o d e w i t h o u t c l o b b e r i n g o n e r e g i s t e r .
* /
vector_fiq :
disable_ f i q
subs p c , l r , #4
/ * = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
* Address e x c e p t i o n h a n d l e r
* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
* These a r e n ' t t o o c r i t i c a l .
* ( they' r e n o t s u p p o s e d t o h a p p e n , a n d w o n ' t h a p p e n i n 3 2 - b i t d a t a m o d e ) .
* /
vector_addrexcptn :
b v e c t o r _ a d d r e x c p t n
/*
 * We group all the following data together to optimise
 * for CPUs with separate I & D caches.
 */
	.align	5
.LCvswi:
	.word	vector_swi
.LCsirq:
	.word	__temp_irq
.LCsund:
	.word	__temp_und
.LCsabt:
	.word	__temp_abt

	.globl	__stubs_end
__stubs_end:

	@ The stubs are copied to 0x200 above the vectors; stubs_offset
	@ converts a stub's link-time address into its run-time address
	@ as seen from the relocated vector page.
	.equ	stubs_offset, __vectors_start + 0x200 - __stubs_start

	.globl	__vectors_start
__vectors_start:
	swi	SYS_ERROR0			@ reset vector: bad news if executed
	b	vector_und + stubs_offset
	ldr	pc, .LCvswi + stubs_offset	@ SWI: indirect to vector_swi
	b	vector_pabt + stubs_offset
	b	vector_dabt + stubs_offset
	b	vector_addrexcptn + stubs_offset
	b	vector_irq + stubs_offset
	b	vector_fiq + stubs_offset

	.globl	__vectors_end
__vectors_end:
2005-04-16 15:20:36 -07:00
.data
/ *
* Do n o t r e o r d e r t h e s e , a n d d o n o t i n s e r t e x t r a d a t a b e t w e e n . . .
* /
__temp_irq :
.word 0 @ saved lr_irq
.word 0 @ saved spsr_irq
.word - 1 @ old_r0
__temp_und :
.word 0 @ Saved lr_und
.word 0 @ Saved spsr_und
.word - 1 @ old_r0
__temp_abt :
.word 0 @ Saved lr_abt
.word 0 @ Saved spsr_abt
.word - 1 @ old_r0
.globl cr_alignment
.globl cr_no_alignment
cr_alignment :
.space 4
cr_no_alignment :
.space 4