2005-04-17 02:20:36 +04:00
# ifndef __M68KNOMMU_ENTRY_H
# define __M68KNOMMU_ENTRY_H
# include <asm/setup.h>
# include <asm/page.h>
/*
 * Stack layout in 'ret_from_exception':
 *
 * This allows access to the syscall arguments in registers d1-d5
 *
 *	 0(sp) - d1
 *	 4(sp) - d2
 *	 8(sp) - d3
 *	 C(sp) - d4
 *	10(sp) - d5
 *	14(sp) - a0
 *	18(sp) - a1
 *	1C(sp) - a2
 *	20(sp) - d0
 *	24(sp) - orig_d0
 *	28(sp) - stack adjustment
 *	2C(sp) - [ sr              ] [ format & vector ]
 *	2E(sp) - [ pc-hiword       ] [ sr              ]
 *	30(sp) - [ pc-loword       ] [ pc-hiword       ]
 *	32(sp) - [ format & vector ] [ pc-loword       ]
 *	           ^^^^^^^^^^^^^^^     ^^^^^^^^^^^^^^^^^
 *	                M68K                COLDFIRE
 */
2010-10-07 17:08:55 +04:00
# define ALLOWINT (~0x700)
2005-04-17 02:20:36 +04:00
# ifdef __ASSEMBLY__
# define SWITCH_STACK_SIZE (6*4+4) /* Includes return address */
/*
* This defines the normal kernel pt - regs layout .
*
* regs are a2 - a6 and d6 - d7 preserved by C code
* the kernel doesn ' t mess with usp unless it needs to
*/
# ifdef CONFIG_COLDFIRE
2010-11-04 06:53:26 +03:00
# ifdef CONFIG_COLDFIRE_SW_A7
2005-04-17 02:20:36 +04:00
/*
 * This is made a little more tricky on older ColdFires. There is no
 * separate supervisor and user stack pointers. Need to artificially
 * construct a usp in software... When doing this we need to disable
 * interrupts, otherwise bad things will happen.
 */
2010-11-04 06:53:26 +03:00
.globl sw_usp
.globl sw_ksp

/*
 * SAVE_ALL for ColdFire parts that emulate the user stack pointer in
 * software (sw_usp/sw_ksp). On entry the hardware has pushed an 8-byte
 * exception frame (format/vector word, sr, pc) on the current stack.
 * If the exception came from user mode we switch %sp to the kernel
 * stack and copy the exception frame across; either way the pt_regs
 * layout documented at the top of this file is built on the kernel
 * stack. Interrupts are disabled for the whole sequence because the
 * sw_usp/sw_ksp juggling is not reentrant.
 */
.macro SAVE_ALL
	move	#0x2700,%sr		/* disable intrs */
	btst	#5,%sp@(2)		/* from user? (S-bit of saved sr) */
	bnes	6f			/* no, already on kernel stack */
	movel	%sp,sw_usp		/* save user sp */
	addql	#8,sw_usp		/* remove exception frame from usp */
	movel	sw_ksp,%sp		/* switch to kernel sp */
	subql	#8,%sp			/* room for exception frame */
	clrl	%sp@-			/* stkadj */
	movel	%d0,%sp@-		/* orig d0 */
	movel	%d0,%sp@-		/* d0 */
	lea	%sp@(-32),%sp		/* space for 8 regs */
	moveml	%d1-%d5/%a0-%a2,%sp@
	movel	sw_usp,%a0		/* get usp */
	movel	%a0@-,%sp@(PT_OFF_PC)	/* copy exception program counter */
	movel	%a0@-,%sp@(PT_OFF_FORMATVEC) /* copy exception format/vector/sr */
	bra	7f
6:
	clrl	%sp@-			/* stkadj */
	movel	%d0,%sp@-		/* orig d0 */
	movel	%d0,%sp@-		/* d0 */
	lea	%sp@(-32),%sp		/* space for 8 regs */
	moveml	%d1-%d5/%a0-%a2,%sp@
7:
.endm
2010-11-04 06:53:26 +03:00
/*
 * RESTORE_USER for the software-usp ColdFires: undo SAVE_ALL and
 * return to user mode. The exception frame (format/vector, sr, pc) is
 * copied back onto the user stack, the kernel sp is remembered in
 * sw_ksp for the next exception, and the rte executes off the user
 * stack. Interrupts must stay disabled while the two stacks are
 * inconsistent.
 */
.macro RESTORE_USER
	move	#0x2700,%sr		/* disable intrs */
	movel	sw_usp,%a0		/* get usp */
	movel	%sp@(PT_OFF_PC),%a0@-	/* copy exception program counter */
	movel	%sp@(PT_OFF_FORMATVEC),%a0@- /* copy exception format/vector/sr */
	moveml	%sp@,%d1-%d5/%a0-%a2
	lea	%sp@(32),%sp		/* space for 8 regs */
	movel	%sp@+,%d0
	addql	#4,%sp			/* orig d0 */
	addl	%sp@+,%sp		/* stkadj */
	addql	#8,%sp			/* remove exception frame */
	movel	%sp,sw_ksp		/* save ksp for next entry */
	subql	#8,sw_usp		/* account for frame pushed on usp */
	movel	sw_usp,%sp		/* restore usp */
	rte
.endm
2010-11-04 06:53:26 +03:00
/* Read the (software-emulated) user stack pointer into %a3. */
.macro RDUSP
	movel	sw_usp,%a3
.endm

/* Write %a3 to the (software-emulated) user stack pointer. */
.macro WRUSP
	movel	%a3,sw_usp
.endm
# else /* !CONFIG_COLDFIRE_SW_A7 */
2005-04-17 02:20:36 +04:00
/*
 * Modern ColdFire parts have separate supervisor and user stack
 * pointers. Simple load and restore macros for this case.
 */
2010-11-04 06:53:26 +03:00
/*
 * SAVE_ALL for ColdFire parts with a hardware user stack pointer:
 * the CPU has already switched to the supervisor stack and pushed the
 * exception frame, so we only need to build the rest of the pt_regs
 * layout below it.
 */
.macro SAVE_ALL
	move	#0x2700,%sr		/* disable intrs */
	clrl	%sp@-			/* stkadj */
	movel	%d0,%sp@-		/* orig d0 */
	movel	%d0,%sp@-		/* d0 */
	lea	%sp@(-32),%sp		/* space for 8 regs */
	moveml	%d1-%d5/%a0-%a2,%sp@
.endm
2010-11-04 06:53:26 +03:00
/*
 * RESTORE_USER for hardware-usp ColdFires: pop the pt_regs frame and
 * rte; the CPU restores the user stack pointer itself.
 */
.macro RESTORE_USER
	moveml	%sp@,%d1-%d5/%a0-%a2
	lea	%sp@(32),%sp		/* space for 8 regs */
	movel	%sp@+,%d0
	addql	#4,%sp			/* orig d0 */
	addl	%sp@+,%sp		/* stkadj */
	rte
.endm
2010-11-04 06:53:26 +03:00
/*
 * Read the hardware user stack pointer into %a3.
 * Emitted as a raw opcode because some assemblers do not accept
 * "move %usp,%a3" for ColdFire.
 */
.macro RDUSP
	/*move	%usp,%a3*/
	.word	0x4e6b
.endm

/*
 * Write %a3 to the hardware user stack pointer.
 * Raw opcode for the same assembler-support reason as RDUSP.
 */
.macro WRUSP
	/*move	%a3,%usp*/
	.word	0x4e63
.endm
# endif /* !CONFIG_COLDFIRE_SW_A7 */
2005-04-17 02:20:36 +04:00
/*
 * Save/restore the C callee-saved registers (a3-a6/d6-d7) around a
 * context switch. ColdFire moveml cannot predecrement, so the stack
 * is adjusted with lea first/last.
 */
.macro SAVE_SWITCH_STACK
	lea	%sp@(-24),%sp		/* 6 regs */
	moveml	%a3-%a6/%d6-%d7,%sp@
.endm

.macro RESTORE_SWITCH_STACK
	moveml	%sp@,%a3-%a6/%d6-%d7
	lea	%sp@(24),%sp		/* 6 regs */
.endm
# else /* !CONFIG_COLDFIRE */
/*
 * Standard 68k interrupt entry and exit macros.
 */
/*
 * Build the pt_regs frame on a classic 68k, which supports
 * predecrement addressing on moveml directly.
 */
.macro SAVE_ALL
	clrl	%sp@-			/* stkadj */
	movel	%d0,%sp@-		/* orig d0 */
	movel	%d0,%sp@-		/* d0 */
	moveml	%d1-%d5/%a0-%a2,%sp@-
.endm
/* Pop the pt_regs frame built by SAVE_ALL and return from exception. */
.macro RESTORE_ALL
	moveml	%sp@+,%a0-%a2/%d1-%d5
	movel	%sp@+,%d0
	addql	#4,%sp			/* orig d0 */
	addl	%sp@+,%sp		/* stkadj */
	rte
.endm
/* Save the C callee-saved registers (a3-a6/d6-d7) for a context switch. */
.macro SAVE_SWITCH_STACK
	moveml	%a3-%a6/%d6-%d7,%sp@-
.endm
/* Restore the C callee-saved registers saved by SAVE_SWITCH_STACK. */
.macro RESTORE_SWITCH_STACK
	moveml	%sp@+,%a3-%a6/%d6-%d7
.endm
2010-11-04 06:53:26 +03:00
#endif /* !CONFIG_COLDFIRE */
2005-04-17 02:20:36 +04:00
# endif /* __ASSEMBLY__ */
# endif /* __M68KNOMMU_ENTRY_H */