/*
 * Function calling ABI conversion from Linux to EFI for x86_64
 *
 * Copyright (C) 2007 Intel Corp
 *	Bibo Mao <bibo.mao@intel.com>
 *	Huang Ying <ying.huang@intel.com>
 */
#include <linux/linkage.h>

/*
 * SAVE_XMM - set up a 16-byte-aligned scratch area on the stack and
 * preserve state around a call into EFI firmware.
 *
 *  - saves the caller's %rsp at (%rsp) so RESTORE_XMM can put it back
 *    exactly (the "and" below makes the adjustment variable);
 *  - saves %cr0 at 0x8(%rsp) and executes clts so that touching XMM
 *    registers cannot fault on CR0.TS;
 *  - saves %xmm0-%xmm5 into the aligned area; movaps requires 16-byte
 *    alignment, hence the "and $~0xf, %rsp".
 *
 * Clobbers: %rax. Must be paired with RESTORE_XMM.
 */
#define SAVE_XMM			\
	mov %rsp, %rax;			\
	subq $0x70, %rsp;		\
	and $~0xf, %rsp;		\
	mov %rax, (%rsp);		\
	mov %cr0, %rax;			\
	clts;				\
	mov %rax, 0x8(%rsp);		\
	movaps %xmm0, 0x60(%rsp);	\
	movaps %xmm1, 0x50(%rsp);	\
	movaps %xmm2, 0x40(%rsp);	\
	movaps %xmm3, 0x30(%rsp);	\
	movaps %xmm4, 0x20(%rsp);	\
	movaps %xmm5, 0x10(%rsp)
/*
 * RESTORE_XMM - undo SAVE_XMM: reload %xmm0-%xmm5 from the aligned
 * save area, restore the saved %cr0 (re-arming CR0.TS if it was set
 * before clts), and finally restore the caller's original %rsp that
 * SAVE_XMM stashed at (%rsp).
 *
 * Clobbers: %rsi.
 */
#define RESTORE_XMM			\
	movaps 0x60(%rsp), %xmm0;	\
	movaps 0x50(%rsp), %xmm1;	\
	movaps 0x40(%rsp), %xmm2;	\
	movaps 0x30(%rsp), %xmm3;	\
	movaps 0x20(%rsp), %xmm4;	\
	movaps 0x10(%rsp), %xmm5;	\
	mov 0x8(%rsp), %rsi;		\
	mov %rsi, %cr0;			\
	mov (%rsp), %rsp
/*
 * efi_call0(void *func)
 *
 * Call a zero-argument EFI (Microsoft x64 ABI) function.
 * In: %rdi = EFI function pointer.
 * The 32-byte stack reservation is the shadow/home space the MS x64
 * ABI requires the caller to provide; it also keeps %rsp 16-byte
 * aligned at the call (SAVE_XMM aligned it first).
 */
ENTRY(efi_call0)
	SAVE_XMM
	subq $32, %rsp
	call *%rdi
	addq $32, %rsp
	RESTORE_XMM
	ret
ENDPROC(efi_call0)
2008-01-30 13:31:19 +01:00
ENTRY( e f i _ c a l l 1 )
SAVE_ X M M
subq $ 3 2 , % r s p
mov % r s i , % r c x
call * % r d i
addq $ 3 2 , % r s p
RESTORE_ X M M
ret
2009-02-23 22:57:02 +03:00
ENDPROC( e f i _ c a l l 1 )
/*
 * efi_call2(void *func, u64 arg1, u64 arg2)
 *
 * In: %rdi = EFI function pointer, %rsi = arg1, %rdx = arg2.
 * arg2 is already in %rdx in both the SysV and MS x64 conventions,
 * so only arg1 needs moving (%rsi -> %rcx).
 */
ENTRY(efi_call2)
	SAVE_XMM
	subq $32, %rsp
	mov %rsi, %rcx
	call *%rdi
	addq $32, %rsp
	RESTORE_XMM
	ret
ENDPROC(efi_call2)
/*
 * efi_call3(void *func, u64 arg1, u64 arg2, u64 arg3)
 *
 * In: %rdi = func, %rsi = arg1, %rdx = arg2, %rcx = arg3.
 * Shuffle SysV -> MS x64: arg3 %rcx -> %r8 first (before %rcx is
 * overwritten), then arg1 %rsi -> %rcx; arg2 stays in %rdx.
 */
ENTRY(efi_call3)
	SAVE_XMM
	subq $32, %rsp
	mov %rcx, %r8
	mov %rsi, %rcx
	call *%rdi
	addq $32, %rsp
	RESTORE_XMM
	ret
ENDPROC(efi_call3)
/*
 * efi_call4(void *func, u64 arg1, u64 arg2, u64 arg3, u64 arg4)
 *
 * In: %rdi = func, %rsi/%rdx/%rcx/%r8 = arg1..arg4.
 * Shuffle in reverse order so nothing is clobbered before it is read:
 * %r8 -> %r9, %rcx -> %r8, %rsi -> %rcx (arg2 stays in %rdx).
 */
ENTRY(efi_call4)
	SAVE_XMM
	subq $32, %rsp
	mov %r8, %r9
	mov %rcx, %r8
	mov %rsi, %rcx
	call *%rdi
	addq $32, %rsp
	RESTORE_XMM
	ret
ENDPROC(efi_call4)
/*
 * efi_call5(void *func, u64 arg1, ..., u64 arg5)
 *
 * In: %rdi = func, %rsi/%rdx/%rcx/%r8/%r9 = arg1..arg5.
 * MS x64 passes only four args in registers; arg5 goes on the stack
 * just above the 32-byte shadow space, at 32(%rsp). 48 bytes are
 * reserved (32 shadow + arg5 slot + padding) to keep %rsp 16-byte
 * aligned at the call.
 */
ENTRY(efi_call5)
	SAVE_XMM
	subq $48, %rsp
	mov %r9, 32(%rsp)
	mov %r8, %r9
	mov %rcx, %r8
	mov %rsi, %rcx
	call *%rdi
	addq $48, %rsp
	RESTORE_XMM
	ret
ENDPROC(efi_call5)
/*
 * efi_call6(void *func, u64 arg1, ..., u64 arg6)
 *
 * In: %rdi = func, %rsi/%rdx/%rcx/%r8/%r9 = arg1..arg5; arg6 was
 * passed on the caller's stack (SysV: first stack slot, above the
 * return address).
 * SAVE_XMM stashed the caller's original %rsp at (%rsp), so arg6 is
 * fetched from 8(original_rsp) *before* the local subq moves %rsp.
 * arg5 and arg6 land at 32(%rsp)/40(%rsp) above the shadow space.
 */
ENTRY(efi_call6)
	SAVE_XMM
	mov (%rsp), %rax
	mov 8(%rax), %rax
	subq $48, %rsp
	mov %r9, 32(%rsp)
	mov %rax, 40(%rsp)
	mov %r8, %r9
	mov %rcx, %r8
	mov %rsi, %rcx
	call *%rdi
	addq $48, %rsp
	RESTORE_XMM
	ret
ENDPROC(efi_call6)