x86: add memory clobber to save/loadsegment
Add "memory" clobbers to savesegment and loadsegment, since they can affect memory accesses and we never want the compiler to reorder them with respect to memory references. Signed-off-by: Jeremy Fitzhardinge <jeremy.fitzhardinge@citrix.com> Cc: xen-devel <xen-devel@lists.xensource.com> Cc: Stephen Tweedie <sct@redhat.com> Cc: Eduardo Habkost <ehabkost@redhat.com> Cc: Mark McLoughlin <markmc@redhat.com> Signed-off-by: Ingo Molnar <mingo@elte.hu>
parent bea41808ef
commit d338c73c39
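For background, a minimal sketch (not part of the patch) of what a "memory" clobber buys: GCC treats an asm statement with a "memory" clobber as a potential read or write of arbitrary memory, so it will not cache memory values in registers across the statement or move loads and stores to the other side of it. That is exactly the ordering guarantee the commit message asks for around segment loads and saves.

#include <stdio.h>

static int data;
static int flag;

static void without_clobber(void)
{
	data = 42;
	/* No "memory" clobber: GCC may sink the store to 'data' below
	 * the asm, or hoist the store to 'flag' above it. */
	asm volatile("");
	flag = 1;
}

static void with_clobber(void)
{
	data = 42;
	/* "memory" clobber: both stores stay on their own side of the
	 * asm, the property wanted for loadsegment()/savesegment(). */
	asm volatile("" : : : "memory");
	flag = 1;
}

int main(void)
{
	without_clobber();
	with_clobber();
	printf("data=%d flag=%d\n", data, flag);
	return 0;
}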
@@ -153,14 +153,14 @@ extern void load_gs_index(unsigned);
 		     "jmp 2b\n"			\
 		     ".previous\n"			\
 		     _ASM_EXTABLE(1b,3b)		\
-		     : :"r" (value), "r" (0))
+		     : :"r" (value), "r" (0) : "memory")
 
 
 /*
  * Save a segment register away
  */
 #define savesegment(seg, value)				\
-	asm volatile("mov %%" #seg ",%0":"=rm" (value))
+	asm("mov %%" #seg ",%0":"=rm" (value) : : "memory")
 
 static inline unsigned long get_limit(unsigned long segment)
 {
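For context, a hedged sketch of a typical caller (the struct and function names are illustrative assumptions, not taken from this patch; real callers live in the arch/x86 context-switch code): the selectors being saved and loaded sit right next to ordinary memory accesses, which is why reordering across these asm statements must be forbidden.

/* Sketch of a caller; assumes the savesegment()/loadsegment() macros
 * defined above.  Names are illustrative only. */
struct seg_state {
	unsigned long gs;	/* saved %gs selector */
};

static inline void switch_gs_sketch(struct seg_state *prev,
				    struct seg_state *next)
{
	savesegment(gs, prev->gs);	/* record the outgoing selector */
	loadsegment(gs, next->gs);	/* install the incoming selector */
	/* Any memory accesses the compiler places around these two
	 * statements stay on their original side, thanks to the
	 * "memory" clobbers added by this patch. */
}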