Commit 8d783b3e authored by Pavel Machek, committed by Linus Torvalds

[PATCH] swsusp: clean assembly parts

This patch fixes register saving so that each register is saved only once,
and adds the missing save of %cr8 on x86-64.  It also reorders the code so
that save/restore is more logical and safer (segment registers should be
restored after the GDT).
Signed-off-by: Pavel Machek <pavel@suse.cz>
Signed-off-by: Andrew Morton <akpm@osdl.org>
Signed-off-by: Linus Torvalds <torvalds@osdl.org>
parent c61978b3
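
A minimal sketch (illustrative only, not part of the commit) of the i386 restore ordering this patch establishes: control registers first, then the descriptor tables via lgdt/lidt, and only then the segment registers, with ltr left to fix_processor_context(). The function name is hypothetical, and the cr4/cr3 lines are assumed to follow the same pattern as the cr2/cr0 and %gs/%ss lines visible in the hunks below.

/*
 * Illustrative sketch only (hypothetical helper): the post-patch i386
 * restore order used by __restore_processor_state().
 */
static void restore_order_sketch(struct saved_context *ctxt)
{
	/* 1. control registers */
	asm volatile ("movl %0, %%cr4" :: "r" (ctxt->cr4));	/* assumed: same pattern as cr2/cr0 */
	asm volatile ("movl %0, %%cr3" :: "r" (ctxt->cr3));	/* assumed: same pattern as cr2/cr0 */
	asm volatile ("movl %0, %%cr2" :: "r" (ctxt->cr2));
	asm volatile ("movl %0, %%cr0" :: "r" (ctxt->cr0));

	/*
	 * 2. descriptor tables: reload the GDT/IDT before touching the
	 * segment registers, since the selectors loaded below index into
	 * the restored GDT; ltr is done in fix_processor_context().
	 */
	asm volatile ("lgdt %0" :: "m" (ctxt->gdt_limit));
	asm volatile ("lidt %0" :: "m" (ctxt->idt_limit));

	/* 3. segment registers (the other data segments are reloaded the same way) */
	asm volatile ("movw %0, %%gs" :: "r" (ctxt->gs));
	asm volatile ("movw %0, %%ss" :: "r" (ctxt->ss));

	fix_processor_context();	/* does ltr, among other fixups */
}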
@@ -44,7 +44,6 @@ void __save_processor_state(struct saved_context *ctxt)
 	 */
 	asm volatile ("sgdt %0" : "=m" (ctxt->gdt_limit));
 	asm volatile ("sidt %0" : "=m" (ctxt->idt_limit));
-	asm volatile ("sldt %0" : "=m" (ctxt->ldt));
 	asm volatile ("str %0" : "=m" (ctxt->tr));
 	/*
@@ -107,7 +106,6 @@ static void fix_processor_context(void)
 void __restore_processor_state(struct saved_context *ctxt)
 {
 	/*
 	 * control registers
 	 */
@@ -116,6 +114,13 @@ void __restore_processor_state(struct saved_context *ctxt)
 	asm volatile ("movl %0, %%cr2" :: "r" (ctxt->cr2));
 	asm volatile ("movl %0, %%cr0" :: "r" (ctxt->cr0));
+	/*
+	 * now restore the descriptor tables to their proper values
+	 * ltr is done i fix_processor_context().
+	 */
+	asm volatile ("lgdt %0" :: "m" (ctxt->gdt_limit));
+	asm volatile ("lidt %0" :: "m" (ctxt->idt_limit));
 	/*
 	 * segment registers
 	 */
@@ -124,14 +129,6 @@ void __restore_processor_state(struct saved_context *ctxt)
 	asm volatile ("movw %0, %%gs" :: "r" (ctxt->gs));
 	asm volatile ("movw %0, %%ss" :: "r" (ctxt->ss));
-	/*
-	 * now restore the descriptor tables to their proper values
-	 * ltr is done i fix_processor_context().
-	 */
-	asm volatile ("lgdt %0" :: "m" (ctxt->gdt_limit));
-	asm volatile ("lidt %0" :: "m" (ctxt->idt_limit));
-	asm volatile ("lldt %0" :: "m" (ctxt->ldt));
 	/*
 	 * sysenter MSRs
 	 */
...
@@ -44,7 +44,6 @@ void __save_processor_state(struct saved_context *ctxt)
 	 */
 	asm volatile ("sgdt %0" : "=m" (ctxt->gdt_limit));
 	asm volatile ("sidt %0" : "=m" (ctxt->idt_limit));
-	asm volatile ("sldt %0" : "=m" (ctxt->ldt));
 	asm volatile ("str %0" : "=m" (ctxt->tr));
 	/* XMM0..XMM15 should be handled by kernel_fpu_begin(). */
@@ -69,6 +68,7 @@ void __save_processor_state(struct saved_context *ctxt)
 	asm volatile ("movq %%cr2, %0" : "=r" (ctxt->cr2));
 	asm volatile ("movq %%cr3, %0" : "=r" (ctxt->cr3));
 	asm volatile ("movq %%cr4, %0" : "=r" (ctxt->cr4));
+	asm volatile ("movq %%cr8, %0" : "=r" (ctxt->cr8));
 }
 void save_processor_state(void)
@@ -90,11 +90,19 @@ void __restore_processor_state(struct saved_context *ctxt)
 	/*
 	 * control registers
 	 */
+	asm volatile ("movq %0, %%cr8" :: "r" (ctxt->cr8));
 	asm volatile ("movq %0, %%cr4" :: "r" (ctxt->cr4));
 	asm volatile ("movq %0, %%cr3" :: "r" (ctxt->cr3));
 	asm volatile ("movq %0, %%cr2" :: "r" (ctxt->cr2));
 	asm volatile ("movq %0, %%cr0" :: "r" (ctxt->cr0));
+	/*
+	 * now restore the descriptor tables to their proper values
+	 * ltr is done i fix_processor_context().
+	 */
+	asm volatile ("lgdt %0" :: "m" (ctxt->gdt_limit));
+	asm volatile ("lidt %0" :: "m" (ctxt->idt_limit));
 	/*
 	 * segment registers
 	 */
@@ -108,14 +116,6 @@ void __restore_processor_state(struct saved_context *ctxt)
 	wrmsrl(MSR_GS_BASE, ctxt->gs_base);
 	wrmsrl(MSR_KERNEL_GS_BASE, ctxt->gs_kernel_base);
-	/*
-	 * now restore the descriptor tables to their proper values
-	 * ltr is done i fix_processor_context().
-	 */
-	asm volatile ("lgdt %0" :: "m" (ctxt->gdt_limit));
-	asm volatile ("lidt %0" :: "m" (ctxt->idt_limit));
-	asm volatile ("lldt %0" :: "m" (ctxt->ldt));
 	fix_processor_context();
 	do_fpu_end();
...
@@ -16,7 +16,7 @@ arch_prepare_suspend(void)
 struct saved_context {
 	u16 ds, es, fs, gs, ss;
 	unsigned long gs_base, gs_kernel_base, fs_base;
-	unsigned long cr0, cr2, cr3, cr4;
+	unsigned long cr0, cr2, cr3, cr4, cr8;
 	u16 gdt_pad;
 	u16 gdt_limit;
 	unsigned long gdt_base;
...
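
For the x86-64 side, a minimal self-contained sketch of the %cr8 handling this patch adds: %cr8 (the task-priority register) is saved together with the other control registers and written back first on restore. The helper names and the reduced struct below are hypothetical; the real code is __save_processor_state()/__restore_processor_state() in the hunks above, and the cr0 save line is assumed to follow the same pattern as the cr2/cr3/cr4 lines shown there.

/* Illustrative sketch only -- not the kernel's actual helpers. */
struct cr_state {				/* hypothetical subset of saved_context */
	unsigned long cr0, cr2, cr3, cr4, cr8;
};

static void save_crs_sketch(struct cr_state *s)
{
	asm volatile ("movq %%cr0, %0" : "=r" (s->cr0));	/* assumed: same pattern */
	asm volatile ("movq %%cr2, %0" : "=r" (s->cr2));
	asm volatile ("movq %%cr3, %0" : "=r" (s->cr3));
	asm volatile ("movq %%cr4, %0" : "=r" (s->cr4));
	asm volatile ("movq %%cr8, %0" : "=r" (s->cr8));	/* added by this patch */
}

static void restore_crs_sketch(struct cr_state *s)
{
	asm volatile ("movq %0, %%cr8" :: "r" (s->cr8));	/* added by this patch */
	asm volatile ("movq %0, %%cr4" :: "r" (s->cr4));
	asm volatile ("movq %0, %%cr3" :: "r" (s->cr3));
	asm volatile ("movq %0, %%cr2" :: "r" (s->cr2));
	asm volatile ("movq %0, %%cr0" :: "r" (s->cr0));
}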