Commit 83b584d9 authored by Andrew Cooper, committed by Thomas Gleixner

x86/paravirt: Drop {read,write}_cr8() hooks

There is a lot of infrastructure for functionality which is used
exclusively in __{save,restore}_processor_state() on the suspend/resume
path.

cr8 is an alias of APIC_TASKPRI, and APIC_TASKPRI is saved/restored by
lapic_{suspend,resume}().  Saving and restoring cr8 independently of the
rest of the Local APIC state isn't a clever thing to be doing.
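
As the message notes, cr8 is just another view of the local APIC Task Priority Register: architecturally, CR8 bits 3:0 alias TPR bits 7:4. A minimal illustrative sketch of that mapping (the helper names below are hypothetical, not kernel APIs):

    /*
     * Illustrative only: CR8 exposes the high nibble of the local APIC TPR,
     * so any path that saves/restores the full APIC_TASKPRI register (as
     * lapic_{suspend,resume}() already does) also covers everything cr8 holds.
     */
    static inline unsigned long tpr_to_cr8(unsigned int tpr)
    {
            return (tpr >> 4) & 0xf;        /* CR8[3:0] <- TPR[7:4] */
    }

    static inline unsigned int cr8_to_tpr(unsigned long cr8)
    {
            return (cr8 & 0xf) << 4;        /* MOV to CR8 sets TPR[7:4], clears TPR[3:0] */
    }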

Delete the suspend/resume cr8 handling, which shrinks the size of struct
saved_context, and allows for the removal of both PVOPS.
Signed-off-by: Andrew Cooper <andrew.cooper3@citrix.com>
Signed-off-by: Thomas Gleixner <tglx@linutronix.de>
Reviewed-by: Juergen Gross <jgross@suse.com>
Link: https://lkml.kernel.org/r/20190715151641.29210-1-andrew.cooper3@citrix.com
parent 229b969b
@@ -139,18 +139,6 @@ static inline void __write_cr4(unsigned long x)
         PVOP_VCALL1(cpu.write_cr4, x);
 }
 
-#ifdef CONFIG_X86_64
-static inline unsigned long read_cr8(void)
-{
-        return PVOP_CALL0(unsigned long, cpu.read_cr8);
-}
-
-static inline void write_cr8(unsigned long x)
-{
-        PVOP_VCALL1(cpu.write_cr8, x);
-}
-#endif
-
 static inline void arch_safe_halt(void)
 {
         PVOP_VCALL0(irq.safe_halt);
@@ -119,11 +119,6 @@ struct pv_cpu_ops {
         void (*write_cr4)(unsigned long);
 
-#ifdef CONFIG_X86_64
-        unsigned long (*read_cr8)(void);
-        void (*write_cr8)(unsigned long);
-#endif
-
         /* Segment descriptor handling */
         void (*load_tr_desc)(void);
         void (*load_gdt)(const struct desc_ptr *);
@@ -73,20 +73,6 @@ static inline unsigned long native_read_cr4(void)
 void native_write_cr4(unsigned long val);
 
-#ifdef CONFIG_X86_64
-static inline unsigned long native_read_cr8(void)
-{
-        unsigned long cr8;
-        asm volatile("movq %%cr8,%0" : "=r" (cr8));
-        return cr8;
-}
-
-static inline void native_write_cr8(unsigned long val)
-{
-        asm volatile("movq %0,%%cr8" :: "r" (val) : "memory");
-}
-#endif
-
 #ifdef CONFIG_X86_INTEL_MEMORY_PROTECTION_KEYS
 static inline u32 rdpkru(void)
 {
@@ -200,16 +186,6 @@ static inline void wbinvd(void)
 #ifdef CONFIG_X86_64
 
-static inline unsigned long read_cr8(void)
-{
-        return native_read_cr8();
-}
-
-static inline void write_cr8(unsigned long x)
-{
-        native_write_cr8(x);
-}
-
 static inline void load_gs_index(unsigned selector)
 {
         native_load_gs_index(selector);
@@ -34,7 +34,7 @@ struct saved_context {
          */
         unsigned long kernelmode_gs_base, usermode_gs_base, fs_base;
 
-        unsigned long cr0, cr2, cr3, cr4, cr8;
+        unsigned long cr0, cr2, cr3, cr4;
         u64 misc_enable;
         bool misc_enable_saved;
         struct saved_msrs saved_msrs;
@@ -62,7 +62,6 @@ int main(void)
         ENTRY(cr2);
         ENTRY(cr3);
         ENTRY(cr4);
-        ENTRY(cr8);
         ENTRY(gdt_desc);
         BLANK();
 #undef ENTRY
@@ -311,10 +311,6 @@ struct paravirt_patch_template pv_ops = {
         .cpu.read_cr0 = native_read_cr0,
         .cpu.write_cr0 = native_write_cr0,
         .cpu.write_cr4 = native_write_cr4,
-#ifdef CONFIG_X86_64
-        .cpu.read_cr8 = native_read_cr8,
-        .cpu.write_cr8 = native_write_cr8,
-#endif
         .cpu.wbinvd = native_wbinvd,
         .cpu.read_msr = native_read_msr,
         .cpu.write_msr = native_write_msr,
@@ -122,9 +122,6 @@ static void __save_processor_state(struct saved_context *ctxt)
         ctxt->cr2 = read_cr2();
         ctxt->cr3 = __read_cr3();
         ctxt->cr4 = __read_cr4();
-#ifdef CONFIG_X86_64
-        ctxt->cr8 = read_cr8();
-#endif
         ctxt->misc_enable_saved = !rdmsrl_safe(MSR_IA32_MISC_ENABLE,
                                                &ctxt->misc_enable);
         msr_save_context(ctxt);
@@ -207,7 +204,6 @@ static void notrace __restore_processor_state(struct saved_context *ctxt)
 #else
 /* CONFIG X86_64 */
         wrmsrl(MSR_EFER, ctxt->efer);
-        write_cr8(ctxt->cr8);
         __write_cr4(ctxt->cr4);
 #endif
         write_cr3(ctxt->cr3);
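
For context, the Local APIC suspend/resume path already preserves APIC_TASKPRI, which is why dropping ctxt->cr8 from the hunks above loses nothing. A simplified, hedged sketch of that coverage, with abbreviated names rather than the exact lapic_{suspend,resume}() code:

    /* Simplified, illustrative sketch only; not the exact kernel code. */
    #include <asm/apic.h>

    static u32 saved_apic_taskpri;

    static int lapic_suspend_sketch(void)
    {
            /* Saving the whole TPR subsumes the old ctxt->cr8 = read_cr8(). */
            saved_apic_taskpri = apic_read(APIC_TASKPRI);
            return 0;
    }

    static void lapic_resume_sketch(void)
    {
            /* Restoring the TPR also restores what cr8 would have reported. */
            apic_write(APIC_TASKPRI, saved_apic_taskpri);
    }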
@@ -877,16 +877,6 @@ static void xen_write_cr4(unsigned long cr4)
         native_write_cr4(cr4);
 }
 
-#ifdef CONFIG_X86_64
-static inline unsigned long xen_read_cr8(void)
-{
-        return 0;
-}
-static inline void xen_write_cr8(unsigned long val)
-{
-        BUG_ON(val);
-}
-#endif
 
 static u64 xen_read_msr_safe(unsigned int msr, int *err)
 {
@@ -1023,11 +1013,6 @@ static const struct pv_cpu_ops xen_cpu_ops __initconst = {
         .write_cr4 = xen_write_cr4,
 
-#ifdef CONFIG_X86_64
-        .read_cr8 = xen_read_cr8,
-        .write_cr8 = xen_write_cr8,
-#endif
-
         .wbinvd = native_wbinvd,
 
         .read_msr = xen_read_msr,