Commit 051a2d3e authored by Sean Christopherson, committed by Paolo Bonzini

KVM: VMX: Explicitly reference RCX as the vmx_vcpu pointer in asm blobs

Use '%% " _ASM_CX"' instead of '%0' to dereference RCX, i.e. the
'struct vcpu_vmx' pointer, in the VM-Enter asm blobs of vmx_vcpu_run()
and nested_vmx_check_vmentry_hw().  Using the symbolic name means that
adding/removing an output parameter(s) requires "rewriting" almost all
of the asm blob, which makes it nearly impossible to understand what's
being changed in even the most minor patches.

Opportunistically improve the code comments.
Signed-off-by: Sean Christopherson <sean.j.christopherson@intel.com>
Reviewed-by: Andi Kleen <ak@linux.intel.com>
Signed-off-by: Paolo Bonzini <pbonzini@redhat.com>
parent c6ad4597
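
To see the churn the message describes, consider a minimal standalone sketch of the same inline-asm pattern (not from this commit; 'struct toy', 'toy_is_launched' and the '[launched]' operand are invented names, and x86-64 with GCC/Clang extended asm is assumed). The "c" constraint pins the pointer in RCX, which is what lets the template name the register directly; with a positional "%0" instead, inserting a new output operand ahead of it would renumber the pointer to "%1" and touch every dereference in the template.

    #include <stddef.h>

    struct toy { int launched; };

    static inline int toy_is_launched(struct toy *t)
    {
    	int ret = 0;

    	asm("cmpl $0, %c[launched](%%rcx)\n\t"	/* positional form would be (%0) */
    	    "setne %b[ret]"			/* EFLAGS.ZF -> low byte of 'ret' */
    	    : [ret] "+r" (ret)
    	    : "c" (t),				/* pin 't' in RCX */
    	      [launched] "i" (offsetof(struct toy, launched))
    	    : "cc");
    	return ret;
    }

Here '%c[launched]' prints the offsetof() constant without the '$' prefix, just as the kernel blobs use it for '%c[host_rsp]', '%c[launched]' and friends; reordering or extending the constraint lists leaves this template untouched.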
--- a/arch/x86/kvm/vmx/nested.c
+++ b/arch/x86/kvm/vmx/nested.c
@@ -2759,17 +2759,17 @@ static int __noclone nested_vmx_check_vmentry_hw(struct kvm_vcpu *vcpu)
 	asm(
 		/* Set HOST_RSP */
 		__ex("vmwrite %%" _ASM_SP ", %%" _ASM_DX) "\n\t"
-		"mov %%" _ASM_SP ", %c[host_rsp](%0)\n\t"
+		"mov %%" _ASM_SP ", %c[host_rsp](%% " _ASM_CX")\n\t"
 
 		/* Check if vmlaunch or vmresume is needed */
-		"cmpl $0, %c[launched](%0)\n\t"
+		"cmpl $0, %c[launched](%% " _ASM_CX")\n\t"
 		"jne 1f\n\t"
 		__ex("vmlaunch") "\n\t"
 		"jmp 2f\n\t"
 		"1: " __ex("vmresume") "\n\t"
 		"2: "
 		/* Set vmx->fail accordingly */
-		"setbe %c[fail](%0)\n\t"
+		"setbe %c[fail](%% " _ASM_CX")\n\t"
 
 		".pushsection .rodata\n\t"
 		".global vmx_early_consistency_check_return\n\t"
--- a/arch/x86/kvm/vmx/vmx.c
+++ b/arch/x86/kvm/vmx/vmx.c
@@ -6440,9 +6440,9 @@ static void __noclone vmx_vcpu_run(struct kvm_vcpu *vcpu)
 		"push %%" _ASM_DX "; push %%" _ASM_BP ";"
 		"push %%" _ASM_CX " \n\t" /* placeholder for guest rcx */
 		"push %%" _ASM_CX " \n\t"
-		"cmp %%" _ASM_SP ", %c[host_rsp](%0) \n\t"
+		"cmp %%" _ASM_SP ", %c[host_rsp](%%" _ASM_CX ") \n\t"
 		"je 1f \n\t"
-		"mov %%" _ASM_SP ", %c[host_rsp](%0) \n\t"
+		"mov %%" _ASM_SP ", %c[host_rsp](%%" _ASM_CX ") \n\t"
 		/* Avoid VMWRITE when Enlightened VMCS is in use */
 		"test %%" _ASM_SI ", %%" _ASM_SI " \n\t"
 		"jz 2f \n\t"
@@ -6452,32 +6452,33 @@ static void __noclone vmx_vcpu_run(struct kvm_vcpu *vcpu)
 		__ex("vmwrite %%" _ASM_SP ", %%" _ASM_DX) "\n\t"
 		"1: \n\t"
 		/* Reload cr2 if changed */
-		"mov %c[cr2](%0), %%" _ASM_AX " \n\t"
+		"mov %c[cr2](%%" _ASM_CX "), %%" _ASM_AX " \n\t"
 		"mov %%cr2, %%" _ASM_DX " \n\t"
 		"cmp %%" _ASM_AX ", %%" _ASM_DX " \n\t"
 		"je 3f \n\t"
 		"mov %%" _ASM_AX", %%cr2 \n\t"
 		"3: \n\t"
 		/* Check if vmlaunch or vmresume is needed */
-		"cmpl $0, %c[launched](%0) \n\t"
+		"cmpl $0, %c[launched](%%" _ASM_CX ") \n\t"
 		/* Load guest registers. Don't clobber flags. */
-		"mov %c[rax](%0), %%" _ASM_AX " \n\t"
-		"mov %c[rbx](%0), %%" _ASM_BX " \n\t"
-		"mov %c[rdx](%0), %%" _ASM_DX " \n\t"
-		"mov %c[rsi](%0), %%" _ASM_SI " \n\t"
-		"mov %c[rdi](%0), %%" _ASM_DI " \n\t"
-		"mov %c[rbp](%0), %%" _ASM_BP " \n\t"
+		"mov %c[rax](%%" _ASM_CX "), %%" _ASM_AX " \n\t"
+		"mov %c[rbx](%%" _ASM_CX "), %%" _ASM_BX " \n\t"
+		"mov %c[rdx](%%" _ASM_CX "), %%" _ASM_DX " \n\t"
+		"mov %c[rsi](%%" _ASM_CX "), %%" _ASM_SI " \n\t"
+		"mov %c[rdi](%%" _ASM_CX "), %%" _ASM_DI " \n\t"
+		"mov %c[rbp](%%" _ASM_CX "), %%" _ASM_BP " \n\t"
 #ifdef CONFIG_X86_64
-		"mov %c[r8](%0), %%r8 \n\t"
-		"mov %c[r9](%0), %%r9 \n\t"
-		"mov %c[r10](%0), %%r10 \n\t"
-		"mov %c[r11](%0), %%r11 \n\t"
-		"mov %c[r12](%0), %%r12 \n\t"
-		"mov %c[r13](%0), %%r13 \n\t"
-		"mov %c[r14](%0), %%r14 \n\t"
-		"mov %c[r15](%0), %%r15 \n\t"
+		"mov %c[r8](%%" _ASM_CX "), %%r8 \n\t"
+		"mov %c[r9](%%" _ASM_CX "), %%r9 \n\t"
+		"mov %c[r10](%%" _ASM_CX "), %%r10 \n\t"
+		"mov %c[r11](%%" _ASM_CX "), %%r11 \n\t"
+		"mov %c[r12](%%" _ASM_CX "), %%r12 \n\t"
+		"mov %c[r13](%%" _ASM_CX "), %%r13 \n\t"
+		"mov %c[r14](%%" _ASM_CX "), %%r14 \n\t"
+		"mov %c[r15](%%" _ASM_CX "), %%r15 \n\t"
 #endif
-		"mov %c[rcx](%0), %%" _ASM_CX " \n\t" /* kills %0 (ecx) */
+		/* Load guest RCX. This kills the vmx_vcpu pointer! */
+		"mov %c[rcx](%%" _ASM_CX "), %%" _ASM_CX " \n\t"
 
 		/* Enter guest mode */
 		"jne 1f \n\t"
@@ -6485,26 +6486,33 @@ static void __noclone vmx_vcpu_run(struct kvm_vcpu *vcpu)
 		"jmp 2f \n\t"
 		"1: " __ex("vmresume") "\n\t"
 		"2: "
-		/* Save guest registers, load host registers, keep flags */
-		"mov %0, %c[wordsize](%%" _ASM_SP ") \n\t"
-		"pop %0 \n\t"
-		"setbe %c[fail](%0)\n\t"
-		"mov %%" _ASM_AX ", %c[rax](%0) \n\t"
-		"mov %%" _ASM_BX ", %c[rbx](%0) \n\t"
-		__ASM_SIZE(pop) " %c[rcx](%0) \n\t"
-		"mov %%" _ASM_DX ", %c[rdx](%0) \n\t"
-		"mov %%" _ASM_SI ", %c[rsi](%0) \n\t"
-		"mov %%" _ASM_DI ", %c[rdi](%0) \n\t"
-		"mov %%" _ASM_BP ", %c[rbp](%0) \n\t"
+
+		/* Save guest's RCX to the stack placeholder (see above) */
+		"mov %%" _ASM_CX ", %c[wordsize](%%" _ASM_SP ") \n\t"
+
+		/* Load host's RCX, i.e. the vmx_vcpu pointer */
+		"pop %%" _ASM_CX " \n\t"
+
+		/* Set vmx->fail based on EFLAGS.{CF,ZF} */
+		"setbe %c[fail](%%" _ASM_CX ")\n\t"
+
+		/* Save all guest registers, including RCX from the stack */
+		"mov %%" _ASM_AX ", %c[rax](%%" _ASM_CX ") \n\t"
+		"mov %%" _ASM_BX ", %c[rbx](%%" _ASM_CX ") \n\t"
+		__ASM_SIZE(pop) " %c[rcx](%%" _ASM_CX ") \n\t"
+		"mov %%" _ASM_DX ", %c[rdx](%%" _ASM_CX ") \n\t"
+		"mov %%" _ASM_SI ", %c[rsi](%%" _ASM_CX ") \n\t"
+		"mov %%" _ASM_DI ", %c[rdi](%%" _ASM_CX ") \n\t"
+		"mov %%" _ASM_BP ", %c[rbp](%%" _ASM_CX ") \n\t"
 #ifdef CONFIG_X86_64
-		"mov %%r8, %c[r8](%0) \n\t"
-		"mov %%r9, %c[r9](%0) \n\t"
-		"mov %%r10, %c[r10](%0) \n\t"
-		"mov %%r11, %c[r11](%0) \n\t"
-		"mov %%r12, %c[r12](%0) \n\t"
-		"mov %%r13, %c[r13](%0) \n\t"
-		"mov %%r14, %c[r14](%0) \n\t"
-		"mov %%r15, %c[r15](%0) \n\t"
+		"mov %%r8, %c[r8](%%" _ASM_CX ") \n\t"
+		"mov %%r9, %c[r9](%%" _ASM_CX ") \n\t"
+		"mov %%r10, %c[r10](%%" _ASM_CX ") \n\t"
+		"mov %%r11, %c[r11](%%" _ASM_CX ") \n\t"
+		"mov %%r12, %c[r12](%%" _ASM_CX ") \n\t"
+		"mov %%r13, %c[r13](%%" _ASM_CX ") \n\t"
+		"mov %%r14, %c[r14](%%" _ASM_CX ") \n\t"
+		"mov %%r15, %c[r15](%%" _ASM_CX ") \n\t"
 #endif
 		/*
 		 * Clear host registers marked as clobbered to prevent
 		 * speculative use.
@@ -6519,7 +6527,7 @@ static void __noclone vmx_vcpu_run(struct kvm_vcpu *vcpu)
 		"xor %%r15d, %%r15d \n\t"
 #endif
 		"mov %%cr2, %%" _ASM_AX " \n\t"
-		"mov %%" _ASM_AX ", %c[cr2](%0) \n\t"
+		"mov %%" _ASM_AX ", %c[cr2](%%" _ASM_CX ") \n\t"
 		"xor %%eax, %%eax \n\t"
 		"xor %%ebx, %%ebx \n\t"
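
The rewritten comments above capture two subtleties worth spelling out. First, 'setbe' works because a failed VMLAUNCH/VMRESUME reports VMfailInvalid via EFLAGS.CF and VMfailValid via EFLAGS.ZF, and the 'be' condition is CF=1 || ZF=1. Second, RCX is pulling double duty: it is the pinned vmx_vcpu pointer and it must also carry the guest's RCX, hence the stack placeholder pushed before VM-Enter. A standalone sketch of that placeholder round-trip ('struct regs' and 'rcx_roundtrip' are invented names; x86-64 only, not kernel code):

    #include <stddef.h>

    struct regs { unsigned long rcx; };

    static inline void rcx_roundtrip(struct regs *r)
    {
    	asm("push %%rcx \n\t"			  /* placeholder for the "guest" value */
    	    "push %%rcx \n\t"			  /* save host RCX, i.e. the pointer */
    	    "mov %c[rcx](%%rcx), %%rcx \n\t"	  /* clobber RCX, as VM-Enter would */
    	    "mov %%rcx, %c[wordsize](%%rsp) \n\t" /* park "guest" RCX in the placeholder */
    	    "pop %%rcx \n\t"			  /* reload the pointer */
    	    "popq %c[rcx](%%rcx) \n\t"		  /* store "guest" RCX through the pointer */
    	    : : "c" (r),
    	        [rcx] "i" (offsetof(struct regs, rcx)),
    	        [wordsize] "i" (sizeof(unsigned long))
    	    : "cc", "memory");
    }

After the final pop the pointer is back in RCX, which is why the blob can keep addressing every save slot as %c[field](%%" _ASM_CX ").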