[PATCH 2/3] KVM: VMX: Make use of asm.h

2012-09-12 Thread Avi Kivity
Use macros for bitness-insensitive register names, instead of
rolling our own.

Signed-off-by: Avi Kivity <avi@redhat.com>
---
 arch/x86/kvm/vmx.c | 69 ++++++++++++++++++++++++++++++---------------------------------------
 1 file changed, 30 insertions(+), 39 deletions(-)

diff --git a/arch/x86/kvm/vmx.c b/arch/x86/kvm/vmx.c
index 0302c0f..04a7334 100644
--- a/arch/x86/kvm/vmx.c
+++ b/arch/x86/kvm/vmx.c
@@ -6184,14 +6184,6 @@ static void atomic_switch_perf_msrs(struct vcpu_vmx *vmx)
msrs[i].host);
 }
 
-#ifdef CONFIG_X86_64
-#define R "r"
-#define Q "q"
-#else
-#define R "e"
-#define Q "l"
-#endif
-
 static void __noclone vmx_vcpu_run(struct kvm_vcpu *vcpu)
 {
struct vcpu_vmx *vmx = to_vmx(vcpu);
@@ -6240,30 +6232,30 @@ static void __noclone vmx_vcpu_run(struct kvm_vcpu *vcpu)
vmx->__launched = vmx->loaded_vmcs->launched;
asm(
/* Store host registers */
-   "push %%"R"dx; push %%"R"bp;"
-   "push %%"R"cx \n\t" /* placeholder for guest rcx */
-   "push %%"R"cx \n\t"
-   "cmp %%"R"sp, %c[host_rsp](%0) \n\t"
+   "push %%" _ASM_DX "; push %%" _ASM_BP ";"
+   "push %%" _ASM_CX " \n\t" /* placeholder for guest rcx */
+   "push %%" _ASM_CX " \n\t"
+   "cmp %%" _ASM_SP ", %c[host_rsp](%0) \n\t"
"je 1f \n\t"
-   "mov %%"R"sp, %c[host_rsp](%0) \n\t"
+   "mov %%" _ASM_SP ", %c[host_rsp](%0) \n\t"
__ex(ASM_VMX_VMWRITE_RSP_RDX) "\n\t"
"1: \n\t"
/* Reload cr2 if changed */
-   "mov %c[cr2](%0), %%"R"ax \n\t"
-   "mov %%cr2, %%"R"dx \n\t"
-   "cmp %%"R"ax, %%"R"dx \n\t"
+   "mov %c[cr2](%0), %%" _ASM_AX " \n\t"
+   "mov %%cr2, %%" _ASM_DX " \n\t"
+   "cmp %%" _ASM_AX ", %%" _ASM_DX " \n\t"
"je 2f \n\t"
-   "mov %%"R"ax, %%cr2 \n\t"
+   "mov %%" _ASM_AX", %%cr2 \n\t"
"2: \n\t"
/* Check if vmlaunch of vmresume is needed */
"cmpl $0, %c[launched](%0) \n\t"
/* Load guest registers.  Don't clobber flags. */
-   "mov %c[rax](%0), %%"R"ax \n\t"
-   "mov %c[rbx](%0), %%"R"bx \n\t"
-   "mov %c[rdx](%0), %%"R"dx \n\t"
-   "mov %c[rsi](%0), %%"R"si \n\t"
-   "mov %c[rdi](%0), %%"R"di \n\t"
-   "mov %c[rbp](%0), %%"R"bp \n\t"
+   "mov %c[rax](%0), %%" _ASM_AX " \n\t"
+   "mov %c[rbx](%0), %%" _ASM_BX " \n\t"
+   "mov %c[rdx](%0), %%" _ASM_DX " \n\t"
+   "mov %c[rsi](%0), %%" _ASM_SI " \n\t"
+   "mov %c[rdi](%0), %%" _ASM_DI " \n\t"
+   "mov %c[rbp](%0), %%" _ASM_BP " \n\t"
 #ifdef CONFIG_X86_64
"mov %c[r8](%0),  %%r8  \n\t"
"mov %c[r9](%0),  %%r9  \n\t"
@@ -6274,7 +6266,7 @@ static void __noclone vmx_vcpu_run(struct kvm_vcpu *vcpu)
"mov %c[r14](%0), %%r14 \n\t"
"mov %c[r15](%0), %%r15 \n\t"
 #endif
-   "mov %c[rcx](%0), %%"R"cx \n\t" /* kills %0 (ecx) */
+   "mov %c[rcx](%0), %%" _ASM_CX " \n\t" /* kills %0 (ecx) */
 
/* Enter guest mode */
"jne 1f \n\t"
@@ -6283,15 +6275,15 @@ static void __noclone vmx_vcpu_run(struct kvm_vcpu *vcpu)
"1: " __ex(ASM_VMX_VMRESUME) "\n\t"
"2: "
/* Save guest registers, load host registers, keep flags */
-   "mov %0, %c[wordsize](%%"R"sp) \n\t"
+   "mov %0, %c[wordsize](%%" _ASM_SP ") \n\t"
"pop %0 \n\t"
-   "mov %%"R"ax, %c[rax](%0) \n\t"
-   "mov %%"R"bx, %c[rbx](%0) \n\t"
-   "pop"Q" %c[rcx](%0) \n\t"
-   "mov %%"R"dx, %c[rdx](%0) \n\t"
-   "mov %%"R"si, %c[rsi](%0) \n\t"
-   "mov %%"R"di, %c[rdi](%0) \n\t"
-   "mov %%"R"bp, %c[rbp](%0) \n\t"
+   "mov %%" _ASM_AX ", %c[rax](%0) \n\t"
+   "mov %%" _ASM_BX ", %c[rbx](%0) \n\t"
+   __ASM_SIZE(pop) " %c[rcx](%0) \n\t"
+   "mov %%" _ASM_DX ", %c[rdx](%0) \n\t"
+   "mov %%" _ASM_SI ", %c[rsi](%0) \n\t"
+   "mov %%" _ASM_DI ", %c[rdi](%0) \n\t"
+   "mov %%" _ASM_BP ", %c[rbp](%0) \n\t"
 #ifdef CONFIG_X86_64
"mov %%r8,  %c[r8](%0) \n\t"
"mov %%r9,  %c[r9](%0) \n\t"
@@ -6302,10 +6294,10 @@ static void __noclone vmx_vcpu_run(struct kvm_vcpu *vcpu)
"mov %%r14, %c[r14](%0) \n\t"
"mov %%r15, %c[r15](%0) \n\t"
 #endif
-   "mov %%cr2, %%"R"ax   \n\t"
-   "mov %%"R"ax, %c[cr2](%0) \n\t"
+   "mov %%cr2, %%" _ASM_AX "   \n\t"
+   "mov %%" _ASM_AX ", %c[cr2](%0) \n\t"
 
-   "pop  %%"R"bp; pop  %%"R"dx \n\t"
+   "pop  %%"

[PATCH 2/3] KVM: VMX: Make use of asm.h

2012-09-16 Thread Avi Kivity
Use macros for bitness-insensitive register names, instead of
rolling our own.

Signed-off-by: Avi Kivity <avi@redhat.com>
---
 arch/x86/kvm/vmx.c | 69 ++++++++++++++++++++++++++++++---------------------------------------
 1 file changed, 30 insertions(+), 39 deletions(-)

diff --git a/arch/x86/kvm/vmx.c b/arch/x86/kvm/vmx.c
index 5faf12a..30bcb95 100644
--- a/arch/x86/kvm/vmx.c
+++ b/arch/x86/kvm/vmx.c
@@ -6184,14 +6184,6 @@ static void atomic_switch_perf_msrs(struct vcpu_vmx *vmx)
msrs[i].host);
 }
 
-#ifdef CONFIG_X86_64
-#define R "r"
-#define Q "q"
-#else
-#define R "e"
-#define Q "l"
-#endif
-
 static void __noclone vmx_vcpu_run(struct kvm_vcpu *vcpu)
 {
struct vcpu_vmx *vmx = to_vmx(vcpu);
@@ -6240,30 +6232,30 @@ static void __noclone vmx_vcpu_run(struct kvm_vcpu *vcpu)
vmx->__launched = vmx->loaded_vmcs->launched;
asm(
/* Store host registers */
-   "push %%"R"dx; push %%"R"bp;"
-   "push %%"R"cx \n\t" /* placeholder for guest rcx */
-   "push %%"R"cx \n\t"
-   "cmp %%"R"sp, %c[host_rsp](%0) \n\t"
+   "push %%" _ASM_DX "; push %%" _ASM_BP ";"
+   "push %%" _ASM_CX " \n\t" /* placeholder for guest rcx */
+   "push %%" _ASM_CX " \n\t"
+   "cmp %%" _ASM_SP ", %c[host_rsp](%0) \n\t"
"je 1f \n\t"
-   "mov %%"R"sp, %c[host_rsp](%0) \n\t"
+   "mov %%" _ASM_SP ", %c[host_rsp](%0) \n\t"
__ex(ASM_VMX_VMWRITE_RSP_RDX) "\n\t"
"1: \n\t"
/* Reload cr2 if changed */
-   "mov %c[cr2](%0), %%"R"ax \n\t"
-   "mov %%cr2, %%"R"dx \n\t"
-   "cmp %%"R"ax, %%"R"dx \n\t"
+   "mov %c[cr2](%0), %%" _ASM_AX " \n\t"
+   "mov %%cr2, %%" _ASM_DX " \n\t"
+   "cmp %%" _ASM_AX ", %%" _ASM_DX " \n\t"
"je 2f \n\t"
-   "mov %%"R"ax, %%cr2 \n\t"
+   "mov %%" _ASM_AX", %%cr2 \n\t"
"2: \n\t"
/* Check if vmlaunch of vmresume is needed */
"cmpl $0, %c[launched](%0) \n\t"
/* Load guest registers.  Don't clobber flags. */
-   "mov %c[rax](%0), %%"R"ax \n\t"
-   "mov %c[rbx](%0), %%"R"bx \n\t"
-   "mov %c[rdx](%0), %%"R"dx \n\t"
-   "mov %c[rsi](%0), %%"R"si \n\t"
-   "mov %c[rdi](%0), %%"R"di \n\t"
-   "mov %c[rbp](%0), %%"R"bp \n\t"
+   "mov %c[rax](%0), %%" _ASM_AX " \n\t"
+   "mov %c[rbx](%0), %%" _ASM_BX " \n\t"
+   "mov %c[rdx](%0), %%" _ASM_DX " \n\t"
+   "mov %c[rsi](%0), %%" _ASM_SI " \n\t"
+   "mov %c[rdi](%0), %%" _ASM_DI " \n\t"
+   "mov %c[rbp](%0), %%" _ASM_BP " \n\t"
 #ifdef CONFIG_X86_64
"mov %c[r8](%0),  %%r8  \n\t"
"mov %c[r9](%0),  %%r9  \n\t"
@@ -6274,7 +6266,7 @@ static void __noclone vmx_vcpu_run(struct kvm_vcpu *vcpu)
"mov %c[r14](%0), %%r14 \n\t"
"mov %c[r15](%0), %%r15 \n\t"
 #endif
-   "mov %c[rcx](%0), %%"R"cx \n\t" /* kills %0 (ecx) */
+   "mov %c[rcx](%0), %%" _ASM_CX " \n\t" /* kills %0 (ecx) */
 
/* Enter guest mode */
"jne 1f \n\t"
@@ -6283,15 +6275,15 @@ static void __noclone vmx_vcpu_run(struct kvm_vcpu *vcpu)
"1: " __ex(ASM_VMX_VMRESUME) "\n\t"
"2: "
/* Save guest registers, load host registers, keep flags */
-   "mov %0, %c[wordsize](%%"R"sp) \n\t"
+   "mov %0, %c[wordsize](%%" _ASM_SP ") \n\t"
"pop %0 \n\t"
-   "mov %%"R"ax, %c[rax](%0) \n\t"
-   "mov %%"R"bx, %c[rbx](%0) \n\t"
-   "pop"Q" %c[rcx](%0) \n\t"
-   "mov %%"R"dx, %c[rdx](%0) \n\t"
-   "mov %%"R"si, %c[rsi](%0) \n\t"
-   "mov %%"R"di, %c[rdi](%0) \n\t"
-   "mov %%"R"bp, %c[rbp](%0) \n\t"
+   "mov %%" _ASM_AX ", %c[rax](%0) \n\t"
+   "mov %%" _ASM_BX ", %c[rbx](%0) \n\t"
+   __ASM_SIZE(pop) " %c[rcx](%0) \n\t"
+   "mov %%" _ASM_DX ", %c[rdx](%0) \n\t"
+   "mov %%" _ASM_SI ", %c[rsi](%0) \n\t"
+   "mov %%" _ASM_DI ", %c[rdi](%0) \n\t"
+   "mov %%" _ASM_BP ", %c[rbp](%0) \n\t"
 #ifdef CONFIG_X86_64
"mov %%r8,  %c[r8](%0) \n\t"
"mov %%r9,  %c[r9](%0) \n\t"
@@ -6302,10 +6294,10 @@ static void __noclone vmx_vcpu_run(struct kvm_vcpu *vcpu)
"mov %%r14, %c[r14](%0) \n\t"
"mov %%r15, %c[r15](%0) \n\t"
 #endif
-   "mov %%cr2, %%"R"ax   \n\t"
-   "mov %%"R"ax, %c[cr2](%0) \n\t"
+   "mov %%cr2, %%" _ASM_AX "   \n\t"
+   "mov %%" _ASM_AX ", %c[cr2](%0) \n\t"
 
-   "pop  %%"R"bp; pop  %%"R"dx \n\t"
+   "pop  %%"