> @@ -174,10 +174,61 @@ compat_bad_hypercall:
>  /* %rbx: struct vcpu, interrupts disabled */
>  ENTRY(compat_restore_all_guest)
>          ASSERT_INTERRUPTS_DISABLED
> +.Lcr4_orig:
> +        ASM_NOP8 /* testb $3,UREGS_cs(%rsp) */
> +        ASM_NOP2 /* jpe   .Lcr4_alt_end */
> +        ASM_NOP8 /* mov   CPUINFO_cr4...(%rsp), %rax */
> +        ASM_NOP6 /* and   $..., %rax */
> +        ASM_NOP8 /* mov   %rax, CPUINFO_cr4...(%rsp) */
> +        ASM_NOP3 /* mov   %rax, %cr4 */
> +.Lcr4_orig_end:
> +        .pushsection .altinstr_replacement, "ax"
> +.Lcr4_alt:
> +        testb $3,UREGS_cs(%rsp)
> +        jpe   .Lcr4_alt_end

This jumps when the result of the last operation has an even number of
bits set (parity even). And 'testb' is an 'and' operation, so with the $3
mask the result is just the two RPL bits of the selector.

Why not just depend on ZF? Other places that test UREGS_cs() seem to be
using that.
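
For my own sanity, here is a throwaway C snippet (purely illustrative,
not Xen code) of what the flags look like for each possible RPL after
"testb $3, ...":

    #include <stdio.h>

    /* Illustration only: "testb $3, UREGS_cs(%rsp)" computes cs & 3 and
     * sets PF when the low byte of the result has an even number of 1
     * bits, ZF when the result is zero. */
    int main(void)
    {
        for (unsigned int rpl = 0; rpl <= 3; rpl++) {
            unsigned int res = rpl & 3;                     /* testb $3 */
            int zf = (res == 0);
            int pf = (__builtin_popcount(res) & 1) == 0;    /* gcc/clang builtin */

            printf("RPL %u: result %u, ZF=%d, PF=%d -> jpe %s\n",
                   rpl, res, zf, pf, pf ? "taken" : "not taken");
        }
        return 0;
    }

So unless I'm misreading, PF distinguishes {RPL 0, RPL 3} from
{RPL 1, RPL 2}, whereas ZF would only distinguish RPL 0 from the rest.
If that asymmetry is the point, a comment saying so would help.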

> +        mov   CPUINFO_cr4-CPUINFO_guest_cpu_user_regs(%rsp), %rax
> +        and   $~(X86_CR4_SMEP|X86_CR4_SMAP), %rax
> +        mov   %rax, CPUINFO_cr4-CPUINFO_guest_cpu_user_regs(%rsp)
> +        mov   %rax, %cr4
> +.Lcr4_alt_end:
> +        .section .altinstructions, "a"
> +        altinstruction_entry .Lcr4_orig, .Lcr4_alt, X86_FEATURE_SMEP, \
> +                             (.Lcr4_orig_end - .Lcr4_orig), \
> +                             (.Lcr4_alt_end - .Lcr4_alt)
> +        altinstruction_entry .Lcr4_orig, .Lcr4_alt, X86_FEATURE_SMAP, \
> +                             (.Lcr4_orig_end - .Lcr4_orig), \
> +                             (.Lcr4_alt_end - .Lcr4_alt)
> +        .popsection
>          RESTORE_ALL adj=8 compat=1
>  .Lft0:  iretq
>          _ASM_PRE_EXTABLE(.Lft0, handle_exception)
>  
> +/* This mustn't modify registers other than %rax. */
> +ENTRY(cr4_pv32_restore)
> +        push  %rdx
> +        GET_CPUINFO_FIELD(cr4, %rdx)
> +        mov   (%rdx), %rax
> +        test  $X86_CR4_SMEP|X86_CR4_SMAP,%eax
> +        jnz   0f
> +        or    cr4_pv32_mask(%rip), %rax
> +        mov   %rax, %cr4
> +        mov   %rax, (%rdx)

Here you leave %rax holding the updated %cr4 value (the cached value with
cr4_pv32_mask OR-ed in), but:

> +        pop   %rdx
> +        ret
> +0:
> +#ifndef NDEBUG
> +        /* Check that _all_ of the bits intended to be set actually are. */
> +        mov   %cr4, %rax
> +        and   cr4_pv32_mask(%rip), %eax
> +        cmp   cr4_pv32_mask(%rip), %eax
> +        je    1f
> +        BUG
> +1:
> +#endif
> +        pop   %rdx
> +        xor   %eax, %eax

.. Here you clear it. Any particular reason?

> +        ret
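
To make sure I follow the flow, here is a C-level sketch of how I read
this function (the helper names, the cached-CR4 accessor, and the
_sketch suffix are placeholders of mine, not the real interfaces):

    #define X86_CR4_SMEP (1UL << 20)    /* CR4 bit positions per the SDM */
    #define X86_CR4_SMAP (1UL << 21)

    extern unsigned long cr4_pv32_mask;       /* SMEP/SMAP bits to force on */
    extern unsigned long *this_cpu_cr4;       /* stand-in for GET_CPUINFO_FIELD(cr4) */
    extern void write_cr4_placeholder(unsigned long val);

    /* Returns the updated CR4 value if the bits had to be set, else 0. */
    unsigned long cr4_pv32_restore_sketch(void)
    {
        unsigned long cr4 = *this_cpu_cr4;

        if (!(cr4 & (X86_CR4_SMEP | X86_CR4_SMAP))) {
            cr4 |= cr4_pv32_mask;            /* or   cr4_pv32_mask(%rip), %rax */
            write_cr4_placeholder(cr4);      /* mov  %rax, %cr4 */
            *this_cpu_cr4 = cr4;             /* mov  %rax, (%rdx) */
            return cr4;                      /* %rax left holding new CR4 */
        }

        /* NDEBUG check elided: all mask bits should already be set. */
        return 0;                            /* xor  %eax, %eax */
    }

If that reading is right, the asymmetry in what the caller sees in %rax
(new CR4 on one path, 0 on the other) is what prompted my question above.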
> +
>  /* %rdx: trap_bounce, %rbx: struct vcpu */
>  ENTRY(compat_post_handle_exception)
>          testb $TBF_EXCEPTION,TRAPBOUNCE_flags(%rdx)
.. snip..
> -.macro LOAD_C_CLOBBERED compat=0
> +.macro LOAD_C_CLOBBERED compat=0 ax=1
>  .if !\compat
>          movq  UREGS_r11(%rsp),%r11
>          movq  UREGS_r10(%rsp),%r10
>          movq  UREGS_r9(%rsp),%r9
>          movq  UREGS_r8(%rsp),%r8
> -.endif
> +.if \ax
>          movq  UREGS_rax(%rsp),%rax
> +.endif

Why the .endif here, considering you follow it with:

> +.elseif \ax

an .elseif?
> +        movl  UREGS_rax(%rsp),%eax
> +.endif

Actually, why two '.if \ax' checks?

Or am I reading this incorrectly?
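
Just to check my reading, here is the branching expressed as C
(illustrative only; the string bodies stand in for the emitted
instructions):

    #include <stdio.h>

    /* How I read the reworked LOAD_C_CLOBBERED: each branch of the outer
     * compat test has its own "\ax" check because the non-compat path
     * emits a 64-bit movq while the compat path emits a 32-bit movl. */
    static void load_c_clobbered(int compat, int ax)
    {
        if (!compat) {
            puts("movq UREGS_r11(%rsp),%r11 ... movq UREGS_r8(%rsp),%r8");
            if (ax)
                puts("movq UREGS_rax(%rsp),%rax");   /* full 64-bit restore */
        } else if (ax) {
            puts("movl UREGS_rax(%rsp),%eax");       /* 32-bit restore only */
        }
    }

    int main(void)
    {
        load_c_clobbered(0, 1);    /* native, restore %rax */
        load_c_clobbered(1, 1);    /* compat, restore %eax */
        load_c_clobbered(1, 0);    /* compat, skip %eax */
        return 0;
    }

So if I read it right, the two checks select the two operand sizes and
ax=0 skips the load entirely -- is that the intent?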

