As we are running out of bits in the KVM emulator instruction flags, merge
the Mmx/Sse/Avx bits into a single field. These bits are mutually exclusive
(i.e., each instruction is either MMX, SSE, AVX, or none), so encoding them
as values of one two-bit field instead of three separate bits saves one bit
in the flags.
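
For illustration only (this standalone sketch is not part of the patch), the
encoding change amounts to replacing three mutually exclusive one-bit flags
with a single two-bit field whose values name the vector class; call sites
then compare the extracted field for equality instead of testing bits:

    #include <stdint.h>
    #include <stdio.h>

    typedef uint64_t u64;

    /* One two-bit field at bits 40-41: 0 = none, 1 = MMX, 2 = SSE, 3 = AVX. */
    #define Mmx       ((u64)1 << 40)
    #define Sse       ((u64)2 << 40)
    #define Avx       ((u64)3 << 40)
    #define OpExtMask ((u64)3 << 40)

    static const char *vec_class(u64 d)
    {
            u64 op_ext = d & OpExtMask;     /* extract the merged field */

            if (op_ext == Mmx)
                    return "MMX";
            if (op_ext == Sse)
                    return "SSE";
            if (op_ext == Avx)
                    return "AVX";
            return "none";
    }

    int main(void)
    {
            /* Unrelated flag bits do not disturb the field: prints "SSE". */
            printf("%s\n", vec_class(Sse | ((u64)1 << 45)));
            printf("%s\n", vec_class(0));   /* prints "none" */
            return 0;
    }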

Signed-off-by: Nadav Amit <na...@cs.technion.ac.il>
---
 arch/x86/kvm/emulate.c | 44 ++++++++++++++++++++++++++++----------------
 1 file changed, 28 insertions(+), 16 deletions(-)

diff --git a/arch/x86/kvm/emulate.c b/arch/x86/kvm/emulate.c
index cd2029b..f98ead7 100644
--- a/arch/x86/kvm/emulate.c
+++ b/arch/x86/kvm/emulate.c
@@ -123,7 +123,6 @@
 #define Prefix      (3<<15)     /* Instruction varies with 66/f2/f3 prefix */
 #define RMExt       (4<<15)     /* Opcode extension in ModRM r/m if mod == 3 */
 #define Escape      (5<<15)     /* Escape to coprocessor instruction */
-#define Sse         (1<<18)     /* SSE Vector instruction */
 /* Generic ModRM decode. */
 #define ModRM       (1<<19)
 /* Destination is only written; never read. */
@@ -155,9 +154,11 @@
 #define Src2GS      (OpGS << Src2Shift)
 #define Src2Mask    (OpMask << Src2Shift)
 #define Mmx         ((u64)1 << 40)  /* MMX Vector instruction */
-#define Aligned     ((u64)1 << 41)  /* Explicitly aligned (e.g. MOVDQA) */
-#define Unaligned   ((u64)1 << 42)  /* Explicitly unaligned (e.g. MOVDQU) */
-#define Avx         ((u64)1 << 43)  /* Advanced Vector Extensions */
+#define Sse         ((u64)2 << 40)  /* SSE Vector instruction */
+#define Avx         ((u64)3 << 40)  /* Advanced Vector Extensions */
+#define OpExtMask   ((u64)3 << 40)
+#define Aligned     ((u64)1 << 42)  /* Explicitly aligned (e.g. MOVDQA) */
+#define Unaligned   ((u64)1 << 43)  /* Explicitly unaligned (e.g. MOVDQU) */
 #define Fastop      ((u64)1 << 44)  /* Use opcode::u.fastop */
 #define NoWrite     ((u64)1 << 45)  /* No writeback */
 #define SrcWrite    ((u64)1 << 46)  /* Write back src operand */
@@ -1082,18 +1083,19 @@ static void decode_register_operand(struct x86_emulate_ctxt *ctxt,
                                    struct operand *op)
 {
        unsigned reg = ctxt->modrm_reg;
+       u64 op_ext = ctxt->d & OpExtMask;
 
        if (!(ctxt->d & ModRM))
                reg = (ctxt->b & 7) | ((ctxt->rex_prefix & 1) << 3);
 
-       if (ctxt->d & Sse) {
+       if (op_ext == Sse) {
                op->type = OP_XMM;
                op->bytes = 16;
                op->addr.xmm = reg;
                read_sse_reg(ctxt, &op->vec_val, reg);
                return;
        }
-       if (ctxt->d & Mmx) {
+       if (op_ext == Mmx) {
                reg &= 7;
                op->type = OP_MM;
                op->bytes = 8;
@@ -1122,6 +1124,7 @@ static int decode_modrm(struct x86_emulate_ctxt *ctxt,
        int index_reg, base_reg, scale;
        int rc = X86EMUL_CONTINUE;
        ulong modrm_ea = 0;
+       u64 op_ext = ctxt->d & OpExtMask;
 
        ctxt->modrm_reg = ((ctxt->rex_prefix << 1) & 8); /* REX.R */
        index_reg = (ctxt->rex_prefix << 2) & 8; /* REX.X */
@@ -1137,14 +1140,15 @@ static int decode_modrm(struct x86_emulate_ctxt *ctxt,
                op->bytes = (ctxt->d & ByteOp) ? 1 : ctxt->op_bytes;
                op->addr.reg = decode_register(ctxt, ctxt->modrm_rm,
                                ctxt->d & ByteOp);
-               if (ctxt->d & Sse) {
+
+               if (op_ext == Sse) {
                        op->type = OP_XMM;
                        op->bytes = 16;
                        op->addr.xmm = ctxt->modrm_rm;
                        read_sse_reg(ctxt, &op->vec_val, ctxt->modrm_rm);
                        return rc;
                }
-               if (ctxt->d & Mmx) {
+               if (op_ext == Mmx) {
                        op->type = OP_MM;
                        op->bytes = 8;
                        op->addr.mm = ctxt->modrm_rm & 7;
@@ -4555,7 +4559,9 @@ done_prefixes:
                return EMULATION_FAILED;
 
        if (unlikely(ctxt->d &
-           (NotImpl|Stack|Op3264|Sse|Mmx|Intercept|CheckPerm|NearBranch))) {
+           (NotImpl|Stack|Op3264|OpExtMask|Intercept|CheckPerm|NearBranch))) {
+               u64 op_ext = ctxt->d & OpExtMask;
+
                /*
                 * These are copied unconditionally here, and checked unconditionally
                 * in x86_emulate_insn.
@@ -4580,9 +4586,9 @@ done_prefixes:
                                ctxt->op_bytes = 4;
                }
 
-               if (ctxt->d & Sse)
+               if (op_ext == Sse)
                        ctxt->op_bytes = 16;
-               else if (ctxt->d & Mmx)
+               else if (op_ext == Mmx)
                        ctxt->op_bytes = 8;
        }
 
@@ -4728,25 +4734,31 @@ int x86_emulate_insn(struct x86_emulate_ctxt *ctxt)
        }
 
        if (unlikely(ctxt->d &
-                    (No64|Undefined|Sse|Mmx|Intercept|CheckPerm|Priv|Prot|String))) {
+                    (No64|Undefined|OpExtMask|Intercept|CheckPerm|Priv|Prot|
+                     String))) {
+               u64 op_ext = ctxt->d & OpExtMask;
+
                if ((ctxt->mode == X86EMUL_MODE_PROT64 && (ctxt->d & No64)) ||
                                (ctxt->d & Undefined)) {
                        rc = emulate_ud(ctxt);
                        goto done;
                }
 
-               if (((ctxt->d & (Sse|Mmx)) && ((ops->get_cr(ctxt, 0) & X86_CR0_EM)))
-                   || ((ctxt->d & Sse) && !(ops->get_cr(ctxt, 4) & X86_CR4_OSFXSR))) {
+               if (((op_ext == Sse || op_ext == Mmx) &&
+                   ((ops->get_cr(ctxt, 0) & X86_CR0_EM)))
+                   || ((op_ext == Sse) &&
+                       !(ops->get_cr(ctxt, 4) & X86_CR4_OSFXSR))) {
                        rc = emulate_ud(ctxt);
                        goto done;
                }
 
-               if ((ctxt->d & (Sse|Mmx)) && (ops->get_cr(ctxt, 0) & X86_CR0_TS)) {
+               if ((op_ext == Sse || op_ext == Mmx) &&
+                   (ops->get_cr(ctxt, 0) & X86_CR0_TS)) {
                        rc = emulate_nm(ctxt);
                        goto done;
                }
 
-               if (ctxt->d & Mmx) {
+               if (op_ext == Mmx) {
                        rc = flush_pending_x87_faults(ctxt);
                        if (rc != X86EMUL_CONTINUE)
                                goto done;
-- 
1.9.1
