On Fri, Dec 01, 2017 at 01:18:43PM +0100, Jakub Jelinek wrote:
> > Furthermore this
> > 
> > typedef double __attribute__((vector_size(16))) v2df_t;
> > typedef double __attribute__((vector_size(32))) v4df_t;
> > 
> > void test1(void) {
> >     register v2df_t x asm("xmm31") = {};
> >     asm volatile("" :: "v" (x));
> > }
> > 
> > void test2(void) {
> >     register v4df_t x asm("ymm31") = {};
> >     asm volatile("" :: "v" (x));
> > }
> > 
> > translates to "vxorpd %xmm31, %xmm31, %xmm31" for both
> > functions with -mavx512vl, yet afaict the instructions would #UD
> > without AVX-512DQ, which suggests to me that the original
> > intention wasn't fully met.
> 
> This indeed is a bug, please file a PR; we should IMHO just use
> vpxorq instead in that case, which is just AVX512VL and doesn't need
> DQ.  Of course if DQ is available, we should use vxorpd.
> Working on a fix.
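
In other words, for an all-zeros constant going into %xmm16..%xmm31 the choice
boils down to this (a sketch in plain C for illustration only; the helper below
is made up and not part of the patch):

/* Sketch: which zeroing opcode to pick when the destination is an
   EXT_REX SSE register (%xmm16..%xmm31), so no VEX encoding exists
   and an EVEX-encoded instruction must be used.  */
static const char *
zero_ext_rex_opcode_sketch (int avx512vl, int avx512dq)
{
  if (avx512dq)
    /* EVEX-encoded vxorpd exists only with DQ; VL is needed for the
       128-bit form, otherwise zero the containing 512-bit register.  */
    return avx512vl ? "vxorpd\t%x0, %x0, %x0" : "vxorpd\t%g0, %g0, %g0";
  /* Without DQ, use the integer xor, which needs only AVX512F for the
     512-bit form resp. AVX512VL for the 128-bit form.  */
  return avx512vl ? "vpxorq\t%x0, %x0, %x0" : "vpxorq\t%g0, %g0, %g0";
}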

Will try this:

2017-12-01  Jakub Jelinek  <ja...@redhat.com>

        * config/i386/i386-protos.h (standard_sse_constant_opcode): Change
        last argument to rtx pointer.
        * config/i386/i386.c (standard_sse_constant_opcode): Replace X argument
        with OPERANDS.  For AVX+, prefer 128-bit VEX-encoded instructions over
        256-bit or 512-bit ones.  If the destination satisfies
        EXT_REX_SSE_REG_P, use an EVEX-encoded insn depending on the chosen
        ISAs.
        * config/i386/i386.md (*movxi_internal_avx512f, *movoi_internal_avx,
        *movti_internal, *movdi_internal, *movsi_internal, *movtf_internal,
        *movdf_internal, *movsf_internal): Adjust standard_sse_constant_opcode
        callers.
        * config/i386/sse.md (mov<mode>_internal): Likewise.
        * config/i386/mmx.md (*mov<mode>_internal): Likewise.

--- gcc/config/i386/i386-protos.h.jj    2017-10-28 09:00:44.000000000 +0200
+++ gcc/config/i386/i386-protos.h       2017-12-01 14:39:36.498608799 +0100
@@ -52,7 +52,7 @@ extern int standard_80387_constant_p (rt
 extern const char *standard_80387_constant_opcode (rtx);
 extern rtx standard_80387_constant_rtx (int);
 extern int standard_sse_constant_p (rtx, machine_mode);
-extern const char *standard_sse_constant_opcode (rtx_insn *, rtx);
+extern const char *standard_sse_constant_opcode (rtx_insn *, rtx *);
 extern bool ix86_standard_x87sse_constant_load_p (const rtx_insn *, rtx);
 extern bool symbolic_reference_mentioned_p (rtx);
 extern bool extended_reg_mentioned_p (rtx);
--- gcc/config/i386/i386.c.jj   2017-12-01 09:19:07.000000000 +0100
+++ gcc/config/i386/i386.c      2017-12-01 14:36:38.884847618 +0100
@@ -10380,12 +10380,13 @@ standard_sse_constant_p (rtx x, machine_
 }
 
 /* Return the opcode of the special instruction to be used to load
-   the constant X.  */
+   the constant operands[1] into operands[0].  */
 
 const char *
-standard_sse_constant_opcode (rtx_insn *insn, rtx x)
+standard_sse_constant_opcode (rtx_insn *insn, rtx *operands)
 {
   machine_mode mode;
+  rtx x = operands[1];
 
   gcc_assert (TARGET_SSE);
 
@@ -10395,34 +10396,51 @@ standard_sse_constant_opcode (rtx_insn *
     {
       switch (get_attr_mode (insn))
        {
+       case MODE_TI:
+         if (!EXT_REX_SSE_REG_P (operands[0]))
+           return "%vpxor\t%0, %d0";
+         /* FALLTHRU */
        case MODE_XI:
-         return "vpxord\t%g0, %g0, %g0";
        case MODE_OI:
-         return (TARGET_AVX512VL
-                 ? "vpxord\t%x0, %x0, %x0"
-                 : "vpxor\t%x0, %x0, %x0");
-       case MODE_TI:
-         return (TARGET_AVX512VL
-                 ? "vpxord\t%x0, %x0, %x0"
-                 : "%vpxor\t%0, %d0");
+         if (EXT_REX_SSE_REG_P (operands[0]))
+           return (TARGET_AVX512VL
+                   ? "vpxord\t%x0, %x0, %x0"
+                   : "vpxord\t%g0, %g0, %g0");
+         return "vpxor\t%x0, %x0, %x0";
 
+       case MODE_V2DF:
+         if (!EXT_REX_SSE_REG_P (operands[0]))
+           return "%vxorpd\t%0, %d0";
+         /* FALLTHRU */
        case MODE_V8DF:
-         return (TARGET_AVX512DQ
-                 ? "vxorpd\t%g0, %g0, %g0"
-                 : "vpxorq\t%g0, %g0, %g0");
        case MODE_V4DF:
-         return "vxorpd\t%x0, %x0, %x0";
-       case MODE_V2DF:
-         return "%vxorpd\t%0, %d0";
+         if (EXT_REX_SSE_REG_P (operands[0]))
+           if (TARGET_AVX512DQ)
+             return (TARGET_AVX512VL
+                     ? "vxorpd\t%x0, %x0, %x0"
+                     : "vxorpd\t%g0, %g0, %g0");
+           else
+             return (TARGET_AVX512VL
+                     ? "vpxorq\t%x0, %x0, %x0"
+                     : "vpxorq\t%g0, %g0, %g0");
+          return "vxorpd\t%x0, %x0, %x0";
 
+       case MODE_V4SF:
+         if (!EXT_REX_SSE_REG_P (operands[0]))
+           return "%vxorps\t%0, %d0";
+         /* FALLTHRU */
        case MODE_V16SF:
-         return (TARGET_AVX512DQ
-                 ? "vxorps\t%g0, %g0, %g0"
-                 : "vpxord\t%g0, %g0, %g0");
        case MODE_V8SF:
-         return "vxorps\t%x0, %x0, %x0";
-       case MODE_V4SF:
-         return "%vxorps\t%0, %d0";
+         if (EXT_REX_SSE_REG_P (operands[0]))
+           if (TARGET_AVX512DQ)
+             return (TARGET_AVX512VL
+                     ? "vxorps\t%x0, %x0, %x0"
+                     : "vxorps\t%g0, %g0, %g0");
+           else
+             return (TARGET_AVX512VL
+                     ? "vpxord\t%x0, %x0, %x0"
+                     : "vpxord\t%g0, %g0, %g0");
+         return "vxorps\t%x0, %x0, %x0";
 
        default:
          gcc_unreachable ();
@@ -10449,11 +10467,14 @@ standard_sse_constant_opcode (rtx_insn *
        case MODE_V2DF:
        case MODE_V4SF:
          gcc_assert (TARGET_SSE2);
-         return (TARGET_AVX512F
-                 ? "vpternlogd\t{$0xFF, %0, %0, %0|%0, %0, %0, 0xFF}"
-                 : TARGET_AVX
+         if (!EXT_REX_SSE_REG_P (operands[0]))
+           return (TARGET_AVX
                    ? "vpcmpeqd\t%0, %0, %0"
                    : "pcmpeqd\t%0, %0");
+         if (TARGET_AVX512VL)
+           return "vpternlogd\t{$0xFF, %0, %0, %0|%0, %0, %0, 0xFF}";
+         else
+           return "vpternlogd\t{$0xFF, %g0, %g0, %g0|%g0, %g0, %g0, 0xFF}";
 
        default:
          gcc_unreachable ();
--- gcc/config/i386/i386.md.jj  2017-12-01 09:06:14.000000000 +0100
+++ gcc/config/i386/i386.md     2017-12-01 14:39:25.359749204 +0100
@@ -2044,7 +2044,7 @@ (define_insn "*movxi_internal_avx512f"
   switch (get_attr_type (insn))
     {
     case TYPE_SSELOG1:
-      return standard_sse_constant_opcode (insn, operands[1]);
+      return standard_sse_constant_opcode (insn, operands);
 
     case TYPE_SSEMOV:
       if (misaligned_operand (operands[0], XImode)
@@ -2071,7 +2071,7 @@ (define_insn "*movoi_internal_avx"
   switch (get_attr_type (insn))
     {
     case TYPE_SSELOG1:
-      return standard_sse_constant_opcode (insn, operands[1]);
+      return standard_sse_constant_opcode (insn, operands);
 
     case TYPE_SSEMOV:
       if (misaligned_operand (operands[0], OImode)
@@ -2131,7 +2131,7 @@ (define_insn "*movti_internal"
       return "#";
 
     case TYPE_SSELOG1:
-      return standard_sse_constant_opcode (insn, operands[1]);
+      return standard_sse_constant_opcode (insn, operands);
 
     case TYPE_SSEMOV:
       /* TDmode values are passed as TImode on the stack.  Moving them
@@ -2243,7 +2243,7 @@ (define_insn "*movdi_internal"
       return "movq\t{%1, %0|%0, %1}";
 
     case TYPE_SSELOG1:
-      return standard_sse_constant_opcode (insn, operands[1]);
+      return standard_sse_constant_opcode (insn, operands);
 
     case TYPE_SSEMOV:
       switch (get_attr_mode (insn))
@@ -2456,7 +2456,7 @@ (define_insn "*movsi_internal"
   switch (get_attr_type (insn))
     {
     case TYPE_SSELOG1:
-      return standard_sse_constant_opcode (insn, operands[1]);
+      return standard_sse_constant_opcode (insn, operands);
 
     case TYPE_MSKMOV:
       return "kmovd\t{%1, %0|%0, %1}";
@@ -3327,7 +3327,7 @@ (define_insn "*movtf_internal"
   switch (get_attr_type (insn))
     {
     case TYPE_SSELOG1:
-      return standard_sse_constant_opcode (insn, operands[1]);
+      return standard_sse_constant_opcode (insn, operands);
 
     case TYPE_SSEMOV:
       /* Handle misaligned load/store since we
@@ -3504,7 +3504,7 @@ (define_insn "*movdf_internal"
        return "mov{q}\t{%1, %0|%0, %1}";
 
     case TYPE_SSELOG1:
-      return standard_sse_constant_opcode (insn, operands[1]);
+      return standard_sse_constant_opcode (insn, operands);
 
     case TYPE_SSEMOV:
       switch (get_attr_mode (insn))
@@ -3698,7 +3698,7 @@ (define_insn "*movsf_internal"
       return "mov{l}\t{%1, %0|%0, %1}";
 
     case TYPE_SSELOG1:
-      return standard_sse_constant_opcode (insn, operands[1]);
+      return standard_sse_constant_opcode (insn, operands);
 
     case TYPE_SSEMOV:
       switch (get_attr_mode (insn))
--- gcc/config/i386/sse.md.jj   2017-11-30 09:42:46.000000000 +0100
+++ gcc/config/i386/sse.md      2017-12-01 13:29:09.064964872 +0100
@@ -923,7 +923,7 @@ (define_insn "mov<mode>_internal"
   switch (get_attr_type (insn))
     {
     case TYPE_SSELOG1:
-      return standard_sse_constant_opcode (insn, operands[1]);
+      return standard_sse_constant_opcode (insn, operands);
 
     case TYPE_SSEMOV:
       /* There is no evex-encoded vmov* for sizes smaller than 64-bytes
--- gcc/config/i386/mmx.md.jj   2017-08-01 10:25:42.000000000 +0200
+++ gcc/config/i386/mmx.md      2017-12-01 13:28:44.541274286 +0100
@@ -112,7 +112,7 @@ (define_insn "*mov<mode>_internal"
        return "movdq2q\t{%1, %0|%0, %1}";
 
     case TYPE_SSELOG1:
-      return standard_sse_constant_opcode (insn, operands[1]);
+      return standard_sse_constant_opcode (insn, operands);
 
     case TYPE_SSEMOV:
       switch (get_attr_mode (insn))
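
An untested testcase sketch along these lines could accompany it; the dg-
directives and scan-assembler strings below are my guess at the intended
output rather than verified results:

/* { dg-do compile { target { ! ia32 } } } */
/* { dg-options "-O2 -mavx512vl -mno-avx512dq" } */

typedef double __attribute__((vector_size (16))) v2df_t;
typedef double __attribute__((vector_size (32))) v4df_t;

void
test1 (void)
{
  register v2df_t x asm ("xmm31") = {};
  asm volatile ("" : : "v" (x));
}

void
test2 (void)
{
  register v4df_t x asm ("ymm31") = {};
  asm volatile ("" : : "v" (x));
}

/* Without AVX512DQ there is no EVEX-encoded vxorpd, so vpxorq should be
   emitted instead.  */
/* { dg-final { scan-assembler "vpxorq" } } */
/* { dg-final { scan-assembler-not "vxorpd" } } */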


        Jakub
