On Wed, Feb 07, 2018 at 04:21:43PM -0500, Jason Merrill wrote:
> On Wed, Feb 7, 2018 at 4:14 PM, Jakub Jelinek <ja...@redhat.com> wrote:
> > On Wed, Feb 07, 2018 at 03:52:39PM -0500, Jason Merrill wrote:
> >> > E.g. the constexpr function uses 
> >> > same_type_ignoring_top_level_qualifiers_p
> >> > instead of == type comparisons, the COMPONENT_REF stuff, ...
> >>
> >> > For poly_* stuff, I think Richard S. wants to introduce it into the FEs 
> >> > at
> >> > some point, but I could be wrong; certainly it hasn't been done yet and
> >> > generally, poly*int seems to be a nightmare to deal with.
> >>
> >> Yes, I understand how we got to this point, but having the functions
> >> diverge because of this guideline seems like a mistake.  And there
> >> seem to be two ways to avoid the divergence: make an exception to the
> >> guideline, or move the function.
> >
> > Functionally, I think the following patch should make fold_indirect_ref_1
> > equivalent to the patched constexpr.c version (with the known documented
> > differences), so if this is the obstacle to the acceptance of the patch,
> > I can test this.
> >
> > Otherwise, I must say I have no idea how to share the code:
> > same_type_ignoring_top_level_qualifiers_p is only a C++ FE function, so the
> > middle-end can't use it even conditionally, and similarly with the TBAA
> > issues.
> 
> Again, can we make an exception and use poly_int in this function
> because it's mirroring a middle-end function?

So, like this, if it passes bootstrap/regtest?  It is essentially a
bidirectional merge of changes between the two functions, except for the
intentional differences (e.g. same_type_ignoring_top_level_qualifiers_p
vs. ==, the in_gimple_form handling in fold-const.c, the C++-specific
empty class handling in constexpr.c, etc.).
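
For reference, the intentional type-check difference boils down to the
following (simplified excerpts from the two hunks below, not an additional
change):

  /* fold-const.c (middle-end): exact type identity.  */
  if (VECTOR_TYPE_P (op00type)
      && type == TREE_TYPE (op00type)
      ...)

  /* cp/constexpr.c (C++ FE): top-level qualifiers are ignored, using a
     front-end-only helper the middle-end can't call.  */
  if (VECTOR_TYPE_P (op00type)
      && same_type_ignoring_top_level_qualifiers_p (type,
						    TREE_TYPE (op00type))
      ...)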

2018-01-26  Marek Polacek  <pola...@redhat.com>
            Jakub Jelinek  <ja...@redhat.com>

        PR c++/83659
        * fold-const.c (fold_indirect_ref_1): Use VECTOR_TYPE_P macro.
        Formatting fixes.  Verify first that tree_fits_poly_int64_p (op01).
        Sync some changes from cxx_fold_indirect_ref.

        * constexpr.c (cxx_fold_indirect_ref): Sync some changes from
        fold_indirect_ref_1, including poly_*int64.  Verify first that
        tree_fits_poly_int64_p (op01).  Formatting fixes.

        * g++.dg/torture/pr83659.C: New test.

--- gcc/fold-const.c.jj 2018-01-26 12:43:23.140922419 +0100
+++ gcc/fold-const.c    2018-02-08 12:43:50.654727317 +0100
@@ -14115,6 +14115,7 @@ fold_indirect_ref_1 (location_t loc, tre
     {
       tree op = TREE_OPERAND (sub, 0);
       tree optype = TREE_TYPE (op);
+
       /* *&CONST_DECL -> to the value of the const decl.  */
       if (TREE_CODE (op) == CONST_DECL)
        return DECL_INITIAL (op);
@@ -14148,12 +14149,13 @@ fold_indirect_ref_1 (location_t loc, tre
               && type == TREE_TYPE (optype))
        return fold_build1_loc (loc, REALPART_EXPR, type, op);
       /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
-      else if (TREE_CODE (optype) == VECTOR_TYPE
+      else if (VECTOR_TYPE_P (optype)
               && type == TREE_TYPE (optype))
        {
          tree part_width = TYPE_SIZE (type);
          tree index = bitsize_int (0);
-         return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
+         return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width,
+                                 index);
        }
     }
 
@@ -14171,8 +14173,17 @@ fold_indirect_ref_1 (location_t loc, tre
          op00type = TREE_TYPE (op00);
 
          /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
-         if (TREE_CODE (op00type) == VECTOR_TYPE
-             && type == TREE_TYPE (op00type))
+         if (VECTOR_TYPE_P (op00type)
+             && type == TREE_TYPE (op00type)
+             /* POINTER_PLUS_EXPR second operand is sizetype, unsigned,
+                but we want to treat offsets with MSB set as negative.
+                For the code below negative offsets are invalid and
+                TYPE_SIZE of the element is something unsigned, so
+                check whether op01 fits into poly_int64, which implies
+                it is from 0 to INTTYPE_MAXIMUM (HOST_WIDE_INT), and
+                then just use poly_uint64 because we want to treat the
+                value as unsigned.  */
+             && tree_fits_poly_int64_p (op01))
            {
              tree part_width = TYPE_SIZE (type);
              poly_uint64 max_offset
@@ -14199,16 +14210,16 @@ fold_indirect_ref_1 (location_t loc, tre
                   && type == TREE_TYPE (op00type))
            {
              tree type_domain = TYPE_DOMAIN (op00type);
-             tree min = size_zero_node;
+             tree min_val = size_zero_node;
              if (type_domain && TYPE_MIN_VALUE (type_domain))
-               min = TYPE_MIN_VALUE (type_domain);
+               min_val = TYPE_MIN_VALUE (type_domain);
              offset_int off = wi::to_offset (op01);
              offset_int el_sz = wi::to_offset (TYPE_SIZE_UNIT (type));
              offset_int remainder;
              off = wi::divmod_trunc (off, el_sz, SIGNED, &remainder);
-             if (remainder == 0 && TREE_CODE (min) == INTEGER_CST)
+             if (remainder == 0 && TREE_CODE (min_val) == INTEGER_CST)
                {
-                 off = off + wi::to_offset (min);
+                 off = off + wi::to_offset (min_val);
                  op01 = wide_int_to_tree (sizetype, off);
                  return build4_loc (loc, ARRAY_REF, type, op00, op01,
                                     NULL_TREE, NULL_TREE);
--- gcc/cp/constexpr.c.jj       2018-02-06 13:12:48.072808498 +0100
+++ gcc/cp/constexpr.c  2018-02-08 12:43:59.526722279 +0100
@@ -3025,9 +3025,10 @@ cxx_eval_vec_init (const constexpr_ctx *
 static tree
 cxx_fold_indirect_ref (location_t loc, tree type, tree op0, bool *empty_base)
 {
-  tree sub, subtype;
+  tree sub = op0;
+  tree subtype;
+  poly_uint64 const_op01;
 
-  sub = op0;
   STRIP_NOPS (sub);
   subtype = TREE_TYPE (sub);
   if (!POINTER_TYPE_P (subtype))
@@ -3082,7 +3083,8 @@ cxx_fold_indirect_ref (location_t loc, t
        {
          tree part_width = TYPE_SIZE (type);
          tree index = bitsize_int (0);
-         return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
+         return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width,
+                                 index);
        }
       /* Also handle conversion to an empty base class, which
         is represented with a NOP_EXPR.  */
@@ -3107,7 +3109,7 @@ cxx_fold_indirect_ref (location_t loc, t
        }
     }
   else if (TREE_CODE (sub) == POINTER_PLUS_EXPR
-          && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
+          && poly_int_tree_p (TREE_OPERAND (sub, 1), &const_op01))
     {
       tree op00 = TREE_OPERAND (sub, 0);
       tree op01 = TREE_OPERAND (sub, 1);
@@ -3121,29 +3123,37 @@ cxx_fold_indirect_ref (location_t loc, t
 
          /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
          if (VECTOR_TYPE_P (op00type)
-             && (same_type_ignoring_top_level_qualifiers_p
-                 (type, TREE_TYPE (op00type))))
+             && same_type_ignoring_top_level_qualifiers_p
+                                               (type, TREE_TYPE (op00type))
+             /* POINTER_PLUS_EXPR second operand is sizetype, unsigned,
+                but we want to treat offsets with MSB set as negative.
+                For the code below negative offsets are invalid and
+                TYPE_SIZE of the element is something unsigned, so
+                check whether op01 fits into poly_int64, which implies
+                it is from 0 to INTTYPE_MAXIMUM (HOST_WIDE_INT), and
+                then just use poly_uint64 because we want to treat the
+                value as unsigned.  */
+             && tree_fits_poly_int64_p (op01))
            {
-             HOST_WIDE_INT offset = tree_to_shwi (op01);
              tree part_width = TYPE_SIZE (type);
-             unsigned HOST_WIDE_INT part_widthi = tree_to_shwi (part_width)/BITS_PER_UNIT;
-             unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
-             tree index = bitsize_int (indexi);
-
-             if (known_lt (offset / part_widthi,
-                           TYPE_VECTOR_SUBPARTS (op00type)))
-               return fold_build3_loc (loc,
-                                       BIT_FIELD_REF, type, op00,
-                                       part_width, index);
-
+             poly_uint64 max_offset
+               = (tree_to_uhwi (part_width) / BITS_PER_UNIT
+                  * TYPE_VECTOR_SUBPARTS (op00type));
+             if (known_lt (const_op01, max_offset))
+               {
+                 tree index = bitsize_int (const_op01 * BITS_PER_UNIT);
+                 return fold_build3_loc (loc,
+                                         BIT_FIELD_REF, type, op00,
+                                         part_width, index);
+               }
            }
          /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
          else if (TREE_CODE (op00type) == COMPLEX_TYPE
                   && (same_type_ignoring_top_level_qualifiers_p
                       (type, TREE_TYPE (op00type))))
            {
-             tree size = TYPE_SIZE_UNIT (type);
-             if (tree_int_cst_equal (size, op01))
+             if (known_eq (wi::to_poly_offset (TYPE_SIZE_UNIT (type)),
+                           const_op01))
                return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
            }
          /* ((foo *)&fooarray)[1] => fooarray[1] */
@@ -3198,7 +3208,8 @@ cxx_fold_indirect_ref (location_t loc, t
     {
       tree type_domain;
       tree min_val = size_zero_node;
-      tree newsub = cxx_fold_indirect_ref (loc, TREE_TYPE (subtype), sub, NULL);
+      tree newsub
+       = cxx_fold_indirect_ref (loc, TREE_TYPE (subtype), sub, NULL);
       if (newsub)
        sub = newsub;
       else
--- gcc/testsuite/g++.dg/torture/pr83659.C.jj   2018-02-08 12:29:11.994353867 +0100
+++ gcc/testsuite/g++.dg/torture/pr83659.C      2018-02-08 12:29:11.994353867 +0100
@@ -0,0 +1,18 @@
+// PR c++/83659
+// { dg-do compile }
+
+typedef int V __attribute__ ((__vector_size__ (16)));
+V a;
+V b[2];
+
+int
+foo ()
+{
+  return reinterpret_cast <int *> (&a)[-1] += 1;
+}
+
+int
+bar ()
+{
+  return reinterpret_cast <int *> (&a[1])[-1];
+}


        Jakub
