This avoids the use of valid_gimple_rhs_p and instead gimplifies directly
to a valid GIMPLE RHS, which keeps IVOPTs from generating extra SSA copies.

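As a minimal sketch of the pattern (illustration only, not part of the
patch; 'expr' and 'stmts' are hypothetical placeholder names): calling
force_gimple_operand with simple_p == false only requires the result to
be valid as a GIMPLE assignment RHS, rather than a bare SSA name or
constant, so no extra copy statement is forced:

  gimple_seq stmts = NULL;
  /* Gimplify EXPR into a form usable as a GIMPLE assignment RHS;
     any helper statements end up in STMTS.  Passing true here would
     instead force a register value, creating an SSA copy.  */
  expr = force_gimple_operand (expr, &stmts, /*simple_p=*/false, NULL);
  /* STMTS can then be inserted before the use, e.g. with
     gsi_insert_seq_before.  */
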
Bootstrapped and tested on x86_64-unknown-linux-gnu, queued for stage1.

2021-04-14  Richard Biener  <rguent...@suse.de>

        * tree-ssa-loop-ivopts.c (rewrite_use_nonlinear_expr): Avoid
        valid_gimple_rhs_p by instead gimplifying to a valid GIMPLE RHS.
---
 gcc/tree-ssa-loop-ivopts.c | 13 +++++++------
 1 file changed, 7 insertions(+), 6 deletions(-)

diff --git a/gcc/tree-ssa-loop-ivopts.c b/gcc/tree-ssa-loop-ivopts.c
index 4012ae3f19d..12a8a49a307 100644
--- a/gcc/tree-ssa-loop-ivopts.c
+++ b/gcc/tree-ssa-loop-ivopts.c
@@ -7286,12 +7286,13 @@ rewrite_use_nonlinear_expr (struct ivopts_data *data,
     }
 
   comp = fold_convert (type, comp);
-  if (!valid_gimple_rhs_p (comp)
-      || (gimple_code (use->stmt) != GIMPLE_PHI
-         /* We can't allow re-allocating the stmt as it might be pointed
-            to still.  */
-         && (get_gimple_rhs_num_ops (TREE_CODE (comp))
-             >= gimple_num_ops (gsi_stmt (bsi)))))
+  comp = force_gimple_operand (comp, &seq, false, NULL);
+  gimple_seq_add_seq (&stmt_list, seq);
+  if (gimple_code (use->stmt) != GIMPLE_PHI
+      /* We can't allow re-allocating the stmt as it might be pointed
+        to still.  */
+      && (get_gimple_rhs_num_ops (TREE_CODE (comp))
+         >= gimple_num_ops (gsi_stmt (bsi))))
     {
       comp = force_gimple_operand (comp, &seq, true, NULL);
       gimple_seq_add_seq (&stmt_list, seq);
-- 
2.26.2
