From 4a60186570defebed7d811b37c46092267407a96 Mon Sep 17 00:00:00 2001
From: Maximilian Downey Twiss <creatorsmithmdt@gmail.com>
Date: Fri, 18 Nov 2022 09:05:50 +1100
Subject: [PATCH 13/56] Re-add flag_evaluation_order, reorder_operands_p, and
 add reorder bool argument to tree_swap_operands_p.

gcc/ChangeLog:

	* common.opt (flag_evaluation_order): Re-add.
	* expr.cc (expand_operands): Re-add code guarded by flag_evaluation_order.
	* fold-const.cc (reorder_operands_p): Re-add.
	(negate_expr_p): Handle reorder_operands_p.
	(fold_negate_expr_1): Likewise.
	(tree_swap_operands_p): Re-add reorder argument.
	(fold_binary_loc): Likewise.
	(fold_ternary_loc): Likewise.
	* fold-const.h (tree_swap_operands_p): Likewise.
	* genmatch.cc (dt_operand::gen_gimple_expr): Likewise.
	* gimple-fold.cc (fold_stmt_1): Likewise.
	* gimple-match-head.cc (gimple_resimplify2): Likewise.
	(gimple_resimplify3): Likewise.
	(gimple_resimplify4): Likewise.
	(gimple_resimplify5): Likewise.
	(gimple_simplify): Likewise.
	* match.pd: Likewise.
	* tree-ssa-dom.cc (record_equality): Likewise.
	* tree-ssa-reassoc.cc (optimize_range_tests_var_bound): Likewise.
	* tree-ssa-sccvn.cc (vn_nary_op_compute_hash): Likewise.
	* tree-ssa-threadedge.cc
	(jump_threader::simplify_control_stmt_condition_1): Likewise.
---
 gcc/common.opt             |  4 +++
 gcc/expr.cc                |  4 +++
 gcc/fold-const.cc          | 51 ++++++++++++++++++++++++++++++--------
 gcc/fold-const.h           |  2 +-
 gcc/genmatch.cc            |  2 +-
 gcc/gimple-fold.cc         |  8 +++---
 gcc/gimple-match-head.cc   | 20 +++++++++------
 gcc/match.pd               |  2 +-
 gcc/tree-ssa-dom.cc        |  2 +-
 gcc/tree-ssa-reassoc.cc    |  2 +-
 gcc/tree-ssa-sccvn.cc      |  4 +--
 gcc/tree-ssa-threadedge.cc |  2 +-
 12 files changed, 74 insertions(+), 29 deletions(-)

diff --git a/gcc/common.opt b/gcc/common.opt
index 26e9d1cc4e7..318eafc2e8c 100644
--- a/gcc/common.opt
+++ b/gcc/common.opt
@@ -62,6 +62,10 @@ int flag_complex_method = 1
 Variable
 int flag_default_complex_method = 1
 
+; Nonzero if subexpressions must be evaluated from left-to-right.
+Variable
+int flag_evaluation_order = 0
+
 ; Language specific warning pass for unused results.
 Variable
 bool flag_warn_unused_result = false
diff --git a/gcc/expr.cc b/gcc/expr.cc
index d9407432ea5..2e770ab090d 100644
--- a/gcc/expr.cc
+++ b/gcc/expr.cc
@@ -8576,6 +8576,10 @@ expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
     }
   else
     {
+      /* If we need to preserve evaluation order, copy exp0 into its own
+	 temporary variable so that it can't be clobbered by exp1.  */
+      if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
+	exp0 = save_expr (exp0);
       *op0 = expand_expr (exp0, target, VOIDmode, modifier);
       *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
     }
diff --git a/gcc/fold-const.cc b/gcc/fold-const.cc
index b89cac91cae..374878b31a4 100644
--- a/gcc/fold-const.cc
+++ b/gcc/fold-const.cc
@@ -139,6 +139,7 @@ static tree fold_cond_expr_with_comparison (location_t, tree, enum tree_code,
 static tree unextend (tree, int, int, tree);
 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
+static bool reorder_operands_p (const_tree, const_tree);
 static tree fold_binary_op_with_conditional_arg (location_t,
 						 enum tree_code, tree,
 						 tree, tree,
@@ -472,7 +473,9 @@ negate_expr_p (tree t)
 	      && ! TYPE_OVERFLOW_WRAPS (type)))
 	return false;
       /* -(A + B) -> (-B) - A.  */
-      if (negate_expr_p (TREE_OPERAND (t, 1)))
+      if (negate_expr_p (TREE_OPERAND (t, 1))
+	  && reorder_operands_p (TREE_OPERAND (t, 0),
+				 TREE_OPERAND (t, 1)))
 	return true;
       /* -(A + B) -> (-A) - B.  */
       return negate_expr_p (TREE_OPERAND (t, 0));
@@ -482,7 +485,9 @@ negate_expr_p (tree t)
       return !HONOR_SIGN_DEPENDENT_ROUNDING (type)
 	     && !HONOR_SIGNED_ZEROS (type)
 	     && (! ANY_INTEGRAL_TYPE_P (type)
-		 || TYPE_OVERFLOW_WRAPS (type));
+		 || TYPE_OVERFLOW_WRAPS (type))
+	     && reorder_operands_p (TREE_OPERAND (t, 0),
+				    TREE_OPERAND (t, 1));
 
     case MULT_EXPR:
       if (TYPE_UNSIGNED (type))
@@ -643,7 +648,9 @@ fold_negate_expr_1 (location_t loc, tree t)
 	  && !HONOR_SIGNED_ZEROS (type))
 	{
 	  /* -(A + B) -> (-B) - A.  */
-	  if (negate_expr_p (TREE_OPERAND (t, 1)))
+	  if (negate_expr_p (TREE_OPERAND (t, 1))
+	      && reorder_operands_p (TREE_OPERAND (t, 0),
+				     TREE_OPERAND (t, 1)))
 	    {
 	      tem = negate_expr (TREE_OPERAND (t, 1));
 	      return fold_build2_loc (loc, MINUS_EXPR, type,
@@ -663,7 +670,8 @@ fold_negate_expr_1 (location_t loc, tree t)
     case MINUS_EXPR:
       /* - (A - B) -> B - A  */
       if (!HONOR_SIGN_DEPENDENT_ROUNDING (type)
-	  && !HONOR_SIGNED_ZEROS (type))
+	  && !HONOR_SIGNED_ZEROS (type)
+	  && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
 	return fold_build2_loc (loc, MINUS_EXPR, type,
 				TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
       break;
@@ -7492,12 +7500,27 @@ fold_single_bit_test (location_t loc, enum tree_code code,
   return NULL_TREE;
 }
 
+/* Check whether we are allowed to reorder operands arg0 and arg1,
+   such that the evaluation of arg1 occurs before arg0.  */
+
+static bool
+reorder_operands_p (const_tree arg0, const_tree arg1)
+{
+  if (! flag_evaluation_order)
+    return true;
+  if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
+    return true;
+  return ! TREE_SIDE_EFFECTS (arg0)
+	 && ! TREE_SIDE_EFFECTS (arg1);
+}
+
 /* Test whether it is preferable to swap two operands, ARG0 and
    ARG1, for example because ARG0 is an integer constant and ARG1
-   isn't.  */
+   isn't.  If REORDER is true, only recommend swapping if we can
+   evaluate the operands in reverse order.  */
 
 bool
-tree_swap_operands_p (const_tree arg0, const_tree arg1)
+tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
 {
   if (CONSTANT_CLASS_P (arg1))
     return 0;
@@ -7512,6 +7535,10 @@ tree_swap_operands_p (const_tree arg0, const_tree arg1)
   if (TREE_CONSTANT (arg0))
     return 1;
 
+  if (reorder && flag_evaluation_order
+      && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
+    return 0;
+
   /* It is preferable to swap two SSA_NAME to ensure a canonical form
      for commutative and comparison operators.  Ensuring a canonical
      form allows the optimizers to find additional redundancies without
@@ -10904,13 +10931,13 @@ fold_binary_loc (location_t loc, enum tree_code code, tree type,
   /* If this is a commutative operation, and ARG0 is a constant, move it
      to ARG1 to reduce the number of tests below.  */
   if (commutative_tree_code (code)
-      && tree_swap_operands_p (arg0, arg1))
+      && tree_swap_operands_p (arg0, arg1, true))
     return fold_build2_loc (loc, code, type, op1, op0);
 
   /* Likewise if this is a comparison, and ARG0 is a constant, move it
      to ARG1 to reduce the number of tests below.  */
   if (kind == tcc_comparison
-      && tree_swap_operands_p (arg0, arg1))
+      && tree_swap_operands_p (arg0, arg1, true))
     return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
 
   tem = generic_simplify (loc, code, type, op0, op1);
@@ -10969,7 +10996,8 @@ fold_binary_loc (location_t loc, enum tree_code code, tree type,
 	  return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
 			     tem);
 	}
-      if (TREE_CODE (arg1) == COMPOUND_EXPR)
+      if (TREE_CODE (arg1) == COMPOUND_EXPR
+	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
 	{
 	  tem = fold_build2_loc (loc, code, type, op0,
 			     fold_convert_loc (loc, TREE_TYPE (op1),
@@ -11519,6 +11547,7 @@ fold_binary_loc (location_t loc, enum tree_code code, tree type,
       /* (-A) - B -> (-B) - A  where B is easily negated and we can swap.  */
       if (TREE_CODE (arg0) == NEGATE_EXPR
 	  && negate_expr_p (op1)
+	  && reorder_operands_p (arg0, arg1)
 	  /* If arg0 is e.g. unsigned int and type is int, then this could
 	     introduce UB, because if A is INT_MIN at runtime, the original
 	     expression can be well defined while the latter is not.
@@ -12798,7 +12827,7 @@ fold_ternary_loc (location_t loc, enum tree_code code, tree type,
   /* If this is a commutative operation, and OP0 is a constant, move it
      to OP1 to reduce the number of tests below.  */
   if (commutative_ternary_tree_code (code)
-      && tree_swap_operands_p (op0, op1))
+      && tree_swap_operands_p (op0, op1, true))
     return fold_build3_loc (loc, code, type, op1, op0, op2);
 
   tem = generic_simplify (loc, code, type, op0, op1, op2);
@@ -12931,7 +12960,7 @@ fold_ternary_loc (location_t loc, enum tree_code code, tree type,
       /* If the second operand is simpler than the third, swap them
 	 since that produces better jump optimization results.  */
       if (truth_value_p (TREE_CODE (arg0))
-	  && tree_swap_operands_p (op1, op2))
+	  && tree_swap_operands_p (op1, op2, false))
 	{
 	  location_t loc0 = expr_location_or (arg0, loc);
 	  /* See if this can be inverted.  If it can't, possibly because
diff --git a/gcc/fold-const.h b/gcc/fold-const.h
index fa284c712bf..486dad8d252 100644
--- a/gcc/fold-const.h
+++ b/gcc/fold-const.h
@@ -149,7 +149,7 @@ extern tree build_invariant_address (tree, tree, poly_int64);
 extern tree constant_boolean_node (bool, tree);
 extern tree div_if_zero_remainder (const_tree, const_tree);
 
-extern bool tree_swap_operands_p (const_tree, const_tree);
+extern bool tree_swap_operands_p (const_tree, const_tree, bool);
 extern enum tree_code swap_tree_comparison (enum tree_code);
 
 extern bool ptr_difference_const (tree, tree, poly_int64_pod *);
diff --git a/gcc/genmatch.cc b/gcc/genmatch.cc
index 4a8802469cd..6b2be7da519 100644
--- a/gcc/genmatch.cc
+++ b/gcc/genmatch.cc
@@ -2873,7 +2873,7 @@ dt_operand::gen_gimple_expr (FILE *f, int indent, int depth)
       gen_opname (child_opname0, opno);
       gen_opname (child_opname1, opno + 1);
       fprintf_indent (f, indent,
-		      "if (tree_swap_operands_p (%s, %s))\n",
+		      "if (tree_swap_operands_p (%s, %s, false))\n",
 		      child_opname0, child_opname1);
       fprintf_indent (f, indent,
 		      "  std::swap (%s, %s);\n",
diff --git a/gcc/gimple-fold.cc b/gcc/gimple-fold.cc
index c2d9c806aee..efc0354566e 100644
--- a/gcc/gimple-fold.cc
+++ b/gcc/gimple-fold.cc
@@ -6142,7 +6142,7 @@ fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
 	    {
 	      tree rhs1 = gimple_assign_rhs1 (stmt);
 	      tree rhs2 = gimple_assign_rhs2 (stmt);
-	      if (tree_swap_operands_p (rhs1, rhs2))
+	      if (tree_swap_operands_p (rhs1, rhs2, false))
 		{
 		  gimple_assign_set_rhs1 (stmt, rhs2);
 		  gimple_assign_set_rhs2 (stmt, rhs1);
@@ -6178,7 +6178,9 @@ fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
 	      {
 		tree arg1 = gimple_call_arg (call, opno);
 		tree arg2 = gimple_call_arg (call, opno + 1);
-		if (tree_swap_operands_p (arg1, arg2))
+		/* FIXME: Find the correct third value to set for
+		   tree_swap_operands_p here.  */
+		if (tree_swap_operands_p (arg1, arg2, false))
 		  {
 		    gimple_call_set_arg (call, opno, arg2);
 		    gimple_call_set_arg (call, opno + 1, arg1);
@@ -6226,7 +6228,7 @@ fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
 	/* Canonicalize operand order.  */
 	tree lhs = gimple_cond_lhs (stmt);
 	tree rhs = gimple_cond_rhs (stmt);
-	if (tree_swap_operands_p (lhs, rhs))
+	if (tree_swap_operands_p (lhs, rhs, false))
 	  {
 	    gcond *gc = as_a <gcond *> (stmt);
 	    gimple_cond_set_lhs (gc, rhs);
diff --git a/gcc/gimple-match-head.cc b/gcc/gimple-match-head.cc
index 9986e3479f9..d48a4ca5c86 100644
--- a/gcc/gimple-match-head.cc
+++ b/gcc/gimple-match-head.cc
@@ -300,7 +300,7 @@ gimple_resimplify2 (gimple_seq *seq, gimple_match_op *res_op,
     = (res_op->code.is_tree_code ()
        && TREE_CODE_CLASS (tree_code (res_op->code)) == tcc_comparison);
   if ((is_comparison || commutative_binary_op_p (res_op->code, res_op->type))
-      && tree_swap_operands_p (res_op->ops[0], res_op->ops[1]))
+      && tree_swap_operands_p (res_op->ops[0], res_op->ops[1], false))
     {
       std::swap (res_op->ops[0], res_op->ops[1]);
       if (is_comparison)
@@ -374,11 +374,13 @@ gimple_resimplify3 (gimple_seq *seq, gimple_match_op *res_op,
 	}
     }
 
-  /* Canonicalize operand order.  */
+  /* Canonicalize operand order.  */
   bool canonicalized = false;
   int argno = first_commutative_argument (res_op->code, res_op->type);
   if (argno >= 0
-      && tree_swap_operands_p (res_op->ops[argno], res_op->ops[argno + 1]))
+      /* FIXME: Find the correct third value to set for
+	 tree_swap_operands_p here.  */
+      && tree_swap_operands_p (res_op->ops[argno], res_op->ops[argno + 1], false))
     {
       std::swap (res_op->ops[argno], res_op->ops[argno + 1]);
       canonicalized = true;
@@ -428,7 +430,9 @@ gimple_resimplify4 (gimple_seq *seq, gimple_match_op *res_op,
   bool canonicalized = false;
   int argno = first_commutative_argument (res_op->code, res_op->type);
   if (argno >= 0
-      && tree_swap_operands_p (res_op->ops[argno], res_op->ops[argno + 1]))
+      /* FIXME: Find the correct third value to set for
+	 tree_swap_operands_p here.  */
+      && tree_swap_operands_p (res_op->ops[argno], res_op->ops[argno + 1], false))
     {
       std::swap (res_op->ops[argno], res_op->ops[argno + 1]);
       canonicalized = true;
@@ -479,7 +483,9 @@ gimple_resimplify5 (gimple_seq *seq, gimple_match_op *res_op,
   bool canonicalized = false;
   int argno = first_commutative_argument (res_op->code, res_op->type);
   if (argno >= 0
-      && tree_swap_operands_p (res_op->ops[argno], res_op->ops[argno + 1]))
+      /* FIXME: Find the correct third value to set for
+	 tree_swap_operands_p here.  */
+      && tree_swap_operands_p (res_op->ops[argno], res_op->ops[argno + 1], false))
     {
       std::swap (res_op->ops[argno], res_op->ops[argno + 1]);
       canonicalized = true;
@@ -732,7 +738,7 @@ gimple_simplify (enum tree_code code, tree type,
      generation.  */
   if ((commutative_tree_code (code)
        || TREE_CODE_CLASS (code) == tcc_comparison)
-      && tree_swap_operands_p (op0, op1))
+      && tree_swap_operands_p (op0, op1, false))
     {
       std::swap (op0, op1);
       if (TREE_CODE_CLASS (code) == tcc_comparison)
@@ -764,7 +770,7 @@ gimple_simplify (enum tree_code code, tree type,
   /* Canonicalize operand order both for matching and fallback stmt
      generation.  */
   if (commutative_ternary_tree_code (code)
-      && tree_swap_operands_p (op0, op1))
+      && tree_swap_operands_p (op0, op1, false))
     std::swap (op0, op1);
 
   gimple_match_op res_op;
diff --git a/gcc/match.pd b/gcc/match.pd
index a4d1386fd9f..367cd6d8ca6 100644
--- a/gcc/match.pd
+++ b/gcc/match.pd
@@ -7870,7 +7870,7 @@ and,
       /* Since the order of @0 and @2 doesn't matter, let tree_swap_operands_p
 	 pick a canonical order.  This increases the chances of using the
 	 same pointer_plus in multiple checks.  */
-      (with { bool swap_p = tree_swap_operands_p (@0, @2);
+      (with { bool swap_p = tree_swap_operands_p (@0, @2, false);
 	      tree rhs_tree = wide_int_to_tree (sizetype, rhs); }
        (if (cmp == LT_EXPR)
 	(gt (convert:sizetype
diff --git a/gcc/tree-ssa-dom.cc b/gcc/tree-ssa-dom.cc
index c7f095d79fc..edcaa71e12e 100644
--- a/gcc/tree-ssa-dom.cc
+++ b/gcc/tree-ssa-dom.cc
@@ -1449,7 +1449,7 @@ record_equality (tree x, tree y, class const_and_copies *const_and_copies)
 {
   tree prev_x = NULL, prev_y = NULL;
 
-  if (tree_swap_operands_p (x, y))
+  if (tree_swap_operands_p (x, y, false))
     std::swap (x, y);
 
   /* Most of the time tree_swap_operands_p does what we want.  But there
diff --git a/gcc/tree-ssa-reassoc.cc b/gcc/tree-ssa-reassoc.cc
index b39c3c882c4..d50bece1d38 100644
--- a/gcc/tree-ssa-reassoc.cc
+++ b/gcc/tree-ssa-reassoc.cc
@@ -3987,7 +3987,7 @@ optimize_range_tests_var_bound (enum tree_code opcode, int first, int length,
 	  rhs2 = gimple_assign_lhs (g);
 	  gsi_insert_before (&gsi, g, GSI_SAME_STMT);
 	}
-      if (tree_swap_operands_p (rhs1, rhs2))
+      if (tree_swap_operands_p (rhs1, rhs2, false))
 	{
 	  std::swap (rhs1, rhs2);
 	  ccode = swap_tree_comparison (ccode);
diff --git a/gcc/tree-ssa-sccvn.cc b/gcc/tree-ssa-sccvn.cc
index 37484403c56..47872db7bcd 100644
--- a/gcc/tree-ssa-sccvn.cc
+++ b/gcc/tree-ssa-sccvn.cc
@@ -4146,10 +4146,10 @@ vn_nary_op_compute_hash (const vn_nary_op_t vno1)
 	&& commutative_tree_code (vno1->opcode))
        || (vno1->length == 3
 	   && commutative_ternary_tree_code (vno1->opcode)))
-      && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
+      && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
     std::swap (vno1->op[0], vno1->op[1]);
   else if (TREE_CODE_CLASS (vno1->opcode) == tcc_comparison
-	   && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
+	   && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
     {
       std::swap (vno1->op[0], vno1->op[1]);
       vno1->opcode = swap_tree_comparison  (vno1->opcode);
diff --git a/gcc/tree-ssa-threadedge.cc b/gcc/tree-ssa-threadedge.cc
index 905a98c8c68..0caef94a62b 100644
--- a/gcc/tree-ssa-threadedge.cc
+++ b/gcc/tree-ssa-threadedge.cc
@@ -490,7 +490,7 @@ jump_threader::simplify_control_stmt_condition_1
      example, op0 might be a constant while op1 is an
      SSA_NAME.  Failure to canonicalize will cause us to
      miss threading opportunities.  */
-  if (tree_swap_operands_p (op0, op1))
+  if (tree_swap_operands_p (op0, op1, false))
     {
       cond_code = swap_tree_comparison (cond_code);
       std::swap (op0, op1);
-- 
2.38.1

Reply via email to