Index: gcc/tree-ssa-reassoc.c
===================================================================
--- gcc/tree-ssa-reassoc.c	(revision 194263)
+++ gcc/tree-ssa-reassoc.c	(working copy)
@@ -1154,6 +1154,7 @@ build_and_add_sum (tree type, tree op1, tree op2,
       && (!op2def || gimple_nop_p (op2def)))
     {
       gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR));
+      gimple_set_uid (sum, gimple_uid (gsi_stmt (gsi)));
       gsi_insert_before (&gsi, sum, GSI_NEW_STMT);
     }
   else if ((!op1def || gimple_nop_p (op1def))
@@ -1163,6 +1164,7 @@ build_and_add_sum (tree type, tree op1, tree op2,
       if (gimple_code (op2def) == GIMPLE_PHI)
 	{
 	  gsi = gsi_after_labels (gimple_bb (op2def));
+          gimple_set_uid (sum, gimple_uid (gsi_stmt (gsi)));
 	  gsi_insert_before (&gsi, sum, GSI_NEW_STMT);
 	}
       else
@@ -1170,6 +1172,7 @@ build_and_add_sum (tree type, tree op1, tree op2,
 	  if (!stmt_ends_bb_p (op2def))
 	    {
 	      gsi = gsi_for_stmt (op2def);
+              gimple_set_uid (sum, gimple_uid (op2def));
 	      gsi_insert_after (&gsi, sum, GSI_NEW_STMT);
 	    }
 	  else
@@ -1188,6 +1191,7 @@ build_and_add_sum (tree type, tree op1, tree op2,
       if (gimple_code (op1def) == GIMPLE_PHI)
 	{
 	  gsi = gsi_after_labels (gimple_bb (op1def));
+          gimple_set_uid (sum, gimple_uid (op1def));
 	  gsi_insert_before (&gsi, sum, GSI_NEW_STMT);
 	}
       else
@@ -1195,6 +1199,7 @@ build_and_add_sum (tree type, tree op1, tree op2,
 	  if (!stmt_ends_bb_p (op1def))
 	    {
 	      gsi = gsi_for_stmt (op1def);
+              gimple_set_uid (sum, gimple_uid (op1def));
 	      gsi_insert_after (&gsi, sum, GSI_NEW_STMT);
 	    }
 	  else
@@ -2829,6 +2834,116 @@ swap_ops_for_binary_stmt (vec<operand_entry_t> ops
     }
 }
 
+/* Return the statement after which STMT should be placed so that DEP_STMT's
+   result is available to it; return STMT itself if no move is needed.  */
+
+static gimple
+find_insert_point (gimple stmt, gimple dep_stmt)
+{
+  gimple insert_stmt = stmt;
+  basic_block dep_bb;
+  basic_block insert_bb = gimple_bb (stmt);
+  if (dep_stmt == NULL)
+    return stmt;
+  dep_bb = gimple_bb (dep_stmt);
+  if ((dep_bb != insert_bb
+       && !dominated_by_p (CDI_DOMINATORS, insert_bb, dep_bb))
+      || (dep_bb == insert_bb
+          && gimple_uid (insert_stmt) < gimple_uid (dep_stmt)))
+    {
+      insert_stmt = dep_stmt;
+    }
+  return insert_stmt;
+}
+
+/* Move STMT so that it is placed just after INSERT_POINT.  */
+
+static void
+insert_stmt_after (gimple stmt, gimple insert_point)
+{
+  imm_use_iterator iter;
+  tree lhs;
+  gimple use_stmt;
+  gimple_stmt_iterator gsistmt = gsi_for_stmt (stmt), gsi_insert;
+  basic_block insert_bb = gimple_bb (insert_point);
+  lhs = gimple_assign_lhs (stmt);
+  /* If there are any debug uses of LHS, reset them.  */
+  FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
+    {
+      if (is_gimple_debug (use_stmt))
+        {
+          gimple_debug_bind_reset_value (use_stmt);
+          update_stmt (use_stmt);
+        }
+    }
+  /* If INSERT_POINT is a phi node, do not insert just after that statement.
+     Instead, find the first non-label gimple statement in its basic block
+     and insert before that.  */
+  if (gimple_code (insert_point) == GIMPLE_PHI)
+    {
+      gsi_insert = gsi_after_labels (insert_bb);
+      gsi_move_before (&gsistmt, &gsi_insert);
+    }
+  /* A statement that can throw must end its basic block, so we cannot
+     insert a non-throwing statement immediately after it.  */
+  else if (stmt_can_throw_internal (insert_point))
+    {
+      edge succ_edge = find_fallthru_edge (insert_bb->succs);
+      insert_bb = succ_edge->dest;
+      /* Insert STMT at the beginning of the successor basic block.  */
+      gsi_insert = gsi_after_labels (insert_bb);
+      gsi_move_before (&gsistmt, &gsi_insert);
+    }
+  else
+    {
+      gsi_insert = gsi_for_stmt (insert_point);
+      gsi_move_after (&gsistmt, &gsi_insert);
+    }
+  /* Set the UID of STMT to that of INSERT_POINT so that subsequent comparisons
+     of UIDs to determine dominance within a basic block work.  */
+  gimple_set_uid (stmt, gimple_uid (insert_point));
+}
+
+/* If OP is an SSA name that is not a default definition, return the
+   gimple statement that defines OP.  Otherwise return NULL.  */
+
+static inline gimple
+get_def_stmt (tree op)
+{
+  if (TREE_CODE (op) == SSA_NAME
+      && !SSA_NAME_IS_DEFAULT_DEF (op))
+    return SSA_NAME_DEF_STMT (op);
+  else
+    return NULL;
+}
+
+/* Ensure that the operands in OPS starting at OPINDEX are available at STMT
+   and at the statements STMT depends on, moving them where necessary.  */
+
+static void
+ensure_ops_are_available (gimple stmt, vec<operand_entry_t> ops, int opindex)
+{
+  unsigned int len = ops.length ();
+  gimple insert_stmt = stmt;
+  gimple dep_stmts[2];
+  dep_stmts[0] = get_def_stmt (ops[opindex]->op);
+  if (len - opindex == 2)
+    {
+      dep_stmts[1] = get_def_stmt (ops[opindex + 1]->op);
+    }
+  else
+    {
+      gimple stmt1 = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt));
+      ensure_ops_are_available (stmt1, ops, opindex + 1);
+      dep_stmts[1] = stmt1;
+    }
+  for (int i = 0; i < 2; i++)
+    insert_stmt = find_insert_point (insert_stmt, dep_stmts[i]);
+
+  if (insert_stmt != stmt)
+    insert_stmt_after (stmt, insert_stmt);
+}
+
 /* Recursively rewrite our linearized statements so that the operators
    match those in OPS[OPINDEX], putting the computation in rank
    order.  */
@@ -2841,11 +2956,6 @@ rewrite_expr_tree (gimple stmt, unsigned int opind
   tree rhs2 = gimple_assign_rhs2 (stmt);
   operand_entry_t oe;
 
-  /* If we have three operands left, then we want to make sure the ones
-     that get the double binary op are chosen wisely.  */
-  if (opindex + 3 == ops.length ())
-    swap_ops_for_binary_stmt (ops, opindex, stmt);
-
   /* The final recursion case for this function is that you have
      exactly two operations left.
      If we had one exactly one op in the entire list to start with, we
@@ -2891,20 +3001,7 @@ rewrite_expr_tree (gimple stmt, unsigned int opind
     {
       if (!moved)
 	{
-	  gimple_stmt_iterator gsinow, gsirhs1;
-	  gimple stmt1 = stmt, stmt2;
-	  unsigned int count;
-
-	  gsinow = gsi_for_stmt (stmt);
-	  count = ops.length () - opindex - 2;
-	  while (count-- != 0)
-	    {
-	      stmt2 = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt1));
-	      gsirhs1 = gsi_for_stmt (stmt2);
-	      gsi_move_before (&gsirhs1, &gsinow);
-	      gsi_prev (&gsinow);
-	      stmt1 = stmt2;
-	    }
+          ensure_ops_are_available (stmt, ops, opindex);
 	  moved = true;
 	}
 
@@ -3116,6 +3213,7 @@ linearize_expr (gimple stmt)
   gsinow = gsi_for_stmt (stmt);
   gsirhs = gsi_for_stmt (binrhs);
   gsi_move_before (&gsirhs, &gsinow);
+  gimple_set_uid (binrhs, gimple_uid (stmt));
 
   gimple_assign_set_rhs2 (stmt, gimple_assign_rhs1 (binrhs));
   gimple_assign_set_rhs1 (binrhs, gimple_assign_lhs (binlhs));
@@ -4122,8 +4220,17 @@ reassociate_bb (basic_block bb)
 		      && ops.length () > 3)
 		    rewrite_expr_tree_parallel (stmt, width, ops);
 		  else
-		    rewrite_expr_tree (stmt, 0, ops, false);
+                    {
+                      /* When there are at least three operands, make
+                         sure the last three, which get the double
+                         binary op, are chosen wisely.  */
+                      int len = ops.length ();
+                      if (len >= 3)
+                        swap_ops_for_binary_stmt (ops, len - 3, stmt);
 
+		      rewrite_expr_tree (stmt, 0, ops, false);
+                    }
+
 		  /* If we combined some repeated factors into a 
 		     __builtin_powi call, multiply that result by the
 		     reassociated operands.  */
@@ -4183,6 +4290,7 @@ static void
 do_reassoc (void)
 {
   break_up_subtract_bb (ENTRY_BLOCK_PTR);
+  renumber_gimple_stmt_uids ();
   reassociate_bb (EXIT_BLOCK_PTR);
 }
 
