The following fixes the situation where the initial sweep over the
CFG to remove trivially dead code regions causes excessive compile time
because it uses remove_edge_and_dominated_blocks and thus
iterate_fix_dominators.

The good thing is that the previously added cleanup_control_flow_pre
performs this initial sweep in PRE order.  So we can simply remove the
entry edges into the dead code regions and use the visited bitmap kept
by the PRE walk to remove the now-unreachable blocks afterwards.

For dominators we then re-compute them from scratch, which is much
faster for the testcase (a reduced version gets CFG cleanup time down
from 19s to 0.16s).  The testcase still runs into a backwards
jump-threading scalability issue.

Note the patch will be slower in the case where only few edges are
removed, but it looks hard to find a good cut-off up front.

Note we unfortunately cannot merge this with the unreachable-block
removal because of the intermediate code that inserts loop entry
forwarders, which needs dominators to identify natural loop backedges.

Any opinions?

Bootstrap & regtest running on x86_64-unknown-linux-gnu.

Thanks,
Richard.

diff --git a/gcc/tree-cfgcleanup.c b/gcc/tree-cfgcleanup.c
index 1bf7771dac1..4915d5e8f5f 100644
--- a/gcc/tree-cfgcleanup.c
+++ b/gcc/tree-cfgcleanup.c
@@ -57,7 +57,7 @@ bitmap cfgcleanup_altered_bbs;
 /* Remove any fallthru edge from EV.  Return true if an edge was removed.  */
 
 static bool
-remove_fallthru_edge (vec<edge, va_gc> *ev)
+remove_fallthru_edge (vec<edge, va_gc> *ev, bool only_edges_p)
 {
   edge_iterator ei;
   edge e;
@@ -68,7 +68,12 @@ remove_fallthru_edge (vec<edge, va_gc> *ev)
        if (e->flags & EDGE_COMPLEX)
          e->flags &= ~EDGE_FALLTHRU;
        else
-         remove_edge_and_dominated_blocks (e);
+         {
+           if (only_edges_p)
+             remove_edge (e);
+           else
+             remove_edge_and_dominated_blocks (e);
+         }
        return true;
       }
   return false;
@@ -122,7 +127,8 @@ convert_single_case_switch (gswitch *swtch, 
gimple_stmt_iterator &gsi)
    at block BB.  */
 
 static bool
-cleanup_control_expr_graph (basic_block bb, gimple_stmt_iterator gsi)
+cleanup_control_expr_graph (basic_block bb, gimple_stmt_iterator gsi,
+                           bool only_edges_p)
 {
   edge taken_edge;
   bool retval = false;
@@ -182,7 +188,10 @@ cleanup_control_expr_graph (basic_block bb, 
gimple_stmt_iterator gsi)
                }
 
              taken_edge->probability += e->probability;
-             remove_edge_and_dominated_blocks (e);
+             if (only_edges_p)
+               remove_edge (e);
+             else
+               remove_edge_and_dominated_blocks (e);
              retval = true;
            }
          else
@@ -222,7 +231,7 @@ cleanup_call_ctrl_altering_flag (gimple *bb_end)
    true if anything changes.  */
 
 static bool
-cleanup_control_flow_bb (basic_block bb)
+cleanup_control_flow_bb (basic_block bb, bool only_edges_p)
 {
   gimple_stmt_iterator gsi;
   bool retval = false;
@@ -230,7 +239,27 @@ cleanup_control_flow_bb (basic_block bb)
 
   /* If the last statement of the block could throw and now cannot,
      we need to prune cfg.  */
-  retval |= gimple_purge_dead_eh_edges (bb);
+  if (only_edges_p)
+    {
+      gimple *stmt = last_stmt (bb);
+      if (!(stmt && stmt_can_throw_internal (stmt)))
+       {
+         edge e;
+         edge_iterator ei;
+         for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
+           {
+             if (e->flags & EDGE_EH)
+               {
+                 remove_edge (e);
+                 retval = true;
+               }
+             else
+               ei_next (&ei);
+           }
+       }
+    }
+  else
+    retval |= gimple_purge_dead_eh_edges (bb);
 
   gsi = gsi_last_nondebug_bb (bb);
   if (gsi_end_p (gsi))
@@ -245,7 +274,7 @@ cleanup_control_flow_bb (basic_block bb)
       || gimple_code (stmt) == GIMPLE_SWITCH)
     {
       gcc_checking_assert (gsi_stmt (gsi_last_bb (bb)) == stmt);
-      retval |= cleanup_control_expr_graph (bb, gsi);
+      retval |= cleanup_control_expr_graph (bb, gsi, only_edges_p);
     }
   else if (gimple_code (stmt) == GIMPLE_GOTO
           && TREE_CODE (gimple_goto_dest (stmt)) == ADDR_EXPR
@@ -270,7 +299,12 @@ cleanup_control_flow_bb (basic_block bb)
       for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
        {
          if (e->dest != target_block)
-           remove_edge_and_dominated_blocks (e);
+           {
+             if (only_edges_p)
+               remove_edge (e);
+             else
+               remove_edge_and_dominated_blocks (e);
+           }
          else
            {
              /* Turn off the EDGE_ABNORMAL flag.  */
@@ -300,7 +334,7 @@ cleanup_control_flow_bb (basic_block bb)
         now, they should be all unreachable anyway.  */
       for (gsi_next (&gsi); !gsi_end_p (gsi); )
        gsi_remove (&gsi, true);
-      if (remove_fallthru_edge (bb->succs))
+      if (remove_fallthru_edge (bb->succs, only_edges_p))
        retval = true;
     }
 
@@ -741,7 +775,7 @@ cleanup_control_flow_pre ()
          && ! bitmap_bit_p (visited, dest->index))
        {
          bitmap_set_bit (visited, dest->index);
-         retval |= cleanup_control_flow_bb (dest);
+         retval |= cleanup_control_flow_bb (dest, true);
          if (EDGE_COUNT (dest->succs) > 0)
            stack.quick_push (ei_start (dest->succs));
        }
@@ -754,6 +788,19 @@ cleanup_control_flow_pre ()
        }
     }
 
+  /* If we removed any edge remove all now unreachable blocks.  */
+  if (retval)
+    {
+      free_dominance_info (CDI_DOMINATORS);
+      for (int i = NUM_FIXED_BLOCKS; i < last_basic_block_for_fn (cfun); ++i)
+       {
+         basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
+         if (bb && !bitmap_bit_p (visited, bb->index))
+           delete_basic_block (bb);
+       }
+      calculate_dominance_info (CDI_DOMINATORS);
+    }
+
   return retval;
 }
 
@@ -808,7 +855,7 @@ cleanup_tree_cfg_1 (void)
       if (!bb)
        continue;
 
-      retval |= cleanup_control_flow_bb (bb);
+      retval |= cleanup_control_flow_bb (bb, false);
       retval |= cleanup_tree_cfg_bb (bb);
     }
 

Reply via email to