This fixes a latent bug I ran into with the RPO value-numbering work which, like all SSA propagators and the existing FRE/PRE, does not perform elimination in unreachable code regions.  Those unreachable regions are controlled by if (0 != 0) style conditions, and CFG cleanup is supposed to deal with the invalid IL in them by removing them before doing any "real" work.  My original attempt at doing this was flawed since, as I found out with a Go testcase, the removal of dead EH edges done by the early sweep inspects the possibly throwing stmt and can end up ICEing on released SSA names.
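For reference, the kind of dead region in question looks like this at the source level (a made-up C/C++ sketch, not the actual Go testcase; the null-pointer load just stands in for any possibly trapping or throwing stmt):

/* Made-up example: the guard folds to false, so the block it controls is
   unreachable.  CFG cleanup is expected to remove the dead edge, and with
   it the region, before any other pass inspects the stmts inside it.  */

int
dead_region_example (int a)
{
  int x = a;
  if (0 != 0)		/* constant-false guard */
    x = *(int *) 0;	/* trapping load, only on the unreachable path */
  return x;
}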
The following patch makes the whole thing more bullet-proof by making sure we never visit blocks in such dead regions: it performs a DFS walk over the CFG and removes dead outgoing edges in PRE order.  This adds a little overhead but should also speed up processing, given we previously visited most blocks twice by means of the now removed

-	  /* During a first iteration on the CFG only remove trivially
-	     dead edges but mark other conditions for re-evaluation.  */
-	  if (first_p)
-	    {
-	      val = const_binop (gimple_cond_code (stmt), boolean_type_node,
-				 gimple_cond_lhs (stmt),
-				 gimple_cond_rhs (stmt));
-	      if (! val)
-		bitmap_set_bit (cfgcleanup_altered_bbs, bb->index);

Bootstrapped on x86_64-unknown-linux-gnu, testing in progress.

Richard.

2017-11-14  Richard Biener  <rguent...@suse.de>

	* tree-cfgcleanup.c (cleanup_control_expr_graph): Remove first_p
	parameter and handling.
	(cleanup_control_flow_bb): Likewise.
	(cleanup_control_flow_pre): New helper performing a DFS walk
	to call cleanup_control_flow_bb in PRE order.
	(cleanup_tree_cfg_1): Do the first phase of cleanup_control_flow_bb
	via cleanup_control_flow_pre.

Index: gcc/tree-cfgcleanup.c
===================================================================
--- gcc/tree-cfgcleanup.c	(revision 254718)
+++ gcc/tree-cfgcleanup.c	(working copy)
@@ -122,8 +122,7 @@ convert_single_case_switch (gswitch *swt
    at block BB.  */
 
 static bool
-cleanup_control_expr_graph (basic_block bb, gimple_stmt_iterator gsi,
-			    bool first_p)
+cleanup_control_expr_graph (basic_block bb, gimple_stmt_iterator gsi)
 {
   edge taken_edge;
   bool retval = false;
@@ -146,25 +145,14 @@ cleanup_control_expr_graph (basic_block
       switch (gimple_code (stmt))
 	{
 	case GIMPLE_COND:
-	  /* During a first iteration on the CFG only remove trivially
-	     dead edges but mark other conditions for re-evaluation.  */
-	  if (first_p)
-	    {
-	      val = const_binop (gimple_cond_code (stmt), boolean_type_node,
-				 gimple_cond_lhs (stmt),
-				 gimple_cond_rhs (stmt));
-	      if (! val)
-		bitmap_set_bit (cfgcleanup_altered_bbs, bb->index);
-	    }
-	  else
-	    {
-	      code_helper rcode;
-	      tree ops[3] = {};
-	      if (gimple_simplify (stmt, &rcode, ops, NULL, no_follow_ssa_edges,
-				   no_follow_ssa_edges)
-		  && rcode == INTEGER_CST)
-		val = ops[0];
-	    }
+	  {
+	    code_helper rcode;
+	    tree ops[3] = {};
+	    if (gimple_simplify (stmt, &rcode, ops, NULL, no_follow_ssa_edges,
+				 no_follow_ssa_edges)
+		&& rcode == INTEGER_CST)
+	      val = ops[0];
+	  }
 	  break;
 
 	case GIMPLE_SWITCH:
@@ -235,7 +223,7 @@ cleanup_call_ctrl_altering_flag (gimple
    true if anything changes.  */
 
 static bool
-cleanup_control_flow_bb (basic_block bb, bool first_p)
+cleanup_control_flow_bb (basic_block bb)
 {
   gimple_stmt_iterator gsi;
   bool retval = false;
@@ -258,7 +246,7 @@ cleanup_control_flow_bb (basic_block bb,
 	  || gimple_code (stmt) == GIMPLE_SWITCH)
 	{
 	  gcc_checking_assert (gsi_stmt (gsi_last_bb (bb)) == stmt);
-	  retval |= cleanup_control_expr_graph (bb, gsi, first_p);
+	  retval |= cleanup_control_expr_graph (bb, gsi);
 	}
       else if (gimple_code (stmt) == GIMPLE_GOTO
 	       && TREE_CODE (gimple_goto_dest (stmt)) == ADDR_EXPR
@@ -732,6 +720,45 @@ cleanup_tree_cfg_bb (basic_block bb)
   return false;
 }
 
+/* Do cleanup_control_flow_bb in PRE order.  */
+
+static bool
+cleanup_control_flow_pre ()
+{
+  bool retval = false;
+
+  auto_vec<edge_iterator, 20> stack (n_basic_blocks_for_fn (cfun) + 1);
+  auto_sbitmap visited (last_basic_block_for_fn (cfun));
+  bitmap_clear (visited);
+
+  stack.quick_push (ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs));
+
+  while (! stack.is_empty ())
+    {
+      /* Look at the edge on the top of the stack.  */
+      edge_iterator ei = stack.last ();
+      basic_block dest = ei_edge (ei)->dest;
+
+      if (dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
+	  && ! bitmap_bit_p (visited, dest->index))
+	{
+	  bitmap_set_bit (visited, dest->index);
+	  retval |= cleanup_control_flow_bb (dest);
+	  if (EDGE_COUNT (dest->succs) > 0)
+	    stack.quick_push (ei_start (dest->succs));
+	}
+      else
+	{
+	  if (!ei_one_before_end_p (ei))
+	    ei_next (&stack.last ());
+	  else
+	    stack.pop ();
+	}
+    }
+
+  return retval;
+}
+
 /* Iterate the cfg cleanups, while anything changes.  */
 
 static bool
@@ -752,17 +779,11 @@ cleanup_tree_cfg_1 (void)
   /* We cannot use FOR_EACH_BB_FN for the BB iterations below
      since the basic blocks may get removed.  */
 
-  /* Start by iterating over all basic blocks looking for edge removal
-     opportunities.  Do this first because incoming SSA form may be
-     invalid and we want to avoid performing SSA related tasks such
+  /* Start by iterating over all basic blocks in PRE order looking for
+     edge removal opportunities.  Do this first because incoming SSA form
+     may be invalid and we want to avoid performing SSA related tasks such
      as propgating out a PHI node during BB merging in that state.  */
-  n = last_basic_block_for_fn (cfun);
-  for (i = NUM_FIXED_BLOCKS; i < n; i++)
-    {
-      bb = BASIC_BLOCK_FOR_FN (cfun, i);
-      if (bb)
-	retval |= cleanup_control_flow_bb (bb, true);
-    }
+  retval |= cleanup_control_flow_pre ();
 
   /* After doing the above SSA form should be valid (or an update SSA
      should be required).  */
@@ -789,7 +810,7 @@ cleanup_tree_cfg_1 (void)
       if (!bb)
 	continue;
 
-      retval |= cleanup_control_flow_bb (bb, false);
+      retval |= cleanup_control_flow_bb (bb);
       retval |= cleanup_tree_cfg_bb (bb);
     }
 
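As a side note, stripped of GCC's CFG data structures the new walk boils down to the following pattern (a stand-alone C++ sketch; the toy graph, cleanup_block and the other names are made up to illustrate the idea and are not the edge_iterator/auto_sbitmap code from the patch):

#include <algorithm>
#include <cstddef>
#include <cstdio>
#include <utility>
#include <vector>

/* Toy CFG; node 0 plays the role of the entry block.  Block 1 ends in a
   constant-false branch to block 5, so blocks 5 and 6 form a dead region.  */
static std::vector<std::vector<int>> succs
  = { { 1 }, { 2, 5 }, { 3 }, { 4 }, { }, { 6 }, { } };

/* Stand-in for cleanup_control_flow_bb: remove the known-dead outgoing
   edge of block 1 and report whether anything changed.  */
static bool
cleanup_block (int bb)
{
  std::printf ("cleaning up block %d\n", bb);
  if (bb != 1)
    return false;
  succs[1].erase (std::find (succs[1].begin (), succs[1].end (), 5));
  return true;
}

int
main ()
{
  bool changed = false;
  std::vector<bool> visited (succs.size (), false);
  /* Explicit DFS stack of (block, index of next outgoing edge).  */
  std::vector<std::pair<int, std::size_t>> stack;
  stack.push_back ({ 0, 0 });

  while (!stack.empty ())
    {
      if (stack.back ().second == succs[stack.back ().first].size ())
	{
	  stack.pop_back ();
	  continue;
	}
      int dest = succs[stack.back ().first][stack.back ().second++];
      if (!visited[dest])
	{
	  visited[dest] = true;
	  /* Clean up DEST before pushing its successor iterator, so only
	     edges that survive the cleanup are ever walked.  */
	  changed |= cleanup_block (dest);
	  stack.push_back ({ dest, 0 });
	}
    }

  std::printf ("anything changed: %d\n", (int) changed);
  return 0;
}

Running it cleans up blocks 1 through 4 only; blocks 5 and 6, reachable solely through the removed edge, are never pushed or visited, which is exactly the property the patch relies on to avoid looking at invalid IL in dead regions.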