gcc/
	* ifcvt.c (count_bb_insns): Strengthen local "insn" from rtx to
	rtx_insn *.
	(cheap_bb_rtx_cost_p): Likewise.
	(first_active_insn): Likewise for return type and local "insn".
	(last_active_insn): Likewise for return type and locals "insn",
	"head".
	(struct noce_if_info): Likewise for fields "jump", "insn_a",
	"insn_b".
	(end_ifcvt_sequence): Likewise for return type and locals "insn",
	"seq".
	(noce_try_move): Likewise for local "seq".
	(noce_try_store_flag): Likewise.
	(noce_try_store_flag_constants): Likewise.
	(noce_try_addcc): Likewise.
	(noce_try_store_flag_mask): Likewise.
	(noce_try_cmove): Likewise.
	(noce_try_minmax): Likewise.
	(noce_try_abs): Likewise.
	(noce_try_sign_mask): Likewise.
	(noce_try_bitop): Likewise.
	(noce_can_store_speculate_p): Likewise for local "insn".
	(noce_process_if_block): Likewise for locals "insn_a", "insn_b",
	"seq".
	(check_cond_move_block): Likewise for local "insn".
	(cond_move_convert_if_block): Likewise.
	(cond_move_process_if_block): Likewise for locals "seq",
	"loc_insn".
	(noce_find_if_block): Likewise for local "jump".
	(merge_if_block): Likewise for local "last".
	(block_jumps_and_fallthru_p): Likewise for locals "insn", "end".
	(find_cond_trap): Likewise for locals "trap", "jump", "newjump".
	(block_has_only_trap): Likewise for return type and local "trap".
	(find_if_case_1): Likewise for local "jump".
	(dead_or_predicable): Likewise for locals "head", "end", "jump",
	"insn".
---
 gcc/ifcvt.c | 136 ++++++++++++++++++++++++++++++++++--------------------------
 1 file changed, 78 insertions(+), 58 deletions(-)
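For reviewers skimming the ChangeLog, the conversion follows one mechanical
pattern throughout: each insn-walking local, field, or return type declared as
rtx is strengthened to rtx_insn *, and the matching NULL_RTX returns and
assignments become plain NULL (NULL_RTX is a plain-rtx-typed constant).  As a
reading aid, here is the post-patch body of first_active_insn, excerpted from
the corresponding hunk below; the "was:" comments are annotations added for
this note, not part of the patch, and the code is GCC-internal, so it builds
only within ifcvt.c.

  /* Return the first non-jump active insn in the basic block.  */

  static rtx_insn *                      /* was: static rtx */
  first_active_insn (basic_block bb)
  {
    rtx_insn *insn = BB_HEAD (bb);       /* was: rtx insn */

    if (LABEL_P (insn))
      {
        if (insn == BB_END (bb))
          return NULL;                   /* was: return NULL_RTX */
        insn = NEXT_INSN (insn);
      }

    while (NOTE_P (insn) || DEBUG_INSN_P (insn))
      {
        if (insn == BB_END (bb))
          return NULL;                   /* was: return NULL_RTX */
        insn = NEXT_INSN (insn);
      }

    if (JUMP_P (insn))
      return NULL;                       /* was: return NULL_RTX */

    return insn;
  }

The same shape repeats in every function listed above; only the fields of
struct noce_if_info differ in that the strengthening applies to structure
members rather than locals.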
diff --git a/gcc/ifcvt.c b/gcc/ifcvt.c index 3c127b1..c9cca6e 100644 --- a/gcc/ifcvt.c +++ b/gcc/ifcvt.c @@ -86,8 +86,8 @@ static int cond_exec_changed_p; /* Forward references. */ static int count_bb_insns (const_basic_block); static bool cheap_bb_rtx_cost_p (const_basic_block, int, int); -static rtx first_active_insn (basic_block); -static rtx last_active_insn (basic_block, int); +static rtx_insn *first_active_insn (basic_block); +static rtx_insn *last_active_insn (basic_block, int); static rtx find_active_insn_before (basic_block, rtx); static rtx find_active_insn_after (basic_block, rtx); static basic_block block_fallthru (basic_block); @@ -106,7 +106,7 @@ static int find_if_case_2 (basic_block, edge, edge); static int dead_or_predicable (basic_block, basic_block, basic_block, edge, int); static void noce_emit_move_insn (rtx, rtx); -static rtx block_has_only_trap (basic_block); +static rtx_insn *block_has_only_trap (basic_block); /* Count the number of non-jump active insns in BB. */ @@ -114,7 +114,7 @@ static int count_bb_insns (const_basic_block bb) { int count = 0; - rtx insn = BB_HEAD (bb); + rtx_insn *insn = BB_HEAD (bb); while (1) { @@ -141,7 +141,7 @@ static bool cheap_bb_rtx_cost_p (const_basic_block bb, int scale, int max_cost) { int count = 0; - rtx insn = BB_HEAD (bb); + rtx_insn *insn = BB_HEAD (bb); bool speed = optimize_bb_for_speed_p (bb); /* Set scale to REG_BR_PROB_BASE to void the identical scaling @@ -204,38 +204,38 @@ cheap_bb_rtx_cost_p (const_basic_block bb, int scale, int max_cost) /* Return the first non-jump active insn in the basic block. */ -static rtx +static rtx_insn * first_active_insn (basic_block bb) { - rtx insn = BB_HEAD (bb); + rtx_insn *insn = BB_HEAD (bb); if (LABEL_P (insn)) { if (insn == BB_END (bb)) - return NULL_RTX; + return NULL; insn = NEXT_INSN (insn); } while (NOTE_P (insn) || DEBUG_INSN_P (insn)) { if (insn == BB_END (bb)) - return NULL_RTX; + return NULL; insn = NEXT_INSN (insn); } if (JUMP_P (insn)) - return NULL_RTX; + return NULL; return insn; } /* Return the last non-jump active (non-jump) insn in the basic block. */ -static rtx +static rtx_insn * last_active_insn (basic_block bb, int skip_use_p) { - rtx insn = BB_END (bb); - rtx head = BB_HEAD (bb); + rtx_insn *insn = BB_END (bb); + rtx_insn *head = BB_HEAD (bb); while (NOTE_P (insn) || JUMP_P (insn) @@ -245,12 +245,12 @@ last_active_insn (basic_block bb, int skip_use_p) && GET_CODE (PATTERN (insn)) == USE)) { if (insn == head) - return NULL_RTX; + return NULL; insn = PREV_INSN (insn); } if (LABEL_P (insn)) - return NULL_RTX; + return NULL; return insn; } @@ -756,7 +756,7 @@ struct noce_if_info basic_block test_bb, then_bb, else_bb, join_bb; /* The jump that ends TEST_BB. */ - rtx jump; + rtx_insn *jump; /* The jump condition. */ rtx cond; @@ -770,7 +770,7 @@ struct noce_if_info COND_EARLIEST, or NULL_RTX. In the former case, the insn operands are still valid, as if INSN_B was moved down below the jump. */ - rtx insn_a, insn_b; + rtx_insn *insn_a, *insn_b; /* The SET_SRC of INSN_A and INSN_B. */ rtx a, b; @@ -983,11 +983,11 @@ noce_emit_move_insn (rtx x, rtx y) that are instructions are unshared, recognizable non-jump insns. On failure, this function returns a NULL_RTX. 
*/ -static rtx +static rtx_insn * end_ifcvt_sequence (struct noce_if_info *if_info) { - rtx insn; - rtx seq = get_insns (); + rtx_insn *insn; + rtx_insn *seq = get_insns (); set_used_flags (if_info->x); set_used_flags (if_info->cond); @@ -1003,7 +1003,7 @@ end_ifcvt_sequence (struct noce_if_info *if_info) for (insn = seq; insn; insn = NEXT_INSN (insn)) if (JUMP_P (insn) || recog_memoized (insn) == -1) - return NULL_RTX; + return NULL; return seq; } @@ -1016,7 +1016,8 @@ noce_try_move (struct noce_if_info *if_info) { rtx cond = if_info->cond; enum rtx_code code = GET_CODE (cond); - rtx y, seq; + rtx y; + rtx_insn *seq; if (code != NE && code != EQ) return FALSE; @@ -1063,7 +1064,8 @@ static int noce_try_store_flag (struct noce_if_info *if_info) { int reversep; - rtx target, seq; + rtx target; + rtx_insn *seq; if (CONST_INT_P (if_info->b) && INTVAL (if_info->b) == STORE_FLAG_VALUE @@ -1106,7 +1108,8 @@ noce_try_store_flag (struct noce_if_info *if_info) static int noce_try_store_flag_constants (struct noce_if_info *if_info) { - rtx target, seq; + rtx target; + rtx_insn *seq; int reversep; HOST_WIDE_INT itrue, ifalse, diff, tmp; int normalize, can_reverse; @@ -1236,7 +1239,8 @@ noce_try_store_flag_constants (struct noce_if_info *if_info) static int noce_try_addcc (struct noce_if_info *if_info) { - rtx target, seq; + rtx target; + rtx_insn *seq; int subtract, normalize; if (GET_CODE (if_info->a) == PLUS @@ -1326,7 +1330,8 @@ noce_try_addcc (struct noce_if_info *if_info) static int noce_try_store_flag_mask (struct noce_if_info *if_info) { - rtx target, seq; + rtx target; + rtx_insn *seq; int reversep; reversep = 0; @@ -1486,7 +1491,8 @@ static int noce_try_cmove (struct noce_if_info *if_info) { enum rtx_code code; - rtx target, seq; + rtx target; + rtx_insn *seq; if ((CONSTANT_P (if_info->a) || register_operand (if_info->a, VOIDmode)) && (CONSTANT_P (if_info->b) || register_operand (if_info->b, VOIDmode))) @@ -1876,7 +1882,8 @@ noce_get_alt_condition (struct noce_if_info *if_info, rtx target, static int noce_try_minmax (struct noce_if_info *if_info) { - rtx cond, earliest, target, seq; + rtx cond, earliest, target; + rtx_insn *seq; enum rtx_code code, op; int unsignedp; @@ -1971,7 +1978,8 @@ noce_try_minmax (struct noce_if_info *if_info) static int noce_try_abs (struct noce_if_info *if_info) { - rtx cond, earliest, target, seq, a, b, c; + rtx cond, earliest, target, a, b, c; + rtx_insn *seq; int negate; bool one_cmpl = false; @@ -2116,7 +2124,8 @@ noce_try_abs (struct noce_if_info *if_info) static int noce_try_sign_mask (struct noce_if_info *if_info) { - rtx cond, t, m, c, seq; + rtx cond, t, m, c; + rtx_insn *seq; enum machine_mode mode; enum rtx_code code; bool t_unconditional; @@ -2194,7 +2203,8 @@ noce_try_sign_mask (struct noce_if_info *if_info) static int noce_try_bitop (struct noce_if_info *if_info) { - rtx cond, x, a, result, seq; + rtx cond, x, a, result; + rtx_insn *seq; enum machine_mode mode; enum rtx_code code; int bitnum; @@ -2429,7 +2439,7 @@ noce_can_store_speculate_p (basic_block top_bb, const_rtx mem) dominator != NULL; dominator = get_immediate_dominator (CDI_POST_DOMINATORS, dominator)) { - rtx insn; + rtx_insn *insn; FOR_BB_INSNS (dominator, insn) { @@ -2466,7 +2476,7 @@ noce_process_if_block (struct noce_if_info *if_info) basic_block join_bb = if_info->join_bb; /* JOIN */ rtx jump = if_info->jump; rtx cond = if_info->cond; - rtx insn_a, insn_b; + rtx_insn *insn_a, *insn_b; rtx set_a, set_b; rtx orig_x, x, a, b; @@ -2533,7 +2543,10 @@ noce_process_if_block (struct 
noce_if_info *if_info) || reg_overlap_mentioned_p (x, cond) || reg_overlap_mentioned_p (x, a) || modified_between_p (x, insn_b, jump)) - insn_b = set_b = NULL_RTX; + { + insn_b = NULL; + set_b = NULL_RTX; + } } /* If x has side effects then only the if-then-else form is safe to @@ -2602,7 +2615,7 @@ noce_process_if_block (struct noce_if_info *if_info) if ((note = find_reg_note (insn_b, REG_EQUAL, NULL_RTX)) != 0) remove_note (insn_b, note); - insn_b = NULL_RTX; + insn_b = NULL; } /* If we have "x = b; if (...) x = a;", and x has side-effects, then x must be executed twice. */ @@ -2669,7 +2682,8 @@ noce_process_if_block (struct noce_if_info *if_info) if (!else_bb && set_b) { - insn_b = set_b = NULL_RTX; + insn_b = NULL; + set_b = NULL_RTX; b = orig_x; goto retry; } @@ -2681,7 +2695,7 @@ noce_process_if_block (struct noce_if_info *if_info) /* If we used a temporary, fix it up now. */ if (orig_x != x) { - rtx seq; + rtx_insn *seq; start_sequence (); noce_emit_move_insn (orig_x, x); @@ -2731,7 +2745,7 @@ check_cond_move_block (basic_block bb, vec<rtx> *regs, rtx cond) { - rtx insn; + rtx_insn *insn; /* We can only handle simple jumps at the end of the basic block. It is almost impossible to update the CFG otherwise. */ @@ -2814,7 +2828,8 @@ cond_move_convert_if_block (struct noce_if_info *if_infop, bool else_block_p) { enum rtx_code code; - rtx insn, cond_arg0, cond_arg1; + rtx_insn *insn; + rtx cond_arg0, cond_arg1; code = GET_CODE (cond); cond_arg0 = XEXP (cond, 0); @@ -2879,7 +2894,7 @@ cond_move_process_if_block (struct noce_if_info *if_info) basic_block join_bb = if_info->join_bb; rtx jump = if_info->jump; rtx cond = if_info->cond; - rtx seq, loc_insn; + rtx_insn *seq, *loc_insn; rtx reg; int c; struct pointer_map_t *then_vals; @@ -3010,7 +3025,8 @@ noce_find_if_block (basic_block test_bb, edge then_edge, edge else_edge, { basic_block then_bb, else_bb, join_bb; bool then_else_reversed = false; - rtx jump, cond; + rtx_insn *jump; + rtx cond; rtx cond_earliest; struct noce_if_info if_info; @@ -3206,7 +3222,7 @@ merge_if_block (struct ce_if_block * ce_info) if (! join_bb) { - rtx last = BB_END (combo_bb); + rtx_insn *last = BB_END (combo_bb); /* The outgoing edge for the current COMBO block should already be correct. Verify this. */ @@ -3361,8 +3377,8 @@ block_jumps_and_fallthru_p (basic_block cur_bb, basic_block target_bb) edge cur_edge; int fallthru_p = FALSE; int jump_p = FALSE; - rtx insn; - rtx end; + rtx_insn *insn; + rtx_insn *end; int n_insns = 0; edge_iterator ei; @@ -3664,7 +3680,8 @@ find_cond_trap (basic_block test_bb, edge then_edge, edge else_edge) basic_block then_bb = then_edge->dest; basic_block else_bb = else_edge->dest; basic_block other_bb, trap_bb; - rtx trap, jump, cond, cond_earliest, seq; + rtx_insn *trap, *jump; + rtx cond, cond_earliest, seq; enum rtx_code code; /* Locate the block with the trap instruction. */ @@ -3734,7 +3751,8 @@ find_cond_trap (basic_block test_bb, edge then_edge, edge else_edge) single_succ_edge (test_bb)->flags |= EDGE_FALLTHRU; else if (trap_bb == then_bb) { - rtx lab, newjump; + rtx lab; + rtx_insn *newjump; lab = JUMP_LABEL (jump); newjump = emit_jump_insn_after (gen_jump (lab), jump); @@ -3757,25 +3775,25 @@ find_cond_trap (basic_block test_bb, edge then_edge, edge else_edge) /* Subroutine of find_cond_trap: if BB contains only a trap insn, return it. */ -static rtx +static rtx_insn * block_has_only_trap (basic_block bb) { - rtx trap; + rtx_insn *trap; /* We're not the exit block. 
*/ if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun)) - return NULL_RTX; + return NULL; /* The block must have no successors. */ if (EDGE_COUNT (bb->succs) > 0) - return NULL_RTX; + return NULL; /* The only instruction in the THEN block must be the trap. */ trap = first_active_insn (bb); if (! (trap == BB_END (bb) && GET_CODE (PATTERN (trap)) == TRAP_IF && TRAP_CONDITION (PATTERN (trap)) == const_true_rtx)) - return NULL_RTX; + return NULL; return trap; } @@ -3923,7 +3941,7 @@ find_if_case_1 (basic_block test_bb, edge then_edge, edge else_edge) if (else_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)) { - rtx jump = BB_END (else_edge->src); + rtx_insn *jump = BB_END (else_edge->src); gcc_assert (JUMP_P (jump)); else_target = JUMP_LABEL (jump); } @@ -4098,7 +4116,8 @@ dead_or_predicable (basic_block test_bb, basic_block merge_bb, basic_block other_bb, edge dest_edge, int reversep) { basic_block new_dest = dest_edge->dest; - rtx head, end, jump, earliest = NULL_RTX, old_dest; + rtx_insn *head, *end, *jump; + rtx earliest = NULL_RTX, old_dest; bitmap merge_set = NULL; /* Number of pending changes. */ int n_validated_changes = 0; @@ -4128,7 +4147,7 @@ dead_or_predicable (basic_block test_bb, basic_block merge_bb, { if (head == end) { - head = end = NULL_RTX; + head = end = NULL; goto no_body; } head = NEXT_INSN (head); @@ -4140,7 +4159,7 @@ dead_or_predicable (basic_block test_bb, basic_block merge_bb, { if (head == end) { - head = end = NULL_RTX; + head = end = NULL; goto no_body; } end = PREV_INSN (end); @@ -4152,7 +4171,7 @@ dead_or_predicable (basic_block test_bb, basic_block merge_bb, can lead to one of the paths of the branch having wrong unwind info. */ if (epilogue_completed) { - rtx insn = head; + rtx_insn *insn = head; while (1) { if (INSN_P (insn) && RTX_FRAME_RELATED_P (insn)) @@ -4213,7 +4232,8 @@ dead_or_predicable (basic_block test_bb, basic_block merge_bb, /* Try the NCE path if the CE path did not result in any changes. */ if (n_validated_changes == 0) { - rtx cond, insn; + rtx cond; + rtx_insn *insn; regset live; bool success; @@ -4356,7 +4376,7 @@ dead_or_predicable (basic_block test_bb, basic_block merge_bb, /* Move the insns out of MERGE_BB to before the branch. */ if (head != NULL) { - rtx insn; + rtx_insn *insn; if (end == BB_END (merge_bb)) SET_BB_END (merge_bb) = PREV_INSN (head); -- 1.8.5.3