gcc/
        * postreload-gcse.c (struct occr): Strengthen field "insn" from
        rtx to rtx_insn *.
        (struct unoccr): Likewise.
        (struct modifies_mem): Likewise.
        (alloc_mem): Likewise for local "insn".
        (insert_expr_in_table): Likewise for param "insn".
        (dump_expr_hash_table_entry): Likewise for local "insn".
        (oprs_unchanged_p): Likewise for param "insn".
        (load_killed_in_block_p): Likewise for local "setter".
        (record_last_reg_set_info): Likewise for param "insn".
        (record_last_reg_set_info_regno): Likewise.
        (record_last_mem_set_info): Likewise.
        (record_last_set_info): Likewise for local "last_set_insn".
        (record_opr_changes): Likewise for param "insn".
        (hash_scan_set): Likewise.
        (compute_hash_table): Likewise for local "insn".
        (get_avail_load_store_reg): Likewise for param "insn".
        (eliminate_partially_redundant_load): Likewise, also for locals
        "avail_insn", "next_pred_bb_end".  Replace use of NULL_RTX with
        NULL for insns.
        (eliminate_partially_redundant_loads): Likewise for local "insn".
---
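For reference, here is a minimal illustrative sketch (not GCC's actual
rtl.h declarations; the class and field names below are simplified
stand-ins) of the idiom this patch applies: rtx_insn derives from
rtx_def, so a field declared as rtx_insn * only accepts instruction
RTXes, while the conversion back to plain rtx stays implicit.  That is
also why the NULL_RTX initializers for these fields become plain NULL.

  struct rtx_def               /* stand-in for GCC's rtx_def */
  {
    int code;
  };
  typedef rtx_def *rtx;

  struct rtx_insn : rtx_def    /* stand-in for GCC's rtx_insn */
  {
  };

  struct occr_sketch           /* mirrors struct occr in the patch */
  {
    occr_sketch *next;
    rtx_insn *insn;            /* was "rtx insn"; now only insns fit */
    char deleted_p;
  };

  static void
  record_occurrence (occr_sketch *o, rtx_insn *insn)
  {
    o->insn = insn;            /* OK: exact type match */
    rtx x = o->insn;           /* OK: implicit upcast rtx_insn * -> rtx */
    (void) x;
    /* o->insn = x;               would not compile; it needs a checked
                                  downcast such as as_a <rtx_insn *> (x).  */
  }
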
 gcc/postreload-gcse.c | 62 +++++++++++++++++++++++++--------------------------
 1 file changed, 31 insertions(+), 31 deletions(-)

diff --git a/gcc/postreload-gcse.c b/gcc/postreload-gcse.c
index af2d731..73848b7 100644
--- a/gcc/postreload-gcse.c
+++ b/gcc/postreload-gcse.c
@@ -162,7 +162,7 @@ struct occr
   /* Next occurrence of this expression.  */
   struct occr *next;
   /* The insn that computes the expression.  */
-  rtx insn;
+  rtx_insn *insn;
   /* Nonzero if this [anticipatable] occurrence has been deleted.  */
   char deleted_p;
 };
@@ -175,7 +175,7 @@ struct unoccr
 {
   struct unoccr *next;
   edge pred;
-  rtx insn;
+  rtx_insn *insn;
 };
 
 static struct obstack unoccr_obstack;
@@ -194,7 +194,7 @@ static int *reg_avail_info;
 /* A list of insns that may modify memory within the current basic block.  */
 struct modifies_mem
 {
-  rtx insn;
+  rtx_insn *insn;
   struct modifies_mem *next;
 };
 static struct modifies_mem *modifies_mem_list;
@@ -218,12 +218,12 @@ static void alloc_mem (void);
 static void free_mem (void);
 
 /* Support for hash table construction and transformations.  */
-static bool oprs_unchanged_p (rtx, rtx, bool);
-static void record_last_reg_set_info (rtx, rtx);
-static void record_last_reg_set_info_regno (rtx, int);
-static void record_last_mem_set_info (rtx);
+static bool oprs_unchanged_p (rtx, rtx_insn *, bool);
+static void record_last_reg_set_info (rtx_insn *, rtx);
+static void record_last_reg_set_info_regno (rtx_insn *, int);
+static void record_last_mem_set_info (rtx_insn *);
 static void record_last_set_info (rtx, const_rtx, void *);
-static void record_opr_changes (rtx);
+static void record_opr_changes (rtx_insn *);
 
 static void find_mem_conflicts (rtx, const_rtx, void *);
 static int load_killed_in_block_p (int, rtx, bool);
@@ -231,7 +231,7 @@ static void reset_opr_set_tables (void);
 
 /* Hash table support.  */
 static hashval_t hash_expr (rtx, int *);
-static void insert_expr_in_table (rtx, rtx);
+static void insert_expr_in_table (rtx, rtx_insn *);
 static struct expr *lookup_expr_in_table (rtx);
 static void dump_hash_table (FILE *);
 
@@ -239,16 +239,16 @@ static void dump_hash_table (FILE *);
 static bool reg_killed_on_edge (rtx, edge);
 static bool reg_used_on_edge (rtx, edge);
 
-static rtx get_avail_load_store_reg (rtx);
+static rtx get_avail_load_store_reg (rtx_insn *);
 
 static bool bb_has_well_behaved_predecessors (basic_block);
 static struct occr* get_bb_avail_insn (basic_block, struct occr *);
-static void hash_scan_set (rtx);
+static void hash_scan_set (rtx_insn *);
 static void compute_hash_table (void);
 
 /* The work horses of this pass.  */
 static void eliminate_partially_redundant_load (basic_block,
-                                               rtx,
+                                               rtx_insn *,
                                                struct expr *);
 static void eliminate_partially_redundant_loads (void);
 
@@ -261,7 +261,7 @@ alloc_mem (void)
 {
   int i;
   basic_block bb;
-  rtx insn;
+  rtx_insn *insn;
 
   /* Find the largest UID and create a mapping from UIDs to CUIDs.  */
   uid_cuid = XCNEWVEC (int, get_max_uid () + 1);
@@ -322,7 +322,7 @@ free_mem (void)
    basic block.  */
 
 static void
-insert_expr_in_table (rtx x, rtx insn)
+insert_expr_in_table (rtx x, rtx_insn *insn)
 {
   int do_not_record_p;
   hashval_t hash;
@@ -443,7 +443,7 @@ dump_expr_hash_table_entry (expr **slot, FILE *file)
   occr = exprs->avail_occr;
   while (occr)
     {
-      rtx insn = occr->insn;
+      rtx_insn *insn = occr->insn;
       print_rtl_single (file, insn);
       fprintf (file, "\n");
       occr = occr->next;
@@ -491,7 +491,7 @@ reg_changed_after_insn_p (rtx x, int cuid)
    2) from INSN to the end of INSN's basic block if AFTER_INSN is true.  */
 
 static bool
-oprs_unchanged_p (rtx x, rtx insn, bool after_insn)
+oprs_unchanged_p (rtx x, rtx_insn *insn, bool after_insn)
 {
   int i, j;
   enum rtx_code code;
@@ -605,7 +605,7 @@ load_killed_in_block_p (int uid_limit, rtx x, bool after_insn)
 
   while (list_entry)
     {
-      rtx setter = list_entry->insn;
+      rtx_insn *setter = list_entry->insn;
 
       /* Ignore entries in the list that do not apply.  */
       if ((after_insn
@@ -641,7 +641,7 @@ load_killed_in_block_p (int uid_limit, rtx x, bool after_insn)
 /* Record register first/last/block set information for REGNO in INSN.  */
 
 static inline void
-record_last_reg_set_info (rtx insn, rtx reg)
+record_last_reg_set_info (rtx_insn *insn, rtx reg)
 {
   unsigned int regno, end_regno;
 
@@ -653,7 +653,7 @@ record_last_reg_set_info (rtx insn, rtx reg)
 }
 
 static inline void
-record_last_reg_set_info_regno (rtx insn, int regno)
+record_last_reg_set_info_regno (rtx_insn *insn, int regno)
 {
   reg_avail_info[regno] = INSN_CUID (insn);
 }
@@ -664,7 +664,7 @@ record_last_reg_set_info_regno (rtx insn, int regno)
    a CALL_INSN).  We merely need to record which insns modify memory.  */
 
 static void
-record_last_mem_set_info (rtx insn)
+record_last_mem_set_info (rtx_insn *insn)
 {
   struct modifies_mem *list_entry;
 
@@ -682,7 +682,7 @@ record_last_mem_set_info (rtx insn)
 static void
 record_last_set_info (rtx dest, const_rtx setter ATTRIBUTE_UNUSED, void *data)
 {
-  rtx last_set_insn = (rtx) data;
+  rtx_insn *last_set_insn = (rtx_insn *) data;
 
   if (GET_CODE (dest) == SUBREG)
     dest = SUBREG_REG (dest);
@@ -720,7 +720,7 @@ reset_opr_set_tables (void)
    This data is used by oprs_unchanged_p.  */
 
 static void
-record_opr_changes (rtx insn)
+record_opr_changes (rtx_insn *insn)
 {
   rtx note;
 
@@ -762,7 +762,7 @@ record_opr_changes (rtx insn)
    After reload we are interested in loads/stores only.  */
 
 static void
-hash_scan_set (rtx insn)
+hash_scan_set (rtx_insn *insn)
 {
   rtx pat = PATTERN (insn);
   rtx src = SET_SRC (pat);
@@ -830,7 +830,7 @@ compute_hash_table (void)
 
   FOR_EACH_BB_FN (bb, cfun)
     {
-      rtx insn;
+      rtx_insn *insn;
 
       /* First pass over the instructions records information used to
         determine when registers and memory are last set.
@@ -888,7 +888,7 @@ reg_used_on_edge (rtx reg, edge e)
 /* Return the loaded/stored register of a load/store instruction.  */
 
 static rtx
-get_avail_load_store_reg (rtx insn)
+get_avail_load_store_reg (rtx_insn *insn)
 {
   if (REG_P (SET_DEST (PATTERN (insn))))
     /* A load.  */
@@ -953,11 +953,11 @@ get_bb_avail_insn (basic_block bb, struct occr *occr)
    a redundancy is also worth doing, assuming it is possible.  */
 
 static void
-eliminate_partially_redundant_load (basic_block bb, rtx insn,
+eliminate_partially_redundant_load (basic_block bb, rtx_insn *insn,
                                    struct expr *expr)
 {
   edge pred;
-  rtx avail_insn = NULL_RTX;
+  rtx_insn *avail_insn = NULL;
   rtx avail_reg;
   rtx dest, pat;
   struct occr *a_occr;
@@ -986,9 +986,9 @@ eliminate_partially_redundant_load (basic_block bb, rtx insn,
   /* Check potential for replacing load with copy for predecessors.  */
   FOR_EACH_EDGE (pred, ei, bb->preds)
     {
-      rtx next_pred_bb_end;
+      rtx_insn *next_pred_bb_end;
 
-      avail_insn = NULL_RTX;
+      avail_insn = NULL;
       avail_reg = NULL_RTX;
       pred_bb = pred->src;
       next_pred_bb_end = NEXT_INSN (BB_END (pred_bb));
@@ -1051,7 +1051,7 @@ eliminate_partially_redundant_load (basic_block bb, rtx insn,
          not_ok_count += pred->count;
          unoccr = (struct unoccr *) obstack_alloc (&unoccr_obstack,
                                                    sizeof (struct unoccr));
-         unoccr->insn = NULL_RTX;
+         unoccr->insn = NULL;
          unoccr->pred = pred;
          unoccr->next = unavail_occrs;
          unavail_occrs = unoccr;
@@ -1153,7 +1153,7 @@ cleanup:
 static void
 eliminate_partially_redundant_loads (void)
 {
-  rtx insn;
+  rtx_insn *insn;
   basic_block bb;
 
   /* Note we start at block 1.  */
-- 
1.8.5.3
