author     David Malcolm <dmalcolm@redhat.com>    2014-08-22 19:05:50 +0000
committer  David Malcolm <dmalcolm@gcc.gnu.org>   2014-08-22 19:05:50 +0000
commit     6c6d76bec50feb6f9d9455d968c05de121b591d8 (patch)
tree       39adb1582ac134d060877453f15302b976990b5b /gcc/postreload-gcse.c
parent     38295e01e2688696523e999002060a39545d218b (diff)
download   gcc-6c6d76bec50feb6f9d9455d968c05de121b591d8.zip
           gcc-6c6d76bec50feb6f9d9455d968c05de121b591d8.tar.gz
           gcc-6c6d76bec50feb6f9d9455d968c05de121b591d8.tar.bz2
postreload-gcse.c: Use rtx_insn in various places
gcc/
	* postreload-gcse.c (struct occr): Strengthen field "insn" from rtx
	to rtx_insn *.
	(struct unoccr): Likewise.
	(struct modifies_mem): Likewise.
	(alloc_mem): Likewise for local "insn".
	(insert_expr_in_table): Likewise for param "insn".
	(dump_expr_hash_table_entry): Likewise for local "insn".
	(oprs_unchanged_p): Likewise for param "insn".
	(load_killed_in_block_p): Likewise for local "setter".
	(record_last_reg_set_info): Likewise for param "insn".
	(record_last_reg_set_info_regno): Likewise.
	(record_last_mem_set_info): Likewise.
	(record_last_set_info): Likewise for local "last_set_insn".
	(record_opr_changes): Likewise for param "insn".
	(hash_scan_set): Likewise.
	(compute_hash_table): Likewise for local "insn".
	(get_avail_load_store_reg): Likewise for param "insn".
	(eliminate_partially_redundant_load): Likewise, also for locals
	"avail_insn", "next_pred_bb_end".  Replace use of NULL_RTX with
	NULL for insns.
	(eliminate_partially_redundant_loads): Likewise for local "insn".

From-SVN: r214356
Diffstat (limited to 'gcc/postreload-gcse.c')
-rw-r--r--  gcc/postreload-gcse.c  62
1 file changed, 31 insertions(+), 31 deletions(-)
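Before the diff, here is a minimal standalone sketch of the pattern the patch applies. The types and function below (rtx_def, rtx_insn as used here, occr_sketch, record_occurrence) are simplified stand-ins, not GCC's real definitions. In GCC, rtx_insn is a subclass of rtx_def, so an rtx_insn * converts implicitly to rtx but not the other way around; strengthening a struct field or parameter from rtx to rtx_insn * therefore lets the compiler reject code that stores a non-insn rtx where an insn is expected, with no change in runtime behaviour.

/* Simplified stand-ins -- not GCC's real class hierarchy.  */
struct rtx_def { int code; };
typedef rtx_def *rtx;

/* In GCC, insns are a subclass of rtx_def; same idea here.  */
struct rtx_insn : rtx_def { int uid; };

/* Hypothetical analogue of struct occr with the strengthened field.  */
struct occr_sketch
{
  occr_sketch *next;
  rtx_insn *insn;	/* was "rtx insn" before this kind of change */
};

/* Hypothetical helper: only a genuine insn pointer can be stored.  */
void
record_occurrence (occr_sketch *o, rtx_insn *insn, rtx some_expr)
{
  o->insn = insn;		/* OK: rtx_insn * matches the field type */
  /* o->insn = some_expr;	   would no longer compile: rtx is not rtx_insn *  */
  o->next = 0;
  (void) some_expr;		/* unused in this sketch */
}

This is also why the NULL_RTX assignments in the hunks below become plain NULL: NULL_RTX is an rtx-typed expression, so it can no longer initialize the strengthened rtx_insn * fields without a cast.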
diff --git a/gcc/postreload-gcse.c b/gcc/postreload-gcse.c
index 97200ad..e8f0408 100644
--- a/gcc/postreload-gcse.c
+++ b/gcc/postreload-gcse.c
@@ -162,7 +162,7 @@ struct occr
/* Next occurrence of this expression. */
struct occr *next;
/* The insn that computes the expression. */
- rtx insn;
+ rtx_insn *insn;
/* Nonzero if this [anticipatable] occurrence has been deleted. */
char deleted_p;
};
@@ -175,7 +175,7 @@ struct unoccr
{
struct unoccr *next;
edge pred;
- rtx insn;
+ rtx_insn *insn;
};
static struct obstack unoccr_obstack;
@@ -194,7 +194,7 @@ static int *reg_avail_info;
/* A list of insns that may modify memory within the current basic block. */
struct modifies_mem
{
- rtx insn;
+ rtx_insn *insn;
struct modifies_mem *next;
};
static struct modifies_mem *modifies_mem_list;
@@ -218,12 +218,12 @@ static void alloc_mem (void);
static void free_mem (void);
/* Support for hash table construction and transformations. */
-static bool oprs_unchanged_p (rtx, rtx, bool);
-static void record_last_reg_set_info (rtx, rtx);
-static void record_last_reg_set_info_regno (rtx, int);
-static void record_last_mem_set_info (rtx);
+static bool oprs_unchanged_p (rtx, rtx_insn *, bool);
+static void record_last_reg_set_info (rtx_insn *, rtx);
+static void record_last_reg_set_info_regno (rtx_insn *, int);
+static void record_last_mem_set_info (rtx_insn *);
static void record_last_set_info (rtx, const_rtx, void *);
-static void record_opr_changes (rtx);
+static void record_opr_changes (rtx_insn *);
static void find_mem_conflicts (rtx, const_rtx, void *);
static int load_killed_in_block_p (int, rtx, bool);
@@ -231,7 +231,7 @@ static void reset_opr_set_tables (void);
/* Hash table support. */
static hashval_t hash_expr (rtx, int *);
-static void insert_expr_in_table (rtx, rtx);
+static void insert_expr_in_table (rtx, rtx_insn *);
static struct expr *lookup_expr_in_table (rtx);
static void dump_hash_table (FILE *);
@@ -239,16 +239,16 @@ static void dump_hash_table (FILE *);
static bool reg_killed_on_edge (rtx, edge);
static bool reg_used_on_edge (rtx, edge);
-static rtx get_avail_load_store_reg (rtx);
+static rtx get_avail_load_store_reg (rtx_insn *);
static bool bb_has_well_behaved_predecessors (basic_block);
static struct occr* get_bb_avail_insn (basic_block, struct occr *);
-static void hash_scan_set (rtx);
+static void hash_scan_set (rtx_insn *);
static void compute_hash_table (void);
/* The work horses of this pass. */
static void eliminate_partially_redundant_load (basic_block,
- rtx,
+ rtx_insn *,
struct expr *);
static void eliminate_partially_redundant_loads (void);
@@ -261,7 +261,7 @@ alloc_mem (void)
{
int i;
basic_block bb;
- rtx insn;
+ rtx_insn *insn;
/* Find the largest UID and create a mapping from UIDs to CUIDs. */
uid_cuid = XCNEWVEC (int, get_max_uid () + 1);
@@ -323,7 +323,7 @@ free_mem (void)
basic block. */
static void
-insert_expr_in_table (rtx x, rtx insn)
+insert_expr_in_table (rtx x, rtx_insn *insn)
{
int do_not_record_p;
hashval_t hash;
@@ -444,7 +444,7 @@ dump_expr_hash_table_entry (expr **slot, FILE *file)
occr = exprs->avail_occr;
while (occr)
{
- rtx insn = occr->insn;
+ rtx_insn *insn = occr->insn;
print_rtl_single (file, insn);
fprintf (file, "\n");
occr = occr->next;
@@ -492,7 +492,7 @@ reg_changed_after_insn_p (rtx x, int cuid)
2) from INSN to the end of INSN's basic block if AFTER_INSN is true. */
static bool
-oprs_unchanged_p (rtx x, rtx insn, bool after_insn)
+oprs_unchanged_p (rtx x, rtx_insn *insn, bool after_insn)
{
int i, j;
enum rtx_code code;
@@ -606,7 +606,7 @@ load_killed_in_block_p (int uid_limit, rtx x, bool after_insn)
while (list_entry)
{
- rtx setter = list_entry->insn;
+ rtx_insn *setter = list_entry->insn;
/* Ignore entries in the list that do not apply. */
if ((after_insn
@@ -642,7 +642,7 @@ load_killed_in_block_p (int uid_limit, rtx x, bool after_insn)
/* Record register first/last/block set information for REGNO in INSN. */
static inline void
-record_last_reg_set_info (rtx insn, rtx reg)
+record_last_reg_set_info (rtx_insn *insn, rtx reg)
{
unsigned int regno, end_regno;
@@ -654,7 +654,7 @@ record_last_reg_set_info (rtx insn, rtx reg)
}
static inline void
-record_last_reg_set_info_regno (rtx insn, int regno)
+record_last_reg_set_info_regno (rtx_insn *insn, int regno)
{
reg_avail_info[regno] = INSN_CUID (insn);
}
@@ -665,7 +665,7 @@ record_last_reg_set_info_regno (rtx insn, int regno)
a CALL_INSN). We merely need to record which insns modify memory. */
static void
-record_last_mem_set_info (rtx insn)
+record_last_mem_set_info (rtx_insn *insn)
{
struct modifies_mem *list_entry;
@@ -683,7 +683,7 @@ record_last_mem_set_info (rtx insn)
static void
record_last_set_info (rtx dest, const_rtx setter ATTRIBUTE_UNUSED, void *data)
{
- rtx last_set_insn = (rtx) data;
+ rtx_insn *last_set_insn = (rtx_insn *) data;
if (GET_CODE (dest) == SUBREG)
dest = SUBREG_REG (dest);
@@ -721,7 +721,7 @@ reset_opr_set_tables (void)
This data is used by oprs_unchanged_p. */
static void
-record_opr_changes (rtx insn)
+record_opr_changes (rtx_insn *insn)
{
rtx note;
@@ -763,7 +763,7 @@ record_opr_changes (rtx insn)
After reload we are interested in loads/stores only. */
static void
-hash_scan_set (rtx insn)
+hash_scan_set (rtx_insn *insn)
{
rtx pat = PATTERN (insn);
rtx src = SET_SRC (pat);
@@ -831,7 +831,7 @@ compute_hash_table (void)
FOR_EACH_BB_FN (bb, cfun)
{
- rtx insn;
+ rtx_insn *insn;
/* First pass over the instructions records information used to
determine when registers and memory are last set.
@@ -889,7 +889,7 @@ reg_used_on_edge (rtx reg, edge e)
/* Return the loaded/stored register of a load/store instruction. */
static rtx
-get_avail_load_store_reg (rtx insn)
+get_avail_load_store_reg (rtx_insn *insn)
{
if (REG_P (SET_DEST (PATTERN (insn))))
/* A load. */
@@ -954,11 +954,11 @@ get_bb_avail_insn (basic_block bb, struct occr *occr)
a redundancy is also worth doing, assuming it is possible. */
static void
-eliminate_partially_redundant_load (basic_block bb, rtx insn,
+eliminate_partially_redundant_load (basic_block bb, rtx_insn *insn,
struct expr *expr)
{
edge pred;
- rtx avail_insn = NULL_RTX;
+ rtx_insn *avail_insn = NULL;
rtx avail_reg;
rtx dest, pat;
struct occr *a_occr;
@@ -987,9 +987,9 @@ eliminate_partially_redundant_load (basic_block bb, rtx insn,
/* Check potential for replacing load with copy for predecessors. */
FOR_EACH_EDGE (pred, ei, bb->preds)
{
- rtx next_pred_bb_end;
+ rtx_insn *next_pred_bb_end;
- avail_insn = NULL_RTX;
+ avail_insn = NULL;
avail_reg = NULL_RTX;
pred_bb = pred->src;
next_pred_bb_end = NEXT_INSN (BB_END (pred_bb));
@@ -1052,7 +1052,7 @@ eliminate_partially_redundant_load (basic_block bb, rtx insn,
not_ok_count += pred->count;
unoccr = (struct unoccr *) obstack_alloc (&unoccr_obstack,
sizeof (struct unoccr));
- unoccr->insn = NULL_RTX;
+ unoccr->insn = NULL;
unoccr->pred = pred;
unoccr->next = unavail_occrs;
unavail_occrs = unoccr;
@@ -1154,7 +1154,7 @@ cleanup:
static void
eliminate_partially_redundant_loads (void)
{
- rtx insn;
+ rtx_insn *insn;
basic_block bb;
/* Note we start at block 1. */