author    Zdenek Dvorak <rakdver@atrey.karlin.mff.cuni.cz>  2002-05-27 15:45:44 +0200
committer Zdenek Dvorak <rakdver@gcc.gnu.org>               2002-05-27 13:45:44 +0000
commit    d55bc081750ac5dba90e6043071408ff6d7f29ba (patch)
tree      5e75f1f3015434e7c5c88a788dd95d9066e0d55f /gcc/df.c
parent    71d2c5bd9b1373e2db43821fc64597f9c067e442 (diff)
basic-block.h (last_basic_block): Defined as synonym for n_basic_blocks.
* basic-block.h (last_basic_block): Defined as synonym for n_basic_blocks.
* cfganal.c (mark_dfs_back_edges, flow_reverse_top_sort_order_compute,
flow_depth_first_order_compute, flow_preorder_transversal_compute,
flow_dfs_compute_reverse_init): Replaced relevant occurrences of
n_basic_blocks with last_basic_block.
* cfgbuild.c (make_edges): Likewise.
* cfgloop.c (flow_loop_scan, flow_loops_find): Likewise.
* cfgrtl.c (verify_flow_info, purge_all_dead_edges): Likewise.
* combine.c (combine_instructions): Likewise.
* df.c (df_alloc, df_analyse_1, df_analyse, iterative_dataflow_sbitmap,
iterative_dataflow_bitmap): Likewise.
* dominance.c (init_dom_info, calc_dfs_tree_nonrec, calc_dfs_tree,
calc_idoms, idoms_to_doms): Likewise.
* flow.c (update_life_info_in_dirty_blocks, free_basic_block_vars):
Likewise.
* gcse.c (gcse_main, alloc_gcse_mem, compute_local_properties,
compute_hash_table, expr_reaches_here_p, one_classic_gcse_pass,
one_cprop_pass, compute_pre_data, pre_expr_reaches_here_p,
one_pre_gcse_pass, compute_transpout, delete_null_pointer_checks_1,
delete_null_pointer_checks, compute_code_hoist_vbeinout,
hoist_expr_reaches_here_p, hoist_code, one_code_hoisting_pass,
compute_store_table, build_store_vectors): Likewise.
* haifa-sched.c (sched_init): Likewise.
* ifcvt.c (if_convert): Likewise.
* lcm.c (compute_antinout_edge, compute_laterin, compute_insert_delete,
pre_edge_lcm, compute_available, compute_nearerout,
compute_rev_insert_delete, pre_edge_rev_lcm, optimize_mode_switching):
Likewise.
* predict.c (estimate_probability, process_note_prediction,
note_prediction_to_br_prob): Likewise.
* profile.c (GCOV_INDEX_TO_BB, BB_TO_GCOV_INDEX): Likewise.
* recog.c (split_all_insns, peephole2_optimize): Likewise.
* regrename.c (copyprop_hardreg_forward): Likewise.
* resource.c (init_resource_info): Likewise.
* sched-rgn.c (build_control_flow, find_rgns, compute_trg_info,
init_regions, schedule_insns): Likewise.
* ssa-ccp.c (ssa_const_prop): Likewise.
* ssa-dce.c (ssa_eliminate_dead_code): Likewise.
* ssa.c (compute_dominance_frontiers,
compute_iterated_dominance_frontiers, convert_to_ssa): Likewise.
* df.c (df_refs_unlink): Fix FOR_EACH_BB usage (in #if 0'ed code).
* gcse.c (alloc_rd_mem, alloc_avail_expr_mem): Use n_blocks for vector
sizes consistently.

From-SVN: r53924
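As a standalone illustration of the sizing idiom this patch adopts (a sketch, not GCC code): arrays indexed by bb->index are now sized by last_basic_block, the bound on block index values, rather than n_basic_blocks, the count of live blocks. At this revision last_basic_block is defined as a synonym for n_basic_blocks, but once block indices are allowed to become sparse the two diverge, and only the former is a safe array size.

    /* Standalone illustration (not GCC code) of why per-block arrays are
       sized by last_basic_block rather than n_basic_blocks: once block
       indices can be sparse, the highest index bounds the array, not the
       number of live blocks.  */
    #include <stdio.h>
    #include <stdlib.h>

    int
    main (void)
    {
      /* Suppose blocks 0, 1, 3 and 4 are live: four blocks, highest index 4.  */
      int live_index[] = { 0, 1, 3, 4 };
      int n_basic_blocks = 4;      /* number of live blocks      */
      int last_basic_block = 5;    /* one past the highest index */
      int i;

      /* Sizing by last_basic_block keeps in[index] in bounds for every live
         block; sizing by n_basic_blocks would overflow at index 4.  */
      int *in = calloc (last_basic_block, sizeof (int));
      for (i = 0; i < n_basic_blocks; i++)
        in[live_index[i]] = 1;

      for (i = 0; i < last_basic_block; i++)
        printf ("block %d: %s\n", i, in[i] ? "live" : "hole");

      free (in);
      return 0;
    }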
Diffstat (limited to 'gcc/df.c')
-rw-r--r--  gcc/df.c  |  48
1 file changed, 23 insertions, 25 deletions
diff --git a/gcc/df.c b/gcc/df.c
index 4711e33..a35978e 100644
--- a/gcc/df.c
+++ b/gcc/df.c
@@ -547,7 +547,7 @@ df_alloc (df, n_regs)
df->uses = xmalloc (df->use_size * sizeof (*df->uses));
df->n_regs = n_regs;
- df->n_bbs = n_basic_blocks;
+ df->n_bbs = last_basic_block;
/* Allocate temporary working array used during local dataflow analysis. */
df->reg_def_last = xmalloc (df->n_regs * sizeof (struct ref *));
@@ -561,7 +561,7 @@ df_alloc (df, n_regs)
df->flags = 0;
- df->bbs = xcalloc (df->n_bbs, sizeof (struct bb_info));
+ df->bbs = xcalloc (last_basic_block, sizeof (struct bb_info));
df->all_blocks = BITMAP_XMALLOC ();
FOR_EACH_BB (bb)
@@ -2006,9 +2006,9 @@ df_analyse_1 (df, blocks, flags, update)
df->dfs_order = xmalloc (sizeof(int) * n_basic_blocks);
df->rc_order = xmalloc (sizeof(int) * n_basic_blocks);
df->rts_order = xmalloc (sizeof(int) * n_basic_blocks);
- df->inverse_dfs_map = xmalloc (sizeof(int) * n_basic_blocks);
- df->inverse_rc_map = xmalloc (sizeof(int) * n_basic_blocks);
- df->inverse_rts_map = xmalloc (sizeof(int) * n_basic_blocks);
+ df->inverse_dfs_map = xmalloc (sizeof(int) * last_basic_block);
+ df->inverse_rc_map = xmalloc (sizeof(int) * last_basic_block);
+ df->inverse_rts_map = xmalloc (sizeof(int) * last_basic_block);
flow_depth_first_order_compute (df->dfs_order, df->rc_order);
flow_reverse_top_sort_order_compute (df->rts_order);
@@ -2023,10 +2023,10 @@ df_analyse_1 (df, blocks, flags, update)
/* Compute the sets of gens and kills for the defs of each bb. */
df_rd_local_compute (df, df->flags & DF_RD ? blocks : df->all_blocks);
{
- bitmap *in = xmalloc (sizeof (bitmap) * n_basic_blocks);
- bitmap *out = xmalloc (sizeof (bitmap) * n_basic_blocks);
- bitmap *gen = xmalloc (sizeof (bitmap) * n_basic_blocks);
- bitmap *kill = xmalloc (sizeof (bitmap) * n_basic_blocks);
+ bitmap *in = xmalloc (sizeof (bitmap) * last_basic_block);
+ bitmap *out = xmalloc (sizeof (bitmap) * last_basic_block);
+ bitmap *gen = xmalloc (sizeof (bitmap) * last_basic_block);
+ bitmap *kill = xmalloc (sizeof (bitmap) * last_basic_block);
FOR_EACH_BB (bb)
{
in[bb->index] = DF_BB_INFO (df, bb)->rd_in;
@@ -2059,10 +2059,10 @@ df_analyse_1 (df, blocks, flags, update)
uses in each bb. */
df_ru_local_compute (df, df->flags & DF_RU ? blocks : df->all_blocks);
{
- bitmap *in = xmalloc (sizeof (bitmap) * n_basic_blocks);
- bitmap *out = xmalloc (sizeof (bitmap) * n_basic_blocks);
- bitmap *gen = xmalloc (sizeof (bitmap) * n_basic_blocks);
- bitmap *kill = xmalloc (sizeof (bitmap) * n_basic_blocks);
+ bitmap *in = xmalloc (sizeof (bitmap) * last_basic_block);
+ bitmap *out = xmalloc (sizeof (bitmap) * last_basic_block);
+ bitmap *gen = xmalloc (sizeof (bitmap) * last_basic_block);
+ bitmap *kill = xmalloc (sizeof (bitmap) * last_basic_block);
FOR_EACH_BB (bb)
{
in[bb->index] = DF_BB_INFO (df, bb)->ru_in;
@@ -2098,10 +2098,10 @@ df_analyse_1 (df, blocks, flags, update)
/* Compute the sets of defs and uses of live variables. */
df_lr_local_compute (df, df->flags & DF_LR ? blocks : df->all_blocks);
{
- bitmap *in = xmalloc (sizeof (bitmap) * n_basic_blocks);
- bitmap *out = xmalloc (sizeof (bitmap) * n_basic_blocks);
- bitmap *use = xmalloc (sizeof (bitmap) * n_basic_blocks);
- bitmap *def = xmalloc (sizeof (bitmap) * n_basic_blocks);
+ bitmap *in = xmalloc (sizeof (bitmap) * last_basic_block);
+ bitmap *out = xmalloc (sizeof (bitmap) * last_basic_block);
+ bitmap *use = xmalloc (sizeof (bitmap) * last_basic_block);
+ bitmap *def = xmalloc (sizeof (bitmap) * last_basic_block);
FOR_EACH_BB (bb)
{
in[bb->index] = DF_BB_INFO (df, bb)->lr_in;
@@ -2292,7 +2292,7 @@ df_analyse (df, blocks, flags)
/* We could deal with additional basic blocks being created by
rescanning everything again. */
- if (df->n_bbs && df->n_bbs != (unsigned int)n_basic_blocks)
+ if (df->n_bbs && df->n_bbs != (unsigned int) last_basic_block)
abort ();
update = df_modified_p (df, blocks);
@@ -2402,10 +2402,8 @@ df_refs_unlink (df, blocks)
}
else
{
- FOR_EACH_BB (bb,
- {
+ FOR_EACH_BB (bb)
df_bb_refs_unlink (df, bb);
- });
}
}
#endif
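The hunk above corrects the dead (#if 0'ed) code in df_refs_unlink to the statement-style form shown in the added line: FOR_EACH_BB takes only the iteration variable and the loop body follows as an ordinary statement, rather than being passed as a second macro argument. A minimal standalone analogue of that macro style (hypothetical FOR_EACH_NODE over a linked list, not GCC's actual FOR_EACH_BB definition):

    /* Standalone analogue of a statement-style iteration macro such as
       FOR_EACH_BB: the macro expands to a for-loop header, so the caller
       writes the body after it as a normal statement.  (Hypothetical
       FOR_EACH_NODE; the real FOR_EACH_BB iterates basic blocks.)  */
    #include <stdio.h>

    struct node { int index; struct node *next; };

    #define FOR_EACH_NODE(n, head) \
      for ((n) = (head); (n) != NULL; (n) = (n)->next)

    int
    main (void)
    {
      struct node c = { 2, NULL }, b = { 1, &c }, a = { 0, &b };
      struct node *n;

      FOR_EACH_NODE (n, &a)
        printf ("node %d\n", n->index);   /* body follows the macro */

      return 0;
    }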
@@ -3846,8 +3844,8 @@ iterative_dataflow_sbitmap (in, out, gen, kill, blocks,
fibheap_t worklist;
basic_block bb;
sbitmap visited, pending;
- pending = sbitmap_alloc (n_basic_blocks);
- visited = sbitmap_alloc (n_basic_blocks);
+ pending = sbitmap_alloc (last_basic_block);
+ visited = sbitmap_alloc (last_basic_block);
sbitmap_zero (pending);
sbitmap_zero (visited);
worklist = fibheap_new ();
@@ -3905,8 +3903,8 @@ iterative_dataflow_bitmap (in, out, gen, kill, blocks,
fibheap_t worklist;
basic_block bb;
sbitmap visited, pending;
- pending = sbitmap_alloc (n_basic_blocks);
- visited = sbitmap_alloc (n_basic_blocks);
+ pending = sbitmap_alloc (last_basic_block);
+ visited = sbitmap_alloc (last_basic_block);
sbitmap_zero (pending);
sbitmap_zero (visited);
worklist = fibheap_new ();