path: root/gcc/bb-reorder.c
author    Steven Bosscher <steven@gcc.gnu.org>    2007-03-23 23:05:28 +0000
committer Steven Bosscher <steven@gcc.gnu.org>    2007-03-23 23:05:28 +0000
commit    ad21dab7f7977e9741b7bdace4645ae365f45b01 (patch)
tree      a15eb2e5e903aed64b074a1be1be2b4bb4d71136 /gcc/bb-reorder.c
parent    1f93ef9228b60d66677a2880c32c4b78b19400e7 (diff)
tracer.c (tracer): Don't take FLAGS argument.
* tracer.c (tracer): Don't take FLAGS argument.  Assert we are in
cfglayout mode.  Don't go into and out of cfglayout mode.  Link the
blocks in the order of the constructed traces.
(rest_of_handle_tracer): Adjust call to tracer.
* loop-init.c (rtl_loop_init): Assert we are in cfglayout mode.
Don't go into cfglayout mode.
(rtl_loop_done): Don't go out of cfglayout mode.
* cfglayout.c (relink_block_chain): New function, split out from...
(fixup_reorder_chain): ...here.  Remove redundant checking.
(cfg_layout_finalize): Don't clear the header, footer, and aux fields
here, move the code to do so to relink_block_chain.  Likewise for
free_original_copy_tables.
* rtl.h (tracer): Update prototype.
* bb-reorder.c (reorder_basic_blocks): Don't take FLAGS argument.
Assert we are in cfglayout mode.  Don't go into and out of cfglayout
mode.  Use relink_block_chain to serialize the CFG according to the
new basic block order.  Move targetm.cannot_modify_jumps_p check from
here...
(gate_handle_reorder_blocks): ...to here.
(duplicate_computed_gotos): Move targetm.cannot_modify_jumps_p check
from here...
(gate_duplicate_computed_gotos): ...to here.
(rest_of_handle_reorder_blocks): Don't see if anything has changed,
something always changes when going into and out of cfglayout mode.
Perform an expensive cfg cleanup while going into cfglayout mode.
Always update liveness information on HAVE_conditional_execution
targets.  Reserialize the basic blocks and go out of cfglayout mode.
* reg-stack.c: Include cfglayout.h.
(rest_of_handle_stack_regs): Go into and out of cfglayout mode around
the call to reorder_basic_blocks.
* basic-block.h (reorder_basic_blocks): Update prototype.
(relink_block_chain): New prototype.
* passes.c (pass_outof_cfg_layout_mode): Move after cse2.

From-SVN: r123167
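As a concrete illustration of the calling convention described above, the sketch below shows how a caller is now expected to wrap the reordering: the pass enters and leaves cfglayout mode itself, reorder_basic_blocks only permutes the blocks, and the caller records the chosen order in the aux pointers before cfg_layout_finalize rewrites the block chain.  This is a minimal sketch, not code from the commit; the identifiers are taken from the diff below and from GCC internals of this period, and the function name example_reorder_pass is hypothetical.

/* Minimal sketch, assuming GCC internals of this era (basic_block,
   FOR_EACH_BB, EXIT_BLOCK_PTR, cfg_layout_initialize/finalize).
   The pass name is hypothetical.  */
static unsigned int
example_reorder_pass (void)
{
  basic_block bb;

  /* The caller now owns the cfglayout transition;
     reorder_basic_blocks merely asserts that
     current_ir_type () == IR_RTL_CFGLAYOUT.  */
  cfg_layout_initialize (0);

  reorder_basic_blocks ();

  /* Record the (already reordered) block order in the aux pointers
     so that the chain is serialized in that order when leaving
     cfglayout mode.  */
  FOR_EACH_BB (bb)
    if (bb->next_bb != EXIT_BLOCK_PTR)
      bb->aux = bb->next_bb;
  cfg_layout_finalize ();

  return 0;
}

Per the ChangeLog, rest_of_handle_stack_regs in reg-stack.c adopts the same pattern around its call to reorder_basic_blocks; the bb-reorder.c side of the change is shown in the hunks below.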
Diffstat (limited to 'gcc/bb-reorder.c')
-rw-r--r--  gcc/bb-reorder.c  |  36
1 file changed, 20 insertions(+), 16 deletions(-)
diff --git a/gcc/bb-reorder.c b/gcc/bb-reorder.c
index 9428ef3..27f24fc 100644
--- a/gcc/bb-reorder.c
+++ b/gcc/bb-reorder.c
@@ -1889,20 +1889,17 @@ verify_hot_cold_block_grouping (void)
the set of flags to pass to cfg_layout_initialize(). */
void
-reorder_basic_blocks (unsigned int flags)
+reorder_basic_blocks (void)
{
int n_traces;
int i;
struct trace *traces;
- if (n_basic_blocks <= NUM_FIXED_BLOCKS + 1)
- return;
+ gcc_assert (current_ir_type () == IR_RTL_CFGLAYOUT);
- if (targetm.cannot_modify_jumps_p ())
+ if (n_basic_blocks <= NUM_FIXED_BLOCKS + 1)
return;
- cfg_layout_initialize (flags);
-
set_edge_can_fallthru_flag ();
mark_dfs_back_edges ();
@@ -1930,10 +1927,11 @@ reorder_basic_blocks (unsigned int flags)
FREE (traces);
FREE (bbd);
+ relink_block_chain (/*stay_in_cfglayout_mode=*/true);
+
if (dump_file)
dump_flow_info (dump_file, dump_flags);
- cfg_layout_finalize ();
if (flag_reorder_blocks_and_partition)
verify_hot_cold_block_grouping ();
}
@@ -1976,6 +1974,8 @@ insert_section_boundary_note (void)
static bool
gate_duplicate_computed_gotos (void)
{
+ if (targetm.cannot_modify_jumps_p ())
+ return false;
return (optimize > 0 && flag_expensive_optimizations && !optimize_size);
}
@@ -1990,9 +1990,6 @@ duplicate_computed_gotos (void)
if (n_basic_blocks <= NUM_FIXED_BLOCKS + 1)
return 0;
- if (targetm.cannot_modify_jumps_p ())
- return 0;
-
cfg_layout_initialize (0);
/* We are estimating the length of uncond jump insn only once
@@ -2198,6 +2195,8 @@ partition_hot_cold_basic_blocks (void)
static bool
gate_handle_reorder_blocks (void)
{
+ if (targetm.cannot_modify_jumps_p ())
+ return false;
return (optimize > 0);
}
@@ -2206,34 +2205,39 @@ gate_handle_reorder_blocks (void)
static unsigned int
rest_of_handle_reorder_blocks (void)
{
- bool changed;
unsigned int liveness_flags;
+ basic_block bb;
/* Last attempt to optimize CFG, as scheduling, peepholing and insn
splitting possibly introduced more crossjumping opportunities. */
liveness_flags = (!HAVE_conditional_execution ? CLEANUP_UPDATE_LIFE : 0);
- changed = cleanup_cfg (CLEANUP_EXPENSIVE | liveness_flags);
+ cfg_layout_initialize (CLEANUP_EXPENSIVE | liveness_flags);
if (flag_sched2_use_traces && flag_schedule_insns_after_reload)
{
timevar_push (TV_TRACER);
- tracer (liveness_flags);
+ tracer ();
timevar_pop (TV_TRACER);
}
if (flag_reorder_blocks || flag_reorder_blocks_and_partition)
- reorder_basic_blocks (liveness_flags);
+ reorder_basic_blocks ();
if (flag_reorder_blocks || flag_reorder_blocks_and_partition
|| (flag_sched2_use_traces && flag_schedule_insns_after_reload))
- changed |= cleanup_cfg (CLEANUP_EXPENSIVE | liveness_flags);
+ cleanup_cfg (CLEANUP_EXPENSIVE | liveness_flags);
/* On conditional execution targets we can not update the life cheaply, so
we defer the updating to after both cleanups. This may lose some cases
but should not be terribly bad. */
- if (changed && HAVE_conditional_execution)
+ if (HAVE_conditional_execution)
update_life_info (NULL, UPDATE_LIFE_GLOBAL_RM_NOTES,
PROP_DEATH_NOTES);
+ FOR_EACH_BB (bb)
+ if (bb->next_bb != EXIT_BLOCK_PTR)
+ bb->aux = bb->next_bb;
+ cfg_layout_finalize ();
+
/* Add NOTE_INSN_SWITCH_TEXT_SECTIONS notes. */
insert_section_boundary_note ();
return 0;