author     Jan Hubicka <jh@suse.cz>            2002-03-06 20:34:36 +0100
committer  Jan Hubicka <hubicka@gcc.gnu.org>   2002-03-06 19:34:36 +0000
commit     2ca6672b2713bed413a3a21e0468db1723de41cb (patch)
tree       1f1d32f26b4675b6497ee473cc04e606b49810e1 /gcc
parent     31d0dd4fa30cbe7fc65d3e27461833c0bcca757a (diff)
toplev.c (rest_of_compilation): Do jump threading before SSA path...
	* toplev.c (rest_of_compilation): Do jump threading before SSA path;
	consistently call delete_trivially_dead_insns after CSE and GCSE;
	fix DFI_life dumping; do jump threading after liveness; do
	crossjumping after liveness2; update comment in last crossjumping.
	* cfgcleanup.c (try_crossjump_to_edge): Dirtify block.

From-SVN: r50373
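The first toplev.c hunk folds jump threading into the initial cleanup_cfg call by OR-ing an optional flag into its mode mask when -fthread-jumps is on. Below is a minimal, self-contained sketch of that idiom only; the flag values and the stub cleanup_cfg are invented for illustration and are not GCC's real definitions.

#include <stdio.h>

/* Illustration only: made-up flag values and a stub cleanup_cfg;
   GCC defines the real ones in its own headers.  */
#define CLEANUP_EXPENSIVE  1
#define CLEANUP_PRE_LOOP   2
#define CLEANUP_THREADING  4

static void
cleanup_cfg (int mode)
{
  if (mode & CLEANUP_EXPENSIVE)
    printf ("running the expensive CFG cleanups\n");
  if (mode & CLEANUP_THREADING)
    printf ("threading jumps as part of the cleanup\n");
}

int
main (void)
{
  int optimize = 2;           /* pretend -O2            */
  int flag_thread_jumps = 1;  /* pretend -fthread-jumps */

  /* Same flag composition as the patched first call in
     rest_of_compilation: threading is requested only when the
     option is enabled, so the mask stays cheap otherwise.  */
  cleanup_cfg ((optimize ? CLEANUP_EXPENSIVE : 0) | CLEANUP_PRE_LOOP
               | (flag_thread_jumps ? CLEANUP_THREADING : 0));
  return 0;
}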
Diffstat (limited to 'gcc')
-rw-r--r--  gcc/ChangeLog      8
-rw-r--r--  gcc/cfgcleanup.c   2
-rw-r--r--  gcc/toplev.c      30
3 files changed, 24 insertions, 16 deletions
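The cfgcleanup.c change marks the block that survives crossjumping as BB_DIRTY once profile data has been merged into it, so the liveness update requested later via CLEANUP_UPDATE_LIFE knows its dataflow information must be recomputed. The following toy sketch shows that pattern in isolation; the block type and flag value are stand-ins invented for the example, not GCC's real basic_block and BB_DIRTY.

#include <stdio.h>

/* Made-up stand-ins for illustration only.  */
#define BB_DIRTY 1

struct toy_block
{
  int count;      /* execution count              */
  int frequency;  /* estimated relative frequency */
  int flags;      /* status bits, e.g. BB_DIRTY   */
};

/* Merge profile data from SRC into DEST, as the crossjump code does for
   redirect_to, and mark DEST so a later liveness pass refreshes it.  */
static void
merge_and_dirty (struct toy_block *dest, const struct toy_block *src)
{
  dest->count += src->count;
  dest->frequency += src->frequency;
  dest->flags |= BB_DIRTY;
}

int
main (void)
{
  struct toy_block a = { 100, 10, 0 }, b = { 40, 4, 0 };

  merge_and_dirty (&a, &b);
  if (a.flags & BB_DIRTY)
    printf ("block needs its life info recomputed (count=%d)\n", a.count);
  return 0;
}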
diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index ff9a8dc..2a5c8c6 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,3 +1,11 @@
+Wed Mar 6 20:32:09 CET 2002 Jan Hubicka <jh@suse.cz>
+
+ * toplev.c (rest_of_compilation): Do jump threading before SSA path;
+ consistently call delete_trivially_dead_insns after CSE and GCSE;
+ fix DFI_life dumping; do jump threading after liveness; do crossjumping
+ after liveness2; update comment in last crossjumping.
+ * cfgcleanup.c (try_crossjump_to_edge): Dirtify block.
+
Wed Mar 6 12:27:10 2002 Jeffrey A Law (law@redhat.com)
* m68k.h (CONST_COSTS): Lower cost of 0.0 when used inside a
diff --git a/gcc/cfgcleanup.c b/gcc/cfgcleanup.c
index bce4153..2733726 100644
--- a/gcc/cfgcleanup.c
+++ b/gcc/cfgcleanup.c
@@ -1353,6 +1353,8 @@ try_crossjump_to_edge (mode, e1, e2)
redirect_to->count += src1->count;
redirect_to->frequency += src1->frequency;
+ /* We may have some registers visible through the block. */
+ redirect_to->flags |= BB_DIRTY;
/* Recompute the frequencies and counts of outgoing edges. */
for (s = redirect_to->succ; s; s = s->succ_next)
diff --git a/gcc/toplev.c b/gcc/toplev.c
index 5cd2e44..bd25b41 100644
--- a/gcc/toplev.c
+++ b/gcc/toplev.c
@@ -2629,7 +2629,8 @@ rest_of_compilation (decl)
find_basic_blocks (insns, max_reg_num (), rtl_dump_file);
if (rtl_dump_file)
dump_flow_info (rtl_dump_file);
- cleanup_cfg ((optimize ? CLEANUP_EXPENSIVE : 0) | CLEANUP_PRE_LOOP);
+ cleanup_cfg ((optimize ? CLEANUP_EXPENSIVE : 0) | CLEANUP_PRE_LOOP
+ | (flag_thread_jumps ? CLEANUP_THREADING : 0));
/* CFG is no longer maintained up-to-date. */
free_bb_for_insn ();
@@ -2722,8 +2723,7 @@ rest_of_compilation (decl)
find_basic_blocks (insns, max_reg_num (), rtl_dump_file);
if (rtl_dump_file)
dump_flow_info (rtl_dump_file);
- cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_PRE_LOOP
- | (flag_thread_jumps ? CLEANUP_THREADING : 0));
+ cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_PRE_LOOP);
/* Try to identify useless null pointer tests and delete them. */
if (flag_delete_null_pointer_checks)
@@ -2769,25 +2769,19 @@ rest_of_compilation (decl)
rebuild_jump_labels (insns);
purge_all_dead_edges (0);
+ delete_trivially_dead_insns (insns, max_reg_num ());
+
/* If we are not running more CSE passes, then we are no longer
expecting CSE to be run. But always rerun it in a cheap mode. */
cse_not_expected = !flag_rerun_cse_after_loop && !flag_gcse;
if (tem || optimize > 1)
cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_PRE_LOOP);
-
- /* Run this after jump optmizations remove all the unreachable code
- so that unreachable code will not keep values live. */
- delete_trivially_dead_insns (insns, max_reg_num ());
-
/* Try to identify useless null pointer tests and delete them. */
if (flag_delete_null_pointer_checks || flag_thread_jumps)
{
timevar_push (TV_JUMP);
- cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_PRE_LOOP
- | (flag_thread_jumps ? CLEANUP_THREADING : 0));
-
if (flag_delete_null_pointer_checks)
delete_null_pointer_checks (insns);
/* CFG is no longer maintained up-to-date. */
@@ -2827,6 +2821,7 @@ rest_of_compilation (decl)
cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_PRE_LOOP);
tem = gcse_main (insns, rtl_dump_file);
rebuild_jump_labels (insns);
+ delete_trivially_dead_insns (insns, max_reg_num ());
save_csb = flag_cse_skip_blocks;
save_cfj = flag_cse_follow_jumps;
@@ -2840,6 +2835,7 @@ rest_of_compilation (decl)
reg_scan (insns, max_reg_num (), 1);
tem2 = cse_main (insns, max_reg_num (), 0, rtl_dump_file);
purge_all_dead_edges (0);
+ delete_trivially_dead_insns (insns, max_reg_num ());
timevar_pop (TV_CSE);
cse_not_expected = !flag_rerun_cse_after_loop;
}
@@ -2851,7 +2847,6 @@ rest_of_compilation (decl)
tem = tem2 = 0;
timevar_push (TV_JUMP);
rebuild_jump_labels (insns);
- delete_trivially_dead_insns (insns, max_reg_num ());
cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_PRE_LOOP);
timevar_pop (TV_JUMP);
@@ -2861,6 +2856,7 @@ rest_of_compilation (decl)
reg_scan (insns, max_reg_num (), 1);
tem2 = cse_main (insns, max_reg_num (), 0, rtl_dump_file);
purge_all_dead_edges (0);
+ delete_trivially_dead_insns (insns, max_reg_num ());
timevar_pop (TV_CSE);
}
}
@@ -2991,6 +2987,7 @@ rest_of_compilation (decl)
reg_scan (insns, max_reg_num (), 0);
tem = cse_main (insns, max_reg_num (), 1, rtl_dump_file);
purge_all_dead_edges (0);
+ delete_trivially_dead_insns (insns, max_reg_num ());
if (tem)
{
@@ -3009,7 +3006,7 @@ rest_of_compilation (decl)
cse_not_expected = 1;
- close_dump_file (DFI_life, print_rtl_with_bb, insns);
+ open_dump_file (DFI_life, decl);
regclass_init ();
check_function_return_warnings ();
@@ -3277,8 +3274,8 @@ rest_of_compilation (decl)
if (optimize)
{
- cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_CROSSJUMP);
life_analysis (insns, rtl_dump_file, PROP_FINAL);
+ cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_CROSSJUMP | CLEANUP_UPDATE_LIFE);
/* This is kind of a heuristic. We need to run combine_stack_adjustments
even for machines with possibly nonzero RETURN_POPS_ARGS
@@ -3380,8 +3377,9 @@ rest_of_compilation (decl)
timevar_push (TV_REORDER_BLOCKS);
open_dump_file (DFI_bbro, decl);
- /* Last attempt to optimize CFG, as life analyzis possibly removed
- some instructions. */
+ /* Last attempt to optimize CFG, as scheduling, peepholing
+ and insn splitting possibly introduced more crossjumping
+ opportunities. */
cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_POST_REGSTACK
| CLEANUP_CROSSJUMP);
if (flag_reorder_blocks)