-rw-r--r--  gcc/ChangeLog             26
-rw-r--r--  gcc/tree-cfg.c             6
-rw-r--r--  gcc/tree-cfgcleanup.c     44
-rw-r--r--  gcc/tree-ssa-dom.c        28
-rw-r--r--  gcc/tree-ssa-forwprop.c   22
-rw-r--r--  gcc/tree-ssa-pre.c        24
-rw-r--r--  gcc/tree-ssa-propagate.c  32
7 files changed, 138 insertions(+), 44 deletions(-)
diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index 5692ea4..18ada57 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,3 +1,29 @@
+2015-03-13 Richard Biener <rguenther@suse.de>
+
+ PR middle-end/44563
+ * tree-cfgcleanup.c (split_bb_on_noreturn_calls): Remove.
+ (cleanup_tree_cfg_1): Do not call it.
+ (execute_cleanup_cfg_post_optimizing): Fixup the CFG here.
+ (fixup_noreturn_call): Mark the stmt as control altering.
+ * tree-cfg.c (execute_fixup_cfg): Do not dump the function
+ here.
+ (pass_data_fixup_cfg): Produce a dump file.
+ * tree-ssa-dom.c: Include tree-cfgcleanup.h.
+ (need_noreturn_fixup): New global.
+ (pass_dominator::execute): Fixup queued noreturn calls.
+ (optimize_stmt): Queue calls that became noreturn for fixup.
+ * tree-ssa-forwprop.c (pass_forwprop::execute): Likewise.
+ * tree-ssa-pre.c: Include tree-cfgcleanup.h.
+ (el_to_fixup): New global.
+ (eliminate_dom_walker::before_dom_children): Queue calls that
+ became noreturn for fixup.
+ (eliminate): Fixup queued noreturn calls.
+ * tree-ssa-propagate.c: Include tree-cfgcleanup.h.
+ (substitute_and_fold_dom_walker): New member stmts_to_fixup.
+ (substitute_and_fold_dom_walker::before_dom_children): Queue
+ calls that became noreturn for fixup.
+ (substitute_and_fold): Fixup queued noreturn calls.
+
2015-03-12 Jan Hubicka <hubicka@ucw.cz>
* ipa-icf.c (sem_function::equals_wpa): Match CXX_CONSTRUCTOR_P
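
The ChangeLog entry above applies the same queue-and-fixup pattern to four passes: instead of letting CFG cleanup repeatedly rescan blocks for calls that became noreturn, each pass records such statements during its walk and fixes them up afterwards. The following is a minimal sketch of that pattern only, assuming the GCC 5-era internal API used in the diff below (is_gimple_call, gimple_call_noreturn_p, fixup_noreturn_call, vec<gimple>); the helper names are hypothetical and this is not the pass code itself.

/* Sketch of the shared queue-and-fixup pattern (illustration only).  */
static vec<gimple> to_fixup;

/* After folding OLD_STMT into NEW_STMT, queue NEW_STMT if the call
   became noreturn; splitting its block has to wait until the walk
   is finished.  */
static void
maybe_queue_noreturn_fixup (gimple old_stmt, gimple new_stmt)
{
  bool was_noreturn = (is_gimple_call (old_stmt)
		       && gimple_call_noreturn_p (old_stmt));
  if (!was_noreturn
      && is_gimple_call (new_stmt)
      && gimple_call_noreturn_p (new_stmt))
    to_fixup.safe_push (new_stmt);
}

/* Pop in reverse order so a dominating call that became noreturn does
   not remove a queued statement before it is processed.  Returns true
   if the CFG changed.  */
static bool
flush_noreturn_fixups (void)
{
  bool cfg_changed = false;
  while (!to_fixup.is_empty ())
    cfg_changed |= fixup_noreturn_call (to_fixup.pop ());
  return cfg_changed;
}
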
diff --git a/gcc/tree-cfg.c b/gcc/tree-cfg.c
index e1bc143..ac12a58 100644
--- a/gcc/tree-cfg.c
+++ b/gcc/tree-cfg.c
@@ -8721,10 +8721,6 @@ execute_fixup_cfg (void)
if (count_scale != REG_BR_PROB_BASE)
compute_function_frequency ();
- /* Dump a textual representation of the flowgraph. */
- if (dump_file)
- gimple_dump_cfg (dump_file, dump_flags);
-
if (current_loops
&& (todo & TODO_cleanup_cfg))
loops_state_set (LOOPS_NEED_FIXUP);
@@ -8737,7 +8733,7 @@ namespace {
const pass_data pass_data_fixup_cfg =
{
GIMPLE_PASS, /* type */
- "*free_cfg_annotations", /* name */
+ "fixup_cfg", /* name */
OPTGROUP_NONE, /* optinfo_flags */
TV_NONE, /* tv_id */
PROP_cfg, /* properties_required */
diff --git a/gcc/tree-cfgcleanup.c b/gcc/tree-cfgcleanup.c
index 5c5c7ba..f5a21a30 100644
--- a/gcc/tree-cfgcleanup.c
+++ b/gcc/tree-cfgcleanup.c
@@ -625,35 +625,13 @@ fixup_noreturn_call (gimple stmt)
update_stmt (stmt);
}
+ /* Mark the call as altering control flow. */
+ gimple_call_set_ctrl_altering (stmt, true);
+
return remove_fallthru_edge (bb->succs);
}
-/* Split basic blocks on calls in the middle of a basic block that are now
- known not to return, and remove the unreachable code. */
-
-static bool
-split_bb_on_noreturn_calls (basic_block bb)
-{
- bool changed = false;
- gimple_stmt_iterator gsi;
-
- for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
- {
- gimple stmt = gsi_stmt (gsi);
-
- if (!is_gimple_call (stmt))
- continue;
-
- if (gimple_call_noreturn_p (stmt))
- changed |= fixup_noreturn_call (stmt);
- }
-
- if (changed)
- bitmap_set_bit (cfgcleanup_altered_bbs, bb->index);
- return changed;
-}
-
/* Tries to cleanup cfg in basic block BB. Returns true if anything
changes. */
@@ -703,10 +681,7 @@ cleanup_tree_cfg_1 (void)
{
bb = BASIC_BLOCK_FOR_FN (cfun, i);
if (bb)
- {
- retval |= cleanup_tree_cfg_bb (bb);
- retval |= split_bb_on_noreturn_calls (bb);
- }
+ retval |= cleanup_tree_cfg_bb (bb);
}
/* Now process the altered blocks, as long as any are available. */
@@ -722,10 +697,6 @@ cleanup_tree_cfg_1 (void)
continue;
retval |= cleanup_tree_cfg_bb (bb);
-
- /* Rerun split_bb_on_noreturn_calls, in case we have altered any noreturn
- calls. */
- retval |= split_bb_on_noreturn_calls (bb);
}
end_recording_case_labels ();
@@ -1111,9 +1082,12 @@ make_pass_merge_phi (gcc::context *ctxt)
static unsigned int
execute_cleanup_cfg_post_optimizing (void)
{
- unsigned int todo = 0;
+ unsigned int todo = execute_fixup_cfg ();
if (cleanup_tree_cfg ())
- todo |= TODO_update_ssa;
+ {
+ todo &= ~TODO_cleanup_cfg;
+ todo |= TODO_update_ssa;
+ }
maybe_remove_unreachable_handlers ();
cleanup_dead_labels ();
group_case_labels ();
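
For context, a hypothetical C testcase (not taken from PR middle-end/44563) showing why the fixup is needed at all: once propagation resolves the indirect call to a noreturn function, the statements following it in the same basic block become unreachable, and, as the comments in the passes below put it, fixup_noreturn_call may have to split the block and remove the fallthru edge so later passes see a consistent CFG.

/* Hypothetical illustration, not the PR testcase.  */
extern void abort (void) __attribute__ ((noreturn));

int
f (int x)
{
  void (*fn) (void) = abort;	/* copy propagation resolves FN to abort  */
  fn ();			/* the call becomes noreturn ...  */
  return x + 1;			/* ... making this fallthru code unreachable  */
}
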
diff --git a/gcc/tree-ssa-dom.c b/gcc/tree-ssa-dom.c
index d230ce1..ea49eb7 100644
--- a/gcc/tree-ssa-dom.c
+++ b/gcc/tree-ssa-dom.c
@@ -74,6 +74,7 @@ along with GCC; see the file COPYING3. If not see
#include "tree-ssa-dom.h"
#include "inchash.h"
#include "gimplify.h"
+#include "tree-cfgcleanup.h"
/* This file implements optimizations on the dominator tree. */
@@ -246,6 +247,7 @@ static bool cfg_altered;
/* Bitmap of blocks that have had EH statements cleaned. We should
remove their dead edges eventually. */
static bitmap need_eh_cleanup;
+static vec<gimple> need_noreturn_fixup;
/* Statistics for dominator optimizations. */
struct opt_stats_d
@@ -885,6 +887,7 @@ pass_dominator::execute (function *fun)
avail_exprs_stack.create (20);
const_and_copies_stack.create (20);
need_eh_cleanup = BITMAP_ALLOC (NULL);
+ need_noreturn_fixup.create (0);
calculate_dominance_info (CDI_DOMINATORS);
cfg_altered = false;
@@ -967,6 +970,23 @@ pass_dominator::execute (function *fun)
bitmap_clear (need_eh_cleanup);
}
+ /* Fixup stmts that became noreturn calls. This may require splitting
+ blocks and thus isn't possible during the dominator walk or before
+ jump threading finished. Do this in reverse order so we don't
+ inadvertently remove a stmt we want to fixup by visiting a dominating
+ now noreturn call first. */
+ while (!need_noreturn_fixup.is_empty ())
+ {
+ gimple stmt = need_noreturn_fixup.pop ();
+ if (dump_file && dump_flags & TDF_DETAILS)
+ {
+ fprintf (dump_file, "Fixing up noreturn call ");
+ print_gimple_stmt (dump_file, stmt, 0, 0);
+ fprintf (dump_file, "\n");
+ }
+ fixup_noreturn_call (stmt);
+ }
+
statistics_counter_event (fun, "Redundant expressions eliminated",
opt_stats.num_re);
statistics_counter_event (fun, "Constants propagated",
@@ -986,7 +1006,7 @@ pass_dominator::execute (function *fun)
/* Free asserted bitmaps and stacks. */
BITMAP_FREE (need_eh_cleanup);
-
+ need_noreturn_fixup.release ();
avail_exprs_stack.release ();
const_and_copies_stack.release ();
@@ -2364,8 +2384,10 @@ optimize_stmt (basic_block bb, gimple_stmt_iterator si)
gimple stmt, old_stmt;
bool may_optimize_p;
bool modified_p = false;
+ bool was_noreturn;
old_stmt = stmt = gsi_stmt (si);
+ was_noreturn = is_gimple_call (stmt) && gimple_call_noreturn_p (stmt);
if (dump_file && (dump_flags & TDF_DETAILS))
{
@@ -2545,6 +2567,10 @@ optimize_stmt (basic_block bb, gimple_stmt_iterator si)
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, " Flagged to clear EH edges.\n");
}
+
+ if (!was_noreturn
+ && is_gimple_call (stmt) && gimple_call_noreturn_p (stmt))
+ need_noreturn_fixup.safe_push (stmt);
}
}
diff --git a/gcc/tree-ssa-forwprop.c b/gcc/tree-ssa-forwprop.c
index d8db20a..93f92f3 100644
--- a/gcc/tree-ssa-forwprop.c
+++ b/gcc/tree-ssa-forwprop.c
@@ -2141,6 +2141,7 @@ pass_forwprop::execute (function *fun)
lattice.quick_grow_cleared (num_ssa_names);
int *postorder = XNEWVEC (int, n_basic_blocks_for_fn (fun));
int postorder_num = inverted_post_order_compute (postorder);
+ auto_vec<gimple, 4> to_fixup;
to_purge = BITMAP_ALLOC (NULL);
for (int i = 0; i < postorder_num; ++i)
{
@@ -2340,6 +2341,8 @@ pass_forwprop::execute (function *fun)
gimple stmt = gsi_stmt (gsi);
gimple orig_stmt = stmt;
bool changed = false;
+ bool was_noreturn = (is_gimple_call (stmt)
+ && gimple_call_noreturn_p (stmt));
/* Mark stmt as potentially needing revisiting. */
gimple_set_plf (stmt, GF_PLF_1, false);
@@ -2350,6 +2353,9 @@ pass_forwprop::execute (function *fun)
stmt = gsi_stmt (gsi);
if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
bitmap_set_bit (to_purge, bb->index);
+ if (!was_noreturn
+ && is_gimple_call (stmt) && gimple_call_noreturn_p (stmt))
+ to_fixup.safe_push (stmt);
/* Cleanup the CFG if we simplified a condition to
true or false. */
if (gcond *cond = dyn_cast <gcond *> (stmt))
@@ -2470,6 +2476,22 @@ pass_forwprop::execute (function *fun)
free (postorder);
lattice.release ();
+ /* Fixup stmts that became noreturn calls. This may require splitting
+ blocks and thus isn't possible during the walk. Do this
+ in reverse order so we don't inadvertently remove a stmt we want to
+ fixup by visiting a dominating now noreturn call first. */
+ while (!to_fixup.is_empty ())
+ {
+ gimple stmt = to_fixup.pop ();
+ if (dump_file && dump_flags & TDF_DETAILS)
+ {
+ fprintf (dump_file, "Fixing up noreturn call ");
+ print_gimple_stmt (dump_file, stmt, 0, 0);
+ fprintf (dump_file, "\n");
+ }
+ cfg_changed |= fixup_noreturn_call (stmt);
+ }
+
cfg_changed |= gimple_purge_all_dead_eh_edges (to_purge);
BITMAP_FREE (to_purge);
diff --git a/gcc/tree-ssa-pre.c b/gcc/tree-ssa-pre.c
index 83b48df..c985e79 100644
--- a/gcc/tree-ssa-pre.c
+++ b/gcc/tree-ssa-pre.c
@@ -98,6 +98,7 @@ along with GCC; see the file COPYING3. If not see
#include "ipa-prop.h"
#include "tree-ssa-propagate.h"
#include "ipa-utils.h"
+#include "tree-cfgcleanup.h"
/* TODO:
@@ -3922,6 +3923,7 @@ compute_avail (void)
/* Local state for the eliminate domwalk. */
static vec<gimple> el_to_remove;
+static vec<gimple> el_to_fixup;
static unsigned int el_todo;
static vec<tree> el_avail;
static vec<tree> el_avail_stack;
@@ -4429,7 +4431,7 @@ eliminate_dom_walker::before_dom_children (basic_block b)
/* When changing a call into a noreturn call, cfg cleanup
is needed to fix up the noreturn call. */
if (!was_noreturn && gimple_call_noreturn_p (stmt))
- el_todo |= TODO_cleanup_cfg;
+ el_to_fixup.safe_push (stmt);
}
else
{
@@ -4529,6 +4531,7 @@ eliminate (bool do_pre)
need_ab_cleanup = BITMAP_ALLOC (NULL);
el_to_remove.create (0);
+ el_to_fixup.create (0);
el_todo = 0;
el_avail.create (num_ssa_names);
el_avail_stack.create (0);
@@ -4580,6 +4583,25 @@ eliminate (bool do_pre)
}
el_to_remove.release ();
+ /* Fixup stmts that became noreturn calls. This may require splitting
+ blocks and thus isn't possible during the dominator walk. Do this
+ in reverse order so we don't inadvertently remove a stmt we want to
+ fixup by visiting a dominating now noreturn call first. */
+ while (!el_to_fixup.is_empty ())
+ {
+ stmt = el_to_fixup.pop ();
+
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ {
+ fprintf (dump_file, "Fixing up noreturn call ");
+ print_gimple_stmt (dump_file, stmt, 0, 0);
+ }
+
+ if (fixup_noreturn_call (stmt))
+ el_todo |= TODO_cleanup_cfg;
+ }
+ el_to_fixup.release ();
+
return el_todo;
}
diff --git a/gcc/tree-ssa-propagate.c b/gcc/tree-ssa-propagate.c
index 8b82f9e..c3f9d3e 100644
--- a/gcc/tree-ssa-propagate.c
+++ b/gcc/tree-ssa-propagate.c
@@ -67,6 +67,7 @@
#include "value-prof.h"
#include "domwalk.h"
#include "cfgloop.h"
+#include "tree-cfgcleanup.h"
/* This file implements a generic value propagation engine based on
the same propagation used by the SSA-CCP algorithm [1].
@@ -1071,11 +1072,13 @@ public:
fold_fn (fold_fn_), do_dce (do_dce_), something_changed (false)
{
stmts_to_remove.create (0);
+ stmts_to_fixup.create (0);
need_eh_cleanup = BITMAP_ALLOC (NULL);
}
~substitute_and_fold_dom_walker ()
{
stmts_to_remove.release ();
+ stmts_to_fixup.release ();
BITMAP_FREE (need_eh_cleanup);
}
@@ -1087,6 +1090,7 @@ public:
bool do_dce;
bool something_changed;
vec<gimple> stmts_to_remove;
+ vec<gimple> stmts_to_fixup;
bitmap need_eh_cleanup;
};
@@ -1125,7 +1129,6 @@ substitute_and_fold_dom_walker::before_dom_children (basic_block bb)
{
bool did_replace;
gimple stmt = gsi_stmt (i);
- gimple old_stmt;
enum gimple_code code = gimple_code (stmt);
/* Ignore ASSERT_EXPRs. They are used by VRP to generate
@@ -1163,7 +1166,9 @@ substitute_and_fold_dom_walker::before_dom_children (basic_block bb)
print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
}
- old_stmt = stmt;
+ gimple old_stmt = stmt;
+ bool was_noreturn = (is_gimple_call (stmt)
+ && gimple_call_noreturn_p (stmt));
/* Some statements may be simplified using propagator
specific information. Do this before propagating
@@ -1194,6 +1199,13 @@ substitute_and_fold_dom_walker::before_dom_children (basic_block bb)
if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
bitmap_set_bit (need_eh_cleanup, bb->index);
+ /* If we turned a call that was not noreturn into a noreturn one,
+ schedule it for fixup. */
+ if (!was_noreturn
+ && is_gimple_call (stmt)
+ && gimple_call_noreturn_p (stmt))
+ stmts_to_fixup.safe_push (stmt);
+
if (is_gimple_assign (stmt)
&& (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
== GIMPLE_SINGLE_RHS))
@@ -1286,6 +1298,22 @@ substitute_and_fold (ssa_prop_get_value_fn get_value_fn,
if (!bitmap_empty_p (walker.need_eh_cleanup))
gimple_purge_all_dead_eh_edges (walker.need_eh_cleanup);
+ /* Fixup stmts that became noreturn calls. This may require splitting
+ blocks and thus isn't possible during the dominator walk. Do this
+ in reverse order so we don't inadvertently remove a stmt we want to
+ fixup by visiting a dominating now noreturn call first. */
+ while (!walker.stmts_to_fixup.is_empty ())
+ {
+ gimple stmt = walker.stmts_to_fixup.pop ();
+ if (dump_file && dump_flags & TDF_DETAILS)
+ {
+ fprintf (dump_file, "Fixing up noreturn call ");
+ print_gimple_stmt (dump_file, stmt, 0, 0);
+ fprintf (dump_file, "\n");
+ }
+ fixup_noreturn_call (stmt);
+ }
+
statistics_counter_event (cfun, "Constants propagated",
prop_stats.num_const_prop);
statistics_counter_event (cfun, "Copies propagated",