author     Richard Biener <rguenther@suse.de>   2020-01-10 11:23:53 +0000
committer  Richard Biener <rguenth@gcc.gnu.org> 2020-01-10 11:23:53 +0000
commit     734efcdda91645d6425f584b39362cf0c3af2587 (patch)
tree       a10a2ca745e6a7d0835082eba4315b24a8bcc059
parent     5eaf0c498f718f60591b06fa81fc51ace6a16c01 (diff)
re PR tree-optimization/93199 (Compile time hog in sink_clobbers)
2020-01-10  Richard Biener  <rguenther@suse.de>

	PR middle-end/93199
	* tree-eh.c (sink_clobbers): Move clobbers to out-of-IL
	sequences to avoid walking them again for secondary opportunities.
	(pass_lower_eh_dispatch::execute): Instead actually insert
	them here.

From-SVN: r280102
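For readers skimming the patch below, here is a minimal, self-contained sketch of the buffering idea the commit describes: instead of inserting sunk clobbers straight into the successor block's statement list, where later sink_clobbers calls would have to walk over them again, pending statements are collected in side buffers indexed by block and spliced into the IL exactly once. This is not GCC code; the names Block, Deferred, sink_trailing_clobbers and flush_deferred are hypothetical, and GIMPLE statements are modeled as plain strings.

// Minimal, self-contained sketch of the buffering idea behind this patch.
// NOT GCC code: Block, Deferred, sink_trailing_clobbers and flush_deferred
// are hypothetical names, and statements are modeled as plain strings.

#include <deque>
#include <iostream>
#include <string>
#include <vector>

struct Block
{
  int index;
  std::deque<std::string> stmts; // stand-in for the block's statement list
  int succ;                      // single successor, -1 if none
};

// One pending sequence per block index, kept outside the "IL" until flushed.
using Deferred = std::vector<std::deque<std::string>>;

// Move trailing "clobber" statements of BB into the successor's side buffer
// rather than into the successor block itself, so later walks of the
// successor never have to skip over already-sunk clobbers.
static void
sink_trailing_clobbers (Block &bb, Deferred &deferred)
{
  if (bb.succ < 0)
    return;
  std::deque<std::string> &buf = deferred[bb.succ];
  while (!bb.stmts.empty () && bb.stmts.back ().rfind ("clobber", 0) == 0)
    {
      buf.push_front (bb.stmts.back ());
      bb.stmts.pop_back ();
    }
  // Anything already buffered for BB is forwarded to the successor's
  // buffer as well, again without re-walking statements in the IL.
  std::deque<std::string> &own = deferred[bb.index];
  buf.insert (buf.begin (), own.begin (), own.end ());
  own.clear ();
}

// Splice each pending buffer into its block exactly once.
static void
flush_deferred (std::vector<Block> &blocks, Deferred &deferred)
{
  for (Block &bb : blocks)
    {
      std::deque<std::string> &buf = deferred[bb.index];
      bb.stmts.insert (bb.stmts.begin (), buf.begin (), buf.end ());
      buf.clear ();
    }
}

int
main ()
{
  std::vector<Block> blocks = {
    { 0, { "x = 1", "clobber a" }, 1 },
    { 1, { "clobber b" }, 2 },
    { 2, { "resx" }, -1 },
  };
  Deferred deferred (blocks.size ());

  // Walk the blocks once (think reverse post-order), sinking into the side
  // buffers, then flush every buffer exactly once.
  for (Block &bb : blocks)
    sink_trailing_clobbers (bb, deferred);
  flush_deferred (blocks, deferred);

  for (const Block &bb : blocks)
    {
      std::cout << "block " << bb.index << ":";
      for (const std::string &s : bb.stmts)
        std::cout << " [" << s << "]";
      std::cout << "\n";
    }
  return 0;
}

In the patch itself the buffers are gimple_seq objects indexed by basic-block index: sink_clobbers fills them (and forwards any sequence already pending for the block it sinks from), and pass_lower_eh_dispatch::execute splices each non-empty sequence into its block once during the reverse-post-order walk, so no sunk clobber is rescanned and the compile-time hog from PR tree-optimization/93199 goes away.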
-rw-r--r--   gcc/ChangeLog    8
-rw-r--r--   gcc/tree-eh.c   39
2 files changed, 40 insertions(+), 7 deletions(-)
diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index 8b3b780..f93e919 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,6 +1,14 @@
2020-01-10 Richard Biener <rguenther@suse.de>
PR middle-end/93199
+ * tree-eh.c (sink_clobbers): Move clobbers to out-of-IL
+ sequences to avoid walking them again for secondary opportunities.
+ (pass_lower_eh_dispatch::execute): Instead actually insert
+ them here.
+
+2020-01-10 Richard Biener <rguenther@suse.de>
+
+ PR middle-end/93199
* tree-eh.c (redirect_eh_edge_1): Avoid some work if possible.
(cleanup_all_empty_eh): Walk landing pads in reverse order to
avoid quadraticness.
diff --git a/gcc/tree-eh.c b/gcc/tree-eh.c
index 408ff48..dc80f57 100644
--- a/gcc/tree-eh.c
+++ b/gcc/tree-eh.c
@@ -3550,11 +3550,15 @@ optimize_clobbers (basic_block bb)
}
/* Try to sink var = {v} {CLOBBER} stmts followed just by
- internal throw to successor BB. If FOUND_OPPORTUNITY is not NULL
- then do not perform the optimization but set *FOUND_OPPORTUNITY to true. */
+ internal throw to successor BB.
+ SUNK, if not NULL, is an array of sequences indexed by basic-block
+ index to sink to and to pick up sinking opportunities from.
+ If FOUND_OPPORTUNITY is not NULL then do not perform the optimization
+ but set *FOUND_OPPORTUNITY to true. */
static int
-sink_clobbers (basic_block bb, bool *found_opportunity = NULL)
+sink_clobbers (basic_block bb,
+ gimple_seq *sunk = NULL, bool *found_opportunity = NULL)
{
edge e;
edge_iterator ei;
@@ -3589,7 +3593,7 @@ sink_clobbers (basic_block bb, bool *found_opportunity = NULL)
return 0;
any_clobbers = true;
}
- if (!any_clobbers)
+ if (!any_clobbers && (!sunk || gimple_seq_empty_p (sunk[bb->index])))
return 0;
/* If this was a dry run, tell it we found clobbers to sink. */
@@ -3618,7 +3622,10 @@ sink_clobbers (basic_block bb, bool *found_opportunity = NULL)
gimple *first_sunk = NULL;
gimple *last_sunk = NULL;
- dgsi = gsi_after_labels (succbb);
+ if (sunk)
+ dgsi = gsi_start (sunk[succbb->index]);
+ else
+ dgsi = gsi_after_labels (succbb);
gsi = gsi_last_bb (bb);
for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
{
@@ -3653,6 +3660,15 @@ sink_clobbers (basic_block bb, bool *found_opportunity = NULL)
first_sunk = stmt;
last_sunk = stmt;
}
+ if (sunk && !gimple_seq_empty_p (sunk[bb->index]))
+ {
+ if (!first_sunk)
+ first_sunk = gsi_stmt (gsi_last (sunk[bb->index]));
+ last_sunk = gsi_stmt (gsi_start (sunk[bb->index]));
+ gsi_insert_seq_before_without_update (&dgsi,
+ sunk[bb->index], GSI_NEW_STMT);
+ sunk[bb->index] = NULL;
+ }
if (first_sunk)
{
/* Adjust virtual operands if we sunk across a virtual PHI. */
@@ -3892,7 +3908,7 @@ pass_lower_eh_dispatch::execute (function *fun)
if (stmt_can_throw_external (fun, last))
optimize_clobbers (bb);
else if (!any_resx_to_process)
- sink_clobbers (bb, &any_resx_to_process);
+ sink_clobbers (bb, NULL, &any_resx_to_process);
}
}
if (redirected)
@@ -3908,6 +3924,7 @@ pass_lower_eh_dispatch::execute (function *fun)
and unreachable block removal. */
int *rpo = XNEWVEC (int, n_basic_blocks_for_fn (fun));
int rpo_n = pre_and_rev_post_order_compute_fn (fun, NULL, rpo, false);
+ gimple_seq *sunk = XCNEWVEC (gimple_seq, last_basic_block_for_fn (fun));
for (int i = 0; i < rpo_n; ++i)
{
bb = BASIC_BLOCK_FOR_FN (fun, rpo[i]);
@@ -3915,9 +3932,17 @@ pass_lower_eh_dispatch::execute (function *fun)
if (last
&& gimple_code (last) == GIMPLE_RESX
&& !stmt_can_throw_external (fun, last))
- flags |= sink_clobbers (bb);
+ flags |= sink_clobbers (bb, sunk);
+ /* If there were any clobbers sunk into this BB, insert them now. */
+ if (!gimple_seq_empty_p (sunk[bb->index]))
+ {
+ gimple_stmt_iterator gsi = gsi_after_labels (bb);
+ gsi_insert_seq_before (&gsi, sunk[bb->index], GSI_NEW_STMT);
+ sunk[bb->index] = NULL;
+ }
}
free (rpo);
+ free (sunk);
}
return flags;