aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorRichard Biener <rguenther@suse.de>2022-12-15 13:42:16 +0100
committerRichard Biener <rguenther@suse.de>2022-12-15 18:42:52 +0100
commitd49b2a3a1dbbe3ac9ae134bc5742be7fdb417d32 (patch)
tree82a998e3764579fc4352b58094d71049624dfbd5
parent3f101e32e2fb616633722fb552779f537e9a9891 (diff)
downloadgcc-d49b2a3a1dbbe3ac9ae134bc5742be7fdb417d32.zip
gcc-d49b2a3a1dbbe3ac9ae134bc5742be7fdb417d32.tar.gz
gcc-d49b2a3a1dbbe3ac9ae134bc5742be7fdb417d32.tar.bz2
middle-end/108086 - avoid quadraticness in copy_edges_for_bb
For the testcase in PR108086 it's visible that we split blocks multiple times when inlining and that causes us to adjust the block tail stmt BBs multiple times, once for each split. The fix is to walk backwards and split from the tail instead. For a reduced testcase this improves compile-time at -O by 4%. PR middle-end/108086 * tree-inline.cc (copy_edges_for_bb): Walk stmts backwards for splitting the block to avoid quadratic behavior with setting stmts BB on multiple splits.
-rw-r--r--gcc/tree-inline.cc34
1 files changed, 18 insertions, 16 deletions
diff --git a/gcc/tree-inline.cc b/gcc/tree-inline.cc
index addfe7f..0360f1f 100644
--- a/gcc/tree-inline.cc
+++ b/gcc/tree-inline.cc
@@ -2569,13 +2569,17 @@ copy_edges_for_bb (basic_block bb, profile_count num, profile_count den,
&& !old_edge->src->aux)
new_bb->count -= old_edge->count ().apply_scale (num, den);
- for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
+ /* Walk stmts from end to start so that splitting will adjust the BB
+ pointer for each stmt at most once, even when we split the block
+ multiple times. */
+ bool seen_nondebug = false;
+ for (si = gsi_last_bb (new_bb); !gsi_end_p (si);)
{
bool can_throw, nonlocal_goto;
gimple *copy_stmt = gsi_stmt (si);
/* Do this before the possible split_block. */
- gsi_next (&si);
+ gsi_prev (&si);
/* If this tree could throw an exception, there are two
cases where we need to add abnormal edge(s): the
@@ -2595,25 +2599,23 @@ copy_edges_for_bb (basic_block bb, profile_count num, profile_count den,
if (can_throw || nonlocal_goto)
{
- if (!gsi_end_p (si))
- {
- while (!gsi_end_p (si) && is_gimple_debug (gsi_stmt (si)))
- gsi_next (&si);
- if (gsi_end_p (si))
- need_debug_cleanup = true;
- }
- if (!gsi_end_p (si))
- /* Note that bb's predecessor edges aren't necessarily
- right at this point; split_block doesn't care. */
+ /* If there's only debug insns after copy_stmt don't split
+ the block but instead mark the block for cleanup. */
+ if (!seen_nondebug)
+ need_debug_cleanup = true;
+ else
{
+ /* Note that bb's predecessor edges aren't necessarily
+ right at this point; split_block doesn't care. */
edge e = split_block (new_bb, copy_stmt);
-
- new_bb = e->dest;
- new_bb->aux = e->src->aux;
- si = gsi_start_bb (new_bb);
+ e->dest->aux = new_bb->aux;
+ seen_nondebug = false;
}
}
+ if (!is_gimple_debug (copy_stmt))
+ seen_nondebug = true;
+
bool update_probs = false;
if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)