author     Segher Boessenkool <segher@kernel.crashing.org>  2020-08-07 01:31:38 +0000
committer  Segher Boessenkool <segher@kernel.crashing.org>  2020-09-09 16:55:22 +0000
commit     69ca5f3a988266da8905aef9cf22aa02807e0471 (patch)
tree       661a3de4c286667c65fa793edf8a63e48399540b
parent     ad2a37157d3476dffb3baf93ca5001c9f6c44bda (diff)
bb-reorder: Remove a misfiring micro-optimization (PR96475)
When the compgotos pass copies the tail of blocks ending in an indirect
jump, there is a micro-optimization to not copy the last one, since the
original block will then just be deleted.  This does not work properly
if cleanup_cfg does not merge all pairs of blocks we expect it to.  It
also does not work if that last block can be merged into multiple
predecessors.

2020-09-09  Segher Boessenkool  <segher@kernel.crashing.org>

        PR rtl-optimization/96475
        * bb-reorder.c (maybe_duplicate_computed_goto): Remove
        single_pred_p micro-optimization.
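For readers unfamiliar with the pass: the indirect jumps involved
typically come from GNU C computed gotos (labels as values).  Below is
a minimal, hypothetical sketch (not part of this patch) of the pattern
compgotos rewrites: every handler funnels into one shared dispatch
block ending in "goto *...", and the pass copies that tail block into
each predecessor so each handler ends in its own indirect jump, which
the branch predictor can learn per-opcode.

    #include <stdio.h>

    /* Minimal bytecode interpreter using the GNU C labels-as-values
       extension.  All handlers jump to the shared "dispatch" block,
       which ends in an indirect jump (goto *...).  The compgotos pass
       copies that tail block into each predecessor handler.  */
    static int
    run (const unsigned char *code)
    {
      static void *table[] = { &&op_add, &&op_sub, &&op_halt };
      int acc = 0;

     dispatch:
      goto *table[*code++];   /* the block compgotos duplicates */

     op_add:
      acc += 1;
      goto dispatch;

     op_sub:
      acc -= 1;
      goto dispatch;

     op_halt:
      return acc;
    }

    int
    main (void)
    {
      static const unsigned char prog[] = { 0, 0, 1, 2 };  /* add add sub halt */
      printf ("%d\n", run (prog));   /* prints 1 */
      return 0;
    }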
-rw-r--r--  gcc/bb-reorder.c  10
1 file changed, 3 insertions(+), 7 deletions(-)
diff --git a/gcc/bb-reorder.c b/gcc/bb-reorder.c
index c635010..76e56b5 100644
--- a/gcc/bb-reorder.c
+++ b/gcc/bb-reorder.c
@@ -2680,9 +2680,6 @@ make_pass_reorder_blocks (gcc::context *ctxt)
 static bool
 maybe_duplicate_computed_goto (basic_block bb, int max_size)
 {
-  if (single_pred_p (bb))
-    return false;
-
   /* Make sure that the block is small enough.  */
   rtx_insn *insn;
   FOR_BB_INSNS (bb, insn)
@@ -2700,10 +2697,9 @@ maybe_duplicate_computed_goto (basic_block bb, int max_size)
     {
       basic_block pred = e->src;
 
-      /* Do not duplicate BB into PRED if that is the last predecessor, or if
-         we cannot merge a copy of BB with PRED.  */
-      if (single_pred_p (bb)
-          || !single_succ_p (pred)
+      /* Do not duplicate BB into PRED if we cannot merge a copy of BB
+         with PRED.  */
+      if (!single_succ_p (pred)
           || e->flags & EDGE_COMPLEX
           || pred->index < NUM_FIXED_BLOCKS
           || (JUMP_P (BB_END (pred)) && !simplejump_p (BB_END (pred)))
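Assuming the checks above pass for every predecessor edge, the
transformed shape of the sketch interpreter is roughly the following
(hand-written illustration, not compiler output; the pass works on RTL
basic blocks, not on source).  The patch matters when the original
dispatch block cannot in fact be merged away: the removed single_pred_p
shortcuts skipped the copy into the last remaining predecessor on the
assumption that cleanup_cfg would merge the original block into it,
which does not always hold.

    /* Hand-written sketch of the result: each handler ends in its own
       copy of the dispatch tail, and the original shared dispatch
       block, having lost all predecessors, is deleted by cleanup_cfg.  */
    static int
    run_duplicated (const unsigned char *code)
    {
      static void *table[] = { &&op_add, &&op_sub, &&op_halt };
      int acc = 0;

      goto *table[*code++];

     op_add:
      acc += 1;
      goto *table[*code++];   /* private copy of the dispatch tail */

     op_sub:
      acc -= 1;
      goto *table[*code++];   /* private copy of the dispatch tail */

     op_halt:
      return acc;
    }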