author     Richard Biener <rguenther@suse.de>     2016-04-14 07:30:53 +0000
committer  Richard Biener <rguenth@gcc.gnu.org>   2016-04-14 07:30:53 +0000
commit     6db61d6f3961365a6efb26af9366da5baeb74275 (patch)
tree       9e98dff70f5a37f1fd638289b140d7e1595ed361
parent     16f87bed36630c18ad4af8ac7456d04ee14f7830 (diff)
re PR tree-optimization/70623 (ICE in compute_antic at -O2)
2016-04-14  Richard Biener  <rguenther@suse.de>

	PR tree-optimization/70623
	* tree-ssa-pre.c (changed_blocks): Make global ...
	(compute_antic): ... local here.  Move and fix worklist
	handling here.  Do not clear EDGE_DFS_BACK or call
	mark_dfs_back_edges.
	(compute_antic_aux): Add dumping for MAX assumed succs.  Remove
	worklist handling, dump when ANTIC_IN changed.
	(compute_partial_antic_aux): Remove worklist handling.
	(init_pre): Do not compute post dominators.  Add a comment about
	the CFG order chosen.
	(fini_pre): Do not free post dominators.

	* gcc.dg/torture/pr70623.c: New testcase.
	* gcc.dg/torture/pr70623-2.c: Likewise.

From-SVN: r234970
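The core of the fix is how compute_antic now drives the fixpoint iteration: blocks are visited in reverse postorder of the inverted CFG, a block's worklist bit is cleared before the block is processed, and its predecessors are re-queued only when its ANTIC_IN set actually changed.  Below is a minimal, self-contained sketch of that worklist scheme, assuming a toy four-block CFG and a placeholder recompute_block function; all names and the graph are invented for illustration and are not part of the GCC sources.

/* Hypothetical sketch of a worklist-driven backward dataflow fixpoint:
   visit blocks in a fixed order, clear a block's worklist bit before
   processing it, and re-queue only its predecessors when its value
   changed.  Not the GCC implementation.  */

#include <stdbool.h>
#include <stdio.h>

#define NUM_BLOCKS 4

/* Toy per-block value; in GCC these are the ANTIC_IN bitmap sets.  */
static int antic_in[NUM_BLOCKS];

/* preds[b] lists the predecessors of block b, terminated by -1.  */
static const int preds[NUM_BLOCKS][NUM_BLOCKS] = {
  { -1 },        /* block 0: entry, no predecessors */
  { 0, 3, -1 },  /* block 1: loop header, preds 0 and 3 (back edge) */
  { 1, -1 },     /* block 2 */
  { 2, -1 },     /* block 3: latch */
};

/* Fixed iteration order, standing in for the reverse postorder of the
   inverted CFG that compute_antic uses.  */
static const int order[NUM_BLOCKS] = { 0, 1, 2, 3 };

/* Stand-in for compute_antic_aux: recompute the block's value and report
   whether it changed.  Here it simply converges toward a constant so the
   sketch terminates on its own.  */
static bool
recompute_block (int block)
{
  int new_val = antic_in[block] < 3 ? antic_in[block] + 1 : antic_in[block];
  bool changed = new_val != antic_in[block];
  antic_in[block] = new_val;
  return changed;
}

int
main (void)
{
  bool worklist[NUM_BLOCKS];
  bool changed = true;
  int iterations = 0;

  /* Seed the worklist with every block, like bitmap_ones (worklist).  */
  for (int i = 0; i < NUM_BLOCKS; i++)
    worklist[i] = true;

  while (changed)
    {
      changed = false;
      iterations++;
      for (int i = 0; i < NUM_BLOCKS; i++)
	{
	  int block = order[i];
	  if (!worklist[block])
	    continue;
	  /* Clear the bit before processing the block ...  */
	  worklist[block] = false;
	  if (recompute_block (block))
	    {
	      /* ... and re-queue the predecessors only on a change.  */
	      for (int j = 0; preds[block][j] != -1; j++)
		worklist[preds[block][j]] = true;
	      changed = true;
	    }
	}
    }

  printf ("converged after %d iterations\n", iterations);
  return 0;
}

The design point the patch makes is that re-queueing predecessors unconditionally (as the old changed_blocks handling effectively did) wastes work and, combined with the previous ordering, could fail to converge quickly; clearing the processed block's bit and re-queueing only on change keeps the iteration count bounded by the structure of the CFG.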
-rw-r--r--  gcc/ChangeLog                              13
-rw-r--r--  gcc/testsuite/ChangeLog                     6
-rw-r--r--  gcc/testsuite/gcc.dg/torture/pr70623-2.c   41
-rw-r--r--  gcc/testsuite/gcc.dg/torture/pr70623.c     32
-rw-r--r--  gcc/tree-ssa-pre.c                         97
5 files changed, 140 insertions, 49 deletions
diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index a3c6c12..495716c 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,3 +1,16 @@
+2016-04-14 Richard Biener <rguenther@suse.de>
+
+ PR tree-optimization/70623
+ * tree-ssa-pre.c (changed_blocks): Make global ...
+ (compute_antic): ... local here. Move and fix worklist
+ handling here. Do not clear EDGE_DFS_BACK or call mark_dfs_back_edges.
+ (compute_antic_aux): Add dumping for MAX assumed succs. Remove
+ worklist handling, dump when ANTIC_IN changed.
+ (compute_partial_antic_aux): Remove worklist handling.
+ (init_pre): Do not compute post dominators. Add a comment about
+ the CFG order chosen.
+ (fini_pre): Do not free post dominators.
+
2016-04-13 Martin Sebor <msebor@redhat.com>
PR c++/69517
diff --git a/gcc/testsuite/ChangeLog b/gcc/testsuite/ChangeLog
index b07b8b2..c2a4c93 100644
--- a/gcc/testsuite/ChangeLog
+++ b/gcc/testsuite/ChangeLog
@@ -1,3 +1,9 @@
+2016-04-14 Richard Biener <rguenther@suse.de>
+
+ PR tree-optimization/70623
+ * gcc.dg/torture/pr70623.c: New testcase.
+ * gcc.dg/torture/pr70623-2.c: Likewise.
+
2016-04-13 Martin Sebor <msebor@redhat.com>
PR c++/69517
diff --git a/gcc/testsuite/gcc.dg/torture/pr70623-2.c b/gcc/testsuite/gcc.dg/torture/pr70623-2.c
new file mode 100644
index 0000000..8e8dc96
--- /dev/null
+++ b/gcc/testsuite/gcc.dg/torture/pr70623-2.c
@@ -0,0 +1,41 @@
+/* { dg-do compile } */
+
+int b8, il, rc, nm;
+
+void
+h9(void)
+{
+ int *av = &b8;
+
+is:
+ for (;;) {
+ int vj, wk;
+ int *m9 = &b8;
+
+ if (*m9 == *av) {
+ if (il == 0)
+ goto is;
+
+di:
+ continue;
+ for (vj = 0; vj < 1; ++vj) {
+ goto di;
+kz:
+ ;
+ }
+ }
+
+ for (rc = 0; rc < 2; ++rc) {
+ int bc = rc ? rc : nm;
+ int ud = bc ? (*av ? 0 : rc) : 1;
+
+ if (ud != 0)
+ if (*av != 0)
+ goto kz;
+ }
+
+ for (wk = 0; wk < 3; ++wk)
+ ++(*av);
+ av = 0;
+ }
+}
diff --git a/gcc/testsuite/gcc.dg/torture/pr70623.c b/gcc/testsuite/gcc.dg/torture/pr70623.c
new file mode 100644
index 0000000..37f2712
--- /dev/null
+++ b/gcc/testsuite/gcc.dg/torture/pr70623.c
@@ -0,0 +1,32 @@
+/* { dg-do compile } */
+/* { dg-additional-options "-w" } */
+
+int nm;
+int *av;
+
+void
+h9(void)
+{
+ for (;;) {
+ int wk, rc;
+ int **ptr_10 = &av;
+ if (*av != 0) {
+ }
+u4:
+ wk = 0;
+ for (rc = 0; rc < 3; ++rc) {
+ int bc = (rc ? rc : nm);
+ int ud = bc ? (*av ? 0 : rc) : 1;
+ if (ud != 0) {
+ if (*av != 0)
+ goto u4;
+ for (;;) {
+ }
+ }
+ }
+ while (wk < 3) {
+ av = **ptr_10;
+ ++wk;
+ }
+ }
+}
diff --git a/gcc/tree-ssa-pre.c b/gcc/tree-ssa-pre.c
index b2d63acf..f1a3130 100644
--- a/gcc/tree-ssa-pre.c
+++ b/gcc/tree-ssa-pre.c
@@ -2062,11 +2062,6 @@ prune_clobbered_mems (bitmap_set_t set, basic_block block)
static sbitmap has_abnormal_preds;
-/* List of blocks that may have changed during ANTIC computation and
- thus need to be iterated over. */
-
-static sbitmap changed_blocks;
-
/* Compute the ANTIC set for BLOCK.
If succs(BLOCK) > 1 then
@@ -2125,6 +2120,16 @@ compute_antic_aux (basic_block block, bool block_has_abnormal_pred_edge)
first = e->dest;
else if (BB_VISITED (e->dest))
worklist.quick_push (e->dest);
+ else
+ {
+ /* Unvisited successors get their ANTIC_IN replaced by the
+ maximal set to arrive at a maximum ANTIC_IN solution.
+ We can ignore them in the intersection operation and thus
+ need not explicitly represent that maximum solution. */
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ fprintf (dump_file, "ANTIC_IN is MAX on %d->%d\n",
+ e->src->index, e->dest->index);
+ }
}
/* Of multiple successors we have to have visited one already
@@ -2167,14 +2172,7 @@ compute_antic_aux (basic_block block, bool block_has_abnormal_pred_edge)
clean (ANTIC_IN (block));
if (!bitmap_set_equal (old, ANTIC_IN (block)))
- {
- changed = true;
- bitmap_set_bit (changed_blocks, block->index);
- FOR_EACH_EDGE (e, ei, block->preds)
- bitmap_set_bit (changed_blocks, e->src->index);
- }
- else
- bitmap_clear_bit (changed_blocks, block->index);
+ changed = true;
maybe_dump_sets:
if (dump_file && (dump_flags & TDF_DETAILS))
@@ -2182,6 +2180,8 @@ compute_antic_aux (basic_block block, bool block_has_abnormal_pred_edge)
if (ANTIC_OUT)
print_bitmap_set (dump_file, ANTIC_OUT, "ANTIC_OUT", block->index);
+ if (changed)
+ fprintf (dump_file, "[changed] ");
print_bitmap_set (dump_file, ANTIC_IN (block), "ANTIC_IN",
block->index);
@@ -2313,14 +2313,7 @@ compute_partial_antic_aux (basic_block block,
dependent_clean (PA_IN (block), ANTIC_IN (block));
if (!bitmap_set_equal (old_PA_IN, PA_IN (block)))
- {
- changed = true;
- bitmap_set_bit (changed_blocks, block->index);
- FOR_EACH_EDGE (e, ei, block->preds)
- bitmap_set_bit (changed_blocks, e->src->index);
- }
- else
- bitmap_clear_bit (changed_blocks, block->index);
+ changed = true;
maybe_dump_sets:
if (dump_file && (dump_flags & TDF_DETAILS))
@@ -2346,6 +2339,8 @@ compute_antic (void)
int num_iterations = 0;
basic_block block;
int i;
+ edge_iterator ei;
+ edge e;
/* If any predecessor edges are abnormal, we punt, so antic_in is empty.
We pre-build the map of blocks with incoming abnormal edges here. */
@@ -2354,18 +2349,12 @@ compute_antic (void)
FOR_ALL_BB_FN (block, cfun)
{
- edge_iterator ei;
- edge e;
-
FOR_EACH_EDGE (e, ei, block->preds)
- {
- e->flags &= ~EDGE_DFS_BACK;
- if (e->flags & EDGE_ABNORMAL)
- {
- bitmap_set_bit (has_abnormal_preds, block->index);
- break;
- }
- }
+ if (e->flags & EDGE_ABNORMAL)
+ {
+ bitmap_set_bit (has_abnormal_preds, block->index);
+ break;
+ }
BB_VISITED (block) = 0;
@@ -2377,8 +2366,8 @@ compute_antic (void)
/* At the exit block we anticipate nothing. */
BB_VISITED (EXIT_BLOCK_PTR_FOR_FN (cfun)) = 1;
- changed_blocks = sbitmap_alloc (last_basic_block_for_fn (cfun) + 1);
- bitmap_ones (changed_blocks);
+ sbitmap worklist = sbitmap_alloc (last_basic_block_for_fn (cfun) + 1);
+ bitmap_ones (worklist);
while (changed)
{
if (dump_file && (dump_flags & TDF_DETAILS))
@@ -2391,12 +2380,18 @@ compute_antic (void)
changed = false;
for (i = postorder_num - 1; i >= 0; i--)
{
- if (bitmap_bit_p (changed_blocks, postorder[i]))
+ if (bitmap_bit_p (worklist, postorder[i]))
{
basic_block block = BASIC_BLOCK_FOR_FN (cfun, postorder[i]);
- changed |= compute_antic_aux (block,
- bitmap_bit_p (has_abnormal_preds,
- block->index));
+ bitmap_clear_bit (worklist, block->index);
+ if (compute_antic_aux (block,
+ bitmap_bit_p (has_abnormal_preds,
+ block->index)))
+ {
+ FOR_EACH_EDGE (e, ei, block->preds)
+ bitmap_set_bit (worklist, e->src->index);
+ changed = true;
+ }
}
}
/* Theoretically possible, but *highly* unlikely. */
@@ -2408,8 +2403,7 @@ compute_antic (void)
if (do_partial_partial)
{
- bitmap_ones (changed_blocks);
- mark_dfs_back_edges ();
+ bitmap_ones (worklist);
num_iterations = 0;
changed = true;
while (changed)
@@ -2420,13 +2414,18 @@ compute_antic (void)
changed = false;
for (i = postorder_num - 1 ; i >= 0; i--)
{
- if (bitmap_bit_p (changed_blocks, postorder[i]))
+ if (bitmap_bit_p (worklist, postorder[i]))
{
basic_block block = BASIC_BLOCK_FOR_FN (cfun, postorder[i]);
- changed
- |= compute_partial_antic_aux (block,
- bitmap_bit_p (has_abnormal_preds,
- block->index));
+ bitmap_clear_bit (worklist, block->index);
+ if (compute_partial_antic_aux (block,
+ bitmap_bit_p (has_abnormal_preds,
+ block->index)))
+ {
+ FOR_EACH_EDGE (e, ei, block->preds)
+ bitmap_set_bit (worklist, e->src->index);
+ changed = true;
+ }
}
}
/* Theoretically possible, but *highly* unlikely. */
@@ -2436,7 +2435,7 @@ compute_antic (void)
num_iterations);
}
sbitmap_free (has_abnormal_preds);
- sbitmap_free (changed_blocks);
+ sbitmap_free (worklist);
}
@@ -4695,12 +4694,14 @@ init_pre (void)
connect_infinite_loops_to_exit ();
memset (&pre_stats, 0, sizeof (pre_stats));
+ /* For ANTIC computation we need a postorder that also guarantees that
+ a block with a single successor is visited after its successor.
+ RPO on the inverted CFG has this property. */
postorder = XNEWVEC (int, n_basic_blocks_for_fn (cfun));
postorder_num = inverted_post_order_compute (postorder);
alloc_aux_for_blocks (sizeof (struct bb_bitmap_sets));
- calculate_dominance_info (CDI_POST_DOMINATORS);
calculate_dominance_info (CDI_DOMINATORS);
bitmap_obstack_initialize (&grand_bitmap_obstack);
@@ -4734,8 +4735,6 @@ fini_pre ()
name_to_id.release ();
free_aux_for_blocks ();
-
- free_dominance_info (CDI_POST_DOMINATORS);
}
namespace {