author     Richard Biener <rguenther@suse.de>    2016-04-18 11:38:05 +0000
committer  Richard Biener <rguenth@gcc.gnu.org>  2016-04-18 11:38:05 +0000
commit     65833da69d1cb0b57572c2736781d335fced397c (patch)
tree       9cfb693518ae09e34852ac3d54dcf698cfa38b24 /gcc
parent     b269f47786ffff084e874cd09ac8d87f895a1db6 (diff)
tree-ssa-pre.c (postorder, [...]): Make locals ...
2016-04-18  Richard Biener  <rguenther@suse.de>

	* tree-ssa-pre.c (postorder, postorder_num): Make locals ...
	(compute_antic): ... here.  For partial antic use regular
	postorder and scrap iteration.
	(compute_partial_antic_aux): Remove unused return value.
	(init_pre): Do not allocate postorder.
	(fini_pre): Do not free postorder.

From-SVN: r235130
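Why a single pass can suffice here: a backward all-paths problem like ANTIC is solved in one sweep as long as every block is processed after all of its successors, which is what a reverse postorder on the inverted CFG (or, with back edges ignored, plain reverse postorder as used for partial antic) provides. The following is a minimal standalone sketch, not GCC code: the diamond CFG, the bitmask encoding of expression sets, and the simplified transfer function are all invented for illustration.

#include <stdio.h>

#define NBLOCKS 4

/* Successor lists of a small diamond CFG: 0 -> {1,2}, 1 -> {3}, 2 -> {3},
   block 3 is the exit.  */
static const int nsucc[NBLOCKS]   = { 2, 1, 1, 0 };
static const int succ[NBLOCKS][2] = { { 1, 2 }, { 3, 0 }, { 3, 0 }, { 0, 0 } };

/* Expressions locally anticipatable in each block, as bitmasks
   (made-up values).  */
static const unsigned local_antic[NBLOCKS] = { 0x1, 0x6, 0x2, 0x8 };

int
main (void)
{
  unsigned antic_in[NBLOCKS] = { 0, 0, 0, 0 };

  /* A visit order in which every block comes after all of its
     successors; for this acyclic diamond that is 3, 2, 1, 0.  In the
     patch such an order comes from inverted_post_order_compute or
     pre_and_rev_post_order_compute.  */
  static const int order[NBLOCKS] = { 3, 2, 1, 0 };

  for (int i = 0; i < NBLOCKS; i++)
    {
      int b = order[i];
      unsigned out = 0;

      if (nsucc[b] > 0)
	{
	  /* ANTIC_OUT is the intersection of the successors' ANTIC_IN;
	     they are already final because of the visit order.  */
	  out = ~0u;
	  for (int s = 0; s < nsucc[b]; s++)
	    out &= antic_in[succ[b][s]];
	}

      /* Simplified transfer function: no kills, just union in the
	 locally anticipatable expressions.  */
      antic_in[b] = out | local_antic[b];
      printf ("block %d: ANTIC_IN = 0x%x\n", b, antic_in[b]);
    }
  return 0;
}

Real ANTIC also subtracts values killed in the block, and the full ANTIC computation in the patch still keeps its while (changed) loop because loop back edges feed values around; only the partial-antic walk, which explicitly ignores back edges, can drop the worklist iteration as shown above.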
Diffstat (limited to 'gcc')
-rw-r--r--  gcc/ChangeLog        9
-rw-r--r--  gcc/tree-ssa-pre.c  72
2 files changed, 33 insertions, 48 deletions
diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index d6b0861..51bc8d7 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,5 +1,14 @@
2016-04-18 Richard Biener <rguenther@suse.de>
+ * tree-ssa-pre.c (postorder, postorder_num): Make locals ...
+ (compute_antic): ... here. For partial antic use regular
+ postorder and scrap iteration.
+ (compute_partial_antic_aux): Remove unused return value.
+ (init_pre): Do not allocate postorder.
+ (fini_pre): Do not free postorder.
+
+2016-04-18 Richard Biener <rguenther@suse.de>
+
PR middle-end/37870
* expmed.c (extract_bit_field_1): Remove broken case
using a wider MODE_INT mode.
diff --git a/gcc/tree-ssa-pre.c b/gcc/tree-ssa-pre.c
index f1a3130..c0e3b80 100644
--- a/gcc/tree-ssa-pre.c
+++ b/gcc/tree-ssa-pre.c
@@ -432,10 +432,6 @@ typedef struct bb_bitmap_sets
#define BB_LIVE_VOP_ON_EXIT(BB) ((bb_value_sets_t) ((BB)->aux))->vop_on_exit
-/* Basic block list in postorder. */
-static int *postorder;
-static int postorder_num;
-
/* This structure is used to keep track of statistics on what
optimization PRE was able to perform. */
static struct
@@ -2209,11 +2205,10 @@ compute_antic_aux (basic_block block, bool block_has_abnormal_pred_edge)
- ANTIC_IN[BLOCK])
*/
-static bool
+static void
compute_partial_antic_aux (basic_block block,
bool block_has_abnormal_pred_edge)
{
- bool changed = false;
bitmap_set_t old_PA_IN;
bitmap_set_t PA_OUT;
edge e;
@@ -2312,9 +2307,6 @@ compute_partial_antic_aux (basic_block block,
dependent_clean (PA_IN (block), ANTIC_IN (block));
- if (!bitmap_set_equal (old_PA_IN, PA_IN (block)))
- changed = true;
-
maybe_dump_sets:
if (dump_file && (dump_flags & TDF_DETAILS))
{
@@ -2327,7 +2319,6 @@ compute_partial_antic_aux (basic_block block,
bitmap_set_free (old_PA_IN);
if (PA_OUT)
bitmap_set_free (PA_OUT);
- return changed;
}
/* Compute ANTIC and partial ANTIC sets. */
@@ -2349,23 +2340,33 @@ compute_antic (void)
FOR_ALL_BB_FN (block, cfun)
{
+ BB_VISITED (block) = 0;
+
FOR_EACH_EDGE (e, ei, block->preds)
if (e->flags & EDGE_ABNORMAL)
{
bitmap_set_bit (has_abnormal_preds, block->index);
+
+ /* We also anticipate nothing. */
+ BB_VISITED (block) = 1;
break;
}
- BB_VISITED (block) = 0;
-
/* While we are here, give empty ANTIC_IN sets to each block. */
ANTIC_IN (block) = bitmap_set_new ();
- PA_IN (block) = bitmap_set_new ();
+ if (do_partial_partial)
+ PA_IN (block) = bitmap_set_new ();
}
/* At the exit block we anticipate nothing. */
BB_VISITED (EXIT_BLOCK_PTR_FOR_FN (cfun)) = 1;
+ /* For ANTIC computation we need a postorder that also guarantees that
+ a block with a single successor is visited after its successor.
+ RPO on the inverted CFG has this property. */
+ int *postorder = XNEWVEC (int, n_basic_blocks_for_fn (cfun));
+ int postorder_num = inverted_post_order_compute (postorder);
+
sbitmap worklist = sbitmap_alloc (last_basic_block_for_fn (cfun) + 1);
bitmap_ones (worklist);
while (changed)
@@ -2403,39 +2404,21 @@ compute_antic (void)
if (do_partial_partial)
{
- bitmap_ones (worklist);
- num_iterations = 0;
- changed = true;
- while (changed)
+ /* For partial antic we ignore backedges and thus we do not need
+ to perform any iteration when we process blocks in postorder. */
+ postorder_num = pre_and_rev_post_order_compute (NULL, postorder, false);
+ for (i = postorder_num - 1 ; i >= 0; i--)
{
- if (dump_file && (dump_flags & TDF_DETAILS))
- fprintf (dump_file, "Starting iteration %d\n", num_iterations);
- num_iterations++;
- changed = false;
- for (i = postorder_num - 1 ; i >= 0; i--)
- {
- if (bitmap_bit_p (worklist, postorder[i]))
- {
- basic_block block = BASIC_BLOCK_FOR_FN (cfun, postorder[i]);
- bitmap_clear_bit (worklist, block->index);
- if (compute_partial_antic_aux (block,
- bitmap_bit_p (has_abnormal_preds,
- block->index)))
- {
- FOR_EACH_EDGE (e, ei, block->preds)
- bitmap_set_bit (worklist, e->src->index);
- changed = true;
- }
- }
- }
- /* Theoretically possible, but *highly* unlikely. */
- gcc_checking_assert (num_iterations < 500);
+ basic_block block = BASIC_BLOCK_FOR_FN (cfun, postorder[i]);
+ compute_partial_antic_aux (block,
+ bitmap_bit_p (has_abnormal_preds,
+ block->index));
}
- statistics_histogram_event (cfun, "compute_partial_antic iterations",
- num_iterations);
}
+
sbitmap_free (has_abnormal_preds);
sbitmap_free (worklist);
+ free (postorder);
}
@@ -4694,12 +4677,6 @@ init_pre (void)
connect_infinite_loops_to_exit ();
memset (&pre_stats, 0, sizeof (pre_stats));
- /* For ANTIC computation we need a postorder that also guarantees that
- a block with a single successor is visited after its successor.
- RPO on the inverted CFG has this property. */
- postorder = XNEWVEC (int, n_basic_blocks_for_fn (cfun));
- postorder_num = inverted_post_order_compute (postorder);
-
alloc_aux_for_blocks (sizeof (struct bb_bitmap_sets));
calculate_dominance_info (CDI_DOMINATORS);
@@ -4722,7 +4699,6 @@ init_pre (void)
static void
fini_pre ()
{
- free (postorder);
value_expressions.release ();
BITMAP_FREE (inserted_exprs);
bitmap_obstack_release (&grand_bitmap_obstack);