author     Richard Biener <rguenther@suse.de>  2023-12-13 08:45:58 +0100
committer  Richard Biener <rguenther@suse.de>  2024-05-08 12:56:41 +0200
commit     25add4b65a3bac262685d290a4dc93884a022576 (patch)
tree       22aa53929c0fe96b692cf83eda1866121027f082 /gcc
parent     7baefcb0a358a47a7e2340432d49f29db798a200 (diff)
tree-optimization/112991 - re-do PR112961 fix
The following does away with the fake edge adding as in the original
PR112961 fix and instead exposes handling of entry PHIs as an additional
parameter of the region VN run.

	PR tree-optimization/112991
	PR tree-optimization/112961
	* tree-ssa-sccvn.h (do_rpo_vn): Add skip_entry_phis argument.
	* tree-ssa-sccvn.cc (do_rpo_vn): Likewise.
	(do_rpo_vn_1): Likewise, merge with auto-processing.
	(run_rpo_vn): Adjust.
	(pass_fre::execute): Likewise.
	* tree-if-conv.cc (tree_if_conversion): Revert last change.
	Value-number latch block but disable value-numbering of entry PHIs.
	* tree-ssa-uninit.cc (execute_early_warn_uninitialized): Adjust.
	* gcc.dg/torture/pr112991.c: New testcase.
	* g++.dg/vect/pr112961.cc: Likewise.

(cherry picked from commit 93db32a4146afd2a6d90410691351a56768167c9)
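For illustration, the reworked region VN entry point is used as in the
tree-if-conv.cc hunk further below; the following is only a sketch of that
call site, where loop, exit_bbs and todo are the if-conversion locals set
up in tree_if_conversion:

    /* Value-number only the if-converted loop body.  Passing
       skip_entry_phis = true forces the PHI nodes in the region entry
       block (the loop header) to VARYING so the entry PHIs stay in sync
       with the non-if-converted copy of the loop.  */
    bitmap exit_bbs = BITMAP_ALLOC (NULL);
    for (edge exit : get_loop_exit_edges (loop))
      bitmap_set_bit (exit_bbs, exit->dest->index);
    todo |= do_rpo_vn (cfun, loop_preheader_edge (loop), exit_bbs,
                       /* iterate */ false, /* eliminate */ true,
                       /* skip_entry_phis */ true);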
Diffstat (limited to 'gcc')
-rw-r--r--  gcc/testsuite/g++.dg/vect/pr112961.cc    17
-rw-r--r--  gcc/testsuite/gcc.dg/torture/pr112991.c  21
-rw-r--r--  gcc/tree-if-conv.cc                      18
-rw-r--r--  gcc/tree-ssa-sccvn.cc                    24
-rw-r--r--  gcc/tree-ssa-sccvn.h                      1
-rw-r--r--  gcc/tree-ssa-uninit.cc                    2
6 files changed, 64 insertions, 19 deletions
diff --git a/gcc/testsuite/g++.dg/vect/pr112961.cc b/gcc/testsuite/g++.dg/vect/pr112961.cc
new file mode 100644
index 0000000..52759e1
--- /dev/null
+++ b/gcc/testsuite/g++.dg/vect/pr112961.cc
@@ -0,0 +1,17 @@
+// { dg-do compile }
+// { dg-require-effective-target vect_int }
+
+inline const int& maxx (const int& a, const int &b)
+{
+ return a > b ? a : b;
+}
+
+int foo(int *a)
+{
+ int max = 0;
+ for (int i = 0; i < 1024; ++i)
+ max = maxx(max, a[i]);
+ return max;
+}
+
+// { dg-final { scan-tree-dump "LOOP VECTORIZED" "vect" { xfail vect_no_int_min_max } } }
diff --git a/gcc/testsuite/gcc.dg/torture/pr112991.c b/gcc/testsuite/gcc.dg/torture/pr112991.c
new file mode 100644
index 0000000..aace985
--- /dev/null
+++ b/gcc/testsuite/gcc.dg/torture/pr112991.c
@@ -0,0 +1,21 @@
+/* { dg-do compile } */
+
+typedef struct {
+ unsigned links[2];
+} RMF_unit;
+long RMF_recurseListsBound_count;
+int RMF_recurseListsBound_tbl, RMF_recurseListsBound_list_head_1;
+unsigned RMF_recurseListsBound_list_head_0;
+void RMF_recurseListsBound() {
+ int list_count = RMF_recurseListsBound_list_head_1;
+ long link = RMF_recurseListsBound_list_head_0;
+ for (; RMF_recurseListsBound_count;) {
+ long next_link =
+ ((RMF_unit *)&RMF_recurseListsBound_tbl)[link >> 2].links[0];
+ if (link)
+ --RMF_recurseListsBound_count;
+ link = next_link;
+ }
+ while (list_count)
+ ;
+}
diff --git a/gcc/tree-if-conv.cc b/gcc/tree-if-conv.cc
index fddc4a8..b1dbb87 100644
--- a/gcc/tree-if-conv.cc
+++ b/gcc/tree-if-conv.cc
@@ -3697,21 +3697,21 @@ tree_if_conversion (class loop *loop, vec<gimple *> *preds)
combine_blocks (loop);
}
- /* Perform local CSE, this esp. helps the vectorizer analysis if loads
- and stores are involved. CSE only the loop body, not the entry
- PHIs, those are to be kept in sync with the non-if-converted copy.
- ??? We'll still keep dead stores though. */
- exit_bbs = BITMAP_ALLOC (NULL);
- bitmap_set_bit (exit_bbs, single_exit (loop)->dest->index);
- bitmap_set_bit (exit_bbs, loop->latch->index);
-
std::pair <tree, tree> *name_pair;
unsigned ssa_names_idx;
FOR_EACH_VEC_ELT (redundant_ssa_names, ssa_names_idx, name_pair)
replace_uses_by (name_pair->first, name_pair->second);
redundant_ssa_names.release ();
- todo |= do_rpo_vn (cfun, loop_preheader_edge (loop), exit_bbs);
+ /* Perform local CSE, this esp. helps the vectorizer analysis if loads
+ and stores are involved. CSE only the loop body, not the entry
+ PHIs, those are to be kept in sync with the non-if-converted copy.
+ ??? We'll still keep dead stores though. */
+ exit_bbs = BITMAP_ALLOC (NULL);
+ for (edge exit : get_loop_exit_edges (loop))
+ bitmap_set_bit (exit_bbs, exit->dest->index);
+ todo |= do_rpo_vn (cfun, loop_preheader_edge (loop), exit_bbs,
+ false, true, true);
/* Delete dead predicate computations. */
ifcvt_local_dce (loop);
diff --git a/gcc/tree-ssa-sccvn.cc b/gcc/tree-ssa-sccvn.cc
index fe4fc25..fa1d8d9 100644
--- a/gcc/tree-ssa-sccvn.cc
+++ b/gcc/tree-ssa-sccvn.cc
@@ -7535,12 +7535,13 @@ eliminate_with_rpo_vn (bitmap inserted_exprs)
static unsigned
do_rpo_vn_1 (function *fn, edge entry, bitmap exit_bbs,
- bool iterate, bool eliminate, vn_lookup_kind kind);
+ bool iterate, bool eliminate, bool skip_entry_phis,
+ vn_lookup_kind kind);
void
run_rpo_vn (vn_lookup_kind kind)
{
- do_rpo_vn_1 (cfun, NULL, NULL, true, false, kind);
+ do_rpo_vn_1 (cfun, NULL, NULL, true, false, false, kind);
/* ??? Prune requirement of these. */
constant_to_value_id = new hash_table<vn_constant_hasher> (23);
@@ -8237,11 +8238,13 @@ do_unwind (unwind_state *to, rpo_elim &avail)
/* Do VN on a SEME region specified by ENTRY and EXIT_BBS in FN.
If ITERATE is true then treat backedges optimistically as not
executed and iterate. If ELIMINATE is true then perform
- elimination, otherwise leave that to the caller. */
+ elimination, otherwise leave that to the caller. If SKIP_ENTRY_PHIS
+ is true then force PHI nodes in ENTRY->dest to VARYING. */
static unsigned
do_rpo_vn_1 (function *fn, edge entry, bitmap exit_bbs,
- bool iterate, bool eliminate, vn_lookup_kind kind)
+ bool iterate, bool eliminate, bool skip_entry_phis,
+ vn_lookup_kind kind)
{
unsigned todo = 0;
default_vn_walk_kind = kind;
@@ -8282,10 +8285,10 @@ do_rpo_vn_1 (function *fn, edge entry, bitmap exit_bbs,
if (e != entry
&& !(e->flags & EDGE_DFS_BACK))
break;
- bool skip_entry_phis = e != NULL;
- if (skip_entry_phis && dump_file && (dump_flags & TDF_DETAILS))
+ if (e != NULL && dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, "Region does not contain all edges into "
"the entry block, skipping its PHIs.\n");
+ skip_entry_phis |= e != NULL;
int *bb_to_rpo = XNEWVEC (int, last_basic_block_for_fn (fn));
for (int i = 0; i < n; ++i)
@@ -8663,14 +8666,17 @@ do_rpo_vn_1 (function *fn, edge entry, bitmap exit_bbs,
If ITERATE is true then treat backedges optimistically as not
executed and iterate. If ELIMINATE is true then perform
elimination, otherwise leave that to the caller.
+ If SKIP_ENTRY_PHIS is true then force PHI nodes in ENTRY->dest to VARYING.
KIND specifies the amount of work done for handling memory operations. */
unsigned
do_rpo_vn (function *fn, edge entry, bitmap exit_bbs,
- bool iterate, bool eliminate, vn_lookup_kind kind)
+ bool iterate, bool eliminate, bool skip_entry_phis,
+ vn_lookup_kind kind)
{
auto_timevar tv (TV_TREE_RPO_VN);
- unsigned todo = do_rpo_vn_1 (fn, entry, exit_bbs, iterate, eliminate, kind);
+ unsigned todo = do_rpo_vn_1 (fn, entry, exit_bbs, iterate, eliminate,
+ skip_entry_phis, kind);
free_rpo_vn ();
return todo;
}
@@ -8726,7 +8732,7 @@ pass_fre::execute (function *fun)
if (iterate_p)
loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
- todo = do_rpo_vn_1 (fun, NULL, NULL, iterate_p, true, VN_WALKREWRITE);
+ todo = do_rpo_vn_1 (fun, NULL, NULL, iterate_p, true, false, VN_WALKREWRITE);
free_rpo_vn ();
if (iterate_p)
diff --git a/gcc/tree-ssa-sccvn.h b/gcc/tree-ssa-sccvn.h
index 79e89cc..d0000f5 100644
--- a/gcc/tree-ssa-sccvn.h
+++ b/gcc/tree-ssa-sccvn.h
@@ -301,6 +301,7 @@ tree vn_nary_simplify (vn_nary_op_t);
unsigned do_rpo_vn (function *, edge, bitmap,
/* iterate */ bool = false,
/* eliminate */ bool = true,
+ /* skip_entry_phis */ bool = false,
vn_lookup_kind = VN_WALKREWRITE);
/* Private interface for PRE. */
diff --git a/gcc/tree-ssa-uninit.cc b/gcc/tree-ssa-uninit.cc
index 9f720ae..70c5843 100644
--- a/gcc/tree-ssa-uninit.cc
+++ b/gcc/tree-ssa-uninit.cc
@@ -1475,7 +1475,7 @@ execute_early_warn_uninitialized (struct function *fun)
elimination to compute edge reachability. Don't bother when
we only warn for unconditionally executed code though. */
if (!optimize)
- do_rpo_vn (fun, NULL, NULL, false, false, VN_NOWALK);
+ do_rpo_vn (fun, NULL, NULL, false, false, false, VN_NOWALK);
else
set_all_edges_as_executable (fun);