about summary refs log tree commit diff
path: root/gcc
diff options
context:
space:
mode:
authorRichard Biener <rguenther@suse.de>2021-09-27 12:01:38 +0200
committerRichard Biener <rguenther@suse.de>2021-09-28 12:31:46 +0200
commit5b8b1522e04adc20980f396571be1929a32d148a (patch)
tree0d646656faaf9ab4b3f1842631747a7275256915 /gcc
parent4f07769057c45ec9e751ab1c23e0fe4750102840 (diff)
downloadgcc-5b8b1522e04adc20980f396571be1929a32d148a.zip
gcc-5b8b1522e04adc20980f396571be1929a32d148a.tar.gz
gcc-5b8b1522e04adc20980f396571be1929a32d148a.tar.bz2
tree-optimization/100112 - VN last_vuse and redundant store elimination
This avoids the last_vuse optimization hindering redundant store elimination by always also recording the original VUSE that was in effect on the load. In stage3 gcc/*.o we have 3182752 times recorded a single entry and 903409 times two entries (that's ~20% overhead). With just recording a single entry the number of hashtable lookups done when walking the vuse->vdef links to find an earlier access is 28961618. When recording the second entry this makes us find that earlier for downstream redundant accesses, reducing the number of hashtable lookups to 25401052 (that's a ~10% reduction). 2021-09-27 Richard Biener <rguenther@suse.de> PR tree-optimization/100112 * tree-ssa-sccvn.c (visit_reference_op_load): Record the reference into the hashtable twice in case last_vuse is different from the original vuse on the stmt. * gcc.dg/tree-ssa/ssa-fre-95.c: New testcase.
Diffstat (limited to 'gcc')
-rw-r--r--gcc/testsuite/gcc.dg/tree-ssa/ssa-fre-95.c25
-rw-r--r--gcc/tree-ssa-sccvn.c17
2 files changed, 38 insertions, 4 deletions
diff --git a/gcc/testsuite/gcc.dg/tree-ssa/ssa-fre-95.c b/gcc/testsuite/gcc.dg/tree-ssa/ssa-fre-95.c
new file mode 100644
index 0000000..b0936be
--- /dev/null
+++ b/gcc/testsuite/gcc.dg/tree-ssa/ssa-fre-95.c
@@ -0,0 +1,25 @@
+/* PR100112 and dups. */
+/* { dg-do compile } */
+/* { dg-options "-O2 -fdump-tree-fre1-details -fdump-tree-optimized" } */
+
+int *c, *b;
+void foo()
+{
+ int *tem = b;
+ *tem = 0;
+ int *footem = c;
+ c = footem;
+}
+
+void bar()
+{
+ int *tem = b;
+ int *bartem = c;
+ *tem = 0;
+ c = bartem;
+}
+
+/* We should elide the redundant store in foo, in bar it is not redundant since
+ the *tem = 0 store might alias. */
+/* { dg-final { scan-tree-dump "Deleted redundant store c = footem" "fre1" } } */
+/* { dg-final { scan-tree-dump "c = bartem" "optimized" } } */
diff --git a/gcc/tree-ssa-sccvn.c b/gcc/tree-ssa-sccvn.c
index e8b1c39..416a525 100644
--- a/gcc/tree-ssa-sccvn.c
+++ b/gcc/tree-ssa-sccvn.c
@@ -5125,13 +5125,12 @@ static bool
visit_reference_op_load (tree lhs, tree op, gimple *stmt)
{
bool changed = false;
- tree last_vuse;
tree result;
vn_reference_t res;
- last_vuse = gimple_vuse (stmt);
- result = vn_reference_lookup (op, gimple_vuse (stmt),
- default_vn_walk_kind, &res, true, &last_vuse);
+ tree vuse = gimple_vuse (stmt);
+ tree last_vuse = vuse;
+ result = vn_reference_lookup (op, vuse, default_vn_walk_kind, &res, true, &last_vuse);
/* We handle type-punning through unions by value-numbering based
on offset and size of the access. Be prepared to handle a
@@ -5174,6 +5173,16 @@ visit_reference_op_load (tree lhs, tree op, gimple *stmt)
{
changed = set_ssa_val_to (lhs, lhs);
vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
+ if (vuse && SSA_VAL (last_vuse) != SSA_VAL (vuse))
+ {
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ {
+ fprintf (dump_file, "Using extra use virtual operand ");
+ print_generic_expr (dump_file, last_vuse);
+ fprintf (dump_file, "\n");
+ }
+ vn_reference_insert (op, lhs, vuse, NULL_TREE);
+ }
}
return changed;