path: root/gcc/tree-ssa-sccvn.c
author	Richard Biener <rguenther@suse.de>	2020-05-08 10:24:37 +0200
committer	Richard Biener <rguenther@suse.de>	2020-05-08 13:34:03 +0200
commit	2b42509f8b7bdf0a27a6687a941663380b485416 (patch)
tree	a4fe62b429dae15a1ff1c0725b1af8a28d48ac40 /gcc/tree-ssa-sccvn.c
parent	1595a1cb7bfac8d5a6026d5d6f3a495be0391506 (diff)
Fix availability compute during VN DOM elimination
This fixes an issue with redundant store elimination in FRE/PRE which,
when invoked by the DOM elimination walk, ends up using possibly stale
availability data from the RPO walk.  It also fixes a missed optimization
during valueization of addresses by making sure to use
get_addr_base_and_unit_offset_1 which can valueize and adjusting that to
also valueize ARRAY_REFs low-bound.

2020-05-08  Richard Biener  <rguenther@suse.de>

	* tree-ssa-sccvn.c (rpo_avail): Change type to
	eliminate_dom_walker *.
	(eliminate_with_rpo_vn): Adjust rpo_avail to make vn_valueize
	use the DOM walker availability.
	(vn_reference_fold_indirect): Use get_addr_base_and_unit_offset_1
	with vn_valueize as valueization callback.
	(vn_reference_maybe_forwprop_address): Likewise.
	* tree-dfa.c (get_addr_base_and_unit_offset_1): Also valueize
	array_ref_low_bound.

	* gnat.dg/opt83.adb: New testcase.
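The rpo_avail part of the fix is a plain save-and-restore of a global hook
pointer around the DOM walk, so that vn_valueize consults the walker that is
actually running rather than the stale RPO-walk state.  Below is a minimal,
self-contained sketch of that pattern; the names (avail_source, global_avail,
run_elimination) are illustrative stand-ins, not the GCC types touched by the
diff.

/* Illustrative stand-in for eliminate_dom_walker: something that owns
   availability data and performs a walk which may consult the global
   hook below.  */
struct avail_source
{
  void walk ()
  {
    /* In the real compiler, valueization hooks read *global_avail here.  */
  }
};

/* Plays the role of rpo_avail: a global pointer read by hooks that have
   no other way to reach the currently active walker.  */
static avail_source *global_avail;

unsigned
run_elimination ()
{
  avail_source walker;

  /* Save the previous global, redirect it at the local walker for the
     duration of the walk, and restore it afterwards -- the same shape as
     the eliminate_with_rpo_vn change in the diff below.  */
  avail_source *saved_avail = global_avail;
  global_avail = &walker;
  walker.walk ();
  global_avail = saved_avail;

  return 0;
}

int
main ()
{
  return run_elimination ();
}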
Diffstat (limited to 'gcc/tree-ssa-sccvn.c')
-rw-r--r--  gcc/tree-ssa-sccvn.c  15
1 file changed, 10 insertions(+), 5 deletions(-)
diff --git a/gcc/tree-ssa-sccvn.c b/gcc/tree-ssa-sccvn.c
index 8a4af91..39e9900 100644
--- a/gcc/tree-ssa-sccvn.c
+++ b/gcc/tree-ssa-sccvn.c
@@ -1224,8 +1224,8 @@ vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
/* The only thing we have to do is from &OBJ.foo.bar add the offset
from .foo.bar to the preceding MEM_REF offset and replace the
address with &OBJ. */
- addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
- &addr_offset);
+ addr_base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (op->op0, 0),
+ &addr_offset, vn_valueize);
gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
if (addr_base != TREE_OPERAND (op->op0, 0))
{
@@ -1282,8 +1282,9 @@ vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
poly_int64 addr_offset;

addr = gimple_assign_rhs1 (def_stmt);
- addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
- &addr_offset);
+ addr_base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (addr, 0),
+ &addr_offset,
+ vn_valueize);
/* If that didn't work because the address isn't invariant propagate
the reference tree from the address operation in case the current
dereference isn't offsetted. */
@@ -2419,7 +2420,7 @@ public:
};

/* Global RPO state for access from hooks.  */
-static rpo_elim *rpo_avail;
+static eliminate_dom_walker *rpo_avail;
basic_block vn_context_bb;

/* Return true if BASE1 and BASE2 can be adjusted so they have the
@@ -6559,7 +6560,11 @@ eliminate_with_rpo_vn (bitmap inserted_exprs)
{
eliminate_dom_walker walker (CDI_DOMINATORS, inserted_exprs);

+ eliminate_dom_walker *saved_rpo_avail = rpo_avail;
+ rpo_avail = &walker;
walker.walk (cfun->cfg->x_entry_block_ptr);
+ rpo_avail = saved_rpo_avail;
+
return walker.eliminate_cleanup ();
}