diff options
author | Michael Matz <matz@suse.de> | 2009-10-26 13:00:36 +0000 |
---|---|---|
committer | Michael Matz <matz@gcc.gnu.org> | 2009-10-26 13:00:36 +0000 |
commit | 842809179e1d2fbf0a2af44e6e3f445770613bc6 (patch) | |
tree | fbb6277956a1693b32bea61a71c9fbf0f2321aee /gcc/tree-ssa-alias.c | |
parent | bd02b3a0c9223cecb7ec0367ecae6c76bbcd9b50 (diff) | |
download | gcc-842809179e1d2fbf0a2af44e6e3f445770613bc6.zip gcc-842809179e1d2fbf0a2af44e6e3f445770613bc6.tar.gz gcc-842809179e1d2fbf0a2af44e6e3f445770613bc6.tar.bz2 |
re PR tree-optimization/41783 (r151561 (PRE fix) regresses zeusmp)
PR tree-optimization/41783
* tree-ssa-alias.c (get_continuation_for_phi): Export, add a special
case for simple diamonds.
* tree-ssa-alias.h (get_continuation_for_phi): Declare.
* tree-ssa-pre.c (translate_vuse_through_block): Add same_valid
argument, use alias oracle to skip some vdefs.
(phi_translate_1): Change call to above, don't allocate new
value ids if they can stay the same.
(compute_avail): Allow vuse walking when looking up references.
testsuite/
* gcc.dg/pr41783.c: New test.
* gcc.dg/tree-ssa/ssa-pre-23.c: Adjust.
* gcc.dg/tree-ssa/ssa-pre-24.c: Don't xfail anymore.
* gcc.dg/tree-ssa/ssa-pre-27.c: New test.
From-SVN: r153551
Diffstat (limited to 'gcc/tree-ssa-alias.c')
-rw-r--r-- | gcc/tree-ssa-alias.c | 25 |
1 files changed, 22 insertions, 3 deletions
diff --git a/gcc/tree-ssa-alias.c b/gcc/tree-ssa-alias.c index ba5cbbc..fbd0470 100644 --- a/gcc/tree-ssa-alias.c +++ b/gcc/tree-ssa-alias.c @@ -1313,8 +1313,6 @@ stmt_may_clobber_ref_p (gimple stmt, tree ref) } -static tree get_continuation_for_phi (gimple, ao_ref *, bitmap *); - /* Walk the virtual use-def chain of VUSE until hitting the virtual operand TARGET or a statement clobbering the memory reference REF in which case false is returned. The walk starts with VUSE, one argument of PHI. */ @@ -1358,7 +1356,7 @@ maybe_skip_until (gimple phi, tree target, ao_ref *ref, clobber REF. Returns NULL_TREE if no suitable virtual operand can be found. */ -static tree +tree get_continuation_for_phi (gimple phi, ao_ref *ref, bitmap *visited) { unsigned nargs = gimple_phi_num_args (phi); @@ -1375,6 +1373,7 @@ get_continuation_for_phi (gimple phi, ao_ref *ref, bitmap *visited) tree arg1 = PHI_ARG_DEF (phi, 1); gimple def0 = SSA_NAME_DEF_STMT (arg0); gimple def1 = SSA_NAME_DEF_STMT (arg1); + tree common_vuse; if (arg0 == arg1) return arg0; @@ -1393,6 +1392,26 @@ get_continuation_for_phi (gimple phi, ao_ref *ref, bitmap *visited) if (maybe_skip_until (phi, arg1, ref, arg0, visited)) return arg1; } + /* Special case of a diamond: + MEM_1 = ... + goto (cond) ? L1 : L2 + L1: store1 = ... #MEM_2 = vuse(MEM_1) + goto L3 + L2: store2 = ... #MEM_3 = vuse(MEM_1) + L3: MEM_4 = PHI<MEM_2, MEM_3> + We were called with the PHI at L3, MEM_2 and MEM_3 don't + dominate each other, but still we can easily skip this PHI node + if we recognize that the vuse MEM operand is the same for both, + and that we can skip both statements (they don't clobber us). + This is still linear. Don't use maybe_skip_until, that might + potentially be slow. */ + else if ((common_vuse = gimple_vuse (def0)) + && common_vuse == gimple_vuse (def1)) + { + if (!stmt_may_clobber_ref_p_1 (def0, ref) + && !stmt_may_clobber_ref_p_1 (def1, ref)) + return common_vuse; + } } return NULL_TREE; |