author     Andrew Pinski <quic_apinski@quicinc.com>  2024-09-26 05:55:58 +0000
committer  Andrew Pinski <quic_apinski@quicinc.com>  2024-09-27 07:14:00 +0000
commit     9c04112fdc221b0a337f88572dfef4caaca78349 (patch)
tree       4919caadc52d066420ab9909d8467b06aa9c2b4e /gcc/tree-ssa-loop-unswitch.cc
parent     1a0b33ebc57ebcc9595b19050f5c36c1f9d39e3e (diff)
unswitch: Replace manual ondemand maybe_undef with ssa_name_maybe_undef_p/mark_ssa_maybe_undefs [PR116848]
The on-demand maybe_undef walk that follows PHIs was added in
r7-6427-g8b670f93ab1136, but ssa_name_maybe_undef_p/mark_ssa_maybe_undefs
was added later in r13-972-gbe2861fe8c527a.  This converts the on-demand
code to use mark_ssa_maybe_undefs/ssa_name_maybe_undef_p instead, which
is faster: mark_ssa_maybe_undefs is a single walk forward from the uses
of undefined names, rather than an on-demand walk backward from defs
which are more likely to be defined.  Even though the on-demand
maybe_undef had some extra special cases, those won't make a big
difference in most code.

Bootstrapped and tested on x86_64-linux-gnu.

	PR tree-optimization/116848

gcc/ChangeLog:

	* tree-ssa-loop-unswitch.cc (tree_ssa_unswitch_loops): Call
	mark_ssa_maybe_undefs.
	(is_maybe_undefined): Call ssa_name_maybe_undef_p instead of
	the on-demand undef walk.

Signed-off-by: Andrew Pinski <quic_apinski@quicinc.com>
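To illustrate the performance argument, here is a minimal, self-contained
toy model (plain C++, not GCC code; the types and function names such as
ssa_name_t, mark_maybe_undefs and is_maybe_undefined_ondemand are
illustrative only).  It contrasts the single forward propagation from
undefined names, which sets a per-name flag once, with a per-query
backward walk over the defs feeding a name, which is what the removed
code did on demand.

	// Toy model of maybe-undef propagation over a def-use graph.
	#include <vector>
	#include <queue>
	#include <cstdio>

	struct ssa_name_t
	{
	  bool undefined = false;     // default-def with no initializer
	  bool maybe_undef = false;   // flag set by the one-time forward pass
	  std::vector<int> uses;      // names whose definitions read this name
	  std::vector<int> defs_used; // names this name's definition reads
	};

	/* One-time forward walk: start from the undefined names and push the
	   maybe-undef property along def->use edges (the idea behind
	   mark_ssa_maybe_undefs).  */
	static void
	mark_maybe_undefs (std::vector<ssa_name_t> &names)
	{
	  std::queue<int> work;
	  for (size_t i = 0; i < names.size (); ++i)
	    if (names[i].undefined)
	      {
		names[i].maybe_undef = true;
		work.push ((int) i);
	      }
	  while (!work.empty ())
	    {
	      int n = work.front ();
	      work.pop ();
	      for (int u : names[n].uses)
		if (!names[u].maybe_undef)
		  {
		    names[u].maybe_undef = true;
		    work.push (u);
		  }
	    }
	}

	/* On-demand backward walk from a single name (the idea behind the
	   removed worklist code): every query re-walks the defs feeding NAME
	   until an undefined input is found or the walk terminates.  */
	static bool
	is_maybe_undefined_ondemand (const std::vector<ssa_name_t> &names, int name)
	{
	  std::vector<bool> visited (names.size (), false);
	  std::vector<int> work = { name };
	  visited[name] = true;
	  while (!work.empty ())
	    {
	      int n = work.back ();
	      work.pop_back ();
	      if (names[n].undefined)
		return true;
	      for (int d : names[n].defs_used)
		if (!visited[d])
		  {
		    visited[d] = true;
		    work.push_back (d);
		  }
	    }
	  return false;
	}

	int
	main ()
	{
	  /* _1 = <undefined>; _2 = _1 + 1; _3 = 5;  */
	  std::vector<ssa_name_t> names (3);
	  names[0].undefined = true;
	  names[0].uses = { 1 };
	  names[1].defs_used = { 0 };

	  mark_maybe_undefs (names);
	  for (int i = 0; i < 3; ++i)
	    printf ("_%d: flag=%d ondemand=%d\n", i + 1,
		    (int) names[i].maybe_undef,
		    (int) is_maybe_undefined_ondemand (names, i));
	}

With the flag precomputed, each query in the unswitching code reduces to
a constant-time lookup instead of a fresh graph walk, which matches the
new one-line body of is_maybe_undefined in the diff below.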
Diffstat (limited to 'gcc/tree-ssa-loop-unswitch.cc')
-rw-r--r--  gcc/tree-ssa-loop-unswitch.cc  61
1 file changed, 2 insertions(+), 59 deletions(-)
diff --git a/gcc/tree-ssa-loop-unswitch.cc b/gcc/tree-ssa-loop-unswitch.cc
index 7601d91..847f7ac 100644
--- a/gcc/tree-ssa-loop-unswitch.cc
+++ b/gcc/tree-ssa-loop-unswitch.cc
@@ -329,6 +329,7 @@ tree_ssa_unswitch_loops (function *fun)
bool changed_unswitch = false;
bool changed_hoist = false;
auto_edge_flag ignored_edge_flag (fun);
+ mark_ssa_maybe_undefs ();
ranger = enable_ranger (fun);
@@ -427,65 +428,7 @@ is_maybe_undefined (const tree name, gimple *stmt, class loop *loop)
if (gimple_bb (stmt) == loop->header)
return false;
- auto_bitmap visited_ssa;
- auto_vec<tree> worklist;
- worklist.safe_push (name);
- bitmap_set_bit (visited_ssa, SSA_NAME_VERSION (name));
- while (!worklist.is_empty ())
- {
- tree t = worklist.pop ();
-
- /* If it's obviously undefined, avoid further computations. */
- if (ssa_undefined_value_p (t, true))
- return true;
-
- if (ssa_defined_default_def_p (t))
- continue;
-
- gimple *def = SSA_NAME_DEF_STMT (t);
-
- /* Check that all the PHI args are fully defined. */
- if (gphi *phi = dyn_cast <gphi *> (def))
- {
- for (unsigned i = 0; i < gimple_phi_num_args (phi); ++i)
- {
- tree t = gimple_phi_arg_def (phi, i);
- /* If an SSA has already been seen, it may be a loop,
- but we can continue and ignore this use. Otherwise,
- add the SSA_NAME to the queue and visit it later. */
- if (TREE_CODE (t) == SSA_NAME
- && bitmap_set_bit (visited_ssa, SSA_NAME_VERSION (t)))
- worklist.safe_push (t);
- }
- continue;
- }
-
- /* Uses in stmts always executed when the region header executes
- are fine. */
- if (dominated_by_p (CDI_DOMINATORS, loop->header, gimple_bb (def)))
- continue;
-
- /* Handle calls and memory loads conservatively. */
- if (!is_gimple_assign (def)
- || (gimple_assign_single_p (def)
- && gimple_vuse (def)))
- return true;
-
- /* Check that any SSA names used to define NAME are also fully
- defined. */
- use_operand_p use_p;
- ssa_op_iter iter;
- FOR_EACH_SSA_USE_OPERAND (use_p, def, iter, SSA_OP_USE)
- {
- tree t = USE_FROM_PTR (use_p);
- /* If an SSA has already been seen, it may be a loop,
- but we can continue and ignore this use. Otherwise,
- add the SSA_NAME to the queue and visit it later. */
- if (bitmap_set_bit (visited_ssa, SSA_NAME_VERSION (t)))
- worklist.safe_push (t);
- }
- }
- return false;
+ return ssa_name_maybe_undef_p (name);
}
/* Checks whether we can unswitch LOOP on condition at end of BB -- one of its