Diffstat (limited to 'gcc/tree-tailcall.cc')
-rw-r--r--   gcc/tree-tailcall.cc   88
1 file changed, 67 insertions, 21 deletions
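
The sketch below is a hypothetical illustration, not a testcase from the patch or its testsuite: use_addr, other_fn, and count_down are invented names, and the musttail spelling assumes GCC's statement attribute. It only shows the shape of caller the relocated address-taken check and the new only_tailr flag are concerned with; whether a given compiler version accepts, warns on, or optimizes this exact combination depends on the rest of the pass.

/* Hypothetical illustration: the address of a caller argument is taken,
   and the function also contains a musttail call.  With the check moved
   from suitable_for_tail_call_opt_p into find_tail_calls, the addressable
   argument no longer has to disable tail-call discovery for the whole
   function; in a caller with musttail calls, only candidates that are not
   tail recursion are punted individually (the only_tailr path).  */
extern void use_addr (int *p);
extern int other_fn (int);

int
count_down (int n, int x)
{
  use_addr (&x);			/* address of a caller argument taken */
  if (n == 0)
    __attribute__((musttail)) return other_fn (x);
  return count_down (n - 1, x);		/* plain tail recursion */
}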
diff --git a/gcc/tree-tailcall.cc b/gcc/tree-tailcall.cc
index 477729c..f51bb97 100644
--- a/gcc/tree-tailcall.cc
+++ b/gcc/tree-tailcall.cc
@@ -165,8 +165,6 @@ suitable_for_tail_opt_p (gcall *call, bool diag_musttail)
static bool
suitable_for_tail_call_opt_p (gcall *call, bool diag_musttail)
{
- tree param;
-
/* alloca (until we have stack slot life analysis) inhibits
sibling call optimizations, but not tail recursion. */
if (cfun->calls_alloca)
@@ -204,18 +202,6 @@ suitable_for_tail_call_opt_p (gcall *call, bool diag_musttail)
return false;
}
- /* ??? It is OK if the argument of a function is taken in some cases,
- but not in all cases. See PR15387 and PR19616. Revisit for 4.1. */
- if (!diag_musttail || !gimple_call_must_tail_p (call))
- for (param = DECL_ARGUMENTS (current_function_decl);
- param; param = DECL_CHAIN (param))
- if (TREE_ADDRESSABLE (param))
- {
- maybe_error_musttail (call, _("address of caller arguments taken"),
- diag_musttail);
- return false;
- }
-
if (diag_musttail
&& gimple_call_must_tail_p (call)
&& warn_musttail_local_addr)
@@ -565,6 +551,7 @@ find_tail_calls (basic_block bb, struct tailcall **ret, bool only_musttail,
basic_block abb;
size_t idx;
tree var;
+ bool only_tailr = false;
if (!single_succ_p (bb)
&& (EDGE_COUNT (bb->succs) || !cfun->has_musttail || !diag_musttail))
@@ -660,6 +647,25 @@ find_tail_calls (basic_block bb, struct tailcall **ret, bool only_musttail,
if (!suitable_for_tail_call_opt_p (call, diag_musttail))
opt_tailcalls = false;
+ /* ??? It is OK if the argument of a function is taken in some cases,
+ but not in all cases. See PR15387 and PR19616. Revisit for 4.1. */
+ if (!diag_musttail || !gimple_call_must_tail_p (call))
+ for (param = DECL_ARGUMENTS (current_function_decl);
+ param; param = DECL_CHAIN (param))
+ if (TREE_ADDRESSABLE (param))
+ {
+ maybe_error_musttail (call, _("address of caller arguments taken"),
+ diag_musttail);
+ /* If current function has musttail calls, we can't disable tail
+ calls altogether for the whole caller, because those might be
+ actually fine. So just punt if this exact call is not
+ a tail recursion. */
+ if (cfun->has_musttail)
+ only_tailr = true;
+ else
+ opt_tailcalls = false;
+ }
+
/* If the LHS of our call is not just a simple register or local
variable, we can't transform this into a tail or sibling call.
This situation happens, in (e.g.) "*p = foo()" where foo returns a
@@ -794,6 +800,9 @@ find_tail_calls (basic_block bb, struct tailcall **ret, bool only_musttail,
tail_recursion = true;
}
+ if (only_tailr && !tail_recursion)
+ return;
+
/* Compute live vars if not computed yet. */
if (live_vars == NULL)
{
@@ -911,6 +920,7 @@ find_tail_calls (basic_block bb, struct tailcall **ret, bool only_musttail,
auto_bitmap to_move_defs;
auto_vec<gimple *> to_move_stmts;
bool is_noreturn = gimple_call_noreturn_p (call);
+ auto_vec<edge> edges;
abb = bb;
agsi = gsi;
@@ -924,6 +934,8 @@ find_tail_calls (basic_block bb, struct tailcall **ret, bool only_musttail,
{
edge e = single_non_eh_succ_edge (abb);
ass_var = propagate_through_phis (ass_var, e);
+ if (!ass_var)
+ edges.safe_push (e);
abb = e->dest;
agsi = gsi_start_bb (abb);
}
@@ -1031,19 +1043,53 @@ find_tail_calls (basic_block bb, struct tailcall **ret, bool only_musttail,
/* If IPA-VRP proves called function always returns a singleton range,
the return value is replaced by the only value in that range.
For tail call purposes, pretend such replacement didn't happen. */
- if (ass_var == NULL_TREE
- && !tail_recursion
- && TREE_CONSTANT (ret_var))
+ if (ass_var == NULL_TREE && !tail_recursion)
if (tree type = gimple_range_type (call))
if (tree callee = gimple_call_fndecl (call))
- if ((INTEGRAL_TYPE_P (type) || SCALAR_FLOAT_TYPE_P (type))
+ if ((INTEGRAL_TYPE_P (type)
+ || SCALAR_FLOAT_TYPE_P (type)
+ || POINTER_TYPE_P (type))
&& useless_type_conversion_p (TREE_TYPE (TREE_TYPE (callee)),
type)
&& useless_type_conversion_p (TREE_TYPE (ret_var), type)
&& ipa_return_value_range (val, callee)
- && val.singleton_p (&valr)
- && operand_equal_p (ret_var, valr, 0))
- ok = true;
+ && val.singleton_p (&valr))
+ {
+ tree rv = ret_var;
+ unsigned int i = edges.length ();
+ /* If ret_var is equal to valr, we can tail optimize. */
+ if (operand_equal_p (ret_var, valr, 0))
+ ok = true;
+ else
+ /* Otherwise, if ret_var is a PHI result, try to find out
+ if valr isn't propagated through PHIs on the path from
+ call's bb to SSA_NAME_DEF_STMT (ret_var)'s bb. */
+ while (TREE_CODE (rv) == SSA_NAME
+ && gimple_code (SSA_NAME_DEF_STMT (rv)) == GIMPLE_PHI)
+ {
+ tree nrv = NULL_TREE;
+ gimple *g = SSA_NAME_DEF_STMT (rv);
+ for (; i; --i)
+ {
+ if (edges[i - 1]->dest == gimple_bb (g))
+ {
+ nrv
+ = gimple_phi_arg_def_from_edge (g,
+ edges[i - 1]);
+ --i;
+ break;
+ }
+ }
+ if (nrv == NULL_TREE)
+ break;
+ if (operand_equal_p (nrv, valr, 0))
+ {
+ ok = true;
+ break;
+ }
+ rv = nrv;
+ }
+ }
if (!ok)
{
maybe_error_musttail (call,