Diffstat (limited to 'gcc/tree-tailcall.cc')
-rw-r--r--  gcc/tree-tailcall.cc  |  780
1 files changed, 678 insertions, 102 deletions
diff --git a/gcc/tree-tailcall.cc b/gcc/tree-tailcall.cc
index 8ba6752..c80145d 100644
--- a/gcc/tree-tailcall.cc
+++ b/gcc/tree-tailcall.cc
@@ -51,6 +51,8 @@ along with GCC; see the file COPYING3. If not see
#include "symbol-summary.h"
#include "ipa-cp.h"
#include "ipa-prop.h"
+#include "attribs.h"
+#include "asan.h"
/* The file implements the tail recursion elimination. It is also used to
analyze the tail calls in general, passing the results to the rtl level
@@ -122,6 +124,9 @@ struct tailcall
/* True if it is a call to the current function. */
bool tail_recursion;
+  /* True if there is a __tsan_func_exit call after the call.  */
+ bool has_tsan_func_exit;
+
/* The return value of the caller is mult * f + add, where f is the return
value of the call. */
tree mult, add;
@@ -165,8 +170,6 @@ suitable_for_tail_opt_p (gcall *call, bool diag_musttail)
static bool
suitable_for_tail_call_opt_p (gcall *call, bool diag_musttail)
{
- tree param;
-
/* alloca (until we have stack slot life analysis) inhibits
sibling call optimizations, but not tail recursion. */
if (cfun->calls_alloca)
@@ -204,21 +207,60 @@ suitable_for_tail_call_opt_p (gcall *call, bool diag_musttail)
return false;
}
- /* ??? It is OK if the argument of a function is taken in some cases,
- but not in all cases. See PR15387 and PR19616. Revisit for 4.1. */
- for (param = DECL_ARGUMENTS (current_function_decl);
- param;
- param = DECL_CHAIN (param))
- if (TREE_ADDRESSABLE (param))
+ if (diag_musttail
+ && gimple_call_must_tail_p (call)
+ && warn_musttail_local_addr)
+ for (unsigned int i = 0; i < gimple_call_num_args (call); i++)
{
- maybe_error_musttail (call, _("address of caller arguments taken"),
- diag_musttail);
- return false;
+ tree arg = gimple_call_arg (call, i);
+ if (!POINTER_TYPE_P (TREE_TYPE (arg)))
+ continue;
+ if (TREE_CODE (arg) == ADDR_EXPR)
+ {
+ arg = get_base_address (TREE_OPERAND (arg, 0));
+ if (auto_var_in_fn_p (arg, current_function_decl))
+ {
+ if (TREE_CODE (arg) == LABEL_DECL)
+ warning_at (gimple_location (call), OPT_Wmusttail_local_addr,
+ "address of label passed to %<musttail%> "
+ "call argument");
+ else if (TREE_CODE (arg) == PARM_DECL)
+ warning_at (gimple_location (call), OPT_Wmusttail_local_addr,
+ "address of parameter %qD passed to "
+ "%<musttail%> call argument", arg);
+ else if (!DECL_ARTIFICIAL (arg) && DECL_NAME (arg))
+ warning_at (gimple_location (call), OPT_Wmusttail_local_addr,
+ "address of automatic variable %qD passed to "
+ "%<musttail%> call argument", arg);
+ else
+ warning_at (gimple_location (call), OPT_Wmusttail_local_addr,
+ "address of local variable passed to "
+ "%<musttail%> call argument");
+ suppress_warning (call, OPT_Wmaybe_musttail_local_addr);
+ }
+ }
}
return true;
}
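As a rough illustration (not part of the patch), this is the kind of user code the new -Wmusttail-local-addr warning above is aimed at; callee and caller are hypothetical names:

    extern int callee (int *);

    int
    caller (void)
    {
      int local = 42;
      /* The address of an automatic variable is passed as a musttail call
         argument; the caller's frame (and LOCAL's slot) is gone once the
         tail call replaces it, hence the warning above.  */
      __attribute__((musttail)) return callee (&local);
    }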
+/* Return single successor edge ignoring EDGE_EH edges. */
+
+static edge
+single_non_eh_succ_edge (basic_block bb)
+{
+ edge e, ret = NULL;
+ edge_iterator ei;
+ FOR_EACH_EDGE (e, ei, bb->succs)
+ if ((e->flags & EDGE_EH) == 0)
+ {
+ gcc_assert (ret == NULL);
+ ret = e;
+ }
+ gcc_assert (ret);
+ return ret;
+}
+
/* Checks whether the expression EXPR in stmt AT is independent of the
statement pointed to by GSI (in a sense that we already know EXPR's value
at GSI). We use the fact that we are only called from the chain of
@@ -245,7 +287,7 @@ independent_of_stmt_p (tree expr, gimple *at, gimple_stmt_iterator gsi,
/* Mark the blocks in the chain leading to the end. */
at_bb = gimple_bb (at);
call_bb = gimple_bb (gsi_stmt (gsi));
- for (bb = call_bb; bb != at_bb; bb = single_succ (bb))
+ for (bb = call_bb; bb != at_bb; bb = single_non_eh_succ_edge (bb)->dest)
bb->aux = &bb->aux;
bb->aux = &bb->aux;
@@ -289,7 +331,7 @@ independent_of_stmt_p (tree expr, gimple *at, gimple_stmt_iterator gsi,
}
/* Unmark the blocks. */
- for (bb = call_bb; bb != at_bb; bb = single_succ (bb))
+ for (bb = call_bb; bb != at_bb; bb = single_non_eh_succ_edge (bb)->dest)
bb->aux = NULL;
bb->aux = NULL;
@@ -361,6 +403,10 @@ process_assignment (gassign *stmt,
if (FLOAT_TYPE_P (TREE_TYPE (DECL_RESULT (current_function_decl))))
return FAIL;
+ /* We at least cannot build -1 for all fixed point types. */
+ if (FIXED_POINT_TYPE_P (TREE_TYPE (DECL_RESULT (current_function_decl))))
+ return FAIL;
+
if (rhs_class == GIMPLE_UNARY_RHS
&& op0 == *ass_var)
;
@@ -443,7 +489,7 @@ maybe_error_musttail (gcall *call, const char *err, bool diag_musttail)
{
if (gimple_call_must_tail_p (call) && diag_musttail)
{
- error_at (call->location, "cannot tail-call: %s", err);
+ error_at (gimple_location (call), "cannot tail-call: %s", err);
/* Avoid another error. ??? If there are multiple reasons why tail
calls fail it might be useful to report them all to avoid
whack-a-mole for the user. But currently there is too much
@@ -451,26 +497,85 @@ maybe_error_musttail (gcall *call, const char *err, bool diag_musttail)
gimple_call_set_must_tail (call, false); /* Avoid another error. */
gimple_call_set_tail (call, false);
}
- if (dump_file)
+ if (dump_file && (dump_flags & TDF_DETAILS))
{
+ fprintf (dump_file, "Cannot tail-call: %s: ", err);
print_gimple_stmt (dump_file, call, 0, TDF_SLIM);
- fprintf (dump_file, "Cannot convert: %s\n", err);
}
}
+/* Return true if no real work is performed on the exception path starting
+   at BB and the exception in the end propagates externally.  Search at
+   most CNT basic blocks (so that we don't need to do trivial loop
+   discovery).  */
+
+static bool
+empty_eh_cleanup (basic_block bb, int *eh_has_tsan_func_exit, int cnt)
+{
+ if (EDGE_COUNT (bb->succs) > 1)
+ return false;
+
+ for (gimple_stmt_iterator gsi = gsi_after_labels (bb); !gsi_end_p (gsi);
+ gsi_next (&gsi))
+ {
+ gimple *g = gsi_stmt (gsi);
+ if (is_gimple_debug (g) || gimple_clobber_p (g))
+ continue;
+ if (eh_has_tsan_func_exit
+ && !*eh_has_tsan_func_exit
+ && sanitize_flags_p (SANITIZE_THREAD)
+ && gimple_call_builtin_p (g, BUILT_IN_TSAN_FUNC_EXIT))
+ {
+ *eh_has_tsan_func_exit = 1;
+ continue;
+ }
+ if (eh_has_tsan_func_exit
+ && sanitize_flags_p (SANITIZE_ADDRESS)
+ && asan_mark_p (g, ASAN_MARK_POISON))
+ continue;
+ if (is_gimple_resx (g))
+ {
+ if (stmt_can_throw_external (cfun, g))
+ return true;
+ if (single_succ_p (bb)
+ && (single_succ_edge (bb)->flags & EDGE_EH)
+ && cnt > 1)
+ return empty_eh_cleanup (single_succ (bb), eh_has_tsan_func_exit,
+ cnt - 1);
+ }
+ return false;
+ }
+ if (!single_succ_p (bb))
+ return false;
+ if (cnt == 1)
+ return false;
+ return empty_eh_cleanup (single_succ (bb), eh_has_tsan_func_exit, cnt - 1);
+}
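A minimal sketch of a case this helper is meant to accept (not part of the patch; assumes C++ with -fsanitize=address and use-after-scope instrumentation, callee and observe are hypothetical names): the exception path of the final call only poisons the addressable local via .ASAN_MARK and then rethrows externally, so it counts as an empty cleanup:

    extern int callee (int);
    extern void observe (int *);

    int
    caller (int x)
    {
      int tmp = x;
      observe (&tmp);   /* TMP becomes addressable, so ASan marks its scope.  */
      /* The EH edge of this call runs only .ASAN_MARK (POISON, &tmp, 4)
         followed by a rethrow, which empty_eh_cleanup treats as empty.  */
      __attribute__((musttail)) return callee (x);
    }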
+
/* Argument for compute_live_vars/live_vars_at_stmt and what compute_live_vars
returns. Computed lazily, but just once for the function. */
static live_vars_map *live_vars;
static vec<bitmap_head> live_vars_vec;
-/* Finds tailcalls falling into basic block BB. The list of found tailcalls is
- added to the start of RET. When ONLY_MUSTTAIL is set only handle musttail.
+/* Finds tailcalls falling into basic block BB when coming from edge ESUCC (or
+ NULL). The list of found tailcalls is added to the start of RET.
+ When ONLY_MUSTTAIL is set only handle musttail.
Update OPT_TAILCALLS as output parameter. If DIAG_MUSTTAIL, diagnose
- failures for musttail calls. */
+   failures for musttail calls.  RETRY_TSAN_FUNC_EXIT is initially 0; in
+   that case the last call before the return is tried as a tail call, which
+   with -fsanitize=thread is the __tsan_func_exit call.  It is set to -1
+   when a __tsan_func_exit call is detected, and tree_optimize_tail_calls_1
+   then retries with it set to 1 (regardless of whether the
+   __tsan_func_exit call itself could be tail called).  That retry allows
+   musttail calls before the __tsan_func_exit call to become tail calls as
+   well, by emitting a __tsan_func_exit call before them.
+ IGNORED_EDGES and MUST_SEE_BBS are used in recursive calls when handling
+ GIMPLE_CONDs for cleanups. */
static void
-find_tail_calls (basic_block bb, struct tailcall **ret, bool only_musttail,
- bool &opt_tailcalls, bool diag_musttail)
+find_tail_calls (basic_block bb, edge esucc, struct tailcall **ret,
+ bool only_musttail, bool &opt_tailcalls, bool diag_musttail,
+ int &retry_tsan_func_exit, hash_set<edge> *ignored_edges,
+ hash_set<basic_block> *must_see_bbs)
{
tree ass_var = NULL_TREE, ret_var, func, param;
gimple *stmt;
@@ -483,14 +588,27 @@ find_tail_calls (basic_block bb, struct tailcall **ret, bool only_musttail,
basic_block abb;
size_t idx;
tree var;
+ bool only_tailr = false;
+ bool has_tsan_func_exit = false;
+ int eh_has_tsan_func_exit = -1;
+ bool delete_ignored_edges = false;
if (!single_succ_p (bb)
&& (EDGE_COUNT (bb->succs) || !cfun->has_musttail || !diag_musttail))
{
+ if (EDGE_COUNT (bb->succs) == 2
+ && esucc
+ && cfun->has_musttail
+ && diag_musttail
+ && (EDGE_SUCC (bb, 0)->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
+ && (EDGE_SUCC (bb, 1)->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
+ && (stmt = last_nondebug_stmt (bb))
+ && gimple_code (stmt) == GIMPLE_COND)
+ ;
/* If there is an abnormal edge assume it's the only extra one.
Tolerate that case so that we can give better error messages
for musttail later. */
- if (!has_abnormal_or_eh_outgoing_edge_p (bb))
+ else if (!has_abnormal_or_eh_outgoing_edge_p (bb))
{
if (dump_file)
fprintf (dump_file, "Basic block %d has extra exit edges\n",
@@ -516,6 +634,96 @@ find_tail_calls (basic_block bb, struct tailcall **ret, bool only_musttail,
|| is_gimple_debug (stmt))
continue;
+ if (cfun->has_musttail
+ && sanitize_flags_p (SANITIZE_THREAD)
+ && gimple_call_builtin_p (stmt, BUILT_IN_TSAN_FUNC_EXIT)
+ && diag_musttail)
+ {
+ if (retry_tsan_func_exit == 0)
+ retry_tsan_func_exit = -1;
+ else if (retry_tsan_func_exit == 1)
+ continue;
+ }
+
+ if (cfun->has_musttail
+ && sanitize_flags_p (SANITIZE_ADDRESS)
+ && asan_mark_p (stmt, ASAN_MARK_POISON)
+ && diag_musttail)
+ continue;
+
+ /* Especially at -O0 with -fsanitize=address we can end up with
+ code like
+ _26 = foo (x_24(D)); [must tail call]
+ finally_tmp.3_27 = 0;
+ goto <bb 5>; [INV]
+
+ ...
+
+ <bb 5> :
+ # _6 = PHI <_26(3), _23(D)(4)>
+ # finally_tmp.3_8 = PHI <finally_tmp.3_27(3), finally_tmp.3_22(4)>
+ .ASAN_MARK (POISON, &c, 4);
+ if (finally_tmp.3_8 == 1)
+ goto <bb 7>; [INV]
+ else
+ goto <bb 6>; [INV]
+	 When walking backwards, ESUCC is the edge we are coming from;
+	 depending on whether it has EDGE_TRUE_VALUE set, on == vs. != in the
+	 comparison and on the value compared against, try to find out through
+	 which edge we need to go and which edge should be ignored.  The code handles
+ both INTEGER_CST PHI arguments and SSA_NAMEs set to constants
+ (for -O0 where those aren't propagated). */
+ if (cfun->has_musttail
+ && diag_musttail
+ && esucc
+ && gimple_code (stmt) == GIMPLE_COND
+ && (gimple_cond_code (stmt) == EQ_EXPR
+ || gimple_cond_code (stmt) == NE_EXPR)
+ && TREE_CODE (gimple_cond_lhs (stmt)) == SSA_NAME
+ && TREE_CODE (gimple_cond_rhs (stmt)) == INTEGER_CST
+ && INTEGRAL_TYPE_P (TREE_TYPE (gimple_cond_lhs (stmt)))
+ && (integer_zerop (gimple_cond_rhs (stmt))
+ || integer_onep (gimple_cond_rhs (stmt))))
+ {
+ tree lhs = gimple_cond_lhs (stmt);
+ bool rhsv = integer_onep (gimple_cond_rhs (stmt));
+ if (((esucc->flags & EDGE_TRUE_VALUE) != 0)
+ ^ (gimple_cond_code (stmt) == EQ_EXPR))
+ rhsv = !rhsv;
+ if (!ignored_edges)
+ {
+ ignored_edges = new hash_set<edge>;
+ must_see_bbs = new hash_set<basic_block>;
+ delete_ignored_edges = true;
+ }
+ if (is_gimple_assign (SSA_NAME_DEF_STMT (lhs))
+ && (gimple_assign_rhs_code (SSA_NAME_DEF_STMT (lhs))
+ == INTEGER_CST))
+ {
+ tree rhs = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (lhs));
+ if (rhsv ? integer_onep (rhs) : integer_zerop (rhs))
+ continue;
+ }
+ else if (gimple_code (SSA_NAME_DEF_STMT (lhs)) == GIMPLE_PHI)
+ {
+ gimple *phi = SSA_NAME_DEF_STMT (lhs);
+ basic_block pbb = gimple_bb (phi);
+ must_see_bbs->add (pbb);
+ edge_iterator ei;
+ FOR_EACH_EDGE (e, ei, pbb->preds)
+ {
+ tree rhs = gimple_phi_arg_def_from_edge (phi, e);
+ if (TREE_CODE (rhs) == SSA_NAME
+ && is_gimple_assign (SSA_NAME_DEF_STMT (rhs))
+ && (gimple_assign_rhs_code (SSA_NAME_DEF_STMT (rhs))
+ == INTEGER_CST))
+ rhs = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (rhs));
+ if (!(rhsv ? integer_onep (rhs) : integer_zerop (rhs)))
+ ignored_edges->add (e);
+ }
+ }
+ }
+
if (!last_stmt)
last_stmt = stmt;
/* Check for a call. */
@@ -524,14 +732,21 @@ find_tail_calls (basic_block bb, struct tailcall **ret, bool only_musttail,
call = as_a <gcall *> (stmt);
/* Handle only musttail calls when not optimizing. */
if (only_musttail && !gimple_call_must_tail_p (call))
- return;
- if (bad_stmt)
{
- maybe_error_musttail (call,
- _("memory reference or volatile after "
- "call"), diag_musttail);
+ maybe_delete_ignored_edges:
+ if (delete_ignored_edges)
+ {
+ delete ignored_edges;
+ delete must_see_bbs;
+ }
return;
}
+ if (bad_stmt)
+ {
+ maybe_error_musttail (call, _("memory reference or volatile "
+ "after call"), diag_musttail);
+ goto maybe_delete_ignored_edges;
+ }
ass_var = gimple_call_lhs (call);
break;
}
@@ -559,17 +774,34 @@ find_tail_calls (basic_block bb, struct tailcall **ret, bool only_musttail,
}
if (bad_stmt)
- return;
+ goto maybe_delete_ignored_edges;
if (gsi_end_p (gsi))
{
+ if (must_see_bbs)
+ must_see_bbs->remove (bb);
+
edge_iterator ei;
/* Recurse to the predecessors. */
FOR_EACH_EDGE (e, ei, bb->preds)
- find_tail_calls (e->src, ret, only_musttail, opt_tailcalls,
- diag_musttail);
+ {
+ if (ignored_edges && ignored_edges->contains (e))
+ continue;
+ find_tail_calls (e->src, e, ret, only_musttail, opt_tailcalls,
+ diag_musttail, retry_tsan_func_exit, ignored_edges,
+ must_see_bbs);
+ }
- return;
+ goto maybe_delete_ignored_edges;
+ }
+
+ if (must_see_bbs && !must_see_bbs->is_empty ())
+ goto maybe_delete_ignored_edges;
+
+ if (delete_ignored_edges)
+ {
+ delete ignored_edges;
+ delete must_see_bbs;
}
if (!suitable_for_tail_opt_p (call, diag_musttail))
@@ -578,6 +810,25 @@ find_tail_calls (basic_block bb, struct tailcall **ret, bool only_musttail,
if (!suitable_for_tail_call_opt_p (call, diag_musttail))
opt_tailcalls = false;
+ /* ??? It is OK if the argument of a function is taken in some cases,
+ but not in all cases. See PR15387 and PR19616. Revisit for 4.1. */
+ if (!diag_musttail || !gimple_call_must_tail_p (call))
+ for (param = DECL_ARGUMENTS (current_function_decl);
+ param; param = DECL_CHAIN (param))
+ if (TREE_ADDRESSABLE (param))
+ {
+ maybe_error_musttail (call, _("address of caller arguments taken"),
+ diag_musttail);
+ /* If current function has musttail calls, we can't disable tail
+ calls altogether for the whole caller, because those might be
+ actually fine. So just punt if this exact call is not
+ a tail recursion. */
+ if (cfun->has_musttail)
+ only_tailr = true;
+ else
+ opt_tailcalls = false;
+ }
+
/* If the LHS of our call is not just a simple register or local
variable, we can't transform this into a tail or sibling call.
This situation happens, in (e.g.) "*p = foo()" where foo returns a
@@ -608,14 +859,39 @@ find_tail_calls (basic_block bb, struct tailcall **ret, bool only_musttail,
if ((stmt_could_throw_p (cfun, stmt)
&& !stmt_can_throw_external (cfun, stmt)) || EDGE_COUNT (bb->succs) > 1)
{
- if (stmt == last_stmt)
- maybe_error_musttail (call,
- _("call may throw exception that does not "
- "propagate"), diag_musttail);
- else
- maybe_error_musttail (call, _("code between call and return"),
- diag_musttail);
- return;
+ if (stmt != last_stmt)
+ {
+ maybe_error_musttail (call, _("code between call and return"),
+ diag_musttail);
+ return;
+ }
+
+ edge e;
+ edge_iterator ei;
+ FOR_EACH_EDGE (e, ei, bb->succs)
+ if (e->flags & EDGE_EH)
+ break;
+
+ if (!e)
+ {
+ maybe_error_musttail (call, _("call may throw exception that does not "
+ "propagate"), diag_musttail);
+ return;
+ }
+
+ if (diag_musttail && gimple_call_must_tail_p (call))
+ eh_has_tsan_func_exit = 0;
+ if (!gimple_call_must_tail_p (call)
+ || !empty_eh_cleanup (e->dest,
+ eh_has_tsan_func_exit
+ ? NULL : &eh_has_tsan_func_exit, 20)
+ || EDGE_COUNT (bb->succs) > 2)
+ {
+ maybe_error_musttail (call, _("call may throw exception caught "
+ "locally or perform cleanups"),
+ diag_musttail);
+ return;
+ }
}
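Conversely, a hypothetical sketch of the rejected case diagnosed above (not part of the patch; assumes C++ with exceptions, may_throw and wrap are made-up names):

    extern int may_throw (int);

    int
    wrap (int x)
    {
      try
        {
          /* The exception would be handled inside WRAP itself, so the call
             cannot become a tail call and the error above is emitted.  */
          __attribute__((musttail)) return may_throw (x);
        }
      catch (...)
        {
          return -1;
        }
    }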
/* If the function returns a value, then at present, the tail call
@@ -672,19 +948,16 @@ find_tail_calls (basic_block bb, struct tailcall **ret, bool only_musttail,
have a copyable type and the two arguments must have reasonably
equivalent types. The latter requirement could be relaxed if
we emitted a suitable type conversion statement. */
- if (!is_gimple_reg_type (TREE_TYPE (param))
+ if (TREE_ADDRESSABLE (TREE_TYPE (param))
|| !useless_type_conversion_p (TREE_TYPE (param),
TREE_TYPE (arg)))
break;
- /* The parameter should be a real operand, so that phi node
- created for it at the start of the function has the meaning
- of copying the value. This test implies is_gimple_reg_type
- from the previous condition, however this one could be
- relaxed by being more careful with copying the new value
- of the parameter (emitting appropriate GIMPLE_ASSIGN and
- updating the virtual operands). */
- if (!is_gimple_reg (param))
+ if (is_gimple_reg_type (TREE_TYPE (param))
+ ? !is_gimple_reg (param)
+ : (!is_gimple_variable (param)
+ || TREE_THIS_VOLATILE (param)
+ || may_be_aliased (param)))
break;
}
}
@@ -692,6 +965,9 @@ find_tail_calls (basic_block bb, struct tailcall **ret, bool only_musttail,
tail_recursion = true;
}
+ if (only_tailr && !tail_recursion)
+ return;
+
/* Compute live vars if not computed yet. */
if (live_vars == NULL)
{
@@ -728,11 +1004,23 @@ find_tail_calls (basic_block bb, struct tailcall **ret, bool only_musttail,
{
if (!VAR_P (var))
{
+ if (diag_musttail && gimple_call_must_tail_p (call))
+ {
+ auto opt = OPT_Wmaybe_musttail_local_addr;
+ if (!warning_suppressed_p (call,
+ opt))
+ {
+ warning_at (gimple_location (call), opt,
+ "address of local variable can escape to "
+ "%<musttail%> call");
+ suppress_warning (call, opt);
+ }
+ continue;
+ }
if (local_live_vars)
BITMAP_FREE (local_live_vars);
- maybe_error_musttail (call,
- _("call invocation refers to locals"),
- diag_musttail);
+ maybe_error_musttail (call, _("call invocation refers to "
+ "locals"), diag_musttail);
return;
}
else
@@ -740,15 +1028,48 @@ find_tail_calls (basic_block bb, struct tailcall **ret, bool only_musttail,
unsigned int *v = live_vars->get (DECL_UID (var));
if (bitmap_bit_p (local_live_vars, *v))
{
+ if (diag_musttail && gimple_call_must_tail_p (call))
+ {
+ auto opt = OPT_Wmaybe_musttail_local_addr;
+ if (!warning_suppressed_p (call, opt))
+ {
+ if (!DECL_ARTIFICIAL (var) && DECL_NAME (var))
+ warning_at (gimple_location (call), opt,
+ "address of automatic variable %qD "
+ "can escape to %<musttail%> call",
+ var);
+ else
+ warning_at (gimple_location (call), opt,
+ "address of local variable can escape "
+ "to %<musttail%> call");
+ suppress_warning (call, opt);
+ }
+ continue;
+ }
BITMAP_FREE (local_live_vars);
- maybe_error_musttail (call,
- _("call invocation refers to locals"),
- diag_musttail);
+ maybe_error_musttail (call, _("call invocation refers to "
+ "locals"), diag_musttail);
return;
}
}
}
}
+ if (diag_musttail
+ && gimple_call_must_tail_p (call)
+ && !warning_suppressed_p (call, OPT_Wmaybe_musttail_local_addr))
+ for (tree param = DECL_ARGUMENTS (current_function_decl);
+ param; param = DECL_CHAIN (param))
+ if (may_be_aliased (param)
+ && (ref_maybe_used_by_stmt_p (call, param, false)
+ || call_may_clobber_ref_p (call, param, false)))
+ {
+ auto opt = OPT_Wmaybe_musttail_local_addr;
+ warning_at (gimple_location (call), opt,
+ "address of parameter %qD can escape to "
+ "%<musttail%> call", param);
+ suppress_warning (call, opt);
+ break;
+ }
if (local_live_vars)
BITMAP_FREE (local_live_vars);
@@ -761,8 +1082,8 @@ find_tail_calls (basic_block bb, struct tailcall **ret, bool only_musttail,
a = NULL_TREE;
auto_bitmap to_move_defs;
auto_vec<gimple *> to_move_stmts;
- bool is_noreturn
- = EDGE_COUNT (bb->succs) == 0 && gimple_call_noreturn_p (call);
+ bool is_noreturn = gimple_call_noreturn_p (call);
+ auto_vec<edge> edges;
abb = bb;
agsi = gsi;
@@ -772,10 +1093,14 @@ find_tail_calls (basic_block bb, struct tailcall **ret, bool only_musttail,
tree tmp_m = NULL_TREE;
gsi_next (&agsi);
+ new_bb:
while (gsi_end_p (agsi))
{
- ass_var = propagate_through_phis (ass_var, single_succ_edge (abb));
- abb = single_succ (abb);
+ edge e = single_non_eh_succ_edge (abb);
+ ass_var = propagate_through_phis (ass_var, e);
+ if (!ass_var || ignored_edges)
+ edges.safe_push (e);
+ abb = e->dest;
agsi = gsi_start_bb (abb);
}
@@ -790,6 +1115,105 @@ find_tail_calls (basic_block bb, struct tailcall **ret, bool only_musttail,
|| is_gimple_debug (stmt))
continue;
+ if (cfun->has_musttail
+ && sanitize_flags_p (SANITIZE_THREAD)
+ && retry_tsan_func_exit == 1
+ && gimple_call_builtin_p (stmt, BUILT_IN_TSAN_FUNC_EXIT)
+ && !has_tsan_func_exit
+ && gimple_call_must_tail_p (call))
+ {
+ has_tsan_func_exit = true;
+ continue;
+ }
+
+ if (cfun->has_musttail
+ && sanitize_flags_p (SANITIZE_ADDRESS)
+ && asan_mark_p (stmt, ASAN_MARK_POISON)
+ && diag_musttail)
+ continue;
+
+ /* See earlier comment on GIMPLE_CONDs. Here we need to propagate
+ through on the forward walk, so based on the edges vector we've
+ walked through determine which edge to follow. */
+ if (ignored_edges)
+ {
+ if (is_gimple_assign (stmt)
+ && gimple_assign_rhs_code (stmt) == INTEGER_CST)
+ {
+ use_operand_p use_p;
+ gimple *use_stmt;
+ if ((integer_zerop (gimple_assign_rhs1 (stmt))
+ || integer_onep (gimple_assign_rhs1 (stmt)))
+ && single_imm_use (gimple_assign_lhs (stmt), &use_p,
+ &use_stmt))
+ {
+ if (gimple_code (use_stmt) == GIMPLE_COND)
+ continue;
+ if (gimple_code (use_stmt) == GIMPLE_PHI
+ && single_imm_use (gimple_phi_result (use_stmt),
+ &use_p, &use_stmt)
+ && gimple_code (use_stmt) == GIMPLE_COND)
+ continue;
+ }
+ }
+ if (gimple_code (stmt) == GIMPLE_COND
+ && (gimple_cond_code (stmt) == EQ_EXPR
+ || gimple_cond_code (stmt) == NE_EXPR)
+ && TREE_CODE (gimple_cond_lhs (stmt)) == SSA_NAME
+ && TREE_CODE (gimple_cond_rhs (stmt)) == INTEGER_CST
+ && INTEGRAL_TYPE_P (TREE_TYPE (gimple_cond_lhs (stmt)))
+ && (integer_zerop (gimple_cond_rhs (stmt))
+ || integer_onep (gimple_cond_rhs (stmt))))
+ {
+ edge e = NULL, et, ef;
+ tree lhs = gimple_cond_lhs (stmt);
+ bool rhsv = integer_onep (gimple_cond_rhs (stmt));
+ if (gimple_cond_code (stmt) == NE_EXPR)
+ rhsv = !rhsv;
+ extract_true_false_edges_from_block (gimple_bb (stmt), &et, &ef);
+ if (is_gimple_assign (SSA_NAME_DEF_STMT (lhs))
+ && (gimple_assign_rhs_code (SSA_NAME_DEF_STMT (lhs))
+ == INTEGER_CST))
+ {
+ tree rhs = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (lhs));
+ if (rhsv ? integer_onep (rhs) : integer_zerop (rhs))
+ e = et;
+ else if (rhsv ? integer_zerop (rhs) : integer_onep (rhs))
+ e = ef;
+ }
+ else if (gimple_code (SSA_NAME_DEF_STMT (lhs)) == GIMPLE_PHI)
+ {
+ gimple *phi = SSA_NAME_DEF_STMT (lhs);
+ basic_block pbb = gimple_bb (phi);
+ for (edge e2 : edges)
+ if (e2->dest == pbb)
+ {
+ tree rhs = gimple_phi_arg_def_from_edge (phi, e2);
+ if (TREE_CODE (rhs) == SSA_NAME)
+ if (gimple *g = SSA_NAME_DEF_STMT (rhs))
+ if (is_gimple_assign (g)
+ && gimple_assign_rhs_code (g) == INTEGER_CST)
+ rhs = gimple_assign_rhs1 (g);
+ if (rhsv ? integer_onep (rhs) : integer_zerop (rhs))
+ e = et;
+ else if (rhsv ? integer_zerop (rhs)
+ : integer_onep (rhs))
+ e = ef;
+ break;
+ }
+ }
+ if (e)
+ {
+ ass_var = propagate_through_phis (ass_var, e);
+ if (!ass_var || ignored_edges)
+ edges.safe_push (e);
+ abb = e->dest;
+ agsi = gsi_start_bb (abb);
+ goto new_bb;
+ }
+ }
+ }
+
if (gimple_code (stmt) != GIMPLE_ASSIGN)
{
maybe_error_musttail (call, _("unhandled code after call"),
@@ -849,13 +1273,17 @@ find_tail_calls (basic_block bb, struct tailcall **ret, bool only_musttail,
/* See if this is a tail call we can handle. */
if (is_noreturn)
{
+ if (gimple_call_internal_p (call))
+ {
+ maybe_error_musttail (call, _("internal call"), diag_musttail);
+ return;
+ }
tree rettype = TREE_TYPE (TREE_TYPE (current_function_decl));
tree calltype = TREE_TYPE (gimple_call_fntype (call));
if (!VOID_TYPE_P (rettype)
&& !useless_type_conversion_p (rettype, calltype))
{
- maybe_error_musttail (call,
- _("call and return value are different"),
+ maybe_error_musttail (call, _("call and return value are different"),
diag_musttail);
return;
}
@@ -873,27 +1301,77 @@ find_tail_calls (basic_block bb, struct tailcall **ret, bool only_musttail,
{
bool ok = false;
value_range val;
- tree valr;
- /* If IPA-VRP proves called function always returns a singleton range,
- the return value is replaced by the only value in that range.
- For tail call purposes, pretend such replacement didn't happen. */
- if (ass_var == NULL_TREE
- && !tail_recursion
- && TREE_CONSTANT (ret_var))
- if (tree type = gimple_range_type (call))
- if (tree callee = gimple_call_fndecl (call))
- if ((INTEGRAL_TYPE_P (type) || SCALAR_FLOAT_TYPE_P (type))
- && useless_type_conversion_p (TREE_TYPE (TREE_TYPE (callee)),
+ if (ass_var == NULL_TREE && !tail_recursion)
+ {
+ tree other_value = NULL_TREE;
+      /* If we have a function call whose return value is known to be the
+	 same as one of its arguments, try that argument too.  */
+ int flags = gimple_call_return_flags (call);
+ if ((flags & ERF_RETURNS_ARG) != 0
+ && (flags & ERF_RETURN_ARG_MASK) < gimple_call_num_args (call))
+ {
+ tree arg = gimple_call_arg (call, flags & ERF_RETURN_ARG_MASK);
+ if (useless_type_conversion_p (TREE_TYPE (ret_var), TREE_TYPE (arg) ))
+ other_value = arg;
+ }
+ /* If IPA-VRP proves called function always returns a singleton range,
+ the return value is replaced by the only value in that range.
+ For tail call purposes, pretend such replacement didn't happen. */
+ else if (tree type = gimple_range_type (call))
+ if (tree callee = gimple_call_fndecl (call))
+ {
+ tree valr;
+ if ((INTEGRAL_TYPE_P (type)
+ || SCALAR_FLOAT_TYPE_P (type)
+ || POINTER_TYPE_P (type))
+ && useless_type_conversion_p (TREE_TYPE (TREE_TYPE (callee)),
type)
- && useless_type_conversion_p (TREE_TYPE (ret_var), type)
- && ipa_return_value_range (val, callee)
- && val.singleton_p (&valr)
- && operand_equal_p (ret_var, valr, 0))
- ok = true;
+ && useless_type_conversion_p (TREE_TYPE (ret_var), type)
+ && ipa_return_value_range (val, callee)
+ && val.singleton_p (&valr))
+ other_value = valr;
+ }
+
+ if (other_value)
+ {
+ tree rv = ret_var;
+ unsigned int i = edges.length ();
+ /* If ret_var is equal to other_value, we can tail optimize. */
+ if (operand_equal_p (ret_var, other_value, 0))
+ ok = true;
+ else
+	    /* Otherwise, if ret_var is a PHI result, check whether other_value
+	       is what gets propagated through PHIs on the path from the
+	       call's bb to SSA_NAME_DEF_STMT (ret_var)'s bb.  */
+ while (TREE_CODE (rv) == SSA_NAME
+ && gimple_code (SSA_NAME_DEF_STMT (rv)) == GIMPLE_PHI)
+ {
+ tree nrv = NULL_TREE;
+ gimple *g = SSA_NAME_DEF_STMT (rv);
+ for (; i; --i)
+ {
+ if (edges[i - 1]->dest == gimple_bb (g))
+ {
+ nrv = gimple_phi_arg_def_from_edge (g,
+ edges[i - 1]);
+ --i;
+ break;
+ }
+ }
+ if (nrv == NULL_TREE)
+ break;
+ if (operand_equal_p (nrv, other_value, 0))
+ {
+ ok = true;
+ break;
+ }
+ rv = nrv;
+ }
+ }
+ }
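A small illustration of the ERF_RETURNS_ARG path above (not part of the patch; my_copy is a hypothetical name): memcpy is known to return its first argument, so returning that argument instead of the otherwise unused call result can still be treated as returning the call's value:

    void *
    my_copy (void *dst, const void *src, unsigned long n)
    {
      __builtin_memcpy (dst, src, n);   /* call result itself is unused */
      return dst;                       /* the value memcpy returns anyway */
    }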
if (!ok)
{
- maybe_error_musttail (call,
- _("call and return value are different"),
+ maybe_error_musttail (call, _("call and return value are different"),
diag_musttail);
return;
}
@@ -903,18 +1381,29 @@ find_tail_calls (basic_block bb, struct tailcall **ret, bool only_musttail,
multiplicands. */
if (!tail_recursion && (m || a))
{
- maybe_error_musttail (call,
- _("operations after non tail recursive call"),
- diag_musttail);
+ maybe_error_musttail (call, _("operations after non tail recursive "
+ "call"), diag_musttail);
return;
}
/* For pointers only allow additions. */
if (m && POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (current_function_decl))))
{
- maybe_error_musttail (call,
- _("tail recursion with pointers can only use "
- "additions"), diag_musttail);
+ maybe_error_musttail (call, _("tail recursion with pointers can only "
+ "use additions"), diag_musttail);
+ return;
+ }
+
+ if (eh_has_tsan_func_exit != -1
+ && eh_has_tsan_func_exit != has_tsan_func_exit)
+ {
+ if (eh_has_tsan_func_exit)
+ maybe_error_musttail (call, _("call may throw exception caught "
+ "locally or perform cleanups"),
+ diag_musttail);
+ else
+ maybe_error_musttail (call, _("exception cleanups omit "
+ "__tsan_func_exit call"), diag_musttail);
return;
}
@@ -934,9 +1423,9 @@ find_tail_calls (basic_block bb, struct tailcall **ret, bool only_musttail,
param = DECL_CHAIN (param), idx++)
{
tree ddef, arg = gimple_call_arg (call, idx);
- if (is_gimple_reg (param)
- && (ddef = ssa_default_def (cfun, param))
- && (arg != ddef))
+ if (!is_gimple_reg (param)
+ || ((ddef = ssa_default_def (cfun, param))
+ && arg != ddef))
bitmap_set_bit (tailr_arg_needs_copy, idx);
}
}
@@ -946,6 +1435,7 @@ find_tail_calls (basic_block bb, struct tailcall **ret, bool only_musttail,
nw->call_gsi = gsi;
nw->tail_recursion = tail_recursion;
+ nw->has_tsan_func_exit = has_tsan_func_exit;
nw->mult = m;
nw->add = a;
@@ -1110,11 +1600,6 @@ static void
decrease_profile (basic_block bb, profile_count count)
{
bb->count = bb->count - count;
- if (!single_succ_p (bb))
- {
- gcc_assert (!EDGE_COUNT (bb->succs));
- return;
- }
}
/* Eliminates tail call described by T. TMP_VARS is a list of
@@ -1179,7 +1664,7 @@ eliminate_tail_call (struct tailcall *t, class loop *&new_loop)
else
{
/* Number of executions of function has reduced by the tailcall. */
- e = single_succ_edge (gsi_bb (t->call_gsi));
+ e = single_non_eh_succ_edge (gsi_bb (t->call_gsi));
profile_count count = e->count ();
@@ -1194,8 +1679,7 @@ eliminate_tail_call (struct tailcall *t, class loop *&new_loop)
decrease_profile (e->dest, count);
/* Replace the call by a jump to the start of function. */
- e = redirect_edge_and_branch (single_succ_edge (gsi_bb (t->call_gsi)),
- first);
+ e = redirect_edge_and_branch (e, first);
}
gcc_assert (e);
PENDING_STMT (e) = NULL;
@@ -1212,6 +1696,7 @@ eliminate_tail_call (struct tailcall *t, class loop *&new_loop)
/* Add phi node entries for arguments. The ordering of the phi nodes should
be the same as the ordering of the arguments. */
+ auto_vec<tree> copies;
for (param = DECL_ARGUMENTS (current_function_decl),
idx = 0, gpi = gsi_start_phis (first);
param;
@@ -1220,6 +1705,35 @@ eliminate_tail_call (struct tailcall *t, class loop *&new_loop)
if (!bitmap_bit_p (tailr_arg_needs_copy, idx))
continue;
+ if (!is_gimple_reg_type (TREE_TYPE (param)))
+ {
+ if (param == gimple_call_arg (stmt, idx))
+ continue;
+ /* First check if param isn't used by any of the following
+ call arguments. If it is, we need to copy first to
+ a temporary and only after doing all the assignments copy it
+ to param. */
+ size_t idx2 = idx + 1;
+ tree param2 = DECL_CHAIN (param);
+ for (; param2; param2 = DECL_CHAIN (param2), idx2++)
+ if (!is_gimple_reg_type (TREE_TYPE (param)))
+ {
+ tree base = get_base_address (gimple_call_arg (stmt, idx2));
+ if (base == param)
+ break;
+ }
+ tree tmp = param;
+ if (param2)
+ {
+ tmp = create_tmp_var (TREE_TYPE (param));
+ copies.safe_push (param);
+ copies.safe_push (tmp);
+ }
+ gimple *g = gimple_build_assign (tmp, gimple_call_arg (stmt, idx));
+ gsi_insert_before (&t->call_gsi, g, GSI_SAME_STMT);
+ continue;
+ }
+
arg = gimple_call_arg (stmt, idx);
phi = gpi.phi ();
gcc_assert (param == SSA_NAME_VAR (PHI_RESULT (phi)));
@@ -1227,6 +1741,11 @@ eliminate_tail_call (struct tailcall *t, class loop *&new_loop)
add_phi_arg (phi, arg, e, gimple_location (stmt));
gsi_next (&gpi);
}
+ for (unsigned i = 0; i < copies.length (); i += 2)
+ {
+ gimple *g = gimple_build_assign (copies[i], copies[i + 1]);
+ gsi_insert_before (&t->call_gsi, g, GSI_SAME_STMT);
+ }
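A sketch of what the copy handling above enables (not part of the patch; struct pair and count are hypothetical names): tail recursion with a by-value aggregate parameter, whose new value is now assigned to the PARM_DECL directly (through a temporary when a later argument still reads the old value) instead of flowing through a PHI:

    struct pair { int n, step; };

    int
    count (struct pair p, int acc)
    {
      if (p.n <= 0)
        return acc;
      struct pair q = { p.n - p.step, p.step };
      return count (q, acc + 1);
    }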
/* Update the values of accumulators. */
adjust_accumulator_values (t->call_gsi, t->mult, t->add, e);
@@ -1251,6 +1770,14 @@ static bool
optimize_tail_call (struct tailcall *t, bool opt_tailcalls,
class loop *&new_loop)
{
+ if (t->has_tsan_func_exit && (t->tail_recursion || opt_tailcalls))
+ {
+ tree builtin_decl = builtin_decl_implicit (BUILT_IN_TSAN_FUNC_EXIT);
+ gimple *g = gimple_build_call (builtin_decl, 0);
+ gimple_set_location (g, cfun->function_end_locus);
+ gsi_insert_before (&t->call_gsi, g, GSI_SAME_STMT);
+ }
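A minimal illustration of what the inserted call above achieves (not part of the patch; assumes -fsanitize=thread, foo and bar are hypothetical names):

    extern int foo (int);

    int
    bar (int x)
    {
      /* TSan instrumentation normally places __tsan_func_exit () between
         this call and the return; emitting it before the call instead keeps
         the musttail guarantee.  */
      __attribute__((musttail)) return foo (x);
    }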
+
if (t->tail_recursion)
{
eliminate_tail_call (t, new_loop);
@@ -1269,6 +1796,7 @@ optimize_tail_call (struct tailcall *t, bool opt_tailcalls,
print_gimple_stmt (dump_file, stmt, 0, dump_flags);
fprintf (dump_file, " in bb %i\n", (gsi_bb (t->call_gsi))->index);
}
+ return t->has_tsan_func_exit;
}
return false;
@@ -1318,20 +1846,34 @@ tree_optimize_tail_calls_1 (bool opt_tailcalls, bool only_musttail,
/* Only traverse the normal exits, i.e. those that end with return
statement. */
if (safe_is_a <greturn *> (*gsi_last_bb (e->src)))
- find_tail_calls (e->src, &tailcalls, only_musttail, opt_tailcalls,
- diag_musttail);
+ {
+ int retry_tsan_func_exit = 0;
+ find_tail_calls (e->src, e, &tailcalls, only_musttail, opt_tailcalls,
+ diag_musttail, retry_tsan_func_exit, NULL, NULL);
+ if (retry_tsan_func_exit == -1)
+ {
+ retry_tsan_func_exit = 1;
+ find_tail_calls (e->src, e, &tailcalls, only_musttail,
+ opt_tailcalls, diag_musttail,
+ retry_tsan_func_exit, NULL, NULL);
+ }
+ }
}
if (cfun->has_musttail && diag_musttail)
{
basic_block bb;
+ int retry_tsan_func_exit = 0;
FOR_EACH_BB_FN (bb, cfun)
- if (EDGE_COUNT (bb->succs) == 0)
+ if (EDGE_COUNT (bb->succs) == 0
+ || (single_succ_p (bb)
+ && (single_succ_edge (bb)->flags & EDGE_EH)))
if (gimple *c = last_nondebug_stmt (bb))
if (is_gimple_call (c)
&& gimple_call_must_tail_p (as_a <gcall *> (c))
&& gimple_call_noreturn_p (as_a <gcall *> (c)))
- find_tail_calls (bb, &tailcalls, only_musttail, opt_tailcalls,
- diag_musttail);
+ find_tail_calls (bb, NULL, &tailcalls, only_musttail,
+ opt_tailcalls, diag_musttail,
+ retry_tsan_func_exit, NULL, NULL);
}
if (live_vars)
@@ -1341,6 +1883,38 @@ tree_optimize_tail_calls_1 (bool opt_tailcalls, bool only_musttail,
live_vars = NULL;
}
+ if (cfun->has_musttail)
+ {
+ /* We can't mix non-recursive must tail calls with tail recursive
+ calls which require accumulators, because in that case we have to
+	 emit code in between the musttail calls and return, which prevents
+ calling them as tail calls. So, in that case give up on the
+ tail recursion. */
+ for (act = tailcalls; act; act = act->next)
+ if (!act->tail_recursion)
+ {
+ gcall *call = as_a <gcall *> (gsi_stmt (act->call_gsi));
+ if (gimple_call_must_tail_p (call))
+ break;
+ }
+ if (act)
+ for (struct tailcall **p = &tailcalls; *p; )
+ {
+ if ((*p)->tail_recursion && ((*p)->add || (*p)->mult))
+ {
+ struct tailcall *a = *p;
+ *p = (*p)->next;
+ gcall *call = as_a <gcall *> (gsi_stmt (a->call_gsi));
+ maybe_error_musttail (call, _("tail recursion with "
+ "accumulation mixed with "
+ "musttail non-recursive call"),
+ diag_musttail);
+ free (a);
+ }
+ else
+ p = &(*p)->next;
+ }
+ }
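A contrived sketch of the mix described above (not part of the patch; done and f are hypothetical names):

    extern int done (int);

    int
    f (int n, int stop)
    {
      if (stop)
        __attribute__((musttail)) return done (n);
      if (n <= 1)
        return 1;
      /* Tail recursion that would need a multiply accumulator; it is left
         unconverted so the musttail call above is not preceded by the extra
         accumulator code.  */
      return n * f (n - 1, 0);
    }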
/* Construct the phi nodes and accumulators if necessary. */
a_acc = m_acc = NULL_TREE;
for (act = tailcalls; act; act = act->next)
@@ -1354,8 +1928,8 @@ tree_optimize_tail_calls_1 (bool opt_tailcalls, bool only_musttail,
or if there are existing degenerate PHI nodes. */
if (!single_pred_p (first)
|| !gimple_seq_empty_p (phi_nodes (first)))
- first =
- split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
+ first
+ = split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
/* Copy the args if needed. */
unsigned idx;
@@ -1364,6 +1938,8 @@ tree_optimize_tail_calls_1 (bool opt_tailcalls, bool only_musttail,
param = DECL_CHAIN (param), idx++)
if (bitmap_bit_p (tailr_arg_needs_copy, idx))
{
+ if (!is_gimple_reg_type (TREE_TYPE (param)))
+ continue;
tree name = ssa_default_def (cfun, param);
tree new_name = make_ssa_name (param, SSA_NAME_DEF_STMT (name));
gphi *phi;