Diffstat (limited to 'gcc/tree-tailcall.cc')
 gcc/tree-tailcall.cc | 264 +++++++++++++++++++++++++++++++++++++++++++------
 1 file changed, 242 insertions(+), 22 deletions(-)
diff --git a/gcc/tree-tailcall.cc b/gcc/tree-tailcall.cc
index 10e88d9..c80145d 100644
--- a/gcc/tree-tailcall.cc
+++ b/gcc/tree-tailcall.cc
@@ -528,8 +528,20 @@ empty_eh_cleanup (basic_block bb, int *eh_has_tsan_func_exit, int cnt)
*eh_has_tsan_func_exit = 1;
continue;
}
- if (is_gimple_resx (g) && stmt_can_throw_external (cfun, g))
- return true;
+ if (eh_has_tsan_func_exit
+ && sanitize_flags_p (SANITIZE_ADDRESS)
+ && asan_mark_p (g, ASAN_MARK_POISON))
+ continue;
+ if (is_gimple_resx (g))
+ {
+ if (stmt_can_throw_external (cfun, g))
+ return true;
+ if (single_succ_p (bb)
+ && (single_succ_edge (bb)->flags & EDGE_EH)
+ && cnt > 1)
+ return empty_eh_cleanup (single_succ (bb), eh_has_tsan_func_exit,
+ cnt - 1);
+ }
return false;
}
if (!single_succ_p (bb))
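The relaxed empty_eh_cleanup above now looks through landing pads that only
poison locals for ASan and then resume, following a single EH successor for
up to CNT blocks.  A hypothetical reduced input (function names and flags are
illustrative, not taken from this commit) whose EH cleanup has that shape:

    // g++ -O0 -fsanitize=address: if touch throws, the cleanup only
    // contains .ASAN_MARK (POISON, &c, 4) followed by a resx, which
    // the walk above can now treat as empty.
    int foo (int);
    void touch (char *);

    int
    bar (int x)
    {
      char c[4] = { 0 };
      touch (c);
      [[gnu::musttail]] return foo (x);
    }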
@@ -544,8 +556,9 @@ empty_eh_cleanup (basic_block bb, int *eh_has_tsan_func_exit, int cnt)
static live_vars_map *live_vars;
static vec<bitmap_head> live_vars_vec;
-/* Finds tailcalls falling into basic block BB. The list of found tailcalls is
- added to the start of RET. When ONLY_MUSTTAIL is set only handle musttail.
+/* Finds tailcalls falling into basic block BB when coming from edge ESUCC (or
+ NULL). The list of found tailcalls is added to the start of RET.
+ When ONLY_MUSTTAIL is set, only handle musttail calls.
Update OPT_TAILCALLS as output parameter. If DIAG_MUSTTAIL, diagnose
failures for musttail calls. RETRY_TSAN_FUNC_EXIT is initially 0 and
in that case the last call is attempted to be tail called, including
@@ -554,12 +567,15 @@ static vec<bitmap_head> live_vars_vec;
will retry with it set to 1 (regardless of whether the call including
__tsan_func_exit was successfully detected as a tail call or not) and that
will allow turning musttail calls before that call into tail calls as well
- by adding __tsan_func_exit call before the call. */
+ by adding a __tsan_func_exit call before the call.
+ IGNORED_EDGES and MUST_SEE_BBS are used in recursive calls when handling
+ GIMPLE_CONDs for cleanups. */
static void
-find_tail_calls (basic_block bb, struct tailcall **ret, bool only_musttail,
- bool &opt_tailcalls, bool diag_musttail,
- int &retry_tsan_func_exit)
+find_tail_calls (basic_block bb, edge esucc, struct tailcall **ret,
+ bool only_musttail, bool &opt_tailcalls, bool diag_musttail,
+ int &retry_tsan_func_exit, hash_set<edge> *ignored_edges,
+ hash_set<basic_block> *must_see_bbs)
{
tree ass_var = NULL_TREE, ret_var, func, param;
gimple *stmt;
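The retry protocol described above concerns -fsanitize=thread, where the
instrumentation places a __tsan_func_exit () call between the candidate call
and the return.  A hypothetical illustration (the GIMPLE is paraphrased from
the comment, not dumped from this commit):

    int bar (int);

    int
    foo (int x)
    {
      // With -fsanitize=thread the epilogue is roughly
      //   t_1 = bar (x_2); __tsan_func_exit (); return t_1;
      // The first find_tail_calls pass tries to tail-call bar including
      // __tsan_func_exit; on failure it sets retry_tsan_func_exit to -1
      // and the caller retries with it set to 1.
      return bar (x);
    }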
@@ -575,14 +591,24 @@ find_tail_calls (basic_block bb, struct tailcall **ret, bool only_musttail,
bool only_tailr = false;
bool has_tsan_func_exit = false;
int eh_has_tsan_func_exit = -1;
+ bool delete_ignored_edges = false;
if (!single_succ_p (bb)
&& (EDGE_COUNT (bb->succs) || !cfun->has_musttail || !diag_musttail))
{
+ if (EDGE_COUNT (bb->succs) == 2
+ && esucc
+ && cfun->has_musttail
+ && diag_musttail
+ && (EDGE_SUCC (bb, 0)->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
+ && (EDGE_SUCC (bb, 1)->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
+ && (stmt = last_nondebug_stmt (bb))
+ && gimple_code (stmt) == GIMPLE_COND)
+ ;
/* If there is an abnormal edge, assume it's the only extra one.
Tolerate that case so that we can give better error messages
for musttail later. */
- if (!has_abnormal_or_eh_outgoing_edge_p (bb))
+ else if (!has_abnormal_or_eh_outgoing_edge_p (bb))
{
if (dump_file)
fprintf (dump_file, "Basic block %d has extra exit edges\n",
@@ -619,6 +645,85 @@ find_tail_calls (basic_block bb, struct tailcall **ret, bool only_musttail,
continue;
}
+ if (cfun->has_musttail
+ && sanitize_flags_p (SANITIZE_ADDRESS)
+ && asan_mark_p (stmt, ASAN_MARK_POISON)
+ && diag_musttail)
+ continue;
+
+ /* Especially at -O0 with -fsanitize=address we can end up with
+ code like
+ _26 = foo (x_24(D)); [must tail call]
+ finally_tmp.3_27 = 0;
+ goto <bb 5>; [INV]
+
+ ...
+
+ <bb 5> :
+ # _6 = PHI <_26(3), _23(D)(4)>
+ # finally_tmp.3_8 = PHI <finally_tmp.3_27(3), finally_tmp.3_22(4)>
+ .ASAN_MARK (POISON, &c, 4);
+ if (finally_tmp.3_8 == 1)
+ goto <bb 7>; [INV]
+ else
+ goto <bb 6>; [INV]
+ When walking backwards, ESUCC is the edge we are coming from.
+ Depending on its EDGE_TRUE_VALUE flag, on whether the comparison
+ is == or != and on the value compared against, try to find out
+ through which edge we need to go and which edge should be ignored.
+ The code handles both INTEGER_CST PHI arguments and SSA_NAMEs set
+ to constants (for -O0 where those aren't propagated). */
+ if (cfun->has_musttail
+ && diag_musttail
+ && esucc
+ && gimple_code (stmt) == GIMPLE_COND
+ && (gimple_cond_code (stmt) == EQ_EXPR
+ || gimple_cond_code (stmt) == NE_EXPR)
+ && TREE_CODE (gimple_cond_lhs (stmt)) == SSA_NAME
+ && TREE_CODE (gimple_cond_rhs (stmt)) == INTEGER_CST
+ && INTEGRAL_TYPE_P (TREE_TYPE (gimple_cond_lhs (stmt)))
+ && (integer_zerop (gimple_cond_rhs (stmt))
+ || integer_onep (gimple_cond_rhs (stmt))))
+ {
+ tree lhs = gimple_cond_lhs (stmt);
+ bool rhsv = integer_onep (gimple_cond_rhs (stmt));
+ if (((esucc->flags & EDGE_TRUE_VALUE) != 0)
+ ^ (gimple_cond_code (stmt) == EQ_EXPR))
+ rhsv = !rhsv;
+ if (!ignored_edges)
+ {
+ ignored_edges = new hash_set<edge>;
+ must_see_bbs = new hash_set<basic_block>;
+ delete_ignored_edges = true;
+ }
+ if (is_gimple_assign (SSA_NAME_DEF_STMT (lhs))
+ && (gimple_assign_rhs_code (SSA_NAME_DEF_STMT (lhs))
+ == INTEGER_CST))
+ {
+ tree rhs = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (lhs));
+ if (rhsv ? integer_onep (rhs) : integer_zerop (rhs))
+ continue;
+ }
+ else if (gimple_code (SSA_NAME_DEF_STMT (lhs)) == GIMPLE_PHI)
+ {
+ gimple *phi = SSA_NAME_DEF_STMT (lhs);
+ basic_block pbb = gimple_bb (phi);
+ must_see_bbs->add (pbb);
+ edge_iterator ei;
+ FOR_EACH_EDGE (e, ei, pbb->preds)
+ {
+ tree rhs = gimple_phi_arg_def_from_edge (phi, e);
+ if (TREE_CODE (rhs) == SSA_NAME
+ && is_gimple_assign (SSA_NAME_DEF_STMT (rhs))
+ && (gimple_assign_rhs_code (SSA_NAME_DEF_STMT (rhs))
+ == INTEGER_CST))
+ rhs = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (rhs));
+ if (!(rhsv ? integer_onep (rhs) : integer_zerop (rhs)))
+ ignored_edges->add (e);
+ }
+ }
+ }
+
if (!last_stmt)
last_stmt = stmt;
/* Check for a call. */
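A hypothetical source-level reduction (not from the commit) of the GIMPLE
shape quoted in the comment above: a musttail call inside a scope whose ASan
cleanup is shared between the normal and EH exits, so try/finally lowering
introduces the finally_tmp.* dispatch that find_tail_calls now sees through:

    int foo (int);
    void touch (char *);

    int
    bar (int x)
    {
      {
        char c[4] = { 0 };
        touch (c);      // keeps `c' addressable, so -fsanitize=address
                        // emits .ASAN_MARK (POISON, &c, 4) cleanups
        [[gnu::musttail]] return foo (x);
      }
    }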
@@ -627,12 +732,20 @@ find_tail_calls (basic_block bb, struct tailcall **ret, bool only_musttail,
call = as_a <gcall *> (stmt);
/* Handle only musttail calls when not optimizing. */
if (only_musttail && !gimple_call_must_tail_p (call))
- return;
+ {
+ maybe_delete_ignored_edges:
+ if (delete_ignored_edges)
+ {
+ delete ignored_edges;
+ delete must_see_bbs;
+ }
+ return;
+ }
if (bad_stmt)
{
maybe_error_musttail (call, _("memory reference or volatile "
"after call"), diag_musttail);
- return;
+ goto maybe_delete_ignored_edges;
}
ass_var = gimple_call_lhs (call);
break;
@@ -661,17 +774,34 @@ find_tail_calls (basic_block bb, struct tailcall **ret, bool only_musttail,
}
if (bad_stmt)
- return;
+ goto maybe_delete_ignored_edges;
if (gsi_end_p (gsi))
{
+ if (must_see_bbs)
+ must_see_bbs->remove (bb);
+
edge_iterator ei;
/* Recurse to the predecessors. */
FOR_EACH_EDGE (e, ei, bb->preds)
- find_tail_calls (e->src, ret, only_musttail, opt_tailcalls,
- diag_musttail, retry_tsan_func_exit);
+ {
+ if (ignored_edges && ignored_edges->contains (e))
+ continue;
+ find_tail_calls (e->src, e, ret, only_musttail, opt_tailcalls,
+ diag_musttail, retry_tsan_func_exit, ignored_edges,
+ must_see_bbs);
+ }
- return;
+ goto maybe_delete_ignored_edges;
+ }
+
+ if (must_see_bbs && !must_see_bbs->is_empty ())
+ goto maybe_delete_ignored_edges;
+
+ if (delete_ignored_edges)
+ {
+ delete ignored_edges;
+ delete must_see_bbs;
}
if (!suitable_for_tail_opt_p (call, diag_musttail))
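The delete_ignored_edges flag above implements a simple ownership rule: the
outermost recursion level that had to allocate the two hash sets frees them,
and every early return funnels through the maybe_delete_ignored_edges label.
A minimal standalone sketch of the same convention (simplified types and
names, not GCC code):

    #include <set>

    static void
    walk (int node, std::set<int> *visited)
    {
      bool owner = false;
      if (!visited)
        {
          // Outermost call allocates; recursive calls reuse the set.
          visited = new std::set<int>;
          owner = true;
        }
      if (visited->insert (node).second && node > 0)
        walk (node - 1, visited);
      if (owner)
        delete visited;  // Only the allocating frame frees.
    }

    int
    main ()
    {
      walk (3, nullptr);
      return 0;
    }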
@@ -963,11 +1093,12 @@ find_tail_calls (basic_block bb, struct tailcall **ret, bool only_musttail,
tree tmp_m = NULL_TREE;
gsi_next (&agsi);
+ new_bb:
while (gsi_end_p (agsi))
{
edge e = single_non_eh_succ_edge (abb);
ass_var = propagate_through_phis (ass_var, e);
- if (!ass_var)
+ if (!ass_var || ignored_edges)
edges.safe_push (e);
abb = e->dest;
agsi = gsi_start_bb (abb);
@@ -995,6 +1126,94 @@ find_tail_calls (basic_block bb, struct tailcall **ret, bool only_musttail,
continue;
}
+ if (cfun->has_musttail
+ && sanitize_flags_p (SANITIZE_ADDRESS)
+ && asan_mark_p (stmt, ASAN_MARK_POISON)
+ && diag_musttail)
+ continue;
+
+ /* See the earlier comment on GIMPLE_CONDs. Here we need to propagate
+ through them on the forward walk, so, based on the edges vector we've
+ walked through, determine which edge to follow. */
+ if (ignored_edges)
+ {
+ if (is_gimple_assign (stmt)
+ && gimple_assign_rhs_code (stmt) == INTEGER_CST)
+ {
+ use_operand_p use_p;
+ gimple *use_stmt;
+ if ((integer_zerop (gimple_assign_rhs1 (stmt))
+ || integer_onep (gimple_assign_rhs1 (stmt)))
+ && single_imm_use (gimple_assign_lhs (stmt), &use_p,
+ &use_stmt))
+ {
+ if (gimple_code (use_stmt) == GIMPLE_COND)
+ continue;
+ if (gimple_code (use_stmt) == GIMPLE_PHI
+ && single_imm_use (gimple_phi_result (use_stmt),
+ &use_p, &use_stmt)
+ && gimple_code (use_stmt) == GIMPLE_COND)
+ continue;
+ }
+ }
+ if (gimple_code (stmt) == GIMPLE_COND
+ && (gimple_cond_code (stmt) == EQ_EXPR
+ || gimple_cond_code (stmt) == NE_EXPR)
+ && TREE_CODE (gimple_cond_lhs (stmt)) == SSA_NAME
+ && TREE_CODE (gimple_cond_rhs (stmt)) == INTEGER_CST
+ && INTEGRAL_TYPE_P (TREE_TYPE (gimple_cond_lhs (stmt)))
+ && (integer_zerop (gimple_cond_rhs (stmt))
+ || integer_onep (gimple_cond_rhs (stmt))))
+ {
+ edge e = NULL, et, ef;
+ tree lhs = gimple_cond_lhs (stmt);
+ bool rhsv = integer_onep (gimple_cond_rhs (stmt));
+ if (gimple_cond_code (stmt) == NE_EXPR)
+ rhsv = !rhsv;
+ extract_true_false_edges_from_block (gimple_bb (stmt), &et, &ef);
+ if (is_gimple_assign (SSA_NAME_DEF_STMT (lhs))
+ && (gimple_assign_rhs_code (SSA_NAME_DEF_STMT (lhs))
+ == INTEGER_CST))
+ {
+ tree rhs = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (lhs));
+ if (rhsv ? integer_onep (rhs) : integer_zerop (rhs))
+ e = et;
+ else if (rhsv ? integer_zerop (rhs) : integer_onep (rhs))
+ e = ef;
+ }
+ else if (gimple_code (SSA_NAME_DEF_STMT (lhs)) == GIMPLE_PHI)
+ {
+ gimple *phi = SSA_NAME_DEF_STMT (lhs);
+ basic_block pbb = gimple_bb (phi);
+ for (edge e2 : edges)
+ if (e2->dest == pbb)
+ {
+ tree rhs = gimple_phi_arg_def_from_edge (phi, e2);
+ if (TREE_CODE (rhs) == SSA_NAME)
+ if (gimple *g = SSA_NAME_DEF_STMT (rhs))
+ if (is_gimple_assign (g)
+ && gimple_assign_rhs_code (g) == INTEGER_CST)
+ rhs = gimple_assign_rhs1 (g);
+ if (rhsv ? integer_onep (rhs) : integer_zerop (rhs))
+ e = et;
+ else if (rhsv ? integer_zerop (rhs)
+ : integer_onep (rhs))
+ e = ef;
+ break;
+ }
+ }
+ if (e)
+ {
+ ass_var = propagate_through_phis (ass_var, e);
+ if (!ass_var || ignored_edges)
+ edges.safe_push (e);
+ abb = e->dest;
+ agsi = gsi_start_bb (abb);
+ goto new_bb;
+ }
+ }
+ }
+
if (gimple_code (stmt) != GIMPLE_ASSIGN)
{
maybe_error_musttail (call, _("unhandled code after call"),
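Both walks normalize the comparison the same way: rhsv ends up meaning "the
value the condition's LHS must have for control to continue along the edge of
interest", flipped once for != and once for the false edge.  A standalone
check of that XOR normalization (plain bools instead of GCC trees and edges):

    #include <cassert>

    // came_true: the EDGE_TRUE_VALUE edge is the one arrived by (backward
    // walk) or to be followed (forward walk); is_eq: the condition uses ==;
    // rhs_one: it compares against 1.  Returns the value the LHS must have.
    static bool
    required_lhs (bool came_true, bool is_eq, bool rhs_one)
    {
      bool rhsv = rhs_one;
      if (came_true ^ is_eq)
        rhsv = !rhsv;
      return rhsv;
    }

    int
    main ()
    {
      assert (required_lhs (true, true, true));    // true edge of x == 1
      assert (!required_lhs (false, true, true));  // false edge of x == 1
      assert (required_lhs (true, false, false));  // true edge of x != 0
      return 0;
    }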
@@ -1629,14 +1848,14 @@ tree_optimize_tail_calls_1 (bool opt_tailcalls, bool only_musttail,
if (safe_is_a <greturn *> (*gsi_last_bb (e->src)))
{
int retry_tsan_func_exit = 0;
- find_tail_calls (e->src, &tailcalls, only_musttail, opt_tailcalls,
- diag_musttail, retry_tsan_func_exit);
+ find_tail_calls (e->src, e, &tailcalls, only_musttail, opt_tailcalls,
+ diag_musttail, retry_tsan_func_exit, NULL, NULL);
if (retry_tsan_func_exit == -1)
{
retry_tsan_func_exit = 1;
- find_tail_calls (e->src, &tailcalls, only_musttail,
+ find_tail_calls (e->src, e, &tailcalls, only_musttail,
opt_tailcalls, diag_musttail,
- retry_tsan_func_exit);
+ retry_tsan_func_exit, NULL, NULL);
}
}
}
@@ -1652,8 +1871,9 @@ tree_optimize_tail_calls_1 (bool opt_tailcalls, bool only_musttail,
if (is_gimple_call (c)
&& gimple_call_must_tail_p (as_a <gcall *> (c))
&& gimple_call_noreturn_p (as_a <gcall *> (c)))
- find_tail_calls (bb, &tailcalls, only_musttail, opt_tailcalls,
- diag_musttail, retry_tsan_func_exit);
+ find_tail_calls (bb, NULL, &tailcalls, only_musttail,
+ opt_tailcalls, diag_musttail,
+ retry_tsan_func_exit, NULL, NULL);
}
if (live_vars)
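The NULL esucc in the last hunk is deliberate: a musttail call to a noreturn
function sits in a basic block with no successor edges, so there is no edge
to come from.  A hypothetical example of such a call:

    [[noreturn]] void give_up (int);

    void
    foo (int x)
    {
      [[gnu::musttail]] return give_up (x);
    }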