Diffstat (limited to 'gcc/tree-tailcall.cc')
-rw-r--r-- | gcc/tree-tailcall.cc | 417 |
1 file changed, 343 insertions, 74 deletions
diff --git a/gcc/tree-tailcall.cc b/gcc/tree-tailcall.cc
index e71341b..f593363 100644
--- a/gcc/tree-tailcall.cc
+++ b/gcc/tree-tailcall.cc
@@ -51,6 +51,8 @@ along with GCC; see the file COPYING3.  If not see
 #include "symbol-summary.h"
 #include "ipa-cp.h"
 #include "ipa-prop.h"
+#include "attribs.h"
+#include "asan.h"
 
 /* The file implements the tail recursion elimination.  It is also used to
    analyze the tail calls in general, passing the results to the rtl level
@@ -122,6 +124,9 @@ struct tailcall
   /* True if it is a call to the current function.  */
   bool tail_recursion;
 
+  /* True if there is __tsan_func_exit call after the call.  */
+  bool has_tsan_func_exit;
+
   /* The return value of the caller is mult * f + add, where f is the return
      value of the call.  */
   tree mult, add;
@@ -165,8 +170,6 @@ suitable_for_tail_opt_p (gcall *call, bool diag_musttail)
 static bool
 suitable_for_tail_call_opt_p (gcall *call, bool diag_musttail)
 {
-  tree param;
-
   /* alloca (until we have stack slot life analysis) inhibits sibling call
      optimizations, but not tail recursion.  */
   if (cfun->calls_alloca)
@@ -204,21 +207,60 @@ suitable_for_tail_call_opt_p (gcall *call, bool diag_musttail)
       return false;
     }
 
-  /* ??? It is OK if the argument of a function is taken in some cases,
-     but not in all cases.  See PR15387 and PR19616.  Revisit for 4.1.  */
-  for (param = DECL_ARGUMENTS (current_function_decl);
-       param;
-       param = DECL_CHAIN (param))
-    if (TREE_ADDRESSABLE (param))
+  if (diag_musttail
+      && gimple_call_must_tail_p (call)
+      && warn_musttail_local_addr)
+    for (unsigned int i = 0; i < gimple_call_num_args (call); i++)
       {
-	maybe_error_musttail (call, _("address of caller arguments taken"),
-			      diag_musttail);
-	return false;
+	tree arg = gimple_call_arg (call, i);
+	if (!POINTER_TYPE_P (TREE_TYPE (arg)))
+	  continue;
+	if (TREE_CODE (arg) == ADDR_EXPR)
+	  {
+	    arg = get_base_address (TREE_OPERAND (arg, 0));
+	    if (auto_var_in_fn_p (arg, current_function_decl))
+	      {
+		if (TREE_CODE (arg) == LABEL_DECL)
+		  warning_at (gimple_location (call), OPT_Wmusttail_local_addr,
+			      "address of label passed to %<musttail%> "
+			      "call argument");
+		else if (TREE_CODE (arg) == PARM_DECL)
+		  warning_at (gimple_location (call), OPT_Wmusttail_local_addr,
+			      "address of parameter %qD passed to "
+			      "%<musttail%> call argument", arg);
+		else if (!DECL_ARTIFICIAL (arg) && DECL_NAME (arg))
+		  warning_at (gimple_location (call), OPT_Wmusttail_local_addr,
+			      "address of automatic variable %qD passed to "
+			      "%<musttail%> call argument", arg);
+		else
+		  warning_at (gimple_location (call), OPT_Wmusttail_local_addr,
+			      "address of local variable passed to "
+			      "%<musttail%> call argument");
+		suppress_warning (call, OPT_Wmaybe_musttail_local_addr);
+	      }
+	  }
       }
 
   return true;
 }
 
+/* Return single successor edge ignoring EDGE_EH edges.  */
+
+static edge
+single_non_eh_succ_edge (basic_block bb)
+{
+  edge e, ret = NULL;
+  edge_iterator ei;
+  FOR_EACH_EDGE (e, ei, bb->succs)
+    if ((e->flags & EDGE_EH) == 0)
+      {
+	gcc_assert (ret == NULL);
+	ret = e;
+      }
+  gcc_assert (ret);
+  return ret;
+}
+
 /* Checks whether the expression EXPR in stmt AT is independent of the
    statement pointed to by GSI (in a sense that we already know EXPR's value
    at GSI).  We use the fact that we are only called from the chain of
@@ -245,7 +287,7 @@ independent_of_stmt_p (tree expr, gimple *at, gimple_stmt_iterator gsi,
   /* Mark the blocks in the chain leading to the end.  */
   at_bb = gimple_bb (at);
   call_bb = gimple_bb (gsi_stmt (gsi));
-  for (bb = call_bb; bb != at_bb; bb = single_succ (bb))
+  for (bb = call_bb; bb != at_bb; bb = single_non_eh_succ_edge (bb)->dest)
     bb->aux = &bb->aux;
   bb->aux = &bb->aux;
 
@@ -289,7 +331,7 @@ independent_of_stmt_p (tree expr, gimple *at, gimple_stmt_iterator gsi,
     }
 
   /* Unmark the blocks.  */
-  for (bb = call_bb; bb != at_bb; bb = single_succ (bb))
+  for (bb = call_bb; bb != at_bb; bb = single_non_eh_succ_edge (bb)->dest)
     bb->aux = NULL;
   bb->aux = NULL;
 
@@ -447,7 +489,7 @@ maybe_error_musttail (gcall *call, const char *err, bool diag_musttail)
 {
   if (gimple_call_must_tail_p (call) && diag_musttail)
     {
-      error_at (call->location, "cannot tail-call: %s", err);
+      error_at (gimple_location (call), "cannot tail-call: %s", err);
       /* Avoid another error.  ??? If there are multiple reasons why tail
         calls fail it might be useful to report them all to avoid
         whack-a-mole for the user.  But currently there is too much
@@ -455,26 +497,69 @@ maybe_error_musttail (gcall *call, const char *err, bool diag_musttail)
      gimple_call_set_must_tail (call, false); /* Avoid another error.  */
      gimple_call_set_tail (call, false);
    }
-  if (dump_file)
+  if (dump_file && (dump_flags & TDF_DETAILS))
     {
+      fprintf (dump_file, "Cannot tail-call: %s: ", err);
       print_gimple_stmt (dump_file, call, 0, TDF_SLIM);
-      fprintf (dump_file, "Cannot convert: %s\n", err);
     }
 }
 
+/* Return true if there is no real work performed in the exception
+   path starting at BB and it will in the end result in external exception.
+   Search at most CNT basic blocks (so that we don't need to do trivial
+   loop discovery).  */
+static bool
+empty_eh_cleanup (basic_block bb, int *eh_has_tsan_func_exit, int cnt)
+{
+  if (EDGE_COUNT (bb->succs) > 1)
+    return false;
+
+  for (gimple_stmt_iterator gsi = gsi_after_labels (bb); !gsi_end_p (gsi);
+       gsi_next (&gsi))
+    {
+      gimple *g = gsi_stmt (gsi);
+      if (is_gimple_debug (g) || gimple_clobber_p (g))
+	continue;
+      if (eh_has_tsan_func_exit
+	  && !*eh_has_tsan_func_exit
+	  && sanitize_flags_p (SANITIZE_THREAD)
+	  && gimple_call_builtin_p (g, BUILT_IN_TSAN_FUNC_EXIT))
+	{
+	  *eh_has_tsan_func_exit = 1;
+	  continue;
+	}
+      if (is_gimple_resx (g) && stmt_can_throw_external (cfun, g))
+	return true;
+      return false;
+    }
+  if (!single_succ_p (bb))
+    return false;
+  if (cnt == 1)
+    return false;
+  return empty_eh_cleanup (single_succ (bb), eh_has_tsan_func_exit, cnt - 1);
+}
+
 /* Argument for compute_live_vars/live_vars_at_stmt and what compute_live_vars
    returns.  Computed lazily, but just once for the function.  */
 static live_vars_map *live_vars;
 static vec<bitmap_head> live_vars_vec;
 
-/* Finds tailcalls falling into basic block BB. The list of found tailcalls is
+/* Finds tailcalls falling into basic block BB.  The list of found tailcalls is
    added to the start of RET.  When ONLY_MUSTTAIL is set only handle musttail.
    Update OPT_TAILCALLS as output parameter.  If DIAG_MUSTTAIL, diagnose
-   failures for musttail calls.  */
+   failures for musttail calls.  RETRY_TSAN_FUNC_EXIT is initially 0 and
+   in that case the last call is attempted to be tail called, including
+   __tsan_func_exit with -fsanitize=thread.  It is set to -1 if we
+   detect __tsan_func_exit call and in that case tree_optimize_tail_calls_1
+   will retry with it set to 1 (regardless of whether turning the
+   __tsan_func_exit was successfully detected as tail call or not) and that
+   will allow turning musttail calls before that call into tail calls as well
+   by adding __tsan_func_exit call before the call.  */
 
 static void
 find_tail_calls (basic_block bb, struct tailcall **ret, bool only_musttail,
-		 bool &opt_tailcalls, bool diag_musttail)
+		 bool &opt_tailcalls, bool diag_musttail,
+		 int &retry_tsan_func_exit)
 {
   tree ass_var = NULL_TREE, ret_var, func, param;
   gimple *stmt;
@@ -487,6 +572,9 @@ find_tail_calls (basic_block bb, struct tailcall **ret, bool only_musttail,
   basic_block abb;
   size_t idx;
   tree var;
+  bool only_tailr = false;
+  bool has_tsan_func_exit = false;
+  int eh_has_tsan_func_exit = -1;
 
   if (!single_succ_p (bb)
       && (EDGE_COUNT (bb->succs) || !cfun->has_musttail || !diag_musttail))
@@ -520,6 +608,17 @@ find_tail_calls (basic_block bb, struct tailcall **ret, bool only_musttail,
	  || is_gimple_debug (stmt))
	continue;
 
+      if (cfun->has_musttail
+	  && sanitize_flags_p (SANITIZE_THREAD)
+	  && gimple_call_builtin_p (stmt, BUILT_IN_TSAN_FUNC_EXIT)
+	  && diag_musttail)
+	{
+	  if (retry_tsan_func_exit == 0)
+	    retry_tsan_func_exit = -1;
+	  else if (retry_tsan_func_exit == 1)
+	    continue;
+	}
+
       if (!last_stmt)
	last_stmt = stmt;
       /* Check for a call.  */
@@ -531,9 +630,8 @@ find_tail_calls (basic_block bb, struct tailcall **ret, bool only_musttail,
	return;
       if (bad_stmt)
	{
-	  maybe_error_musttail (call,
-				_("memory reference or volatile after "
-				  "call"), diag_musttail);
+	  maybe_error_musttail (call, _("memory reference or volatile "
+					"after call"), diag_musttail);
	  return;
	}
       ass_var = gimple_call_lhs (call);
@@ -571,7 +669,7 @@ find_tail_calls (basic_block bb, struct tailcall **ret, bool only_musttail,
       /* Recurse to the predecessors.  */
       FOR_EACH_EDGE (e, ei, bb->preds)
	find_tail_calls (e->src, ret, only_musttail, opt_tailcalls,
-			 diag_musttail);
+			 diag_musttail, retry_tsan_func_exit);
 
       return;
     }
@@ -582,6 +680,25 @@ find_tail_calls (basic_block bb, struct tailcall **ret, bool only_musttail,
   if (!suitable_for_tail_call_opt_p (call, diag_musttail))
     opt_tailcalls = false;
 
+  /* ??? It is OK if the argument of a function is taken in some cases,
+     but not in all cases.  See PR15387 and PR19616.  Revisit for 4.1.  */
+  if (!diag_musttail || !gimple_call_must_tail_p (call))
+    for (param = DECL_ARGUMENTS (current_function_decl);
+	 param; param = DECL_CHAIN (param))
+      if (TREE_ADDRESSABLE (param))
+	{
+	  maybe_error_musttail (call, _("address of caller arguments taken"),
+				diag_musttail);
+	  /* If current function has musttail calls, we can't disable tail
+	     calls altogether for the whole caller, because those might be
+	     actually fine.  So just punt if this exact call is not
+	     a tail recursion.  */
+	  if (cfun->has_musttail)
+	    only_tailr = true;
+	  else
+	    opt_tailcalls = false;
+	}
+
   /* If the LHS of our call is not just a simple register or local
      variable, we can't transform this into a tail or sibling call.
     This situation happens, in (e.g.) "*p = foo()" where foo returns a
@@ -612,14 +729,39 @@ find_tail_calls (basic_block bb, struct tailcall **ret, bool only_musttail,
   if ((stmt_could_throw_p (cfun, stmt)
       && !stmt_can_throw_external (cfun, stmt)) || EDGE_COUNT (bb->succs) > 1)
     {
-      if (stmt == last_stmt)
-	maybe_error_musttail (call,
-			      _("call may throw exception that does not "
-				"propagate"), diag_musttail);
-      else
-	maybe_error_musttail (call, _("code between call and return"),
-			      diag_musttail);
-      return;
+      if (stmt != last_stmt)
+	{
+	  maybe_error_musttail (call, _("code between call and return"),
+				diag_musttail);
+	  return;
+	}
+
+      edge e;
+      edge_iterator ei;
+      FOR_EACH_EDGE (e, ei, bb->succs)
+	if (e->flags & EDGE_EH)
+	  break;
+
+      if (!e)
+	{
+	  maybe_error_musttail (call, _("call may throw exception that does not "
+					"propagate"), diag_musttail);
+	  return;
+	}
+
+      if (diag_musttail && gimple_call_must_tail_p (call))
+	eh_has_tsan_func_exit = 0;
+      if (!gimple_call_must_tail_p (call)
+	  || !empty_eh_cleanup (e->dest,
+				eh_has_tsan_func_exit
+				? NULL : &eh_has_tsan_func_exit, 20)
+	  || EDGE_COUNT (bb->succs) > 2)
+	{
+	  maybe_error_musttail (call, _("call may throw exception caught "
+					"locally or perform cleanups"),
+				diag_musttail);
+	  return;
+	}
     }
 
   /* If the function returns a value, then at present, the tail call
@@ -694,6 +836,9 @@ find_tail_calls (basic_block bb, struct tailcall **ret, bool only_musttail,
       tail_recursion = true;
     }
 
+  if (only_tailr && !tail_recursion)
+    return;
+
   /* Compute live vars if not computed yet.  */
   if (live_vars == NULL)
     {
@@ -730,11 +875,23 @@ find_tail_calls (basic_block bb, struct tailcall **ret, bool only_musttail,
	{
	  if (!VAR_P (var))
	    {
+	      if (diag_musttail && gimple_call_must_tail_p (call))
+		{
+		  auto opt = OPT_Wmaybe_musttail_local_addr;
+		  if (!warning_suppressed_p (call,
+					     opt))
+		    {
+		      warning_at (gimple_location (call), opt,
+				  "address of local variable can escape to "
+				  "%<musttail%> call");
+		      suppress_warning (call, opt);
+		    }
+		  continue;
+		}
	      if (local_live_vars)
		BITMAP_FREE (local_live_vars);
-	      maybe_error_musttail (call,
-				    _("call invocation refers to locals"),
-				    diag_musttail);
+	      maybe_error_musttail (call, _("call invocation refers to "
					    "locals"), diag_musttail);
	      return;
	    }
	  else
@@ -742,15 +899,48 @@ find_tail_calls (basic_block bb, struct tailcall **ret, bool only_musttail,
	      unsigned int *v = live_vars->get (DECL_UID (var));
	      if (bitmap_bit_p (local_live_vars, *v))
		{
+		  if (diag_musttail && gimple_call_must_tail_p (call))
+		    {
+		      auto opt = OPT_Wmaybe_musttail_local_addr;
+		      if (!warning_suppressed_p (call, opt))
+			{
+			  if (!DECL_ARTIFICIAL (var) && DECL_NAME (var))
+			    warning_at (gimple_location (call), opt,
+					"address of automatic variable %qD "
+					"can escape to %<musttail%> call",
+					var);
+			  else
+			    warning_at (gimple_location (call), opt,
+					"address of local variable can escape "
+					"to %<musttail%> call");
+			  suppress_warning (call, opt);
+			}
+		      continue;
+		    }
		  BITMAP_FREE (local_live_vars);
-		  maybe_error_musttail (call,
-					_("call invocation refers to locals"),
-					diag_musttail);
+		  maybe_error_musttail (call, _("call invocation refers to "
						"locals"), diag_musttail);
		  return;
		}
	    }
	}
     }
 
+  if (diag_musttail
+      && gimple_call_must_tail_p (call)
+      && !warning_suppressed_p (call, OPT_Wmaybe_musttail_local_addr))
+    for (tree param = DECL_ARGUMENTS (current_function_decl);
+	 param; param = DECL_CHAIN (param))
+      if (may_be_aliased (param)
+	  && (ref_maybe_used_by_stmt_p (call, param, false)
+	      || call_may_clobber_ref_p (call, param, false)))
+	{
+	  auto opt = OPT_Wmaybe_musttail_local_addr;
+	  warning_at (gimple_location (call), opt,
+		      "address of parameter %qD can escape to "
+		      "%<musttail%> call", param);
+	  suppress_warning (call, opt);
+	  break;
+	}
+
   if (local_live_vars)
     BITMAP_FREE (local_live_vars);
 
@@ -763,8 +953,8 @@ find_tail_calls (basic_block bb, struct tailcall **ret, bool only_musttail,
   a = NULL_TREE;
   auto_bitmap to_move_defs;
   auto_vec<gimple *> to_move_stmts;
-  bool is_noreturn
-    = EDGE_COUNT (bb->succs) == 0 && gimple_call_noreturn_p (call);
+  bool is_noreturn = gimple_call_noreturn_p (call);
+  auto_vec<edge> edges;
 
   abb = bb;
   agsi = gsi;
@@ -776,8 +966,11 @@
 
   while (gsi_end_p (agsi))
     {
-      ass_var = propagate_through_phis (ass_var, single_succ_edge (abb));
-      abb = single_succ (abb);
+      edge e = single_non_eh_succ_edge (abb);
+      ass_var = propagate_through_phis (ass_var, e);
+      if (!ass_var)
+	edges.safe_push (e);
+      abb = e->dest;
       agsi = gsi_start_bb (abb);
     }
 
@@ -792,6 +985,17 @@ find_tail_calls (basic_block bb, struct tailcall **ret, bool only_musttail,
	  || is_gimple_debug (stmt))
	continue;
 
+      if (cfun->has_musttail
+	  && sanitize_flags_p (SANITIZE_THREAD)
+	  && retry_tsan_func_exit == 1
+	  && gimple_call_builtin_p (stmt, BUILT_IN_TSAN_FUNC_EXIT)
+	  && !has_tsan_func_exit
+	  && gimple_call_must_tail_p (call))
+	{
+	  has_tsan_func_exit = true;
+	  continue;
+	}
+
       if (gimple_code (stmt) != GIMPLE_ASSIGN)
	{
	  maybe_error_musttail (call, _("unhandled code after call"),
@@ -851,13 +1055,17 @@ find_tail_calls (basic_block bb, struct tailcall **ret, bool only_musttail,
   /* See if this is a tail call we can handle.  */
   if (is_noreturn)
     {
+      if (gimple_call_internal_p (call))
+	{
+	  maybe_error_musttail (call, _("internal call"), diag_musttail);
+	  return;
+	}
       tree rettype = TREE_TYPE (TREE_TYPE (current_function_decl));
       tree calltype = TREE_TYPE (gimple_call_fntype (call));
       if (!VOID_TYPE_P (rettype)
	  && !useless_type_conversion_p (rettype, calltype))
	{
-	  maybe_error_musttail (call,
-				_("call and return value are different"),
+	  maybe_error_musttail (call, _("call and return value are different"),
				diag_musttail);
	  return;
	}
@@ -879,23 +1087,56 @@ find_tail_calls (basic_block bb, struct tailcall **ret, bool only_musttail,
       /* If IPA-VRP proves called function always returns a singleton range,
	 the return value is replaced by the only value in that range.
	 For tail call purposes, pretend such replacement didn't happen.  */
-      if (ass_var == NULL_TREE
-	  && !tail_recursion
-	  && TREE_CONSTANT (ret_var))
+      if (ass_var == NULL_TREE && !tail_recursion)
	if (tree type = gimple_range_type (call))
	  if (tree callee = gimple_call_fndecl (call))
-	    if ((INTEGRAL_TYPE_P (type) || SCALAR_FLOAT_TYPE_P (type))
+	    if ((INTEGRAL_TYPE_P (type)
+		 || SCALAR_FLOAT_TYPE_P (type)
+		 || POINTER_TYPE_P (type))
		&& useless_type_conversion_p (TREE_TYPE (TREE_TYPE (callee)),
					      type)
		&& useless_type_conversion_p (TREE_TYPE (ret_var), type)
		&& ipa_return_value_range (val, callee)
-		&& val.singleton_p (&valr)
-		&& operand_equal_p (ret_var, valr, 0))
-	      ok = true;
+		&& val.singleton_p (&valr))
+	      {
+		tree rv = ret_var;
+		unsigned int i = edges.length ();
+		/* If ret_var is equal to valr, we can tail optimize.  */
+		if (operand_equal_p (ret_var, valr, 0))
+		  ok = true;
+		else
+		  /* Otherwise, if ret_var is a PHI result, try to find out
+		     if valr isn't propagated through PHIs on the path from
+		     call's bb to SSA_NAME_DEF_STMT (ret_var)'s bb.  */
+		  while (TREE_CODE (rv) == SSA_NAME
+			 && gimple_code (SSA_NAME_DEF_STMT (rv)) == GIMPLE_PHI)
+		    {
+		      tree nrv = NULL_TREE;
+		      gimple *g = SSA_NAME_DEF_STMT (rv);
+		      for (; i; --i)
+			{
+			  if (edges[i - 1]->dest == gimple_bb (g))
+			    {
+			      nrv
+				= gimple_phi_arg_def_from_edge (g,
+								edges[i - 1]);
+			      --i;
+			      break;
+			    }
+			}
+		      if (nrv == NULL_TREE)
+			break;
+		      if (operand_equal_p (nrv, valr, 0))
+			{
+			  ok = true;
+			  break;
+			}
+		      rv = nrv;
+		    }
+	      }
       if (!ok)
	{
-	  maybe_error_musttail (call,
-				_("call and return value are different"),
+	  maybe_error_musttail (call, _("call and return value are different"),
				diag_musttail);
	  return;
	}
@@ -905,18 +1146,29 @@ find_tail_calls (basic_block bb, struct tailcall **ret, bool only_musttail,
      multiplicands.  */
 
   if (!tail_recursion && (m || a))
     {
-      maybe_error_musttail (call,
-			    _("operations after non tail recursive call"),
-			    diag_musttail);
+      maybe_error_musttail (call, _("operations after non tail recursive "
				    "call"), diag_musttail);
       return;
     }
 
   /* For pointers only allow additions.  */
   if (m && POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (current_function_decl))))
     {
-      maybe_error_musttail (call,
-			    _("tail recursion with pointers can only use "
-			      "additions"), diag_musttail);
+      maybe_error_musttail (call, _("tail recursion with pointers can only "
+				    "use additions"), diag_musttail);
+      return;
+    }
+
+  if (eh_has_tsan_func_exit != -1
+      && eh_has_tsan_func_exit != has_tsan_func_exit)
+    {
+      if (eh_has_tsan_func_exit)
+	maybe_error_musttail (call, _("call may throw exception caught "
+				      "locally or perform cleanups"),
+			      diag_musttail);
+      else
+	maybe_error_musttail (call, _("exception cleanups omit "
+				      "__tsan_func_exit call"), diag_musttail);
       return;
     }
 
@@ -948,6 +1200,7 @@ find_tail_calls (basic_block bb, struct tailcall **ret, bool only_musttail,
   nw->call_gsi = gsi;
 
   nw->tail_recursion = tail_recursion;
+  nw->has_tsan_func_exit = has_tsan_func_exit;
 
   nw->mult = m;
   nw->add = a;
@@ -1112,11 +1365,6 @@ static void
 decrease_profile (basic_block bb, profile_count count)
 {
   bb->count = bb->count - count;
-  if (!single_succ_p (bb))
-    {
-      gcc_assert (!EDGE_COUNT (bb->succs));
-      return;
-    }
 }
 
 /* Eliminates tail call described by T.  TMP_VARS is a list of
@@ -1181,7 +1429,7 @@ eliminate_tail_call (struct tailcall *t, class loop *&new_loop)
   else
     {
       /* Number of executions of function has reduced by the tailcall.  */
-      e = single_succ_edge (gsi_bb (t->call_gsi));
+      e = single_non_eh_succ_edge (gsi_bb (t->call_gsi));
 
       profile_count count = e->count ();
 
@@ -1196,8 +1444,7 @@ eliminate_tail_call (struct tailcall *t, class loop *&new_loop)
       decrease_profile (e->dest, count);
 
       /* Replace the call by a jump to the start of function.  */
-      e = redirect_edge_and_branch (single_succ_edge (gsi_bb (t->call_gsi)),
-				    first);
+      e = redirect_edge_and_branch (e, first);
     }
   gcc_assert (e);
   PENDING_STMT (e) = NULL;
@@ -1288,6 +1535,14 @@ static bool
 optimize_tail_call (struct tailcall *t, bool opt_tailcalls,
		    class loop *&new_loop)
 {
+  if (t->has_tsan_func_exit && (t->tail_recursion || opt_tailcalls))
+    {
+      tree builtin_decl = builtin_decl_implicit (BUILT_IN_TSAN_FUNC_EXIT);
+      gimple *g = gimple_build_call (builtin_decl, 0);
+      gimple_set_location (g, cfun->function_end_locus);
+      gsi_insert_before (&t->call_gsi, g, GSI_SAME_STMT);
+    }
+
   if (t->tail_recursion)
     {
       eliminate_tail_call (t, new_loop);
@@ -1306,6 +1561,7 @@ optimize_tail_call (struct tailcall *t, bool opt_tailcalls,
	  print_gimple_stmt (dump_file, stmt, 0, dump_flags);
	  fprintf (dump_file, " in bb %i\n", (gsi_bb (t->call_gsi))->index);
	}
+      return t->has_tsan_func_exit;
     }
 
   return false;
@@ -1355,20 +1611,33 @@ tree_optimize_tail_calls_1 (bool opt_tailcalls, bool only_musttail,
       /* Only traverse the normal exits, i.e. those that end with return
	 statement.  */
       if (safe_is_a <greturn *> (*gsi_last_bb (e->src)))
-	find_tail_calls (e->src, &tailcalls, only_musttail, opt_tailcalls,
-			 diag_musttail);
+	{
+	  int retry_tsan_func_exit = 0;
+	  find_tail_calls (e->src, &tailcalls, only_musttail, opt_tailcalls,
+			   diag_musttail, retry_tsan_func_exit);
+	  if (retry_tsan_func_exit == -1)
+	    {
+	      retry_tsan_func_exit = 1;
+	      find_tail_calls (e->src, &tailcalls, only_musttail,
+			       opt_tailcalls, diag_musttail,
+			       retry_tsan_func_exit);
+	    }
+	}
     }
   if (cfun->has_musttail && diag_musttail)
     {
       basic_block bb;
+      int retry_tsan_func_exit = 0;
       FOR_EACH_BB_FN (bb, cfun)
-	if (EDGE_COUNT (bb->succs) == 0)
+	if (EDGE_COUNT (bb->succs) == 0
+	    || (single_succ_p (bb)
+		&& (single_succ_edge (bb)->flags & EDGE_EH)))
	  if (gimple *c = last_nondebug_stmt (bb))
	    if (is_gimple_call (c)
		&& gimple_call_must_tail_p (as_a <gcall *> (c))
		&& gimple_call_noreturn_p (as_a <gcall *> (c)))
	      find_tail_calls (bb, &tailcalls, only_musttail, opt_tailcalls,
-			       diag_musttail);
+			       diag_musttail, retry_tsan_func_exit);
     }
 
   if (live_vars)
@@ -1400,10 +1669,10 @@ tree_optimize_tail_calls_1 (bool opt_tailcalls, bool only_musttail,
	      struct tailcall *a = *p;
	      *p = (*p)->next;
	      gcall *call = as_a <gcall *> (gsi_stmt (a->call_gsi));
-	      maybe_error_musttail (call,
-				    _("tail recursion with accumulation "
-				      "mixed with musttail "
-				      "non-recursive call"), diag_musttail);
+	      maybe_error_musttail (call, _("tail recursion with "
					    "accumulation mixed with "
					    "musttail non-recursive call"),
				    diag_musttail);
	      free (a);
	    }
	  else
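
As a rough user-level illustration of what the changed pass now diagnoses (this example is not part of the patch; the attribute and option names are taken from the hunks above, and the exact warning wording is an assumption), consider a musttail call whose argument points at a local variable:

/* Hypothetical example, not from the patch.  Assumes a GCC that accepts the
   musttail statement attribute and the -Wmusttail-local-addr option checked
   via warn_musttail_local_addr in the diff above.
   Compile with something like: gcc -O2 -Wmusttail-local-addr example.c  */

int callee (int *p);

int
caller (void)
{
  int local = 42;   /* automatic variable whose address escapes below */
  /* The argument is an ADDR_EXPR of an automatic variable, so the new code
     in suitable_for_tail_call_opt_p () should warn that the address of
     'local' is passed to a musttail call argument: after the frame is
     reused for the tail call, the callee would see a dangling pointer.  */
  __attribute__((musttail)) return callee (&local);
}

Similarly, when the caller is instrumented with -fsanitize=thread, find_tail_calls () now retries after spotting the trailing __tsan_func_exit () call (see retry_tsan_func_exit and has_tsan_func_exit above), so a musttail call can still be honoured by emitting __tsan_func_exit () just before the call rather than after it.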