aboutsummaryrefslogtreecommitdiff
path: root/gcc/ipa-inline.c
diff options
context:
space:
mode:
authorJan Hubicka <hubicka@ucw.cz>2014-11-18 21:44:16 +0100
committerJan Hubicka <hubicka@gcc.gnu.org>2014-11-18 20:44:16 +0000
commit2bf86c845a89fce00ccb219adbf6002443b5b1cb (patch)
tree78111324e160451c1d54ce78b9af90a1e98ffa12 /gcc/ipa-inline.c
parentbb59f396f8ca74c7d663c197e99d15bbe9f6e5b6 (diff)
downloadgcc-2bf86c845a89fce00ccb219adbf6002443b5b1cb.zip
gcc-2bf86c845a89fce00ccb219adbf6002443b5b1cb.tar.gz
gcc-2bf86c845a89fce00ccb219adbf6002443b5b1cb.tar.bz2
ipa-cp.c (ipcp_cloning_candidate_p): Use opt_for_fn.
* ipa-cp.c (ipcp_cloning_candidate_p): Use opt_for_fn. (ipa_value_from_jfunc, ipa_context_from_jfunc): Skip sanity check. (ipa_get_indirect_edge_target_1): Use opt_for_fn. (good_cloning_opportunity_p): Likewise. (ipa-cp gate): Enable ipa-cp with LTO. * ipa-profile.c (ipa_propagate_frequency): Use opt_for_fn. * ipa.c (symbol_table::remove_unreachable_nodes): Always build type inheritance. * ipa-inline-transform.c (inline_transform): Check if there are inlines to apply even at -O0. * cgraphunit.c (cgraph_node::finalize_function): Use opt_for_fn. (analyze_functions): Build type inheritance graph. * ipa-inline.c (can_inline_edge_p): Use opt_for_fn. (want_early_inline_function_p, want_inline_small_function_p): Likewise. (check_callers): Likewise. (edge_badness): Likewise. (inline_small_functions): Always be ready for indirect inlining to happen. (ipa_inline): Always use want_inline_function_to_all_callers_p. (early_inline_small_functions): Use opt_for_fn. * ipa-inline-analysis.c (estimate_function_body_sizes): use opt_for_fn. (estimate_function_body_sizes): Likewise. (compute_inline_parameters): Likewise. (estimate_edge_devirt_benefit): Likewise. (inline_analyze_function): Likewise. * ipa-devirt.c (ipa_devirt): Likewise. (gate): Use in_lto_p. * ipa-prop.c (ipa_func_spec_opts_forbid_analysis_p): Use opt_for_fn. (try_make_edge_direct_virtual_call): Likewise. (update_indirect_edges_after_inlining): Likewise. (ipa_free_all_structures_after_ipa_cp): Add in_lto_p check. * common.opt (findirect-inlining): Turn into optimization. * ipa-pure-const.c (add_new_function): Use opt_for_fn. (pure_const_generate_summary): Likewise. (gate_pure_const): Always enable with in_lto_p. From-SVN: r217737
Diffstat (limited to 'gcc/ipa-inline.c')
-rw-r--r--gcc/ipa-inline.c44
1 file changed, 19 insertions, 25 deletions
diff --git a/gcc/ipa-inline.c b/gcc/ipa-inline.c
index ca50ad5..72c0715 100644
--- a/gcc/ipa-inline.c
+++ b/gcc/ipa-inline.c
@@ -378,18 +378,10 @@ can_inline_edge_p (struct cgraph_edge *e, bool report,
optimization attribute. */
else if (caller_tree != callee_tree)
{
- struct cl_optimization *caller_opt
- = TREE_OPTIMIZATION ((caller_tree)
- ? caller_tree
- : optimization_default_node);
-
- struct cl_optimization *callee_opt
- = TREE_OPTIMIZATION ((callee_tree)
- ? callee_tree
- : optimization_default_node);
-
- if (((caller_opt->x_optimize > callee_opt->x_optimize)
- || (caller_opt->x_optimize_size != callee_opt->x_optimize_size))
+ if (((opt_for_fn (e->caller->decl, optimize)
+ > opt_for_fn (e->callee->decl, optimize))
+ || (opt_for_fn (e->caller->decl, optimize_size)
+ != opt_for_fn (e->callee->decl, optimize_size)))
/* gcc.dg/pr43564.c. Look at forced inline even in -O0. */
&& !DECL_DISREGARD_INLINE_LIMITS (e->callee->decl))
{
@@ -469,7 +461,7 @@ want_early_inline_function_p (struct cgraph_edge *e)
else if (flag_auto_profile && afdo_callsite_hot_enough_for_early_inline (e))
;
else if (!DECL_DECLARED_INLINE_P (callee->decl)
- && !flag_inline_small_functions)
+ && !opt_for_fn (e->caller->decl, flag_inline_small_functions))
{
e->inline_failed = CIF_FUNCTION_NOT_INLINE_CANDIDATE;
report_inline_failed_reason (e);
@@ -587,7 +579,7 @@ want_inline_small_function_p (struct cgraph_edge *e, bool report)
if (DECL_DISREGARD_INLINE_LIMITS (callee->decl))
;
else if (!DECL_DECLARED_INLINE_P (callee->decl)
- && !flag_inline_small_functions)
+ && !opt_for_fn (e->caller->decl, flag_inline_small_functions))
{
e->inline_failed = CIF_FUNCTION_NOT_INLINE_CANDIDATE;
want_inline = false;
@@ -639,7 +631,7 @@ want_inline_small_function_p (struct cgraph_edge *e, bool report)
want_inline = false;
}
else if (!DECL_DECLARED_INLINE_P (callee->decl)
- && !flag_inline_functions)
+ && !opt_for_fn (e->caller->decl, flag_inline_functions))
{
/* growth_likely_positive is expensive, always test it last. */
if (growth >= MAX_INLINE_INSNS_SINGLE
@@ -816,6 +808,8 @@ check_callers (struct cgraph_node *node, void *has_hot_call)
struct cgraph_edge *e;
for (e = node->callers; e; e = e->next_caller)
{
+ if (!opt_for_fn (e->caller->decl, flag_inline_functions_called_once))
+ return true;
if (!can_inline_edge_p (e, true))
return true;
if (!(*(bool *)has_hot_call) && e->maybe_hot_p ())
@@ -1010,6 +1004,8 @@ edge_badness (struct cgraph_edge *edge, bool dump)
compensated by the inline hints.
*/
+ /* TODO: We ought to support mixing units where some functions are profiled
+ and some not. */
else if (flag_guess_branch_prob)
{
badness = (relative_time_benefit (callee_info, edge, edge_time)
@@ -1575,8 +1571,7 @@ inline_small_functions (void)
int initial_size = 0;
struct cgraph_node **order = XCNEWVEC (cgraph_node *, symtab->cgraph_count);
struct cgraph_edge_hook_list *edge_removal_hook_holder;
- if (flag_indirect_inlining)
- new_indirect_edges.create (8);
+ new_indirect_edges.create (8);
edge_removal_hook_holder
= symtab->add_edge_removal_hook (&heap_edge_removal_hook, &edge_heap);
@@ -1773,7 +1768,8 @@ inline_small_functions (void)
if (where->global.inlined_to)
where = where->global.inlined_to;
if (!recursive_inlining (edge,
- flag_indirect_inlining
+ opt_for_fn (edge->caller->decl,
+ flag_indirect_inlining)
? &new_indirect_edges : NULL))
{
edge->inline_failed = CIF_RECURSIVE_INLINING;
@@ -1783,7 +1779,7 @@ inline_small_functions (void)
reset_edge_caches (where);
/* Recursive inliner inlines all recursive calls of the function
at once. Consequently we need to update all callee keys. */
- if (flag_indirect_inlining)
+ if (opt_for_fn (edge->caller->decl, flag_indirect_inlining))
add_new_edges_to_heap (&edge_heap, new_indirect_edges);
update_callee_keys (&edge_heap, where, updated_nodes);
bitmap_clear (updated_nodes);
@@ -1821,8 +1817,7 @@ inline_small_functions (void)
gcc_checking_assert (!callee->global.inlined_to);
inline_call (edge, true, &new_indirect_edges, &overall_size, true);
- if (flag_indirect_inlining)
- add_new_edges_to_heap (&edge_heap, new_indirect_edges);
+ add_new_edges_to_heap (&edge_heap, new_indirect_edges);
reset_edge_caches (edge->callee);
reset_node_growth_cache (callee);
@@ -2246,8 +2241,7 @@ ipa_inline (void)
reset_edge_caches (where);
inline_update_overall_summary (where);
}
- if (flag_inline_functions_called_once
- && want_inline_function_to_all_callers_p (node, cold))
+ if (want_inline_function_to_all_callers_p (node, cold))
{
int num_calls = 0;
node->call_for_symbol_thunks_and_aliases (sum_callers, &num_calls,
@@ -2345,8 +2339,8 @@ early_inline_small_functions (struct cgraph_node *node)
/* Do not consider functions not declared inline. */
if (!DECL_DECLARED_INLINE_P (callee->decl)
- && !flag_inline_small_functions
- && !flag_inline_functions)
+ && !opt_for_fn (node->decl, flag_inline_small_functions)
+ && !opt_for_fn (node->decl, flag_inline_functions))
continue;
if (dump_file)