author:    Martin Liska <marxin@gcc.gnu.org>  2019-10-30 07:55:39 +0000
committer: Martin Liska <marxin@gcc.gnu.org>  2019-10-30 07:55:39 +0000
commit:    a62bfab5d2a332925fcf10c45b4c5d8ca499439d
tree:      d0586bccd3e715429ba1c4df5ba680575950b1e7 /gcc
parent:    53289de583e4dfab17ace7b39e102b04eba749e5
Remove cgraph_global_info.
From-SVN: r277600
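In short, this patch folds the single-field cgraph_global_info structure into cgraph_node itself, so every node->global.inlined_to access in the IPA passes becomes node->inlined_to. Below is a minimal before/after sketch condensed from the gcc/cgraph.h hunk in the diff; the _before/_after struct names are illustrative only, and the real cgraph_node has many more members.

/* Before: the pointer lived in a one-field helper struct and was
   accessed as node->global.inlined_to.  */
struct cgraph_node_before;
struct cgraph_global_info {
  /* For inline clones this points to the function they will be
     inlined into.  */
  struct cgraph_node_before *inlined_to;
};
struct cgraph_node_before {
  struct cgraph_global_info global;
};

/* After: the field is a direct member and is accessed as
   node->inlined_to.  */
struct cgraph_node_after {
  /* For inline clones this points to the function they will be
     inlined into.  */
  struct cgraph_node_after *inlined_to;
};

Call sites follow the same mechanical rewrite throughout the files listed below, e.g. caller->global.inlined_to ? caller->global.inlined_to : caller becomes caller->inlined_to ? caller->inlined_to : caller.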
Diffstat (limited to 'gcc')
 gcc/cgraph.c               | 56
 gcc/cgraph.h               | 41
 gcc/cgraphbuild.c          |  2
 gcc/cgraphclones.c         |  7
 gcc/cgraphunit.c           | 16
 gcc/gimple-fold.c          |  4
 gcc/ipa-comdats.c          |  8
 gcc/ipa-devirt.c           |  2
 gcc/ipa-fnsummary.c        | 18
 gcc/ipa-inline-analysis.c  |  8
 gcc/ipa-inline-transform.c | 16
 gcc/ipa-inline.c           | 84
 gcc/ipa-profile.c          |  4
 gcc/ipa-prop.c             | 26
 gcc/ipa-pure-const.c       |  6
 gcc/ipa-reference.c        |  4
 gcc/ipa-utils.c            |  2
 gcc/ipa-visibility.c       |  2
 gcc/ipa.c                  | 18
 gcc/lto-cgraph.c           | 26
 gcc/lto/lto-partition.c    |  6
 gcc/lto/lto-symtab.c       |  4
 gcc/omp-simd-clone.c       |  2
 gcc/passes.c               |  2
 gcc/symtab.c               |  6
 gcc/tree-ssa-structalias.c |  4
26 files changed, 184 insertions, 190 deletions
diff --git a/gcc/cgraph.c b/gcc/cgraph.c index d47d412..2dc91a3 100644 --- a/gcc/cgraph.c +++ b/gcc/cgraph.c @@ -539,7 +539,7 @@ cgraph_node::get_create (tree decl) { cgraph_node *first_clone = cgraph_node::get (decl); - if (first_clone && !first_clone->global.inlined_to) + if (first_clone && !first_clone->inlined_to) return first_clone; cgraph_node *node = cgraph_node::create (decl); @@ -659,7 +659,7 @@ cgraph_node::get_for_asmname (tree asmname) node = node->next_sharing_asm_name) { cgraph_node *cn = dyn_cast <cgraph_node *> (node); - if (cn && !cn->global.inlined_to) + if (cn && !cn->inlined_to) return cn; } return NULL; @@ -1786,7 +1786,7 @@ cgraph_node::remove (void) { cgraph_node *n = cgraph_node::get (decl); if (!n - || (!n->clones && !n->clone_of && !n->global.inlined_to + || (!n->clones && !n->clone_of && !n->inlined_to && ((symtab->global_info_ready || in_lto_p) && (TREE_ASM_WRITTEN (n->decl) || DECL_EXTERNAL (n->decl) @@ -1817,7 +1817,7 @@ cgraph_node::mark_address_taken (void) { /* Indirect inlining can figure out that all uses of the address are inlined. */ - if (global.inlined_to) + if (inlined_to) { gcc_assert (cfun->after_inlining); gcc_assert (callers->indirect_inlining_edge); @@ -1944,10 +1944,10 @@ cgraph_node::dump (FILE *f) dump_base (f); - if (global.inlined_to) + if (inlined_to) fprintf (f, " Function %s is inline copy in %s\n", dump_name (), - global.inlined_to->dump_name ()); + inlined_to->dump_name ()); if (clone_of) fprintf (f, " Clone of %s\n", clone_of->dump_asm_name ()); if (symtab->function_flags_ready) @@ -2096,7 +2096,7 @@ cgraph_node::dump (FILE *f) if (dyn_cast <cgraph_node *> (ref->referring)->count.initialized_p ()) sum += dyn_cast <cgraph_node *> (ref->referring)->count.ipa (); - if (global.inlined_to + if (inlined_to || (symtab->state < EXPANSION && ultimate_alias_target () == this && only_called_directly_p ())) ok = !count.ipa ().differs_from_p (sum); @@ -2212,14 +2212,14 @@ cgraph_node::get_availability (symtab_node *ref) { cgraph_node *cref = dyn_cast <cgraph_node *> (ref); if (cref) - ref = cref->global.inlined_to; + ref = cref->inlined_to; } enum availability avail; if (!analyzed) avail = AVAIL_NOT_AVAILABLE; else if (local.local) avail = AVAIL_LOCAL; - else if (global.inlined_to) + else if (inlined_to) avail = AVAIL_AVAILABLE; else if (transparent_alias) ultimate_alias_target (&avail, ref); @@ -2828,7 +2828,7 @@ bool cgraph_node::will_be_removed_from_program_if_no_direct_calls_p (bool will_inline) { - gcc_assert (!global.inlined_to); + gcc_assert (!inlined_to); if (DECL_EXTERNAL (decl)) return true; @@ -3015,7 +3015,7 @@ cgraph_edge::verify_corresponds_to_fndecl (tree decl) { cgraph_node *node; - if (!decl || callee->global.inlined_to) + if (!decl || callee->inlined_to) return false; if (symtab->state == LTO_STREAMING) return false; @@ -3085,7 +3085,7 @@ cgraph_node::verify_node (void) error ("cgraph count invalid"); error_found = true; } - if (global.inlined_to && same_comdat_group) + if (inlined_to && same_comdat_group) { error ("inline clone in same comdat group list"); error_found = true; @@ -3095,17 +3095,17 @@ cgraph_node::verify_node (void) error ("local symbols must be defined"); error_found = true; } - if (global.inlined_to && externally_visible) + if (inlined_to && externally_visible) { error ("externally visible inline clone"); error_found = true; } - if (global.inlined_to && address_taken) + if (inlined_to && address_taken) { error ("inline clone with address taken"); error_found = true; } - if (global.inlined_to && force_output) + 
if (inlined_to && force_output) { error ("inline clone is forced to output"); error_found = true; @@ -3142,9 +3142,9 @@ cgraph_node::verify_node (void) } if (!e->inline_failed) { - if (global.inlined_to - != (e->caller->global.inlined_to - ? e->caller->global.inlined_to : e->caller)) + if (inlined_to + != (e->caller->inlined_to + ? e->caller->inlined_to : e->caller)) { error ("inlined_to pointer is wrong"); error_found = true; @@ -3156,7 +3156,7 @@ cgraph_node::verify_node (void) } } else - if (global.inlined_to) + if (inlined_to) { error ("inlined_to pointer set for noninline callers"); error_found = true; @@ -3167,7 +3167,7 @@ cgraph_node::verify_node (void) if (e->verify_count ()) error_found = true; if (gimple_has_body_p (e->caller->decl) - && !e->caller->global.inlined_to + && !e->caller->inlined_to && !e->speculative /* Optimized out calls are redirected to __builtin_unreachable. */ && (e->count.nonzero_p () @@ -3192,7 +3192,7 @@ cgraph_node::verify_node (void) if (e->verify_count ()) error_found = true; if (gimple_has_body_p (e->caller->decl) - && !e->caller->global.inlined_to + && !e->caller->inlined_to && !e->speculative && e->count.ipa_p () && count @@ -3209,12 +3209,12 @@ cgraph_node::verify_node (void) error_found = true; } } - if (!callers && global.inlined_to) + if (!callers && inlined_to) { error ("inlined_to pointer is set but no predecessors found"); error_found = true; } - if (global.inlined_to == this) + if (inlined_to == this) { error ("inlined_to pointer refers to itself"); error_found = true; @@ -3303,7 +3303,7 @@ cgraph_node::verify_node (void) error ("More than one edge out of thunk node"); error_found = true; } - if (gimple_has_body_p (decl) && !global.inlined_to) + if (gimple_has_body_p (decl) && !inlined_to) { error ("Thunk is not supposed to have body"); error_found = true; @@ -3311,7 +3311,7 @@ cgraph_node::verify_node (void) } else if (analyzed && gimple_has_body_p (decl) && !TREE_ASM_WRITTEN (decl) - && (!DECL_EXTERNAL (decl) || global.inlined_to) + && (!DECL_EXTERNAL (decl) || inlined_to) && !flag_wpa) { if (this_cfun->cfg) @@ -3586,7 +3586,7 @@ cgraph_node::get_body (void) early. TODO: Materializing clones here will likely lead to smaller LTRANS footprint. */ - gcc_assert (!global.inlined_to && !clone_of); + gcc_assert (!inlined_to && !clone_of); if (ipa_transforms_to_apply.exists ()) { opt_pass *saved_current_pass = current_pass; @@ -3776,8 +3776,8 @@ cgraph_node::has_thunk_p (cgraph_node *node, void *) sreal cgraph_edge::sreal_frequency () { - return count.to_sreal_scale (caller->global.inlined_to - ? caller->global.inlined_to->count + return count.to_sreal_scale (caller->inlined_to + ? caller->inlined_to->count : caller->count); } diff --git a/gcc/cgraph.h b/gcc/cgraph.h index a7f357f..826d391 100644 --- a/gcc/cgraph.h +++ b/gcc/cgraph.h @@ -730,15 +730,6 @@ struct GTY(()) cgraph_local_info { unsigned tm_may_enter_irr : 1; }; -/* Information about the function that needs to be computed globally - once compilation is finished. Available only with -funit-at-a-time. */ - -struct GTY(()) cgraph_global_info { - /* For inline clones this points to the function they will be - inlined into. */ - cgraph_node *inlined_to; -}; - /* Represent which DECL tree (or reference to such tree) will be replaced by another tree while versioning. 
*/ struct GTY(()) ipa_replace_map @@ -979,7 +970,7 @@ struct GTY((tag ("SYMTAB_FUNCTION"))) cgraph_node : public symtab_node If the new node is being inlined into another one, NEW_INLINED_TO should be the outline function the new one is (even indirectly) inlined to. - All hooks will see this in node's global.inlined_to, when invoked. + All hooks will see this in node's inlined_to, when invoked. Can be NULL if the node is not inlined. SUFFIX is string that is appended to the original name. */ cgraph_node *create_clone (tree decl, profile_count count, @@ -1446,7 +1437,11 @@ struct GTY((tag ("SYMTAB_FUNCTION"))) cgraph_node : public symtab_node vec<ipa_opt_pass> GTY((skip)) ipa_transforms_to_apply; cgraph_local_info local; - cgraph_global_info global; + + /* For inline clones this points to the function they will be + inlined into. */ + cgraph_node *inlined_to; + struct cgraph_rtl_info *rtl; cgraph_clone_info clone; cgraph_thunk_info thunk; @@ -2519,7 +2514,7 @@ symtab_node::real_symbol_p (void) if (!is_a <cgraph_node *> (this)) return true; cnode = dyn_cast <cgraph_node *> (this); - if (cnode->global.inlined_to) + if (cnode->inlined_to) return false; return true; } @@ -2542,13 +2537,13 @@ symtab_node::in_same_comdat_group_p (symtab_node *target) if (cgraph_node *cn = dyn_cast <cgraph_node *> (target)) { - if (cn->global.inlined_to) - source = cn->global.inlined_to; + if (cn->inlined_to) + source = cn->inlined_to; } if (cgraph_node *cn = dyn_cast <cgraph_node *> (target)) { - if (cn->global.inlined_to) - target = cn->global.inlined_to; + if (cn->inlined_to) + target = cn->inlined_to; } return source->get_comdat_group () == target->get_comdat_group (); @@ -2995,7 +2990,7 @@ struct GTY((for_user)) constant_descriptor_tree { inline bool cgraph_node::only_called_directly_or_aliased_p (void) { - gcc_assert (!global.inlined_to); + gcc_assert (!inlined_to); return (!force_output && !address_taken && !ifunc_resolver && !used_from_other_partition @@ -3012,7 +3007,7 @@ cgraph_node::only_called_directly_or_aliased_p (void) inline bool cgraph_node::can_remove_if_no_direct_calls_and_refs_p (void) { - gcc_checking_assert (!global.inlined_to); + gcc_checking_assert (!inlined_to); /* Extern inlines can always go, we will use the external definition. */ if (DECL_EXTERNAL (decl)) return true; @@ -3183,8 +3178,8 @@ inline bool cgraph_edge::recursive_p (void) { cgraph_node *c = callee->ultimate_alias_target (); - if (caller->global.inlined_to) - return caller->global.inlined_to->decl == c->decl; + if (caller->inlined_to) + return caller->inlined_to->decl == c->decl; else return caller->decl == c->decl; } @@ -3221,8 +3216,8 @@ cgraph_edge::binds_to_current_def_p () inline int cgraph_edge::frequency () { - return count.to_cgraph_frequency (caller->global.inlined_to - ? caller->global.inlined_to->count + return count.to_cgraph_frequency (caller->inlined_to + ? caller->inlined_to->count : caller->count); } @@ -3244,7 +3239,7 @@ inline void cgraph_node::mark_force_output (void) { force_output = 1; - gcc_checking_assert (!global.inlined_to); + gcc_checking_assert (!inlined_to); } /* Return true if function should be optimized for size. 
*/ diff --git a/gcc/cgraphbuild.c b/gcc/cgraphbuild.c index 2e7d0b5..3baf67b 100644 --- a/gcc/cgraphbuild.c +++ b/gcc/cgraphbuild.c @@ -428,7 +428,7 @@ cgraph_edge::rebuild_edges (void) node->record_stmt_references (gsi_stmt (gsi)); } record_eh_tables (node, cfun); - gcc_assert (!node->global.inlined_to); + gcc_assert (!node->inlined_to); return 0; } diff --git a/gcc/cgraphclones.c b/gcc/cgraphclones.c index 087b5a2..fcf9cd5 100644 --- a/gcc/cgraphclones.c +++ b/gcc/cgraphclones.c @@ -309,7 +309,7 @@ dump_callgraph_transformation (const cgraph_node *original, If the new node is being inlined into another one, NEW_INLINED_TO should be the outline function the new one is (even indirectly) inlined to. All hooks - will see this in node's global.inlined_to, when invoked. Can be NULL if the + will see this in node's inlined_to, when invoked. Can be NULL if the node is not inlined. If PARAM_ADJUSTMENTS is non-NULL, the parameter manipulation information @@ -357,8 +357,7 @@ cgraph_node::create_clone (tree new_decl, profile_count prof_count, new_node->externally_visible = false; new_node->no_reorder = no_reorder; new_node->local.local = true; - new_node->global = global; - new_node->global.inlined_to = new_inlined_to; + new_node->inlined_to = new_inlined_to; new_node->rtl = rtl; new_node->frequency = frequency; new_node->tp_first_run = tp_first_run; @@ -862,7 +861,7 @@ cgraph_node::create_version_clone (tree new_decl, new_version->externally_visible = false; new_version->no_reorder = no_reorder; new_version->local.local = new_version->definition; - new_version->global = global; + new_version->inlined_to = inlined_to; new_version->rtl = rtl; new_version->count = count; diff --git a/gcc/cgraphunit.c b/gcc/cgraphunit.c index 6ec2443..ef96393 100644 --- a/gcc/cgraphunit.c +++ b/gcc/cgraphunit.c @@ -392,7 +392,7 @@ cgraph_node::reset (void) /* Reset our data structures so we can analyze the function again. */ memset (&local, 0, sizeof (local)); - memset (&global, 0, sizeof (global)); + inlined_to = NULL; memset (&rtl, 0, sizeof (rtl)); analyzed = false; definition = false; @@ -1507,7 +1507,7 @@ mark_functions_to_output (void) if (node->analyzed && !node->thunk.thunk_p && !node->alias - && !node->global.inlined_to + && !node->inlined_to && !TREE_ASM_WRITTEN (decl) && !DECL_EXTERNAL (decl)) { @@ -1532,7 +1532,7 @@ mark_functions_to_output (void) { /* We should've reclaimed all functions that are not needed. 
*/ if (flag_checking - && !node->global.inlined_to + && !node->inlined_to && gimple_has_body_p (decl) /* FIXME: in ltrans unit when offline copy is outside partition but inline copies are inside partition, we can end up not removing the body since we no longer @@ -1545,7 +1545,7 @@ mark_functions_to_output (void) node->debug (); internal_error ("failed to reclaim unneeded function"); } - gcc_assert (node->global.inlined_to + gcc_assert (node->inlined_to || !gimple_has_body_p (decl) || node->in_other_partition || node->clones @@ -1560,7 +1560,7 @@ mark_functions_to_output (void) if (node->same_comdat_group && !node->process) { tree decl = node->decl; - if (!node->global.inlined_to + if (!node->inlined_to && gimple_has_body_p (decl) /* FIXME: in an ltrans unit when the offline copy is outside a partition but inline copies are inside a partition, we can @@ -2117,7 +2117,7 @@ cgraph_node::assemble_thunks_and_aliases (void) for (e = callers; e;) if (e->caller->thunk.thunk_p - && !e->caller->global.inlined_to) + && !e->caller->inlined_to) { cgraph_node *thunk = e->caller; @@ -2154,7 +2154,7 @@ cgraph_node::expand (void) location_t saved_loc; /* We ought to not compile any inline clones. */ - gcc_assert (!global.inlined_to); + gcc_assert (!inlined_to); /* __RTL functions are compiled as soon as they are parsed, so don't do it again. */ @@ -2707,7 +2707,7 @@ symbol_table::compile (void) bool error_found = false; FOR_EACH_DEFINED_FUNCTION (node) - if (node->global.inlined_to + if (node->inlined_to || gimple_has_body_p (node->decl)) { error_found = true; diff --git a/gcc/gimple-fold.c b/gcc/gimple-fold.c index a085ab2..1b7e7ed 100644 --- a/gcc/gimple-fold.c +++ b/gcc/gimple-fold.c @@ -135,7 +135,7 @@ can_refer_decl_in_current_unit_p (tree decl, tree from_decl) if (!snode || !snode->definition) return false; node = dyn_cast <cgraph_node *> (snode); - return !node || !node->global.inlined_to; + return !node || !node->inlined_to; } /* We will later output the initializer, so we can refer to it. @@ -184,7 +184,7 @@ can_refer_decl_in_current_unit_p (tree decl, tree from_decl) || (!snode->forced_by_abi && !snode->force_output)))) return false; node = dyn_cast <cgraph_node *> (snode); - return !node || !node->global.inlined_to; + return !node || !node->inlined_to; } /* Create a temporary for TYPE for a statement STMT. If the current function diff --git a/gcc/ipa-comdats.c b/gcc/ipa-comdats.c index 69cc3f3..b496497 100644 --- a/gcc/ipa-comdats.c +++ b/gcc/ipa-comdats.c @@ -98,8 +98,8 @@ propagate_comdat_group (struct symtab_node *symbol, if (cgraph_node * cn = dyn_cast <cgraph_node *> (symbol2)) { - if (cn->global.inlined_to) - symbol2 = cn->global.inlined_to; + if (cn->inlined_to) + symbol2 = cn->inlined_to; } /* The actual merge operation. */ @@ -133,8 +133,8 @@ propagate_comdat_group (struct symtab_node *symbol, /* If we see inline clone, its comdat group actually corresponds to the comdat group of the function it is inlined to. */ - if (cn->global.inlined_to) - symbol2 = cn->global.inlined_to; + if (cn->inlined_to) + symbol2 = cn->inlined_to; } /* The actual merge operation. 
*/ diff --git a/gcc/ipa-devirt.c b/gcc/ipa-devirt.c index 6c651a3..a14e795 100644 --- a/gcc/ipa-devirt.c +++ b/gcc/ipa-devirt.c @@ -2414,7 +2414,7 @@ maybe_record_node (vec <cgraph_node *> &nodes, || target_node->definition) && target_node->real_symbol_p ()) { - gcc_assert (!target_node->global.inlined_to); + gcc_assert (!target_node->inlined_to); gcc_assert (target_node->real_symbol_p ()); /* When sanitizing, do not assume that __cxa_pure_virtual is not called by valid program. */ diff --git a/gcc/ipa-fnsummary.c b/gcc/ipa-fnsummary.c index f01709c..798fdbe 100644 --- a/gcc/ipa-fnsummary.c +++ b/gcc/ipa-fnsummary.c @@ -470,8 +470,8 @@ evaluate_properties_for_edge (struct cgraph_edge *e, bool inline_p, class ipa_call_summary *es = ipa_call_summaries->get (e); int i, count = ipa_get_cs_argument_count (args); - if (e->caller->global.inlined_to) - caller_parms_info = IPA_NODE_REF (e->caller->global.inlined_to); + if (e->caller->inlined_to) + caller_parms_info = IPA_NODE_REF (e->caller->inlined_to); else caller_parms_info = IPA_NODE_REF (e->caller); callee_pi = IPA_NODE_REF (e->callee); @@ -746,7 +746,7 @@ ipa_fn_summary_t::duplicate (cgraph_node *src, set_hint_predicate (&info->loop_stride, p); } } - if (!dst->global.inlined_to) + if (!dst->inlined_to) ipa_update_overall_fn_summary (dst); } @@ -927,7 +927,7 @@ ipa_dump_fn_summaries (FILE *f) struct cgraph_node *node; FOR_EACH_DEFINED_FUNCTION (node) - if (!node->global.inlined_to) + if (!node->inlined_to) ipa_dump_fn_summary (f, node); } @@ -2690,7 +2690,7 @@ compute_fn_summary (struct cgraph_node *node, bool early) HOST_WIDE_INT self_stack_size; struct cgraph_edge *e; - gcc_assert (!node->global.inlined_to); + gcc_assert (!node->inlined_to); if (!ipa_fn_summaries) ipa_fn_summary_alloc (); @@ -3115,13 +3115,13 @@ HOST_WIDE_INT ipa_get_stack_frame_offset (struct cgraph_node *node) { HOST_WIDE_INT offset = 0; - if (!node->global.inlined_to) + if (!node->inlined_to) return 0; node = node->callers->caller; while (true) { offset += ipa_size_summaries->get (node)->estimated_self_stack_size; - if (!node->global.inlined_to) + if (!node->inlined_to) return offset; node = node->callers->caller; } @@ -3292,8 +3292,8 @@ void ipa_merge_fn_summary_after_inlining (struct cgraph_edge *edge) { ipa_fn_summary *callee_info = ipa_fn_summaries->get (edge->callee); - struct cgraph_node *to = (edge->caller->global.inlined_to - ? edge->caller->global.inlined_to : edge->caller); + struct cgraph_node *to = (edge->caller->inlined_to + ? edge->caller->inlined_to : edge->caller); class ipa_fn_summary *info = ipa_fn_summaries->get (to); clause_t clause = 0; /* not_inline is known to be false. */ size_time_entry *e; diff --git a/gcc/ipa-inline-analysis.c b/gcc/ipa-inline-analysis.c index 8b572a4..8dee132 100644 --- a/gcc/ipa-inline-analysis.c +++ b/gcc/ipa-inline-analysis.c @@ -93,8 +93,8 @@ int simple_edge_hints (struct cgraph_edge *edge) { int hints = 0; - struct cgraph_node *to = (edge->caller->global.inlined_to - ? edge->caller->global.inlined_to : edge->caller); + struct cgraph_node *to = (edge->caller->inlined_to + ? edge->caller->inlined_to : edge->caller); struct cgraph_node *callee = edge->callee->ultimate_alias_target (); int to_scc_no = ipa_fn_summaries->get (to)->scc_no; int callee_scc_no = ipa_fn_summaries->get (callee)->scc_no; @@ -147,8 +147,8 @@ do_estimate_edge_time (struct cgraph_edge *edge) may hurt optimization of the caller's hot path. 
*/ if (edge->count.ipa ().initialized_p () && edge->maybe_hot_p () && (edge->count.ipa ().apply_scale (2, 1) - > (edge->caller->global.inlined_to - ? edge->caller->global.inlined_to->count.ipa () + > (edge->caller->inlined_to + ? edge->caller->inlined_to->count.ipa () : edge->caller->count.ipa ()))) hints |= INLINE_HINT_known_hot; diff --git a/gcc/ipa-inline-transform.c b/gcc/ipa-inline-transform.c index ccaa0eb..1cc7803 100644 --- a/gcc/ipa-inline-transform.c +++ b/gcc/ipa-inline-transform.c @@ -166,8 +166,8 @@ clone_inlined_nodes (struct cgraph_edge *e, bool duplicate, struct cgraph_node *inlining_into; struct cgraph_edge *next; - if (e->caller->global.inlined_to) - inlining_into = e->caller->global.inlined_to; + if (e->caller->inlined_to) + inlining_into = e->caller->inlined_to; else inlining_into = e->caller; @@ -193,7 +193,7 @@ clone_inlined_nodes (struct cgraph_edge *e, bool duplicate, For now we keep the ohter functions in the group in program until cgraph_remove_unreachable_functions gets rid of them. */ - gcc_assert (!e->callee->global.inlined_to); + gcc_assert (!e->callee->inlined_to); e->callee->remove_from_same_comdat_group (); if (e->callee->definition && inline_account_function_p (e->callee)) @@ -226,7 +226,7 @@ clone_inlined_nodes (struct cgraph_edge *e, bool duplicate, else e->callee->remove_from_same_comdat_group (); - e->callee->global.inlined_to = inlining_into; + e->callee->inlined_to = inlining_into; /* Recursively clone all bodies. */ for (e = e->callee->callees; e; e = next) @@ -344,11 +344,11 @@ inline_call (struct cgraph_edge *e, bool update_original, /* Don't inline inlined edges. */ gcc_assert (e->inline_failed); /* Don't even think of inlining inline clone. */ - gcc_assert (!callee->global.inlined_to); + gcc_assert (!callee->inlined_to); to = e->caller; - if (to->global.inlined_to) - to = to->global.inlined_to; + if (to->inlined_to) + to = to->inlined_to; if (to->thunk.thunk_p) { struct cgraph_node *target = to->callees->callee; @@ -478,7 +478,7 @@ inline_call (struct cgraph_edge *e, bool update_original, clone_inlined_nodes (e, true, update_original, overall_size); - gcc_assert (curr->callee->global.inlined_to == to); + gcc_assert (curr->callee->inlined_to == to); old_size = ipa_size_summaries->get (to)->size; ipa_merge_fn_summary_after_inlining (e); diff --git a/gcc/ipa-inline.c b/gcc/ipa-inline.c index a7ef7fa..2103870 100644 --- a/gcc/ipa-inline.c +++ b/gcc/ipa-inline.c @@ -167,7 +167,7 @@ caller_growth_limits (struct cgraph_edge *e) limit = size_info->self_size; if (stack_size_limit < size_info->estimated_self_stack_size) stack_size_limit = size_info->estimated_self_stack_size; - if (to->global.inlined_to) + if (to->inlined_to) to = to->callers->caller; else break; @@ -321,8 +321,8 @@ can_inline_edge_p (struct cgraph_edge *e, bool report, bool inlinable = true; enum availability avail; - cgraph_node *caller = e->caller->global.inlined_to - ? e->caller->global.inlined_to : e->caller; + cgraph_node *caller = (e->caller->inlined_to + ? e->caller->inlined_to : e->caller); cgraph_node *callee = e->callee->ultimate_alias_target (&avail, caller); if (!callee->definition) @@ -458,8 +458,8 @@ can_inline_edge_by_limits_p (struct cgraph_edge *e, bool report, bool inlinable = true; enum availability avail; - cgraph_node *caller = e->caller->global.inlined_to - ? e->caller->global.inlined_to : e->caller; + cgraph_node *caller = (e->caller->inlined_to + ? 
e->caller->inlined_to : e->caller); cgraph_node *callee = e->callee->ultimate_alias_target (&avail, caller); tree caller_tree = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (caller->decl); tree callee_tree @@ -737,8 +737,8 @@ inline sreal compute_uninlined_call_time (struct cgraph_edge *edge, sreal uninlined_call_time) { - cgraph_node *caller = (edge->caller->global.inlined_to - ? edge->caller->global.inlined_to + cgraph_node *caller = (edge->caller->inlined_to + ? edge->caller->inlined_to : edge->caller); sreal freq = edge->sreal_frequency (); @@ -758,8 +758,8 @@ inline sreal compute_inlined_call_time (struct cgraph_edge *edge, sreal time) { - cgraph_node *caller = (edge->caller->global.inlined_to - ? edge->caller->global.inlined_to + cgraph_node *caller = (edge->caller->inlined_to + ? edge->caller->inlined_to : edge->caller); sreal caller_time = ipa_fn_summaries->get (caller)->time; @@ -789,9 +789,9 @@ big_speedup_p (struct cgraph_edge *e) sreal spec_time = estimate_edge_time (e, &unspec_time); sreal time = compute_uninlined_call_time (e, unspec_time); sreal inlined_time = compute_inlined_call_time (e, spec_time); - cgraph_node *caller = e->caller->global.inlined_to - ? e->caller->global.inlined_to - : e->caller; + cgraph_node *caller = (e->caller->inlined_to + ? e->caller->inlined_to + : e->caller); int limit = opt_for_fn (caller->decl, optimize) >= 3 ? PARAM_VALUE (PARAM_INLINE_MIN_SPEEDUP) : PARAM_VALUE (PARAM_INLINE_MIN_SPEEDUP_O2); @@ -959,7 +959,7 @@ want_inline_self_recursive_call_p (struct cgraph_edge *edge, reason = "--param max-inline-recursive-depth exceeded."; want_inline = false; } - else if (outer_node->global.inlined_to + else if (outer_node->inlined_to && (caller_freq = outer_node->callers->sreal_frequency ()) == 0) { reason = "caller frequency is 0"; @@ -1069,7 +1069,7 @@ want_inline_function_to_all_callers_p (struct cgraph_node *node, bool cold) if (node->alias) return false; /* Already inlined? */ - if (node->global.inlined_to) + if (node->inlined_to) return false; /* Does it have callers? */ if (!node->call_for_symbol_and_aliases (has_caller_p, NULL, true)) @@ -1101,8 +1101,8 @@ edge_badness (struct cgraph_edge *edge, bool dump) struct cgraph_node *callee = edge->callee->ultimate_alias_target (); class ipa_fn_summary *callee_info = ipa_fn_summaries->get (callee); ipa_hints hints; - cgraph_node *caller = (edge->caller->global.inlined_to - ? edge->caller->global.inlined_to + cgraph_node *caller = (edge->caller->inlined_to + ? edge->caller->inlined_to : edge->caller); growth = estimate_edge_growth (edge); @@ -1196,7 +1196,7 @@ edge_badness (struct cgraph_edge *edge, bool dump) if (growth > overall_growth /* ... and having only one caller which is not inlined ... */ && callee_info->single_caller - && !edge->caller->global.inlined_to + && !edge->caller->inlined_to /* ... and edges executed only conditionally ... */ && edge->sreal_frequency () < 1 /* ... consider case where callee is not inline but caller is ... 
*/ @@ -1365,8 +1365,8 @@ reset_edge_caches (struct cgraph_node *node) struct cgraph_node *where = node; struct ipa_ref *ref; - if (where->global.inlined_to) - where = where->global.inlined_to; + if (where->inlined_to) + where = where->inlined_to; if (edge_growth_cache != NULL) for (edge = where->callers; edge; edge = edge->next_caller) @@ -1416,7 +1416,7 @@ update_caller_keys (edge_heap_t *heap, struct cgraph_node *node, struct ipa_ref *ref; if ((!node->alias && !ipa_fn_summaries->get (node)->inlinable) - || node->global.inlined_to) + || node->inlined_to) return; if (!bitmap_set_bit (updated_nodes, node->get_uid ())) return; @@ -1544,8 +1544,8 @@ recursive_inlining (struct cgraph_edge *edge, int n = 0; node = edge->caller; - if (node->global.inlined_to) - node = node->global.inlined_to; + if (node->inlined_to) + node = node->inlined_to; if (DECL_DECLARED_INLINE_P (node->decl)) limit = PARAM_VALUE (PARAM_MAX_INLINE_INSNS_RECURSIVE); @@ -1593,7 +1593,7 @@ recursive_inlining (struct cgraph_edge *edge, depth = 1; for (cnode = curr->caller; - cnode->global.inlined_to; cnode = cnode->callers->caller) + cnode->inlined_to; cnode = cnode->callers->caller) if (node->decl == curr->callee->ultimate_alias_target ()->decl) depth++; @@ -1658,7 +1658,7 @@ recursive_inlining (struct cgraph_edge *edge, node = next) { next = symtab->next_function (node); - if (node->global.inlined_to == master_clone) + if (node->inlined_to == master_clone) node->remove (); } master_clone->remove (); @@ -1776,8 +1776,8 @@ resolve_noninline_speculation (edge_heap_t *edge_heap, struct cgraph_edge *edge) if (edge->speculative && !speculation_useful_p (edge, false)) { struct cgraph_node *node = edge->caller; - struct cgraph_node *where = node->global.inlined_to - ? node->global.inlined_to : node; + struct cgraph_node *where = node->inlined_to + ? node->inlined_to : node; auto_bitmap updated_nodes; if (edge->count.ipa ().initialized_p ()) @@ -1859,7 +1859,7 @@ inline_small_functions (void) free (order); FOR_EACH_DEFINED_FUNCTION (node) - if (!node->global.inlined_to) + if (!node->inlined_to) { if (!node->alias && node->analyzed && (node->has_gimple_body_p () || node->thunk.thunk_p) @@ -1888,7 +1888,7 @@ inline_small_functions (void) if (opt_for_fn (n2->decl, optimize)) { ipa_fn_summary *info2 = ipa_fn_summaries->get - (n2->global.inlined_to ? n2->global.inlined_to : n2); + (n2->inlined_to ? n2->inlined_to : n2); if (info2->scc_no) break; info2->scc_no = id; @@ -1952,8 +1952,8 @@ inline_small_functions (void) } if (update) { - struct cgraph_node *where = node->global.inlined_to - ? node->global.inlined_to : node; + struct cgraph_node *where = node->inlined_to + ? node->inlined_to : node; ipa_update_overall_fn_summary (where); reset_edge_caches (where); update_caller_keys (&edge_heap, where, @@ -2097,8 +2097,8 @@ inline_small_functions (void) if (edge->recursive_p ()) { where = edge->caller; - if (where->global.inlined_to) - where = where->global.inlined_to; + if (where->inlined_to) + where = where->inlined_to; if (!recursive_inlining (edge, opt_for_fn (edge->caller->decl, flag_indirect_inlining) @@ -2128,7 +2128,7 @@ inline_small_functions (void) selective. 
*/ where = edge->caller; - while (where->global.inlined_to) + while (where->inlined_to) { if (where->decl == callee->decl) outer_node = where, depth++; @@ -2147,7 +2147,7 @@ inline_small_functions (void) else if (depth && dump_file) fprintf (dump_file, " Peeling recursion with depth %i\n", depth); - gcc_checking_assert (!callee->global.inlined_to); + gcc_checking_assert (!callee->inlined_to); inline_call (edge, true, &new_indirect_edges, &overall_size, true); add_new_edges_to_heap (&edge_heap, new_indirect_edges); @@ -2156,8 +2156,8 @@ inline_small_functions (void) update_callee_keys (&edge_heap, where, updated_nodes); } where = edge->caller; - if (where->global.inlined_to) - where = where->global.inlined_to; + if (where->inlined_to) + where = where->inlined_to; /* Our profitability metric can depend on local properties such as number of inlinable calls and size of the function body. @@ -2290,8 +2290,8 @@ flatten_function (struct cgraph_node *node, bool early, bool update) node->aux = NULL; if (update) - ipa_update_overall_fn_summary (node->global.inlined_to - ? node->global.inlined_to : node); + ipa_update_overall_fn_summary (node->inlined_to + ? node->inlined_to : node); } /* Inline NODE to all callers. Worker for cgraph_for_node_and_aliases. @@ -2305,7 +2305,7 @@ inline_to_all_callers_1 (struct cgraph_node *node, void *data, int *num_calls = (int *)data; bool callee_removed = false; - while (node->callers && !node->global.inlined_to) + while (node->callers && !node->inlined_to) { struct cgraph_node *caller = node->callers->caller; @@ -2378,7 +2378,7 @@ dump_overall_stats (void) struct cgraph_node *node; FOR_EACH_DEFINED_FUNCTION (node) - if (!node->global.inlined_to + if (!node->inlined_to && !node->alias) { ipa_fn_summary *s = ipa_fn_summaries->get (node); @@ -2670,8 +2670,8 @@ ipa_inline (void) } if (update) { - struct cgraph_node *where = node->global.inlined_to - ? node->global.inlined_to : node; + struct cgraph_node *where = node->inlined_to + ? node->inlined_to : node; reset_edge_caches (where); ipa_update_overall_fn_summary (where); } diff --git a/gcc/ipa-profile.c b/gcc/ipa-profile.c index 970dba3..50a54eb 100644 --- a/gcc/ipa-profile.c +++ b/gcc/ipa-profile.c @@ -326,8 +326,8 @@ ipa_propagate_frequency_1 (struct cgraph_node *node, void *data) if (profile_info && !(edge->callee->count.ipa () == profile_count::zero ()) && (edge->caller->frequency != NODE_FREQUENCY_UNLIKELY_EXECUTED - || (edge->caller->global.inlined_to - && edge->caller->global.inlined_to->frequency + || (edge->caller->inlined_to + && edge->caller->inlined_to->frequency != NODE_FREQUENCY_UNLIKELY_EXECUTED))) d->maybe_unlikely_executed = false; if (edge->count.ipa ().initialized_p () diff --git a/gcc/ipa-prop.c b/gcc/ipa-prop.c index 9dfbe1a..5e70395 100644 --- a/gcc/ipa-prop.c +++ b/gcc/ipa-prop.c @@ -2892,7 +2892,7 @@ ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target, /* Because may-edges are not explicitely represented and vtable may be external, we may create the first reference to the object in the unit. */ - if (!callee || callee->global.inlined_to) + if (!callee || callee->inlined_to) { /* We are better to ensure we can refer to it. @@ -2945,7 +2945,7 @@ ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target, /* We cannot make edges to inline clones. It is bug that someone removed the cgraph node too early. 
*/ - gcc_assert (!callee->global.inlined_to); + gcc_assert (!callee->inlined_to); if (dump_file && !unreachable) { @@ -3412,8 +3412,8 @@ update_indirect_edges_after_inlining (struct cgraph_edge *cs, ipa_check_create_edge_args (); top = IPA_EDGE_REF (cs); - new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to - ? cs->caller->global.inlined_to + new_root_info = IPA_NODE_REF (cs->caller->inlined_to + ? cs->caller->inlined_to : cs->caller); inlined_node_info = IPA_NODE_REF (cs->callee->function_symbol ()); @@ -3579,8 +3579,8 @@ propagate_controlled_uses (struct cgraph_edge *cs) class ipa_edge_args *args = IPA_EDGE_REF (cs); if (!args) return; - struct cgraph_node *new_root = cs->caller->global.inlined_to - ? cs->caller->global.inlined_to : cs->caller; + struct cgraph_node *new_root = cs->caller->inlined_to + ? cs->caller->inlined_to : cs->caller; class ipa_node_params *new_root_info = IPA_NODE_REF (new_root); class ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee); int count, i; @@ -3645,7 +3645,7 @@ propagate_controlled_uses (struct cgraph_edge *cs) gcc_checking_assert (ok); clone = cs->caller; - while (clone->global.inlined_to + while (clone->inlined_to && clone != rdesc->cs->caller && IPA_NODE_REF (clone)->ipcp_orig_node) { @@ -3870,16 +3870,16 @@ ipa_edge_args_sum_t::duplicate (cgraph_edge *src, cgraph_edge *dst, We need to find the duplicate that refers to our tree of inline clones. */ - gcc_assert (dst->caller->global.inlined_to); + gcc_assert (dst->caller->inlined_to); for (dst_rdesc = src_rdesc->next_duplicate; dst_rdesc; dst_rdesc = dst_rdesc->next_duplicate) { struct cgraph_node *top; - top = dst_rdesc->cs->caller->global.inlined_to - ? dst_rdesc->cs->caller->global.inlined_to + top = dst_rdesc->cs->caller->inlined_to + ? dst_rdesc->cs->caller->inlined_to : dst_rdesc->cs->caller; - if (dst->caller->global.inlined_to == top) + if (dst->caller->inlined_to == top) break; } gcc_assert (dst_rdesc); @@ -3889,8 +3889,8 @@ ipa_edge_args_sum_t::duplicate (cgraph_edge *src, cgraph_edge *dst, else if (dst_jf->type == IPA_JF_PASS_THROUGH && src->caller == dst->caller) { - struct cgraph_node *inline_root = dst->caller->global.inlined_to - ? dst->caller->global.inlined_to : dst->caller; + struct cgraph_node *inline_root = dst->caller->inlined_to + ? dst->caller->inlined_to : dst->caller; class ipa_node_params *root_info = IPA_NODE_REF (inline_root); int idx = ipa_get_jf_pass_through_formal_id (dst_jf); diff --git a/gcc/ipa-pure-const.c b/gcc/ipa-pure-const.c index 4b2a79f..a142e0c 100644 --- a/gcc/ipa-pure-const.c +++ b/gcc/ipa-pure-const.c @@ -1678,7 +1678,7 @@ propagate_pure_const (void) /* Inline clones share declaration with their offline copies; do not modify their declarations since the offline copy may be different. */ - if (!w->global.inlined_to) + if (!w->inlined_to) switch (this_state) { case IPA_CONST: @@ -1839,7 +1839,7 @@ propagate_nothrow (void) /* Inline clones share declaration with their offline copies; do not modify their declarations since the offline copy may be different. 
*/ - if (!w->global.inlined_to) + if (!w->inlined_to) { w->set_nothrow_flag (true); if (dump_file) @@ -1966,7 +1966,7 @@ propagate_malloc (void) funct_state l = funct_state_summaries->get (node); if (!node->alias && l->malloc_state == STATE_MALLOC - && !node->global.inlined_to) + && !node->inlined_to) { if (dump_file && (dump_flags & TDF_DETAILS)) fprintf (dump_file, "Function %s found to be malloc\n", diff --git a/gcc/ipa-reference.c b/gcc/ipa-reference.c index 06c1225..feee92d 100644 --- a/gcc/ipa-reference.c +++ b/gcc/ipa-reference.c @@ -901,7 +901,7 @@ propagate (void) ipa_reference_global_vars_info_t node_g; /* No need to produce summaries for inline clones. */ - if (node->global.inlined_to) + if (node->inlined_to) continue; node_info = get_reference_vars_info (node); @@ -977,7 +977,7 @@ write_node_summary_p (struct cgraph_node *node, ipa_reference_optimization_summary_t info; /* See if we have (non-empty) info. */ - if (!node->definition || node->global.inlined_to) + if (!node->definition || node->inlined_to) return false; info = get_reference_optimization_summary (node); if (!info) diff --git a/gcc/ipa-utils.c b/gcc/ipa-utils.c index 25c2e2c..8849429 100644 --- a/gcc/ipa-utils.c +++ b/gcc/ipa-utils.c @@ -296,7 +296,7 @@ ipa_reverse_postorder (struct cgraph_node **order) if (!node->aux && (pass || (!node->address_taken - && !node->global.inlined_to + && !node->inlined_to && !node->alias && !node->thunk.thunk_p && !node->only_called_directly_p ()))) { diff --git a/gcc/ipa-visibility.c b/gcc/ipa-visibility.c index 84585b5..274d308 100644 --- a/gcc/ipa-visibility.c +++ b/gcc/ipa-visibility.c @@ -707,7 +707,7 @@ function_and_variable_visibility (bool whole_program) || DECL_EXTERNAL (node->decl)); if (cgraph_externally_visible_p (node, whole_program)) { - gcc_assert (!node->global.inlined_to); + gcc_assert (!node->inlined_to); node->externally_visible = true; } else @@ -71,9 +71,9 @@ update_inlined_to_pointer (struct cgraph_node *node, struct cgraph_node *inlined { struct cgraph_edge *e; for (e = node->callees; e; e = e->next_callee) - if (e->callee->global.inlined_to) + if (e->callee->inlined_to) { - e->callee->global.inlined_to = inlined_to; + e->callee->inlined_to = inlined_to; update_inlined_to_pointer (e->callee, inlined_to); } } @@ -335,11 +335,11 @@ symbol_table::remove_unreachable_nodes (FILE *file) node->used_as_abstract_origin = false; node->indirect_call_target = false; if (node->definition - && !node->global.inlined_to + && !node->inlined_to && !node->in_other_partition && !node->can_remove_if_no_direct_calls_and_refs_p ()) { - gcc_assert (!node->global.inlined_to); + gcc_assert (!node->inlined_to); reachable.add (node); enqueue_node (node, &first, &reachable); } @@ -451,7 +451,7 @@ symbol_table::remove_unreachable_nodes (FILE *file) /* When inline clone exists, mark body to be preserved so when removing offline copy of the function we don't kill it. */ - if (cnode->global.inlined_to) + if (cnode->inlined_to) body_needed_for_clonning.add (cnode->decl); /* For non-inline clones, force their origins to the boundary and ensure @@ -560,11 +560,11 @@ symbol_table::remove_unreachable_nodes (FILE *file) to turn it into normal cone. 
*/ FOR_EACH_FUNCTION (node) { - if (node->global.inlined_to + if (node->inlined_to && !node->callers) { gcc_assert (node->clones); - node->global.inlined_to = NULL; + node->inlined_to = NULL; update_inlined_to_pointer (node, node); } node->aux = NULL; @@ -1212,8 +1212,8 @@ propagate_single_user (varpool_node *vnode, cgraph_node *function, struct cgraph_node *cnode = dyn_cast <cgraph_node *> (ref->referring); if (cnode) { - if (cnode->global.inlined_to) - cnode = cnode->global.inlined_to; + if (cnode->inlined_to) + cnode = cnode->inlined_to; if (!function) function = cnode; else if (function != cnode) diff --git a/gcc/lto-cgraph.c b/gcc/lto-cgraph.c index 147975b..b03835a 100644 --- a/gcc/lto-cgraph.c +++ b/gcc/lto-cgraph.c @@ -329,7 +329,7 @@ reachable_from_other_partition_p (struct cgraph_node *node, lto_symtab_encoder_t struct cgraph_edge *e; if (!node->definition) return false; - if (node->global.inlined_to) + if (node->inlined_to) return false; for (e = node->callers; e; e = e->next_caller) { @@ -399,7 +399,7 @@ lto_output_node (struct lto_simple_output_block *ob, struct cgraph_node *node, boundary_p = !lto_symtab_encoder_in_partition_p (encoder, node); if (node->analyzed && (!boundary_p || node->alias - || (node->thunk.thunk_p && !node->global.inlined_to))) + || (node->thunk.thunk_p && !node->inlined_to))) tag = LTO_symtab_analyzed_node; else tag = LTO_symtab_unavail_node; @@ -422,7 +422,7 @@ lto_output_node (struct lto_simple_output_block *ob, struct cgraph_node *node, && node->get_partitioning_class () == SYMBOL_PARTITION) { /* Inline clones cannot be part of boundary. - gcc_assert (!node->global.inlined_to); + gcc_assert (!node->inlined_to); FIXME: At the moment they can be, when partition contains an inline clone that is clone of inline clone from outside partition. We can @@ -468,9 +468,9 @@ lto_output_node (struct lto_simple_output_block *ob, struct cgraph_node *node, if (tag == LTO_symtab_analyzed_node) { - if (node->global.inlined_to) + if (node->inlined_to) { - ref = lto_symtab_encoder_lookup (encoder, node->global.inlined_to); + ref = lto_symtab_encoder_lookup (encoder, node->inlined_to); gcc_assert (ref != LCC_NOT_FOUND); } else @@ -884,7 +884,7 @@ compute_ltrans_boundary (lto_symtab_encoder_t in_encoder) if (!lto_symtab_encoder_in_partition_p (encoder, callee)) { /* We should have moved all the inlines. 
*/ - gcc_assert (!callee->global.inlined_to); + gcc_assert (!callee->inlined_to); add_node_to (encoder, callee, false); } } @@ -911,7 +911,7 @@ compute_ltrans_boundary (lto_symtab_encoder_t in_encoder) && !lto_symtab_encoder_in_partition_p (encoder, callee)) { - gcc_assert (!callee->global.inlined_to); + gcc_assert (!callee->inlined_to); add_node_to (encoder, callee, false); } } @@ -928,7 +928,7 @@ compute_ltrans_boundary (lto_symtab_encoder_t in_encoder) if (node->alias && node->analyzed) create_references (encoder, node); if (cnode - && cnode->thunk.thunk_p && !cnode->global.inlined_to) + && cnode->thunk.thunk_p && !cnode->inlined_to) add_node_to (encoder, cnode->callees->callee, false); while (node->transparent_alias && node->analyzed) { @@ -984,7 +984,7 @@ output_symtab (void) { node = dyn_cast <cgraph_node *> (lto_symtab_encoder_deref (encoder, i)); if (node - && ((node->thunk.thunk_p && !node->global.inlined_to) + && ((node->thunk.thunk_p && !node->inlined_to) || lto_symtab_encoder_in_partition_p (encoder, node))) { output_outgoing_cgraph_edges (node->callees, ob, encoder); @@ -1283,7 +1283,7 @@ input_node (struct lto_file_decl_data *file_data, input_overwrite_node (file_data, node, tag, &bp); /* Store a reference for now, and fix up later to be a pointer. */ - node->global.inlined_to = (cgraph_node *) (intptr_t) ref; + node->inlined_to = (cgraph_node *) (intptr_t) ref; if (group) { @@ -1542,7 +1542,7 @@ input_cgraph_1 (struct lto_file_decl_data *file_data, int ref; if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node)) { - ref = (int) (intptr_t) cnode->global.inlined_to; + ref = (int) (intptr_t) cnode->inlined_to; /* We share declaration of builtins, so we may read same node twice. */ if (!node->aux) @@ -1551,10 +1551,10 @@ input_cgraph_1 (struct lto_file_decl_data *file_data, /* Fixup inlined_to from reference to pointer. */ if (ref != LCC_NOT_FOUND) - dyn_cast<cgraph_node *> (node)->global.inlined_to + dyn_cast<cgraph_node *> (node)->inlined_to = dyn_cast<cgraph_node *> (nodes[ref]); else - cnode->global.inlined_to = NULL; + cnode->inlined_to = NULL; } ref = (int) (intptr_t) node->same_comdat_group; diff --git a/gcc/lto/lto-partition.c b/gcc/lto/lto-partition.c index 1492833..3209035 100644 --- a/gcc/lto/lto-partition.c +++ b/gcc/lto/lto-partition.c @@ -182,7 +182,7 @@ add_symbol_to_partition_1 (ltrans_partition part, symtab_node *node) /* Add all thunks associated with the function. 
*/ for (e = cnode->callers; e; e = e->next_caller) - if (e->caller->thunk.thunk_p && !e->caller->global.inlined_to) + if (e->caller->thunk.thunk_p && !e->caller->inlined_to) add_symbol_to_partition_1 (part, e->caller); } @@ -233,8 +233,8 @@ contained_in_symbol (symtab_node *node) if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node)) { cnode = cnode->function_symbol (); - if (cnode->global.inlined_to) - cnode = cnode->global.inlined_to; + if (cnode->inlined_to) + cnode = cnode->inlined_to; return cnode; } else if (varpool_node *vnode = dyn_cast <varpool_node *> (node)) diff --git a/gcc/lto/lto-symtab.c b/gcc/lto/lto-symtab.c index 37fa572..2fd5b1e 100644 --- a/gcc/lto/lto-symtab.c +++ b/gcc/lto/lto-symtab.c @@ -63,7 +63,7 @@ lto_cgraph_replace_node (struct cgraph_node *node, prevailing_node->forced_by_abi = true; if (node->address_taken) { - gcc_assert (!prevailing_node->global.inlined_to); + gcc_assert (!prevailing_node->inlined_to); prevailing_node->mark_address_taken (); } if (node->definition && prevailing_node->definition @@ -910,7 +910,7 @@ lto_symtab_merge_symbols_1 (symtab_node *prevailing) cgraph_node *ce = dyn_cast <cgraph_node *> (e); if ((!TREE_PUBLIC (e->decl) && !DECL_EXTERNAL (e->decl)) - || (ce != NULL && ce->global.inlined_to)) + || (ce != NULL && ce->inlined_to)) continue; symtab_node *to = symtab_node::get (lto_symtab_prevailing_decl (e->decl)); diff --git a/gcc/omp-simd-clone.c b/gcc/omp-simd-clone.c index d71a963..f4bfcc8 100644 --- a/gcc/omp-simd-clone.c +++ b/gcc/omp-simd-clone.c @@ -1645,7 +1645,7 @@ expand_simd_clones (struct cgraph_node *node) tree attr = lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (node->decl)); if (attr == NULL_TREE - || node->global.inlined_to + || node->inlined_to || lookup_attribute ("noclone", DECL_ATTRIBUTES (node->decl))) return; diff --git a/gcc/passes.c b/gcc/passes.c index 863605d..d86af11 100644 --- a/gcc/passes.c +++ b/gcc/passes.c @@ -3033,7 +3033,7 @@ function_called_by_processed_nodes_p (void) continue; if (TREE_ASM_WRITTEN (e->caller->decl)) continue; - if (!e->caller->process && !e->caller->global.inlined_to) + if (!e->caller->process && !e->caller->inlined_to) break; } if (dump_file && e) diff --git a/gcc/symtab.c b/gcc/symtab.c index ee9723c..a75f516 100644 --- a/gcc/symtab.c +++ b/gcc/symtab.c @@ -1921,7 +1921,7 @@ symtab_node::get_partitioning_class (void) if (DECL_ABSTRACT_P (decl)) return SYMBOL_EXTERNAL; - if (cnode && cnode->global.inlined_to) + if (cnode && cnode->inlined_to) return SYMBOL_DUPLICATE; /* Transparent aliases are always duplicated. */ @@ -2321,7 +2321,7 @@ symtab_node::binds_to_current_def_p (symtab_node *ref) return true; /* Inline clones always binds locally. */ - if (cnode && cnode->global.inlined_to) + if (cnode && cnode->inlined_to) return true; if (DECL_EXTERNAL (decl)) @@ -2333,7 +2333,7 @@ symtab_node::binds_to_current_def_p (symtab_node *ref) { cgraph_node *cref = dyn_cast <cgraph_node *> (ref); if (cref) - ref = cref->global.inlined_to; + ref = cref->inlined_to; } /* If this is a reference from symbol itself and there are no aliases, we diff --git a/gcc/tree-ssa-structalias.c b/gcc/tree-ssa-structalias.c index 75c6fae..6e7d4db 100644 --- a/gcc/tree-ssa-structalias.c +++ b/gcc/tree-ssa-structalias.c @@ -7959,7 +7959,7 @@ associate_varinfo_to_alias (struct cgraph_node *node, void *data) { if ((node->alias || (node->thunk.thunk_p - && ! node->global.inlined_to)) + && ! 
node->inlined_to)) && node->analyzed && !node->ifunc_resolver) insert_vi_for_tree (node->decl, (varinfo_t)data); @@ -8129,7 +8129,7 @@ ipa_pta_execute (void) /* Nodes without a body are not interesting. Especially do not visit clones at this point for now - we get duplicate decls there for inline clones at least. */ - if (!node->has_gimple_body_p () || node->global.inlined_to) + if (!node->has_gimple_body_p () || node->inlined_to) continue; node->get_body ();