author | Jan Hubicka <hubicka@ucw.cz> | 2014-08-07 22:58:17 +0200 |
---|---|---|
committer | Jan Hubicka <hubicka@gcc.gnu.org> | 2014-08-07 20:58:17 +0000 |
commit | 7d0aa05b9ead1b114958d8230377c2c25ef54876 (patch) | |
tree | 3358c76f8c1af0e9fe0b8b19fef9b3fe6f0935ff /gcc/ipa-devirt.c | |
parent | 9f25a338f91f9c448d57681b6d17a91233222d43 (diff) | |
ipa-devirt.c: Include gimple-pretty-print.h
* ipa-devirt.c: Include gimple-pretty-print.h
(referenced_from_vtable_p): Exclude DECL_EXTERNAL from
further tests.
	(decl_maybe_in_construction_p): Fix conditional on cdtor check.
	(get_polymorphic_call_info): Fix return value.
	(type_change_info): New structure based on ipa-prop
variant.
(noncall_stmt_may_be_vtbl_ptr_store): New predicate
based on ipa-prop variant.
(extr_type_from_vtbl_ptr_store): New function
based on ipa-prop variant.
(record_known_type): New function.
(check_stmt_for_type_change): New function.
(get_dynamic_type): New function.
* ipa-prop.c (ipa_analyze_call_uses): Use get_dynamic_type.
	* tree-ssa-pre.c: Include ipa-utils.h.
(eliminate_dom_walker::before_dom_children): Use ipa-devirt
machinery; sanity check with ipa-prop devirtualization.
* trans-mem.c (ipa_tm_insert_gettmclone_call): Clear
polymorphic flag.
* g++.dg/ipa/devirt-35.C: New testcase.
* g++.dg/ipa/devirt-36.C: New testcase.
From-SVN: r213739
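The new g++.dg/ipa/devirt-35.C and devirt-36.C testcases are not part of this diff (it is limited to gcc/ipa-devirt.c), but the kind of code the new get_dynamic_type walk targets looks roughly like the hedged sketch below; the class and function names are invented for illustration and this is not the actual testcase source:

```cpp
// Illustrative sketch only -- not the actual g++.dg/ipa/devirt-35.C.
// The object is constructed in the same function that makes the virtual
// call, so walking the virtual definitions backwards from the call reaches
// B's constructor (or the inlined vtable pointer store); the dynamic type
// is then known to be B and the call can become a direct call to B::f.
struct A
{
  virtual int f () { return 1; }
};

struct B : A
{
  virtual int f () { return 2; }
};

int
test ()
{
  B b;            // constructor stores B's vtable pointer into 'b'
  A *p = &b;
  return p->f (); // devirtualizable to B::f once the dynamic type is known
}

int
main ()
{
  return test () == 2 ? 0 : 1;
}
```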
Diffstat (limited to 'gcc/ipa-devirt.c')
-rw-r--r-- | gcc/ipa-devirt.c | 527 |
1 file changed, 521 insertions, 6 deletions
diff --git a/gcc/ipa-devirt.c b/gcc/ipa-devirt.c
index 56eeaf5..8827d0e 100644
--- a/gcc/ipa-devirt.c
+++ b/gcc/ipa-devirt.c
@@ -131,6 +131,7 @@ along with GCC; see the file COPYING3. If not see
 #include "tree-dfa.h"
 #include "demangle.h"
 #include "dbgcnt.h"
+#include "gimple-pretty-print.h"
 #include "stor-layout.h"
 #include "intl.h"
 #include "hash-map.h"
@@ -1323,6 +1324,7 @@ referenced_from_vtable_p (struct cgraph_node *node)
   bool found = false;
 
   if (node->externally_visible
+      || DECL_EXTERNAL (node->decl)
      || node->used_from_other_partition)
    return true;
 
@@ -2113,7 +2115,7 @@ decl_maybe_in_construction_p (tree base, tree outer_type,
       if (TREE_CODE (TREE_TYPE (fn)) != METHOD_TYPE
           || (!DECL_CXX_CONSTRUCTOR_P (fn)
-              || !DECL_CXX_DESTRUCTOR_P (fn)))
+              && !DECL_CXX_DESTRUCTOR_P (fn)))
         {
           /* Watch for clones where we constant propagated the first
              argument (pointer to the instance).  */
@@ -2122,7 +2124,7 @@ decl_maybe_in_construction_p (tree base, tree outer_type,
               || !is_global_var (base)
               || TREE_CODE (TREE_TYPE (fn)) != METHOD_TYPE
               || (!DECL_CXX_CONSTRUCTOR_P (fn)
-                  || !DECL_CXX_DESTRUCTOR_P (fn)))
+                  && !DECL_CXX_DESTRUCTOR_P (fn)))
             continue;
         }
       if (flags_from_decl_or_type (fn) & (ECF_PURE | ECF_CONST))
@@ -2142,7 +2144,7 @@ decl_maybe_in_construction_p (tree base, tree outer_type,
     {
       if (TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE
           || (!DECL_CXX_CONSTRUCTOR_P (function)
-              || !DECL_CXX_DESTRUCTOR_P (function)))
+              && !DECL_CXX_DESTRUCTOR_P (function)))
         {
           if (!DECL_ABSTRACT_ORIGIN (function))
             return false;
@@ -2152,7 +2154,7 @@ decl_maybe_in_construction_p (tree base, tree outer_type,
       if (!function
           || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE
           || (!DECL_CXX_CONSTRUCTOR_P (function)
-              || !DECL_CXX_DESTRUCTOR_P (function)))
+              && !DECL_CXX_DESTRUCTOR_P (function)))
         return false;
     }
   /* FIXME: this can go away once we have ODR types equivalency on
@@ -2243,7 +2245,8 @@ walk_ssa_copies (tree op)
    call (OTR_TYPE), its token (OTR_TOKEN) and CONTEXT.
    CALL is optional argument giving the actual statement (usually call) where
    the context is used.
-   Return pointer to object described by the context  */
+   Return pointer to object described by the context or a declaration if
+   we found the instance to be stored in static storage.  */
 
 tree
 get_polymorphic_call_info (tree fndecl,
@@ -2317,7 +2320,7 @@ get_polymorphic_call_info (tree fndecl,
                                           context->outer_type,
                                           call, current_function_decl);
-              return base_pointer;
+              return base;
             }
           else
             break;
@@ -2436,6 +2439,515 @@ get_polymorphic_call_info (tree fndecl,
   return base_pointer;
 }
 
+/* Structure to be passed in between detect_type_change and
+   check_stmt_for_type_change.  */
+
+struct type_change_info
+{
+  /* Offset into the object where there is the virtual method pointer we are
+     looking for.  */
+  HOST_WIDE_INT offset;
+  /* The declaration or SSA_NAME pointer of the base that we are checking for
+     type change.  */
+  tree instance;
+  /* The reference to virtual table pointer used.  */
+  tree vtbl_ptr_ref;
+  tree otr_type;
+  /* If we actually can tell the type that the object has changed to, it is
+     stored in this field.  Otherwise it remains NULL_TREE.  */
+  tree known_current_type;
+  HOST_WIDE_INT known_current_offset;
+
+  /* Set to true if dynamic type change has been detected.  */
+  bool type_maybe_changed;
+  /* Set to true if multiple types have been encountered.  known_current_type
+     must be disregarded in that case.  */
+  bool multiple_types_encountered;
+  /* Set to true if we possibly missed some dynamic type changes and we should
+     consider the set to be speculative.  */
+  bool speculative;
+  bool seen_unanalyzed_store;
+};
+
+/* Return true if STMT is not a call and can modify a virtual method table
+   pointer.  We take advantage of the fact that vtable stores must appear
+   within constructor and destructor functions.  */
+
+bool
+noncall_stmt_may_be_vtbl_ptr_store (gimple stmt)
+{
+  if (is_gimple_assign (stmt))
+    {
+      tree lhs = gimple_assign_lhs (stmt);
+
+      if (gimple_clobber_p (stmt))
+        return false;
+      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
+        {
+          if (flag_strict_aliasing
+              && !POINTER_TYPE_P (TREE_TYPE (lhs)))
+            return false;
+
+          if (TREE_CODE (lhs) == COMPONENT_REF
+              && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
+            return false;
+          /* In the future we might want to use get_base_ref_and_offset to find
+             if there is a field corresponding to the offset and if so, proceed
+             almost like if it was a component ref.  */
+        }
+    }
+
+  /* Code unification may mess with inline stacks.  */
+  if (cfun->after_inlining)
+    return true;
+
+  /* Walk the inline stack and watch out for ctors/dtors.
+     TODO: Maybe we can require the store to appear in toplevel
+     block of CTOR/DTOR.  */
+  for (tree block = gimple_block (stmt); block && TREE_CODE (block) == BLOCK;
+       block = BLOCK_SUPERCONTEXT (block))
+    if (BLOCK_ABSTRACT_ORIGIN (block)
+        && TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block)) == FUNCTION_DECL)
+      {
+        tree fn = BLOCK_ABSTRACT_ORIGIN (block);
+
+        if (flags_from_decl_or_type (fn) & (ECF_PURE | ECF_CONST))
+          return false;
+        return (TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE
+                && (DECL_CXX_CONSTRUCTOR_P (fn)
+                    || DECL_CXX_DESTRUCTOR_P (fn)));
+      }
+  return (TREE_CODE (TREE_TYPE (current_function_decl)) == METHOD_TYPE
+          && (DECL_CXX_CONSTRUCTOR_P (current_function_decl)
+              || DECL_CXX_DESTRUCTOR_P (current_function_decl)));
+}
+
+/* If STMT can be proved to be an assignment to the virtual method table
+   pointer of ANALYZED_OBJ and the type associated with the new table
+   identified, return the type.  Otherwise return NULL_TREE.  */
+
+static tree
+extr_type_from_vtbl_ptr_store (gimple stmt, struct type_change_info *tci,
+                               HOST_WIDE_INT *type_offset)
+{
+  HOST_WIDE_INT offset, size, max_size;
+  tree lhs, rhs, base, binfo;
+
+  if (!gimple_assign_single_p (stmt))
+    return NULL_TREE;
+
+  lhs = gimple_assign_lhs (stmt);
+  rhs = gimple_assign_rhs1 (stmt);
+  if (TREE_CODE (lhs) != COMPONENT_REF
+      || !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
+    return NULL_TREE;
+
+  if (tci->vtbl_ptr_ref && operand_equal_p (lhs, tci->vtbl_ptr_ref, 0))
+    ;
+  else
+    {
+      base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
+      if (offset != tci->offset
+          || size != POINTER_SIZE
+          || max_size != POINTER_SIZE)
+        return NULL_TREE;
+      if (DECL_P (tci->instance))
+        {
+          if (base != tci->instance)
+            return NULL_TREE;
+        }
+      else if (TREE_CODE (base) == MEM_REF)
+        {
+          if (!operand_equal_p (tci->instance, TREE_OPERAND (base, 0), 0)
+              || !integer_zerop (TREE_OPERAND (base, 1)))
+            return NULL_TREE;
+        }
+      else if (!operand_equal_p (tci->instance, base, 0)
+               || tci->offset)
+        return NULL_TREE;
+    }
+
+  binfo = vtable_pointer_value_to_binfo (rhs);
+
+  if (!binfo)
+    return NULL;
+  *type_offset = tree_to_shwi (BINFO_OFFSET (binfo)) * BITS_PER_UNIT;
+  if (TYPE_BINFO (BINFO_TYPE (binfo)) == binfo)
+    return BINFO_TYPE (binfo);
+
+  /* TODO: Figure out the type containing BINFO.  */
+  return NULL;
+}
+
+/* Record dynamic type change of TCI to TYPE.  */
+
+void
+record_known_type (struct type_change_info *tci, tree type, HOST_WIDE_INT offset)
+{
+  if (dump_file)
+    {
+      if (type)
+        {
+          fprintf (dump_file, " Recording type: ");
+          print_generic_expr (dump_file, type, TDF_SLIM);
+          fprintf (dump_file, " at offset %i\n", (int)offset);
+        }
+      else
+        fprintf (dump_file, " Recording unknown type\n");
+    }
+  if (tci->type_maybe_changed
+      && (type != tci->known_current_type
+          || offset != tci->known_current_offset))
+    tci->multiple_types_encountered = true;
+  tci->known_current_type = type;
+  tci->known_current_offset = offset;
+  tci->type_maybe_changed = true;
+}
+
+/* Callback of walk_aliased_vdefs and a helper function for
+   detect_type_change to check whether a particular statement may modify
+   the virtual table pointer, and if possible also determine the new type of
+   the (sub-)object.  It stores its result into DATA, which points to a
+   type_change_info structure.  */
+
+static bool
+check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
+{
+  gimple stmt = SSA_NAME_DEF_STMT (vdef);
+  struct type_change_info *tci = (struct type_change_info *) data;
+  tree fn;
+
+  /* If we already gave up, just terminate the rest of walk.  */
+  if (tci->multiple_types_encountered)
+    return true;
+
+  if (is_gimple_call (stmt))
+    {
+      if (gimple_call_flags (stmt) & (ECF_CONST | ECF_PURE))
+        return false;
+
+      /* Check for a constructor call.  */
+      if ((fn = gimple_call_fndecl (stmt)) != NULL_TREE
+          && DECL_CXX_CONSTRUCTOR_P (fn)
+          && TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE
+          && gimple_call_num_args (stmt))
+        {
+          tree op = walk_ssa_copies (gimple_call_arg (stmt, 0));
+          tree type = method_class_type (TREE_TYPE (fn));
+          HOST_WIDE_INT offset = 0, size, max_size;
+
+          if (dump_file)
+            {
+              fprintf (dump_file, " Checking constructor call: ");
+              print_gimple_stmt (dump_file, stmt, 0, 0);
+            }
+
+          /* See if THIS parameter seems like instance pointer.  */
+          if (TREE_CODE (op) == ADDR_EXPR)
+            {
+              op = get_ref_base_and_extent (TREE_OPERAND (op, 0),
+                                            &offset, &size, &max_size);
+              if (size != max_size || max_size == -1)
+                {
+                  tci->speculative = true;
+                  return false;
+                }
+              if (op && TREE_CODE (op) == MEM_REF)
+                {
+                  if (!tree_fits_shwi_p (TREE_OPERAND (op, 1)))
+                    {
+                      tci->speculative = true;
+                      return false;
+                    }
+                  offset += tree_to_shwi (TREE_OPERAND (op, 1))
+                            * BITS_PER_UNIT;
+                  op = TREE_OPERAND (op, 0);
+                }
+              else
+                {
+                  tci->speculative = true;
+                  return false;
+                }
+              op = walk_ssa_copies (op);
+            }
+          if (operand_equal_p (op, tci->instance, 0)
+              && TYPE_SIZE (type)
+              && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
+              && tree_fits_shwi_p (TYPE_SIZE (type))
+              && tree_to_shwi (TYPE_SIZE (type)) + offset > tci->offset)
+            {
+              record_known_type (tci, type, tci->offset - offset);
+              return true;
+            }
+        }
+      /* Calls may possibly change dynamic type by placement new.  Assume
+         it will not happen, but make result speculative only.  */
+      if (dump_file)
+        {
+          fprintf (dump_file, " Function call may change dynamic type:");
+          print_gimple_stmt (dump_file, stmt, 0, 0);
+        }
+      tci->speculative = true;
+      return false;
+    }
+  /* Check for inlined virtual table store.  */
+  else if (noncall_stmt_may_be_vtbl_ptr_store (stmt))
+    {
+      tree type;
+      HOST_WIDE_INT offset = 0;
+      if (dump_file)
+        {
+          fprintf (dump_file, " Checking vtbl store: ");
+          print_gimple_stmt (dump_file, stmt, 0, 0);
+        }
+
+      type = extr_type_from_vtbl_ptr_store (stmt, tci, &offset);
+      gcc_assert (!type || TYPE_MAIN_VARIANT (type) == type);
+      if (!type)
+        {
+          if (dump_file)
+            fprintf (dump_file, " Unanalyzed store may change type.\n");
+          tci->seen_unanalyzed_store = true;
+          tci->speculative = true;
+        }
+      else
+        record_known_type (tci, type, offset);
+      return true;
+    }
+  else
+    return false;
+}
+
+/* CONTEXT is polymorphic call context obtained from get_polymorphic_context.
+   OTR_OBJECT is pointer to the instance returned by OBJ_TYPE_REF_OBJECT.
+   INSTANCE is pointer to the outer instance as returned by
+   get_polymorphic_context.  To avoid creation of temporary expressions,
+   INSTANCE may also be a declaration if get_polymorphic_context found the
+   value to be in static storage.
+
+   If the type of instance is not fully determined
+   (either OUTER_TYPE is unknown or MAYBE_IN_CONSTRUCTION/INCLUDE_DERIVED_TYPES
+   is set), try to walk memory writes and find the actual construction of the
+   instance.
+
+   We do not include this analysis in the context analysis itself, because
+   it needs memory SSA to be fully built and the walk may be expensive.
+   So it is not suitable for use within fold_stmt and similar uses.  */
+
+bool
+get_dynamic_type (tree instance,
+                  ipa_polymorphic_call_context *context,
+                  tree otr_object,
+                  tree otr_type,
+                  gimple call)
+{
+  struct type_change_info tci;
+  ao_ref ao;
+  bool function_entry_reached = false;
+  tree instance_ref = NULL;
+  gimple stmt = call;
+
+  if (!context->maybe_in_construction && !context->maybe_derived_type)
+    return false;
+
+  /* We need to obtain reference to virtual table pointer.  It is better
+     to look it up in the code rather than build our own.  This requires a bit
+     of pattern matching, but we end up verifying that what we found is
+     correct.
+
+     What we pattern match is:
+
+       tmp = instance->_vptr.A;   // vtbl ptr load
+       tmp2 = tmp[otr_token];     // vtable lookup
+       OBJ_TYPE_REF(tmp2;instance->0) (instance);
+
+     We want to start alias oracle walk from vtbl pointer load,
+     but we may not be able to identify it, for example, when PRE moved the
+     load around.  */
+
+  if (gimple_code (call) == GIMPLE_CALL)
+    {
+      tree ref = gimple_call_fn (call);
+      HOST_WIDE_INT offset2, size, max_size;
+
+      if (TREE_CODE (ref) == OBJ_TYPE_REF)
+        {
+          ref = OBJ_TYPE_REF_EXPR (ref);
+          ref = walk_ssa_copies (ref);
+
+          /* Check if definition looks like vtable lookup.  */
+          if (TREE_CODE (ref) == SSA_NAME
+              && !SSA_NAME_IS_DEFAULT_DEF (ref)
+              && gimple_assign_load_p (SSA_NAME_DEF_STMT (ref))
+              && TREE_CODE (gimple_assign_rhs1
+                             (SSA_NAME_DEF_STMT (ref))) == MEM_REF)
+            {
+              ref = get_base_address
+                     (TREE_OPERAND (gimple_assign_rhs1
+                                     (SSA_NAME_DEF_STMT (ref)), 0));
+              ref = walk_ssa_copies (ref);
+              /* Find base address of the lookup and see if it looks like
+                 vptr load.  */
+              if (TREE_CODE (ref) == SSA_NAME
+                  && !SSA_NAME_IS_DEFAULT_DEF (ref)
+                  && gimple_assign_load_p (SSA_NAME_DEF_STMT (ref)))
+                {
+                  tree ref_exp = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (ref));
+                  tree base_ref = get_ref_base_and_extent
+                                   (ref_exp, &offset2, &size, &max_size);
+
+                  /* Finally verify that what we found looks like read from
+                     OTR_OBJECT or from INSTANCE with offset OFFSET.  */
+                  if (base_ref
+                      && TREE_CODE (base_ref) == MEM_REF
+                      && ((offset2 == context->offset
+                           && TREE_OPERAND (base_ref, 0) == instance)
+                          || (!offset2
+                              && TREE_OPERAND (base_ref, 0) == otr_object)))
+                    {
+                      stmt = SSA_NAME_DEF_STMT (ref);
+                      instance_ref = ref_exp;
+                    }
+                }
+            }
+        }
+    }
+
+  /* If we failed to look up the reference in code, build our own.  */
+  if (!instance_ref)
+    {
+      /* If the statement in question does not use memory, we can't tell
+         anything.  */
+      if (!gimple_vuse (stmt))
+        return false;
+      ao_ref_init_from_ptr_and_size (&ao, otr_object, NULL);
+    }
+  else
+    /* Otherwise use the real reference.  */
+    ao_ref_init (&ao, instance_ref);
+
+  /* We look for vtbl pointer read.  */
+  ao.size = POINTER_SIZE;
+  ao.max_size = ao.size;
+  ao.ref_alias_set
+    = get_deref_alias_set (TREE_TYPE (BINFO_VTABLE (TYPE_BINFO (otr_type))));
+
+  if (dump_file)
+    {
+      fprintf (dump_file, "Determining dynamic type for call: ");
+      print_gimple_stmt (dump_file, call, 0, 0);
+      fprintf (dump_file, " Starting walk at: ");
+      print_gimple_stmt (dump_file, stmt, 0, 0);
+      fprintf (dump_file, " instance pointer: ");
+      print_generic_expr (dump_file, otr_object, TDF_SLIM);
+      fprintf (dump_file, " Outer instance pointer: ");
+      print_generic_expr (dump_file, instance, TDF_SLIM);
+      fprintf (dump_file, " offset: %i (bits)", (int)context->offset);
+      fprintf (dump_file, " vtbl reference: ");
+      print_generic_expr (dump_file, instance_ref, TDF_SLIM);
+      fprintf (dump_file, "\n");
+    }
+
+  tci.offset = context->offset;
+  tci.instance = instance;
+  tci.vtbl_ptr_ref = instance_ref;
+  gcc_assert (TREE_CODE (instance) != MEM_REF);
+  tci.known_current_type = NULL_TREE;
+  tci.known_current_offset = 0;
+  tci.otr_type = otr_type;
+  tci.type_maybe_changed = false;
+  tci.multiple_types_encountered = false;
+  tci.speculative = false;
+  tci.seen_unanalyzed_store = false;
+
+  walk_aliased_vdefs (&ao, gimple_vuse (stmt), check_stmt_for_type_change,
+                      &tci, NULL, &function_entry_reached);
+
+  /* If we did not find any type changing statements, we may still drop
+     maybe_in_construction flag if the context already has an outer type.
+
+     Here we make special assumptions about both constructors and
+     destructors which are all the functions that are allowed to alter the
+     VMT pointers.  It assumes that destructors begin with assignment into
+     all VMT pointers and that constructors essentially look in the
+     following way:
+
+     1) The very first thing they do is that they call constructors of
+        ancestor sub-objects that have them.
+
+     2) Then VMT pointers of this and all its ancestors are set to new
+        values corresponding to the type corresponding to the constructor.
+
+     3) Only afterwards, other stuff such as constructor of member
+        sub-objects and the code written by the user is run.  Only this may
+        include calling virtual functions, directly or indirectly.
+
+     4) placement new cannot be used to change type of non-POD statically
+        allocated variables.
+
+     There is no way to call a constructor of an ancestor sub-object in any
+     other way.
+
+     This means that we do not have to care whether constructors get the
+     correct type information because they will always change it (in fact,
+     if we define the type to be given by the VMT pointer, it is undefined).
+
+     The most important fact to derive from the above is that if, for some
+     statement in the section 3, we try to detect whether the dynamic type
+     has changed, we can safely ignore all calls as we examine the function
+     body backwards until we reach statements in section 2 because these
+     calls cannot be ancestor constructors or destructors (if the input is
+     not bogus) and so do not change the dynamic type (this holds true only
+     for automatically allocated objects but at the moment we devirtualize
+     only these).  We then must detect that statements in section 2 change
+     the dynamic type and can try to derive the new type.  That is enough
+     and we can stop, we will never see the calls into constructors of
+     sub-objects in this code.
+
+     Therefore if the static outer type was found (context->outer_type)
+     we can safely ignore tci.speculative that is set on calls and give up
+     only if there was a dynamic type store that may affect the given
+     variable (seen_unanalyzed_store).  */
+
+  if (!tci.type_maybe_changed)
+    {
+      if (!context->outer_type || tci.seen_unanalyzed_store)
+        return false;
+      if (context->maybe_in_construction)
+        context->maybe_in_construction = false;
+      if (dump_file)
+        fprintf (dump_file, " No dynamic type change found.\n");
+      return true;
+    }
+
+  if (tci.known_current_type
+      && !function_entry_reached
+      && !tci.multiple_types_encountered)
+    {
+      if (!tci.speculative)
+        {
+          context->outer_type = tci.known_current_type;
+          context->offset = tci.known_current_offset;
+          context->maybe_in_construction = false;
+          context->maybe_derived_type = false;
+          if (dump_file)
+            fprintf (dump_file, " Determined dynamic type.\n");
+        }
+      else if (!context->speculative_outer_type
+               || context->speculative_maybe_derived_type)
+        {
+          context->speculative_outer_type = tci.known_current_type;
+          context->speculative_offset = tci.known_current_offset;
+          context->speculative_maybe_derived_type = false;
+          if (dump_file)
+            fprintf (dump_file, " Determined speculative dynamic type.\n");
+        }
+    }
+  else if (dump_file)
+    fprintf (dump_file, " Found multiple types.\n");
+
+  return true;
+}
+
 /* Walk bases of OUTER_TYPE that contain OTR_TYPE at OFFSET.
    Lookup their respecitve virtual methods for OTR_TOKEN and OTR_TYPE
    and insert them to NODES.
@@ -2516,6 +3028,7 @@ devirt_variable_node_removal_hook (varpool_node *n,
 }
 
 /* Record about how many calls would benefit from given type to be final.  */
+
 struct odr_type_warn_count
 {
   tree type;
@@ -2524,6 +3037,7 @@ struct odr_type_warn_count
 };
 
 /* Record about how many calls would benefit from given method to be final.  */
+
 struct decl_warn_count
 {
   tree decl;
@@ -2532,6 +3046,7 @@ struct decl_warn_count
 };
 
 /* Information about type and decl warnings.  */
+
 struct final_warning_record
 {
   gcov_type dyn_count;
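The long comment in get_dynamic_type about constructor ordering can also be seen in plain C++ terms. The sketch below is illustrative only (the pass inspects GIMPLE, not source, and the class names here are invented); it shows why a virtual call made in the constructor body already observes the vtable pointer stored in step (2):

```cpp
#include <cstdio>

struct Base
{
  Base ();
  virtual void f () { std::puts ("Base"); }
};

struct Derived : Base
{
  Derived ();
  int member;
  virtual void f () { std::puts ("Derived"); }
};

Base::Base ()
{
  // Inside Base's body the object still carries Base's vtable pointer,
  // so this virtual call dispatches to Base::f.
  f ();
}

Derived::Derived ()
  : Base (),    // (1) ancestor constructors run first
    member (0)  // (3) member initialization and the user body run only after
                //     the compiler has stored Derived's vtable pointer into
                //     *this -- step (2), the store that
                //     extr_type_from_vtbl_ptr_store pattern-matches
{
  f ();         // already dispatches to Derived::f
}

int
main ()
{
  Derived d;    // prints "Base" then "Derived"
  return 0;
}
```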