aboutsummaryrefslogtreecommitdiff
path: root/gcc/ipa-prop.c
diff options
context:
space:
mode:
authorMartin Jambor <mjambor@suse.cz>2013-05-13 12:16:09 +0200
committerMartin Jambor <jamborm@gcc.gnu.org>2013-05-13 12:16:09 +0200
commit4502fe8dfcb1cc2c59b50b868ac75fb5cdd742fc (patch)
treeeb36587215072023d7faafc93b4cb03eb6f629f3 /gcc/ipa-prop.c
parent0864bfc257a3d3198ba182c562b729ba1e96466d (diff)
downloadgcc-4502fe8dfcb1cc2c59b50b868ac75fb5cdd742fc.zip
gcc-4502fe8dfcb1cc2c59b50b868ac75fb5cdd742fc.tar.gz
gcc-4502fe8dfcb1cc2c59b50b868ac75fb5cdd742fc.tar.bz2
re PR middle-end/42371 (dead code not eliminated during folding with whole-program)
2013-05-13 Martin Jambor <mjambor@suse.cz> PR middle-end/42371 * ipa-prop.h (IPA_UNDESCRIBED_USE): New macro. (ipa_constant_data): New type. (ipa_jump_func): Use ipa_constant_data to hold information about constant jump functions. (ipa_get_jf_constant): Adjust to jump function type changes. (ipa_get_jf_constant_rdesc): New function. (ipa_param_descriptor): New field controlled_uses. (ipa_get_controlled_uses): New function. (ipa_set_controlled_uses): Likewise. * ipa-ref.h (ipa_find_reference): Declare. * ipa-prop.c (ipa_cst_ref_desc): New type. (ipa_print_node_jump_functions_for_edge): Adjust for jump function type changes. (ipa_set_jf_constant): Likewise. Also create reference descriptions. New parameter cs. Adjust all callers. (ipa_analyze_params_uses): Detect uncontrolled and controlled uses. (remove_described_reference): New function. (jfunc_rdesc_usable): Likewise. (try_make_edge_direct_simple_call): Decrement controlled use count, attempt to remove reference if it hits zero. (combine_controlled_uses_counters): New function. (propagate_controlled_uses): Likewise. (ipa_propagate_indirect_call_infos): Call propagate_controlled_uses. (ipa_edge_duplication_hook): Duplicate reference descriptions. (ipa_print_node_params): Print described use counter. (ipa_write_jump_function): Adjust to jump function type changes. (ipa_read_jump_function): New parameter CS, pass it to ipa_set_jf_constant. Adjust caller. (ipa_write_node_info): Stream controlled use count (ipa_read_node_info): Likewise. * cgraph.c (cgraph_mark_address_taken_node): Bail out instead of asserting. * ipa-cp.c (ipcp_discover_new_direct_edges): Decrement controlled use count. Remove cloning-added reference if it reaches zero. * ipa-ref.c (ipa_find_reference): New function. testsuite/ * gcc.dg/ipa/remref-0.c: New test. * gcc.dg/ipa/remref-1a.c: Likewise. * gcc.dg/ipa/remref-1b.c: Likewise. * gcc.dg/ipa/remref-2a.c: Likewise. * gcc.dg/ipa/remref-2b.c: Likewise. From-SVN: r198821
Diffstat (limited to 'gcc/ipa-prop.c')
-rw-r--r--gcc/ipa-prop.c327
1 files changed, 309 insertions, 18 deletions
diff --git a/gcc/ipa-prop.c b/gcc/ipa-prop.c
index b98f9ad..76b3136 100644
--- a/gcc/ipa-prop.c
+++ b/gcc/ipa-prop.c
@@ -62,6 +62,22 @@ static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
static struct cgraph_2node_hook_list *node_duplication_hook_holder;
static struct cgraph_node_hook_list *function_insertion_hook_holder;
+/* Description of a reference to an IPA constant. */
+struct ipa_cst_ref_desc
+{
+ /* Edge that corresponds to the statement which took the reference. */
+ struct cgraph_edge *cs;
+ /* Linked list of duplicates created when call graph edges are cloned. */
+ struct ipa_cst_ref_desc *next_duplicate;
+ /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
+ is out of control. */
+ int refcount;
+};
+
+/* Allocation pool for reference descriptions. */
+
+static alloc_pool ipa_refdesc_pool;
+
/* Return index of the formal whose tree is PTREE in function which corresponds
to INFO. */
@@ -175,7 +191,7 @@ ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
}
else if (type == IPA_JF_CONST)
{
- tree val = jump_func->value.constant;
+ tree val = jump_func->value.constant.value;
fprintf (f, "CONST: ");
print_generic_expr (f, val, 0);
if (TREE_CODE (val) == ADDR_EXPR
@@ -309,13 +325,31 @@ ipa_set_jf_known_type (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
/* Set JFUNC to be a constant jmp function. */
static void
-ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant)
+ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
+ struct cgraph_edge *cs)
{
constant = unshare_expr (constant);
if (constant && EXPR_P (constant))
SET_EXPR_LOCATION (constant, UNKNOWN_LOCATION);
jfunc->type = IPA_JF_CONST;
- jfunc->value.constant = unshare_expr_without_location (constant);
+ jfunc->value.constant.value = unshare_expr_without_location (constant);
+
+ if (TREE_CODE (constant) == ADDR_EXPR
+ && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
+ {
+ struct ipa_cst_ref_desc *rdesc;
+ if (!ipa_refdesc_pool)
+ ipa_refdesc_pool = create_alloc_pool ("IPA-PROP ref descriptions",
+ sizeof (struct ipa_cst_ref_desc), 32);
+
+ rdesc = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
+ rdesc->cs = cs;
+ rdesc->next_duplicate = NULL;
+ rdesc->refcount = 1;
+ jfunc->value.constant.rdesc = rdesc;
+ }
+ else
+ jfunc->value.constant.rdesc = NULL;
}
/* Set JFUNC to be a simple pass-through jump function. */
@@ -1404,7 +1438,7 @@ ipa_compute_jump_functions_for_edge (struct param_analysis_info *parms_ainfo,
tree arg = gimple_call_arg (call, n);
if (is_gimple_ip_invariant (arg))
- ipa_set_jf_constant (jfunc, arg);
+ ipa_set_jf_constant (jfunc, arg, cs);
else if (!is_gimple_reg_type (TREE_TYPE (arg))
&& TREE_CODE (arg) == PARM_DECL)
{
@@ -1891,14 +1925,35 @@ ipa_analyze_params_uses (struct cgraph_node *node,
for (i = 0; i < ipa_get_param_count (info); i++)
{
tree parm = ipa_get_param (info, i);
- tree ddef;
+ int controlled_uses = 0;
+
/* For SSA regs see if parameter is used. For non-SSA we compute
the flag during modification analysis. */
- if (is_gimple_reg (parm)
- && (ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->symbol.decl),
- parm)) != NULL_TREE
- && !has_zero_uses (ddef))
- ipa_set_param_used (info, i, true);
+ if (is_gimple_reg (parm))
+ {
+ tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->symbol.decl),
+ parm);
+ if (ddef && !has_zero_uses (ddef))
+ {
+ imm_use_iterator imm_iter;
+ use_operand_p use_p;
+
+ ipa_set_param_used (info, i, true);
+ FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
+ if (!is_gimple_call (USE_STMT (use_p)))
+ {
+ controlled_uses = IPA_UNDESCRIBED_USE;
+ break;
+ }
+ else
+ controlled_uses++;
+ }
+ else
+ controlled_uses = 0;
+ }
+ else
+ controlled_uses = IPA_UNDESCRIBED_USE;
+ ipa_set_controlled_uses (info, i, controlled_uses);
}
func = DECL_STRUCT_FUNCTION (decl);
@@ -2226,6 +2281,40 @@ ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg,
return NULL;
}
+/* Remove a reference to SYMBOL from the list of references of a node given by
+ reference description RDESC. */
+
+static void
+remove_described_reference (symtab_node symbol, struct ipa_cst_ref_desc *rdesc)
+{
+ struct ipa_ref *to_del;
+ struct cgraph_edge *origin;
+
+ origin = rdesc->cs;
+ to_del = ipa_find_reference ((symtab_node) origin->caller, symbol,
+ origin->call_stmt);
+ gcc_assert (to_del);
+ ipa_remove_reference (to_del);
+ if (dump_file)
+ fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
+ xstrdup (cgraph_node_name (origin->caller)),
+ origin->caller->uid, xstrdup (symtab_node_name (symbol)));
+}
+
+/* If JFUNC has a reference description with refcount different from
+ IPA_UNDESCRIBED_USE, return the reference description, otherwise return
+ NULL. JFUNC must be a constant jump function. */
+
+static struct ipa_cst_ref_desc *
+jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
+{
+ struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
+ if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
+ return rdesc;
+ else
+ return NULL;
+}
+
/* Try to find a destination for indirect edge IE that corresponds to a simple
call or a call of a member function pointer and where the destination is a
pointer formal parameter described by jump function JFUNC. If it can be
@@ -2237,6 +2326,8 @@ try_make_edge_direct_simple_call (struct cgraph_edge *ie,
struct ipa_jump_func *jfunc,
struct ipa_node_params *new_root_info)
{
+ struct ipa_cst_ref_desc *rdesc;
+ struct cgraph_edge *cs;
tree target;
if (ie->indirect_info->agg_contents)
@@ -2247,7 +2338,15 @@ try_make_edge_direct_simple_call (struct cgraph_edge *ie,
target = ipa_value_from_jfunc (new_root_info, jfunc);
if (!target)
return NULL;
- return ipa_make_edge_direct_to_target (ie, target);
+ cs = ipa_make_edge_direct_to_target (ie, target);
+
+ if (cs && !ie->indirect_info->agg_contents
+ && jfunc->type == IPA_JF_CONST
+ && (rdesc = jfunc_rdesc_usable (jfunc))
+ && --rdesc->refcount == 0)
+ remove_described_reference ((symtab_node) cs->callee, rdesc);
+
+ return cs;
}
/* Try to find a destination for indirect edge IE that corresponds to a virtual
@@ -2411,6 +2510,135 @@ propagate_info_to_inlined_callees (struct cgraph_edge *cs,
return res;
}
+/* Combine two controlled uses counts as done during inlining. */
+
+static int
+combine_controlled_uses_counters (int c, int d)
+{
+ if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
+ return IPA_UNDESCRIBED_USE;
+ else
+ return c + d - 1;
+}
+
+/* Propagate number of controlled users from CS->callee to the new root of the
+ tree of inlined nodes. */
+
+static void
+propagate_controlled_uses (struct cgraph_edge *cs)
+{
+ struct ipa_edge_args *args = IPA_EDGE_REF (cs);
+ struct cgraph_node *new_root = cs->caller->global.inlined_to
+ ? cs->caller->global.inlined_to : cs->caller;
+ struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
+ struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
+ int count, i;
+
+ count = MIN (ipa_get_cs_argument_count (args),
+ ipa_get_param_count (old_root_info));
+ for (i = 0; i < count; i++)
+ {
+ struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
+ struct ipa_cst_ref_desc *rdesc;
+
+ if (jf->type == IPA_JF_PASS_THROUGH)
+ {
+ int src_idx, c, d;
+ src_idx = ipa_get_jf_pass_through_formal_id (jf);
+ c = ipa_get_controlled_uses (new_root_info, src_idx);
+ d = ipa_get_controlled_uses (old_root_info, i);
+
+ gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
+ == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
+ c = combine_controlled_uses_counters (c, d);
+ ipa_set_controlled_uses (new_root_info, src_idx, c);
+ if (c == 0 && new_root_info->ipcp_orig_node)
+ {
+ struct cgraph_node *n;
+ struct ipa_ref *ref;
+ tree t = new_root_info->known_vals[src_idx];
+
+ if (t && TREE_CODE (t) == ADDR_EXPR
+ && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
+ && (n = cgraph_get_node (TREE_OPERAND (t, 0)))
+ && (ref = ipa_find_reference ((symtab_node) new_root,
+ (symtab_node) n, NULL)))
+ {
+ if (dump_file)
+ fprintf (dump_file, "ipa-prop: Removing cloning-created "
+ "reference from %s/%i to %s/%i.\n",
+ xstrdup (cgraph_node_name (new_root)),
+ new_root->uid,
+ xstrdup (cgraph_node_name (n)), n->uid);
+ ipa_remove_reference (ref);
+ }
+ }
+ }
+ else if (jf->type == IPA_JF_CONST
+ && (rdesc = jfunc_rdesc_usable (jf)))
+ {
+ int d = ipa_get_controlled_uses (old_root_info, i);
+ int c = rdesc->refcount;
+ rdesc->refcount = combine_controlled_uses_counters (c, d);
+ if (rdesc->refcount == 0)
+ {
+ tree cst = ipa_get_jf_constant (jf);
+ struct cgraph_node *n;
+ gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
+ && TREE_CODE (TREE_OPERAND (cst, 0))
+ == FUNCTION_DECL);
+ n = cgraph_get_node (TREE_OPERAND (cst, 0));
+ if (n)
+ {
+ struct cgraph_node *clone;
+ remove_described_reference ((symtab_node) n, rdesc);
+
+ clone = cs->caller;
+ while (clone->global.inlined_to
+ && clone != rdesc->cs->caller
+ && IPA_NODE_REF (clone)->ipcp_orig_node)
+ {
+ struct ipa_ref *ref;
+ ref = ipa_find_reference ((symtab_node) clone,
+ (symtab_node) n, NULL);
+ if (ref)
+ {
+ if (dump_file)
+ fprintf (dump_file, "ipa-prop: Removing "
+ "cloning-created reference "
+ "from %s/%i to %s/%i.\n",
+ xstrdup (cgraph_node_name (clone)),
+ clone->uid,
+ xstrdup (cgraph_node_name (n)),
+ n->uid);
+ ipa_remove_reference (ref);
+ }
+ clone = clone->callers->caller;
+ }
+ }
+ }
+ }
+ }
+
+ for (i = ipa_get_param_count (old_root_info);
+ i < ipa_get_cs_argument_count (args);
+ i++)
+ {
+ struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
+
+ if (jf->type == IPA_JF_CONST)
+ {
+ struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
+ if (rdesc)
+ rdesc->refcount = IPA_UNDESCRIBED_USE;
+ }
+ else if (jf->type == IPA_JF_PASS_THROUGH)
+ ipa_set_controlled_uses (new_root_info,
+ jf->value.pass_through.formal_id,
+ IPA_UNDESCRIBED_USE);
+ }
+}
+
/* Update jump functions and call note functions on inlining the call site CS.
CS is expected to lead to a node already cloned by
cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
@@ -2428,6 +2656,7 @@ ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
return false;
gcc_assert (ipa_edge_args_vector);
+ propagate_controlled_uses (cs);
changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
/* We do not keep jump functions of inlined edges up to date. Better to free
@@ -2543,8 +2772,53 @@ ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
- (*new_args->jump_functions)[i].agg.items
- = vec_safe_copy ((*old_args->jump_functions)[i].agg.items);
+ {
+ struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
+ struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
+
+ dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
+
+ if (src_jf->type == IPA_JF_CONST)
+ {
+ struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
+
+ if (!src_rdesc)
+ dst_jf->value.constant.rdesc = NULL;
+ else if (src_rdesc->cs == src)
+ {
+ struct ipa_cst_ref_desc *dst_rdesc;
+ gcc_checking_assert (ipa_refdesc_pool);
+ dst_rdesc
+ = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
+ dst_rdesc->cs = dst;
+ dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
+ src_rdesc->next_duplicate = dst_rdesc;
+ dst_rdesc->refcount = src_rdesc->refcount;
+ dst_jf->value.constant.rdesc = dst_rdesc;
+ }
+ else
+ {
+ struct ipa_cst_ref_desc *dst_rdesc;
+ /* This can happen during inlining, when a JFUNC can refer to a
+ reference taken in a function up in the tree of inline clones.
+ We need to find the duplicate that refers to our tree of
+ inline clones. */
+
+ gcc_assert (dst->caller->global.inlined_to);
+ for (dst_rdesc = src_rdesc->next_duplicate;
+ dst_rdesc;
+ dst_rdesc = dst_rdesc->next_duplicate)
+ {
+ gcc_assert (dst_rdesc->cs->caller->global.inlined_to);
+ if (dst_rdesc->cs->caller->global.inlined_to
+ == dst->caller->global.inlined_to)
+ break;
+ }
+
+ dst_jf->value.constant.rdesc = dst_rdesc;
+ }
+ }
+ }
}
/* Hook that is called by cgraph.c when a node is duplicated. */
@@ -2646,6 +2920,8 @@ ipa_free_all_structures_after_ipa_cp (void)
free_alloc_pool (ipcp_values_pool);
free_alloc_pool (ipcp_agg_lattice_pool);
ipa_unregister_cgraph_hooks ();
+ if (ipa_refdesc_pool)
+ free_alloc_pool (ipa_refdesc_pool);
}
}
@@ -2664,6 +2940,8 @@ ipa_free_all_structures_after_iinln (void)
free_alloc_pool (ipcp_values_pool);
if (ipcp_agg_lattice_pool)
free_alloc_pool (ipcp_agg_lattice_pool);
+ if (ipa_refdesc_pool)
+ free_alloc_pool (ipa_refdesc_pool);
}
/* Print ipa_tree_map data structures of all functions in the
@@ -2684,6 +2962,8 @@ ipa_print_node_params (FILE *f, struct cgraph_node *node)
count = ipa_get_param_count (info);
for (i = 0; i < count; i++)
{
+ int c;
+
temp = ipa_get_param (info, i);
if (TREE_CODE (temp) == PARM_DECL)
fprintf (f, " param %d : %s", i,
@@ -2692,6 +2972,11 @@ ipa_print_node_params (FILE *f, struct cgraph_node *node)
: "(unnamed)"));
if (ipa_is_param_used (info, i))
fprintf (f, " used");
+ c = ipa_get_controlled_uses (info, i);
+ if (c == IPA_UNDESCRIBED_USE)
+ fprintf (f, " undescribed_use");
+ else
+ fprintf (f, " controlled_uses=%i", c);
fprintf (f, "\n");
}
}
@@ -3311,8 +3596,8 @@ ipa_write_jump_function (struct output_block *ob,
break;
case IPA_JF_CONST:
gcc_assert (
- EXPR_LOCATION (jump_func->value.constant) == UNKNOWN_LOCATION);
- stream_write_tree (ob, jump_func->value.constant, true);
+ EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
+ stream_write_tree (ob, jump_func->value.constant.value, true);
break;
case IPA_JF_PASS_THROUGH:
streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
@@ -3360,6 +3645,7 @@ ipa_write_jump_function (struct output_block *ob,
static void
ipa_read_jump_function (struct lto_input_block *ib,
struct ipa_jump_func *jump_func,
+ struct cgraph_edge *cs,
struct data_in *data_in)
{
enum jump_func_type jftype;
@@ -3382,7 +3668,7 @@ ipa_read_jump_function (struct lto_input_block *ib,
break;
}
case IPA_JF_CONST:
- ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in));
+ ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
break;
case IPA_JF_PASS_THROUGH:
operation = (enum tree_code) streamer_read_uhwi (ib);
@@ -3503,6 +3789,8 @@ ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
for (j = 0; j < ipa_get_param_count (info); j++)
bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
streamer_write_bitpack (&bp);
+ for (j = 0; j < ipa_get_param_count (info); j++)
+ streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
for (e = node->callees; e; e = e->next_callee)
{
struct ipa_edge_args *args = IPA_EDGE_REF (e);
@@ -3540,6 +3828,8 @@ ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
info->uses_analysis_done = true;
info->node_enqueued = false;
for (k = 0; k < ipa_get_param_count (info); k++)
+ ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
+ for (k = 0; k < ipa_get_param_count (info); k++)
ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
for (e = node->callees; e; e = e->next_callee)
{
@@ -3551,7 +3841,8 @@ ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
vec_safe_grow_cleared (args->jump_functions, count);
for (k = 0; k < ipa_get_cs_argument_count (args); k++)
- ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), data_in);
+ ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
+ data_in);
}
for (e = node->indirect_calls; e; e = e->next_callee)
{
@@ -3562,7 +3853,7 @@ ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
{
vec_safe_grow_cleared (args->jump_functions, count);
for (k = 0; k < ipa_get_cs_argument_count (args); k++)
- ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k),
+ ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
data_in);
}
ipa_read_indirect_edge_info (ib, data_in, e);