path: root/gcc/tree-ssa-sccvn.c
Diffstat (limited to 'gcc/tree-ssa-sccvn.c')
-rw-r--r--  gcc/tree-ssa-sccvn.c | 305
1 file changed, 228 insertions(+), 77 deletions(-)
diff --git a/gcc/tree-ssa-sccvn.c b/gcc/tree-ssa-sccvn.c
index 99759a8..bfa516b 100644
--- a/gcc/tree-ssa-sccvn.c
+++ b/gcc/tree-ssa-sccvn.c
@@ -249,6 +249,55 @@ vn_reference_hasher::equal (const vn_reference_s *v, const vn_reference_s *c)
typedef hash_table<vn_reference_hasher> vn_reference_table_type;
typedef vn_reference_table_type::iterator vn_reference_iterator_type;
+/* Pretty-print OPS to OUTFILE.  */
+
+void
+print_vn_reference_ops (FILE *outfile, const vec<vn_reference_op_s> ops)
+{
+  vn_reference_op_t vro;
+  unsigned int i;
+  fprintf (outfile, "{");
+  for (i = 0; ops.iterate (i, &vro); i++)
+    {
+      bool closebrace = false;
+      if (vro->opcode != SSA_NAME
+          && TREE_CODE_CLASS (vro->opcode) != tcc_declaration)
+        {
+          fprintf (outfile, "%s", get_tree_code_name (vro->opcode));
+          if (vro->op0)
+            {
+              fprintf (outfile, "<");
+              closebrace = true;
+            }
+        }
+      if (vro->op0)
+        {
+          print_generic_expr (outfile, vro->op0);
+          if (vro->op1)
+            {
+              fprintf (outfile, ",");
+              print_generic_expr (outfile, vro->op1);
+            }
+          if (vro->op2)
+            {
+              fprintf (outfile, ",");
+              print_generic_expr (outfile, vro->op2);
+            }
+        }
+      if (closebrace)
+        fprintf (outfile, ">");
+      if (i != ops.length () - 1)
+        fprintf (outfile, ",");
+    }
+  fprintf (outfile, "}");
+}
+
+DEBUG_FUNCTION void
+debug_vn_reference_ops (const vec<vn_reference_op_s> ops)
+{
+  print_vn_reference_ops (stderr, ops);
+  fputc ('\n', stderr);
+}
+
/* The set of VN hashtables. */
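
A minimal sketch, not part of the patch: one way the new printer could be used from a dump-enabled spot in the pass.  maybe_dump_reference_ops is a hypothetical helper; dump_file, dump_flags and TDF_DETAILS are the usual GCC dump globals.  From a debugger, debug_vn_reference_ops (vr->operands) can be called directly instead.

  static void
  maybe_dump_reference_ops (vn_reference_t vr)
  {
    /* Only emit the operand vector into detailed dumps.  */
    if (dump_file && (dump_flags & TDF_DETAILS))
      {
        fprintf (dump_file, "reference ops: ");
        print_vn_reference_ops (dump_file, vr->operands);
        fputc ('\n', dump_file);
      }
  }
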
@@ -374,7 +423,7 @@ static unsigned int vn_nary_length_from_stmt (gimple *);
static vn_nary_op_t alloc_vn_nary_op_noinit (unsigned int, obstack *);
static vn_nary_op_t vn_nary_op_insert_into (vn_nary_op_t,
vn_nary_op_table_type *, bool);
-static void init_vn_nary_op_from_stmt (vn_nary_op_t, gimple *);
+static void init_vn_nary_op_from_stmt (vn_nary_op_t, gassign *);
static void init_vn_nary_op_from_pieces (vn_nary_op_t, unsigned int,
enum tree_code, tree, tree *);
static tree vn_lookup_simplify_result (gimple_match_op *);
@@ -504,7 +553,7 @@ vuse_ssa_val (tree x)
return x;
}
-/* Similar to the above but used as callback for walk_non_aliases_vuses
+/* Similar to the above but used as callback for walk_non_aliased_vuses
and thus should stop at unvisited VUSE to not walk across region
boundaries. */
@@ -715,14 +764,18 @@ vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
if (vr1->operands == vr2->operands)
return true;
- if (COMPLETE_TYPE_P (vr1->type) != COMPLETE_TYPE_P (vr2->type)
- || (COMPLETE_TYPE_P (vr1->type)
- && !expressions_equal_p (TYPE_SIZE (vr1->type),
- TYPE_SIZE (vr2->type))))
+ if (!vr1->type || !vr2->type)
+ {
+ if (vr1->type != vr2->type)
+ return false;
+ }
+ else if (COMPLETE_TYPE_P (vr1->type) != COMPLETE_TYPE_P (vr2->type)
+ || (COMPLETE_TYPE_P (vr1->type)
+ && !expressions_equal_p (TYPE_SIZE (vr1->type),
+ TYPE_SIZE (vr2->type))))
return false;
-
- if (INTEGRAL_TYPE_P (vr1->type)
- && INTEGRAL_TYPE_P (vr2->type))
+ else if (INTEGRAL_TYPE_P (vr1->type)
+ && INTEGRAL_TYPE_P (vr2->type))
{
if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
return false;
@@ -744,6 +797,7 @@ vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
vn_reference_op_t vro1, vro2;
vn_reference_op_s tem1, tem2;
bool deref1 = false, deref2 = false;
+ bool reverse1 = false, reverse2 = false;
for (; vr1->operands.iterate (i, &vro1); i++)
{
if (vro1->opcode == MEM_REF)
@@ -751,6 +805,7 @@ vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
/* Do not look through a storage order barrier. */
else if (vro1->opcode == VIEW_CONVERT_EXPR && vro1->reverse)
return false;
+ reverse1 |= vro1->reverse;
if (known_eq (vro1->off, -1))
break;
off1 += vro1->off;
@@ -762,11 +817,12 @@ vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
/* Do not look through a storage order barrier. */
else if (vro2->opcode == VIEW_CONVERT_EXPR && vro2->reverse)
return false;
+ reverse2 |= vro2->reverse;
if (known_eq (vro2->off, -1))
break;
off2 += vro2->off;
}
- if (maybe_ne (off1, off2))
+ if (maybe_ne (off1, off2) || reverse1 != reverse2)
return false;
if (deref1 && vro1->opcode == ADDR_EXPR)
{
@@ -858,11 +914,13 @@ copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
break;
case COMPONENT_REF:
/* The field decl is enough to unambiguously specify the field,
- a matching type is not necessary and a mismatching type
- is always a spurious difference. */
- temp.type = NULL_TREE;
+ so use its type here. */
+ temp.type = TREE_TYPE (TREE_OPERAND (ref, 1));
temp.op0 = TREE_OPERAND (ref, 1);
temp.op1 = TREE_OPERAND (ref, 2);
+ temp.reverse = (AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (ref, 0)))
+ && TYPE_REVERSE_STORAGE_ORDER
+ (TREE_TYPE (TREE_OPERAND (ref, 0))));
{
tree this_offset = component_ref_field_offset (ref);
if (this_offset
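
For context, and not part of the patch: a GNU C type whose TYPE_REVERSE_STORAGE_ORDER is set (assuming a little-endian target), so that COMPONENT_REFs through it get temp.reverse set by the hunk above.  The names rev_s and get_f are made up.

  struct __attribute__ ((scalar_storage_order ("big-endian"))) rev_s
  {
    int f;
  };

  int
  get_f (struct rev_s *p)
  {
    return p->f;   /* component access with reversed storage order */
  }
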
@@ -876,12 +934,10 @@ copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
+ (wi::to_offset (bit_offset) >> LOG2_BITS_PER_UNIT));
 /* Prohibit value-numbering zero offset components
of addresses the same before the pass folding
- __builtin_object_size had a chance to run
- (checking cfun->after_inlining does the
- trick here). */
+ __builtin_object_size had a chance to run. */
if (TREE_CODE (orig) != ADDR_EXPR
|| maybe_ne (off, 0)
- || cfun->after_inlining)
+ || (cfun->curr_properties & PROP_objsz))
off.to_shwi (&temp.off);
}
}
@@ -911,6 +967,9 @@ copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
* vn_ref_op_align_unit (&temp));
off.to_shwi (&temp.off);
}
+ temp.reverse = (AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (ref, 0)))
+ && TYPE_REVERSE_STORAGE_ORDER
+ (TREE_TYPE (TREE_OPERAND (ref, 0))));
}
break;
case VAR_DECL:
@@ -991,9 +1050,8 @@ copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
bool
ao_ref_init_from_vn_reference (ao_ref *ref,
alias_set_type set, alias_set_type base_set,
- tree type, vec<vn_reference_op_s> ops)
+ tree type, const vec<vn_reference_op_s> &ops)
{
- vn_reference_op_t op;
unsigned i;
tree base = NULL_TREE;
tree *op0_p = &base;
@@ -1002,22 +1060,33 @@ ao_ref_init_from_vn_reference (ao_ref *ref,
poly_offset_int size = -1;
tree size_tree = NULL_TREE;
- /* First get the final access size from just the outermost expression. */
- op = &ops[0];
+ /* We don't handle calls. */
+ if (!type)
+ return false;
+
+ machine_mode mode = TYPE_MODE (type);
+ if (mode == BLKmode)
+ size_tree = TYPE_SIZE (type);
+ else
+ size = GET_MODE_BITSIZE (mode);
+ if (size_tree != NULL_TREE
+ && poly_int_tree_p (size_tree))
+ size = wi::to_poly_offset (size_tree);
+
+ /* Lower the final access size from the outermost expression. */
+ const_vn_reference_op_t cst_op = &ops[0];
+ /* Cast away constness for the sake of the const-unsafe
+ FOR_EACH_VEC_ELT(). */
+ vn_reference_op_t op = const_cast<vn_reference_op_t>(cst_op);
+ size_tree = NULL_TREE;
if (op->opcode == COMPONENT_REF)
size_tree = DECL_SIZE (op->op0);
else if (op->opcode == BIT_FIELD_REF)
size_tree = op->op0;
- else
- {
- machine_mode mode = TYPE_MODE (type);
- if (mode == BLKmode)
- size_tree = TYPE_SIZE (type);
- else
- size = GET_MODE_BITSIZE (mode);
- }
if (size_tree != NULL_TREE
- && poly_int_tree_p (size_tree))
+ && poly_int_tree_p (size_tree)
+ && (!known_size_p (size)
+ || known_lt (wi::to_poly_offset (size_tree), size)))
size = wi::to_poly_offset (size_tree);
/* Initially, maxsize is the same as the accessed element size.
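
An illustrative case, not from the patch, of why the access size is lowered from the outermost operand: for the bit-field read below the access type's mode is QImode on typical targets, so size starts at 8 bits, while the COMPONENT_REF's DECL_SIZE is 3 bits, and the known_lt check above lowers size to 3.  struct bits and read_b are made-up names.

  struct bits
  {
    unsigned int b : 3;
  };

  unsigned int
  read_b (struct bits x)
  {
    return x.b;   /* 3-bit field access; the enclosing mode is wider */
  }
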
@@ -1039,7 +1108,7 @@ ao_ref_init_from_vn_reference (ao_ref *ref,
&& op->op0
&& DECL_P (TREE_OPERAND (op->op0, 0)))
{
- vn_reference_op_t pop = &ops[i-1];
+ const_vn_reference_op_t pop = &ops[i-1];
base = TREE_OPERAND (op->op0, 0);
if (known_eq (pop->off, -1))
{
@@ -1427,6 +1496,7 @@ fully_constant_vn_reference_p (vn_reference_t ref)
/* Simplify reads from constants or constant initializers. */
else if (BITS_PER_UNIT == 8
+ && ref->type
&& COMPLETE_TYPE_P (ref->type)
&& is_gimple_reg_type (ref->type))
{
@@ -1521,13 +1591,33 @@ contains_storage_order_barrier_p (vec<vn_reference_op_s> ops)
return false;
}
+/* Return true if OPS represent an access with reverse storage order.  */
+
+static bool
+reverse_storage_order_for_component_p (vec<vn_reference_op_s> ops)
+{
+  unsigned i = 0;
+  if (ops[i].opcode == REALPART_EXPR || ops[i].opcode == IMAGPART_EXPR)
+    ++i;
+  switch (ops[i].opcode)
+    {
+    case ARRAY_REF:
+    case COMPONENT_REF:
+    case BIT_FIELD_REF:
+    case MEM_REF:
+      return ops[i].reverse;
+    default:
+      return false;
+    }
+}
+
/* Transform any SSA_NAME's in a vector of vn_reference_op_s
structures into their value numbers. This is done in-place, and
the vector passed in is returned. *VALUEIZED_ANYTHING will specify
whether any operands were valueized. */
-static vec<vn_reference_op_s>
-valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything,
+static void
+valueize_refs_1 (vec<vn_reference_op_s> *orig, bool *valueized_anything,
bool with_avail = false)
{
vn_reference_op_t vro;
@@ -1535,7 +1625,7 @@ valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything,
*valueized_anything = false;
- FOR_EACH_VEC_ELT (orig, i, vro)
+ FOR_EACH_VEC_ELT (*orig, i, vro)
{
if (vro->opcode == SSA_NAME
|| (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
@@ -1574,16 +1664,16 @@ valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything,
if (i > 0
&& vro->op0
&& TREE_CODE (vro->op0) == ADDR_EXPR
- && orig[i - 1].opcode == MEM_REF)
+ && (*orig)[i - 1].opcode == MEM_REF)
{
- if (vn_reference_fold_indirect (&orig, &i))
+ if (vn_reference_fold_indirect (orig, &i))
*valueized_anything = true;
}
else if (i > 0
&& vro->opcode == SSA_NAME
- && orig[i - 1].opcode == MEM_REF)
+ && (*orig)[i - 1].opcode == MEM_REF)
{
- if (vn_reference_maybe_forwprop_address (&orig, &i))
+ if (vn_reference_maybe_forwprop_address (orig, &i))
*valueized_anything = true;
}
/* If it transforms a non-constant ARRAY_REF into a constant
@@ -1601,15 +1691,13 @@ valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything,
off.to_shwi (&vro->off);
}
}
-
- return orig;
}
-static vec<vn_reference_op_s>
-valueize_refs (vec<vn_reference_op_s> orig)
+static void
+valueize_refs (vec<vn_reference_op_s> *orig)
{
bool tem;
- return valueize_refs_1 (orig, &tem);
+ valueize_refs_1 (orig, &tem);
}
static vec<vn_reference_op_s> shared_lookup_references;
@@ -1626,8 +1714,7 @@ valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
return vNULL;
shared_lookup_references.truncate (0);
copy_reference_ops_from_ref (ref, &shared_lookup_references);
- shared_lookup_references = valueize_refs_1 (shared_lookup_references,
- valueized_anything);
+ valueize_refs_1 (&shared_lookup_references, valueized_anything);
return shared_lookup_references;
}
@@ -1642,7 +1729,7 @@ valueize_shared_reference_ops_from_call (gcall *call)
return vNULL;
shared_lookup_references.truncate (0);
copy_reference_ops_from_call (call, &shared_lookup_references);
- shared_lookup_references = valueize_refs (shared_lookup_references);
+ valueize_refs (&shared_lookup_references);
return shared_lookup_references;
}
@@ -2329,7 +2416,7 @@ vn_nary_build_or_lookup_1 (gimple_match_op *res_op, bool insert)
vno1->length = length;
vno1->predicated_values = 0;
vno1->u.result = result;
- init_vn_nary_op_from_stmt (vno1, new_stmt);
+ init_vn_nary_op_from_stmt (vno1, as_a <gassign *> (new_stmt));
vn_nary_op_insert_into (vno1, valid_info->nary, true);
/* Also do not link it into the undo chain. */
last_inserted_nary = vno1->next;
@@ -2493,7 +2580,7 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_,
if (*disambiguate_only <= TR_VALUEIZE_AND_DISAMBIGUATE)
{
copy_reference_ops_from_ref (lhs, &lhs_ops);
- lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything, true);
+ valueize_refs_1 (&lhs_ops, &valueized_anything, true);
}
vn_context_bb = saved_rpo_bb;
ao_ref_init (&lhs_ref, lhs);
@@ -2840,6 +2927,7 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_,
routines to extract the assigned bits. */
else if (known_eq (ref->size, maxsize)
&& is_gimple_reg_type (vr->type)
+ && !reverse_storage_order_for_component_p (vr->operands)
&& !contains_storage_order_barrier_p (vr->operands)
&& gimple_assign_single_p (def_stmt)
&& CHAR_BIT == 8
@@ -2991,6 +3079,7 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_,
to access pieces from or we can combine to a larger entity. */
else if (known_eq (ref->size, maxsize)
&& is_gimple_reg_type (vr->type)
+ && !reverse_storage_order_for_component_p (vr->operands)
&& !contains_storage_order_barrier_p (vr->operands)
&& gimple_assign_single_p (def_stmt)
&& TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
@@ -3172,7 +3261,7 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_,
vr->operands.truncate (i + 1 + rhs.length ());
FOR_EACH_VEC_ELT (rhs, j, vro)
vr->operands[i + 1 + j] = *vro;
- vr->operands = valueize_refs (vr->operands);
+ valueize_refs (&vr->operands);
if (old == shared_lookup_references)
shared_lookup_references = vr->operands;
vr->hashcode = vn_reference_compute_hash (vr);
@@ -3473,8 +3562,9 @@ vn_reference_lookup_pieces (tree vuse, alias_set_type set,
operands.address (),
sizeof (vn_reference_op_s)
* operands.length ());
- vr1.operands = operands = shared_lookup_references
- = valueize_refs (shared_lookup_references);
+ bool valueized_p;
+ valueize_refs_1 (&shared_lookup_references, &valueized_p);
+ vr1.operands = shared_lookup_references;
vr1.type = type;
vr1.set = set;
vr1.base_set = base_set;
@@ -3490,13 +3580,31 @@ vn_reference_lookup_pieces (tree vuse, alias_set_type set,
ao_ref r;
unsigned limit = param_sccvn_max_alias_queries_per_access;
vn_walk_cb_data data (&vr1, NULL_TREE, NULL, kind, true, NULL_TREE);
+ vec<vn_reference_op_s> ops_for_ref;
+ if (!valueized_p)
+ ops_for_ref = vr1.operands;
+ else
+ {
+ /* For ao_ref_init_from_vn_reference we have to ensure only available
+ SSA names end up in the base and the only convenient way to make
+ this work for PRE is to re-valueize with that in mind. */
+ ops_for_ref.create (operands.length ());
+ ops_for_ref.quick_grow (operands.length ());
+ memcpy (ops_for_ref.address (),
+ operands.address (),
+ sizeof (vn_reference_op_s)
+ * operands.length ());
+ valueize_refs_1 (&ops_for_ref, &valueized_p, true);
+ }
if (ao_ref_init_from_vn_reference (&r, set, base_set, type,
- vr1.operands))
+ ops_for_ref))
*vnresult
= ((vn_reference_t)
walk_non_aliased_vuses (&r, vr1.vuse, true, vn_reference_lookup_2,
vn_reference_lookup_3, vuse_valueize,
limit, &data));
+ if (ops_for_ref != shared_lookup_references)
+ ops_for_ref.release ();
gcc_checking_assert (vr1.operands == shared_lookup_references);
}
@@ -3525,14 +3633,14 @@ vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
{
vec<vn_reference_op_s> operands;
struct vn_reference_s vr1;
- bool valuezied_anything;
+ bool valueized_anything;
if (vnresult)
*vnresult = NULL;
vr1.vuse = vuse_ssa_val (vuse);
vr1.operands = operands
- = valueize_shared_reference_ops_from_ref (op, &valuezied_anything);
+ = valueize_shared_reference_ops_from_ref (op, &valueized_anything);
vr1.type = TREE_TYPE (op);
ao_ref op_ref;
ao_ref_init (&op_ref, op);
@@ -3548,11 +3656,18 @@ vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
vn_reference_t wvnresult;
ao_ref r;
unsigned limit = param_sccvn_max_alias_queries_per_access;
+ auto_vec<vn_reference_op_s> ops_for_ref;
+ if (valueized_anything)
+ {
+ copy_reference_ops_from_ref (op, &ops_for_ref);
+ bool tem;
+ valueize_refs_1 (&ops_for_ref, &tem, true);
+ }
/* Make sure to use a valueized reference if we valueized anything.
Otherwise preserve the full reference for advanced TBAA. */
- if (!valuezied_anything
+ if (!valueized_anything
|| !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.base_set,
- vr1.type, vr1.operands))
+ vr1.type, ops_for_ref))
ao_ref_init (&r, op);
vn_walk_cb_data data (&vr1, r.ref ? NULL_TREE : op,
last_vuse_ptr, kind, tbaa_p, mask);
@@ -3597,7 +3712,10 @@ vn_reference_lookup_call (gcall *call, vn_reference_t *vnresult,
vr->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
vr->operands = valueize_shared_reference_ops_from_call (call);
- vr->type = gimple_expr_type (call);
+ tree lhs = gimple_call_lhs (call);
+ /* For non-SSA return values the reference ops contain the LHS. */
+ vr->type = ((lhs && TREE_CODE (lhs) == SSA_NAME)
+ ? TREE_TYPE (lhs) : NULL_TREE);
vr->punned = false;
vr->set = 0;
vr->base_set = 0;
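
An illustration, not part of the patch, of the two LHS cases handled above; foo, bar and agg are hypothetical.  The first call gets an SSA-name LHS after gimplification, so vr->type is its type; the second stores into the aggregate directly, so vr->type is NULL_TREE and the LHS appears in the reference operands.

  extern int foo (void);
  struct agg { int a[4]; };
  extern struct agg bar (void);

  void
  use_calls (void)
  {
    int i = foo ();          /* _1 = foo ();  vr->type == int  */
    struct agg s = bar ();   /* s = bar ();   vr->type == NULL_TREE  */
    (void) i;
    (void) s;
  }
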
@@ -3680,7 +3798,8 @@ vn_reference_insert_pieces (tree vuse, alias_set_type set,
vr1 = XOBNEW (&vn_tables_obstack, vn_reference_s);
vr1->value_id = value_id;
vr1->vuse = vuse_ssa_val (vuse);
- vr1->operands = valueize_refs (operands);
+ vr1->operands = operands;
+ valueize_refs (&vr1->operands);
vr1->type = type;
vr1->punned = false;
vr1->set = set;
@@ -3807,12 +3926,12 @@ vn_nary_length_from_stmt (gimple *stmt)
/* Initialize VNO from STMT. */
static void
-init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple *stmt)
+init_vn_nary_op_from_stmt (vn_nary_op_t vno, gassign *stmt)
{
unsigned i;
vno->opcode = gimple_assign_rhs_code (stmt);
- vno->type = gimple_expr_type (stmt);
+ vno->type = TREE_TYPE (gimple_assign_lhs (stmt));
switch (vno->opcode)
{
case REALPART_EXPR:
@@ -3893,7 +4012,7 @@ vn_nary_op_lookup_stmt (gimple *stmt, vn_nary_op_t *vnresult)
vn_nary_op_t vno1
= XALLOCAVAR (struct vn_nary_op_s,
sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
- init_vn_nary_op_from_stmt (vno1, stmt);
+ init_vn_nary_op_from_stmt (vno1, as_a <gassign *> (stmt));
return vn_nary_op_lookup_1 (vno1, vnresult);
}
@@ -3986,7 +4105,7 @@ vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type *table,
bool found = false;
for (vn_pval *val = (*slot)->u.values; val; val = val->next)
{
- if (expressions_equal_p (val->result, vno->u.values->result))
+ if (expressions_equal_p (val->result, nval->result))
{
found = true;
for (unsigned i = 0; i < val->n; ++i)
@@ -4118,7 +4237,7 @@ vn_nary_op_insert_pieces_predicated (unsigned int length, enum tree_code code,
}
static bool
-dominated_by_p_w_unex (basic_block bb1, basic_block bb2);
+dominated_by_p_w_unex (basic_block bb1, basic_block bb2, bool);
static tree
vn_nary_op_get_predicated_value (vn_nary_op_t vno, basic_block bb)
@@ -4127,9 +4246,12 @@ vn_nary_op_get_predicated_value (vn_nary_op_t vno, basic_block bb)
return vno->u.result;
for (vn_pval *val = vno->u.values; val; val = val->next)
for (unsigned i = 0; i < val->n; ++i)
- if (dominated_by_p_w_unex (bb,
- BASIC_BLOCK_FOR_FN
- (cfun, val->valid_dominated_by_p[i])))
+ /* Do not handle backedge executability optimistically since
+ when figuring out whether to iterate we do not consider
+ changed predication. */
+ if (dominated_by_p_w_unex
+ (bb, BASIC_BLOCK_FOR_FN (cfun, val->valid_dominated_by_p[i]),
+ false))
return val->result;
return NULL_TREE;
}
@@ -4143,7 +4265,7 @@ vn_nary_op_insert_stmt (gimple *stmt, tree result)
vn_nary_op_t vno1
= alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
result, VN_INFO (result)->value_id);
- init_vn_nary_op_from_stmt (vno1, stmt);
+ init_vn_nary_op_from_stmt (vno1, as_a <gassign *> (stmt));
return vn_nary_op_insert_into (vno1, valid_info->nary, true);
}
@@ -4429,10 +4551,11 @@ vn_phi_insert (gimple *phi, tree result, bool backedges_varying_p)
/* Return true if BB1 is dominated by BB2 taking into account edges
- that are not executable. */
+ that are not executable. When ALLOW_BACK is false, treat
+ not-executable backedges as executable. */
static bool
-dominated_by_p_w_unex (basic_block bb1, basic_block bb2)
+dominated_by_p_w_unex (basic_block bb1, basic_block bb2, bool allow_back)
{
edge_iterator ei;
edge e;
@@ -4449,7 +4572,8 @@ dominated_by_p_w_unex (basic_block bb1, basic_block bb2)
{
edge prede = NULL;
FOR_EACH_EDGE (e, ei, bb1->preds)
- if (e->flags & EDGE_EXECUTABLE)
+ if ((e->flags & EDGE_EXECUTABLE)
+ || (!allow_back && (e->flags & EDGE_DFS_BACK)))
{
if (prede)
{
@@ -4471,7 +4595,8 @@ dominated_by_p_w_unex (basic_block bb1, basic_block bb2)
/* Iterate to the single executable bb2 successor. */
edge succe = NULL;
FOR_EACH_EDGE (e, ei, bb2->succs)
- if (e->flags & EDGE_EXECUTABLE)
+ if ((e->flags & EDGE_EXECUTABLE)
+ || (!allow_back && (e->flags & EDGE_DFS_BACK)))
{
if (succe)
{
@@ -4489,7 +4614,8 @@ dominated_by_p_w_unex (basic_block bb1, basic_block bb2)
{
FOR_EACH_EDGE (e, ei, succe->dest->preds)
if (e != succe
- && (e->flags & EDGE_EXECUTABLE))
+ && ((e->flags & EDGE_EXECUTABLE)
+ || (!allow_back && (e->flags & EDGE_DFS_BACK))))
{
succe = NULL;
break;
@@ -5199,6 +5325,8 @@ visit_phi (gimple *phi, bool *inserted, bool backedges_varying_p)
{
tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
+ if (def == PHI_RESULT (phi))
+ continue;
++n_executable;
if (TREE_CODE (def) == SSA_NAME)
{
@@ -5723,6 +5851,7 @@ vn_reference_may_trap (vn_reference_t ref)
case MODIFY_EXPR:
case CALL_EXPR:
/* We do not handle calls. */
+ return true;
case ADDR_EXPR:
/* And toplevel address computations never trap. */
return false;
@@ -5744,10 +5873,33 @@ vn_reference_may_trap (vn_reference_t ref)
return true;
break;
case ARRAY_RANGE_REF:
- case ARRAY_REF:
if (TREE_CODE (op->op0) == SSA_NAME)
return true;
break;
+    case ARRAY_REF:
+      {
+        if (TREE_CODE (op->op0) != INTEGER_CST)
+          return true;
+
+        /* !in_array_bounds  */
+        tree domain_type = TYPE_DOMAIN (ref->operands[i+1].type);
+        if (!domain_type)
+          return true;
+
+        tree min = op->op1;
+        tree max = TYPE_MAX_VALUE (domain_type);
+        if (!min
+            || !max
+            || TREE_CODE (min) != INTEGER_CST
+            || TREE_CODE (max) != INTEGER_CST)
+          return true;
+
+        if (tree_int_cst_lt (op->op0, min)
+            || tree_int_cst_lt (max, op->op0))
+          return true;
+
+        break;
+      }
case MEM_REF:
/* Nothing interesting in itself, the base is separate. */
break;
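
A minimal illustration, not part of the patch, of the new in-bounds check for constant ARRAY_REF indexes; the array and function names are made up.  With int a[10] the domain is [0, 9], so a[12] makes tree_int_cst_lt (max, op->op0) true and vn_reference_may_trap returns true, while a[5] falls through to the break.

  int a[10];

  int
  read_ok (void)
  {
    return a[5];    /* within [0, 9]: not considered trapping here */
  }

  int
  read_oob (void)
  {
    return a[12];   /* 12 > 9: vn_reference_may_trap returns true */
  }
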
@@ -6302,7 +6454,7 @@ eliminate_dom_walker::eliminate_stmt (basic_block b, gimple_stmt_iterator *gsi)
recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
gimple_stmt_iterator prev = *gsi;
gsi_prev (&prev);
- if (fold_stmt (gsi))
+ if (fold_stmt (gsi, follow_all_ssa_edges))
{
/* fold_stmt may have created new stmts inbetween
the previous stmt and the folded stmt. Mark
@@ -6846,7 +6998,7 @@ rpo_elim::eliminate_avail (basic_block bb, tree op)
may also be able to "pre-compute" (bits of) the next immediate
(non-)dominator during the RPO walk when marking edges as
executable. */
- if (dominated_by_p_w_unex (bb, abb))
+ if (dominated_by_p_w_unex (bb, abb, true))
{
tree leader = ssa_name (av->leader);
/* Prevent eliminations that break loop-closed SSA. */
@@ -7553,9 +7705,8 @@ do_rpo_vn (function *fn, edge entry, bitmap exit_bbs,
loops and the outermost one optimistically. */
if (iterate)
{
- loop_p loop;
unsigned max_depth = param_rpo_vn_max_loop_depth;
- FOR_EACH_LOOP (loop, LI_ONLY_INNERMOST)
+ for (auto loop : loops_list (cfun, LI_ONLY_INNERMOST))
if (loop_depth (loop) > max_depth)
for (unsigned i = 2;
i < loop_depth (loop) - max_depth; ++i)