aboutsummaryrefslogtreecommitdiff
path: root/gcc/tree-ssa-sccvn.c
diff options
context:
space:
mode:
Diffstat (limited to 'gcc/tree-ssa-sccvn.c')
-rw-r--r-- gcc/tree-ssa-sccvn.c | 323
1 files changed, 153 insertions, 170 deletions
diff --git a/gcc/tree-ssa-sccvn.c b/gcc/tree-ssa-sccvn.c
index 2391632..ed89a5a 100644
--- a/gcc/tree-ssa-sccvn.c
+++ b/gcc/tree-ssa-sccvn.c
@@ -149,17 +149,15 @@ static unsigned int next_value_id;
detection. */
static unsigned int next_dfs_num;
-static VEC (tree, heap) *sccstack;
+static vec<tree> sccstack;
-DEF_VEC_P(vn_ssa_aux_t);
-DEF_VEC_ALLOC_P(vn_ssa_aux_t, heap);
/* Table of vn_ssa_aux_t's, one per ssa_name. The vn_ssa_aux_t objects
are allocated on an obstack for locality reasons, and to free them
- without looping over the VEC. */
+ without looping over the vec. */
-static VEC (vn_ssa_aux_t, heap) *vn_ssa_aux_table;
+static vec<vn_ssa_aux_t> vn_ssa_aux_table;
static struct obstack vn_ssa_aux_obstack;
/* Return the value numbering information for a given SSA name. */
@@ -167,8 +165,7 @@ static struct obstack vn_ssa_aux_obstack;
vn_ssa_aux_t
VN_INFO (tree name)
{
- vn_ssa_aux_t res = VEC_index (vn_ssa_aux_t, vn_ssa_aux_table,
- SSA_NAME_VERSION (name));
+ vn_ssa_aux_t res = vn_ssa_aux_table[SSA_NAME_VERSION (name)];
gcc_checking_assert (res);
return res;
}
@@ -179,8 +176,7 @@ VN_INFO (tree name)
static inline void
VN_INFO_SET (tree name, vn_ssa_aux_t value)
{
- VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
- SSA_NAME_VERSION (name), value);
+ vn_ssa_aux_table[SSA_NAME_VERSION (name)] = value;
}
/* Initialize the value numbering info for a given SSA name.
@@ -193,11 +189,9 @@ VN_INFO_GET (tree name)
newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
memset (newinfo, 0, sizeof (struct vn_ssa_aux));
- if (SSA_NAME_VERSION (name) >= VEC_length (vn_ssa_aux_t, vn_ssa_aux_table))
- VEC_safe_grow (vn_ssa_aux_t, heap, vn_ssa_aux_table,
- SSA_NAME_VERSION (name) + 1);
- VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
- SSA_NAME_VERSION (name), newinfo);
+ if (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ())
+ vn_ssa_aux_table.safe_grow (SSA_NAME_VERSION (name) + 1);
+ vn_ssa_aux_table[SSA_NAME_VERSION (name)] = newinfo;
return newinfo;
}
@@ -351,7 +345,7 @@ static void
free_phi (void *vp)
{
vn_phi_t phi = (vn_phi_t) vp;
- VEC_free (tree, heap, phi->phiargs);
+ phi->phiargs.release ();
}
/* Free a reference operation structure VP. */
@@ -360,7 +354,7 @@ static void
free_reference (void *vp)
{
vn_reference_t vr = (vn_reference_t) vp;
- VEC_free (vn_reference_op_s, heap, vr->operands);
+ vr->operands.release ();
}
/* Hash table equality function for vn_constant_t. */
@@ -493,7 +487,7 @@ vn_reference_compute_hash (const vn_reference_t vr1)
HOST_WIDE_INT off = -1;
bool deref = false;
- FOR_EACH_VEC_ELT (vn_reference_op_s, vr1->operands, i, vro)
+ FOR_EACH_VEC_ELT (vr1->operands, i, vro)
{
if (vro->opcode == MEM_REF)
deref = true;
@@ -582,7 +576,7 @@ vn_reference_eq (const void *p1, const void *p2)
vn_reference_op_t vro1, vro2;
vn_reference_op_s tem1, tem2;
bool deref1 = false, deref2 = false;
- for (; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro1); i++)
+ for (; vr1->operands.iterate (i, &vro1); i++)
{
if (vro1->opcode == MEM_REF)
deref1 = true;
@@ -590,7 +584,7 @@ vn_reference_eq (const void *p1, const void *p2)
break;
off1 += vro1->off;
}
- for (; VEC_iterate (vn_reference_op_s, vr2->operands, j, vro2); j++)
+ for (; vr2->operands.iterate (j, &vro2); j++)
{
if (vro2->opcode == MEM_REF)
deref2 = true;
@@ -625,8 +619,8 @@ vn_reference_eq (const void *p1, const void *p2)
++j;
++i;
}
- while (VEC_length (vn_reference_op_s, vr1->operands) != i
- || VEC_length (vn_reference_op_s, vr2->operands) != j);
+ while (vr1->operands.length () != i
+ || vr2->operands.length () != j);
return true;
}
@@ -635,7 +629,7 @@ vn_reference_eq (const void *p1, const void *p2)
vn_reference_op_s's. */
void
-copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
+copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
{
if (TREE_CODE (ref) == TARGET_MEM_REF)
{
@@ -648,21 +642,21 @@ copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
temp.op1 = TMR_STEP (ref);
temp.op2 = TMR_OFFSET (ref);
temp.off = -1;
- VEC_safe_push (vn_reference_op_s, heap, *result, temp);
+ result->safe_push (temp);
memset (&temp, 0, sizeof (temp));
temp.type = NULL_TREE;
temp.opcode = ERROR_MARK;
temp.op0 = TMR_INDEX2 (ref);
temp.off = -1;
- VEC_safe_push (vn_reference_op_s, heap, *result, temp);
+ result->safe_push (temp);
memset (&temp, 0, sizeof (temp));
temp.type = NULL_TREE;
temp.opcode = TREE_CODE (TMR_BASE (ref));
temp.op0 = TMR_BASE (ref);
temp.off = -1;
- VEC_safe_push (vn_reference_op_s, heap, *result, temp);
+ result->safe_push (temp);
return;
}
@@ -757,7 +751,7 @@ copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
temp.opcode = MEM_REF;
temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
temp.off = 0;
- VEC_safe_push (vn_reference_op_s, heap, *result, temp);
+ result->safe_push (temp);
temp.opcode = ADDR_EXPR;
temp.op0 = build_fold_addr_expr (ref);
temp.type = TREE_TYPE (temp.op0);
@@ -796,7 +790,7 @@ copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
default:
gcc_unreachable ();
}
- VEC_safe_push (vn_reference_op_s, heap, *result, temp);
+ result->safe_push (temp);
if (REFERENCE_CLASS_P (ref)
|| TREE_CODE (ref) == MODIFY_EXPR
@@ -816,7 +810,7 @@ copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
bool
ao_ref_init_from_vn_reference (ao_ref *ref,
alias_set_type set, tree type,
- VEC (vn_reference_op_s, heap) *ops)
+ vec<vn_reference_op_s> ops)
{
vn_reference_op_t op;
unsigned i;
@@ -829,7 +823,7 @@ ao_ref_init_from_vn_reference (ao_ref *ref,
alias_set_type base_alias_set = -1;
/* First get the final access size from just the outermost expression. */
- op = &VEC_index (vn_reference_op_s, ops, 0);
+ op = &ops[0];
if (op->opcode == COMPONENT_REF)
size_tree = DECL_SIZE (op->op0);
else if (op->opcode == BIT_FIELD_REF)
@@ -856,7 +850,7 @@ ao_ref_init_from_vn_reference (ao_ref *ref,
/* Compute cumulative bit-offset for nested component-refs and array-refs,
and find the ultimate containing object. */
- FOR_EACH_VEC_ELT (vn_reference_op_s, ops, i, op)
+ FOR_EACH_VEC_ELT (ops, i, op)
{
switch (op->opcode)
{
@@ -869,7 +863,7 @@ ao_ref_init_from_vn_reference (ao_ref *ref,
&& op->op0
&& DECL_P (TREE_OPERAND (op->op0, 0)))
{
- vn_reference_op_t pop = &VEC_index (vn_reference_op_s, ops, i-1);
+ vn_reference_op_t pop = &ops[i-1];
base = TREE_OPERAND (op->op0, 0);
if (pop->off == -1)
{
@@ -990,7 +984,7 @@ ao_ref_init_from_vn_reference (ao_ref *ref,
void
copy_reference_ops_from_call (gimple call,
- VEC(vn_reference_op_s, heap) **result)
+ vec<vn_reference_op_s> *result)
{
vn_reference_op_s temp;
unsigned i;
@@ -1006,7 +1000,7 @@ copy_reference_ops_from_call (gimple call,
temp.type = TREE_TYPE (lhs);
temp.op0 = lhs;
temp.off = -1;
- VEC_safe_push (vn_reference_op_s, heap, *result, temp);
+ result->safe_push (temp);
}
/* Copy the type, opcode, function being called and static chain. */
@@ -1016,7 +1010,7 @@ copy_reference_ops_from_call (gimple call,
temp.op0 = gimple_call_fn (call);
temp.op1 = gimple_call_chain (call);
temp.off = -1;
- VEC_safe_push (vn_reference_op_s, heap, *result, temp);
+ result->safe_push (temp);
/* Copy the call arguments. As they can be references as well,
just chain them together. */
@@ -1030,10 +1024,10 @@ copy_reference_ops_from_call (gimple call,
/* Create a vector of vn_reference_op_s structures from REF, a
REFERENCE_CLASS_P tree. The vector is not shared. */
-static VEC(vn_reference_op_s, heap) *
+static vec<vn_reference_op_s>
create_reference_ops_from_ref (tree ref)
{
- VEC (vn_reference_op_s, heap) *result = NULL;
+ vec<vn_reference_op_s> result = vec<vn_reference_op_s>();
copy_reference_ops_from_ref (ref, &result);
return result;
@@ -1042,10 +1036,10 @@ create_reference_ops_from_ref (tree ref)
/* Create a vector of vn_reference_op_s structures from CALL, a
call statement. The vector is not shared. */
-static VEC(vn_reference_op_s, heap) *
+static vec<vn_reference_op_s>
create_reference_ops_from_call (gimple call)
{
- VEC (vn_reference_op_s, heap) *result = NULL;
+ vec<vn_reference_op_s> result = vec<vn_reference_op_s>();
copy_reference_ops_from_call (call, &result);
return result;
@@ -1054,12 +1048,12 @@ create_reference_ops_from_call (gimple call)
/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
*I_P to point to the last element of the replacement. */
void
-vn_reference_fold_indirect (VEC (vn_reference_op_s, heap) **ops,
+vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
unsigned int *i_p)
{
unsigned int i = *i_p;
- vn_reference_op_t op = &VEC_index (vn_reference_op_s, *ops, i);
- vn_reference_op_t mem_op = &VEC_index (vn_reference_op_s, *ops, i - 1);
+ vn_reference_op_t op = &(*ops)[i];
+ vn_reference_op_t mem_op = &(*ops)[i - 1];
tree addr_base;
HOST_WIDE_INT addr_offset = 0;
@@ -1086,12 +1080,12 @@ vn_reference_fold_indirect (VEC (vn_reference_op_s, heap) **ops,
/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
*I_P to point to the last element of the replacement. */
static void
-vn_reference_maybe_forwprop_address (VEC (vn_reference_op_s, heap) **ops,
+vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
unsigned int *i_p)
{
unsigned int i = *i_p;
- vn_reference_op_t op = &VEC_index (vn_reference_op_s, *ops, i);
- vn_reference_op_t mem_op = &VEC_index (vn_reference_op_s, *ops, i - 1);
+ vn_reference_op_t op = &(*ops)[i];
+ vn_reference_op_t mem_op = &(*ops)[i - 1];
gimple def_stmt;
enum tree_code code;
double_int off;
@@ -1163,24 +1157,24 @@ vn_reference_maybe_forwprop_address (VEC (vn_reference_op_s, heap) **ops,
tree
fully_constant_vn_reference_p (vn_reference_t ref)
{
- VEC (vn_reference_op_s, heap) *operands = ref->operands;
+ vec<vn_reference_op_s> operands = ref->operands;
vn_reference_op_t op;
/* Try to simplify the translated expression if it is
a call to a builtin function with at most two arguments. */
- op = &VEC_index (vn_reference_op_s, operands, 0);
+ op = &operands[0];
if (op->opcode == CALL_EXPR
&& TREE_CODE (op->op0) == ADDR_EXPR
&& TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
&& DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
- && VEC_length (vn_reference_op_s, operands) >= 2
- && VEC_length (vn_reference_op_s, operands) <= 3)
+ && operands.length () >= 2
+ && operands.length () <= 3)
{
vn_reference_op_t arg0, arg1 = NULL;
bool anyconst = false;
- arg0 = &VEC_index (vn_reference_op_s, operands, 1);
- if (VEC_length (vn_reference_op_s, operands) > 2)
- arg1 = &VEC_index (vn_reference_op_s, operands, 2);
+ arg0 = &operands[1];
+ if (operands.length () > 2)
+ arg1 = &operands[2];
if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
|| (arg0->opcode == ADDR_EXPR
&& is_gimple_min_invariant (arg0->op0)))
@@ -1209,10 +1203,10 @@ fully_constant_vn_reference_p (vn_reference_t ref)
else if (op->opcode == ARRAY_REF
&& TREE_CODE (op->op0) == INTEGER_CST
&& integer_zerop (op->op1)
- && VEC_length (vn_reference_op_s, operands) == 2)
+ && operands.length () == 2)
{
vn_reference_op_t arg0;
- arg0 = &VEC_index (vn_reference_op_s, operands, 1);
+ arg0 = &operands[1];
if (arg0->opcode == STRING_CST
&& (TYPE_MODE (op->type)
== TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0->op0))))
@@ -1232,15 +1226,15 @@ fully_constant_vn_reference_p (vn_reference_t ref)
the vector passed in is returned. *VALUEIZED_ANYTHING will specify
whether any operands were valueized. */
-static VEC (vn_reference_op_s, heap) *
-valueize_refs_1 (VEC (vn_reference_op_s, heap) *orig, bool *valueized_anything)
+static vec<vn_reference_op_s>
+valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything)
{
vn_reference_op_t vro;
unsigned int i;
*valueized_anything = false;
- FOR_EACH_VEC_ELT (vn_reference_op_s, orig, i, vro)
+ FOR_EACH_VEC_ELT (orig, i, vro)
{
if (vro->opcode == SSA_NAME
|| (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
@@ -1279,13 +1273,11 @@ valueize_refs_1 (VEC (vn_reference_op_s, heap) *orig, bool *valueized_anything)
if (i > 0
&& vro->op0
&& TREE_CODE (vro->op0) == ADDR_EXPR
- && VEC_index (vn_reference_op_s,
- orig, i - 1).opcode == MEM_REF)
+ && orig[i - 1].opcode == MEM_REF)
vn_reference_fold_indirect (&orig, &i);
else if (i > 0
&& vro->opcode == SSA_NAME
- && VEC_index (vn_reference_op_s,
- orig, i - 1).opcode == MEM_REF)
+ && orig[i - 1].opcode == MEM_REF)
vn_reference_maybe_forwprop_address (&orig, &i);
/* If it transforms a non-constant ARRAY_REF into a constant
one, adjust the constant offset. */
@@ -1306,26 +1298,26 @@ valueize_refs_1 (VEC (vn_reference_op_s, heap) *orig, bool *valueized_anything)
return orig;
}
-static VEC (vn_reference_op_s, heap) *
-valueize_refs (VEC (vn_reference_op_s, heap) *orig)
+static vec<vn_reference_op_s>
+valueize_refs (vec<vn_reference_op_s> orig)
{
bool tem;
return valueize_refs_1 (orig, &tem);
}
-static VEC(vn_reference_op_s, heap) *shared_lookup_references;
+static vec<vn_reference_op_s> shared_lookup_references;
/* Create a vector of vn_reference_op_s structures from REF, a
REFERENCE_CLASS_P tree. The vector is shared among all callers of
this function. *VALUEIZED_ANYTHING will specify whether any
operands were valueized. */
-static VEC(vn_reference_op_s, heap) *
+static vec<vn_reference_op_s>
valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
{
if (!ref)
- return NULL;
- VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
+ return vec<vn_reference_op_s>();
+ shared_lookup_references.truncate (0);
copy_reference_ops_from_ref (ref, &shared_lookup_references);
shared_lookup_references = valueize_refs_1 (shared_lookup_references,
valueized_anything);
@@ -1336,12 +1328,12 @@ valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
call statement. The vector is shared among all callers of
this function. */
-static VEC(vn_reference_op_s, heap) *
+static vec<vn_reference_op_s>
valueize_shared_reference_ops_from_call (gimple call)
{
if (!call)
- return NULL;
- VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
+ return vec<vn_reference_op_s>();
+ shared_lookup_references.truncate (0);
copy_reference_ops_from_call (call, &shared_lookup_references);
shared_lookup_references = valueize_refs (shared_lookup_references);
return shared_lookup_references;
@@ -1425,8 +1417,8 @@ static vn_reference_t
vn_reference_lookup_or_insert_for_pieces (tree vuse,
alias_set_type set,
tree type,
- VEC (vn_reference_op_s,
- heap) *operands,
+ vec<vn_reference_op_s,
+ va_heap> operands,
tree value)
{
struct vn_reference_s vr1;
@@ -1444,8 +1436,7 @@ vn_reference_lookup_or_insert_for_pieces (tree vuse,
else
value_id = get_or_alloc_constant_value_id (value);
return vn_reference_insert_pieces (vuse, set, type,
- VEC_copy (vn_reference_op_s, heap,
- operands), value, value_id);
+ operands.copy (), value, value_id);
}
/* Callback for walk_non_aliased_vuses. Tries to perform a lookup
@@ -1460,18 +1451,19 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_)
gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
tree base;
HOST_WIDE_INT offset, maxsize;
- static VEC (vn_reference_op_s, heap) *lhs_ops = NULL;
+ static vec<vn_reference_op_s>
+ lhs_ops = vec<vn_reference_op_s>();
ao_ref lhs_ref;
bool lhs_ref_ok = false;
/* First try to disambiguate after value-replacing in the definitions LHS. */
if (is_gimple_assign (def_stmt))
{
- VEC (vn_reference_op_s, heap) *tem;
+ vec<vn_reference_op_s> tem;
tree lhs = gimple_assign_lhs (def_stmt);
bool valueized_anything = false;
/* Avoid re-allocation overhead. */
- VEC_truncate (vn_reference_op_s, lhs_ops, 0);
+ lhs_ops.truncate (0);
copy_reference_ops_from_ref (lhs, &lhs_ops);
tem = lhs_ops;
lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything);
@@ -1665,7 +1657,8 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_)
tree base2;
HOST_WIDE_INT offset2, size2, maxsize2;
int i, j;
- VEC (vn_reference_op_s, heap) *rhs = NULL;
+ vec<vn_reference_op_s>
+ rhs = vec<vn_reference_op_s>();
vn_reference_op_t vro;
ao_ref r;
@@ -1685,12 +1678,10 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_)
/* Find the common base of ref and the lhs. lhs_ops already
contains valueized operands for the lhs. */
- i = VEC_length (vn_reference_op_s, vr->operands) - 1;
- j = VEC_length (vn_reference_op_s, lhs_ops) - 1;
+ i = vr->operands.length () - 1;
+ j = lhs_ops.length () - 1;
while (j >= 0 && i >= 0
- && vn_reference_op_eq (&VEC_index (vn_reference_op_s,
- vr->operands, i),
- &VEC_index (vn_reference_op_s, lhs_ops, j)))
+ && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
{
i--;
j--;
@@ -1703,10 +1694,9 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_)
don't care here - further lookups with the rewritten operands
will simply fail if we messed up types too badly. */
if (j == 0 && i >= 0
- && VEC_index (vn_reference_op_s, lhs_ops, 0).opcode == MEM_REF
- && VEC_index (vn_reference_op_s, lhs_ops, 0).off != -1
- && (VEC_index (vn_reference_op_s, lhs_ops, 0).off
- == VEC_index (vn_reference_op_s, vr->operands, i).off))
+ && lhs_ops[0].opcode == MEM_REF
+ && lhs_ops[0].off != -1
+ && (lhs_ops[0].off == vr->operands[i].off))
i--, j--;
/* i now points to the first additional op.
@@ -1719,22 +1709,19 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_)
/* Now re-write REF to be based on the rhs of the assignment. */
copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
/* We need to pre-pend vr->operands[0..i] to rhs. */
- if (i + 1 + VEC_length (vn_reference_op_s, rhs)
- > VEC_length (vn_reference_op_s, vr->operands))
+ if (i + 1 + rhs.length () > vr->operands.length ())
{
- VEC (vn_reference_op_s, heap) *old = vr->operands;
- VEC_safe_grow (vn_reference_op_s, heap, vr->operands,
- i + 1 + VEC_length (vn_reference_op_s, rhs));
+ vec<vn_reference_op_s> old = vr->operands;
+ vr->operands.safe_grow (i + 1 + rhs.length ());
if (old == shared_lookup_references
&& vr->operands != old)
- shared_lookup_references = NULL;
+ shared_lookup_references = vec<vn_reference_op_s>();
}
else
- VEC_truncate (vn_reference_op_s, vr->operands,
- i + 1 + VEC_length (vn_reference_op_s, rhs));
- FOR_EACH_VEC_ELT (vn_reference_op_s, rhs, j, vro)
- VEC_replace (vn_reference_op_s, vr->operands, i + 1 + j, *vro);
- VEC_free (vn_reference_op_s, heap, rhs);
+ vr->operands.truncate (i + 1 + rhs.length ());
+ FOR_EACH_VEC_ELT (rhs, j, vro)
+ vr->operands[i + 1 + j] = *vro;
+ rhs.release ();
vr->operands = valueize_refs (vr->operands);
vr->hashcode = vn_reference_compute_hash (vr);
@@ -1854,16 +1841,16 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_)
return (void *)-1;
/* Make room for 2 operands in the new reference. */
- if (VEC_length (vn_reference_op_s, vr->operands) < 2)
+ if (vr->operands.length () < 2)
{
- VEC (vn_reference_op_s, heap) *old = vr->operands;
- VEC_safe_grow (vn_reference_op_s, heap, vr->operands, 2);
+ vec<vn_reference_op_s> old = vr->operands;
+ vr->operands.safe_grow_cleared (2);
if (old == shared_lookup_references
&& vr->operands != old)
- shared_lookup_references = NULL;
+ shared_lookup_references.create (0);
}
else
- VEC_truncate (vn_reference_op_s, vr->operands, 2);
+ vr->operands.truncate (2);
/* The looked-through reference is a simple MEM_REF. */
memset (&op, 0, sizeof (op));
@@ -1871,12 +1858,12 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_)
op.opcode = MEM_REF;
op.op0 = build_int_cst (ptr_type_node, at - rhs_offset);
op.off = at - lhs_offset + rhs_offset;
- VEC_replace (vn_reference_op_s, vr->operands, 0, op);
+ vr->operands[0] = op;
op.type = TREE_TYPE (rhs);
op.opcode = TREE_CODE (rhs);
op.op0 = rhs;
op.off = -1;
- VEC_replace (vn_reference_op_s, vr->operands, 1, op);
+ vr->operands[1] = op;
vr->hashcode = vn_reference_compute_hash (vr);
/* Adjust *ref from the new operands. */
@@ -1905,7 +1892,7 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_)
tree
vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
- VEC (vn_reference_op_s, heap) *operands,
+ vec<vn_reference_op_s> operands,
vn_reference_t *vnresult, vn_lookup_kind kind)
{
struct vn_reference_s vr1;
@@ -1917,13 +1904,12 @@ vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
*vnresult = NULL;
vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
- VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
- VEC_safe_grow (vn_reference_op_s, heap, shared_lookup_references,
- VEC_length (vn_reference_op_s, operands));
- memcpy (VEC_address (vn_reference_op_s, shared_lookup_references),
- VEC_address (vn_reference_op_s, operands),
+ shared_lookup_references.truncate (0);
+ shared_lookup_references.safe_grow (operands.length ());
+ memcpy (shared_lookup_references.address (),
+ operands.address (),
sizeof (vn_reference_op_s)
- * VEC_length (vn_reference_op_s, operands));
+ * operands.length ());
vr1.operands = operands = shared_lookup_references
= valueize_refs (shared_lookup_references);
vr1.type = type;
@@ -1945,7 +1931,7 @@ vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
vn_reference_lookup_2,
vn_reference_lookup_3, &vr1);
if (vr1.operands != operands)
- VEC_free (vn_reference_op_s, heap, vr1.operands);
+ vr1.operands.release ();
}
if (*vnresult)
@@ -1964,7 +1950,7 @@ tree
vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
vn_reference_t *vnresult)
{
- VEC (vn_reference_op_s, heap) *operands;
+ vec<vn_reference_op_s> operands;
struct vn_reference_s vr1;
tree cst;
bool valuezied_anything;
@@ -1998,7 +1984,7 @@ vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
vn_reference_lookup_2,
vn_reference_lookup_3, &vr1);
if (vr1.operands != operands)
- VEC_free (vn_reference_op_s, heap, vr1.operands);
+ vr1.operands.release ();
if (wvnresult)
{
if (vnresult)
@@ -2060,7 +2046,7 @@ vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
vn_reference_t
vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
- VEC (vn_reference_op_s, heap) *operands,
+ vec<vn_reference_op_s> operands,
tree result, unsigned int value_id)
{
@@ -2416,12 +2402,12 @@ vn_phi_compute_hash (vn_phi_t vp1)
/* If all PHI arguments are constants we need to distinguish
the PHI node via its type. */
- type = TREE_TYPE (VEC_index (tree, vp1->phiargs, 0));
+ type = TREE_TYPE (vp1->phiargs[0]);
result += (INTEGRAL_TYPE_P (type)
? TYPE_PRECISION (type) + TYPE_UNSIGNED (type) : 0);
- FOR_EACH_VEC_ELT (tree, vp1->phiargs, i, phi1op)
+ FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
{
if (phi1op == VN_TOP)
continue;
@@ -2458,15 +2444,15 @@ vn_phi_eq (const void *p1, const void *p2)
/* If the PHI nodes do not have compatible types
they are not the same. */
- if (!types_compatible_p (TREE_TYPE (VEC_index (tree, vp1->phiargs, 0)),
- TREE_TYPE (VEC_index (tree, vp2->phiargs, 0))))
+ if (!types_compatible_p (TREE_TYPE (vp1->phiargs[0]),
+ TREE_TYPE (vp2->phiargs[0])))
return false;
/* Any phi in the same block will have it's arguments in the
same edge order, because of how we store phi nodes. */
- FOR_EACH_VEC_ELT (tree, vp1->phiargs, i, phi1op)
+ FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
{
- tree phi2op = VEC_index (tree, vp2->phiargs, i);
+ tree phi2op = vp2->phiargs[i];
if (phi1op == VN_TOP || phi2op == VN_TOP)
continue;
if (!expressions_equal_p (phi1op, phi2op))
@@ -2477,7 +2463,7 @@ vn_phi_eq (const void *p1, const void *p2)
return false;
}
-static VEC(tree, heap) *shared_lookup_phiargs;
+static vec<tree> shared_lookup_phiargs;
/* Lookup PHI in the current hash table, and return the resulting
value number if it exists in the hash table. Return NULL_TREE if
@@ -2490,14 +2476,14 @@ vn_phi_lookup (gimple phi)
struct vn_phi_s vp1;
unsigned i;
- VEC_truncate (tree, shared_lookup_phiargs, 0);
+ shared_lookup_phiargs.truncate (0);
/* Canonicalize the SSA_NAME's to their value number. */
for (i = 0; i < gimple_phi_num_args (phi); i++)
{
tree def = PHI_ARG_DEF (phi, i);
def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
- VEC_safe_push (tree, heap, shared_lookup_phiargs, def);
+ shared_lookup_phiargs.safe_push (def);
}
vp1.phiargs = shared_lookup_phiargs;
vp1.block = gimple_bb (phi);
@@ -2521,14 +2507,14 @@ vn_phi_insert (gimple phi, tree result)
void **slot;
vn_phi_t vp1 = (vn_phi_t) pool_alloc (current_info->phis_pool);
unsigned i;
- VEC (tree, heap) *args = NULL;
+ vec<tree> args = vec<tree>();
/* Canonicalize the SSA_NAME's to their value number. */
for (i = 0; i < gimple_phi_num_args (phi); i++)
{
tree def = PHI_ARG_DEF (phi, i);
def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
- VEC_safe_push (tree, heap, args, def);
+ args.safe_push (def);
}
vp1->value_id = VN_INFO (result)->value_id;
vp1->phiargs = args;
@@ -2549,13 +2535,13 @@ vn_phi_insert (gimple phi, tree result)
/* Print set of components in strongly connected component SCC to OUT. */
static void
-print_scc (FILE *out, VEC (tree, heap) *scc)
+print_scc (FILE *out, vec<tree> scc)
{
tree var;
unsigned int i;
fprintf (out, "SCC consists of:");
- FOR_EACH_VEC_ELT (tree, scc, i, var)
+ FOR_EACH_VEC_ELT (scc, i, var)
{
fprintf (out, " ");
print_generic_expr (out, var, 0);
@@ -3557,9 +3543,9 @@ compare_ops (const void *pa, const void *pb)
array will give you the members in RPO order. */
static void
-sort_scc (VEC (tree, heap) *scc)
+sort_scc (vec<tree> scc)
{
- VEC_qsort (tree, scc, compare_ops);
+ scc.qsort (compare_ops);
}
/* Insert the no longer used nary ONARY to the hash INFO. */
@@ -3582,7 +3568,7 @@ copy_phi (vn_phi_t ophi, vn_tables_t info)
vn_phi_t phi = (vn_phi_t) pool_alloc (info->phis_pool);
void **slot;
memcpy (phi, ophi, sizeof (*phi));
- ophi->phiargs = NULL;
+ ophi->phiargs.create (0);
slot = htab_find_slot_with_hash (info->phis, phi, phi->hashcode, INSERT);
gcc_assert (!*slot);
*slot = phi;
@@ -3597,7 +3583,7 @@ copy_reference (vn_reference_t oref, vn_tables_t info)
void **slot;
ref = (vn_reference_t) pool_alloc (info->references_pool);
memcpy (ref, oref, sizeof (*ref));
- oref->operands = NULL;
+ oref->operands.create (0);
slot = htab_find_slot_with_hash (info->references, ref, ref->hashcode,
INSERT);
if (*slot)
@@ -3608,7 +3594,7 @@ copy_reference (vn_reference_t oref, vn_tables_t info)
/* Process a strongly connected component in the SSA graph. */
static void
-process_scc (VEC (tree, heap) *scc)
+process_scc (vec<tree> scc)
{
tree var;
unsigned int i;
@@ -3620,9 +3606,9 @@ process_scc (VEC (tree, heap) *scc)
vn_reference_t ref;
/* If the SCC has a single member, just visit it. */
- if (VEC_length (tree, scc) == 1)
+ if (scc.length () == 1)
{
- tree use = VEC_index (tree, scc, 0);
+ tree use = scc[0];
if (VN_INFO (use)->use_processed)
return;
/* We need to make sure it doesn't form a cycle itself, which can
@@ -3658,9 +3644,9 @@ process_scc (VEC (tree, heap) *scc)
gcc_obstack_init (&optimistic_info->nary_obstack);
empty_alloc_pool (optimistic_info->phis_pool);
empty_alloc_pool (optimistic_info->references_pool);
- FOR_EACH_VEC_ELT (tree, scc, i, var)
+ FOR_EACH_VEC_ELT (scc, i, var)
VN_INFO (var)->expr = NULL_TREE;
- FOR_EACH_VEC_ELT (tree, scc, i, var)
+ FOR_EACH_VEC_ELT (scc, i, var)
changed |= visit_use (var);
}
@@ -3678,8 +3664,6 @@ process_scc (VEC (tree, heap) *scc)
current_info = valid_info;
}
-DEF_VEC_O(ssa_op_iter);
-DEF_VEC_ALLOC_O(ssa_op_iter,heap);
/* Pop the components of the found SCC for NAME off the SCC stack
and process them. Returns true if all went well, false if
@@ -3688,33 +3672,33 @@ DEF_VEC_ALLOC_O(ssa_op_iter,heap);
static bool
extract_and_process_scc_for_name (tree name)
{
- VEC (tree, heap) *scc = NULL;
+ vec<tree> scc = vec<tree>();
tree x;
/* Found an SCC, pop the components off the SCC stack and
process them. */
do
{
- x = VEC_pop (tree, sccstack);
+ x = sccstack.pop ();
VN_INFO (x)->on_sccstack = false;
- VEC_safe_push (tree, heap, scc, x);
+ scc.safe_push (x);
} while (x != name);
/* Bail out of SCCVN in case a SCC turns out to be incredibly large. */
- if (VEC_length (tree, scc)
+ if (scc.length ()
> (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
{
if (dump_file)
fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
- "SCC size %u exceeding %u\n", VEC_length (tree, scc),
+ "SCC size %u exceeding %u\n", scc.length (),
(unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
- VEC_free (tree, heap, scc);
+ scc.release ();
return false;
}
- if (VEC_length (tree, scc) > 1)
+ if (scc.length () > 1)
sort_scc (scc);
if (dump_file && (dump_flags & TDF_DETAILS))
@@ -3722,7 +3706,7 @@ extract_and_process_scc_for_name (tree name)
process_scc (scc);
- VEC_free (tree, heap, scc);
+ scc.release ();
return true;
}
@@ -3737,8 +3721,8 @@ extract_and_process_scc_for_name (tree name)
static bool
DFS (tree name)
{
- VEC(ssa_op_iter, heap) *itervec = NULL;
- VEC(tree, heap) *namevec = NULL;
+ vec<ssa_op_iter> itervec = vec<ssa_op_iter>();
+ vec<tree> namevec = vec<tree>();
use_operand_p usep = NULL;
gimple defstmt;
tree use;
@@ -3750,7 +3734,7 @@ start_over:
VN_INFO (name)->visited = true;
VN_INFO (name)->low = VN_INFO (name)->dfsnum;
- VEC_safe_push (tree, heap, sccstack, name);
+ sccstack.safe_push (name);
VN_INFO (name)->on_sccstack = true;
defstmt = SSA_NAME_DEF_STMT (name);
@@ -3776,25 +3760,25 @@ start_over:
if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
if (!extract_and_process_scc_for_name (name))
{
- VEC_free (tree, heap, namevec);
- VEC_free (ssa_op_iter, heap, itervec);
+ namevec.release ();
+ itervec.release ();
return false;
}
/* Check if we are done. */
- if (VEC_empty (tree, namevec))
+ if (namevec.is_empty ())
{
- VEC_free (tree, heap, namevec);
- VEC_free (ssa_op_iter, heap, itervec);
+ namevec.release ();
+ itervec.release ();
return true;
}
/* Restore the last use walker and continue walking there. */
use = name;
- name = VEC_pop (tree, namevec);
- memcpy (&iter, &VEC_last (ssa_op_iter, itervec),
+ name = namevec.pop ();
+ memcpy (&iter, &itervec.last (),
sizeof (ssa_op_iter));
- VEC_pop (ssa_op_iter, itervec);
+ itervec.pop ();
goto continue_walking;
}
@@ -3808,8 +3792,8 @@ start_over:
{
/* Recurse by pushing the current use walking state on
the stack and starting over. */
- VEC_safe_push(ssa_op_iter, heap, itervec, iter);
- VEC_safe_push(tree, heap, namevec, name);
+ itervec.safe_push (iter);
+ namevec.safe_push (name);
name = use;
goto start_over;
@@ -3869,7 +3853,7 @@ init_scc_vn (void)
int *rpo_numbers_temp;
calculate_dominance_info (CDI_DOMINATORS);
- sccstack = NULL;
+ sccstack.create (0);
constant_to_value_id = htab_create (23, vn_constant_hash, vn_constant_eq,
free);
@@ -3878,15 +3862,14 @@ init_scc_vn (void)
next_dfs_num = 1;
next_value_id = 1;
- vn_ssa_aux_table = VEC_alloc (vn_ssa_aux_t, heap, num_ssa_names + 1);
+ vn_ssa_aux_table.create (num_ssa_names + 1);
/* VEC_alloc doesn't actually grow it to the right size, it just
preallocates the space to do so. */
- VEC_safe_grow_cleared (vn_ssa_aux_t, heap, vn_ssa_aux_table,
- num_ssa_names + 1);
+ vn_ssa_aux_table.safe_grow_cleared (num_ssa_names + 1);
gcc_obstack_init (&vn_ssa_aux_obstack);
- shared_lookup_phiargs = NULL;
- shared_lookup_references = NULL;
+ shared_lookup_phiargs.create (0);
+ shared_lookup_references.create (0);
rpo_numbers = XNEWVEC (int, last_basic_block);
rpo_numbers_temp = XNEWVEC (int, n_basic_blocks - NUM_FIXED_BLOCKS);
pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
@@ -3930,8 +3913,8 @@ free_scc_vn (void)
htab_delete (constant_to_value_id);
BITMAP_FREE (constant_value_ids);
- VEC_free (tree, heap, shared_lookup_phiargs);
- VEC_free (vn_reference_op_s, heap, shared_lookup_references);
+ shared_lookup_phiargs.release ();
+ shared_lookup_references.release ();
XDELETEVEC (rpo_numbers);
for (i = 0; i < num_ssa_names; i++)
@@ -3942,9 +3925,9 @@ free_scc_vn (void)
release_ssa_name (name);
}
obstack_free (&vn_ssa_aux_obstack, NULL);
- VEC_free (vn_ssa_aux_t, heap, vn_ssa_aux_table);
+ vn_ssa_aux_table.release ();
- VEC_free (tree, heap, sccstack);
+ sccstack.release ();
free_vn_table (valid_info);
XDELETE (valid_info);
free_vn_table (optimistic_info);