-rw-r--r--  gcc/ChangeLog              29
-rw-r--r--  gcc/tree-vect-data-refs.c   5
-rw-r--r--  gcc/tree-vect-loop.c        8
-rw-r--r--  gcc/tree-vect-patterns.c  105
-rw-r--r--  gcc/tree-vect-slp.c         7
-rw-r--r--  gcc/tree-vect-stmts.c      34
-rw-r--r--  gcc/tree-vectorizer.h       2
7 files changed, 118 insertions, 72 deletions
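In short, this patch adds a vec_info argument to get_vectype_for_scalar_type and updates every caller to pass the vec_info it already has in scope.  Inside get_vectype_for_scalar_type itself the new parameter is left unnamed and unused for now, so behaviour does not change; presumably a later change can then derive the vector type from per-vec_info state rather than the global current_vector_size.  Below is a minimal, hypothetical C++ sketch of the calling-convention change only; the stand-in vec_info, tree and main are illustrations, not the real GCC declarations (those live in tree-vectorizer.h and tree-vect-stmts.c).

/* Hypothetical, simplified sketch of the interface change in this patch.  */

struct vec_info { };          /* stand-in for GCC's vec_info */
struct tree_node { };
typedef tree_node *tree;      /* stand-in for GCC's tree */

/* Before: tree get_vectype_for_scalar_type (tree scalar_type);
   After: the caller's vec_info is passed as well.  In this patch the new
   parameter is still unused (the definition leaves it unnamed), so only
   the calling convention differs.  */
static tree
get_vectype_for_scalar_type (vec_info *, tree scalar_type)
{
  /* The real function maps SCALAR_TYPE to a vector type; here we simply
     echo the input to keep the sketch self-contained and compilable.  */
  return scalar_type;
}

int
main ()
{
  vec_info vinfo;
  tree_node scalar;
  /* Call sites change from
       get_vectype_for_scalar_type (type)
     to the form used throughout the diff below:  */
  tree vectype = get_vectype_for_scalar_type (&vinfo, &scalar);
  return vectype ? 0 : 1;
}

The diff that follows is the mechanical version of this: the declaration in tree-vectorizer.h and the definition in tree-vect-stmts.c gain the parameter, and each call site threads through whichever of vinfo, loop_vinfo or stmt_info->vinfo is available.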
diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index bbb4710..42762a9 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,5 +1,34 @@
2019-10-21 Richard Sandiford <richard.sandiford@arm.com>
+ * tree-vectorizer.h (get_vectype_for_scalar_type): Take a vec_info.
+ * tree-vect-stmts.c (get_vectype_for_scalar_type): Likewise.
+ (vect_prologue_cost_for_slp_op): Update call accordingly.
+ (vect_get_vec_def_for_operand, vect_get_gather_scatter_ops)
+ (vect_get_strided_load_store_ops, vectorizable_simd_clone_call)
+ (vect_supportable_shift, vect_is_simple_cond, vectorizable_comparison)
+ (get_mask_type_for_scalar_type): Likewise.
+ (vect_get_vector_types_for_stmt): Likewise.
+ * tree-vect-data-refs.c (vect_analyze_data_refs): Likewise.
+ * tree-vect-loop.c (vect_determine_vectorization_factor): Likewise.
+ (get_initial_def_for_reduction, build_vect_cond_expr): Likewise.
+ * tree-vect-patterns.c (vect_supportable_direct_optab_p): Likewise.
+ (vect_split_statement, vect_convert_input): Likewise.
+ (vect_recog_widen_op_pattern, vect_recog_pow_pattern): Likewise.
+ (vect_recog_over_widening_pattern, vect_recog_mulhs_pattern): Likewise.
+ (vect_recog_average_pattern, vect_recog_cast_forwprop_pattern)
+ (vect_recog_rotate_pattern, vect_recog_vector_vector_shift_pattern)
+ (vect_synth_mult_by_constant, vect_recog_mult_pattern): Likewise.
+ (vect_recog_divmod_pattern, vect_recog_mixed_size_cond_pattern)
+ (check_bool_pattern, adjust_bool_pattern_cast, adjust_bool_pattern)
+ (search_type_for_mask_1, vect_recog_bool_pattern): Likewise.
+ (vect_recog_mask_conversion_pattern): Likewise.
+ (vect_add_conversion_to_pattern): Likewise.
+ (vect_recog_gather_scatter_pattern): Likewise.
+ * tree-vect-slp.c (vect_build_slp_tree_2): Likewise.
+ (vect_analyze_slp_instance, vect_get_constant_vectors): Likewise.
+
+2019-10-21 Richard Sandiford <richard.sandiford@arm.com>
+
* tree-vectorizer.h (get_mask_type_for_scalar_type): Take a vec_info.
* tree-vect-stmts.c (get_mask_type_for_scalar_type): Likewise.
(vect_check_load_store_mask): Update call accordingly.
diff --git a/gcc/tree-vect-data-refs.c b/gcc/tree-vect-data-refs.c
index 9a4c01d..987dc0e 100644
--- a/gcc/tree-vect-data-refs.c
+++ b/gcc/tree-vect-data-refs.c
@@ -4344,7 +4344,7 @@ vect_analyze_data_refs (vec_info *vinfo, poly_uint64 *min_vf, bool *fatal)
/* Set vectype for STMT. */
scalar_type = TREE_TYPE (DR_REF (dr));
STMT_VINFO_VECTYPE (stmt_info)
- = get_vectype_for_scalar_type (scalar_type);
+ = get_vectype_for_scalar_type (vinfo, scalar_type);
if (!STMT_VINFO_VECTYPE (stmt_info))
{
if (dump_enabled_p ())
@@ -4392,7 +4392,8 @@ vect_analyze_data_refs (vec_info *vinfo, poly_uint64 *min_vf, bool *fatal)
if (!vect_check_gather_scatter (stmt_info,
as_a <loop_vec_info> (vinfo),
&gs_info)
- || !get_vectype_for_scalar_type (TREE_TYPE (gs_info.offset)))
+ || !get_vectype_for_scalar_type (vinfo,
+ TREE_TYPE (gs_info.offset)))
{
if (fatal)
*fatal = false;
diff --git a/gcc/tree-vect-loop.c b/gcc/tree-vect-loop.c
index 10920ac..304bb5e 100644
--- a/gcc/tree-vect-loop.c
+++ b/gcc/tree-vect-loop.c
@@ -327,7 +327,7 @@ vect_determine_vectorization_factor (loop_vec_info loop_vinfo)
"get vectype for scalar type: %T\n",
scalar_type);
- vectype = get_vectype_for_scalar_type (scalar_type);
+ vectype = get_vectype_for_scalar_type (loop_vinfo, scalar_type);
if (!vectype)
return opt_result::failure_at (phi,
"not vectorized: unsupported "
@@ -3774,7 +3774,7 @@ get_initial_def_for_reduction (stmt_vec_info stmt_vinfo,
loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_vinfo);
class loop *loop = LOOP_VINFO_LOOP (loop_vinfo);
tree scalar_type = TREE_TYPE (init_val);
- tree vectype = get_vectype_for_scalar_type (scalar_type);
+ tree vectype = get_vectype_for_scalar_type (loop_vinfo, scalar_type);
tree def_for_init;
tree init_def;
REAL_VALUE_TYPE real_init_val = dconst0;
@@ -5555,11 +5555,11 @@ build_vect_cond_expr (enum tree_code code, tree vop[3], tree mask,
corresponds to the type of arguments to the reduction stmt, and should *NOT*
be used to create the vectorized stmt. The right vectype for the vectorized
stmt is obtained from the type of the result X:
- get_vectype_for_scalar_type (TREE_TYPE (X))
+ get_vectype_for_scalar_type (vinfo, TREE_TYPE (X))
This means that, contrary to "regular" reductions (or "regular" stmts in
general), the following equation:
- STMT_VINFO_VECTYPE == get_vectype_for_scalar_type (TREE_TYPE (X))
+ STMT_VINFO_VECTYPE == get_vectype_for_scalar_type (vinfo, TREE_TYPE (X))
does *NOT* necessarily hold for reduction patterns. */
bool
diff --git a/gcc/tree-vect-patterns.c b/gcc/tree-vect-patterns.c
index 1be4cc0..b497ec9 100644
--- a/gcc/tree-vect-patterns.c
+++ b/gcc/tree-vect-patterns.c
@@ -187,15 +187,15 @@ vect_get_external_def_edge (vec_info *vinfo, tree var)
is nonnull. */
static bool
-vect_supportable_direct_optab_p (vec_info *, tree otype, tree_code code,
+vect_supportable_direct_optab_p (vec_info *vinfo, tree otype, tree_code code,
tree itype, tree *vecotype_out,
tree *vecitype_out = NULL)
{
- tree vecitype = get_vectype_for_scalar_type (itype);
+ tree vecitype = get_vectype_for_scalar_type (vinfo, itype);
if (!vecitype)
return false;
- tree vecotype = get_vectype_for_scalar_type (otype);
+ tree vecotype = get_vectype_for_scalar_type (vinfo, otype);
if (!vecotype)
return false;
@@ -635,6 +635,7 @@ static bool
vect_split_statement (stmt_vec_info stmt2_info, tree new_rhs,
gimple *stmt1, tree vectype)
{
+ vec_info *vinfo = stmt2_info->vinfo;
if (is_pattern_stmt_p (stmt2_info))
{
/* STMT2_INFO is part of a pattern. Get the statement to which
@@ -678,7 +679,7 @@ vect_split_statement (stmt_vec_info stmt2_info, tree new_rhs,
two-statement pattern now. */
gcc_assert (!STMT_VINFO_RELATED_STMT (stmt2_info));
tree lhs_type = TREE_TYPE (gimple_get_lhs (stmt2_info->stmt));
- tree lhs_vectype = get_vectype_for_scalar_type (lhs_type);
+ tree lhs_vectype = get_vectype_for_scalar_type (vinfo, lhs_type);
if (!lhs_vectype)
return false;
@@ -715,6 +716,8 @@ static tree
vect_convert_input (stmt_vec_info stmt_info, tree type,
vect_unpromoted_value *unprom, tree vectype)
{
+ vec_info *vinfo = stmt_info->vinfo;
+
/* Check for a no-op conversion. */
if (types_compatible_p (type, TREE_TYPE (unprom->op)))
return unprom->op;
@@ -752,7 +755,7 @@ vect_convert_input (stmt_vec_info stmt_info, tree type,
unsigned promotion. */
tree midtype = build_nonstandard_integer_type
(TYPE_PRECISION (type), TYPE_UNSIGNED (unprom->type));
- tree vec_midtype = get_vectype_for_scalar_type (midtype);
+ tree vec_midtype = get_vectype_for_scalar_type (vinfo, midtype);
if (vec_midtype)
{
input = vect_recog_temp_ssa_var (midtype, NULL);
@@ -1189,6 +1192,7 @@ vect_recog_widen_op_pattern (stmt_vec_info last_stmt_info, tree *type_out,
tree_code orig_code, tree_code wide_code,
bool shift_p, const char *name)
{
+ vec_info *vinfo = last_stmt_info->vinfo;
gimple *last_stmt = last_stmt_info->stmt;
vect_unpromoted_value unprom[2];
@@ -1208,8 +1212,8 @@ vect_recog_widen_op_pattern (stmt_vec_info last_stmt_info, tree *type_out,
TYPE_UNSIGNED (half_type));
/* Check target support */
- tree vectype = get_vectype_for_scalar_type (half_type);
- tree vecitype = get_vectype_for_scalar_type (itype);
+ tree vectype = get_vectype_for_scalar_type (vinfo, half_type);
+ tree vecitype = get_vectype_for_scalar_type (vinfo, itype);
enum tree_code dummy_code;
int dummy_int;
auto_vec<tree> dummy_vec;
@@ -1221,7 +1225,7 @@ vect_recog_widen_op_pattern (stmt_vec_info last_stmt_info, tree *type_out,
&dummy_int, &dummy_vec))
return NULL;
- *type_out = get_vectype_for_scalar_type (type);
+ *type_out = get_vectype_for_scalar_type (vinfo, type);
if (!*type_out)
return NULL;
@@ -1342,7 +1346,7 @@ vect_recog_pow_pattern (stmt_vec_info stmt_vinfo, tree *type_out)
if (node->simd_clones == NULL)
return NULL;
}
- *type_out = get_vectype_for_scalar_type (TREE_TYPE (base));
+ *type_out = get_vectype_for_scalar_type (vinfo, TREE_TYPE (base));
if (!*type_out)
return NULL;
tree def = vect_recog_temp_ssa_var (TREE_TYPE (base), NULL);
@@ -1380,7 +1384,7 @@ vect_recog_pow_pattern (stmt_vec_info stmt_vinfo, tree *type_out)
if (TREE_CODE (exp) == REAL_CST
&& real_equal (&TREE_REAL_CST (exp), &dconsthalf))
{
- *type_out = get_vectype_for_scalar_type (TREE_TYPE (base));
+ *type_out = get_vectype_for_scalar_type (vinfo, TREE_TYPE (base));
if (*type_out
&& direct_internal_fn_supported_p (IFN_SQRT, *type_out,
OPTIMIZE_FOR_SPEED))
@@ -1665,7 +1669,7 @@ vect_recog_over_widening_pattern (stmt_vec_info last_stmt_info, tree *type_out)
vect_pattern_detected ("vect_recog_over_widening_pattern", last_stmt);
- *type_out = get_vectype_for_scalar_type (type);
+ *type_out = get_vectype_for_scalar_type (vinfo, type);
if (!*type_out)
return NULL;
@@ -1686,8 +1690,8 @@ vect_recog_over_widening_pattern (stmt_vec_info last_stmt_info, tree *type_out)
wants to rewrite anyway. If targets have a minimum element size
for some optabs, we should pattern-match smaller ops to larger ops
where beneficial. */
- tree new_vectype = get_vectype_for_scalar_type (new_type);
- tree op_vectype = get_vectype_for_scalar_type (op_type);
+ tree new_vectype = get_vectype_for_scalar_type (vinfo, new_type);
+ tree op_vectype = get_vectype_for_scalar_type (vinfo, op_type);
if (!new_vectype || !op_vectype)
return NULL;
@@ -1864,7 +1868,7 @@ vect_recog_mulhs_pattern (stmt_vec_info last_stmt_info, tree *type_out)
(target_precision, TYPE_UNSIGNED (new_type));
/* Check for target support. */
- tree new_vectype = get_vectype_for_scalar_type (new_type);
+ tree new_vectype = get_vectype_for_scalar_type (vinfo, new_type);
if (!new_vectype
|| !direct_internal_fn_supported_p
(ifn, new_vectype, OPTIMIZE_FOR_SPEED))
@@ -1872,7 +1876,7 @@ vect_recog_mulhs_pattern (stmt_vec_info last_stmt_info, tree *type_out)
/* The IR requires a valid vector type for the cast result, even though
it's likely to be discarded. */
- *type_out = get_vectype_for_scalar_type (lhs_type);
+ *type_out = get_vectype_for_scalar_type (vinfo, lhs_type);
if (!*type_out)
return NULL;
@@ -2014,7 +2018,7 @@ vect_recog_average_pattern (stmt_vec_info last_stmt_info, tree *type_out)
TYPE_UNSIGNED (new_type));
/* Check for target support. */
- tree new_vectype = get_vectype_for_scalar_type (new_type);
+ tree new_vectype = get_vectype_for_scalar_type (vinfo, new_type);
if (!new_vectype
|| !direct_internal_fn_supported_p (ifn, new_vectype,
OPTIMIZE_FOR_SPEED))
@@ -2022,7 +2026,7 @@ vect_recog_average_pattern (stmt_vec_info last_stmt_info, tree *type_out)
/* The IR requires a valid vector type for the cast result, even though
it's likely to be discarded. */
- *type_out = get_vectype_for_scalar_type (type);
+ *type_out = get_vectype_for_scalar_type (vinfo, type);
if (!*type_out)
return NULL;
@@ -2108,7 +2112,7 @@ vect_recog_cast_forwprop_pattern (stmt_vec_info last_stmt_info, tree *type_out)
the unnecessary widening and narrowing. */
vect_pattern_detected ("vect_recog_cast_forwprop_pattern", last_stmt);
- *type_out = get_vectype_for_scalar_type (lhs_type);
+ *type_out = get_vectype_for_scalar_type (vinfo, lhs_type);
if (!*type_out)
return NULL;
@@ -2219,7 +2223,7 @@ vect_recog_rotate_pattern (stmt_vec_info stmt_vinfo, tree *type_out)
}
type = TREE_TYPE (lhs);
- vectype = get_vectype_for_scalar_type (type);
+ vectype = get_vectype_for_scalar_type (vinfo, type);
if (vectype == NULL_TREE)
return NULL;
@@ -2285,7 +2289,7 @@ vect_recog_rotate_pattern (stmt_vec_info stmt_vinfo, tree *type_out)
&& dt != vect_external_def)
return NULL;
- vectype = get_vectype_for_scalar_type (type);
+ vectype = get_vectype_for_scalar_type (vinfo, type);
if (vectype == NULL_TREE)
return NULL;
@@ -2404,7 +2408,7 @@ vect_recog_rotate_pattern (stmt_vec_info stmt_vinfo, tree *type_out)
}
else
{
- tree vecstype = get_vectype_for_scalar_type (stype);
+ tree vecstype = get_vectype_for_scalar_type (vinfo, stype);
if (vecstype == NULL_TREE)
return NULL;
@@ -2533,7 +2537,7 @@ vect_recog_vector_vector_shift_pattern (stmt_vec_info stmt_vinfo,
if (!def_vinfo)
return NULL;
- *type_out = get_vectype_for_scalar_type (TREE_TYPE (oprnd0));
+ *type_out = get_vectype_for_scalar_type (vinfo, TREE_TYPE (oprnd0));
if (*type_out == NULL_TREE)
return NULL;
@@ -2556,7 +2560,8 @@ vect_recog_vector_vector_shift_pattern (stmt_vec_info stmt_vinfo,
TYPE_PRECISION (TREE_TYPE (oprnd1)));
def = vect_recog_temp_ssa_var (TREE_TYPE (rhs1), NULL);
def_stmt = gimple_build_assign (def, BIT_AND_EXPR, rhs1, mask);
- tree vecstype = get_vectype_for_scalar_type (TREE_TYPE (rhs1));
+ tree vecstype = get_vectype_for_scalar_type (vinfo,
+ TREE_TYPE (rhs1));
append_pattern_def_seq (stmt_vinfo, def_stmt, vecstype);
}
}
@@ -2751,7 +2756,7 @@ vect_synth_mult_by_constant (tree op, tree val,
if (!possible)
return NULL;
- tree vectype = get_vectype_for_scalar_type (multtype);
+ tree vectype = get_vectype_for_scalar_type (vinfo, multtype);
if (!vectype
|| !target_supports_mult_synth_alg (&alg, variant,
@@ -2897,6 +2902,7 @@ vect_synth_mult_by_constant (tree op, tree val,
static gimple *
vect_recog_mult_pattern (stmt_vec_info stmt_vinfo, tree *type_out)
{
+ vec_info *vinfo = stmt_vinfo->vinfo;
gimple *last_stmt = stmt_vinfo->stmt;
tree oprnd0, oprnd1, vectype, itype;
gimple *pattern_stmt;
@@ -2917,7 +2923,7 @@ vect_recog_mult_pattern (stmt_vec_info stmt_vinfo, tree *type_out)
|| !type_has_mode_precision_p (itype))
return NULL;
- vectype = get_vectype_for_scalar_type (itype);
+ vectype = get_vectype_for_scalar_type (vinfo, itype);
if (vectype == NULL_TREE)
return NULL;
@@ -2985,6 +2991,7 @@ vect_recog_mult_pattern (stmt_vec_info stmt_vinfo, tree *type_out)
static gimple *
vect_recog_divmod_pattern (stmt_vec_info stmt_vinfo, tree *type_out)
{
+ vec_info *vinfo = stmt_vinfo->vinfo;
gimple *last_stmt = stmt_vinfo->stmt;
tree oprnd0, oprnd1, vectype, itype, cond;
gimple *pattern_stmt, *def_stmt;
@@ -3017,7 +3024,7 @@ vect_recog_divmod_pattern (stmt_vec_info stmt_vinfo, tree *type_out)
return NULL;
scalar_int_mode itype_mode = SCALAR_INT_TYPE_MODE (itype);
- vectype = get_vectype_for_scalar_type (itype);
+ vectype = get_vectype_for_scalar_type (vinfo, itype);
if (vectype == NULL_TREE)
return NULL;
@@ -3115,7 +3122,7 @@ vect_recog_divmod_pattern (stmt_vec_info stmt_vinfo, tree *type_out)
{
tree utype
= build_nonstandard_integer_type (prec, 1);
- tree vecutype = get_vectype_for_scalar_type (utype);
+ tree vecutype = get_vectype_for_scalar_type (vinfo, utype);
tree shift
= build_int_cst (utype, GET_MODE_BITSIZE (itype_mode)
- tree_log2 (oprnd1));
@@ -3433,6 +3440,7 @@ vect_recog_divmod_pattern (stmt_vec_info stmt_vinfo, tree *type_out)
static gimple *
vect_recog_mixed_size_cond_pattern (stmt_vec_info stmt_vinfo, tree *type_out)
{
+ vec_info *vinfo = stmt_vinfo->vinfo;
gimple *last_stmt = stmt_vinfo->stmt;
tree cond_expr, then_clause, else_clause;
tree type, vectype, comp_vectype, itype = NULL_TREE, vecitype;
@@ -3455,7 +3463,7 @@ vect_recog_mixed_size_cond_pattern (stmt_vec_info stmt_vinfo, tree *type_out)
return NULL;
comp_scalar_type = TREE_TYPE (TREE_OPERAND (cond_expr, 0));
- comp_vectype = get_vectype_for_scalar_type (comp_scalar_type);
+ comp_vectype = get_vectype_for_scalar_type (vinfo, comp_scalar_type);
if (comp_vectype == NULL_TREE)
return NULL;
@@ -3503,7 +3511,7 @@ vect_recog_mixed_size_cond_pattern (stmt_vec_info stmt_vinfo, tree *type_out)
if (GET_MODE_BITSIZE (type_mode) == cmp_mode_size)
return NULL;
- vectype = get_vectype_for_scalar_type (type);
+ vectype = get_vectype_for_scalar_type (vinfo, type);
if (vectype == NULL_TREE)
return NULL;
@@ -3518,7 +3526,7 @@ vect_recog_mixed_size_cond_pattern (stmt_vec_info stmt_vinfo, tree *type_out)
|| GET_MODE_BITSIZE (SCALAR_TYPE_MODE (itype)) != cmp_mode_size)
return NULL;
- vecitype = get_vectype_for_scalar_type (itype);
+ vecitype = get_vectype_for_scalar_type (vinfo, itype);
if (vecitype == NULL_TREE)
return NULL;
@@ -3612,7 +3620,7 @@ check_bool_pattern (tree var, vec_info *vinfo, hash_set<gimple *> &stmts)
if (stmt_could_throw_p (cfun, def_stmt))
return false;
- comp_vectype = get_vectype_for_scalar_type (TREE_TYPE (rhs1));
+ comp_vectype = get_vectype_for_scalar_type (vinfo, TREE_TYPE (rhs1));
if (comp_vectype == NULL_TREE)
return false;
@@ -3627,7 +3635,7 @@ check_bool_pattern (tree var, vec_info *vinfo, hash_set<gimple *> &stmts)
scalar_mode mode = SCALAR_TYPE_MODE (TREE_TYPE (rhs1));
tree itype
= build_nonstandard_integer_type (GET_MODE_BITSIZE (mode), 1);
- vecitype = get_vectype_for_scalar_type (itype);
+ vecitype = get_vectype_for_scalar_type (vinfo, itype);
if (vecitype == NULL_TREE)
return false;
}
@@ -3656,10 +3664,11 @@ check_bool_pattern (tree var, vec_info *vinfo, hash_set<gimple *> &stmts)
static tree
adjust_bool_pattern_cast (tree type, tree var, stmt_vec_info stmt_info)
{
+ vec_info *vinfo = stmt_info->vinfo;
gimple *cast_stmt = gimple_build_assign (vect_recog_temp_ssa_var (type, NULL),
NOP_EXPR, var);
append_pattern_def_seq (stmt_info, cast_stmt,
- get_vectype_for_scalar_type (type));
+ get_vectype_for_scalar_type (vinfo, type));
return gimple_assign_lhs (cast_stmt);
}
@@ -3673,6 +3682,7 @@ static void
adjust_bool_pattern (tree var, tree out_type,
stmt_vec_info stmt_info, hash_map <tree, tree> &defs)
{
+ vec_info *vinfo = stmt_info->vinfo;
gimple *stmt = SSA_NAME_DEF_STMT (var);
enum tree_code rhs_code, def_rhs_code;
tree itype, cond_expr, rhs1, rhs2, irhs1, irhs2;
@@ -3834,7 +3844,7 @@ adjust_bool_pattern (tree var, tree out_type,
gimple_set_location (pattern_stmt, loc);
append_pattern_def_seq (stmt_info, pattern_stmt,
- get_vectype_for_scalar_type (itype));
+ get_vectype_for_scalar_type (vinfo, itype));
defs.put (var, gimple_assign_lhs (pattern_stmt));
}
@@ -3937,7 +3947,7 @@ search_type_for_mask_1 (tree var, vec_info *vinfo,
break;
}
- comp_vectype = get_vectype_for_scalar_type (TREE_TYPE (rhs1));
+ comp_vectype = get_vectype_for_scalar_type (vinfo, TREE_TYPE (rhs1));
if (comp_vectype == NULL_TREE)
{
res = NULL_TREE;
@@ -4052,7 +4062,7 @@ vect_recog_bool_pattern (stmt_vec_info stmt_vinfo, tree *type_out)
if (! INTEGRAL_TYPE_P (TREE_TYPE (lhs))
|| TYPE_PRECISION (TREE_TYPE (lhs)) == 1)
return NULL;
- vectype = get_vectype_for_scalar_type (TREE_TYPE (lhs));
+ vectype = get_vectype_for_scalar_type (vinfo, TREE_TYPE (lhs));
if (vectype == NULL_TREE)
return NULL;
@@ -4089,7 +4099,7 @@ vect_recog_bool_pattern (stmt_vec_info stmt_vinfo, tree *type_out)
if (!useless_type_conversion_p (type, TREE_TYPE (lhs)))
{
- tree new_vectype = get_vectype_for_scalar_type (type);
+ tree new_vectype = get_vectype_for_scalar_type (vinfo, type);
append_pattern_def_seq (stmt_vinfo, pattern_stmt, new_vectype);
lhs = vect_recog_temp_ssa_var (TREE_TYPE (lhs), NULL);
@@ -4105,7 +4115,7 @@ vect_recog_bool_pattern (stmt_vec_info stmt_vinfo, tree *type_out)
else if (rhs_code == COND_EXPR
&& TREE_CODE (var) == SSA_NAME)
{
- vectype = get_vectype_for_scalar_type (TREE_TYPE (lhs));
+ vectype = get_vectype_for_scalar_type (vinfo, TREE_TYPE (lhs));
if (vectype == NULL_TREE)
return NULL;
@@ -4119,7 +4129,7 @@ vect_recog_bool_pattern (stmt_vec_info stmt_vinfo, tree *type_out)
tree type
= build_nonstandard_integer_type (prec,
TYPE_UNSIGNED (TREE_TYPE (var)));
- if (get_vectype_for_scalar_type (type) == NULL_TREE)
+ if (get_vectype_for_scalar_type (vinfo, type) == NULL_TREE)
return NULL;
if (!check_bool_pattern (var, vinfo, bool_stmts))
@@ -4163,7 +4173,7 @@ vect_recog_bool_pattern (stmt_vec_info stmt_vinfo, tree *type_out)
cst0 = build_int_cst (type, 0);
cst1 = build_int_cst (type, 1);
- new_vectype = get_vectype_for_scalar_type (type);
+ new_vectype = get_vectype_for_scalar_type (vinfo, type);
rhs = vect_recog_temp_ssa_var (type, NULL);
pattern_stmt = gimple_build_assign (rhs, COND_EXPR, var, cst1, cst0);
@@ -4264,12 +4274,12 @@ vect_recog_mask_conversion_pattern (stmt_vec_info stmt_vinfo, tree *type_out)
{
int rhs_index = internal_fn_stored_value_index (ifn);
tree rhs = gimple_call_arg (last_stmt, rhs_index);
- vectype1 = get_vectype_for_scalar_type (TREE_TYPE (rhs));
+ vectype1 = get_vectype_for_scalar_type (vinfo, TREE_TYPE (rhs));
}
else
{
lhs = gimple_call_lhs (last_stmt);
- vectype1 = get_vectype_for_scalar_type (TREE_TYPE (lhs));
+ vectype1 = get_vectype_for_scalar_type (vinfo, TREE_TYPE (lhs));
}
tree mask_arg = gimple_call_arg (last_stmt, mask_argno);
@@ -4322,7 +4332,7 @@ vect_recog_mask_conversion_pattern (stmt_vec_info stmt_vinfo, tree *type_out)
/* Check for cond expression requiring mask conversion. */
if (rhs_code == COND_EXPR)
{
- vectype1 = get_vectype_for_scalar_type (TREE_TYPE (lhs));
+ vectype1 = get_vectype_for_scalar_type (vinfo, TREE_TYPE (lhs));
if (TREE_CODE (rhs1) == SSA_NAME)
{
@@ -4388,7 +4398,8 @@ vect_recog_mask_conversion_pattern (stmt_vec_info stmt_vinfo, tree *type_out)
tree wide_scalar_type = build_nonstandard_integer_type
(tree_to_uhwi (TYPE_SIZE (TREE_TYPE (vectype1))),
TYPE_UNSIGNED (rhs1_type));
- tree vectype3 = get_vectype_for_scalar_type (wide_scalar_type);
+ tree vectype3 = get_vectype_for_scalar_type (vinfo,
+ wide_scalar_type);
if (expand_vec_cond_expr_p (vectype1, vectype3, TREE_CODE (rhs1)))
return NULL;
}
@@ -4544,10 +4555,11 @@ vect_add_conversion_to_pattern (tree type, tree value, stmt_vec_info stmt_info)
if (useless_type_conversion_p (type, TREE_TYPE (value)))
return value;
+ vec_info *vinfo = stmt_info->vinfo;
tree new_value = vect_recog_temp_ssa_var (type, NULL);
gassign *conversion = gimple_build_assign (new_value, CONVERT_EXPR, value);
append_pattern_def_seq (stmt_info, conversion,
- get_vectype_for_scalar_type (type));
+ get_vectype_for_scalar_type (vinfo, type));
return new_value;
}
@@ -4583,7 +4595,8 @@ vect_recog_gather_scatter_pattern (stmt_vec_info stmt_info, tree *type_out)
return NULL;
/* Convert the mask to the right form. */
- tree gs_vectype = get_vectype_for_scalar_type (gs_info.element_type);
+ tree gs_vectype = get_vectype_for_scalar_type (loop_vinfo,
+ gs_info.element_type);
if (mask)
mask = vect_convert_mask_for_vectype (mask, gs_vectype, stmt_info,
loop_vinfo);
diff --git a/gcc/tree-vect-slp.c b/gcc/tree-vect-slp.c
index 5b4b37f..886bbce 100644
--- a/gcc/tree-vect-slp.c
+++ b/gcc/tree-vect-slp.c
@@ -1127,7 +1127,7 @@ vect_build_slp_tree_2 (vec_info *vinfo,
if (gphi *stmt = dyn_cast <gphi *> (stmt_info->stmt))
{
tree scalar_type = TREE_TYPE (PHI_RESULT (stmt));
- tree vectype = get_vectype_for_scalar_type (scalar_type);
+ tree vectype = get_vectype_for_scalar_type (vinfo, scalar_type);
if (!vect_record_max_nunits (stmt_info, group_size, vectype, max_nunits))
return NULL;
@@ -1926,7 +1926,7 @@ vect_analyze_slp_instance (vec_info *vinfo,
if (STMT_VINFO_GROUPED_ACCESS (stmt_info))
{
scalar_type = TREE_TYPE (DR_REF (dr));
- vectype = get_vectype_for_scalar_type (scalar_type);
+ vectype = get_vectype_for_scalar_type (vinfo, scalar_type);
group_size = DR_GROUP_SIZE (stmt_info);
}
else if (!dr && REDUC_GROUP_FIRST_ELEMENT (stmt_info))
@@ -3287,6 +3287,7 @@ vect_get_constant_vectors (tree op, slp_tree slp_node,
{
vec<stmt_vec_info> stmts = SLP_TREE_SCALAR_STMTS (slp_node);
stmt_vec_info stmt_vinfo = stmts[0];
+ vec_info *vinfo = stmt_vinfo->vinfo;
gimple *stmt = stmt_vinfo->stmt;
unsigned HOST_WIDE_INT nunits;
tree vec_cst;
@@ -3310,7 +3311,7 @@ vect_get_constant_vectors (tree op, slp_tree slp_node,
vector_type
= build_same_sized_truth_vector_type (STMT_VINFO_VECTYPE (stmt_vinfo));
else
- vector_type = get_vectype_for_scalar_type (TREE_TYPE (op));
+ vector_type = get_vectype_for_scalar_type (vinfo, TREE_TYPE (op));
if (STMT_VINFO_DATA_REF (stmt_vinfo))
{
diff --git a/gcc/tree-vect-stmts.c b/gcc/tree-vect-stmts.c
index d371820..35fe141 100644
--- a/gcc/tree-vect-stmts.c
+++ b/gcc/tree-vect-stmts.c
@@ -796,6 +796,7 @@ vect_prologue_cost_for_slp_op (slp_tree node, stmt_vec_info stmt_info,
unsigned opno, enum vect_def_type dt,
stmt_vector_for_cost *cost_vec)
{
+ vec_info *vinfo = stmt_info->vinfo;
gimple *stmt = SLP_TREE_SCALAR_STMTS (node)[0]->stmt;
tree op = gimple_op (stmt, opno);
unsigned prologue_cost = 0;
@@ -803,7 +804,7 @@ vect_prologue_cost_for_slp_op (slp_tree node, stmt_vec_info stmt_info,
/* Without looking at the actual initializer a vector of
constants can be implemented as load from the constant pool.
When all elements are the same we can use a splat. */
- tree vectype = get_vectype_for_scalar_type (TREE_TYPE (op));
+ tree vectype = get_vectype_for_scalar_type (vinfo, TREE_TYPE (op));
unsigned group_size = SLP_TREE_SCALAR_STMTS (node).length ();
unsigned num_vects_to_check;
unsigned HOST_WIDE_INT const_nunits;
@@ -1610,7 +1611,7 @@ vect_get_vec_def_for_operand (tree op, stmt_vec_info stmt_vinfo, tree vectype)
&& VECTOR_BOOLEAN_TYPE_P (stmt_vectype))
vector_type = build_same_sized_truth_vector_type (stmt_vectype);
else
- vector_type = get_vectype_for_scalar_type (TREE_TYPE (op));
+ vector_type = get_vectype_for_scalar_type (loop_vinfo, TREE_TYPE (op));
gcc_assert (vector_type);
return vect_init_vector (stmt_vinfo, op, vector_type, NULL);
@@ -2975,6 +2976,7 @@ vect_get_gather_scatter_ops (class loop *loop, stmt_vec_info stmt_info,
gather_scatter_info *gs_info,
tree *dataref_ptr, tree *vec_offset)
{
+ vec_info *vinfo = stmt_info->vinfo;
gimple_seq stmts = NULL;
*dataref_ptr = force_gimple_operand (gs_info->base, &stmts, true, NULL_TREE);
if (stmts != NULL)
@@ -2985,7 +2987,7 @@ vect_get_gather_scatter_ops (class loop *loop, stmt_vec_info stmt_info,
gcc_assert (!new_bb);
}
tree offset_type = TREE_TYPE (gs_info->offset);
- tree offset_vectype = get_vectype_for_scalar_type (offset_type);
+ tree offset_vectype = get_vectype_for_scalar_type (vinfo, offset_type);
*vec_offset = vect_get_vec_def_for_operand (gs_info->offset, stmt_info,
offset_vectype);
}
@@ -3020,7 +3022,7 @@ vect_get_strided_load_store_ops (stmt_vec_info stmt_info,
/* The offset given in GS_INFO can have pointer type, so use the element
type of the vector instead. */
tree offset_type = TREE_TYPE (gs_info->offset);
- tree offset_vectype = get_vectype_for_scalar_type (offset_type);
+ tree offset_vectype = get_vectype_for_scalar_type (loop_vinfo, offset_type);
offset_type = TREE_TYPE (offset_vectype);
/* Calculate X = DR_STEP / SCALE and convert it to the appropriate type. */
@@ -4101,9 +4103,8 @@ vectorizable_simd_clone_call (stmt_vec_info stmt_info,
|| arginfo[i].dt == vect_external_def)
&& bestn->simdclone->args[i].arg_type == SIMD_CLONE_ARG_TYPE_VECTOR)
{
- arginfo[i].vectype
- = get_vectype_for_scalar_type (TREE_TYPE (gimple_call_arg (stmt,
- i)));
+ tree arg_type = TREE_TYPE (gimple_call_arg (stmt, i));
+ arginfo[i].vectype = get_vectype_for_scalar_type (vinfo, arg_type);
if (arginfo[i].vectype == NULL
|| (simd_clone_subparts (arginfo[i].vectype)
> bestn->simdclone->simdlen))
@@ -5466,7 +5467,7 @@ vectorizable_assignment (stmt_vec_info stmt_info, gimple_stmt_iterator *gsi,
either as shift by a scalar or by a vector. */
bool
-vect_supportable_shift (vec_info *, enum tree_code code, tree scalar_type)
+vect_supportable_shift (vec_info *vinfo, enum tree_code code, tree scalar_type)
{
machine_mode vec_mode;
@@ -5474,7 +5475,7 @@ vect_supportable_shift (vec_info *, enum tree_code code, tree scalar_type)
int icode;
tree vectype;
- vectype = get_vectype_for_scalar_type (scalar_type);
+ vectype = get_vectype_for_scalar_type (vinfo, scalar_type);
if (!vectype)
return false;
@@ -9763,7 +9764,7 @@ vect_is_simple_cond (tree cond, vec_info *vinfo,
scalar_type = build_nonstandard_integer_type
(tree_to_uhwi (TYPE_SIZE (TREE_TYPE (vectype))),
TYPE_UNSIGNED (scalar_type));
- *comp_vectype = get_vectype_for_scalar_type (scalar_type);
+ *comp_vectype = get_vectype_for_scalar_type (vinfo, scalar_type);
}
return true;
@@ -10359,7 +10360,7 @@ vectorizable_comparison (stmt_vec_info stmt_info, gimple_stmt_iterator *gsi,
/* Invariant comparison. */
if (!vectype)
{
- vectype = get_vectype_for_scalar_type (TREE_TYPE (rhs1));
+ vectype = get_vectype_for_scalar_type (vinfo, TREE_TYPE (rhs1));
if (maybe_ne (TYPE_VECTOR_SUBPARTS (vectype), nunits))
return false;
}
@@ -11140,7 +11141,7 @@ poly_uint64 current_vector_size;
by the target. */
tree
-get_vectype_for_scalar_type (tree scalar_type)
+get_vectype_for_scalar_type (vec_info *, tree scalar_type)
{
tree vectype;
vectype = get_vectype_for_scalar_type_and_size (scalar_type,
@@ -11157,9 +11158,9 @@ get_vectype_for_scalar_type (tree scalar_type)
of vectors of specified SCALAR_TYPE as supported by target. */
tree
-get_mask_type_for_scalar_type (vec_info *, tree scalar_type)
+get_mask_type_for_scalar_type (vec_info *vinfo, tree scalar_type)
{
- tree vectype = get_vectype_for_scalar_type (scalar_type);
+ tree vectype = get_vectype_for_scalar_type (vinfo, scalar_type);
if (!vectype)
return NULL;
@@ -11853,6 +11854,7 @@ vect_get_vector_types_for_stmt (stmt_vec_info stmt_info,
tree *stmt_vectype_out,
tree *nunits_vectype_out)
{
+ vec_info *vinfo = stmt_info->vinfo;
gimple *stmt = stmt_info->stmt;
*stmt_vectype_out = NULL_TREE;
@@ -11919,7 +11921,7 @@ vect_get_vector_types_for_stmt (stmt_vec_info stmt_info,
if (dump_enabled_p ())
dump_printf_loc (MSG_NOTE, vect_location,
"get vectype for scalar type: %T\n", scalar_type);
- vectype = get_vectype_for_scalar_type (scalar_type);
+ vectype = get_vectype_for_scalar_type (vinfo, scalar_type);
if (!vectype)
return opt_result::failure_at (stmt,
"not vectorized:"
@@ -11952,7 +11954,7 @@ vect_get_vector_types_for_stmt (stmt_vec_info stmt_info,
if (dump_enabled_p ())
dump_printf_loc (MSG_NOTE, vect_location,
"get vectype for scalar type: %T\n", scalar_type);
- nunits_vectype = get_vectype_for_scalar_type (scalar_type);
+ nunits_vectype = get_vectype_for_scalar_type (vinfo, scalar_type);
}
if (!nunits_vectype)
return opt_result::failure_at (stmt,
diff --git a/gcc/tree-vectorizer.h b/gcc/tree-vectorizer.h
index 2dd9ea3..108e704 100644
--- a/gcc/tree-vectorizer.h
+++ b/gcc/tree-vectorizer.h
@@ -1589,7 +1589,7 @@ extern bool vect_can_advance_ivs_p (loop_vec_info);
/* In tree-vect-stmts.c. */
extern poly_uint64 current_vector_size;
-extern tree get_vectype_for_scalar_type (tree);
+extern tree get_vectype_for_scalar_type (vec_info *, tree);
extern tree get_vectype_for_scalar_type_and_size (tree, poly_uint64);
extern tree get_mask_type_for_scalar_type (vec_info *, tree);
extern tree get_same_sized_vectype (tree, tree);