author		Richard Biener <rguenther@suse.de>	2013-04-18 12:57:17 +0000
committer	Richard Biener <rguenth@gcc.gnu.org>	2013-04-18 12:57:17 +0000
commit		08940f33eb4bb87f8944e771189ba48d8c7ca176 (patch)
tree		d2f83ada3798b32cb210ac1543a9fa62b2494525 /gcc
parent		0e0f87d44bfa4c5a2e6547f780e869a8756dca70 (diff)
tree-vect-data-refs.c (vect_analyze_group_access): Properly handle negative step.
2013-04-18  Richard Biener  <rguenther@suse.de>

	* tree-vect-data-refs.c (vect_analyze_group_access): Properly
	handle negative step.  Remove redundant checks.
	(vect_create_data_ref_ptr): Avoid ICEs with non-constant steps.
	* tree-vect-stmts.c (vectorizable_load): Instead of asserting
	for negative step and grouped loads fail to vectorize.

From-SVN: r198054
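For context, a hypothetical reduced loop of the kind this patch is about (an
illustration, not a testcase from the commit): the two loads from b form an
interleaving group of size two, and because i counts downward their common
DR_STEP is negative.  Before this change such input tripped the
gcc_assert (!grouped_load) in vectorizable_load; afterwards the loop is
simply reported as not vectorizable.

/* Hypothetical example, not from the patch: a grouped (interleaved)
   load walked with a negative step.  b[2*i] and b[2*i+1] form a load
   group of size 2; the downward-counting loop gives them a step of
   -2 * sizeof (double).  */
void
foo (double *restrict a, const double *restrict b, int n)
{
  for (int i = n - 1; i >= 0; --i)
    a[i] = b[2 * i] + b[2 * i + 1];
}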
Diffstat (limited to 'gcc')
-rw-r--r--	gcc/ChangeLog             |  8
-rw-r--r--	gcc/tree-vect-data-refs.c | 42
-rw-r--r--	gcc/tree-vect-stmts.c     |  8
3 files changed, 33 insertions(+), 25 deletions(-)
diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index 21e5dbc..77b6a4f 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,3 +1,11 @@
+2013-04-18 Richard Biener <rguenther@suse.de>
+
+ * tree-vect-data-refs.c (vect_analyze_group_access): Properly
+ handle negative step. Remove redundant checks.
+ (vect_create_data_ref_ptr): Avoid ICEs with non-constant steps.
+ * tree-vect-stmts.c (vectorizable_load): Instead of asserting
+ for negative step and grouped loads fail to vectorize.
+
2013-04-18 Steven Bosscher <steven@gcc.gnu.org>
* emit-rtl.c (reset_insn_used_flags): New function.
diff --git a/gcc/tree-vect-data-refs.c b/gcc/tree-vect-data-refs.c
index 1fe5047..9cbc5c7 100644
--- a/gcc/tree-vect-data-refs.c
+++ b/gcc/tree-vect-data-refs.c
@@ -2024,7 +2024,7 @@ vect_analyze_group_access (struct data_reference *dr)
/* For interleaving, GROUPSIZE is STEP counted in elements, i.e., the
size of the interleaving group (including gaps). */
- groupsize = dr_step / type_size;
+ groupsize = absu_hwi (dr_step) / type_size;
/* Not consecutive access is possible only if it is a part of interleaving. */
if (!GROUP_FIRST_ELEMENT (vinfo_for_stmt (stmt)))
@@ -2094,10 +2094,10 @@ vect_analyze_group_access (struct data_reference *dr)
gimple next = GROUP_NEXT_ELEMENT (vinfo_for_stmt (stmt));
struct data_reference *data_ref = dr;
unsigned int count = 1;
- tree next_step;
tree prev_init = DR_INIT (data_ref);
gimple prev = stmt;
- HOST_WIDE_INT diff, count_in_bytes, gaps = 0;
+ HOST_WIDE_INT diff, gaps = 0;
+ unsigned HOST_WIDE_INT count_in_bytes;
while (next)
{
@@ -2126,18 +2126,11 @@ vect_analyze_group_access (struct data_reference *dr)
}
prev = next;
+ data_ref = STMT_VINFO_DATA_REF (vinfo_for_stmt (next));
- /* Check that all the accesses have the same STEP. */
- next_step = DR_STEP (STMT_VINFO_DATA_REF (vinfo_for_stmt (next)));
- if (tree_int_cst_compare (step, next_step))
- {
- if (dump_enabled_p ())
- dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
- "not consecutive access in interleaving");
- return false;
- }
+ /* All group members have the same STEP by construction. */
+ gcc_checking_assert (operand_equal_p (DR_STEP (data_ref), step, 0));
- data_ref = STMT_VINFO_DATA_REF (vinfo_for_stmt (next));
/* Check that the distance between two accesses is equal to the type
size. Otherwise, we have gaps. */
diff = (TREE_INT_CST_LOW (DR_INIT (data_ref))
@@ -2175,7 +2168,8 @@ vect_analyze_group_access (struct data_reference *dr)
/* Check that the size of the interleaving (including gaps) is not
greater than STEP. */
- if (dr_step && dr_step < count_in_bytes + gaps * type_size)
+ if (dr_step != 0
+ && absu_hwi (dr_step) < count_in_bytes + gaps * type_size)
{
if (dump_enabled_p ())
{
@@ -2188,7 +2182,8 @@ vect_analyze_group_access (struct data_reference *dr)
/* Check that the size of the interleaving is equal to STEP for stores,
i.e., that there are no gaps. */
- if (dr_step && dr_step != count_in_bytes)
+ if (dr_step != 0
+ && absu_hwi (dr_step) != count_in_bytes)
{
if (DR_IS_READ (dr))
{
@@ -2208,7 +2203,8 @@ vect_analyze_group_access (struct data_reference *dr)
}
/* Check that STEP is a multiple of type size. */
- if (dr_step && (dr_step % type_size) != 0)
+ if (dr_step != 0
+ && (dr_step % type_size) != 0)
{
if (dump_enabled_p ())
{
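Taken together, the vect_analyze_group_access hunks replace raw comparisons
against the signed step with comparisons against its magnitude (absu_hwi).
A hypothetical plain-C model of the resulting checks, with ordinary integer
types standing in for HOST_WIDE_INT:

/* Hypothetical plain-C model (not GCC code) of the reworked checks in
   vect_analyze_group_access: all size comparisons use the magnitude of
   the step, so a downward (negative-step) group passes the same tests
   as an upward one.  */
static int
group_access_ok (long long dr_step, unsigned long long type_size,
                 unsigned long long count_in_bytes, unsigned long long gaps)
{
  unsigned long long abs_step =
    dr_step < 0 ? -(unsigned long long) dr_step : (unsigned long long) dr_step;

  /* The interleaving (including gaps) must fit within one step...  */
  if (dr_step != 0 && abs_step < count_in_bytes + gaps * type_size)
    return 0;
  /* ... and the step must be a whole number of elements.  */
  if (dr_step != 0 && abs_step % type_size != 0)
    return 0;
  return 1;
}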
@@ -3520,7 +3516,6 @@ vect_create_data_ref_ptr (gimple stmt, tree aggr_type, struct loop *at_loop,
tree aptr;
gimple_stmt_iterator incr_gsi;
bool insert_after;
- bool negative;
tree indx_before_incr, indx_after_incr;
gimple incr;
tree step;
@@ -3550,11 +3545,10 @@ vect_create_data_ref_ptr (gimple stmt, tree aggr_type, struct loop *at_loop,
else
step = DR_STEP (STMT_VINFO_DATA_REF (stmt_info));
- if (tree_int_cst_compare (step, size_zero_node) == 0)
+ if (integer_zerop (step))
*inv_p = true;
else
*inv_p = false;
- negative = tree_int_cst_compare (step, size_zero_node) < 0;
/* Create an expression for the first address accessed by this load
in LOOP. */
@@ -3693,18 +3687,18 @@ vect_create_data_ref_ptr (gimple stmt, tree aggr_type, struct loop *at_loop,
else
{
/* The step of the aggregate pointer is the type size. */
- tree step = TYPE_SIZE_UNIT (aggr_type);
+ tree iv_step = TYPE_SIZE_UNIT (aggr_type);
/* One exception to the above is when the scalar step of the load in
LOOP is zero. In this case the step here is also zero. */
if (*inv_p)
- step = size_zero_node;
- else if (negative)
- step = fold_build1 (NEGATE_EXPR, TREE_TYPE (step), step);
+ iv_step = size_zero_node;
+ else if (tree_int_cst_sgn (step) == -1)
+ iv_step = fold_build1 (NEGATE_EXPR, TREE_TYPE (iv_step), iv_step);
standard_iv_increment_position (loop, &incr_gsi, &insert_after);
create_iv (aggr_ptr_init,
- fold_convert (aggr_ptr_type, step),
+ fold_convert (aggr_ptr_type, iv_step),
aggr_ptr, loop, &incr_gsi, insert_after,
&indx_before_incr, &indx_after_incr);
incr = gsi_stmt (incr_gsi);
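The vect_create_data_ref_ptr hunks fix two things: the invariance test no
longer requires a constant (integer_zerop is safe on any tree, while the old
tree_int_cst_compare calls ICEd on non-constants), and the local IV step gets
its own name so the sign test is only applied once the scalar step is known
to be constant.  A hypothetical plain-C model of the resulting IV-step
choice, with a small struct standing in for GCC's tree:

#include <stdbool.h>

/* Hypothetical plain-C model (not GCC code) of the IV-step choice in
   vect_create_data_ref_ptr after the patch: the step is sign-tested
   only when it is a compile-time constant, so a runtime-variable step
   never reaches a constant-only comparison.  */
struct step { bool is_constant; long long value; };

static long long
choose_iv_step (struct step scalar_step, long long type_size)
{
  /* Models integer_zerop: false for anything but a constant zero.  */
  bool invariant = scalar_step.is_constant && scalar_step.value == 0;

  if (invariant)
    return 0;                /* models iv_step = size_zero_node */
  if (scalar_step.is_constant && scalar_step.value < 0)
    return -type_size;       /* models the NEGATE_EXPR on the type size */
  return type_size;          /* models iv_step = TYPE_SIZE_UNIT (aggr_type) */
}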
diff --git a/gcc/tree-vect-stmts.c b/gcc/tree-vect-stmts.c
index bdd4d64..ca474c1 100644
--- a/gcc/tree-vect-stmts.c
+++ b/gcc/tree-vect-stmts.c
@@ -4465,7 +4465,13 @@ vectorizable_load (gimple stmt, gimple_stmt_iterator *gsi, gimple *vec_stmt,
if (negative)
{
- gcc_assert (!grouped_load);
+ if (grouped_load)
+ {
+ if (dump_enabled_p ())
+ dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
+ "negative step for group load not supported");
+ return false;
+ }
alignment_support_scheme = vect_supportable_dr_alignment (dr, false);
if (alignment_support_scheme != dr_aligned
&& alignment_support_scheme != dr_unaligned_supported)
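With the new bail-out in place, compiling the loop from the first sketch
should produce a missed-optimization note in the vectorizer dump instead of
an ICE.  A hypothetical dejagnu-style testcase (not part of this commit;
flags and directives as commonly used in the GCC testsuite of this era) that
would check for it:

/* Hypothetical testcase, not from the patch: the scan string mirrors
   the dump_printf_loc message added above.  */
/* { dg-do compile } */
/* { dg-options "-O3 -fdump-tree-vect-details" } */

void
foo (double *restrict a, const double *restrict b, int n)
{
  for (int i = n - 1; i >= 0; --i)
    a[i] = b[2 * i] + b[2 * i + 1];
}

/* { dg-final { scan-tree-dump "negative step for group load not supported" "vect" } } */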