author    Richard Biener <rguenther@suse.de>  2025-09-12 14:15:59 +0200
committer Richard Biener <rguenther@suse.de>  2025-09-14 11:05:42 +0200
commit    57c97de978332fa5683393b38a2db77b78bf0e2c (patch)
tree      3385622ea9b756307e31dc9e1bda5bc1bec66325
parent    80f9440b5ecb1c3ab943b5c862f84e06fbeabd89 (diff)
Avoid VMAT_ELEMENTWISE for negative stride SLP
The following makes us always use VMAT_STRIDED_SLP for negative stride
multi-element accesses.  That handles falling back to single element
accesses transparently.

	* tree-vect-stmts.cc (get_load_store_type): Use VMAT_STRIDED_SLP
	for negative stride accesses when VMAT_CONTIGUOUS_REVERSE isn't
	applicable.
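As an illustrative sketch only (not taken from the commit or its
testsuite), a loop of the following shape contains a load group walked
with a negative stride; the function is hypothetical.  With, say,
4-element vectors, nunits (4) is not a multiple of the group size (3),
which is the situation that previously forced VMAT_ELEMENTWISE:

/* Hypothetical testcase: an SLP group of three contiguous loads whose
   base address moves by -3 elements per iteration, i.e. a negative
   stride between group instances.  */
void
reverse_xyz (float *out, const float *in, int n)
{
  for (int i = 0; i < n; i++)
    {
      out[3 * i + 0] = in[-3 * i + 0];
      out[3 * i + 1] = in[-3 * i + 1];
      out[3 * i + 2] = in[-3 * i + 2];
    }
}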
 gcc/tree-vect-stmts.cc | 14 +++++---------
 1 file changed, 5 insertions(+), 9 deletions(-)
diff --git a/gcc/tree-vect-stmts.cc b/gcc/tree-vect-stmts.cc
index f78acaf..852bc40 100644
--- a/gcc/tree-vect-stmts.cc
+++ b/gcc/tree-vect-stmts.cc
@@ -2026,6 +2026,7 @@ get_load_store_type (vec_info *vinfo, stmt_vec_info stmt_info,
without permutation. */
if (! SLP_TREE_LOAD_PERMUTATION (slp_node).exists ())
first_dr_info = STMT_VINFO_DR_INFO (SLP_TREE_SCALAR_STMTS (slp_node)[0]);
+
if (STMT_VINFO_STRIDED_P (first_stmt_info))
/* Try to use consecutive accesses of as many elements as possible,
separated by the stride, until we have a complete vector.
@@ -2089,15 +2090,10 @@ get_load_store_type (vec_info *vinfo, stmt_vec_info stmt_info,
(vinfo, stmt_info, vectype, vls_type, 1,
&neg_ldst_offset);
else
- {
- /* Try to use consecutive accesses of DR_GROUP_SIZE elements,
- separated by the stride, until we have a complete vector.
- Fall back to scalar accesses if that isn't possible. */
- if (multiple_p (nunits, group_size))
- *memory_access_type = VMAT_STRIDED_SLP;
- else
- *memory_access_type = VMAT_ELEMENTWISE;
- }
+ /* We can fall back to VMAT_STRIDED_SLP since that does
+ not care whether the stride between the group instances
+ is positive or negative. */
+ *memory_access_type = VMAT_STRIDED_SLP;
}
else if (cmp == 0 && loop_vinfo)
{
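A minimal sketch of the resulting decision, written as free-standing C
rather than the vectorizer's actual poly-int code; the function name
and the old_behavior flag are invented here purely for contrast:

/* Contrast the old and new classification of a negative-stride
   grouped access once VMAT_CONTIGUOUS_REVERSE has been ruled out.  */
enum vmat { VMAT_ELEMENTWISE, VMAT_STRIDED_SLP };

static enum vmat
classify_negative_stride (unsigned nunits, unsigned group_size,
			  int old_behavior)
{
  if (old_behavior && nunits % group_size != 0)
    /* Old: no whole number of group instances fills a vector, so the
       access degraded to scalar element accesses.  */
    return VMAT_ELEMENTWISE;
  /* New: always strided SLP; it does not care whether the stride
     between group instances is positive or negative and falls back to
     single-element accesses internally when needed.  */
  return VMAT_STRIDED_SLP;
}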