path: root/gcc/tree-vect-loop.cc
author     Tamar Christina <tamar.christina@arm.com>  2024-01-09 13:35:49 +0000
committer  Tamar Christina <tamar.christina@arm.com>  2024-01-09 13:38:04 +0000
commit     80bb94e88faaadd4d6f008fececb46214dc89e9f (patch)
tree       5f7cfae6fd97b13a3215cc9748b447d7047ee497 /gcc/tree-vect-loop.cc
parent     e3632a18d1e0b94b4c7b99a512b19c830ed3b228 (diff)
middle-end: check if target can do extract first for early breaks [PR113199]
I was generating the vector reverse mask without checking whether the target
actually supports such an operation.  This patch changes the code so that,
when the bitstart is 0, we use a BIT_FIELD_REF to extract the first element
instead, since that is supported by all targets.

This is fine for now, since the masks always come from whilelo.  But in the
future, when masks can come from other sources, we will need the old code
back.

gcc/ChangeLog:

	PR tree-optimization/113199
	* tree-vect-loop.cc (vectorizable_live_operation_1): Use
	BIT_FIELD_REF.

gcc/testsuite/ChangeLog:

	PR tree-optimization/113199
	* gcc.target/gcn/pr113199.c: New test.
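Illustrative note (not part of the commit): the old path emulated "extract
first" by reverse-permuting the mask and vector and then using EXTRACT_LAST,
which assumed the target supports the reverse permute; the new path only
needs a BIT_FIELD_REF at bit position 0, which every target can do.  A rough
sketch of the GIMPLE the new path emits for a live V4SI value (SSA names and
types here are hypothetical, shown only to indicate the shape of the output):

    /* Take bits [0, 32) of the vector result, i.e. element 0,
       then convert it to the scalar type of the live value.  */
    _first = BIT_FIELD_REF <vect_res.12_27, 32, 0>;
    res_9 = (long int) _first;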
Diffstat (limited to 'gcc/tree-vect-loop.cc')
-rw-r--r--  gcc/tree-vect-loop.cc | 44
1 file changed, 11 insertions(+), 33 deletions(-)
diff --git a/gcc/tree-vect-loop.cc b/gcc/tree-vect-loop.cc
index cfea907..b50b5cf 100644
--- a/gcc/tree-vect-loop.cc
+++ b/gcc/tree-vect-loop.cc
@@ -10607,7 +10607,17 @@ vectorizable_live_operation_1 (loop_vec_info loop_vinfo,
gimple_seq stmts = NULL;
tree new_tree;
- if (LOOP_VINFO_FULLY_WITH_LENGTH_P (loop_vinfo))
+
+ /* If bitstart is 0 then we can use a BIT_FIELD_REF */
+ if (integer_zerop (bitstart))
+ {
+ tree scalar_res = gimple_build (&stmts, BIT_FIELD_REF, TREE_TYPE (vectype),
+ vec_lhs_phi, bitsize, bitstart);
+
+ /* Convert the extracted vector element to the scalar type. */
+ new_tree = gimple_convert (&stmts, lhs_type, scalar_res);
+ }
+ else if (LOOP_VINFO_FULLY_WITH_LENGTH_P (loop_vinfo))
{
/* Emit:
@@ -10633,12 +10643,6 @@ vectorizable_live_operation_1 (loop_vec_info loop_vinfo,
tree last_index = gimple_build (&stmts, PLUS_EXPR, TREE_TYPE (len),
len, bias_minus_one);
- /* This needs to implement extraction of the first index, but not sure
- how the LEN stuff works. At the moment we shouldn't get here since
- there's no LEN support for early breaks. But guard this so there's
- no incorrect codegen. */
- gcc_assert (!LOOP_VINFO_EARLY_BREAKS (loop_vinfo));
-
/* SCALAR_RES = VEC_EXTRACT <VEC_LHS, LEN + BIAS - 1>. */
tree scalar_res
= gimple_build (&stmts, CFN_VEC_EXTRACT, TREE_TYPE (vectype),
@@ -10663,32 +10667,6 @@ vectorizable_live_operation_1 (loop_vec_info loop_vinfo,
&LOOP_VINFO_MASKS (loop_vinfo),
1, vectype, 0);
tree scalar_res;
-
- /* For an inverted control flow with early breaks we want EXTRACT_FIRST
- instead of EXTRACT_LAST. Emulate by reversing the vector and mask. */
- if (restart_loop && LOOP_VINFO_EARLY_BREAKS (loop_vinfo))
- {
- /* First create the permuted mask. */
- tree perm_mask = perm_mask_for_reverse (TREE_TYPE (mask));
- tree perm_dest = copy_ssa_name (mask);
- gimple *perm_stmt
- = gimple_build_assign (perm_dest, VEC_PERM_EXPR, mask,
- mask, perm_mask);
- vect_finish_stmt_generation (loop_vinfo, stmt_info, perm_stmt,
- &gsi);
- mask = perm_dest;
-
- /* Then permute the vector contents. */
- tree perm_elem = perm_mask_for_reverse (vectype);
- perm_dest = copy_ssa_name (vec_lhs_phi);
- perm_stmt
- = gimple_build_assign (perm_dest, VEC_PERM_EXPR, vec_lhs_phi,
- vec_lhs_phi, perm_elem);
- vect_finish_stmt_generation (loop_vinfo, stmt_info, perm_stmt,
- &gsi);
- vec_lhs_phi = perm_dest;
- }
-
gimple_seq_add_seq (&stmts, tem);
scalar_res = gimple_build (&stmts, CFN_EXTRACT_LAST, scalar_type,