| author | Prathamesh Kulkarni <prathamesh.kulkarni@linaro.org> | 2019-10-18 05:13:26 +0000 |
|---|---|---|
| committer | Prathamesh Kulkarni <prathamesh3492@gcc.gnu.org> | 2019-10-18 05:13:26 +0000 |
| commit | cc1facefe3b4e3b067d95291a7dba834b830ff18 (patch) | |
| tree | 656904edfbf10a3b87dfff157ab33cc839fa0460 /gcc/tree-vect-loop.c | |
| parent | 4aa255f52588be89a56b2d30ebada235c7bb7c15 (diff) | |
re PR target/86753 (gcc.target/aarch64/sve/vcond_[45].c fail after recent combine patch)
2019-10-18 Prathamesh Kulkarni <prathamesh.kulkarni@linaro.org>
Richard Sandiford <richard.sandiford@arm.com>
PR target/86753
* tree-vectorizer.h (scalar_cond_masked_key): New struct,
and define hashmap traits for it.
(loop_vec_info::scalar_cond_masked_set): New member.
(vect_record_loop_mask): Adjust prototype.
* tree-vectorizer.c (scalar_cond_masked_key::get_cond_ops_from_tree):
Implement method.
* tree-vect-loop.c (vectorizable_reduction): Pass NULL as last arg to
vect_record_loop_mask.
(vectorizable_live_operation): Likewise.
(vect_record_loop_mask): New param scalar_mask. Add entry for
cond, loop_mask to scalar_cond_masked_set if scalar_mask is non-NULL.
* tree-vect-stmts.c (check_load_store_masking): New param scalar_mask.
Pass it as last arg to vect_record_loop_mask.
(vectorizable_call): Pass scalar_mask as last arg to
vect_record_loop_mask.
(vectorizable_store): Likewise.
(vectorizable_load): Likewise.
(vectorizable_condition): Check whether another part of the vectorized
code applies loop_mask to the condition or to its inverse, and if so,
apply loop_mask to the result of the vector comparison (an illustrative
source loop follows the commit trailers below).
testsuite/
* gcc.target/aarch64/sve/cond_cnot_2.c: Remove XFAIL
from { scan-assembler-not {\tsel\t} }.
* gcc.target/aarch64/sve/cond_convert_1.c: Adjust to make
only one load conditional.
* gcc.target/aarch64/sve/cond_convert_4.c: Likewise.
* gcc.target/aarch64/sve/cond_unary_2.c: Likewise.
* gcc.target/aarch64/sve/vcond_4.c: Remove XFAILs.
* gcc.target/aarch64/sve/vcond_5.c: Likewise.
Co-Authored-By: Richard Sandiford <richard.sandiford@arm.com>
From-SVN: r277141
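For context, a loop of the general shape this change targets might look like the sketch below. This is an illustrative example only, not one of the testsuite files listed above, and the function name and types are made up. After if-conversion, the load of c[i] is guarded by the comparison b[i] < 0, so a fully-masked SVE loop already computes that comparison ANDed with the loop mask for the masked load. vect_record_loop_mask now records that fact, and vectorizable_condition can reuse the loop-masked comparison for the COND_EXPR instead of emitting an unpredicated compare plus a separate sel against the loop mask; this is why the { scan-assembler-not {\tsel\t} } XFAILs could be removed.

```cpp
/* Illustrative sketch only (not one of the testsuite files above).
   The same scalar condition b[i] < 0 guards the load of c[i] after
   if-conversion and selects between the two arms of the conditional.  */
void
f (int *__restrict a, int *__restrict b, int *__restrict c, int n)
{
  for (int i = 0; i < n; ++i)
    a[i] = b[i] < 0 ? c[i] : 0;
}
```

When such a loop is vectorized as a fully-masked SVE loop (for example with -O3 -march=armv8.2-a+sve), the vector comparison can now be carried out under the loop predicate, so no separate SEL against the loop mask is needed.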
Diffstat (limited to 'gcc/tree-vect-loop.c')
-rw-r--r-- | gcc/tree-vect-loop.c | 16 |
1 file changed, 12 insertions, 4 deletions
diff --git a/gcc/tree-vect-loop.c b/gcc/tree-vect-loop.c
index 80db6ab..10920ac 100644
--- a/gcc/tree-vect-loop.c
+++ b/gcc/tree-vect-loop.c
@@ -6330,7 +6330,7 @@ vectorizable_reduction (stmt_vec_info stmt_info, slp_tree slp_node,
 	}
       else
 	vect_record_loop_mask (loop_vinfo, masks, ncopies * vec_num,
-			       vectype_in);
+			       vectype_in, NULL);
     }
   if (dump_enabled_p ()
       && reduction_type == FOLD_LEFT_REDUCTION)
@@ -7561,7 +7561,7 @@ vectorizable_live_operation (stmt_vec_info stmt_info,
 	  gcc_assert (ncopies == 1 && !slp_node);
 	  vect_record_loop_mask (loop_vinfo, &LOOP_VINFO_MASKS (loop_vinfo),
-				 1, vectype);
+				 1, vectype, NULL);
 	}
     }
   return true;

@@ -7760,11 +7760,12 @@ vect_double_mask_nunits (tree type)

 /* Record that a fully-masked version of LOOP_VINFO would need MASKS to
    contain a sequence of NVECTORS masks that each control a vector of type
-   VECTYPE.  */
+   VECTYPE.  If SCALAR_MASK is nonnull, the fully-masked loop would AND
+   these vector masks with the vector version of SCALAR_MASK.  */

 void
 vect_record_loop_mask (loop_vec_info loop_vinfo, vec_loop_masks *masks,
-		       unsigned int nvectors, tree vectype)
+		       unsigned int nvectors, tree vectype, tree scalar_mask)
 {
   gcc_assert (nvectors != 0);
   if (masks->length () < nvectors)
@@ -7775,6 +7776,13 @@ vect_record_loop_mask (loop_vec_info loop_vinfo, vec_loop_masks *masks,
   unsigned int nscalars_per_iter
     = exact_div (nvectors * TYPE_VECTOR_SUBPARTS (vectype),
		 LOOP_VINFO_VECT_FACTOR (loop_vinfo)).to_constant ();
+
+  if (scalar_mask)
+    {
+      scalar_cond_masked_key cond (scalar_mask, nvectors);
+      loop_vinfo->scalar_cond_masked_set.add (cond);
+    }
+
   if (rgm->max_nscalars_per_iter < nscalars_per_iter)
     {
       rgm->max_nscalars_per_iter = nscalars_per_iter;
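As a rough aid to reading the tree-vectorizer.h changes referenced above (not shown in this diffstat-limited view), the following standalone C++ sketch mimics the bookkeeping that scalar_cond_masked_key and the new loop_vec_info::scalar_cond_masked_set member provide. It deliberately uses std::unordered_set and strings instead of GCC's tree operands and hash_set traits, so the fields and values below are illustrative rather than GCC's actual definitions.

```cpp
/* Standalone analogue of the scalar_cond_masked_set bookkeeping described
   in the ChangeLog above.  Names mirror the GCC ones, but the field types
   and the hashing are illustrative only.  */
#include <cstddef>
#include <functional>
#include <string>
#include <unordered_set>

/* Key describing a scalar comparison that already has the loop mask
   applied: the comparison code, its operands, and the number of vector
   copies the mask controls.  */
struct scalar_cond_masked_key
{
  std::string code;   /* e.g. "lt_expr" */
  std::string op0;    /* stand-ins for GCC tree operands */
  std::string op1;
  unsigned ncopies;

  bool operator== (const scalar_cond_masked_key &other) const
  {
    return code == other.code && op0 == other.op0
	   && op1 == other.op1 && ncopies == other.ncopies;
  }
};

struct scalar_cond_masked_key_hash
{
  std::size_t operator() (const scalar_cond_masked_key &k) const
  {
    std::size_t h = std::hash<std::string> () (k.code + '|' + k.op0 + '|' + k.op1);
    return h ^ (std::hash<unsigned> () (k.ncopies) << 1);
  }
};

/* Analogue of the new loop_vec_info::scalar_cond_masked_set member.  */
using scalar_cond_masked_set_type
  = std::unordered_set<scalar_cond_masked_key, scalar_cond_masked_key_hash>;

int
main ()
{
  scalar_cond_masked_set_type set;

  /* vect_record_loop_mask: a masked load guarded by "b[i] < 0" records the
     scalar condition together with the number of vector copies.  */
  set.insert ({"lt_expr", "b[i]", "0", 2});

  /* vectorizable_condition: before emitting an unpredicated compare plus a
     separate select, check whether the same condition (or its inverse,
     "ge_expr" here) is already computed under the loop mask.  */
  bool masked_compare_available
    = set.count ({"lt_expr", "b[i]", "0", 2})
      || set.count ({"ge_expr", "b[i]", "0", 2});
  return masked_compare_available ? 0 : 1;
}
```

In GCC itself the key is populated from the scalar condition via scalar_cond_masked_key::get_cond_ops_from_tree, per the ChangeLog above, and the set lives on the loop_vec_info so that vectorizable_condition can consult it when vectorizing the COND_EXPR.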