author     Richard Sandiford <richard.sandiford@linaro.org>    2017-08-04 10:42:53 +0000
committer  Richard Sandiford <rsandifo@gcc.gnu.org>            2017-08-04 10:42:53 +0000
commit     62c8a2cf17cd794241c8f978c8fcfc4682ca4315 (patch)
tree       52a7de7b09fb0528bcb735d67d552670ecb519f3 /gcc/tree-vect-data-refs.c
parent     2c515559f9dbe8bace5f68e2fec7600a9edc7c42 (diff)
Pool alignment information for common bases
This patch is a follow-on to the fix for PR81136. The testcase for that
PR shows that we can (correctly) calculate different base alignments
for two data_references but still tell that their misalignments wrt the
vector size are equal. This is because we calculate the base alignments
for each dr individually, without looking at the other drs, and in
general the alignment we calculate is only guaranteed if the dr's DR_REF
actually occurs.
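To make that concrete, consider a situation along the following lines (an illustrative sketch only, not the actual pr81136 testcase; the names a, b, foo and bar are invented for the illustration):

  struct __attribute__((aligned (32))) s {
    char misaligner;
    int foo[100];
    int bar[100];
  } *a;

  void
  f (int n)
  {
    int *b = a->foo;
    for (int i = 0; i < n; i++)
      a->bar[i] = b[i];
  }

Here the write through a->bar[i] lets us derive a 32-byte base alignment from the type of *a, while the read through the plain int pointer b only guarantees int alignment on its own, even though both references are ultimately anchored on the same underlying object.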
This is working as designed, but it does expose a missed opportunity.
We know that if a vectorised loop is reached, all statements in that
loop execute at least once, so it should be safe to pool the alignment
information for all the statements we're vectorising. The only catch is
that DR_REFs for masked loads and stores only occur if the mask value is
nonzero. For example, in:
  struct s __attribute__((aligned(32))) {
    int misaligner;
    int array[N];
  };

  int *ptr;
  for (int i = 0; i < n; ++i)
    ptr[i] = c[i] ? ((struct s *) (ptr - 1))->array[i] : 0;
we can only guarantee that ptr points to a "struct s" if at least
one c[i] is true.
This patch adds a DR_IS_CONDITIONAL_IN_STMT flag to record whether
the DR_REF is guaranteed to occur every time that the statement
executes to completion. It then pools the alignment information
for references that aren't conditional in this sense.
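In data-structure terms, the pooling boils down to a map from base address to the strongest base-alignment guarantee seen among the unconditional references, with lookups taking the maximum of the pooled and the locally computed alignment. The sketch below illustrates that idea using standard C++ containers rather than GCC's hash_map and innermost_loop_behavior types; every name in it is invented for the illustration.

  #include <string>
  #include <unordered_map>

  /* Simplified stand-in for the per-reference alignment data that GCC
     keeps in innermost_loop_behavior.  */
  struct base_align_info
  {
    unsigned int base_alignment;     /* alignment of the base, in bytes */
    unsigned int base_misalignment;  /* misalignment wrt that alignment */
  };

  /* The pool: one entry per base address (keyed by a string here,
     purely for illustration).  */
  typedef std::unordered_map<std::string, base_align_info> base_alignment_pool;

  /* Record INFO for BASE, keeping whichever guarantee is stronger,
     in the same way that vect_record_base_alignment does for
     unconditional references.  */
  static void
  record_base_alignment (base_alignment_pool &pool, const std::string &base,
                         const base_align_info &info)
  {
    auto insertion = pool.emplace (base, info);
    if (!insertion.second
        && insertion.first->second.base_alignment < info.base_alignment)
      insertion.first->second = info;
  }

  /* Return the stronger of the pooled guarantee for BASE (if any) and
     the locally computed INFO, mirroring the new lookup in
     vect_compute_data_ref_alignment.  */
  static base_align_info
  strongest_base_alignment (const base_alignment_pool &pool,
                            const std::string &base, base_align_info info)
  {
    auto it = pool.find (base);
    if (it != pool.end () && info.base_alignment < it->second.base_alignment)
      info = it->second;
    return info;
  }

Conditional references (those with DR_IS_CONDITIONAL_IN_STMT set) simply never reach the recording step, which is all that is needed to keep masked loads and stores from polluting the pool.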
2017-08-04 Richard Sandiford <richard.sandiford@linaro.org>
gcc/
	PR tree-optimization/81136
	* tree-vectorizer.h: Include tree-hash-traits.h.
	(vec_base_alignments): New typedef.
	(vec_info): Add a base_alignments field.
	(vect_record_base_alignments): Declare.
	* tree-data-ref.h (data_reference): Add an is_conditional_in_stmt
	field.
	(DR_IS_CONDITIONAL_IN_STMT): New macro.
	(create_data_ref): Add an is_conditional_in_stmt argument.
	* tree-data-ref.c (create_data_ref): Likewise.  Use it to initialize
	the is_conditional_in_stmt field.
	(data_ref_loc): Add an is_conditional_in_stmt field.
	(get_references_in_stmt): Set the is_conditional_in_stmt field.
	(find_data_references_in_stmt): Update call to create_data_ref.
	(graphite_find_data_references_in_stmt): Likewise.
	* tree-ssa-loop-prefetch.c (determine_loop_nest_reuse): Likewise.
	* tree-vect-data-refs.c (vect_analyze_data_refs): Likewise.
	(vect_record_base_alignment): New function.
	(vect_record_base_alignments): Likewise.
	(vect_compute_data_ref_alignment): Adjust base_addr and aligned_to
	for nested statements even if we fail to compute a misalignment.
	Use pooled base alignments for unconditional references.
	(vect_find_same_alignment_drs): Compare base addresses instead
	of base objects.
	(vect_analyze_data_refs_alignment): Call vect_record_base_alignments.
	* tree-vect-slp.c (vect_slp_analyze_bb_1): Likewise.

gcc/testsuite/
	PR tree-optimization/81136
	* gcc.dg/vect/pr81136.c: Add scan test.
From-SVN: r250870
Diffstat (limited to 'gcc/tree-vect-data-refs.c')
-rw-r--r--   gcc/tree-vect-data-refs.c   80
1 file changed, 77 insertions, 3 deletions
diff --git a/gcc/tree-vect-data-refs.c b/gcc/tree-vect-data-refs.c
index a91e304..60f2539 100644
--- a/gcc/tree-vect-data-refs.c
+++ b/gcc/tree-vect-data-refs.c
@@ -708,6 +708,69 @@ vect_slp_analyze_instance_dependence (slp_instance instance)
   return res;
 }
 
+/* Record in VINFO the base alignment guarantee given by DRB.  STMT is
+   the statement that contains DRB, which is useful for recording in the
+   dump file.  */
+
+static void
+vect_record_base_alignment (vec_info *vinfo, gimple *stmt,
+                            innermost_loop_behavior *drb)
+{
+  bool existed;
+  innermost_loop_behavior *&entry
+    = vinfo->base_alignments.get_or_insert (drb->base_address, &existed);
+  if (!existed || entry->base_alignment < drb->base_alignment)
+    {
+      entry = drb;
+      if (dump_enabled_p ())
+        {
+          dump_printf_loc (MSG_NOTE, vect_location,
+                           "recording new base alignment for ");
+          dump_generic_expr (MSG_NOTE, TDF_SLIM, drb->base_address);
+          dump_printf (MSG_NOTE, "\n");
+          dump_printf_loc (MSG_NOTE, vect_location,
+                           " alignment: %d\n", drb->base_alignment);
+          dump_printf_loc (MSG_NOTE, vect_location,
+                           " misalignment: %d\n", drb->base_misalignment);
+          dump_printf_loc (MSG_NOTE, vect_location,
+                           " based on: ");
+          dump_gimple_stmt (MSG_NOTE, TDF_SLIM, stmt, 0);
+        }
+    }
+}
+
+/* If the region we're going to vectorize is reached, all unconditional
+   data references occur at least once.  We can therefore pool the base
+   alignment guarantees from each unconditional reference.  Do this by
+   going through all the data references in VINFO and checking whether
+   the containing statement makes the reference unconditionally.  If so,
+   record the alignment of the base address in VINFO so that it can be
+   used for all other references with the same base.  */
+
+void
+vect_record_base_alignments (vec_info *vinfo)
+{
+  loop_vec_info loop_vinfo = dyn_cast <loop_vec_info> (vinfo);
+  struct loop *loop = loop_vinfo ? LOOP_VINFO_LOOP (loop_vinfo) : NULL;
+  data_reference *dr;
+  unsigned int i;
+  FOR_EACH_VEC_ELT (vinfo->datarefs, i, dr)
+    if (!DR_IS_CONDITIONAL_IN_STMT (dr))
+      {
+        gimple *stmt = DR_STMT (dr);
+        vect_record_base_alignment (vinfo, stmt, &DR_INNERMOST (dr));
+
+        /* If DR is nested in the loop that is being vectorized, we can also
+           record the alignment of the base wrt the outer loop.  */
+        if (loop && nested_in_vect_loop_p (loop, stmt))
+          {
+            stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
+            vect_record_base_alignment
+              (vinfo, stmt, &STMT_VINFO_DR_WRT_VEC_LOOP (stmt_info));
+          }
+      }
+}
+
 /* Function vect_compute_data_ref_alignment
 
    Compute the misalignment of the data reference DR.
@@ -725,6 +788,7 @@ vect_compute_data_ref_alignment (struct data_reference *dr)
 {
   gimple *stmt = DR_STMT (dr);
   stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
+  vec_base_alignments *base_alignments = &stmt_info->vinfo->base_alignments;
   loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_info);
   struct loop *loop = NULL;
   tree ref = DR_REF (dr);
@@ -793,6 +857,15 @@ vect_compute_data_ref_alignment (struct data_reference *dr)
   unsigned int base_misalignment = drb->base_misalignment;
   unsigned HOST_WIDE_INT vector_alignment = TYPE_ALIGN_UNIT (vectype);
 
+  /* Calculate the maximum of the pooled base address alignment and the
+     alignment that we can compute for DR itself.  */
+  innermost_loop_behavior **entry = base_alignments->get (drb->base_address);
+  if (entry && base_alignment < (*entry)->base_alignment)
+    {
+      base_alignment = (*entry)->base_alignment;
+      base_misalignment = (*entry)->base_misalignment;
+    }
+
   if (drb->offset_alignment < vector_alignment
       || !step_preserves_misalignment_p
       /* We need to know whether the step wrt the vectorized loop is
@@ -2113,8 +2186,7 @@ vect_find_same_alignment_drs (struct data_dependence_relation *ddr)
   if (dra == drb)
     return;
 
-  if (!operand_equal_p (DR_BASE_OBJECT (dra), DR_BASE_OBJECT (drb),
-                        OEP_ADDRESS_OF)
+  if (!operand_equal_p (DR_BASE_ADDRESS (dra), DR_BASE_ADDRESS (drb), 0)
       || !operand_equal_p (DR_OFFSET (dra), DR_OFFSET (drb), 0)
       || !operand_equal_p (DR_STEP (dra), DR_STEP (drb), 0))
     return;
@@ -2172,6 +2244,7 @@ vect_analyze_data_refs_alignment (loop_vec_info vinfo)
   vec<data_reference_p> datarefs = vinfo->datarefs;
   struct data_reference *dr;
 
+  vect_record_base_alignments (vinfo);
   FOR_EACH_VEC_ELT (datarefs, i, dr)
     {
       stmt_vec_info stmt_info = vinfo_for_stmt (DR_STMT (dr));
@@ -3437,7 +3510,8 @@ again:
       {
         struct data_reference *newdr
           = create_data_ref (NULL, loop_containing_stmt (stmt),
-                             DR_REF (dr), stmt, maybe_scatter ? false : true);
+                             DR_REF (dr), stmt, !maybe_scatter,
+                             DR_IS_CONDITIONAL_IN_STMT (dr));
         gcc_assert (newdr != NULL && DR_REF (newdr));
         if (DR_BASE_ADDRESS (newdr)
             && DR_OFFSET (newdr)