author     Richard Biener <rguenther@suse.de>    2017-03-27 08:07:49 +0000
committer  Richard Biener <rguenth@gcc.gnu.org>  2017-03-27 08:07:49 +0000
commit     79f512ffebee22885684ff63bc3d66e7e6db9b4b
tree       e2509265db26833fc9b5658703ccb50c1e0e2dc3
parent     672d9f8eab3e81bd80098fc5b16eac116eec7658
re PR tree-optimization/80170 (SLP vectorization creates aligned access)
2017-03-27 Richard Biener <rguenther@suse.de>
PR tree-optimization/80170
* tree-vect-data-refs.c (vect_compute_data_ref_alignment): Make
sure DR/SCEV didn't fold in constants we do not see when looking
at the reference base alignment.
* gcc.dg/pr80170.c: New testcase.
From-SVN: r246491
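
PR 80170 is an SLP wrong-code issue: starting from the address of the 'misaligned' member (offset 8) of the 16-byte-aligned struct B, the testcase reconstructs a pointer to the enclosing struct and stores to two adjacent pointer fields, and the SLP vectorizer merged those stores into a single vector store marked as aligned even though the address is only 8-byte aligned. The committed gcc.dg/pr80170.c (in the diff below) pins the failing GIMPLE down with -fgimple. For illustration only, here is a plain-C sketch of the same container-of access pattern; the names are invented here and, unlike the -fgimple test, it is not guaranteed to reach the vectorizer in exactly the problematic form.

/* Illustrative only -- not part of the commit.  A plain-C version of the
   access pattern in gcc.dg/pr80170.c: recover the enclosing, 16-byte
   aligned struct B from the address of its 'misaligned' member and store
   two adjacent pointers through it.  Before r246491 the SLP vectorizer
   could merge the two stores into one vector store and wrongly assume
   16-byte alignment for it.  */
#include <stddef.h>

struct A { void *a; void *b; };

struct __attribute__((aligned(16))) B
{
  void *pad;
  void *misaligned;   /* offset 8 on 64-bit targets: not 16-byte aligned */
  void *pad2;
  struct A a;
};

__attribute__((noclone, noinline))
static void
null_a (void *misaligned_ptr)
{
  /* container_of-style arithmetic: this constant adjustment is what
     DR/SCEV folds into the base address in the GIMPLE testcase.  */
  struct B *b = (struct B *) ((char *) misaligned_ptr
                              - offsetof (struct B, misaligned));
  b->a.a = 0;
  b->a.b = 0;
}

int
main (void)
{
  struct B b;
  null_a (&b.misaligned);
  return 0;
}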
-rw-r--r--  gcc/ChangeLog                   |  7
-rw-r--r--  gcc/testsuite/ChangeLog         |  5
-rw-r--r--  gcc/testsuite/gcc.dg/pr80170.c  | 42
-rw-r--r--  gcc/tree-vect-data-refs.c       | 15
4 files changed, 66 insertions, 3 deletions
diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index 3ef3016..8dcd897 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,5 +1,12 @@
 2017-03-27  Richard Biener  <rguenther@suse.de>
 
+	PR tree-optimization/80170
+	* tree-vect-data-refs.c (vect_compute_data_ref_alignment): Make
+	sure DR/SCEV didnt fold in constants we do not see when looking
+	at the reference base alignment.
+
+2017-03-27  Richard Biener  <rguenther@suse.de>
+
 	PR middle-end/80171
 	* gimple-fold.c (fold_ctor_reference): Properly guard against
 	NULL return value from canonicalize_constructor_val.
diff --git a/gcc/testsuite/ChangeLog b/gcc/testsuite/ChangeLog
index 65c8310..0b04afe 100644
--- a/gcc/testsuite/ChangeLog
+++ b/gcc/testsuite/ChangeLog
@@ -1,5 +1,10 @@
 2017-03-27  Richard Biener  <rguenther@suse.de>
 
+	PR tree-optimization/80170
+	* gcc.dg/pr80170.c: New testcase.
+
+2017-03-27  Richard Biener  <rguenther@suse.de>
+
 	PR middle-end/80171
 	* g++.dg/torture/pr80171.C: New testcase.
 
diff --git a/gcc/testsuite/gcc.dg/pr80170.c b/gcc/testsuite/gcc.dg/pr80170.c
new file mode 100644
index 0000000..5ffd866
--- /dev/null
+++ b/gcc/testsuite/gcc.dg/pr80170.c
@@ -0,0 +1,42 @@
+/* { dg-do run } */
+/* { dg-options "-fgimple -O2 -ftree-slp-vectorize" } */
+
+struct A
+{
+  void * a;
+  void * b;
+};
+
+struct __attribute__((aligned(16))) B
+{
+  void * pad;
+  void * misaligned;
+  void * pad2;
+
+  struct A a;
+};
+
+__attribute__((noclone, noinline))
+void __GIMPLE (startwith("slp"))
+NullB (void * misalignedPtr)
+{
+  struct B * b;
+
+  bb_2:
+#if __SIZEOF_LONG__ == 8
+  b_2 = misalignedPtr_1(D) + 18446744073709551608ul;
+#else
+  b_2 = misalignedPtr_1(D) + 4294967292ul;
+#endif
+  __MEM <struct B> (b_2).a.a = _Literal (void *) 0;
+  __MEM <struct B> (b_2).a.b = _Literal (void *) 0;
+  return;
+
+}
+
+int main()
+{
+  struct B b;
+  NullB (&b.misaligned);
+  return 0;
+}
diff --git a/gcc/tree-vect-data-refs.c b/gcc/tree-vect-data-refs.c
index 29164d1..3d1d7e7 100644
--- a/gcc/tree-vect-data-refs.c
+++ b/gcc/tree-vect-data-refs.c
@@ -779,7 +779,7 @@ vect_compute_data_ref_alignment (struct data_reference *dr)
   base = ref;
   while (handled_component_p (base))
     base = TREE_OPERAND (base, 0);
-  unsigned int base_alignment;
+  unsigned int base_alignment = 0;
   unsigned HOST_WIDE_INT base_bitpos;
   get_object_alignment_1 (base, &base_alignment, &base_bitpos);
   /* As data-ref analysis strips the MEM_REF down to its base operand
@@ -788,8 +788,17 @@ vect_compute_data_ref_alignment (struct data_reference *dr)
      DR_BASE_ADDRESS.  */
   if (TREE_CODE (base) == MEM_REF)
     {
-      base_bitpos -= mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
-      base_bitpos &= (base_alignment - 1);
+      /* Note all this only works if DR_BASE_ADDRESS is the same as
+	 MEM_REF operand zero, otherwise DR/SCEV analysis might have factored
+	 in other offsets.  We need to rework DR to compute the alingment
+	 of DR_BASE_ADDRESS as long as all information is still available.  */
+      if (operand_equal_p (TREE_OPERAND (base, 0), base_addr, 0))
+	{
+	  base_bitpos -= mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
+	  base_bitpos &= (base_alignment - 1);
+	}
+      else
+	base_bitpos = BITS_PER_UNIT;
     }
   if (base_bitpos != 0)
     base_alignment = base_bitpos & -base_bitpos;
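
The core of the fix in vect_compute_data_ref_alignment: get_object_alignment_1 reports the alignment and the bit offset (base_bitpos) of the MEM_REF base object, and the MEM_REF's constant offset may be folded back out of base_bitpos only when DR_BASE_ADDRESS really is the MEM_REF's address operand. If DR/SCEV analysis has factored further constant offsets into the base address, those are no longer visible in the MEM_REF, so the patch falls back to base_bitpos = BITS_PER_UNIT, which degrades the derived base alignment to a single byte. The alignment arithmetic itself reduces to "lowest set bit of the offset"; the standalone sketch below (not GCC code, names are mine) mirrors it with all quantities in bits, as in get_object_alignment_1.

/* Standalone illustration of the alignment arithmetic used above; this is
   not GCC code, just a model of the last few statements of the hunk.  All
   values are in bits, matching get_object_alignment_1.  */
#include <stdio.h>

static unsigned int
derived_base_alignment (unsigned int base_alignment,
                        unsigned long base_bitpos)
{
  /* Mirror "base_bitpos &= (base_alignment - 1)": only the offset modulo
     the known alignment matters.  */
  base_bitpos &= (base_alignment - 1);
  if (base_bitpos != 0)
    /* Mirror "base_alignment = base_bitpos & -base_bitpos": the guaranteed
       alignment of the reference is the lowest set bit of its offset.  */
    return (unsigned int) (base_bitpos & -base_bitpos);
  return base_alignment;
}

int
main (void)
{
  /* 128-bit (16-byte) aligned base, reference 64 bits past it: only
     8-byte alignment may be assumed -- the PR 80170 situation.  */
  printf ("%u\n", derived_base_alignment (128, 64) / 8);   /* prints 8 */
  /* Offset 0 keeps the full base alignment.  */
  printf ("%u\n", derived_base_alignment (128, 0) / 8);    /* prints 16 */
  /* The conservative fallback base_bitpos = BITS_PER_UNIT yields plain
     byte alignment.  */
  printf ("%u\n", derived_base_alignment (128, 8) / 8);    /* prints 1 */
  return 0;
}

Falling back to byte alignment is deliberately conservative: the vectorizer can still vectorize the accesses, it just has to use unaligned accesses or peeling instead of assuming vector alignment.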