aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorAndrew Pinski <apinski@marvell.com>2020-01-16 07:54:51 +0000
committerAndrew Pinski <apinski@marvell.com>2020-01-16 12:34:32 +0000
commit2db99ef7896914bfbca1adc40f6ac4ba8887f040 (patch)
tree80b5d90546ab599c47967b969c77574cba2ea581
parent7c6056d52ef7ea43b503b11a571b316d55f392c7 (diff)
downloadgcc-2db99ef7896914bfbca1adc40f6ac4ba8887f040.zip
gcc-2db99ef7896914bfbca1adc40f6ac4ba8887f040.tar.gz
gcc-2db99ef7896914bfbca1adc40f6ac4ba8887f040.tar.bz2
Fix value numbering dealing with reverse byte order
Hi, while working on the bit-field lowering pass, I came across this bug. The IR looks like: VIEW_CONVERT_EXPR<unsigned long>(var1) = _12; _1 = BIT_FIELD_REF <var1, 64, 0>; where the BIT_FIELD_REF has REF_REVERSE_STORAGE_ORDER set on it and var1's type has TYPE_REVERSE_STORAGE_ORDER set on it. PRE/FRE would decide to propagate _12 into the BIT_FIELD_REF statement, which would produce wrong code. And yes, _12 already has the correct byte order; bit-field lowering removes the implicit byte swaps in the IR and adds them explicitly to make it easier to optimize later on. This patch adds a check for storage_order_barrier_p on the lhs tree, which returns true in the case where we had a reverse storage order with a VIEW_CONVERT_EXPR. ChangeLog: * tree-ssa-sccvn.c (vn_reference_lookup_3): Check lhs for !storage_order_barrier_p.
-rw-r--r--gcc/ChangeLog5
-rw-r--r--gcc/tree-ssa-sccvn.c2
2 files changed, 7 insertions, 0 deletions
diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index bbb7e06..7baf474 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,5 +1,10 @@
2020-01-16 Andrew Pinski <apinski@marvell.com>
+ * tree-ssa-sccvn.c(vn_reference_lookup_3): Check lhs for
+ !storage_order_barrier_p.
+
+2020-01-16 Andrew Pinski <apinski@marvell.com>
+
* sched-int.h (_dep): Add unused bit-field field for the padding.
* sched-deps.c (init_dep_1): Init unused field.
diff --git a/gcc/tree-ssa-sccvn.c b/gcc/tree-ssa-sccvn.c
index 3b27c50..4d13015 100644
--- a/gcc/tree-ssa-sccvn.c
+++ b/gcc/tree-ssa-sccvn.c
@@ -2593,6 +2593,7 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_,
&offset2, &size2, &maxsize2, &reverse);
if (base2
&& !reverse
+ && !storage_order_barrier_p (lhs)
&& known_eq (maxsize2, size2)
&& multiple_p (size2, BITS_PER_UNIT)
&& multiple_p (offset2, BITS_PER_UNIT)
@@ -2695,6 +2696,7 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_,
&offset2, &size2, &maxsize2, &reverse);
tree def_rhs = gimple_assign_rhs1 (def_stmt);
if (!reverse
+ && !storage_order_barrier_p (lhs)
&& known_size_p (maxsize2)
&& known_eq (maxsize2, size2)
&& adjust_offsets_for_equal_base_address (base, &offset,