author		Eric Botcazou <ebotcazou@gcc.gnu.org>	2020-07-09 00:13:50 +0200
committer	Eric Botcazou <ebotcazou@gcc.gnu.org>	2020-07-09 00:59:59 +0200
commit		b541b871135cb8f261d079006c79698a82e3594d (patch)
tree		ea67db21fcba13c246749cf5e8eed64e59f77442 /gcc/tree-ssa-sccvn.c
parent		a8b522311beef5e02de15427e924752ea02def2a (diff)
Make memory copy functions scalar storage order barriers
This addresses the issue raised about the use of memory copy functions
to toggle the scalar storage order.  Recall that you cannot (the compiler
errors out) take the address of a scalar stored in reverse order, but you
can take the address of the enclosing aggregate, which means that you can
also pass it to the memory copy functions.  In this case, the optimizer
may rewrite the copy into a scalar copy, which is a no-no.

gcc/c-family/ChangeLog:
	* c.opt (Wscalar-storage-order): Add explicit variable.

gcc/c/ChangeLog:
	* c-typeck.c (convert_for_assignment): If -Wscalar-storage-order
	is set, warn for conversion between pointers that point to
	incompatible scalar storage orders.

gcc/ChangeLog:
	* gimple-fold.c (gimple_fold_builtin_memory_op): Do not fold if
	either type has reverse scalar storage order.
	* tree-ssa-sccvn.c (vn_reference_lookup_3): Do not propagate
	through a memory copy if either type has reverse scalar storage
	order.

gcc/testsuite/ChangeLog:
	* gcc.dg/sso-11.c: New test.
	* gcc.dg/sso/sso.exp: Pass -Wno-scalar-storage-order.
	* gcc.dg/sso/memcpy-1.c: New test.
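As a hedged illustration of the pattern being fixed (all type and function
names below are hypothetical, not taken from the patch or its testcases):
the reverse-order scalar's address cannot be taken directly, but the
enclosing aggregate's address can reach memcpy, and folding that call into
a plain scalar assignment would drop the implied byte swap.

    #include <string.h>

    /* Aggregate whose scalars are stored in big-endian order regardless
       of the target's native endianness.  */
    struct __attribute__((scalar_storage_order("big-endian"))) be_data {
      unsigned int value;
    };

    /* Same layout, but in the native storage order.  */
    struct native_data {
      unsigned int value;
    };

    unsigned int
    load_be (const struct be_data *src)
    {
      struct native_data tmp;

      /* Taking &src->value is rejected by the compiler, but the address
         of the whole aggregate is allowed and can reach memcpy.  The call
         is a storage order barrier: rewriting it into
         "tmp.value = src->value" would drop the byte swap, which is
         exactly the fold this commit disables in
         gimple_fold_builtin_memory_op.  */
      memcpy (&tmp, src, sizeof (tmp));
      return tmp.value;
    }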
Diffstat (limited to 'gcc/tree-ssa-sccvn.c')
-rw-r--r--	gcc/tree-ssa-sccvn.c	12
1 file changed, 10 insertions(+), 2 deletions(-)
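The convert_for_assignment change listed in the ChangeLog also surfaces
such calls at compile time.  A hedged sketch of the new diagnostic
(hypothetical names; the exact warning text is not reproduced here):
converting a pointer to a reverse-storage-order aggregate into an
ordinary pointer type, e.g. when passing it to a function taking void *,
now warns under -Wscalar-storage-order, which is why sso.exp has to pass
-Wno-scalar-storage-order for the pre-existing tests.

    struct __attribute__((scalar_storage_order("big-endian"))) be_buf {
      int words[4];
    };

    extern void consume (const void *);

    void
    pass_reverse_sso (const struct be_buf *b)
    {
      /* The pointed-to storage orders are incompatible (reverse order
         on one side, native order on the other), so this implicit
         argument conversion now draws a warning.  */
      consume (b);
    }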
diff --git a/gcc/tree-ssa-sccvn.c b/gcc/tree-ssa-sccvn.c
index 4b3f31c..e269f78 100644
--- a/gcc/tree-ssa-sccvn.c
+++ b/gcc/tree-ssa-sccvn.c
@@ -3224,8 +3224,10 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_,
 	  return NULL;
 	}
 
-  /* 6) For memcpy copies translate the reference through them if
-     the copy kills ref.  */
+  /* 6) For memcpy copies translate the reference through them if the copy
+     kills ref.  But we cannot (easily) do this translation if the memcpy is
+     a storage order barrier, i.e. is equivalent to a VIEW_CONVERT_EXPR that
+     can modify the storage order of objects (see storage_order_barrier_p).  */
   else if (data->vn_walk_kind == VN_WALKREWRITE
 	   && is_gimple_reg_type (vr->type)
 	   /* ??? Handle BCOPY as well.  */
@@ -3275,6 +3277,9 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_,
 	}
       if (TREE_CODE (lhs) == ADDR_EXPR)
 	{
+	  if (AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (lhs)))
+	      && TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (TREE_TYPE (lhs))))
+	    return (void *)-1;
 	  tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
						    &lhs_offset);
 	  if (!tem)
@@ -3303,6 +3308,9 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_,
 	rhs = vn_valueize (rhs);
       if (TREE_CODE (rhs) == ADDR_EXPR)
 	{
+	  if (AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (rhs)))
+	      && TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (TREE_TYPE (rhs))))
+	    return (void *)-1;
 	  tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
						    &rhs_offset);
 	  if (!tem)
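The guard added in both hunks stops the alias walker before it translates
a load through a storage-order-changing copy.  A hedged sketch of the
miscompile this prevents (a hypothetical test in the spirit of
gcc.dg/sso/memcpy-1.c, not copied from it; values assume a little-endian
target):

    typedef struct __attribute__((scalar_storage_order("big-endian"))) {
      unsigned int x;
    } be_t;

    typedef struct {
      unsigned int x;
    } native_t;

    unsigned int
    look_through (void)
    {
      be_t s = { 0x12345678u };  /* stored byte-swapped on little-endian */
      native_t d;

      __builtin_memcpy (&d, &s, sizeof (d));

      /* On a little-endian target this must yield 0x78563412.  Translating
         the load of d.x through the memcpy into a load of s.x would wrongly
         yield 0x12345678; vn_reference_lookup_3 now answers (void *)-1 for
         such copies instead of translating through them.  */
      return d.x;
    }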