Diffstat (limited to 'gcc')
-rw-r--r--  gcc/ChangeLog                              6
-rw-r--r--  gcc/gimple-ssa-store-merging.c            53
-rw-r--r--  gcc/testsuite/ChangeLog                    5
-rw-r--r--  gcc/testsuite/gcc.dg/tree-ssa/pr89475.c  104
-rw-r--r--  gcc/tree-ssa-ccp.c                        29
5 files changed, 190 insertions, 7 deletions
diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index 2604ffa..ff57006 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,3 +1,9 @@
+2019-04-30  Jakub Jelinek  <jakub@redhat.com>
+
+	PR tree-optimization/89475
+	* tree-ssa-ccp.c (evaluate_stmt): Handle BUILT_IN_BSWAP{16,32,64}
+	calls.
+
2019-04-30 Martin Liska <mliska@suse.cz>
PR translation/90274
diff --git a/gcc/gimple-ssa-store-merging.c b/gcc/gimple-ssa-store-merging.c
index 4a8cf6f..5fdff6e 100644
--- a/gcc/gimple-ssa-store-merging.c
+++ b/gcc/gimple-ssa-store-merging.c
@@ -1615,13 +1615,31 @@ encode_tree_to_bitpos (tree expr, unsigned char *ptr, int bitlen, int bitpos,
unsigned int total_bytes)
{
unsigned int first_byte = bitpos / BITS_PER_UNIT;
- tree tmp_int = expr;
bool sub_byte_op_p = ((bitlen % BITS_PER_UNIT)
|| (bitpos % BITS_PER_UNIT)
|| !int_mode_for_size (bitlen, 0).exists ());
+ bool empty_ctor_p
+ = (TREE_CODE (expr) == CONSTRUCTOR
+ && CONSTRUCTOR_NELTS (expr) == 0
+ && TYPE_SIZE_UNIT (TREE_TYPE (expr))
+ && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (expr))));
if (!sub_byte_op_p)
- return native_encode_expr (tmp_int, ptr + first_byte, total_bytes) != 0;
+ {
+ if (first_byte >= total_bytes)
+ return false;
+ total_bytes -= first_byte;
+ if (empty_ctor_p)
+ {
+ unsigned HOST_WIDE_INT rhs_bytes
+ = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
+ if (rhs_bytes > total_bytes)
+ return false;
+ memset (ptr + first_byte, '\0', rhs_bytes);
+ return true;
+ }
+ return native_encode_expr (expr, ptr + first_byte, total_bytes) != 0;
+ }
/* LITTLE-ENDIAN
We are writing a non byte-sized quantity or at a position that is not
@@ -1667,14 +1685,29 @@ encode_tree_to_bitpos (tree expr, unsigned char *ptr, int bitlen, int bitpos,
/* We must be dealing with fixed-size data at this point, since the
total size is also fixed. */
- fixed_size_mode mode = as_a <fixed_size_mode> (TYPE_MODE (TREE_TYPE (expr)));
+ unsigned int byte_size;
+ if (empty_ctor_p)
+ {
+ unsigned HOST_WIDE_INT rhs_bytes
+ = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
+ if (rhs_bytes > total_bytes)
+ return false;
+ byte_size = rhs_bytes;
+ }
+ else
+ {
+ fixed_size_mode mode
+ = as_a <fixed_size_mode> (TYPE_MODE (TREE_TYPE (expr)));
+ byte_size = GET_MODE_SIZE (mode);
+ }
/* Allocate an extra byte so that we have space to shift into. */
- unsigned int byte_size = GET_MODE_SIZE (mode) + 1;
+ byte_size++;
unsigned char *tmpbuf = XALLOCAVEC (unsigned char, byte_size);
memset (tmpbuf, '\0', byte_size);
/* The store detection code should only have allowed constants that are
- accepted by native_encode_expr. */
- if (native_encode_expr (expr, tmpbuf, byte_size - 1) == 0)
+ accepted by native_encode_expr or empty ctors. */
+ if (!empty_ctor_p
+ && native_encode_expr (expr, tmpbuf, byte_size - 1) == 0)
gcc_unreachable ();
/* The native_encode_expr machinery uses TYPE_MODE to determine how many
@@ -4164,7 +4197,8 @@ lhs_valid_for_store_merging_p (tree lhs)
tree_code code = TREE_CODE (lhs);
if (code == ARRAY_REF || code == ARRAY_RANGE_REF || code == MEM_REF
- || code == COMPONENT_REF || code == BIT_FIELD_REF)
+ || code == COMPONENT_REF || code == BIT_FIELD_REF
+ || DECL_P (lhs))
return true;
return false;
@@ -4178,6 +4212,11 @@ static bool
rhs_valid_for_store_merging_p (tree rhs)
{
unsigned HOST_WIDE_INT size;
+ if (TREE_CODE (rhs) == CONSTRUCTOR
+ && CONSTRUCTOR_NELTS (rhs) == 0
+ && TYPE_SIZE_UNIT (TREE_TYPE (rhs))
+ && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (rhs))))
+ return true;
return (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (rhs))).is_constant (&size)
&& native_encode_expr (rhs, NULL, size) != 0);
}
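
The gimple-ssa-store-merging.c hunks above let the pass accept an empty CONSTRUCTOR on the right-hand side (aggregate zero-initialization) and a bare DECL on the left-hand side. A minimal C sketch of the kind of source this is aimed at follows; the struct, variable and function names are made up for illustration, and whether the stores actually get merged on a given target depends on sizes and alignment.

/* Illustrative only: "s = {}" is an empty-CONSTRUCTOR store to the DECL s.
   With the change above it is a valid candidate for store merging, so it
   can be combined with the narrow member stores that follow instead of
   being emitted as a separate clear.  */
struct small { char a, b, c, d; };

void use (struct small *);

void
init_small (void)
{
  struct small s = {};  /* zero-initialization via empty CONSTRUCTOR */
  s.b = 1;              /* narrow stores that may merge with the clear */
  s.d = 2;
  use (&s);
}
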
diff --git a/gcc/testsuite/ChangeLog b/gcc/testsuite/ChangeLog
index eb2599a..b1f2585 100644
--- a/gcc/testsuite/ChangeLog
+++ b/gcc/testsuite/ChangeLog
@@ -1,3 +1,8 @@
+2019-04-30  Jakub Jelinek  <jakub@redhat.com>
+
+	PR tree-optimization/89475
+	* gcc.dg/tree-ssa/pr89475.c: New test.
+
2019-04-30 Bin Cheng <bin.cheng@linux.alibaba.com>
PR tree-optimization/90240
diff --git a/gcc/testsuite/gcc.dg/tree-ssa/pr89475.c b/gcc/testsuite/gcc.dg/tree-ssa/pr89475.c
new file mode 100644
index 0000000..4a84bea
--- /dev/null
+++ b/gcc/testsuite/gcc.dg/tree-ssa/pr89475.c
@@ -0,0 +1,104 @@
+/* PR tree-optimization/89475 */
+/* { dg-do compile { target { ilp32 || lp64 } } } */
+/* { dg-options "-O2 -fdump-tree-optimized" } */
+/* { dg-final { scan-tree-dump-not "link_error " "optimized" } } */
+
+void link_error (void);
+
+unsigned short
+f0 (unsigned short x)
+{
+ x &= 0xaa55;
+ x = __builtin_bswap16 (x);
+ if (x & 0xaa55)
+ link_error ();
+ return x;
+}
+
+unsigned short
+f1 (unsigned short x)
+{
+ x &= 0x55aa;
+ x = __builtin_bswap16 (x);
+ if (x & 0x55aa)
+ link_error ();
+ return x;
+}
+
+unsigned int
+f2 (unsigned int x)
+{
+ x &= 0x55aa5aa5U;
+ x = __builtin_bswap32 (x);
+ if (x & 0x5aa555aaU)
+ link_error ();
+ return x;
+}
+
+unsigned long long int
+f3 (unsigned long long int x)
+{
+ x &= 0x55aa5aa544cc2211ULL;
+ x = __builtin_bswap64 (x);
+ if (x & 0xeedd33bb5aa555aaULL)
+ link_error ();
+ return x;
+}
+
+unsigned short
+f4 (unsigned short x)
+{
+ x = __builtin_bswap32 (x);
+ if (x != 0)
+ link_error ();
+ return x;
+}
+
+unsigned int
+f5 (unsigned int x)
+{
+ x = __builtin_bswap64 (x);
+ if (x != 0)
+ link_error ();
+ return x;
+}
+
+unsigned short
+f6 (unsigned short x)
+{
+ x |= 0xaa55;
+ x = __builtin_bswap16 (x);
+ if ((x | 0xaa55) != 0xffff)
+ link_error ();
+ return x;
+}
+
+unsigned short
+f7 (unsigned short x)
+{
+ x |= 0x55aa;
+ x = __builtin_bswap16 (x);
+ if ((x | 0x55aa) != 0xffff)
+ link_error ();
+ return x;
+}
+
+unsigned int
+f8 (unsigned int x)
+{
+ x |= 0x55aa5aa5U;
+ x = __builtin_bswap32 (x);
+ if ((x | 0x5aa555aaU) != 0xffffffffU)
+ link_error ();
+ return x;
+}
+
+unsigned long long int
+f9 (unsigned long long int x)
+{
+ x |= 0x55aa5aa544cc2211ULL;
+ x = __builtin_bswap64 (x);
+ if ((x | 0xeedd33bb5aa555aaULL) != 0xffffffffffffffffULL)
+ link_error ();
+ return x;
+}
diff --git a/gcc/tree-ssa-ccp.c b/gcc/tree-ssa-ccp.c
index e6bcc21..51b9d9f 100644
--- a/gcc/tree-ssa-ccp.c
+++ b/gcc/tree-ssa-ccp.c
@@ -1960,6 +1960,35 @@ evaluate_stmt (gimple *stmt)
break;
}
+ case BUILT_IN_BSWAP16:
+ case BUILT_IN_BSWAP32:
+ case BUILT_IN_BSWAP64:
+ val = get_value_for_expr (gimple_call_arg (stmt, 0), true);
+ if (val.lattice_val == UNDEFINED)
+ break;
+ else if (val.lattice_val == CONSTANT
+ && val.value
+ && TREE_CODE (val.value) == INTEGER_CST)
+ {
+ tree type = TREE_TYPE (gimple_call_lhs (stmt));
+ int prec = TYPE_PRECISION (type);
+ wide_int wval = wi::to_wide (val.value);
+ val.value
+ = wide_int_to_tree (type,
+ wide_int::from (wval, prec,
+ UNSIGNED).bswap ());
+ val.mask
+ = widest_int::from (wide_int::from (val.mask, prec,
+ UNSIGNED).bswap (),
+ UNSIGNED);
+ if (wi::sext (val.mask, prec) != -1)
+ break;
+ }
+ val.lattice_val = VARYING;
+ val.value = NULL_TREE;
+ val.mask = -1;
+ break;
+
default:;
}
}
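
The new BUILT_IN_BSWAP{16,32,64} case in evaluate_stmt folds byte swaps of partially known operands by byte-swapping both the known value and the unknown-bits mask of the CCP lattice value. A rough stand-alone sketch of that idea in plain C follows; it deliberately uses a toy struct instead of GCC's ccp_prop_value_t/wide_int types, and the names are made up.

#include <stdint.h>

/* Toy model of a CCP lattice value: VALUE holds the known bits and a 1 bit
   in MASK means "this bit is unknown".  */
struct toy_ccp32
{
  uint32_t value;
  uint32_t mask;
};

/* Byte-swapping a partially known value: swapping the known bits and the
   unknown-bit mask the same way preserves the lattice meaning, which is
   what the hunk above does with wide_int::from (...).bswap ().  */
static struct toy_ccp32
toy_ccp_bswap32 (struct toy_ccp32 v)
{
  struct toy_ccp32 r;
  r.value = __builtin_bswap32 (v.value);
  r.mask = __builtin_bswap32 (v.mask);
  return r;
}

For f2 in the new test, the lattice value after the AND has value 0 and mask 0x55aa5aa5; after the swap the mask is 0xa55aaa55, so x & 0x5aa555aa is known to be zero and the link_error call is eliminated.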