about summary refs log tree commit diff
diff options
context:
space:
mode:
authorAndrew Pinski <quic_apinski@quicinc.com>2024-05-26 17:59:21 -0700
committerAndrew Pinski <quic_apinski@quicinc.com>2024-05-27 12:09:58 -0700
commitc5a7628470a7fb801ebeea82e16a4549db43bfa5 (patch)
tree9de6c90a45db6760d08dd59d3cef09056ca6c98b
parenta209f219b862def8fed166b31984b8c6c3bb74a0 (diff)
downloadgcc-c5a7628470a7fb801ebeea82e16a4549db43bfa5.zip
gcc-c5a7628470a7fb801ebeea82e16a4549db43bfa5.tar.gz
gcc-c5a7628470a7fb801ebeea82e16a4549db43bfa5.tar.bz2
match: Use uniform_integer_cst_p in bitwise_inverted_equal_p [PR115238]
I noticed while working on the `a ^ CST` patch that bitwise_inverted_equal_p
would check INTEGER_CST directly and not handle vector csts that are uniform.
This moves over to using uniform_integer_cst_p instead of checking INTEGER_CST
directly.

Bootstrapped and tested on x86_64-linux-gnu with no regressions.

	PR tree-optimization/115238

gcc/ChangeLog:

	* generic-match-head.cc (bitwise_inverted_equal_p): Use
	uniform_integer_cst_p instead of checking INTEGER_CST.
	* gimple-match-head.cc (gimple_bitwise_inverted_equal_p): Likewise.

gcc/testsuite/ChangeLog:

	* gcc.dg/tree-ssa/bitops-9.c: New test.

Signed-off-by: Andrew Pinski <quic_apinski@quicinc.com>
-rw-r--r--gcc/generic-match-head.cc6
-rw-r--r--gcc/gimple-match-head.cc6
-rw-r--r--gcc/testsuite/gcc.dg/tree-ssa/bitops-9.c15
3 files changed, 23 insertions, 4 deletions
diff --git a/gcc/generic-match-head.cc b/gcc/generic-match-head.cc
index e2e1e4b..55ba369 100644
--- a/gcc/generic-match-head.cc
+++ b/gcc/generic-match-head.cc
@@ -146,8 +146,10 @@ bitwise_inverted_equal_p (tree expr1, tree expr2, bool &wascmp)
return false;
if (!tree_nop_conversion_p (TREE_TYPE (expr1), TREE_TYPE (expr2)))
return false;
- if (TREE_CODE (expr1) == INTEGER_CST && TREE_CODE (expr2) == INTEGER_CST)
- return wi::to_wide (expr1) == ~wi::to_wide (expr2);
+ tree cst1 = uniform_integer_cst_p (expr1);
+ tree cst2 = uniform_integer_cst_p (expr2);
+ if (cst1 && cst2)
+ return wi::to_wide (cst1) == ~wi::to_wide (cst2);
if (operand_equal_p (expr1, expr2, 0))
return false;
if (TREE_CODE (expr1) == BIT_NOT_EXPR
diff --git a/gcc/gimple-match-head.cc b/gcc/gimple-match-head.cc
index 49b1dde..6220725 100644
--- a/gcc/gimple-match-head.cc
+++ b/gcc/gimple-match-head.cc
@@ -294,8 +294,10 @@ gimple_bitwise_inverted_equal_p (tree expr1, tree expr2, bool &wascmp, tree (*va
return false;
if (!tree_nop_conversion_p (TREE_TYPE (expr1), TREE_TYPE (expr2)))
return false;
- if (TREE_CODE (expr1) == INTEGER_CST && TREE_CODE (expr2) == INTEGER_CST)
- return wi::to_wide (expr1) == ~wi::to_wide (expr2);
+ tree cst1 = uniform_integer_cst_p (expr1);
+ tree cst2 = uniform_integer_cst_p (expr2);
+ if (cst1 && cst2)
+ return wi::to_wide (cst1) == ~wi::to_wide (cst2);
if (operand_equal_p (expr1, expr2, 0))
return false;
diff --git a/gcc/testsuite/gcc.dg/tree-ssa/bitops-9.c b/gcc/testsuite/gcc.dg/tree-ssa/bitops-9.c
new file mode 100644
index 0000000..a18b6bf
--- /dev/null
+++ b/gcc/testsuite/gcc.dg/tree-ssa/bitops-9.c
@@ -0,0 +1,15 @@
+/* { dg-do compile } */
+/* { dg-options "-O2 -fdump-tree-optimized-raw" } */
+/* PR tree-optimization/115238 */
+
+
+#define vector8 __attribute__((vector_size(2*sizeof(int))))
+
+void f(int a, vector8 int *b)
+{
+ a = 1;
+ *b = a | ((~a) ^ *b);
+}
+/* { dg-final { scan-tree-dump-not "bit_xor_expr, " "optimized" } } */
+/* { dg-final { scan-tree-dump-times "bit_ior_expr, " 1 "optimized" } } */
+/* { dg-final { scan-tree-dump-times "bit_not_expr, " 1 "optimized" } } */