author    Jakub Jelinek <jakub@redhat.com>    2018-06-07 09:41:18 +0200
committer Jakub Jelinek <jakub@gcc.gnu.org>   2018-06-07 09:41:18 +0200
commit    ca6b7410ddb84f2131a1754981a26238f3a6bf9f (patch)
tree      d71f8f027abe5bec0af61bcbb8445227c585517f /gcc
parent    b2bf82589273884743923aa780c9479965609070 (diff)
download  gcc-ca6b7410ddb84f2131a1754981a26238f3a6bf9f.zip
          gcc-ca6b7410ddb84f2131a1754981a26238f3a6bf9f.tar.gz
          gcc-ca6b7410ddb84f2131a1754981a26238f3a6bf9f.tar.bz2
re PR tree-optimization/69615 (0 to limit signed range checks don't always use unsigned compare)
	PR tree-optimization/69615
	* tree-ssa-reassoc.c (optimize_range_tests_var_bound): If rhs2 is lhs
	of a cast from a same precision integral SSA_NAME in a bb dominated
	by first_bb, retry with rhs2 set to the rhs1 of the cast.  Don't emit
	cast to utype if rhs2 has already a compatible type.

	* gcc.dg/tree-ssa/pr69615.c: New test.

From-SVN: r261264
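For context, a minimal sketch of the kind of range check this patch lets reassoc
handle; it is not part of the commit, and the function and variable names below
are invented for illustration only:

/* Illustrative only: the 0-to-limit range check targeted by PR 69615.
   Assuming limit is known to be non-negative, the two signed comparisons
   can be collapsed into a single unsigned compare, roughly
   (unsigned int) x < (unsigned int) limit, because any negative x maps
   to an unsigned value larger than any non-negative limit.  */
int
in_range (int x, int limit)
{
  return x >= 0 && x < limit;
}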
Diffstat (limited to 'gcc')
-rw-r--r--  gcc/ChangeLog                            |  8
-rw-r--r--  gcc/testsuite/ChangeLog                  |  5
-rw-r--r--  gcc/testsuite/gcc.dg/tree-ssa/pr69615.c  | 16
-rw-r--r--  gcc/tree-ssa-reassoc.c                   | 45
4 files changed, 61 insertions, 13 deletions
diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index f6f3fe78..ccc02ba 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,3 +1,11 @@
+2018-06-07 Jakub Jelinek <jakub@redhat.com>
+
+ PR tree-optimization/69615
+ * tree-ssa-reassoc.c (optimize_range_tests_var_bound): If rhs2 is lhs
+ of a cast from a same precision integral SSA_NAME in a bb dominated
+ by first_bb, retry with rhs2 set to the rhs1 of the cast. Don't emit
+ cast to utype if rhs2 has already a compatible type.
+
2018-06-07 Richard Biener <rguenther@suse.de>
PR tree-optimization/85935
diff --git a/gcc/testsuite/ChangeLog b/gcc/testsuite/ChangeLog
index 989e497..5015f59 100644
--- a/gcc/testsuite/ChangeLog
+++ b/gcc/testsuite/ChangeLog
@@ -1,3 +1,8 @@
+2018-06-07 Jakub Jelinek <jakub@redhat.com>
+
+ PR tree-optimization/69615
+ * gcc.dg/tree-ssa/pr69615.c: New test.
+
2018-06-07 Richard Biener <rguenther@suse.de>
PR tree-optimization/85935
diff --git a/gcc/testsuite/gcc.dg/tree-ssa/pr69615.c b/gcc/testsuite/gcc.dg/tree-ssa/pr69615.c
new file mode 100644
index 0000000..f5c4d8a
--- /dev/null
+++ b/gcc/testsuite/gcc.dg/tree-ssa/pr69615.c
@@ -0,0 +1,16 @@
+/* PR tree-optimization/69615 */
+/* { dg-do compile } */
+/* { dg-options "-O2 -fdump-tree-optimized" } */
+/* { dg-final { scan-tree-dump-not " >= 0" "optimized" } } */
+/* { dg-final { scan-tree-dump-not " < 0" "optimized" } } */
+
+extern void foo (void);
+
+void
+bar (int z, unsigned int y)
+{
+ long long x = z;
+ y &= 0xf;
+ if (x >= 0 && x < (int) y)
+ foo ();
+}
diff --git a/gcc/tree-ssa-reassoc.c b/gcc/tree-ssa-reassoc.c
index 38bae77..48d402b 100644
--- a/gcc/tree-ssa-reassoc.c
+++ b/gcc/tree-ssa-reassoc.c
@@ -3172,7 +3172,7 @@ optimize_range_tests_var_bound (enum tree_code opcode, int first, int length,
to (unsigned) k_32 < (unsigned) iftmp.0_44, then we would execute
those stmts even for negative k_32 and the value ranges would be no
longer guaranteed and so the optimization would be invalid. */
- if (opcode == ERROR_MARK)
+ while (opcode == ERROR_MARK)
{
gimple *g = SSA_NAME_DEF_STMT (rhs2);
basic_block bb2 = gimple_bb (g);
@@ -3182,21 +3182,37 @@ optimize_range_tests_var_bound (enum tree_code opcode, int first, int length,
{
/* As an exception, handle a few common cases. */
if (gimple_assign_cast_p (g)
- && INTEGRAL_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (g)))
- && TYPE_UNSIGNED (TREE_TYPE (gimple_assign_rhs1 (g)))
- && (TYPE_PRECISION (TREE_TYPE (rhs2))
- > TYPE_PRECISION (TREE_TYPE (gimple_assign_rhs1 (g)))))
- /* Zero-extension is always ok. */ ;
+ && INTEGRAL_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (g))))
+ {
+ tree op0 = gimple_assign_rhs1 (g);
+ if (TYPE_UNSIGNED (TREE_TYPE (op0))
+ && (TYPE_PRECISION (TREE_TYPE (rhs2))
+ > TYPE_PRECISION (TREE_TYPE (op0))))
+ /* Zero-extension is always ok. */
+ break;
+ else if (TYPE_PRECISION (TREE_TYPE (rhs2))
+ == TYPE_PRECISION (TREE_TYPE (op0))
+ && TREE_CODE (op0) == SSA_NAME)
+ {
+ /* Cast from signed to unsigned or vice versa. Retry
+ with the op0 as new rhs2. */
+ rhs2 = op0;
+ continue;
+ }
+ }
else if (is_gimple_assign (g)
&& gimple_assign_rhs_code (g) == BIT_AND_EXPR
&& TREE_CODE (gimple_assign_rhs2 (g)) == INTEGER_CST
&& !wi::neg_p (wi::to_wide (gimple_assign_rhs2 (g))))
/* Masking with INTEGER_CST with MSB clear is always ok
- too. */ ;
- else
- continue;
+ too. */
+ break;
+ rhs2 = NULL_TREE;
}
+ break;
}
+ if (rhs2 == NULL_TREE)
+ continue;
wide_int nz = get_nonzero_bits (rhs2);
if (wi::neg_p (nz))
@@ -3253,10 +3269,13 @@ optimize_range_tests_var_bound (enum tree_code opcode, int first, int length,
gimple_set_uid (g, uid);
rhs1 = gimple_assign_lhs (g);
gsi_insert_before (&gsi, g, GSI_SAME_STMT);
- g = gimple_build_assign (make_ssa_name (utype), NOP_EXPR, rhs2);
- gimple_set_uid (g, uid);
- rhs2 = gimple_assign_lhs (g);
- gsi_insert_before (&gsi, g, GSI_SAME_STMT);
+ if (!useless_type_conversion_p (utype, TREE_TYPE (rhs2)))
+ {
+ g = gimple_build_assign (make_ssa_name (utype), NOP_EXPR, rhs2);
+ gimple_set_uid (g, uid);
+ rhs2 = gimple_assign_lhs (g);
+ gsi_insert_before (&gsi, g, GSI_SAME_STMT);
+ }
if (tree_swap_operands_p (rhs1, rhs2))
{
std::swap (rhs1, rhs2);