about summary refs log tree commit diff
path: root/gcc/tree-ssa-ccp.cc
diff options
context:
space:
mode:
Diffstat (limited to 'gcc/tree-ssa-ccp.cc')
-rw-r--r--  gcc/tree-ssa-ccp.cc  |  84
1 file changed, 69 insertions(+), 15 deletions(-)
diff --git a/gcc/tree-ssa-ccp.cc b/gcc/tree-ssa-ccp.cc
index 9778e77..3a4b6bc 100644
--- a/gcc/tree-ssa-ccp.cc
+++ b/gcc/tree-ssa-ccp.cc
@@ -3471,17 +3471,35 @@ optimize_atomic_bit_test_and (gimple_stmt_iterator *gsip,
{
gimple *use_nop_stmt;
if (!single_imm_use (use_lhs, &use_p, &use_nop_stmt)
- || !is_gimple_assign (use_nop_stmt))
+ || (!is_gimple_assign (use_nop_stmt)
+ && gimple_code (use_nop_stmt) != GIMPLE_COND))
return false;
- tree use_nop_lhs = gimple_assign_lhs (use_nop_stmt);
- rhs_code = gimple_assign_rhs_code (use_nop_stmt);
- if (rhs_code != BIT_AND_EXPR)
+ /* Handle both
+ _4 = _5 < 0;
+ and
+ if (_5 < 0)
+ */
+ tree use_nop_lhs = nullptr;
+ rhs_code = ERROR_MARK;
+ if (is_gimple_assign (use_nop_stmt))
{
- if (TREE_CODE (use_nop_lhs) == SSA_NAME
+ use_nop_lhs = gimple_assign_lhs (use_nop_stmt);
+ rhs_code = gimple_assign_rhs_code (use_nop_stmt);
+ }
+ if (!use_nop_lhs || rhs_code != BIT_AND_EXPR)
+ {
+ /* Also handle
+ if (_5 < 0)
+ */
+ if (use_nop_lhs
+ && TREE_CODE (use_nop_lhs) == SSA_NAME
&& SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use_nop_lhs))
return false;
- if (rhs_code == BIT_NOT_EXPR)
+ if (use_nop_lhs && rhs_code == BIT_NOT_EXPR)
{
+ /* Handle
+ _7 = ~_2;
+ */
g = convert_atomic_bit_not (fn, use_nop_stmt, lhs,
mask);
if (!g)
@@ -3512,14 +3530,31 @@ optimize_atomic_bit_test_and (gimple_stmt_iterator *gsip,
}
else
{
- if (TREE_CODE (TREE_TYPE (use_nop_lhs)) != BOOLEAN_TYPE)
- return false;
+ tree cmp_rhs1, cmp_rhs2;
+ if (use_nop_lhs)
+ {
+ /* Handle
+ _4 = _5 < 0;
+ */
+ if (TREE_CODE (TREE_TYPE (use_nop_lhs))
+ != BOOLEAN_TYPE)
+ return false;
+ cmp_rhs1 = gimple_assign_rhs1 (use_nop_stmt);
+ cmp_rhs2 = gimple_assign_rhs2 (use_nop_stmt);
+ }
+ else
+ {
+ /* Handle
+ if (_5 < 0)
+ */
+ rhs_code = gimple_cond_code (use_nop_stmt);
+ cmp_rhs1 = gimple_cond_lhs (use_nop_stmt);
+ cmp_rhs2 = gimple_cond_rhs (use_nop_stmt);
+ }
if (rhs_code != GE_EXPR && rhs_code != LT_EXPR)
return false;
- tree cmp_rhs1 = gimple_assign_rhs1 (use_nop_stmt);
if (use_lhs != cmp_rhs1)
return false;
- tree cmp_rhs2 = gimple_assign_rhs2 (use_nop_stmt);
if (!integer_zerop (cmp_rhs2))
return false;
@@ -3547,6 +3582,14 @@ optimize_atomic_bit_test_and (gimple_stmt_iterator *gsip,
_1 = __atomic_fetch_and_4 (ptr_6, 0x7fffffff, _3);
_6 = _1 & 0x80000000;
_4 = _6 != 0 or _6 == 0;
+ and convert
+ _1 = __atomic_fetch_and_4 (ptr_6, 0x7fffffff, _3);
+ _5 = (signed int) _1;
+ if (_5 < 0 or _5 >= 0)
+ to
+ _1 = __atomic_fetch_and_4 (ptr_6, 0x7fffffff, _3);
+ _6 = _1 & 0x80000000;
+ if (_6 != 0 or _6 == 0)
*/
and_mask = build_int_cst (TREE_TYPE (use_rhs),
highest);
@@ -3567,6 +3610,14 @@ optimize_atomic_bit_test_and (gimple_stmt_iterator *gsip,
_1 = __atomic_fetch_or_4 (ptr_6, 0x80000000, _3);
_6 = _1 & 0x80000000;
_4 = _6 != 0 or _6 == 0;
+ and convert
+ _1 = __atomic_fetch_or_4 (ptr_6, 0x80000000, _3);
+ _5 = (signed int) _1;
+ if (_5 < 0 or _5 >= 0)
+ to
+ _1 = __atomic_fetch_or_4 (ptr_6, 0x80000000, _3);
+ _6 = _1 & 0x80000000;
+ if (_6 != 0 or _6 == 0)
*/
}
var = make_ssa_name (TREE_TYPE (use_rhs));
@@ -3577,11 +3628,14 @@ optimize_atomic_bit_test_and (gimple_stmt_iterator *gsip,
gsi = gsi_for_stmt (use_nop_stmt);
gsi_insert_before (&gsi, g, GSI_NEW_STMT);
use_stmt = g;
- g = gimple_build_assign (use_nop_lhs,
- (rhs_code == GE_EXPR
- ? EQ_EXPR : NE_EXPR),
- var,
- build_zero_cst (TREE_TYPE (use_rhs)));
+ rhs_code = rhs_code == GE_EXPR ? EQ_EXPR : NE_EXPR;
+ tree const_zero = build_zero_cst (TREE_TYPE (use_rhs));
+ if (use_nop_lhs)
+ g = gimple_build_assign (use_nop_lhs, rhs_code,
+ var, const_zero);
+ else
+ g = gimple_build_cond (rhs_code, var, const_zero,
+ nullptr, nullptr);
gsi_insert_after (&gsi, g, GSI_NEW_STMT);
gsi = gsi_for_stmt (use_nop_stmt);
gsi_remove (&gsi, true);