aboutsummaryrefslogtreecommitdiff
path: root/gcc/fold-const.cc
diff options
context:
space:
mode:
authorRoger Sayle <roger@nextmovesoftware.com>2022-08-09 18:54:43 +0100
committerRoger Sayle <roger@nextmovesoftware.com>2022-08-09 18:54:43 +0100
commit6fc14f1963dfefead588a4cd8902d641ed69255c (patch)
tree07530a7b52d77cb52b773483c081d7384a8e3ee0 /gcc/fold-const.cc
parent9385cd9c74cf6662f43038aafe4d2467899f322e (diff)
downloadgcc-6fc14f1963dfefead588a4cd8902d641ed69255c.zip
gcc-6fc14f1963dfefead588a4cd8902d641ed69255c.tar.gz
gcc-6fc14f1963dfefead588a4cd8902d641ed69255c.tar.bz2
middle-end: Optimize ((X >> C1) & C2) != C3 for more cases.
Following my middle-end patch for PR tree-optimization/94026, I'd promised Jeff Law that I'd clean up the dead-code in fold-const.cc now that these optimizations are handled in match.pd. Alas, I discovered things aren't quite that simple, as the transformations I'd added avoided cases where C2 overlapped with the new bits introduced by the shift, but the original code handled any value of C2 provided that it had a single-bit set (under the condition that C3 was always zero). This patch upgrades the transformations supported by match.pd to cover any values of C2 and C3, provided that C1 is a valid bit shift constant, for all three shift types (logical right, arithmetic right and left). This then makes the code in fold-const.cc fully redundant, and adds support for some new (corner) cases not previously handled. If the constant C1 is valid for the type's precision, the shift is now always eliminated (with C2 and C3 possibly updated to test the sign bit). Interestingly, the fold-const.cc code that I'm now deleting was originally added by me back in 2006 to resolve PR middle-end/21137. I've confirmed that those testcase(s) remain resolved with this patch (and I'll close 21137 in Bugzilla). This patch also implements most (but not all) of the examples mentioned in PR tree-optimization/98954, for which I have some follow-up patches. 2022-08-09 Roger Sayle <roger@nextmovesoftware.com> Richard Biener <rguenther@suse.de> gcc/ChangeLog PR middle-end/21137 PR tree-optimization/98954 * fold-const.cc (fold_binary_loc): Remove optimizations to optimize ((X >> C1) & C2) ==/!= 0. * match.pd (cmp (bit_and (lshift @0 @1) @2) @3): Remove wi::ctz check, and handle all values of INTEGER_CSTs @2 and @3. (cmp (bit_and (rshift @0 @1) @2) @3): Likewise, remove wi::clz checks, and handle all values of INTEGER_CSTs @2 and @3. gcc/testsuite/ChangeLog PR middle-end/21137 PR tree-optimization/98954 * gcc.dg/fold-eqandshift-4.c: New test case.
Diffstat (limited to 'gcc/fold-const.cc')
-rw-r--r--gcc/fold-const.cc54
1 file changed, 0 insertions, 54 deletions
diff --git a/gcc/fold-const.cc b/gcc/fold-const.cc
index 99021a8..4f4ec81 100644
--- a/gcc/fold-const.cc
+++ b/gcc/fold-const.cc
@@ -12204,60 +12204,6 @@ fold_binary_loc (location_t loc, enum tree_code code, tree type,
}
}
- /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
- C1 is a valid shift constant, and C2 is a power of two, i.e.
- a single bit. */
- if (TREE_CODE (arg0) == BIT_AND_EXPR
- && integer_pow2p (TREE_OPERAND (arg0, 1))
- && integer_zerop (arg1))
- {
- tree arg00 = TREE_OPERAND (arg0, 0);
- STRIP_NOPS (arg00);
- if (TREE_CODE (arg00) == RSHIFT_EXPR
- && TREE_CODE (TREE_OPERAND (arg00, 1)) == INTEGER_CST)
- {
- tree itype = TREE_TYPE (arg00);
- tree arg001 = TREE_OPERAND (arg00, 1);
- prec = TYPE_PRECISION (itype);
-
- /* Check for a valid shift count. */
- if (wi::ltu_p (wi::to_wide (arg001), prec))
- {
- tree arg01 = TREE_OPERAND (arg0, 1);
- tree arg000 = TREE_OPERAND (arg00, 0);
- unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
- /* If (C2 << C1) doesn't overflow, then
- ((X >> C1) & C2) != 0 can be rewritten as
- (X & (C2 << C1)) != 0. */
- if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
- {
- tem = fold_build2_loc (loc, LSHIFT_EXPR, itype,
- arg01, arg001);
- tem = fold_build2_loc (loc, BIT_AND_EXPR, itype,
- arg000, tem);
- return fold_build2_loc (loc, code, type, tem,
- fold_convert_loc (loc, itype, arg1));
- }
- /* Otherwise, for signed (arithmetic) shifts,
- ((X >> C1) & C2) != 0 is rewritten as X < 0, and
- ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
- else if (!TYPE_UNSIGNED (itype))
- return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR
- : LT_EXPR,
- type, arg000,
- build_int_cst (itype, 0));
- /* Otherwise, of unsigned (logical) shifts,
- ((X >> C1) & C2) != 0 is rewritten as (X,false), and
- ((X >> C1) & C2) == 0 is rewritten as (X,true). */
- else
- return omit_one_operand_loc (loc, type,
- code == EQ_EXPR ? integer_one_node
- : integer_zero_node,
- arg000);
- }
- }
- }
-
/* If this is a comparison of a field, we may be able to simplify it. */
if ((TREE_CODE (arg0) == COMPONENT_REF
|| TREE_CODE (arg0) == BIT_FIELD_REF)