diff options
author | Jakub Jelinek <jakub@redhat.com> | 2018-07-06 23:42:41 +0200 |
---|---|---|
committer | Jakub Jelinek <jakub@gcc.gnu.org> | 2018-07-06 23:42:41 +0200 |
commit | f290100275139ccb59832bb75ff7fb2606e110d7 (patch) | |
tree | 18cda432e83a3fcae530b889830983dd4a7f0efb /gcc/match.pd | |
parent | 8de583fc5301987f31e1897e07d545e218b943da (diff) | |
download | gcc-f290100275139ccb59832bb75ff7fb2606e110d7.zip gcc-f290100275139ccb59832bb75ff7fb2606e110d7.tar.gz gcc-f290100275139ccb59832bb75ff7fb2606e110d7.tar.bz2 |
re PR tree-optimization/86401 (The "For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,..." opts are only in fold-const.c and in RTL)
PR tree-optimization/86401
* fold-const.c (fold_binary_loc) <case BIT_AND_EXPR>: Move the
((A & N) + B) & M -> (A + B) & M etc. optimization into ...
(fold_bit_and_mask): ... here. New helper function for match.pd.
* fold-const.h (fold_bit_and_mask): Declare.
* match.pd (((A & N) + B) & M -> (A + B) & M): New optimization.
* gcc.dg/tree-ssa/pr86401-1.c: New test.
* gcc.dg/tree-ssa/pr86401-2.c: New test.
* c-c++-common/rotate-9.c: New test.
From-SVN: r262485
Diffstat (limited to 'gcc/match.pd')
-rw-r--r-- | gcc/match.pd | 54 |
1 file changed, 54 insertions, 0 deletions
diff --git a/gcc/match.pd b/gcc/match.pd index c1e0963..3b02db3 100644 --- a/gcc/match.pd +++ b/gcc/match.pd @@ -779,6 +779,60 @@ DEFINE_INT_AND_FLOAT_ROUND_FN (RINT) (bit_xor @0 @1))) #endif +/* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M, + ((A & N) + B) & M -> (A + B) & M + Similarly if (N & M) == 0, + ((A | N) + B) & M -> (A + B) & M + and for - instead of + (or unary - instead of +) + and/or ^ instead of |. + If B is constant and (B & M) == 0, fold into A & M. */ +(for op (plus minus) + (for bitop (bit_and bit_ior bit_xor) + (simplify + (bit_and (op:s (bitop:s@0 @3 INTEGER_CST@4) @1) INTEGER_CST@2) + (with + { tree pmop[2]; + tree utype = fold_bit_and_mask (TREE_TYPE (@0), @2, op, @0, bitop, + @3, @4, @1, ERROR_MARK, NULL_TREE, + NULL_TREE, pmop); } + (if (utype) + (convert (bit_and (op (convert:utype { pmop[0]; }) + (convert:utype { pmop[1]; })) + (convert:utype @2)))))) + (simplify + (bit_and (op:s @0 (bitop:s@1 @3 INTEGER_CST@4)) INTEGER_CST@2) + (with + { tree pmop[2]; + tree utype = fold_bit_and_mask (TREE_TYPE (@0), @2, op, @0, ERROR_MARK, + NULL_TREE, NULL_TREE, @1, bitop, @3, + @4, pmop); } + (if (utype) + (convert (bit_and (op (convert:utype { pmop[0]; }) + (convert:utype { pmop[1]; })) + (convert:utype @2))))))) + (simplify + (bit_and (op:s @0 @1) INTEGER_CST@2) + (with + { tree pmop[2]; + tree utype = fold_bit_and_mask (TREE_TYPE (@0), @2, op, @0, ERROR_MARK, + NULL_TREE, NULL_TREE, @1, ERROR_MARK, + NULL_TREE, NULL_TREE, pmop); } + (if (utype) + (convert (bit_and (op (convert:utype { pmop[0]; }) + (convert:utype { pmop[1]; })) + (convert:utype @2))))))) +(for bitop (bit_and bit_ior bit_xor) + (simplify + (bit_and (negate:s (bitop:s@0 @2 INTEGER_CST@3)) INTEGER_CST@1) + (with + { tree pmop[2]; + tree utype = fold_bit_and_mask (TREE_TYPE (@0), @1, NEGATE_EXPR, @0, + bitop, @2, @3, NULL_TREE, ERROR_MARK, + NULL_TREE, NULL_TREE, pmop); } + (if (utype) + (convert (bit_and (negate (convert:utype { pmop[0]; })) + (convert:utype @1))))))) + /* X % Y is smaller than Y. */ (for cmp (lt ge) (simplify |