-rw-r--r--  gcc/ChangeLog       8
-rw-r--r--  gcc/fold-const.c  106
-rw-r--r--  gcc/match.pd       51
3 files changed, 51 insertions, 114 deletions
diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index 643b4f5..5cfac4e 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,3 +1,11 @@
+2015-07-10 Richard Biener <rguenther@suse.de>
+
+ * fold-const.c (distribute_bit_expr): Remove.
+ (fold_binary_loc): Move simplifying (A & C1) + (B & C2)
+ to (A & C1) | (B & C2), distributing (A & B) | (A & C)
+ to A & (B | C) and simplifying A << C1 << C2 to ...
+ * match.pd: ... patterns here.
+
2015-07-10  Jiong Wang  <jiong.wang@arm.com>

 * config/aarch64/aarch64.c (aarch64_load_symref_appropriately): Mark mem
diff --git a/gcc/fold-const.c b/gcc/fold-const.c
index f78ec4e..7078edb 100644
--- a/gcc/fold-const.c
+++ b/gcc/fold-const.c
@@ -118,7 +118,6 @@ static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
-static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
@@ -3550,62 +3549,6 @@ invert_truthvalue_loc (location_t loc, tree arg)
type, arg);
}
-/* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
- operands are another bit-wise operation with a common input. If so,
- distribute the bit operations to save an operation and possibly two if
- constants are involved. For example, convert
- (A | B) & (A | C) into A | (B & C)
- Further simplification will occur if B and C are constants.
-
- If this optimization cannot be done, 0 will be returned. */
-
-static tree
-distribute_bit_expr (location_t loc, enum tree_code code, tree type,
- tree arg0, tree arg1)
-{
- tree common;
- tree left, right;
-
- if (TREE_CODE (arg0) != TREE_CODE (arg1)
- || TREE_CODE (arg0) == code
- || (TREE_CODE (arg0) != BIT_AND_EXPR
- && TREE_CODE (arg0) != BIT_IOR_EXPR))
- return 0;
-
- if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
- {
- common = TREE_OPERAND (arg0, 0);
- left = TREE_OPERAND (arg0, 1);
- right = TREE_OPERAND (arg1, 1);
- }
- else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
- {
- common = TREE_OPERAND (arg0, 0);
- left = TREE_OPERAND (arg0, 1);
- right = TREE_OPERAND (arg1, 0);
- }
- else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
- {
- common = TREE_OPERAND (arg0, 1);
- left = TREE_OPERAND (arg0, 0);
- right = TREE_OPERAND (arg1, 1);
- }
- else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
- {
- common = TREE_OPERAND (arg0, 1);
- left = TREE_OPERAND (arg0, 0);
- right = TREE_OPERAND (arg1, 0);
- }
- else
- return 0;
-
- common = fold_convert_loc (loc, type, common);
- left = fold_convert_loc (loc, type, left);
- right = fold_convert_loc (loc, type, right);
- return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
- fold_build2_loc (loc, code, type, left, right));
-}
-
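For reference, the identity the removed helper implemented is easy to check standalone. A minimal sketch, illustrative only and not part of the patch, compiled as ordinary C:

#include <assert.h>

int
main (void)
{
  unsigned a = 0x5A5Au, b = 0x00FFu, c = 0xFF00u;

  /* The distribution saves one operation in general ...  */
  assert (((a | b) & (a | c)) == (a | (b & c)));
  /* ... and with constant B and C, b & c folds further; here it is 0,
     so the whole expression folds to just a.  */
  assert (((a | b) & (a | c)) == a);
  return 0;
}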
/* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
with code CODE. This optimization is unsafe. */
static tree
@@ -9575,21 +9518,6 @@ fold_binary_loc (location_t loc,
if (! FLOAT_TYPE_P (type))
{
- /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
- with a constant, and the two constants have no bits in common,
- we should treat this as a BIT_IOR_EXPR since this may produce more
- simplifications. */
- if (TREE_CODE (arg0) == BIT_AND_EXPR
- && TREE_CODE (arg1) == BIT_AND_EXPR
- && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
- && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
- && wi::bit_and (TREE_OPERAND (arg0, 1),
- TREE_OPERAND (arg1, 1)) == 0)
- {
- code = BIT_IOR_EXPR;
- goto bit_ior;
- }
-
/* Reassociate (plus (plus (mult) (foo)) (mult)) as
(plus (plus (mult) (mult)) (foo)) so that we can
take advantage of the factoring cases below. */
@@ -10423,7 +10351,6 @@ fold_binary_loc (location_t loc,
goto associate;
case BIT_IOR_EXPR:
- bit_ior:
/* Canonicalize (X & C1) | C2. */
if (TREE_CODE (arg0) == BIT_AND_EXPR
&& TREE_CODE (arg1) == INTEGER_CST
@@ -10494,10 +10421,6 @@ fold_binary_loc (location_t loc,
return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
}
- t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
- if (t1 != NULL_TREE)
- return t1;
-
/* See if this can be simplified into a rotate first. If that
is unsuccessful continue in the association code. */
goto bit_rotate;
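As an aside, the rotate form the comment refers to is the usual OR-of-shifts idiom. A minimal sketch (the function name rotl8 is made up here, and a 32-bit unsigned int is assumed):

unsigned int
rotl8 (unsigned int x)
{
  /* Recognized as a rotate-left by 8; GCC can emit a single rotate
     instruction where the target provides one.  */
  return (x << 8) | (x >> 24);
}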
@@ -10760,9 +10683,6 @@ fold_binary_loc (location_t loc,
}
}
- t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
- if (t1 != NULL_TREE)
- return t1;
/* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
&& TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
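A minimal standalone check of that simplification (illustrative only, not part of the patch):

#include <assert.h>

int
main (void)
{
  unsigned char c = 0xAB;

  /* c zero-extends to [0, 255], so the 0377 mask cannot clear any bit.  */
  assert (((int) c & 0377) == (int) c);
  return 0;
}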
@@ -11111,32 +11031,6 @@ fold_binary_loc (location_t loc,
prec = element_precision (type);
- /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
- if (TREE_CODE (op0) == code && tree_fits_uhwi_p (arg1)
- && tree_to_uhwi (arg1) < prec
- && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
- && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
- {
- unsigned int low = (tree_to_uhwi (TREE_OPERAND (arg0, 1))
- + tree_to_uhwi (arg1));
-
- /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
- being well defined. */
- if (low >= prec)
- {
- if (code == LROTATE_EXPR || code == RROTATE_EXPR)
- low = low % prec;
- else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
- return omit_one_operand_loc (loc, type, build_zero_cst (type),
- TREE_OPERAND (arg0, 0));
- else
- low = prec - 1;
- }
-
- return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
- build_int_cst (TREE_TYPE (arg1), low));
- }
-
/* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
into x & ((unsigned)-1 >> c) for unsigned types. */
if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
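The masks the comment names can be sanity-checked in plain C; a minimal sketch, illustrative only:

#include <assert.h>

int
main (void)
{
  unsigned x = 0xDEADBEEFu;

  /* Clearing the low bits by a shift pair equals masking ...  */
  assert (((x >> 4) << 4) == (x & (-1u << 4)));
  /* ... and for unsigned types the same holds for the high bits.  */
  assert (((x << 4) >> 4) == (x & (-1u >> 4)));
  return 0;
}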
diff --git a/gcc/match.pd b/gcc/match.pd
index b28f107..b991008b 100644
--- a/gcc/match.pd
+++ b/gcc/match.pd
@@ -419,17 +419,18 @@ along with GCC; see the file COPYING3. If not see
&& tree_nop_conversion_p (type, TREE_TYPE (@1)))
(bit_not (rop (convert @0) (convert @1))))))
-/* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
+/* If we are XORing or adding two BIT_AND_EXPR's, both of which are and'ing
with a constant, and the two constants have no bits in common,
we should treat this as a BIT_IOR_EXPR since this may produce more
simplifications. */
-(simplify
- (bit_xor (convert1? (bit_and@4 @0 INTEGER_CST@1))
- (convert2? (bit_and@5 @2 INTEGER_CST@3)))
- (if (tree_nop_conversion_p (type, TREE_TYPE (@0))
- && tree_nop_conversion_p (type, TREE_TYPE (@2))
- && wi::bit_and (@1, @3) == 0)
- (bit_ior (convert @4) (convert @5))))
+(for op (bit_xor plus)
+ (simplify
+  (op (convert1? (bit_and@4 @0 INTEGER_CST@1))
+      (convert2? (bit_and@5 @2 INTEGER_CST@3)))
+  (if (tree_nop_conversion_p (type, TREE_TYPE (@0))
+       && tree_nop_conversion_p (type, TREE_TYPE (@2))
+       && wi::bit_and (@1, @3) == 0)
+   (bit_ior (convert @4) (convert @5)))))
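A standalone check of the precondition the pattern relies on (illustrative only, not part of the patch): with disjoint masks there are no carries, so plus, xor and ior coincide:

#include <assert.h>

int
main (void)
{
  unsigned a = 0x1234u, b = 0xABCDu;
  unsigned lo = a & 0x00FFu, hi = b & 0xFF00u;  /* disjoint masks */

  /* No common bits means no carries, so +, ^ and | all agree.  */
  assert ((lo + hi) == (lo | hi));
  assert ((lo ^ hi) == (lo | hi));
  return 0;
}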
/* (X | Y) ^ X -> Y & ~ X*/
(simplify
@@ -455,6 +456,19 @@ along with GCC; see the file COPYING3. If not see
(bit_xor:c (bit_and:c @0 @1) @1)
(bit_and (bit_not @0) @1))
+/* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
+   operands are another bit-wise operation with a common input.  If so,
+   distribute the bit operations to save an operation and possibly two if
+   constants are involved.  For example, convert
+     (A | B) & (A | C) into A | (B & C)
+   Further simplification will occur if B and C are constants.  */
+(for op (bit_and bit_ior)
+     rop (bit_ior bit_and)
+ (simplify
+  (op (convert? (rop:c @0 @1)) (convert? (rop @0 @2)))
+  (if (tree_nop_conversion_p (type, TREE_TYPE (@0)))
+   (rop (convert @0) (op (convert @1) (convert @2))))))
+
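At the source level the pattern amounts to the following pair of functions, which should now fold to the same GIMPLE; the names distributed and factored are made up for illustration:

unsigned
distributed (unsigned a, unsigned b, unsigned c)
{
  return (a & b) | (a & c);  /* three bitwise operations */
}

unsigned
factored (unsigned a, unsigned b, unsigned c)
{
  return a & (b | c);        /* two bitwise operations */
}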
(simplify
(abs (abs@1 @0))
@@ -880,6 +894,27 @@ along with GCC; see the file COPYING3. If not see
build_int_cst (TREE_TYPE (@1),
element_precision (type)), @1); }))
+/* Turn (a OP c1) OP c2 into a OP (c1+c2).  */
+(for op (lrotate rrotate rshift lshift)
+ (simplify
+  (op (op @0 INTEGER_CST@1) INTEGER_CST@2)
+  (with { unsigned int prec = element_precision (type); }
+   (if (wi::ge_p (@1, 0, TYPE_SIGN (TREE_TYPE (@1)))
+        && wi::lt_p (@1, prec, TYPE_SIGN (TREE_TYPE (@1)))
+        && wi::ge_p (@2, 0, TYPE_SIGN (TREE_TYPE (@2)))
+        && wi::lt_p (@2, prec, TYPE_SIGN (TREE_TYPE (@2))))
+    (with { unsigned int low = wi::add (@1, @2).to_uhwi (); }
+     /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
+        being well defined.  */
+     (if (low >= prec)
+      (if (op == LROTATE_EXPR || op == RROTATE_EXPR)
+       (op @0 { build_int_cst (TREE_TYPE (@1), low % prec); })
+       (if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
+        { build_zero_cst (type); }
+        (op @0 { build_int_cst (TREE_TYPE (@1), prec - 1); })))
+      (op @0 { build_int_cst (TREE_TYPE (@1), low); })))))))
+
+
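A standalone check of both arms of the new pattern (illustrative only; assumes 32-bit unsigned int):

#include <assert.h>

int
main (void)
{
  unsigned a = 0x12345678u;  /* assumes 32-bit unsigned int */

  /* In-range sum: the two shifts combine into one.  */
  assert (((a << 3) << 4) == (a << 7));
  /* Out-of-range sum: a << 40 would be undefined, but the two
     well-defined shifts fold to zero for unsigned types.  */
  assert (((a << 20) << 20) == 0u);
  return 0;
}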
/* ((1 << A) & 1) != 0 -> A == 0
((1 << A) & 1) == 0 -> A != 0 */
(for cmp (ne eq)