author     Jakub Jelinek <jakub@gcc.gnu.org>  2017-10-17 09:38:32 +0200
committer  Jakub Jelinek <jakub@gcc.gnu.org>  2017-10-17 09:38:32 +0200
commit     49dcd8a45afa7564e658b677735538a83ce56cc1 (patch)
tree       c17639a5c47727a55f5035bbe921afaab74f2999 /gcc/fold-const.c
parent     7a76132c80a7a5b832c93bf1ff44e14720d54a33 (diff)
re PR tree-optimization/82549 (ICE at -O1 and above: verify_gimple failed)
	PR tree-optimization/82549
	* fold-const.c (optimize_bit_field_compare, fold_truth_andor_1):
	Formatting fixes.  Instead of calling make_bit_field_ref with
	negative bitpos return 0.
	* gcc.c-torture/compile/pr82549.c: New test.

From-SVN: r253805
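Both functions share the failure mode: the word-aligned bit position they compute (nbitpos, lnbitpos, and friends) can go negative when the referenced bits lie below the start of the underlying object, and passing such a position to make_bit_field_ref yields IL that verify_gimple rejects (the ICE in the PR title). A minimal sketch of the guard pattern the patch adds, using hypothetical names (bitref and build_bit_ref are illustrations, not GCC API; the committed testcase pr82549.c is not shown on this page):

    /* Hypothetical sketch, not GCC code: validate the computed bit
       position up front and return NULL (0) so the caller falls back
       to the unfolded form instead of building an invalid reference.  */
    #include <stddef.h>

    struct bitref { unsigned long size, pos; };

    static struct bitref *
    build_bit_ref (struct bitref *out, long nbitsize, long nbitpos)
    {
      if (nbitpos < 0)          /* the new guard: bail out, don't ICE */
        return NULL;
      out->size = (unsigned long) nbitsize;
      out->pos = (unsigned long) nbitpos;
      return out;
    }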
Diffstat (limited to 'gcc/fold-const.c')
-rw-r--r--  gcc/fold-const.c  52
1 file changed, 32 insertions(+), 20 deletions(-)
diff --git a/gcc/fold-const.c b/gcc/fold-const.c
index d1e6191..d876074 100644
--- a/gcc/fold-const.c
+++ b/gcc/fold-const.c
@@ -4013,21 +4013,20 @@ optimize_bit_field_compare (location_t loc, enum tree_code code,
size_int (nbitsize - lbitsize - lbitpos));
if (! const_p)
- /* If not comparing with constant, just rework the comparison
- and return. */
- return fold_build2_loc (loc, code, compare_type,
- fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
- make_bit_field_ref (loc, linner, lhs,
- unsigned_type,
- nbitsize, nbitpos,
- 1, lreversep),
- mask),
- fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
- make_bit_field_ref (loc, rinner, rhs,
- unsigned_type,
- nbitsize, nbitpos,
- 1, rreversep),
- mask));
+ {
+ if (nbitpos < 0)
+ return 0;
+
+ /* If not comparing with constant, just rework the comparison
+ and return. */
+ tree t1 = make_bit_field_ref (loc, linner, lhs, unsigned_type,
+ nbitsize, nbitpos, 1, lreversep);
+ t1 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t1, mask);
+ tree t2 = make_bit_field_ref (loc, rinner, rhs, unsigned_type,
+ nbitsize, nbitpos, 1, rreversep);
+ t2 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t2, mask);
+ return fold_build2_loc (loc, code, compare_type, t1, t2);
+ }
/* Otherwise, we are handling the constant case. See if the constant is too
big for the field. Warn and return a tree for 0 (false) if so. We do
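The bare `return 0;` in the hunk above works because callers treat a null tree as "no folding performed". Roughly (a simplified fragment, not a verbatim quote of the real call site in fold_binary_loc):

    tree tem = optimize_bit_field_compare (loc, code, type, arg0, arg1);
    if (tem)
      return tem;
    /* otherwise the original comparison is left alone */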
@@ -4058,6 +4057,9 @@ optimize_bit_field_compare (location_t loc, enum tree_code code,
}
}
+ if (nbitpos < 0)
+ return 0;
+
/* Single-bit compares should always be against zero. */
if (lbitsize == 1 && ! integer_zerop (rhs))
{
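This second guard lands just before the constant path's normalization step ("Single-bit compares should always be against zero"). An illustration of the source that step applies to, my example rather than anything from the testsuite:

    struct S { unsigned flag : 1; };

    int
    is_set (struct S s)
    {
      /* With lbitsize == 1 and a nonzero rhs, the comparison is
         rewritten against zero: (s.flag == 1) is handled as
         (s.flag != 0).  */
      return s.flag == 1;
    }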
@@ -5874,7 +5876,10 @@ fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
results. */
ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
- if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
+ if (lnbitsize == rnbitsize
+ && xll_bitpos == xlr_bitpos
+ && lnbitpos >= 0
+ && rnbitpos >= 0)
{
lhs = make_bit_field_ref (loc, ll_inner, ll_arg,
lntype, lnbitsize, lnbitpos,
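For orientation: fold_truth_andor_1 merges adjacent bit-field tests joined by && or || into a single load, mask, and compare, built by the make_bit_field_ref calls the new conditions guard. A contrived candidate (my example, not the PR's input):

    struct F { unsigned a : 4; unsigned b : 4; };

    int
    both (struct F s)
    {
      /* May be folded into one load of the containing word, one
         mask, and one compare.  */
      return s.a == 3 && s.b == 5;
    }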
@@ -5898,10 +5903,14 @@ fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
Note that we still must mask the lhs/rhs expressions. Furthermore,
the mask must be shifted to account for the shift done by
make_bit_field_ref. */
- if ((ll_bitsize + ll_bitpos == rl_bitpos
- && lr_bitsize + lr_bitpos == rr_bitpos)
- || (ll_bitpos == rl_bitpos + rl_bitsize
- && lr_bitpos == rr_bitpos + rr_bitsize))
+ if (((ll_bitsize + ll_bitpos == rl_bitpos
+ && lr_bitsize + lr_bitpos == rr_bitpos)
+ || (ll_bitpos == rl_bitpos + rl_bitsize
+ && lr_bitpos == rr_bitpos + rr_bitsize))
+ && ll_bitpos >= 0
+ && rl_bitpos >= 0
+ && lr_bitpos >= 0
+ && rr_bitpos >= 0)
{
tree type;
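The adjacency test reads naturally against a concrete layout (hypothetical struct and numbers; the bit assignment also depends on endianness):

    /* If x occupies bits 0-3 and y bits 4-7 of the same word, then
       ll_bitsize + ll_bitpos (4 + 0) equals rl_bitpos (4), and the
       pair can be tested with one 8-bit mask.  The added >= 0 checks
       keep positions below the object from reaching
       make_bit_field_ref.  */
    struct T { unsigned x : 4; unsigned y : 4; };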
@@ -5970,6 +5979,9 @@ fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
}
}
+ if (lnbitpos < 0)
+ return 0;
+
/* Construct the expression we will return. First get the component
reference we will make. Unless the mask is all ones the width of
that field, perform the mask operation. Then compare with the