author     Charles M. Hannum <root@ihack.net>  1999-03-26 14:52:14 -0700
committer  Jeff Law <law@gcc.gnu.org>          1999-03-26 14:52:14 -0700
commit     97ea717682ff2c82773a482c9dc9d467810c03ef (patch)
tree       78a476e1fb06efb51cbc4e1ab6e07cdd1af05727 /gcc
parent     2d490c9e5d6c47fffb395c10115aed5e4082ddbe (diff)
fold-const.c (fold_truthop): Mask the lhs and rhs after merging adjacent bitfield references.

	* fold-const.c (fold_truthop): Mask the lhs and rhs after merging
	adjacent bitfield references.

From-SVN: r26004
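For context, here is a minimal, purely illustrative C sketch (not taken from the commit or the GCC test suite; the struct and function names are made up) of the kind of source this fold targets: pairs of comparisons on adjacent bit-fields that fold_truthop can merge into one wider bit-field load and compare per side.

struct pair { unsigned a : 4; unsigned b : 4; };   /* adjacent bit-fields */

/* Both fields are compared in full; the two tests can be merged into a
   single 8-bit-wide bit-field reference and compare on each side.  */
static int
same_fields (const struct pair *x, const struct pair *y)
{
  return x->a == y->a && x->b == y->b;
}

/* Here only part of field 'a' is tested, so the merged operands must
   still be ANDed with the combined (and suitably shifted) mask -- the
   step this patch adds.  */
static int
same_low_bit_and_b (const struct pair *x, const struct pair *y)
{
  return (x->a & 1) == (y->a & 1) && x->b == y->b;
}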
Diffstat (limited to 'gcc')
-rw-r--r--  gcc/ChangeLog       3
-rw-r--r--  gcc/fold-const.c   32
2 files changed, 25 insertions, 10 deletions
diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index 1935236..0e9486c 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -65,6 +65,9 @@ Fri Mar 26 10:43:47 1999 Nick Clifton <nickc@cygnus.com>
Fri Mar 26 01:59:15 1999 "Charles M. Hannum" <root@ihack.net>
+ * fold-const.c (fold_truthop): Mask the lhs and rhs after merging
+ adjacent bitfield references.
+
* fold-const.c (fold_truthop): Verify that the lhs and rhs are
in the same bit position when optimizing bitfield references
which have the same mask.
diff --git a/gcc/fold-const.c b/gcc/fold-const.c
index 78d7e41..93a0ae0 100644
--- a/gcc/fold-const.c
+++ b/gcc/fold-const.c
@@ -3956,20 +3956,32 @@ fold_truthop (code, truth_type, lhs, rhs)
/* There is still another way we can do something: If both pairs of
fields being compared are adjacent, we may be able to make a wider
- field containing them both. */
+ field containing them both.
+
+ Note that we still must mask the lhs/rhs expressions. Furthermore,
+ the mask must be shifted to account for the shift done by
+ make_bit_field_ref. */
if ((ll_bitsize + ll_bitpos == rl_bitpos
&& lr_bitsize + lr_bitpos == rr_bitpos)
|| (ll_bitpos == rl_bitpos + rl_bitsize
&& lr_bitpos == rr_bitpos + rr_bitsize))
- return build (wanted_code, truth_type,
- make_bit_field_ref (ll_inner, type,
- ll_bitsize + rl_bitsize,
- MIN (ll_bitpos, rl_bitpos),
- ll_unsignedp),
- make_bit_field_ref (lr_inner, type,
- lr_bitsize + rr_bitsize,
- MIN (lr_bitpos, rr_bitpos),
- lr_unsignedp));
+ {
+ lhs = make_bit_field_ref (ll_inner, type, ll_bitsize + rl_bitsize,
+ MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
+ ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
+ size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
+ if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
+ lhs = build (BIT_AND_EXPR, type, lhs, ll_mask);
+
+ rhs = make_bit_field_ref (lr_inner, type, lr_bitsize + rr_bitsize,
+ MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
+ lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
+ size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
+ if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
+ rhs = build (BIT_AND_EXPR, type, rhs, lr_mask);
+
+ return build (wanted_code, truth_type, lhs, rhs);
+ }
return 0;
}
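Read as ordinary C, the new masking logic amounts to roughly the sketch below. This is an illustrative approximation only, not GCC internals: it ignores the big-endian bit-position adjustments that the xll_/xrl_/xlr_/xrr_ variables handle in the real code, assumes the merged field is narrower than 32 bits, and the function and parameter names are hypothetical.

/* Illustrative approximation of one merged operand (the lhs side):
   'word' stands for the storage unit containing both adjacent fields,
   'bitpos' for MIN (ll_bitpos, rl_bitpos), 'bitsize' for
   ll_bitsize + rl_bitsize, and 'mask' for ll_mask as built against the
   whole word.  Assumes bitsize < 32.  */
static unsigned int
merged_operand (unsigned int word, unsigned int mask,
                int bitpos, int bitsize)
{
  unsigned int field_ones = (1u << bitsize) - 1u;

  /* What make_bit_field_ref yields: the two fields extracted as one
     value, shifted down to bit 0.  */
  unsigned int field = (word >> bitpos) & field_ones;

  /* Mirror of the RSHIFT_EXPR on ll_mask: realign the word-relative
     mask with the extracted, shifted-down field.  */
  unsigned int shifted_mask = mask >> bitpos;

  /* Mirror of the all_ones_mask_p test: skip the AND when the mask
     already covers every bit of the merged field.  */
  if (shifted_mask != field_ones)
    field &= shifted_mask;

  return field;
}

The all_ones_mask_p guard keeps the common full-field case (as in same_fields above) free of a redundant BIT_AND_EXPR; the AND is only emitted when the original comparisons examined a proper subset of the merged bits.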