Diffstat (limited to 'gcc/fold-const.c')
-rw-r--r--  gcc/fold-const.c  132
1 file changed, 79 insertions, 53 deletions
diff --git a/gcc/fold-const.c b/gcc/fold-const.c
index ae28445..5e32901 100644
--- a/gcc/fold-const.c
+++ b/gcc/fold-const.c
@@ -114,12 +114,12 @@ static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
- HOST_WIDE_INT, HOST_WIDE_INT, int);
+ HOST_WIDE_INT, HOST_WIDE_INT, int, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
HOST_WIDE_INT *,
- machine_mode *, int *, int *,
+ machine_mode *, int *, int *, int *,
tree *, tree *);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
@@ -3688,15 +3688,17 @@ distribute_real_division (location_t loc, enum tree_code code, tree type,
}
/* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
- starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
+ starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero
+ and uses reverse storage order if REVERSEP is nonzero. */
static tree
make_bit_field_ref (location_t loc, tree inner, tree type,
- HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
+ HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
+ int unsignedp, int reversep)
{
tree result, bftype;
- if (bitpos == 0)
+ if (bitpos == 0 && !reversep)
{
tree size = TYPE_SIZE (TREE_TYPE (inner));
if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
@@ -3713,6 +3715,7 @@ make_bit_field_ref (location_t loc, tree inner, tree type,
result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
size_int (bitsize), bitsize_int (bitpos));
+ REF_REVERSE_STORAGE_ORDER (result) = reversep;
if (bftype != type)
result = fold_convert_loc (loc, type, result);
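
The new REVERSEP parameter exists to support GCC's scalar_storage_order type attribute, under which scalar fields are stored in the byte order named by the attribute rather than the target's native one. A minimal sketch of the source-level construct involved (struct and function names are illustrative):

/* Fields of this struct are stored big-endian regardless of the
   target's native byte order (a GCC extension).  On a little-endian
   target, any BIT_FIELD_REF built over such a field must carry
   REF_REVERSE_STORAGE_ORDER, which is what reversep threads through.  */
struct __attribute__((scalar_storage_order("big-endian"))) be_header {
  unsigned int tag : 16;
  unsigned int len : 16;
};

int has_tag (const struct be_header *p)
{
  /* A bit-field compared against a constant: the shape of expression
     optimize_bit_field_compare rewrites into a masked word compare.  */
  return p->tag == 0x1234;
}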
@@ -3750,6 +3753,7 @@ optimize_bit_field_compare (location_t loc, enum tree_code code,
int const_p = TREE_CODE (rhs) == INTEGER_CST;
machine_mode lmode, rmode, nmode;
int lunsignedp, runsignedp;
+ int lreversep, rreversep;
int lvolatilep = 0, rvolatilep = 0;
tree linner, rinner = NULL_TREE;
tree mask;
@@ -3761,20 +3765,23 @@ optimize_bit_field_compare (location_t loc, enum tree_code code,
do anything if the inner expression is a PLACEHOLDER_EXPR since we
then will no longer be able to replace it. */
linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
- &lunsignedp, &lvolatilep, false);
+ &lunsignedp, &lreversep, &lvolatilep, false);
if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
|| offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
return 0;
- if (!const_p)
+ if (const_p)
+ rreversep = lreversep;
+ else
{
/* If this is not a constant, we can only do something if bit positions,
- sizes, and signedness are the same. */
- rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
- &runsignedp, &rvolatilep, false);
+ sizes, signedness and storage order are the same. */
+ rinner
+ = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
+ &runsignedp, &rreversep, &rvolatilep, false);
if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
- || lunsignedp != runsignedp || offset != 0
+ || lunsignedp != runsignedp || lreversep != rreversep || offset != 0
|| TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
return 0;
}
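
For the non-constant case, both operands must decode to bit-field references agreeing in position, size, signedness and, with this patch, storage order; roughly the following source shape (illustrative names and widths):

struct s { unsigned int f : 9; unsigned int g : 7; };

int same_field (const struct s *p, const struct s *q)
{
  /* Both sides are bit-fields with matching layout, so the compare
     can be done on the extracted containing words.  */
  return p->f == q->f;
}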
@@ -3802,7 +3809,7 @@ optimize_bit_field_compare (location_t loc, enum tree_code code,
if (nbitsize == lbitsize)
return 0;
- if (BYTES_BIG_ENDIAN)
+ if (lreversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
lbitpos = nbitsize - lbitsize - lbitpos;
/* Make the mask to be used against the extracted field. */
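
The widened condition mirrors the bit position within the extracted word whenever the field's storage order is big-endian relative to the shift direction: natively big-endian targets, or reverse-order fields on little-endian targets. A sketch of the same adjustment with a worked case (numbers illustrative):

static int
shift_bitpos (int nbitsize, int lbitsize, int lbitpos,
              int bytes_big_endian, int reversep)
{
  /* Same test as in the patch: mirror when storage order and shift
     order disagree.  E.g. an 8-bit field at memory bit position 8 in
     a 32-bit word lands at shift position 32 - 8 - 8 == 16.  */
  if (reversep ? !bytes_big_endian : bytes_big_endian)
    lbitpos = nbitsize - lbitsize - lbitpos;
  return lbitpos;
}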
@@ -3819,17 +3826,17 @@ optimize_bit_field_compare (location_t loc, enum tree_code code,
make_bit_field_ref (loc, linner,
unsigned_type,
nbitsize, nbitpos,
- 1),
+ 1, lreversep),
mask),
fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
make_bit_field_ref (loc, rinner,
unsigned_type,
nbitsize, nbitpos,
- 1),
+ 1, rreversep),
mask));
- /* Otherwise, we are handling the constant case. See if the constant is too
- big for the field. Warn and return a tree of for 0 (false) if so. We do
+ /* Otherwise, we are handling the constant case. See if the constant is too
+ big for the field. Warn and return a tree for 0 (false) if so. We do
this not only for its own sake, but to avoid having to test for this
error case below. If we didn't, we might generate wrong code.
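
The constant-too-wide case the comment guards against, in source form (names illustrative):

struct s { unsigned int f : 3; };   /* can hold only 0..7 */

int never_true (struct s x)
{
  return x.f == 9;   /* 9 does not fit in 3 bits: folded to 0, with a warning */
}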
@@ -3867,7 +3874,8 @@ optimize_bit_field_compare (location_t loc, enum tree_code code,
/* Make a new bitfield reference, shift the constant over the
appropriate number of bits and mask it with the computed mask
(in case this was a signed field). If we changed it, make a new one. */
- lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
+ lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1,
+ lreversep);
rhs = const_binop (BIT_AND_EXPR,
const_binop (LSHIFT_EXPR,
@@ -3895,6 +3903,8 @@ optimize_bit_field_compare (location_t loc, enum tree_code code,
*PUNSIGNEDP is set to the signedness of the field.
+ *PREVERSEP is set to the storage order of the field.
+
*PMASK is set to the mask used. This is either contained in a
BIT_AND_EXPR or derived from the width of the field.
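
The kind of operand decode_field_reference takes apart, sketched at source level (names illustrative): a bit-field reference, possibly wrapped in an explicit mask, from which it reports position, size, mode, signedness and now storage order:

struct s { unsigned int a : 5; unsigned int b : 11; };

int masked_test (const struct s *p)
{
  /* Here *pand_mask would describe the explicit 0x0f mask and *pmask
     the combined mask actually applied to the extracted field.  */
  return (p->b & 0x0f) == 3;
}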
@@ -3906,7 +3916,7 @@ optimize_bit_field_compare (location_t loc, enum tree_code code,
static tree
decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
HOST_WIDE_INT *pbitpos, machine_mode *pmode,
- int *punsignedp, int *pvolatilep,
+ int *punsignedp, int *preversep, int *pvolatilep,
tree *pmask, tree *pand_mask)
{
tree outer_type = 0;
@@ -3939,7 +3949,7 @@ decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
}
inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
- punsignedp, pvolatilep, false);
+ punsignedp, preversep, pvolatilep, false);
if ((inner == exp && and_mask == 0)
|| *pbitsize < 0 || offset != 0
|| TREE_CODE (inner) == PLACEHOLDER_EXPR)
@@ -5441,6 +5451,7 @@ fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
+ int ll_reversep, lr_reversep, rl_reversep, rr_reversep;
machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
machine_mode lnmode, rnmode;
tree ll_mask, lr_mask, rl_mask, rr_mask;
@@ -5553,33 +5564,39 @@ fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
volatilep = 0;
ll_inner = decode_field_reference (loc, ll_arg,
&ll_bitsize, &ll_bitpos, &ll_mode,
- &ll_unsignedp, &volatilep, &ll_mask,
- &ll_and_mask);
+ &ll_unsignedp, &ll_reversep, &volatilep,
+ &ll_mask, &ll_and_mask);
lr_inner = decode_field_reference (loc, lr_arg,
&lr_bitsize, &lr_bitpos, &lr_mode,
- &lr_unsignedp, &volatilep, &lr_mask,
- &lr_and_mask);
+ &lr_unsignedp, &lr_reversep, &volatilep,
+ &lr_mask, &lr_and_mask);
rl_inner = decode_field_reference (loc, rl_arg,
&rl_bitsize, &rl_bitpos, &rl_mode,
- &rl_unsignedp, &volatilep, &rl_mask,
- &rl_and_mask);
+ &rl_unsignedp, &rl_reversep, &volatilep,
+ &rl_mask, &rl_and_mask);
rr_inner = decode_field_reference (loc, rr_arg,
&rr_bitsize, &rr_bitpos, &rr_mode,
- &rr_unsignedp, &volatilep, &rr_mask,
- &rr_and_mask);
+ &rr_unsignedp, &rr_reversep, &volatilep,
+ &rr_mask, &rr_and_mask);
/* It must be true that the inner operation on the lhs of each
comparison must be the same if we are to be able to do anything.
Then see if we have constants. If not, the same must be true for
the rhs's. */
- if (volatilep || ll_inner == 0 || rl_inner == 0
+ if (volatilep
+ || ll_reversep != rl_reversep
+ || ll_inner == 0 || rl_inner == 0
|| ! operand_equal_p (ll_inner, rl_inner, 0))
return 0;
if (TREE_CODE (lr_arg) == INTEGER_CST
&& TREE_CODE (rr_arg) == INTEGER_CST)
- l_const = lr_arg, r_const = rr_arg;
- else if (lr_inner == 0 || rr_inner == 0
+ {
+ l_const = lr_arg, r_const = rr_arg;
+ lr_reversep = ll_reversep;
+ }
+ else if (lr_reversep != rr_reversep
+ || lr_inner == 0 || rr_inner == 0
|| ! operand_equal_p (lr_inner, rr_inner, 0))
return 0;
else
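
The overall transformation fold_truth_andor_1 is building toward, for the constants case just handled (the folded form is illustrative and target-dependent):

struct s { unsigned int a : 4; unsigned int b : 4; };

int both (const struct s *p)
{
  /* Two tests of adjacent bit-fields against constants can merge into
     a single load, mask and compare, e.g. roughly
     (*(unsigned char *) p & 0xff) == 0x21 on a little-endian target.
     With this patch the merge also requires matching storage order.  */
  return p->a == 1 && p->b == 2;
}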
@@ -5632,7 +5649,7 @@ fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
- if (BYTES_BIG_ENDIAN)
+ if (ll_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
{
xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
@@ -5697,7 +5714,7 @@ fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
- if (BYTES_BIG_ENDIAN)
+ if (lr_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
{
xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
@@ -5720,12 +5737,12 @@ fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
{
lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
- ll_unsignedp || rl_unsignedp);
+ ll_unsignedp || rl_unsignedp, ll_reversep);
if (! all_ones_mask_p (ll_mask, lnbitsize))
lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
- lr_unsignedp || rr_unsignedp);
+ lr_unsignedp || rr_unsignedp, lr_reversep);
if (! all_ones_mask_p (lr_mask, rnbitsize))
rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
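
The non-constant case handled here, sketched at source level (illustrative): when the fields on each side occupy matching words, both tests become one masked word comparison.

struct s { unsigned int a : 4; unsigned int b : 4; };

int eq_both (const struct s *p, const struct s *q)
{
  /* Neither comparison has a constant operand; the two tests fold
     into a single compare of the masked containing words.  */
  return p->a == q->a && p->b == q->b;
}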
@@ -5748,10 +5765,12 @@ fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
lhs = make_bit_field_ref (loc, ll_inner, lntype,
ll_bitsize + rl_bitsize,
- MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
+ MIN (ll_bitpos, rl_bitpos),
+ ll_unsignedp, ll_reversep);
rhs = make_bit_field_ref (loc, lr_inner, rntype,
lr_bitsize + rr_bitsize,
- MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
+ MIN (lr_bitpos, rr_bitpos),
+ lr_unsignedp, lr_reversep);
ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
size_int (MIN (xll_bitpos, xrl_bitpos)));
@@ -5814,7 +5833,7 @@ fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
that field, perform the mask operation. Then compare with the
merged constant. */
result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
- ll_unsignedp || rl_unsignedp);
+ ll_unsignedp || rl_unsignedp, ll_reversep);
ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
if (! all_ones_mask_p (ll_mask, lnbitsize))
@@ -7678,10 +7697,11 @@ fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
HOST_WIDE_INT bitsize, bitpos;
tree offset;
machine_mode mode;
- int unsignedp, volatilep;
- tree base = TREE_OPERAND (op0, 0);
- base = get_inner_reference (base, &bitsize, &bitpos, &offset,
- &mode, &unsignedp, &volatilep, false);
+ int unsignedp, reversep, volatilep;
+ tree base
+ = get_inner_reference (TREE_OPERAND (op0, 0), &bitsize, &bitpos,
+ &offset, &mode, &unsignedp, &reversep,
+ &volatilep, false);
/* If the reference was to a (constant) zero offset, we can use
the address of the base if it has the same base type
as the result type and the pointer type is unqualified. */
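
The zero-offset folding described above, at source level (illustrative): the address of a reference at constant offset zero reduces to the (converted) base address.

int a[4];

int *first (void)
{
  return &a[0];   /* zero-offset reference: folds to the address of a */
}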
@@ -7817,8 +7837,12 @@ fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
case VIEW_CONVERT_EXPR:
if (TREE_CODE (op0) == MEM_REF)
- return fold_build2_loc (loc, MEM_REF, type,
- TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
+ {
+ tem = fold_build2_loc (loc, MEM_REF, type,
+ TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
+ REF_REVERSE_STORAGE_ORDER (tem) = REF_REVERSE_STORAGE_ORDER (op0);
+ return tem;
+ }
return NULL_TREE;
@@ -8335,7 +8359,7 @@ fold_comparison (location_t loc, enum tree_code code, tree type,
tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
machine_mode mode;
- int volatilep, unsignedp;
+ int volatilep, reversep, unsignedp;
bool indirect_base0 = false, indirect_base1 = false;
/* Get base and offset for the access. Strip ADDR_EXPR for
@@ -8345,9 +8369,10 @@ fold_comparison (location_t loc, enum tree_code code, tree type,
base0 = arg0;
if (TREE_CODE (arg0) == ADDR_EXPR)
{
- base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
- &bitsize, &bitpos0, &offset0, &mode,
- &unsignedp, &volatilep, false);
+ base0
+ = get_inner_reference (TREE_OPERAND (arg0, 0),
+ &bitsize, &bitpos0, &offset0, &mode,
+ &unsignedp, &reversep, &volatilep, false);
if (TREE_CODE (base0) == INDIRECT_REF)
base0 = TREE_OPERAND (base0, 0);
else
@@ -8379,9 +8404,10 @@ fold_comparison (location_t loc, enum tree_code code, tree type,
base1 = arg1;
if (TREE_CODE (arg1) == ADDR_EXPR)
{
- base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
- &bitsize, &bitpos1, &offset1, &mode,
- &unsignedp, &volatilep, false);
+ base1
+ = get_inner_reference (TREE_OPERAND (arg1, 0),
+ &bitsize, &bitpos1, &offset1, &mode,
+ &unsignedp, &reversep, &volatilep, false);
if (TREE_CODE (base1) == INDIRECT_REF)
base1 = TREE_OPERAND (base1, 0);
else
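
The address comparison these get_inner_reference calls feed, sketched (illustrative): addresses of members of the same object reduce to a comparison of their constant offsets.

struct s { int a; int b; };

int later_member (struct s *p)
{
  /* Same base p on both sides: the fold compares the constant field
     offsets, so this reduces to 1.  */
  return &p->a < &p->b;
}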
@@ -14311,15 +14337,15 @@ split_address_to_core_and_offset (tree exp,
{
tree core;
machine_mode mode;
- int unsignedp, volatilep;
+ int unsignedp, reversep, volatilep;
HOST_WIDE_INT bitsize;
location_t loc = EXPR_LOCATION (exp);
if (TREE_CODE (exp) == ADDR_EXPR)
{
core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
- poffset, &mode, &unsignedp, &volatilep,
- false);
+ poffset, &mode, &unsignedp, &reversep,
+ &volatilep, false);
core = build_fold_addr_expr_loc (loc, core);
}
else
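
What split_address_to_core_and_offset computes for an ADDR_EXPR, informally (names illustrative): the address of the enclosing object plus the constant offset of the reference, as reported by get_inner_reference.

struct s2 { int a; int b; };
extern struct s2 g;

int *addr_b (void)
{
  return &g.b;   /* splits into core &g plus the constant offset of b */
}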