path: root/gcc/fold-const.c
author     Bernd Edlinger <edlinger@gcc.gnu.org>    2013-11-27 16:33:01 +0000
committer  Bernd Edlinger <edlinger@gcc.gnu.org>    2013-11-27 16:33:01 +0000
commit     b3ecff82d7f193033d98d04124114aa57c2adba3 (patch)
tree       57109bd0c5a05fb6134524e9eba3e9890ae43804 /gcc/fold-const.c
parent     36db5f30004bdf03cde1fbd0e1355ff50e1b91cc (diff)
2013-11-27 Bernd Edlinger <bernd.edlinger@hotmail.de>
Reverted r205398 ("Remove parameter keep_aligning from get_inner_reference.") on request.

From-SVN: r205452
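For context, the practical effect of the revert is that get_inner_reference again takes a trailing bool keep_aligning argument, so every caller in fold-const.c goes back to spelling out false for it, as the + lines in the diff below show. A minimal sketch of the signature change, assuming the declaration of that period; the stand-in typedefs only approximate what coretypes.h, hwint.h and machmode.h provide inside the GCC tree, so treat this as an illustration rather than a copy of the real headers:

    #include <stdbool.h>

    /* Stand-ins so the sketch is self-contained; inside GCC these come
       from the compiler's own headers.  */
    typedef union tree_node *tree;
    typedef long HOST_WIDE_INT;
    enum machine_mode { VOIDmode };

    /* With r205398 applied, the trailing keep_aligning parameter was gone:

       extern tree get_inner_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
                                        tree *, enum machine_mode *, int *,
                                        int *);  */

    /* After this revert the final bool keep_aligning is back, and callers
       in fold-const.c pass false for it again, e.g.
       linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset,
                                     &lmode, &lunsignedp, &lvolatilep, false);  */
    extern tree get_inner_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
                                     tree *, enum machine_mode *, int *,
                                     int *, bool);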
Diffstat (limited to 'gcc/fold-const.c')
-rw-r--r--  gcc/fold-const.c  |  15
1 file changed, 8 insertions(+), 7 deletions(-)
diff --git a/gcc/fold-const.c b/gcc/fold-const.c
index fcd7f08..5cf8ed1 100644
--- a/gcc/fold-const.c
+++ b/gcc/fold-const.c
@@ -3503,7 +3503,7 @@ optimize_bit_field_compare (location_t loc, enum tree_code code,
do anything if the inner expression is a PLACEHOLDER_EXPR since we
then will no longer be able to replace it. */
linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
- &lunsignedp, &lvolatilep);
+ &lunsignedp, &lvolatilep, false);
if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
|| offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
return 0;
@@ -3513,7 +3513,7 @@ optimize_bit_field_compare (location_t loc, enum tree_code code,
/* If this is not a constant, we can only do something if bit positions,
sizes, and signedness are the same. */
rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
- &runsignedp, &rvolatilep);
+ &runsignedp, &rvolatilep, false);
if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
|| lunsignedp != runsignedp || offset != 0
@@ -3687,7 +3687,7 @@ decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
}
inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
- punsignedp, pvolatilep);
+ punsignedp, pvolatilep, false);
if ((inner == exp && and_mask == 0)
|| *pbitsize < 0 || offset != 0
|| TREE_CODE (inner) == PLACEHOLDER_EXPR)
@@ -8071,7 +8071,7 @@ fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
int unsignedp, volatilep;
tree base = TREE_OPERAND (op0, 0);
base = get_inner_reference (base, &bitsize, &bitpos, &offset,
- &mode, &unsignedp, &volatilep);
+ &mode, &unsignedp, &volatilep, false);
/* If the reference was to a (constant) zero offset, we can use
the address of the base if it has the same base type
as the result type and the pointer type is unqualified. */
@@ -9096,7 +9096,7 @@ fold_comparison (location_t loc, enum tree_code code, tree type,
{
base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
&bitsize, &bitpos0, &offset0, &mode,
- &unsignedp, &volatilep);
+ &unsignedp, &volatilep, false);
if (TREE_CODE (base0) == INDIRECT_REF)
base0 = TREE_OPERAND (base0, 0);
else
@@ -9130,7 +9130,7 @@ fold_comparison (location_t loc, enum tree_code code, tree type,
{
base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
&bitsize, &bitpos1, &offset1, &mode,
- &unsignedp, &volatilep);
+ &unsignedp, &volatilep, false);
if (TREE_CODE (base1) == INDIRECT_REF)
base1 = TREE_OPERAND (base1, 0);
else
@@ -16997,7 +16997,8 @@ split_address_to_core_and_offset (tree exp,
if (TREE_CODE (exp) == ADDR_EXPR)
{
core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
- poffset, &mode, &unsignedp, &volatilep);
+ poffset, &mode, &unsignedp, &volatilep,
+ false);
core = build_fold_addr_expr_loc (loc, core);
}
else