path: root/gcc
author     Bernd Edlinger <bernd.edlinger@hotmail.de>  2013-10-21 15:57:05 +0000
committer  Bernd Edlinger <edlinger@gcc.gnu.org>       2013-10-21 15:57:05 +0000
commit     0cad6830a42f45f7d755fd607f91f934baaa0abe (patch)
tree       5c2c938a6b01353b413b726cd5724b4f715e5cf7 /gcc
parent     5dfd32cddaa43eeda595dba8dff86ae1794f0830 (diff)
Fix volatile issues in optimize_bit_field_compare.
2013-10-21  Bernd Edlinger  <bernd.edlinger@hotmail.de>

	Fix volatile issues in optimize_bit_field_compare.

	* fold-const.c (optimize_bit_field_compare): Bail out if
	lvolatilep or rvolatilep.

From-SVN: r203899
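For context, a hypothetical C fragment (not taken from the commit or its testsuite) of the kind this change protects. The struct and function names here are illustrative only: when either compared operand is a volatile bit-field, folding the two field extractions into a single masked word-mode load per side would change the number and width of the volatile accesses, which is observable behavior on memory-mapped hardware.

/* Hypothetical illustration only.  Before this fix,
   optimize_bit_field_compare could rewrite the comparison below into
   wider loads masked down to the field bits; after it, the fold bails
   out because the operands are volatile, so each field is read
   exactly as written.  */
struct hwreg
{
  volatile unsigned int status : 4;
  volatile unsigned int mode : 4;
};

int
status_matches (struct hwreg *a, struct hwreg *b)
{
  return a->status == b->status;
}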
Diffstat (limited to 'gcc')
-rw-r--r--  gcc/ChangeLog     6
-rw-r--r--  gcc/fold-const.c  29
2 files changed, 13 insertions(+), 22 deletions(-)
diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index fd9123f..e839517 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,5 +1,11 @@
2013-10-21 Bernd Edlinger <bernd.edlinger@hotmail.de>
+ Fix volatile issues in optimize_bit_field_compare.
+ * fold-const.c (optimize_bit_field_compare): Bail out if
+ lvolatilep or rvolatilep.
+
+2013-10-21 Bernd Edlinger <bernd.edlinger@hotmail.de>
+
Fix DECL_BIT_FIELD dependency on flag_strict_volatile_bitfields
and get_inner_reference returning different pmode for non-volatile
bit-field members dependent on flag_strict_volatile_bitfields.
diff --git a/gcc/fold-const.c b/gcc/fold-const.c
index 0f9454d4..2c2b929 100644
--- a/gcc/fold-const.c
+++ b/gcc/fold-const.c
@@ -3473,11 +3473,6 @@ optimize_bit_field_compare (location_t loc, enum tree_code code,
tree mask;
tree offset;
- /* In the strict volatile bitfields case, doing code changes here may prevent
- other optimizations, in particular in a SLOW_BYTE_ACCESS setting. */
- if (flag_strict_volatile_bitfields > 0)
- return 0;
-
/* Get all the information about the extractions being done. If the bit size
is the same as the size of the underlying object, we aren't doing an
extraction at all and so can do nothing. We also don't want to
@@ -3486,7 +3481,7 @@ optimize_bit_field_compare (location_t loc, enum tree_code code,
linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
&lunsignedp, &lvolatilep, false);
if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
- || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
+ || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
return 0;
if (!const_p)
@@ -3498,22 +3493,17 @@ optimize_bit_field_compare (location_t loc, enum tree_code code,
if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
|| lunsignedp != runsignedp || offset != 0
- || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
+ || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
return 0;
}
/* See if we can find a mode to refer to this field. We should be able to,
but fail if we can't. */
- if (lvolatilep
- && GET_MODE_BITSIZE (lmode) > 0
- && flag_strict_volatile_bitfields > 0)
- nmode = lmode;
- else
- nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
- const_p ? TYPE_ALIGN (TREE_TYPE (linner))
- : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
- TYPE_ALIGN (TREE_TYPE (rinner))),
- word_mode, lvolatilep || rvolatilep);
+ nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
+ const_p ? TYPE_ALIGN (TREE_TYPE (linner))
+ : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
+ TYPE_ALIGN (TREE_TYPE (rinner))),
+ word_mode, false);
if (nmode == VOIDmode)
return 0;
@@ -3602,11 +3592,6 @@ optimize_bit_field_compare (location_t loc, enum tree_code code,
appropriate number of bits and mask it with the computed mask
(in case this was a signed field). If we changed it, make a new one. */
lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
- if (lvolatilep)
- {
- TREE_SIDE_EFFECTS (lhs) = 1;
- TREE_THIS_VOLATILE (lhs) = 1;
- }
rhs = const_binop (BIT_AND_EXPR,
const_binop (LSHIFT_EXPR,