author    Bernd Edlinger <bernd.edlinger@hotmail.de>  2014-05-28 13:37:02 +0000
committer Bernd Edlinger <edlinger@gcc.gnu.org>       2014-05-28 13:37:02 +0000
commit    7fbf8a3ede761711ffed9a8658746db31bfbd0e1 (patch)
tree      cf2d6a8238ccc834c31562af45ad8fcf181e6fd6 /gcc
parent    b017a17489a1e89510a224a3796275ec5cac97c4 (diff)
expr.c (expand_assignment): Fold the bitpos in the to_rtx if sufficiently aligned and an offset is used at the same time.

2014-05-28  Bernd Edlinger  <bernd.edlinger@hotmail.de>

	* expr.c (expand_assignment): Fold the bitpos in the to_rtx if
	sufficiently aligned and an offset is used at the same time.
	(expand_expr_real_1): Likewise.

From-SVN: r211020
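For context, a source construct along the following lines would exercise the offset-plus-bitpos case the patch targets. This is a minimal, hypothetical sketch, not taken from the commit; the struct layout and names assume an LP64 target:

    /* Hypothetical illustration: field "b" sits at constant byte offset
       0x18, and the variable index "i" supplies the OFFSET_RTX part.  */
    struct s
    {
      long a;       /* byte offset 0x00 */
      long pad[2];  /* byte offsets 0x08, 0x10 */
      long b;       /* byte offset 0x18 */
    };

    void
    store_b (struct s *p, int i, long v)
    {
      /* Expansion sees OFFSET_RTX from "p[i]" plus a BITPOS of
         0x18 * BITS_PER_UNIT from ".b"; with the patch, the 0x18 is folded
         into the address register up front, matching the preferred RTL in
         the comment below ([r124:DI]=r121:DI rather than a +0x8 offset).  */
      p[i].b = v;
    }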
Diffstat (limited to 'gcc')
-rw-r--r--  gcc/ChangeLog   6
-rw-r--r--  gcc/expr.c     37
2 files changed, 31 insertions, 12 deletions
diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index 76d4a66..7d888f8 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,3 +1,9 @@
+2014-05-28 Bernd Edlinger <bernd.edlinger@hotmail.de>
+
+ * expr.c (expand_assignment): Fold the bitpos in the to_rtx if
+ sufficiently aligned and an offset is used at the same time.
+ (expand_expr_real_1): Likewise.
+
2014-05-28 Richard Biener <rguenther@suse.de>
PR middle-end/61045
diff --git a/gcc/expr.c b/gcc/expr.c
index 2868d9d..d99bc1e 100644
--- a/gcc/expr.c
+++ b/gcc/expr.c
@@ -4838,15 +4838,29 @@ expand_assignment (tree to, tree from, bool nontemporal)
if (GET_MODE (offset_rtx) != address_mode)
offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
- /* The check for a constant address in TO_RTX not having VOIDmode
- is probably no longer necessary. */
- if (MEM_P (to_rtx)
- && GET_MODE (to_rtx) == BLKmode
- && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
+ /* If we have an expression in OFFSET_RTX and a non-zero
+ byte offset in BITPOS, adding the byte offset before the
+ OFFSET_RTX results in better intermediate code, which makes
+ later rtl optimization passes perform better.
+
+ We prefer intermediate code like this:
+
+ r124:DI=r123:DI+0x18
+ [r124:DI]=r121:DI
+
+ ... instead of ...
+
+ r124:DI=r123:DI+0x10
+ [r124:DI+0x8]=r121:DI
+
+ This is only done for aligned data values, as these can
+ be expected to result in single move instructions. */
+ if (mode1 != VOIDmode
+ && bitpos != 0
&& bitsize > 0
&& (bitpos % bitsize) == 0
&& (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
- && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
+ && MEM_ALIGN (to_rtx) >= GET_MODE_ALIGNMENT (mode1))
{
to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
bitregion_start = 0;
@@ -10090,14 +10104,13 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
if (GET_MODE (offset_rtx) != address_mode)
offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
- if (GET_MODE (op0) == BLKmode
- /* The check for a constant address in OP0 not having VOIDmode
- is probably no longer necessary. */
- && GET_MODE (XEXP (op0, 0)) != VOIDmode
- && bitsize != 0
+ /* See the comment in expand_assignment for the rationale. */
+ if (mode1 != VOIDmode
+ && bitpos != 0
+ && bitsize > 0
&& (bitpos % bitsize) == 0
&& (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
- && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
+ && MEM_ALIGN (op0) >= GET_MODE_ALIGNMENT (mode1))
{
op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
bitpos = 0;
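Note the relaxation of the alignment test in both hunks from == to >=. A minimal, hypothetical sketch of a case this admits (not taken from the commit; it assumes GCC's aligned attribute and an LP64 target where GET_MODE_ALIGNMENT (DImode) is 64 bits):

    /* The element type below is over-aligned, so MEM_ALIGN on the
       reference is 128 bits while the DImode alignment is 64 bits.
       The old equality test skipped the fold for such over-aligned
       accesses; ">=" accepts them.  */
    struct u
    {
      long x;  /* byte offset 0x0 */
      long y;  /* byte offset 0x8 */
    } __attribute__ ((aligned (16)));

    void
    set_y (struct u *q, int i, long v)
    {
      q[i].y = v;  /* bitpos 0x8 * BITS_PER_UNIT still folds into the address */
    }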