about summary refs log tree commit diff
path: root/gcc/expr.c
diff options
context:
space:
mode:
authorJoseph Myers <joseph@codesourcery.com>2008-09-18 02:09:40 +0100
committerJoseph Myers <jsm28@gcc.gnu.org>2008-09-18 02:09:40 +0100
commit38c4df0b852230edc05c20b000bec61819735ecf (patch)
treec0cfca6716c8f6cbcceac8450b8516d949cd8c34 /gcc/expr.c
parent2acafa84bd7685b95391d143d2b2421b5371c7a1 (diff)
downloadgcc-38c4df0b852230edc05c20b000bec61819735ecf.zip
gcc-38c4df0b852230edc05c20b000bec61819735ecf.tar.gz
gcc-38c4df0b852230edc05c20b000bec61819735ecf.tar.bz2
expr.c (emit_group_store): Do not shift before moving via a stack slot.
* expr.c (emit_group_store): Do not shift before moving via a stack slot. From-SVN: r140442
Diffstat (limited to 'gcc/expr.c')
-rw-r--r--gcc/expr.c45
1 files changed, 25 insertions, 20 deletions
diff --git a/gcc/expr.c b/gcc/expr.c
index f2e9c79..ae0daf0 100644
--- a/gcc/expr.c
+++ b/gcc/expr.c
@@ -2039,33 +2039,17 @@ emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
enum machine_mode mode = GET_MODE (tmps[i]);
unsigned int bytelen = GET_MODE_SIZE (mode);
+ unsigned int adj_bytelen = bytelen;
rtx dest = dst;
/* Handle trailing fragments that run over the size of the struct. */
if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
- {
- /* store_bit_field always takes its value from the lsb.
- Move the fragment to the lsb if it's not already there. */
- if (
-#ifdef BLOCK_REG_PADDING
- BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
- == (BYTES_BIG_ENDIAN ? upward : downward)
-#else
- BYTES_BIG_ENDIAN
-#endif
- )
- {
- int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
- tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
- build_int_cst (NULL_TREE, shift),
- tmps[i], 0);
- }
- bytelen = ssize - bytepos;
- }
+ adj_bytelen = ssize - bytepos;
if (GET_CODE (dst) == CONCAT)
{
- if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
+ if (bytepos + adj_bytelen
+ <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
dest = XEXP (dst, 0);
else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
{
@@ -2103,6 +2087,27 @@ emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
}
}
+ if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
+ {
+ /* store_bit_field always takes its value from the lsb.
+ Move the fragment to the lsb if it's not already there. */
+ if (
+#ifdef BLOCK_REG_PADDING
+ BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
+ == (BYTES_BIG_ENDIAN ? upward : downward)
+#else
+ BYTES_BIG_ENDIAN
+#endif
+ )
+ {
+ int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
+ tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
+ build_int_cst (NULL_TREE, shift),
+ tmps[i], 0);
+ }
+ bytelen = adj_bytelen;
+ }
+
/* Optimize the access just a bit. */
if (MEM_P (dest)
&& (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))