aboutsummaryrefslogtreecommitdiff
path: root/gcc/config
diff options
context:
space:
mode:
authorKyrylo Tkachov <kyrylo.tkachov@arm.com>2015-11-17 13:20:08 +0000
committerKyrylo Tkachov <ktkachov@gcc.gnu.org>2015-11-17 13:20:08 +0000
commitf17b0ebc79a09122a3ed86ded43c3c0e68640314 (patch)
tree24a71f5e90b257aaa72fb3c4bdf1b114fc391694 /gcc/config
parent8502951bceff3f0679f79182c0285dcc54dfa717 (diff)
downloadgcc-f17b0ebc79a09122a3ed86ded43c3c0e68640314.zip
gcc-f17b0ebc79a09122a3ed86ded43c3c0e68640314.tar.gz
gcc-f17b0ebc79a09122a3ed86ded43c3c0e68640314.tar.bz2
[ARM] PR 68143 Properly update memory offsets when expanding setmem
	PR target/68143
	* config/arm/arm.c (arm_block_set_unaligned_vect): Keep track
	of offset from dstbase and use it appropriately in
	adjust_automodify_address.
	(arm_block_set_aligned_vect): Likewise.

	* gcc.c-torture/execute/pr68143_1.c: New test.

From-SVN: r230462
Diffstat (limited to 'gcc/config')
-rw-r--r--gcc/config/arm/arm.c37
1 files changed, 24 insertions, 13 deletions
diff --git a/gcc/config/arm/arm.c b/gcc/config/arm/arm.c
index c839f52..c3c0c93 100644
--- a/gcc/config/arm/arm.c
+++ b/gcc/config/arm/arm.c
@@ -29164,7 +29164,7 @@ arm_block_set_unaligned_vect (rtx dstbase,
rtx (*gen_func) (rtx, rtx);
machine_mode mode;
unsigned HOST_WIDE_INT v = value;
-
+ unsigned int offset = 0;
gcc_assert ((align & 0x3) != 0);
nelt_v8 = GET_MODE_NUNITS (V8QImode);
nelt_v16 = GET_MODE_NUNITS (V16QImode);
@@ -29185,7 +29185,7 @@ arm_block_set_unaligned_vect (rtx dstbase,
return false;
dst = copy_addr_to_reg (XEXP (dstbase, 0));
- mem = adjust_automodify_address (dstbase, mode, dst, 0);
+ mem = adjust_automodify_address (dstbase, mode, dst, offset);
v = sext_hwi (v, BITS_PER_WORD);
val_elt = GEN_INT (v);
@@ -29202,7 +29202,11 @@ arm_block_set_unaligned_vect (rtx dstbase,
{
emit_insn ((*gen_func) (mem, reg));
if (i + 2 * nelt_mode <= length)
- emit_insn (gen_add2_insn (dst, GEN_INT (nelt_mode)));
+ {
+ emit_insn (gen_add2_insn (dst, GEN_INT (nelt_mode)));
+ offset += nelt_mode;
+ mem = adjust_automodify_address (dstbase, mode, dst, offset);
+ }
}
/* If there are not less than nelt_v8 bytes leftover, we must be in
@@ -29213,6 +29217,9 @@ arm_block_set_unaligned_vect (rtx dstbase,
if (i + nelt_v8 < length)
{
emit_insn (gen_add2_insn (dst, GEN_INT (length - i)));
+ offset += length - i;
+ mem = adjust_automodify_address (dstbase, mode, dst, offset);
+
/* We are shifting bytes back, set the alignment accordingly. */
if ((length & 1) != 0 && align >= 2)
set_mem_align (mem, BITS_PER_UNIT);
@@ -29223,12 +29230,13 @@ arm_block_set_unaligned_vect (rtx dstbase,
else if (i < length && i + nelt_v8 >= length)
{
if (mode == V16QImode)
- {
- reg = gen_lowpart (V8QImode, reg);
- mem = adjust_automodify_address (dstbase, V8QImode, dst, 0);
- }
+ reg = gen_lowpart (V8QImode, reg);
+
emit_insn (gen_add2_insn (dst, GEN_INT ((length - i)
+ (nelt_mode - nelt_v8))));
+ offset += (length - i) + (nelt_mode - nelt_v8);
+ mem = adjust_automodify_address (dstbase, V8QImode, dst, offset);
+
/* We are shifting bytes back, set the alignment accordingly. */
if ((length & 1) != 0 && align >= 2)
set_mem_align (mem, BITS_PER_UNIT);
@@ -29255,6 +29263,7 @@ arm_block_set_aligned_vect (rtx dstbase,
rtx rval[MAX_VECT_LEN];
machine_mode mode;
unsigned HOST_WIDE_INT v = value;
+ unsigned int offset = 0;
gcc_assert ((align & 0x3) == 0);
nelt_v8 = GET_MODE_NUNITS (V8QImode);
@@ -29286,14 +29295,15 @@ arm_block_set_aligned_vect (rtx dstbase,
/* Handle first 16 bytes specially using vst1:v16qi instruction. */
if (mode == V16QImode)
{
- mem = adjust_automodify_address (dstbase, mode, dst, 0);
+ mem = adjust_automodify_address (dstbase, mode, dst, offset);
emit_insn (gen_movmisalignv16qi (mem, reg));
i += nelt_mode;
/* Handle (8, 16) bytes leftover using vst1:v16qi again. */
if (i + nelt_v8 < length && i + nelt_v16 > length)
{
emit_insn (gen_add2_insn (dst, GEN_INT (length - nelt_mode)));
- mem = adjust_automodify_address (dstbase, mode, dst, 0);
+ offset += length - nelt_mode;
+ mem = adjust_automodify_address (dstbase, mode, dst, offset);
/* We are shifting bytes back, set the alignment accordingly. */
if ((length & 0x3) == 0)
set_mem_align (mem, BITS_PER_UNIT * 4);
@@ -29315,7 +29325,7 @@ arm_block_set_aligned_vect (rtx dstbase,
for (; (i + nelt_mode <= length); i += nelt_mode)
{
addr = plus_constant (Pmode, dst, i);
- mem = adjust_automodify_address (dstbase, mode, addr, i);
+ mem = adjust_automodify_address (dstbase, mode, addr, offset + i);
emit_move_insn (mem, reg);
}
@@ -29324,8 +29334,8 @@ arm_block_set_aligned_vect (rtx dstbase,
if (i + UNITS_PER_WORD == length)
{
addr = plus_constant (Pmode, dst, i - UNITS_PER_WORD);
- mem = adjust_automodify_address (dstbase, mode,
- addr, i - UNITS_PER_WORD);
+ offset += i - UNITS_PER_WORD;
+ mem = adjust_automodify_address (dstbase, mode, addr, offset);
/* We are shifting 4 bytes back, set the alignment accordingly. */
if (align > UNITS_PER_WORD)
set_mem_align (mem, BITS_PER_UNIT * UNITS_PER_WORD);
@@ -29337,7 +29347,8 @@ arm_block_set_aligned_vect (rtx dstbase,
else if (i < length)
{
emit_insn (gen_add2_insn (dst, GEN_INT (length - nelt_mode)));
- mem = adjust_automodify_address (dstbase, mode, dst, 0);
+ offset += length - nelt_mode;
+ mem = adjust_automodify_address (dstbase, mode, dst, offset);
/* We are shifting bytes back, set the alignment accordingly. */
if ((length & 1) == 0)
set_mem_align (mem, BITS_PER_UNIT * 2);