author	Richard Sandiford <richard.sandiford@linaro.org>	2018-01-03 07:18:52 +0000
committer	Richard Sandiford <rsandifo@gcc.gnu.org>	2018-01-03 07:18:52 +0000
commit	cff7107a97be6b32efc4c100ed85957c7e0b331e (patch)
tree	9f51809c8a2ed51d25f59ccba8361d873a0b8367 /gcc
parent	65f914e714d985db7eaaa8e63a210b4fee17b13f (diff)
poly_int: alter_reg
This patch makes alter_reg cope with polynomial mode sizes.

2018-01-03  Richard Sandiford  <richard.sandiford@linaro.org>
	    Alan Hayward  <alan.hayward@arm.com>
	    David Sherwood  <david.sherwood@arm.com>

gcc/
	* reload1.c (spill_stack_slot_width): Change element type
	from unsigned int to poly_uint64_pod.
	(alter_reg): Treat mode sizes as polynomial.

Co-Authored-By: Alan Hayward <alan.hayward@arm.com>
Co-Authored-By: David Sherwood <david.sherwood@arm.com>

From-SVN: r256160
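The comparisons used in the patch (maybe_gt, known_ge, ordered_p, ordered_max) come from GCC's poly-int.h. The sketch below is a hypothetical, self-contained stand-in for that header (not GCC's real implementation), modelling a size as C0 + C1 * N for a runtime parameter N, to show why a plain ">" or ">=" no longer works once mode sizes can have a runtime component.

/* Hypothetical stand-in for GCC's poly-int.h, for illustration only:
   a size of the form C0 + C1 * N, where N is a runtime parameter
   (e.g. the extra vector length on a variable-length target),
   assumed to range over all values >= 0.  */

#include <cassert>
#include <cstdint>
#include <cstdio>

struct poly_size          /* stand-in for poly_uint64 */
{
  uint64_t c0;            /* constant coefficient */
  uint64_t c1;            /* coefficient of the runtime parameter N */
};

/* A >= B for every N >= 0: both coefficients must dominate.  */
static bool
known_ge (poly_size a, poly_size b)
{
  return a.c0 >= b.c0 && a.c1 >= b.c1;
}

/* A > B for at least one N >= 0, i.e. !(A <= B for every N).  */
static bool
maybe_gt (poly_size a, poly_size b)
{
  return a.c0 > b.c0 || a.c1 > b.c1;
}

/* A and B compare the same way for every N >= 0.  */
static bool
ordered_p (poly_size a, poly_size b)
{
  return known_ge (a, b) || known_ge (b, a);
}

/* Maximum of two sizes that are known to be ordered.  */
static poly_size
ordered_max (poly_size a, poly_size b)
{
  assert (ordered_p (a, b));
  return known_ge (a, b) ? a : b;
}

int
main ()
{
  poly_size fixed16 = { 16, 0 };   /* always 16 bytes */
  poly_size vec = { 16, 16 };      /* 16 + 16N bytes, e.g. a scalable vector */
  poly_size fixed32 = { 32, 0 };   /* always 32 bytes */

  /* The scalable size dominates the fixed 16-byte size for every N,
     so known_ge holds and ordered_max is well defined.  */
  printf ("known_ge (vec, fixed16) = %d\n", known_ge (vec, fixed16));
  printf ("ordered_max (vec, fixed16) = {%llu, %llu}\n",
	  (unsigned long long) ordered_max (vec, fixed16).c0,
	  (unsigned long long) ordered_max (vec, fixed16).c1);

  /* 32 vs 16 + 16N is larger at N = 0 but smaller at N = 2, so the two
     sizes are unordered: a plain ">" has no single answer here, only
     maybe_gt/known_ge-style questions do.  */
  printf ("ordered_p (fixed32, vec) = %d\n", ordered_p (fixed32, vec));
  printf ("maybe_gt (fixed32, vec) = %d\n", maybe_gt (fixed32, vec));
  return 0;
}

In alter_reg itself, total_size and inherent_size are sizes of modes related by a subreg, which is why the patch can assert ordered_p before comparing them or taking ordered_max.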
Diffstat (limited to 'gcc')
-rw-r--r--	gcc/ChangeLog	8
-rw-r--r--	gcc/reload1.c	35
2 files changed, 31 insertions, 12 deletions
diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index 93587d7..2b47abc 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -2,6 +2,14 @@
Alan Hayward <alan.hayward@arm.com>
David Sherwood <david.sherwood@arm.com>
+ * reload1.c (spill_stack_slot_width): Change element type
+ from unsigned int to poly_uint64_pod.
+ (alter_reg): Treat mode sizes as polynomial.
+
+2018-01-03 Richard Sandiford <richard.sandiford@linaro.org>
+ Alan Hayward <alan.hayward@arm.com>
+ David Sherwood <david.sherwood@arm.com>
+
* reload.c (complex_word_subreg_p): New function.
(reload_inner_reg_of_subreg, push_reload): Use it.
diff --git a/gcc/reload1.c b/gcc/reload1.c
index a1dcf42..d9d0999 100644
--- a/gcc/reload1.c
+++ b/gcc/reload1.c
@@ -200,7 +200,7 @@ static int last_spill_reg;
static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER];
/* Width allocated so far for that stack slot. */
-static unsigned int spill_stack_slot_width[FIRST_PSEUDO_REGISTER];
+static poly_uint64_pod spill_stack_slot_width[FIRST_PSEUDO_REGISTER];
/* Record which pseudos needed to be spilled. */
static regset_head spilled_pseudos;
@@ -2142,10 +2142,10 @@ alter_reg (int i, int from_reg, bool dont_share_p)
{
rtx x = NULL_RTX;
machine_mode mode = GET_MODE (regno_reg_rtx[i]);
- unsigned HOST_WIDE_INT inherent_size = PSEUDO_REGNO_BYTES (i);
+ poly_uint64 inherent_size = GET_MODE_SIZE (mode);
unsigned int inherent_align = GET_MODE_ALIGNMENT (mode);
machine_mode wider_mode = wider_subreg_mode (mode, reg_max_ref_mode[i]);
- unsigned HOST_WIDE_INT total_size = GET_MODE_SIZE (wider_mode);
+ poly_uint64 total_size = GET_MODE_SIZE (wider_mode);
unsigned int min_align = GET_MODE_BITSIZE (reg_max_ref_mode[i]);
poly_int64 adjust = 0;
@@ -2174,10 +2174,15 @@ alter_reg (int i, int from_reg, bool dont_share_p)
{
rtx stack_slot;
+ /* The sizes are taken from a subreg operation, which guarantees
+ that they're ordered. */
+ gcc_checking_assert (ordered_p (total_size, inherent_size));
+
/* No known place to spill from => no slot to reuse. */
x = assign_stack_local (mode, total_size,
min_align > inherent_align
- || total_size > inherent_size ? -1 : 0);
+ || maybe_gt (total_size, inherent_size)
+ ? -1 : 0);
stack_slot = x;
@@ -2189,7 +2194,7 @@ alter_reg (int i, int from_reg, bool dont_share_p)
adjust = inherent_size - total_size;
if (maybe_ne (adjust, 0))
{
- unsigned int total_bits = total_size * BITS_PER_UNIT;
+ poly_uint64 total_bits = total_size * BITS_PER_UNIT;
machine_mode mem_mode
= int_mode_for_size (total_bits, 1).else_blk ();
stack_slot = adjust_address_nv (x, mem_mode, adjust);
@@ -2203,9 +2208,10 @@ alter_reg (int i, int from_reg, bool dont_share_p)
/* Reuse a stack slot if possible. */
else if (spill_stack_slot[from_reg] != 0
- && spill_stack_slot_width[from_reg] >= total_size
- && (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
- >= inherent_size)
+ && known_ge (spill_stack_slot_width[from_reg], total_size)
+ && known_ge (GET_MODE_SIZE
+ (GET_MODE (spill_stack_slot[from_reg])),
+ inherent_size)
&& MEM_ALIGN (spill_stack_slot[from_reg]) >= min_align)
x = spill_stack_slot[from_reg];
@@ -2221,16 +2227,21 @@ alter_reg (int i, int from_reg, bool dont_share_p)
if (partial_subreg_p (mode,
GET_MODE (spill_stack_slot[from_reg])))
mode = GET_MODE (spill_stack_slot[from_reg]);
- if (spill_stack_slot_width[from_reg] > total_size)
- total_size = spill_stack_slot_width[from_reg];
+ total_size = ordered_max (total_size,
+ spill_stack_slot_width[from_reg]);
if (MEM_ALIGN (spill_stack_slot[from_reg]) > min_align)
min_align = MEM_ALIGN (spill_stack_slot[from_reg]);
}
+ /* The sizes are taken from a subreg operation, which guarantees
+ that they're ordered. */
+ gcc_checking_assert (ordered_p (total_size, inherent_size));
+
/* Make a slot with that size. */
x = assign_stack_local (mode, total_size,
min_align > inherent_align
- || total_size > inherent_size ? -1 : 0);
+ || maybe_gt (total_size, inherent_size)
+ ? -1 : 0);
stack_slot = x;
/* Cancel the big-endian correction done in assign_stack_local.
@@ -2241,7 +2252,7 @@ alter_reg (int i, int from_reg, bool dont_share_p)
adjust = GET_MODE_SIZE (mode) - total_size;
if (maybe_ne (adjust, 0))
{
- unsigned int total_bits = total_size * BITS_PER_UNIT;
+ poly_uint64 total_bits = total_size * BITS_PER_UNIT;
machine_mode mem_mode
= int_mode_for_size (total_bits, 1).else_blk ();
stack_slot = adjust_address_nv (x, mem_mode, adjust);