author     Jakub Jelinek <jakub@redhat.com>    2008-11-20 22:26:52 +0100
committer  Jakub Jelinek <jakub@gcc.gnu.org>   2008-11-20 22:26:52 +0100
commit     1fd8faf0599d4054a7a6118c35fc0ef6c2db5e4a (patch)
tree       8774b5a4abd0590e8cef985676755b3aa93672f5
parent     b52b558a25df4860b17f5e23a9597f20898b802e (diff)
download   gcc-1fd8faf0599d4054a7a6118c35fc0ef6c2db5e4a.zip
           gcc-1fd8faf0599d4054a7a6118c35fc0ef6c2db5e4a.tar.gz
           gcc-1fd8faf0599d4054a7a6118c35fc0ef6c2db5e4a.tar.bz2
re PR rtl-optimization/36998 (Ada bootstrap broken on i586-*-*)
	PR rtl-optimization/36998
	* dwarf2out.c (stack_adjust_offset): Add cur_args_size and cur_offset
	arguments.  Handle sp = reg and (set (foo) (mem (pre_inc (reg sp)))).
	(compute_barrier_args_size_1, dwarf2out_frame_debug_expr): Adjust
	stack_adjust_offset callers.
	(dwarf2out_stack_adjust): Likewise.  Handle insns in annulled
	branches properly.
	(compute_barrier_args_size): Handle insns in annulled branches
	properly.

From-SVN: r142060
-rw-r--r--  gcc/ChangeLog    12
-rw-r--r--  gcc/dwarf2out.c  68
2 files changed, 66 insertions, 14 deletions
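
The subtle part of the change described above is the delay-slot handling: in a
SEQUENCE whose branch is annulled, a delay insn marked as coming from the
branch target executes only when the branch is taken, so its stack adjustment
belongs to the target's args_size rather than to the fall-through path.  The
following is a self-contained toy model of that attribution rule, not GCC
code; all names in it are illustrative and only loosely mirror the RTL flags
(INSN_ANNULLED_BRANCH_P, INSN_FROM_TARGET_P) used in the diff below.

#include <stdio.h>

/* Toy model (not GCC code) of the args_size attribution rule.  */

struct delay_insn
{
  long stack_adjust;   /* change in outstanding argument bytes */
  int from_target;     /* nonzero: insn copied from the branch target */
};

static void
scan_delay_slots (const struct delay_insn *slots, int n, int annulled,
                  long *fallthrough_args_size, long *target_args_size)
{
  for (int i = 0; i < n; i++)
    {
      if (annulled && slots[i].from_target)
        /* Executes only when the branch is taken.  */
        *target_args_size += slots[i].stack_adjust;
      else if (annulled)
        /* Executes only when the branch falls through.  */
        *fallthrough_args_size += slots[i].stack_adjust;
      else
        {
          /* A non-annulled delay insn executes on both paths.  */
          *fallthrough_args_size += slots[i].stack_adjust;
          *target_args_size += slots[i].stack_adjust;
        }
    }
}

int
main (void)
{
  /* One delay insn pushing 16 bytes, executed on the taken path only;
     the old code charged it to the single tracked args_size regardless.  */
  struct delay_insn slots[] = { { 16, 1 } };
  long fallthrough = 0, target = 0;

  scan_delay_slots (slots, 1, /*annulled=*/1, &fallthrough, &target);
  printf ("fall-through args_size %ld, target args_size %ld\n",
          fallthrough, target);
  return 0;
}
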
diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index f63d37b..4b61173 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,3 +1,15 @@
+2008-11-20 Jakub Jelinek <jakub@redhat.com>
+
+ PR rtl-optimization/36998
+ * dwarf2out.c (stack_adjust_offset): Add cur_args_size and cur_offset
+ arguments. Handle sp = reg and (set (foo) (mem (pre_inc (reg sp)))).
+ (compute_barrier_args_size_1, dwarf2out_frame_debug_expr): Adjust
+ stack_adjust_offset callers.
+ (dwarf2out_stack_adjust): Likewise. Handle insns in annulled
+ branches properly.
+ (compute_barrier_args_size): Handle insns in annulled branches
+ properly.
+
2008-11-20 Uros Bizjak <ubizjak@gmail.com>
PR target/38151
diff --git a/gcc/dwarf2out.c b/gcc/dwarf2out.c
index 337c22e..68d0ee8 100644
--- a/gcc/dwarf2out.c
+++ b/gcc/dwarf2out.c
@@ -411,7 +411,8 @@ static void reg_save (const char *, unsigned, unsigned, HOST_WIDE_INT);
#ifdef DWARF2_UNWIND_INFO
static void initial_return_save (rtx);
#endif
-static HOST_WIDE_INT stack_adjust_offset (const_rtx);
+static HOST_WIDE_INT stack_adjust_offset (const_rtx, HOST_WIDE_INT,
+ HOST_WIDE_INT);
static void output_cfi (dw_cfi_ref, dw_fde_ref, int);
static void output_cfi_directive (dw_cfi_ref);
static void output_call_frame_info (int);
@@ -1110,7 +1111,8 @@ initial_return_save (rtx rtl)
contains. */
static HOST_WIDE_INT
-stack_adjust_offset (const_rtx pattern)
+stack_adjust_offset (const_rtx pattern, HOST_WIDE_INT cur_args_size,
+ HOST_WIDE_INT cur_offset)
{
const_rtx src = SET_SRC (pattern);
const_rtx dest = SET_DEST (pattern);
@@ -1119,18 +1121,34 @@ stack_adjust_offset (const_rtx pattern)
if (dest == stack_pointer_rtx)
{
- /* (set (reg sp) (plus (reg sp) (const_int))) */
code = GET_CODE (src);
+
+ /* Assume (set (reg sp) (reg whatever)) sets args_size
+ level to 0. */
+ if (code == REG && src != stack_pointer_rtx)
+ {
+ offset = -cur_args_size;
+#ifndef STACK_GROWS_DOWNWARD
+ offset = -offset;
+#endif
+ return offset - cur_offset;
+ }
+
if (! (code == PLUS || code == MINUS)
|| XEXP (src, 0) != stack_pointer_rtx
|| GET_CODE (XEXP (src, 1)) != CONST_INT)
return 0;
+ /* (set (reg sp) (plus (reg sp) (const_int))) */
offset = INTVAL (XEXP (src, 1));
if (code == PLUS)
offset = -offset;
+ return offset;
}
- else if (MEM_P (dest))
+
+ if (MEM_P (src) && !MEM_P (dest))
+ dest = src;
+ if (MEM_P (dest))
{
/* (set (mem (pre_dec (reg sp))) (foo)) */
src = XEXP (dest, 0);
@@ -1199,7 +1217,7 @@ compute_barrier_args_size_1 (rtx insn, HOST_WIDE_INT cur_args_size,
|| sibcall_epilogue_contains (insn))
/* Nothing */;
else if (GET_CODE (PATTERN (insn)) == SET)
- offset = stack_adjust_offset (PATTERN (insn));
+ offset = stack_adjust_offset (PATTERN (insn), cur_args_size, 0);
else if (GET_CODE (PATTERN (insn)) == PARALLEL
|| GET_CODE (PATTERN (insn)) == SEQUENCE)
{
@@ -1207,7 +1225,8 @@ compute_barrier_args_size_1 (rtx insn, HOST_WIDE_INT cur_args_size,
for them. */
for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET)
- offset += stack_adjust_offset (XVECEXP (PATTERN (insn), 0, i));
+ offset += stack_adjust_offset (XVECEXP (PATTERN (insn), 0, i),
+ cur_args_size, offset);
}
}
else
@@ -1224,7 +1243,7 @@ compute_barrier_args_size_1 (rtx insn, HOST_WIDE_INT cur_args_size,
rtx elem = XVECEXP (expr, 0, i);
if (GET_CODE (elem) == SET && !RTX_FRAME_RELATED_P (elem))
- offset += stack_adjust_offset (elem);
+ offset += stack_adjust_offset (elem, cur_args_size, offset);
}
}
}
@@ -1312,13 +1331,25 @@ compute_barrier_args_size (void)
body = PATTERN (insn);
if (GET_CODE (body) == SEQUENCE)
{
+ HOST_WIDE_INT dest_args_size = cur_args_size;
for (i = 1; i < XVECLEN (body, 0); i++)
+ if (INSN_ANNULLED_BRANCH_P (XVECEXP (body, 0, 0))
+ && INSN_FROM_TARGET_P (XVECEXP (body, 0, i)))
+ dest_args_size
+ = compute_barrier_args_size_1 (XVECEXP (body, 0, i),
+ dest_args_size, &next);
+ else
+ cur_args_size
+ = compute_barrier_args_size_1 (XVECEXP (body, 0, i),
+ cur_args_size, &next);
+
+ if (INSN_ANNULLED_BRANCH_P (XVECEXP (body, 0, 0)))
+ compute_barrier_args_size_1 (XVECEXP (body, 0, 0),
+ dest_args_size, &next);
+ else
cur_args_size
- = compute_barrier_args_size_1 (XVECEXP (body, 0, i),
+ = compute_barrier_args_size_1 (XVECEXP (body, 0, 0),
cur_args_size, &next);
- cur_args_size
- = compute_barrier_args_size_1 (XVECEXP (body, 0, 0),
- cur_args_size, &next);
}
else
cur_args_size
@@ -1359,6 +1390,14 @@ dwarf2out_stack_adjust (rtx insn, bool after_p)
if (prologue_epilogue_contains (insn) || sibcall_epilogue_contains (insn))
return;
+ /* If INSN is an instruction from target of an annulled branch, the
+ effects are for the target only and so current argument size
+ shouldn't change at all. */
+ if (final_sequence
+ && INSN_ANNULLED_BRANCH_P (XVECEXP (final_sequence, 0, 0))
+ && INSN_FROM_TARGET_P (insn))
+ return;
+
/* If only calls can throw, and we have a frame pointer,
save up adjustments until we see the CALL_INSN. */
if (!flag_asynchronous_unwind_tables && cfa.reg != STACK_POINTER_REGNUM)
@@ -1404,7 +1443,7 @@ dwarf2out_stack_adjust (rtx insn, bool after_p)
#endif
}
else if (GET_CODE (PATTERN (insn)) == SET)
- offset = stack_adjust_offset (PATTERN (insn));
+ offset = stack_adjust_offset (PATTERN (insn), args_size, 0);
else if (GET_CODE (PATTERN (insn)) == PARALLEL
|| GET_CODE (PATTERN (insn)) == SEQUENCE)
{
@@ -1412,7 +1451,8 @@ dwarf2out_stack_adjust (rtx insn, bool after_p)
for them. */
for (offset = 0, i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET)
- offset += stack_adjust_offset (XVECEXP (PATTERN (insn), 0, i));
+ offset += stack_adjust_offset (XVECEXP (PATTERN (insn), 0, i),
+ args_size, offset);
}
else
return;
@@ -1871,7 +1911,7 @@ dwarf2out_frame_debug_expr (rtx expr, const char *label)
{
/* Stack adjustment combining might combine some post-prologue
stack adjustment into a prologue stack adjustment. */
- HOST_WIDE_INT offset = stack_adjust_offset (elem);
+ HOST_WIDE_INT offset = stack_adjust_offset (elem, args_size, 0);
if (offset != 0)
dwarf2out_args_size_adjust (offset, label);
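
A closing note on the first dwarf2out.c hunk: when the stack pointer is loaded
from another register, the new code reports an adjustment that cancels the
outstanding argument size, minus whatever earlier SETs in the same insn have
already accounted for (the "return offset - cur_offset" line).  A small
self-contained illustration of that arithmetic, toy code rather than GCC's,
with a downward-growing stack assumed:

#include <assert.h>

/* Toy illustration (not GCC code): restoring sp from another register is
   treated as popping all outstanding argument bytes, so the reported
   adjustment is -cur_args_size minus what earlier SETs in the same insn
   already contributed.  */
static long
sp_from_reg_adjustment (long cur_args_size, long cur_offset)
{
  long offset = -cur_args_size;   /* pop everything still outstanding */
  return offset - cur_offset;     /* don't double-count this insn's SETs */
}

int
main (void)
{
  /* 32 bytes of arguments outstanding, 8 already popped by an earlier SET
     in the same PARALLEL: the sp = reg SET accounts for the remaining 24.  */
  assert (sp_from_reg_adjustment (32, -8) == -24);
  return 0;
}
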