aboutsummaryrefslogtreecommitdiff
path: root/gcc/reorg.c
diff options
context:
space:
mode:
authorBernd Schmidt <bernds@codesourcery.com>2011-07-28 18:45:20 +0000
committerBernd Schmidt <bernds@gcc.gnu.org>2011-07-28 18:45:20 +0000
commitdc0ff1c802ef8a9e8075e5a8f4da9881848e08ab (patch)
tree05341594c41a09d1c77b0330d5b66b0fcb4a9821 /gcc/reorg.c
parenta2e49bb27eba6fe675e6a6ceac8c2041218792e5 (diff)
downloadgcc-dc0ff1c802ef8a9e8075e5a8f4da9881848e08ab.zip
gcc-dc0ff1c802ef8a9e8075e5a8f4da9881848e08ab.tar.gz
gcc-dc0ff1c802ef8a9e8075e5a8f4da9881848e08ab.tar.bz2
rtlanal.c (tablejump_p): False for returns.
* rtlanal.c (tablejump_p): False for returns. * reorg.c (first_active_target_insn): New static function. (find_end_label): Set JUMP_LABEL for a new returnjump. (optimize_skip, get_jump_flags, rare_destination, mostly_true_jump, get_branch_condition, steal_delay_list_from_target, own_thread_p, fill_simple_delay_slots, follow_jumps, fill_slots_from_thread, fill_eager_delay_slots, relax_delay_slots, make_return_insns, dbr_schedule): Adjust to handle ret_rtx in JUMP_LABELs. * jump.c (delete_related_insns): Likewise. (jump_to_label_p): New function. (redirect_target): New static function. (redirect_exp_1): Use it. Adjust to handle ret_rtx in JUMP_LABELS. (redirect_jump_1): Assert that the new label is nonnull. (redirect_jump): Likewise. (redirect_jump_2): Check for ANY_RETURN_P rather than NULL labels. * ifcvt.c (find_if_case_1): Take care when redirecting jumps to the exit block. (dead_or_predicable): Change NEW_DEST arg to DEST_EDGE. All callers changed. Ensure that the right label is passed to redirect_jump. * function.c (emit_return_into_block, thread_prologue_and_epilogue_insns): Ensure new returnjumps have ret_rtx in their JUMP_LABEL. * print-rtl.c (print_rtx): Handle ret_rtx in a JUMP_LABEL. * emit-rtl.c (skip_consecutive_labels): Allow the caller to pass ret_rtx as label. * cfglayout.c (fixup_reorder_chain): Use force_nonfallthru_and_redirect rather than force_nonfallthru. (duplicate_insn_chain): Copy JUMP_LABELs for returns. * rtl.h (ANY_RETURN_P): New macro. (jump_to_label_p): Declare. * resource.c (find_dead_or_set_registers): Handle ret_rtx in JUMP_LABELs. (mark_target_live_regs): Likewise. * basic-block.h (force_nonfallthru_and_redirect): Declare. * cfgrtl.c (force_nonfallthru_and_redirect): No longer static. * config/alpha/alpha.c (alpha_tablejump_addr_vec, alpha_tablejump_best_label): Remove functions. * config/alpha/alpha-protos.h (alpha_tablejump_addr_vec, alpha_tablejump_best_label): Remove declarations. 
* config/sh/sh.c (barrier_align, split_branches): Adjust for ret_rtx in JUMP_LABELs. * config/arm/arm.c (is_jump_table): Likewise. From-SVN: r176881
Diffstat (limited to 'gcc/reorg.c')
-rw-r--r--gcc/reorg.c104
1 files changed, 59 insertions, 45 deletions
diff --git a/gcc/reorg.c b/gcc/reorg.c
index 4df9105..97d02e9 100644
--- a/gcc/reorg.c
+++ b/gcc/reorg.c
@@ -220,6 +220,17 @@ static void relax_delay_slots (rtx);
static void make_return_insns (rtx);
#endif
+/* A wrapper around next_active_insn which takes care to return ret_rtx
+ unchanged. */
+
+static rtx
+first_active_target_insn (rtx insn)
+{
+ if (ANY_RETURN_P (insn))
+ return insn;
+ return next_active_insn (insn);
+}
+
/* Return TRUE if this insn should stop the search for insn to fill delay
slots. LABELS_P indicates that labels should terminate the search.
In all cases, jumps terminate the search. */
@@ -437,6 +448,7 @@ find_end_label (void)
/* The return we make may have delay slots too. */
rtx insn = gen_return ();
insn = emit_jump_insn (insn);
+ JUMP_LABEL (insn) = ret_rtx;
emit_barrier ();
if (num_delay_slots (insn) > 0)
obstack_ptr_grow (&unfilled_slots_obstack, insn);
@@ -824,7 +836,7 @@ optimize_skip (rtx insn)
|| GET_CODE (PATTERN (next_trial)) == RETURN))
{
rtx target_label = JUMP_LABEL (next_trial);
- if (target_label == 0)
+ if (ANY_RETURN_P (target_label))
target_label = find_end_label ();
if (target_label)
@@ -861,12 +873,12 @@ get_jump_flags (rtx insn, rtx label)
be INSNs, CALL_INSNs, or JUMP_INSNs. Only JUMP_INSNs have branch
direction information, and only if they are conditional jumps.
- If LABEL is zero, then there is no way to determine the branch
+ If LABEL is a return, then there is no way to determine the branch
direction. */
if (JUMP_P (insn)
&& (condjump_p (insn) || condjump_in_parallel_p (insn))
+ && !ANY_RETURN_P (label)
&& INSN_UID (insn) <= max_uid
- && label != 0
&& INSN_UID (label) <= max_uid)
flags
= (uid_to_ruid[INSN_UID (label)] > uid_to_ruid[INSN_UID (insn)])
@@ -921,7 +933,7 @@ rare_destination (rtx insn)
int jump_count = 0;
rtx next;
- for (; insn; insn = next)
+ for (; insn && !ANY_RETURN_P (insn); insn = next)
{
if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
insn = XVECEXP (PATTERN (insn), 0, 0);
@@ -1017,7 +1029,7 @@ mostly_true_jump (rtx jump_insn, rtx condition)
/* Predict backward branches usually take, forward branches usually not. If
we don't know whether this is forward or backward, assume the branch
will be taken, since most are. */
- return (target_label == 0 || INSN_UID (jump_insn) > max_uid
+ return (ANY_RETURN_P (target_label) || INSN_UID (jump_insn) > max_uid
|| INSN_UID (target_label) > max_uid
|| (uid_to_ruid[INSN_UID (jump_insn)]
> uid_to_ruid[INSN_UID (target_label)]));
@@ -1037,10 +1049,10 @@ get_branch_condition (rtx insn, rtx target)
if (condjump_in_parallel_p (insn))
pat = XVECEXP (pat, 0, 0);
- if (GET_CODE (pat) == RETURN)
- return target == 0 ? const_true_rtx : 0;
+ if (ANY_RETURN_P (pat))
+ return pat == target ? const_true_rtx : 0;
- else if (GET_CODE (pat) != SET || SET_DEST (pat) != pc_rtx)
+ if (GET_CODE (pat) != SET || SET_DEST (pat) != pc_rtx)
return 0;
src = SET_SRC (pat);
@@ -1048,16 +1060,12 @@ get_branch_condition (rtx insn, rtx target)
return const_true_rtx;
else if (GET_CODE (src) == IF_THEN_ELSE
- && ((target == 0 && GET_CODE (XEXP (src, 1)) == RETURN)
- || (GET_CODE (XEXP (src, 1)) == LABEL_REF
- && XEXP (XEXP (src, 1), 0) == target))
+ && XEXP (XEXP (src, 1), 0) == target
&& XEXP (src, 2) == pc_rtx)
return XEXP (src, 0);
else if (GET_CODE (src) == IF_THEN_ELSE
- && ((target == 0 && GET_CODE (XEXP (src, 2)) == RETURN)
- || (GET_CODE (XEXP (src, 2)) == LABEL_REF
- && XEXP (XEXP (src, 2), 0) == target))
+ && XEXP (XEXP (src, 2), 0) == target
&& XEXP (src, 1) == pc_rtx)
{
enum rtx_code rev;
@@ -1318,7 +1326,7 @@ steal_delay_list_from_target (rtx insn, rtx condition, rtx seq,
}
/* Show the place to which we will be branching. */
- *pnew_thread = next_active_insn (JUMP_LABEL (XVECEXP (seq, 0, 0)));
+ *pnew_thread = first_active_target_insn (JUMP_LABEL (XVECEXP (seq, 0, 0)));
/* Add any new insns to the delay list and update the count of the
number of slots filled. */
@@ -1827,7 +1835,7 @@ own_thread_p (rtx thread, rtx label, int allow_fallthrough)
rtx insn;
/* We don't own the function end. */
- if (thread == 0)
+ if (thread == 0 || ANY_RETURN_P (thread))
return 0;
/* Get the first active insn, or THREAD, if it is an active insn. */
@@ -2245,7 +2253,7 @@ fill_simple_delay_slots (int non_jumps_p)
&& (!JUMP_P (insn)
|| ((condjump_p (insn) || condjump_in_parallel_p (insn))
&& ! simplejump_p (insn)
- && JUMP_LABEL (insn) != 0)))
+ && !ANY_RETURN_P (JUMP_LABEL (insn)))))
{
/* Invariant: If insn is a JUMP_INSN, the insn's jump
label. Otherwise, zero. */
@@ -2270,7 +2278,7 @@ fill_simple_delay_slots (int non_jumps_p)
target = JUMP_LABEL (insn);
}
- if (target == 0)
+ if (target == 0 || ANY_RETURN_P (target))
for (trial = next_nonnote_insn (insn); !stop_search_p (trial, 1);
trial = next_trial)
{
@@ -2343,7 +2351,7 @@ fill_simple_delay_slots (int non_jumps_p)
Don't do this if the insn at the branch target is a branch. */
if (slots_to_fill != slots_filled
&& trial
- && JUMP_P (trial)
+ && jump_to_label_p (trial)
&& simplejump_p (trial)
&& (target == 0 || JUMP_LABEL (trial) == target)
&& (next_trial = next_active_insn (JUMP_LABEL (trial))) != 0
@@ -2500,7 +2508,7 @@ fill_simple_delay_slots (int non_jumps_p)
/* Follow any unconditional jump at LABEL;
return the ultimate label reached by any such chain of jumps.
- Return null if the chain ultimately leads to a return instruction.
+ Return ret_rtx if the chain ultimately leads to a return instruction.
If LABEL is not followed by a jump, return LABEL.
If the chain loops or we can't find end, return LABEL,
since that tells caller to avoid changing the insn. */
@@ -2513,29 +2521,34 @@ follow_jumps (rtx label)
rtx value = label;
int depth;
+ if (ANY_RETURN_P (label))
+ return label;
for (depth = 0;
(depth < 10
&& (insn = next_active_insn (value)) != 0
&& JUMP_P (insn)
- && ((JUMP_LABEL (insn) != 0 && any_uncondjump_p (insn)
- && onlyjump_p (insn))
+ && JUMP_LABEL (insn) != NULL_RTX
+ && ((any_uncondjump_p (insn) && onlyjump_p (insn))
|| GET_CODE (PATTERN (insn)) == RETURN)
&& (next = NEXT_INSN (insn))
&& BARRIER_P (next));
depth++)
{
+ rtx this_label = JUMP_LABEL (insn);
rtx tem;
/* If we have found a cycle, make the insn jump to itself. */
- if (JUMP_LABEL (insn) == label)
+ if (this_label == label)
return label;
-
- tem = next_active_insn (JUMP_LABEL (insn));
- if (tem && (GET_CODE (PATTERN (tem)) == ADDR_VEC
- || GET_CODE (PATTERN (tem)) == ADDR_DIFF_VEC))
+ if (ANY_RETURN_P (this_label))
+ return this_label;
+ tem = next_active_insn (this_label);
+ if (tem
+ && (GET_CODE (PATTERN (tem)) == ADDR_VEC
+ || GET_CODE (PATTERN (tem)) == ADDR_DIFF_VEC))
break;
- value = JUMP_LABEL (insn);
+ value = this_label;
}
if (depth == 10)
return label;
@@ -2587,7 +2600,7 @@ fill_slots_from_thread (rtx insn, rtx condition, rtx thread,
/* If our thread is the end of subroutine, we can't get any delay
insns from that. */
- if (thread == 0)
+ if (thread == NULL_RTX || ANY_RETURN_P (thread))
return delay_list;
/* If this is an unconditional branch, nothing is needed at the
@@ -2757,7 +2770,7 @@ fill_slots_from_thread (rtx insn, rtx condition, rtx thread,
gcc_assert (REG_NOTE_KIND (note)
== REG_LABEL_OPERAND);
}
- if (JUMP_P (trial) && JUMP_LABEL (trial))
+ if (jump_to_label_p (trial))
LABEL_NUSES (JUMP_LABEL (trial))++;
delete_related_insns (trial);
@@ -2776,7 +2789,7 @@ fill_slots_from_thread (rtx insn, rtx condition, rtx thread,
gcc_assert (REG_NOTE_KIND (note)
== REG_LABEL_OPERAND);
}
- if (JUMP_P (trial) && JUMP_LABEL (trial))
+ if (jump_to_label_p (trial))
LABEL_NUSES (JUMP_LABEL (trial))--;
}
else
@@ -2897,7 +2910,8 @@ fill_slots_from_thread (rtx insn, rtx condition, rtx thread,
depend on the destination register. If so, try to place the opposite
arithmetic insn after the jump insn and put the arithmetic insn in the
delay slot. If we can't do this, return. */
- if (delay_list == 0 && likely && new_thread
+ if (delay_list == 0 && likely
+ && new_thread && !ANY_RETURN_P (new_thread)
&& NONJUMP_INSN_P (new_thread)
&& GET_CODE (PATTERN (new_thread)) != ASM_INPUT
&& asm_noperands (PATTERN (new_thread)) < 0)
@@ -2990,7 +3004,7 @@ fill_slots_from_thread (rtx insn, rtx condition, rtx thread,
delay_list))
new_thread = follow_jumps (JUMP_LABEL (new_thread));
- if (new_thread == 0)
+ if (ANY_RETURN_P (new_thread))
label = find_end_label ();
else if (LABEL_P (new_thread))
label = new_thread;
@@ -3063,7 +3077,7 @@ fill_eager_delay_slots (void)
them. Then see whether the branch is likely true. We don't need
to do a lot of this for unconditional branches. */
- insn_at_target = next_active_insn (target_label);
+ insn_at_target = first_active_target_insn (target_label);
own_target = own_thread_p (target_label, target_label, 0);
if (condition == const_true_rtx)
@@ -3098,7 +3112,7 @@ fill_eager_delay_slots (void)
from the thread that was filled. So we have to recompute
the next insn at the target. */
target_label = JUMP_LABEL (insn);
- insn_at_target = next_active_insn (target_label);
+ insn_at_target = first_active_target_insn (target_label);
delay_list
= fill_slots_from_thread (insn, condition, fallthrough_insn,
@@ -3337,10 +3351,10 @@ relax_delay_slots (rtx first)
group of consecutive labels. */
if (JUMP_P (insn)
&& (condjump_p (insn) || condjump_in_parallel_p (insn))
- && (target_label = JUMP_LABEL (insn)) != 0)
+ && !ANY_RETURN_P (target_label = JUMP_LABEL (insn)))
{
target_label = skip_consecutive_labels (follow_jumps (target_label));
- if (target_label == 0)
+ if (ANY_RETURN_P (target_label))
target_label = find_end_label ();
if (target_label && next_active_insn (target_label) == next
@@ -3373,7 +3387,7 @@ relax_delay_slots (rtx first)
invert_jump fails. */
++LABEL_NUSES (target_label);
- if (label)
+ if (!ANY_RETURN_P (label))
++LABEL_NUSES (label);
if (invert_jump (insn, label, 1))
@@ -3382,7 +3396,7 @@ relax_delay_slots (rtx first)
next = insn;
}
- if (label)
+ if (!ANY_RETURN_P (label))
--LABEL_NUSES (label);
if (--LABEL_NUSES (target_label) == 0)
@@ -3485,12 +3499,12 @@ relax_delay_slots (rtx first)
target_label = JUMP_LABEL (delay_insn);
- if (target_label)
+ if (!ANY_RETURN_P (target_label))
{
/* If this jump goes to another unconditional jump, thread it, but
don't convert a jump into a RETURN here. */
trial = skip_consecutive_labels (follow_jumps (target_label));
- if (trial == 0)
+ if (ANY_RETURN_P (trial))
trial = find_end_label ();
if (trial && trial != target_label
@@ -3540,7 +3554,7 @@ relax_delay_slots (rtx first)
&& redundant_insn (XVECEXP (PATTERN (trial), 0, 1), insn, 0))
{
target_label = JUMP_LABEL (XVECEXP (PATTERN (trial), 0, 0));
- if (target_label == 0)
+ if (ANY_RETURN_P (target_label))
target_label = find_end_label ();
if (target_label
@@ -3627,7 +3641,7 @@ relax_delay_slots (rtx first)
rtx label = JUMP_LABEL (next);
rtx old_label = JUMP_LABEL (delay_insn);
- if (label == 0)
+ if (ANY_RETURN_P (label))
label = find_end_label ();
/* find_end_label can generate a new label. Check this first. */
@@ -3737,7 +3751,7 @@ make_return_insns (rtx first)
/* If we can't make the jump into a RETURN, try to redirect it to the best
RETURN and go on to the next insn. */
- if (! reorg_redirect_jump (jump_insn, NULL_RTX))
+ if (! reorg_redirect_jump (jump_insn, ret_rtx))
{
/* Make sure redirecting the jump will not invalidate the delay
slot insns. */
@@ -3866,7 +3880,7 @@ dbr_schedule (rtx first)
/* Ensure all jumps go to the last of a set of consecutive labels. */
if (JUMP_P (insn)
&& (condjump_p (insn) || condjump_in_parallel_p (insn))
- && JUMP_LABEL (insn) != 0
+ && !ANY_RETURN_P (JUMP_LABEL (insn))
&& ((target = skip_consecutive_labels (JUMP_LABEL (insn)))
!= JUMP_LABEL (insn)))
redirect_jump (insn, target, 1);