Diffstat (limited to 'gcc/cfglayout.c')
-rw-r--r--  gcc/cfglayout.c  246
1 file changed, 112 insertions(+), 134 deletions(-)
diff --git a/gcc/cfglayout.c b/gcc/cfglayout.c
index 12cc255..a576f0e 100644
--- a/gcc/cfglayout.c
+++ b/gcc/cfglayout.c
@@ -34,13 +34,16 @@ Software Foundation, 59 Temple Place - Suite 330, Boston, MA
#include "cfgloop.h"
#include "target.h"
#include "ggc.h"
+#include "alloc-pool.h"
/* The contents of the current function definition are allocated
in this obstack, and all are freed at the end of the function. */
extern struct obstack flow_obstack;
+alloc_pool cfg_layout_pool;
+
/* Holds the interesting trailing notes for the function. */
-rtx cfg_layout_function_footer;
+rtx cfg_layout_function_footer, cfg_layout_function_header;
static rtx skip_insns_after_block (basic_block);
static void record_effective_endpoints (void);
@@ -51,7 +54,6 @@ static void set_block_levels (tree, int);
static void change_scope (rtx, tree, tree);
void verify_insn_chain (void);
-static void cleanup_unconditional_jumps (struct loops *);
static void fixup_fallthru_exit_predecessor (void);
static rtx duplicate_insn_chain (rtx, rtx);
static void break_superblocks (void);
@@ -189,19 +191,38 @@ label_for_bb (basic_block bb)
static void
record_effective_endpoints (void)
{
- rtx next_insn = get_insns ();
+ rtx next_insn;
basic_block bb;
+ rtx insn;
+
+ for (insn = get_insns ();
+ NEXT_INSN (insn) && GET_CODE (insn) == NOTE;
+ insn = NEXT_INSN (insn))
+ {
+ if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BASIC_BLOCK)
+ {
+ insn = NULL;
+ break;
+ }
+ if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG)
+ break;
+ }
+ if (insn)
+ cfg_layout_function_header = unlink_insn_chain (get_insns (), insn);
+ else
+ cfg_layout_function_header = NULL_RTX;
+ next_insn = get_insns ();
FOR_EACH_BB (bb)
{
rtx end;
if (PREV_INSN (bb->head) && next_insn != bb->head)
- RBI (bb)->header = unlink_insn_chain (next_insn,
+ bb->rbi->header = unlink_insn_chain (next_insn,
PREV_INSN (bb->head));
end = skip_insns_after_block (bb);
if (NEXT_INSN (bb->end) && bb->end != end)
- RBI (bb)->footer = unlink_insn_chain (NEXT_INSN (bb->end), end);
+ bb->rbi->footer = unlink_insn_chain (NEXT_INSN (bb->end), end);
next_insn = NEXT_INSN (bb->end);
}
@@ -529,21 +550,29 @@ fixup_reorder_chain (void)
int index;
rtx insn = NULL;
+ if (cfg_layout_function_header)
+ {
+ set_first_insn (cfg_layout_function_header);
+ insn = cfg_layout_function_header;
+ while (NEXT_INSN (insn))
+ insn = NEXT_INSN (insn);
+ }
+
/* First do the bulk reordering -- rechain the blocks without regard to
the needed changes to jumps and labels. */
for (bb = ENTRY_BLOCK_PTR->next_bb, index = 0;
bb != 0;
- bb = RBI (bb)->next, index++)
+ bb = bb->rbi->next, index++)
{
- if (RBI (bb)->header)
+ if (bb->rbi->header)
{
if (insn)
- NEXT_INSN (insn) = RBI (bb)->header;
+ NEXT_INSN (insn) = bb->rbi->header;
else
- set_first_insn (RBI (bb)->header);
- PREV_INSN (RBI (bb)->header) = insn;
- insn = RBI (bb)->header;
+ set_first_insn (bb->rbi->header);
+ PREV_INSN (bb->rbi->header) = insn;
+ insn = bb->rbi->header;
while (NEXT_INSN (insn))
insn = NEXT_INSN (insn);
}
@@ -553,10 +582,10 @@ fixup_reorder_chain (void)
set_first_insn (bb->head);
PREV_INSN (bb->head) = insn;
insn = bb->end;
- if (RBI (bb)->footer)
+ if (bb->rbi->footer)
{
- NEXT_INSN (insn) = RBI (bb)->footer;
- PREV_INSN (RBI (bb)->footer) = insn;
+ NEXT_INSN (insn) = bb->rbi->footer;
+ PREV_INSN (bb->rbi->footer) = insn;
while (NEXT_INSN (insn))
insn = NEXT_INSN (insn);
}
@@ -580,7 +609,7 @@ fixup_reorder_chain (void)
/* Now add jumps and labels as needed to match the blocks new
outgoing edges. */
- for (bb = ENTRY_BLOCK_PTR->next_bb; bb ; bb = RBI (bb)->next)
+ for (bb = ENTRY_BLOCK_PTR->next_bb; bb ; bb = bb->rbi->next)
{
edge e_fall, e_taken, e;
rtx bb_end_insn;
@@ -604,8 +633,8 @@ fixup_reorder_chain (void)
if (any_condjump_p (bb_end_insn))
{
/* If the old fallthru is still next, nothing to do. */
- if (RBI (bb)->next == e_fall->dest
- || (!RBI (bb)->next
+ if (bb->rbi->next == e_fall->dest
+ || (!bb->rbi->next
&& e_fall->dest == EXIT_BLOCK_PTR))
continue;
@@ -645,7 +674,7 @@ fixup_reorder_chain (void)
such as happens at the very end of a function, then we'll
need to add a new unconditional jump. Choose the taken
edge based on known or assumed probability. */
- else if (RBI (bb)->next != e_taken->dest)
+ else if (bb->rbi->next != e_taken->dest)
{
rtx note = find_reg_note (bb_end_insn, REG_BR_PROB, 0);
@@ -684,7 +713,7 @@ fixup_reorder_chain (void)
#ifdef CASE_DROPS_THROUGH
/* Except for VAX. Since we didn't have predication for the
tablejump, the fallthru block should not have moved. */
- if (RBI (bb)->next == e_fall->dest)
+ if (bb->rbi->next == e_fall->dest)
continue;
bb_end_insn = skip_insns_after_block (bb);
#else
@@ -701,11 +730,11 @@ fixup_reorder_chain (void)
continue;
/* If the fallthru block is still next, nothing to do. */
- if (RBI (bb)->next == e_fall->dest)
+ if (bb->rbi->next == e_fall->dest)
continue;
/* A fallthru to exit block. */
- if (!RBI (bb)->next && e_fall->dest == EXIT_BLOCK_PTR)
+ if (!bb->rbi->next && e_fall->dest == EXIT_BLOCK_PTR)
continue;
}
@@ -713,10 +742,10 @@ fixup_reorder_chain (void)
nb = force_nonfallthru (e_fall);
if (nb)
{
- alloc_aux_for_block (nb, sizeof (struct reorder_block_def));
- RBI (nb)->visited = 1;
- RBI (nb)->next = RBI (bb)->next;
- RBI (bb)->next = nb;
+ cfg_layout_initialize_rbi (nb);
+ nb->rbi->visited = 1;
+ nb->rbi->next = bb->rbi->next;
+ bb->rbi->next = nb;
/* Don't process this new block. */
bb = nb;
}
@@ -727,12 +756,12 @@ fixup_reorder_chain (void)
if (rtl_dump_file)
{
fprintf (rtl_dump_file, "Reordered sequence:\n");
- for (bb = ENTRY_BLOCK_PTR->next_bb, index = 0; bb; bb = RBI (bb)->next, index ++)
+ for (bb = ENTRY_BLOCK_PTR->next_bb, index = 0; bb; bb = bb->rbi->next, index ++)
{
fprintf (rtl_dump_file, " %i ", index);
- if (RBI (bb)->original)
+ if (bb->rbi->original)
fprintf (rtl_dump_file, "duplicate of %i ",
- RBI (bb)->original->index);
+ bb->rbi->original->index);
else if (forwarder_block_p (bb) && GET_CODE (bb->head) != CODE_LABEL)
fprintf (rtl_dump_file, "compensation ");
else
@@ -745,7 +774,7 @@ fixup_reorder_chain (void)
bb = ENTRY_BLOCK_PTR->next_bb;
index = 0;
- for (; bb; prev_bb = bb, bb = RBI (bb)->next, index ++)
+ for (; bb; prev_bb = bb, bb = bb->rbi->next, index ++)
{
bb->index = index;
BASIC_BLOCK (index) = bb;
@@ -755,6 +784,16 @@ fixup_reorder_chain (void)
}
prev_bb->next_bb = EXIT_BLOCK_PTR;
EXIT_BLOCK_PTR->prev_bb = prev_bb;
+
+ /* Annoying special case - jump around dead jumptables left in the code. */
+ FOR_EACH_BB (bb)
+ {
+ edge e;
+ for (e = bb->succ; e && !(e->flags & EDGE_FALLTHRU); e = e->succ_next)
+ continue;
+ if (e && !can_fallthru (e->src, e->dest))
+ force_nonfallthru (e);
+ }
}
/* Perform sanity checks on the insn chain.
@@ -788,87 +827,6 @@ verify_insn_chain (void)
abort ();
}
-/* Remove any unconditional jumps and forwarder block creating fallthru
- edges instead. During BB reordering, fallthru edges are not required
- to target next basic block in the linear CFG layout, so the unconditional
- jumps are not needed. If LOOPS is not null, also update loop structure &
- dominators. */
-
-static void
-cleanup_unconditional_jumps (struct loops *loops)
-{
- basic_block bb;
-
- FOR_EACH_BB (bb)
- {
- if (!bb->succ)
- continue;
- if (bb->succ->flags & EDGE_FALLTHRU)
- continue;
- if (!bb->succ->succ_next)
- {
- rtx insn;
- if (GET_CODE (bb->head) != CODE_LABEL && forwarder_block_p (bb)
- && bb->prev_bb != ENTRY_BLOCK_PTR)
- {
- basic_block prev = bb->prev_bb;
-
- if (rtl_dump_file)
- fprintf (rtl_dump_file, "Removing forwarder BB %i\n",
- bb->index);
-
- if (loops)
- {
- /* bb cannot be loop header, as it only has one entry
- edge. It could be a loop latch. */
- if (bb->loop_father->header == bb)
- abort ();
-
- if (bb->loop_father->latch == bb)
- bb->loop_father->latch = bb->pred->src;
-
- if (get_immediate_dominator
- (loops->cfg.dom, bb->succ->dest) == bb)
- set_immediate_dominator
- (loops->cfg.dom, bb->succ->dest, bb->pred->src);
-
- remove_bb_from_loops (bb);
- delete_from_dominance_info (loops->cfg.dom, bb);
- }
-
- redirect_edge_succ_nodup (bb->pred, bb->succ->dest);
- delete_block (bb);
- bb = prev;
- }
- else if (simplejump_p (bb->end))
- {
- rtx jump = bb->end;
-
- if (rtl_dump_file)
- fprintf (rtl_dump_file, "Removing jump %i in BB %i\n",
- INSN_UID (jump), bb->index);
- delete_insn (jump);
- bb->succ->flags |= EDGE_FALLTHRU;
- }
- else
- continue;
-
- insn = NEXT_INSN (bb->end);
- while (insn
- && (GET_CODE (insn) != NOTE
- || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
- {
- rtx next = NEXT_INSN (insn);
-
- if (GET_CODE (insn) == BARRIER)
- delete_barrier (insn);
-
- insn = next;
- }
- }
- }
-}
-
/* The block falling through to exit must be the last one in the
reordered chain. Ensure that this condition is met. */
static void
@@ -881,19 +839,19 @@ fixup_fallthru_exit_predecessor (void)
if (e->flags & EDGE_FALLTHRU)
bb = e->src;
- if (bb && RBI (bb)->next)
+ if (bb && bb->rbi->next)
{
basic_block c = ENTRY_BLOCK_PTR->next_bb;
- while (RBI (c)->next != bb)
- c = RBI (c)->next;
+ while (c->rbi->next != bb)
+ c = c->rbi->next;
- RBI (c)->next = RBI (bb)->next;
- while (RBI (c)->next)
- c = RBI (c)->next;
+ c->rbi->next = bb->rbi->next;
+ while (c->rbi->next)
+ c = c->rbi->next;
- RBI (c)->next = bb;
- RBI (bb)->next = NULL;
+ c->rbi->next = bb;
+ bb->rbi->next = NULL;
}
}
@@ -1050,26 +1008,25 @@ cfg_layout_duplicate_bb (basic_block bb, edge e)
new_bb = create_basic_block (insn,
insn ? get_last_insn () : NULL,
EXIT_BLOCK_PTR->prev_bb);
- alloc_aux_for_block (new_bb, sizeof (struct reorder_block_def));
- if (RBI (bb)->header)
+ if (bb->rbi->header)
{
- insn = RBI (bb)->header;
+ insn = bb->rbi->header;
while (NEXT_INSN (insn))
insn = NEXT_INSN (insn);
- insn = duplicate_insn_chain (RBI (bb)->header, insn);
+ insn = duplicate_insn_chain (bb->rbi->header, insn);
if (insn)
- RBI (new_bb)->header = unlink_insn_chain (insn, get_last_insn ());
+ new_bb->rbi->header = unlink_insn_chain (insn, get_last_insn ());
}
- if (RBI (bb)->footer)
+ if (bb->rbi->footer)
{
- insn = RBI (bb)->footer;
+ insn = bb->rbi->footer;
while (NEXT_INSN (insn))
insn = NEXT_INSN (insn);
- insn = duplicate_insn_chain (RBI (bb)->footer, insn);
+ insn = duplicate_insn_chain (bb->rbi->footer, insn);
if (insn)
- RBI (new_bb)->footer = unlink_insn_chain (insn, get_last_insn ());
+ new_bb->rbi->footer = unlink_insn_chain (insn, get_last_insn ());
}
if (bb->global_live_at_start)
@@ -1113,25 +1070,42 @@ cfg_layout_duplicate_bb (basic_block bb, edge e)
if (bb->frequency < 0)
bb->frequency = 0;
- RBI (new_bb)->original = bb;
- RBI (bb)->copy = new_bb;
+ new_bb->rbi->original = bb;
+ bb->rbi->copy = new_bb;
return new_bb;
}
+void
+cfg_layout_initialize_rbi (bb)
+ basic_block bb;
+{
+ if (bb->rbi)
+ abort ();
+ bb->rbi = pool_alloc (cfg_layout_pool);
+ memset (bb->rbi, 0, sizeof (struct reorder_block_def));
+}
+
/* Main entry point to this module - initialize the datastructures for
CFG layout changes. It keeps LOOPS up-to-date if not null. */
void
-cfg_layout_initialize (struct loops *loops)
+cfg_layout_initialize ()
{
+ basic_block bb;
+
 /* Our algorithm depends on the fact that there are no dead jumptables
 around the code. */
- alloc_aux_for_blocks (sizeof (struct reorder_block_def));
- cfg_layout_rtl_register_cfg_hooks ();
+ cfg_layout_pool =
+ create_alloc_pool ("cfg layout pool", sizeof (struct reorder_block_def),
+ n_basic_blocks + 2);
+ FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
+ cfg_layout_initialize_rbi (bb);
- cleanup_unconditional_jumps (loops);
+ cfg_layout_rtl_register_cfg_hooks ();
record_effective_endpoints ();
+
+ cleanup_cfg (CLEANUP_CFGLAYOUT);
}
/* Splits superblocks. */
@@ -1169,6 +1143,8 @@ break_superblocks (void)
void
cfg_layout_finalize (void)
{
+ basic_block bb;
+
#ifdef ENABLE_CHECKING
verify_flow_info ();
#endif
@@ -1180,7 +1156,9 @@ cfg_layout_finalize (void)
verify_insn_chain ();
#endif
- free_aux_for_blocks ();
+ free_alloc_pool (cfg_layout_pool);
+ FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
+ bb->rbi = NULL;
break_superblocks ();