-rw-r--r--  gcc/ChangeLog      58
-rw-r--r--  gcc/Makefile.in     4
-rw-r--r--  gcc/basic-block.h   1
-rw-r--r--  gcc/c-semantics.c   9
-rw-r--r--  gcc/cfgcleanup.c    5
-rw-r--r--  gcc/cfglayout.c   380
-rw-r--r--  gcc/cfglayout.h    11
-rw-r--r--  gcc/cfgrtl.c       13
-rw-r--r--  gcc/emit-rtl.c     86
-rw-r--r--  gcc/output.h        1
-rw-r--r--  gcc/predict.c     247
-rw-r--r--  gcc/predict.def    17
-rw-r--r--  gcc/predict.h       3
-rw-r--r--  gcc/print-rtl.c     9
-rw-r--r--  gcc/rtl.c           3
-rw-r--r--  gcc/rtl.h          11
-rw-r--r--  gcc/sibcall.c      11
-rw-r--r--  gcc/stmt.c         59
-rw-r--r--  gcc/toplev.c       30
19 files changed, 861 insertions, 97 deletions
diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index 4e90317..bfeecda 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,3 +1,61 @@
+Wed May 8 11:08:50 CEST 2002 Jan Hubicka <jh@suse.cz>
+
+ * cfglayout.c (function_tail_eff_head): Rename to ...
+ (function_footer): ... this one.
+ (unlink_insn_chain): New function.
+ (label_for_bb): Only call block_label and emit debug message.
+ (record_effective_endpoints): Actually unlink the headers and footers.
+ (fixup_reorder_chain): Re-insert the unlinked sequences.
+ (cfg_layout_duplicate_bb): Use duplicate_insn_chain.
+ * cfglayout.h (struct reorder_block_def): New fields footer/header;
+ remove eff_head/eff_end.
+ * rtl.h (set_first_insn): Declare.
+ * emit-rtl.c (set_first_insn): New function.
+
+ * cfglayout.c (fixup_reorder_chain): Dump duplicated blocks.
+ (cfg_layout_can_duplicate_bb_p, cfg_layout_redirect_edge,
+ cfg_layout_duplicate_bb): New global functions.
+ (duplicate_insn_chain): New static function.
+ * cfglayout.h (cfg_layout_can_duplicate_bb_p, cfg_layout_redirect_edge,
+ cfg_layout_duplicate_bb): Declare.
+ (struct reorder_block_def): Add "original" field.
+ * emit-rtl.c (emit_copy_of_insn_after): New function.
+ * rtl.h (emit_copy_of_insn_after): Declare.
+
+ * cfglayout.c (fixup_fallthru_exit_predecessor): Kill.
+ (fixup_reorder_chain): Properly handle edges to the exit block.
+
+Wed May 8 11:10:31 CEST 2002 Zdenek Dvorak <rakdver@atrey.karlin.mff.cuni.cz>
+ Jan Hubicka <jh@suse.cz>
+
+ * basic-block.h (note_prediction_to_br_prob): Declare.
+ * c-semantics.c: Include predict.h.
+ (expand_stmt): Predict GOTO_STMT as not taken.
+ * cfgcleanup.c (delete_unreachable_blocks): Make global.
+ (cleanup_cfg): Do not free tail_recursion_label_list.
+ * cfgrtl.c (can_delete_note_p): Delete NOTE_INSN_PREDICTION.
+ (flow_delete_block): Kill predictions past end of basic block.
+ * output.h (delete_unreachable_blocks): Declare.
+ * predict.c (predicted_by_p, process_note_predictions,
+ process_note_prediction, last_basic_block_p): New functions.
+ (estimate_probability): Bypass loop heuristics on PRED_CONTINUE;
+ do not handle noreturn heuristics; kill PRED_ERROR_RETURN; add
+ PRED_EARLY_RETURN.
+ * predict.def (PRED_CONTINUE, PRED_EARLY_RETURN, PRED_GOTO,
+ PRED_CONST_RETURN, PRED_NEGATIVE_RETURN, PRED_NULL_RETURN): New.
+ * predict.h (IS_TAKEN): New constant.
+ * print-rtl.c (print_rtx): Pretty print NOTE_INSN_PREDICTION.
+ * rtl.c (NOTE_INSN_PREDICTION): New.
+ * rtl.h (NOTE_PREDICTION, NOTE_PREDICTION_ALG, NOTE_PREDICTION_FLAGS):
+ New macros.
+ (insn_note): Add NOTE_INSN_PREDICTION.
+ * sibcall.c (optimize_sibling_and_tail_recursive_calls): Do not build
+ the CFG; free tail_recursion_label_list.
+ * stmt.c: Include predict.h.
+ (return_prediction): New.
+ (expand_value_return): Use it.
+ * toplev.c (rest_of_compilation): Lower NOTE_INSN_PREDICTION notes
+ before sibcall.
+
2002-05-08 Richard Sandiford <rsandifo@redhat.com>
* config/mips/mips.md: Name the unspecs with define_constant.
diff --git a/gcc/Makefile.in b/gcc/Makefile.in
index 068add3..7a55da7 100644
--- a/gcc/Makefile.in
+++ b/gcc/Makefile.in
@@ -1227,7 +1227,7 @@ c-format.o : c-format.c $(CONFIG_H) $(SYSTEM_H) $(TREE_H) langhooks.h \
c-semantics.o : c-semantics.c $(CONFIG_H) $(SYSTEM_H) $(TREE_H) $(C_TREE_H) \
c-lex.h flags.h toplev.h output.h c-pragma.h $(RTL_H) $(GGC_H) \
- $(EXPR_H)
+ $(EXPR_H) predict.h
# Language-independent files.
@@ -1389,7 +1389,7 @@ function.o : function.c $(CONFIG_H) $(SYSTEM_H) $(RTL_H) $(TREE_H) flags.h \
stmt.o : stmt.c $(CONFIG_H) $(SYSTEM_H) $(RTL_H) $(TREE_H) flags.h function.h \
insn-config.h hard-reg-set.h $(EXPR_H) libfuncs.h except.h \
$(LOOP_H) $(RECOG_H) toplev.h output.h varray.h $(GGC_H) $(TM_P_H) \
- langhooks.h
+ langhooks.h predict.h
except.o : except.c $(CONFIG_H) $(SYSTEM_H) $(RTL_H) $(TREE_H) flags.h \
except.h function.h $(EXPR_H) libfuncs.h integrate.h langhooks.h \
insn-config.h hard-reg-set.h $(BASIC_BLOCK_H) output.h \
diff --git a/gcc/basic-block.h b/gcc/basic-block.h
index 24d2af8..e1c1905 100644
--- a/gcc/basic-block.h
+++ b/gcc/basic-block.h
@@ -625,6 +625,7 @@ extern rtx emit_block_insn_before PARAMS ((rtx, rtx, basic_block));
/* In predict.c */
extern void estimate_probability PARAMS ((struct loops *));
+extern void note_prediction_to_br_prob PARAMS ((void));
extern void expected_value_to_br_prob PARAMS ((void));
/* In flow.c */
diff --git a/gcc/c-semantics.c b/gcc/c-semantics.c
index 030a04c..32fae44 100644
--- a/gcc/c-semantics.c
+++ b/gcc/c-semantics.c
@@ -36,6 +36,7 @@ Software Foundation, 59 Temple Place - Suite 330, Boston, MA
#include "expr.h"
#include "output.h"
#include "timevar.h"
+#include "predict.h"
/* If non-NULL, the address of a language-specific function for
expanding statements. */
@@ -834,6 +835,14 @@ expand_stmt (t)
break;
case GOTO_STMT:
+ /* Emit information for branch prediction. */
+ if (!GOTO_FAKE_P (t)
+ && TREE_CODE (GOTO_DESTINATION (t)) == LABEL_DECL)
+ {
+ rtx note = emit_note (NULL, NOTE_INSN_PREDICTION);
+
+ NOTE_PREDICTION (note) = NOTE_PREDICT (PRED_GOTO, NOT_TAKEN);
+ }
genrtl_goto_stmt (GOTO_DESTINATION (t));
break;
diff --git a/gcc/cfgcleanup.c b/gcc/cfgcleanup.c
index bff7623..1bb3d9b 100644
--- a/gcc/cfgcleanup.c
+++ b/gcc/cfgcleanup.c
@@ -74,7 +74,7 @@ static int flow_find_cross_jump PARAMS ((int, basic_block, basic_block,
rtx *, rtx *));
static bool insns_match_p PARAMS ((int, rtx, rtx));
-static bool delete_unreachable_blocks PARAMS ((void));
+bool delete_unreachable_blocks PARAMS ((void));
static bool label_is_jump_target_p PARAMS ((rtx, rtx));
static bool tail_recursion_label_p PARAMS ((rtx));
static void merge_blocks_move_predecessor_nojumps PARAMS ((basic_block,
@@ -1748,7 +1748,7 @@ try_optimize_cfg (mode)
/* Delete all unreachable basic blocks. */
-static bool
+bool
delete_unreachable_blocks ()
{
int i, j;
@@ -1829,7 +1829,6 @@ cleanup_cfg (mode)
/* Kill the data we won't maintain. */
free_EXPR_LIST_list (&label_value_list);
- free_EXPR_LIST_list (&tail_recursion_label_list);
timevar_pop (TV_CLEANUP_CFG);
return changed;
diff --git a/gcc/cfglayout.c b/gcc/cfglayout.c
index 329e9f8..632280c 100644
--- a/gcc/cfglayout.c
+++ b/gcc/cfglayout.c
@@ -35,7 +35,7 @@ Software Foundation, 59 Temple Place - Suite 330, Boston, MA
extern struct obstack flow_obstack;
/* Holds the interesting trailing notes for the function. */
-static rtx function_tail_eff_head;
+static rtx function_footer;
static rtx skip_insns_after_block PARAMS ((basic_block));
static void record_effective_endpoints PARAMS ((void));
@@ -47,10 +47,33 @@ static void change_scope PARAMS ((rtx, tree, tree));
void verify_insn_chain PARAMS ((void));
static void fixup_fallthru_exit_predecessor PARAMS ((void));
+static rtx unlink_insn_chain PARAMS ((rtx, rtx));
+static rtx duplicate_insn_chain PARAMS ((rtx, rtx));
/* Map insn uid to lexical block. */
static varray_type insn_scopes;
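+
+/* Unlink the insns from FIRST to LAST (inclusive) out of the function's
+   insn chain, reconnecting their former neighbours and updating the
+   global first/last insn pointers as needed, and return FIRST.  */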
+static rtx
+unlink_insn_chain (first, last)
+ rtx first;
+ rtx last;
+{
+ rtx prevfirst = PREV_INSN (first);
+ rtx nextlast = NEXT_INSN (last);
+
+ PREV_INSN (first) = NULL;
+ NEXT_INSN (last) = NULL;
+ if (prevfirst)
+ NEXT_INSN (prevfirst) = nextlast;
+ if (nextlast)
+ PREV_INSN (nextlast) = prevfirst;
+ else
+ set_last_insn (prevfirst);
+ if (!prevfirst)
+ set_first_insn (nextlast);
+ return first;
+}
+
/* Skip over inter-block insns occurring after BB which are typically
associated with BB (e.g., barriers). If there are any such insns,
we return the last one. Otherwise, we return the end of BB. */
@@ -155,8 +178,6 @@ label_for_bb (bb)
fprintf (rtl_dump_file, "Emitting label for block %d\n", bb->index);
label = block_label (bb);
- if (bb->head == PREV_INSN (RBI (bb)->eff_head))
- RBI (bb)->eff_head = label;
}
return label;
@@ -176,13 +197,18 @@ record_effective_endpoints ()
basic_block bb = BASIC_BLOCK (i);
rtx end;
- RBI (bb)->eff_head = next_insn;
+ if (PREV_INSN (bb->head) && next_insn != bb->head)
+ RBI (bb)->header = unlink_insn_chain (next_insn,
+ PREV_INSN (bb->head));
end = skip_insns_after_block (bb);
- RBI (bb)->eff_end = end;
- next_insn = NEXT_INSN (end);
+ if (NEXT_INSN (bb->end) && bb->end != end)
+ RBI (bb)->footer = unlink_insn_chain (NEXT_INSN (bb->end), end);
+ next_insn = NEXT_INSN (bb->end);
}
- function_tail_eff_head = next_insn;
+ function_footer = next_insn;
+ if (function_footer)
+ function_footer = unlink_insn_chain (function_footer, get_last_insn ());
}
/* Build a varray mapping INSN_UID to lexical block. Return it. */
@@ -237,7 +263,7 @@ set_block_levels (block, level)
block = BLOCK_CHAIN (block);
}
}
-
+
/* Emit lexical block notes needed to change scope from S1 to S2. */
static void
@@ -330,32 +356,49 @@ scope_to_insns_finalize ()
static void
fixup_reorder_chain ()
{
- basic_block bb, last_bb;
+ basic_block bb;
int index;
- rtx insn;
- int old_n_basic_blocks = n_basic_blocks;
+ rtx insn = NULL;
/* First do the bulk reordering -- rechain the blocks without regard to
the needed changes to jumps and labels. */
- for (last_bb = BASIC_BLOCK (0), bb = RBI (last_bb)->next, index = 1;
+ for (bb = BASIC_BLOCK (0), index = 0;
bb != 0;
- last_bb = bb, bb = RBI (bb)->next, index++)
+ bb = RBI (bb)->next, index++)
{
- rtx last_e = RBI (last_bb)->eff_end;
- rtx curr_h = RBI (bb)->eff_head;
-
- NEXT_INSN (last_e) = curr_h;
- PREV_INSN (curr_h) = last_e;
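+      /* Re-link the block's unlinked header (if any), then the block
+         itself, then its unlinked footer, keeping the insn chain and the
+         global first insn pointer consistent as we go.  */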
+ if (RBI (bb)->header)
+ {
+ if (insn)
+ NEXT_INSN (insn) = RBI (bb)->header;
+ else
+ set_first_insn (RBI (bb)->header);
+ PREV_INSN (RBI (bb)->header) = insn;
+ insn = RBI (bb)->header;
+ while (NEXT_INSN (insn))
+ insn = NEXT_INSN (insn);
+ }
+ if (insn)
+ NEXT_INSN (insn) = bb->head;
+ else
+ set_first_insn (bb->head);
+ PREV_INSN (bb->head) = insn;
+ insn = bb->end;
+ if (RBI (bb)->footer)
+ {
+ NEXT_INSN (insn) = RBI (bb)->footer;
+ PREV_INSN (RBI (bb)->footer) = insn;
+ while (NEXT_INSN (insn))
+ insn = NEXT_INSN (insn);
+ }
}
if (index != n_basic_blocks)
abort ();
- insn = RBI (last_bb)->eff_end;
- NEXT_INSN (insn) = function_tail_eff_head;
- if (function_tail_eff_head)
- PREV_INSN (function_tail_eff_head) = insn;
+ NEXT_INSN (insn) = function_footer;
+ if (function_footer)
+ PREV_INSN (function_footer) = insn;
while (NEXT_INSN (insn))
insn = NEXT_INSN (insn);
@@ -470,8 +513,6 @@ fixup_reorder_chain ()
if (nb)
{
alloc_aux_for_block (nb, sizeof (struct reorder_block_def));
- RBI (nb)->eff_head = nb->head;
- RBI (nb)->eff_end = NEXT_INSN (nb->end);
RBI (nb)->visited = 1;
RBI (nb)->next = RBI (bb)->next;
RBI (bb)->next = nb;
@@ -481,20 +522,26 @@ fixup_reorder_chain ()
}
/* Put basic_block_info in the new order. */
- bb = BASIC_BLOCK (0);
- index = 0;
if (rtl_dump_file)
- fprintf (rtl_dump_file, "Reordered sequence:\n");
-
- for (; bb; bb = RBI (bb)->next, index++)
{
- if (rtl_dump_file)
- fprintf (rtl_dump_file, " %i %sbb %i freq %i\n", index,
- bb->index >= old_n_basic_blocks ? "compensation " : "",
- bb->index,
- bb->frequency);
+ fprintf (rtl_dump_file, "Reordered sequence:\n");
+ for (bb = BASIC_BLOCK (0), index = 0; bb; bb = RBI (bb)->next, index ++)
+ {
+ fprintf (rtl_dump_file, " %i ", index);
+ if (RBI (bb)->original)
+ fprintf (rtl_dump_file, "duplicate of %i ",
+ RBI (bb)->original->index);
+ else if (forwarder_block_p (bb) && GET_CODE (bb->head) != CODE_LABEL)
+ fprintf (rtl_dump_file, "compensation ");
+ else
+ fprintf (rtl_dump_file, "bb %i ", bb->index);
+ fprintf (rtl_dump_file, " [%i]\n", bb->frequency);
+ }
+ }
+ for (bb = BASIC_BLOCK (0), index = 0; bb; bb = RBI (bb)->next, index ++)
+ {
bb->index = index;
BASIC_BLOCK (index) = bb;
}
@@ -530,10 +577,9 @@ verify_insn_chain ()
if (insn_cnt1 != insn_cnt2)
abort ();
}
-
-/* The block falling through to exit must be the last one in the reordered
- chain. Ensure it is. */
-
+
+/* The block falling through to exit must be the last one in the
+ reordered chain. Ensure that this condition is met. */
static void
fixup_fallthru_exit_predecessor ()
{
@@ -560,12 +606,266 @@ fixup_fallthru_exit_predecessor ()
}
}
-/* Main entry point to this module: initialize the datastructures for CFG
- layout changes. */
+/* Return true in case it is possible to duplicate the basic block BB. */
+
+bool
+cfg_layout_can_duplicate_bb_p (bb)
+ basic_block bb;
+{
+ rtx next;
+ edge s;
+
+ if (bb == EXIT_BLOCK_PTR || bb == ENTRY_BLOCK_PTR)
+ return false;
+
+  /* Duplicating a fallthru block to the exit block would require adding
+     a jump and splitting the real last BB.  */
+ for (s = bb->succ; s; s = s->succ_next)
+ if (s->dest == EXIT_BLOCK_PTR && s->flags & EDGE_FALLTHRU)
+ return false;
+
+  /* Do not attempt to duplicate tablejumps, as we would need to unshare
+     the dispatch table.  This is difficult to do, as the instructions
+     computing the jump destination may be hoisted outside the basic block.  */
+ if (GET_CODE (bb->end) == JUMP_INSN && JUMP_LABEL (bb->end)
+ && (next = next_nonnote_insn (JUMP_LABEL (bb->end)))
+ && GET_CODE (next) == JUMP_INSN
+ && (GET_CODE (PATTERN (next)) == ADDR_VEC
+ || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
+ return false;
+ return true;
+}
+
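+/* Create a duplicate of the insn chain FROM .. TO (inclusive) at the end
+   of the current insn stream and return the first insn of the copy.
+   Dispatch tables and various notes are deliberately not copied; see the
+   NOTE handling below.  */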
+static rtx
+duplicate_insn_chain (from, to)
+ rtx from, to;
+{
+ rtx insn, last;
+
+  /* Avoid updating the boundaries of the previous basic block.  The
+     note will be removed from the insn stream during fixup.  */
+ last = emit_note (NULL, NOTE_INSN_DELETED);
+
+ /* Create copy at the end of INSN chain. The chain will
+ be reordered later. */
+ for (insn = from; insn != NEXT_INSN (to); insn = NEXT_INSN (insn))
+ {
+ rtx new;
+ switch (GET_CODE (insn))
+ {
+ case INSN:
+ case CALL_INSN:
+ case JUMP_INSN:
+	  /* Avoid copying dispatch tables.  We never duplicate
+	     tablejumps, so this can only be hit when the table has been
+	     moved far away from the original jump.  */
+ if (GET_CODE (PATTERN (insn)) == ADDR_VEC
+ || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
+ break;
+ new = emit_copy_of_insn_after (insn, get_last_insn ());
+ /* Record the INSN_SCOPE. */
+ VARRAY_GROW (insn_scopes, INSN_UID (new) + 1);
+ VARRAY_TREE (insn_scopes, INSN_UID (new))
+ = VARRAY_TREE (insn_scopes, INSN_UID (insn));
+ break;
+
+ case CODE_LABEL:
+ break;
+
+ case BARRIER:
+ emit_barrier ();
+ break;
+
+ case NOTE:
+ switch (NOTE_LINE_NUMBER (insn))
+ {
+	      /* In case the prologue is empty and the function contains a
+	         label in the first BB, we may want to copy the block.  */
+ case NOTE_INSN_PROLOGUE_END:
+
+ case NOTE_INSN_LOOP_VTOP:
+ case NOTE_INSN_LOOP_CONT:
+ case NOTE_INSN_LOOP_BEG:
+ case NOTE_INSN_LOOP_END:
+ /* Strip down the loop notes - we don't really want to keep
+ them consistent in loop copies. */
+ case NOTE_INSN_DELETED:
+ case NOTE_INSN_DELETED_LABEL:
+ /* No problem to strip these. */
+ case NOTE_INSN_EPILOGUE_BEG:
+ case NOTE_INSN_FUNCTION_END:
+	      /* Debug code expects these notes to exist just once.
+	         Keep them in the master copy.
+	         ??? It probably makes more sense to duplicate them for each
+	         epilogue copy.  */
+ case NOTE_INSN_FUNCTION_BEG:
+	      /* There is always just a single entry to the function.  */
+ case NOTE_INSN_BASIC_BLOCK:
+ break;
+
+	      /* There is no purpose in duplicating the prologue.  */
+ case NOTE_INSN_BLOCK_BEG:
+ case NOTE_INSN_BLOCK_END:
+	      /* The BLOCK_BEG/BLOCK_END notes should already have been
+	         eliminated while BB reordering is in progress.  */
+ case NOTE_INSN_EH_REGION_BEG:
+ case NOTE_INSN_EH_REGION_END:
+ case NOTE_INSN_RANGE_BEG:
+ case NOTE_INSN_RANGE_END:
+ /* Should never exist at BB duplication time. */
+ abort ();
+ break;
+ case NOTE_INSN_REPEATED_LINE_NUMBER:
+ emit_note (NOTE_SOURCE_FILE (insn), NOTE_LINE_NUMBER (insn));
+ break;
+
+ default:
+ if (NOTE_LINE_NUMBER (insn) < 0)
+ abort ();
+ /* It is possible that no_line_number is set and the note
+ won't be emitted. */
+ emit_note (NOTE_SOURCE_FILE (insn), NOTE_LINE_NUMBER (insn));
+ }
+ break;
+ default:
+ abort ();
+ }
+ }
+ insn = NEXT_INSN (last);
+ delete_insn (last);
+ return insn;
+}
+
+/* Redirect edge E to the basic block DEST.  */
+void
+cfg_layout_redirect_edge (e, dest)
+ edge e;
+ basic_block dest;
+{
+ int old_index = dest->index;
+ basic_block src = e->src;
+
+  /* redirect_edge_and_branch may decide to turn the branch into a fallthru
+     edge when the basic blocks appear to be in sequence.  Avoid this
+     transformation.  */
+
+ dest->index = n_basic_blocks + 1;
+ if (e->flags & EDGE_FALLTHRU)
+ {
+      /* If we are redirecting the fallthru edge to the branch edge
+         of a conditional jump, remove the jump.  */
+ if (src->succ->succ_next
+ && !src->succ->succ_next->succ_next)
+ {
+ edge s = e->succ_next ? e->succ_next : src->succ;
+ if (s->dest == dest
+ && any_condjump_p (src->end)
+ && onlyjump_p (src->end))
+ delete_insn (src->end);
+ }
+ redirect_edge_succ_nodup (e, dest);
+ }
+ else
+ redirect_edge_and_branch (e, dest);
+ dest->index = old_index;
+}
+
+/* Create a duplicate of the basic block BB and redirect edge E into it.  */
+
+basic_block
+cfg_layout_duplicate_bb (bb, e)
+ basic_block bb;
+ edge e;
+{
+ rtx insn;
+ edge s, n;
+ basic_block new_bb;
+ gcov_type new_count = e ? e->count : 0;
+
+ if (bb->count < new_count)
+ new_count = bb->count;
+ if (!bb->pred)
+ abort ();
+#ifdef ENABLE_CHECKING
+ if (!cfg_layout_can_duplicate_bb_p (bb))
+ abort ();
+#endif
+
+ insn = duplicate_insn_chain (bb->head, bb->end);
+ new_bb = create_basic_block (n_basic_blocks, insn,
+ insn ? get_last_insn () : NULL);
+ alloc_aux_for_block (new_bb, sizeof (struct reorder_block_def));
+
+ if (RBI (bb)->header)
+ {
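+      /* Find the end of the header chain, copy it to the end of the insn
+         stream, and unlink the copy to become the new block's header.  */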
+ insn = RBI (bb)->header;
+ while (NEXT_INSN (insn))
+ insn = NEXT_INSN (insn);
+ insn = duplicate_insn_chain (RBI (bb)->header, insn);
+ if (insn)
+ RBI (new_bb)->header = unlink_insn_chain (insn, get_last_insn ());
+ }
+
+ if (RBI (bb)->footer)
+ {
+ insn = RBI (bb)->footer;
+ while (NEXT_INSN (insn))
+ insn = NEXT_INSN (insn);
+ insn = duplicate_insn_chain (RBI (bb)->footer, insn);
+ if (insn)
+ RBI (new_bb)->footer = unlink_insn_chain (insn, get_last_insn ());
+ }
+
+ if (bb->global_live_at_start)
+ {
+ new_bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
+ new_bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
+ COPY_REG_SET (new_bb->global_live_at_start, bb->global_live_at_start);
+ COPY_REG_SET (new_bb->global_live_at_end, bb->global_live_at_end);
+ }
+
+ new_bb->loop_depth = bb->loop_depth;
+ new_bb->flags = bb->flags;
+ for (s = bb->succ; s; s = s->succ_next)
+ {
+ n = make_edge (new_bb, s->dest, s->flags);
+ n->probability = s->probability;
+ if (new_count)
+        /* Scale the count via an intermediate 10000-based ratio so the
+           gcov_type multiplication cannot overflow.  */
+ n->count = s->count * (new_count * 10000 / bb->count) / 10000;
+ else
+ n->count = 0;
+ s->count -= n->count;
+ }
+
+ new_bb->count = new_count;
+ bb->count -= new_count;
+
+ if (e)
+ {
+ new_bb->frequency = EDGE_FREQUENCY (e);
+ bb->frequency -= EDGE_FREQUENCY (e);
+
+ cfg_layout_redirect_edge (e, new_bb);
+ }
+
+ if (bb->count < 0)
+ bb->count = 0;
+ if (bb->frequency < 0)
+ bb->frequency = 0;
+
+ RBI (new_bb)->original = bb;
+ return new_bb;
+}
+
+/* Main entry point to this module: initialize the data structures for
+   CFG layout changes.  */
void
cfg_layout_initialize ()
{
+  /* Our algorithm depends on the fact that there are no dead jumptables
+     around the code.  */
alloc_aux_for_blocks (sizeof (struct reorder_block_def));
scope_to_insns_initialize ();
diff --git a/gcc/cfglayout.h b/gcc/cfglayout.h
index 3b12e50..e4e27d0 100644
--- a/gcc/cfglayout.h
+++ b/gcc/cfglayout.h
@@ -21,9 +21,12 @@
/* Structure to hold information about the blocks during reordering. */
typedef struct reorder_block_def
{
- rtx eff_head;
- rtx eff_end;
+ rtx header;
+ rtx footer;
basic_block next;
+ basic_block original;
+
+ /* These fields are used by bb-reorder pass. */
int visited;
} *reorder_block_def;
@@ -31,6 +34,8 @@ typedef struct reorder_block_def
extern void cfg_layout_initialize PARAMS ((void));
extern void cfg_layout_finalize PARAMS ((void));
-
+extern bool cfg_layout_can_duplicate_bb_p PARAMS ((basic_block));
+extern basic_block cfg_layout_duplicate_bb PARAMS ((basic_block, edge));
extern void scope_to_insns_initialize PARAMS ((void));
extern void scope_to_insns_finalize PARAMS ((void));
+extern void cfg_layout_redirect_edge PARAMS ((edge, basic_block));
diff --git a/gcc/cfgrtl.c b/gcc/cfgrtl.c
index 3fcd8a4..d96f677 100644
--- a/gcc/cfgrtl.c
+++ b/gcc/cfgrtl.c
@@ -89,7 +89,8 @@ can_delete_note_p (note)
rtx note;
{
return (NOTE_LINE_NUMBER (note) == NOTE_INSN_DELETED
- || NOTE_LINE_NUMBER (note) == NOTE_INSN_BASIC_BLOCK);
+ || NOTE_LINE_NUMBER (note) == NOTE_INSN_BASIC_BLOCK
+ || NOTE_LINE_NUMBER (note) == NOTE_INSN_PREDICTION);
}
/* True if a given label can be deleted. */
@@ -375,6 +376,16 @@ flow_delete_block_noexpunge (b)
and remove the associated NOTE_INSN_EH_REGION_BEG and
NOTE_INSN_EH_REGION_END notes. */
+ /* Get rid of all NOTE_INSN_PREDICTIONs hanging before the block. */
+
+ for (insn = PREV_INSN (b->head); insn; insn = PREV_INSN (insn))
+ {
+ if (GET_CODE (insn) != NOTE)
+ break;
+ if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PREDICTION)
+ NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
+ }
+
insn = b->head;
never_reached_warning (insn, b->end);
diff --git a/gcc/emit-rtl.c b/gcc/emit-rtl.c
index 4b9c2dc..adeabe9 100644
--- a/gcc/emit-rtl.c
+++ b/gcc/emit-rtl.c
@@ -3731,6 +3731,27 @@ emit_call_insn_before (pattern, before)
return insn;
}
+/* Make an instruction with body PATTERN and code CALL_INSN
+   and output it after the instruction AFTER.  */
+
+rtx
+emit_call_insn_after (pattern, after)
+     rtx pattern, after;
+{
+  rtx insn;
+
+  if (GET_CODE (pattern) == SEQUENCE)
+    insn = emit_insn_after (pattern, after);
+  else
+    {
+      insn = make_call_insn_raw (pattern);
+      add_insn_after (insn, after);
+      PUT_CODE (insn, CALL_INSN);
+    }
+
+  return insn;
+}
+
/* Make an insn of code BARRIER
and output it before the insn BEFORE. */
@@ -5052,3 +5073,68 @@ restore_line_number_status (old_value)
{
no_line_numbers = old_value;
}
+
+/* Produce an exact duplicate of insn INSN after AFTER, taking care to
+   update any libcall regions if present.  */
+
+rtx
+emit_copy_of_insn_after (insn, after)
+ rtx insn, after;
+{
+ rtx new;
+ rtx note1, note2, link;
+
+ switch (GET_CODE (insn))
+ {
+ case INSN:
+ new = emit_insn_after (copy_insn (PATTERN (insn)), after);
+ break;
+
+ case JUMP_INSN:
+ new = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
+ break;
+
+ case CALL_INSN:
+ new = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
+ if (CALL_INSN_FUNCTION_USAGE (insn))
+ CALL_INSN_FUNCTION_USAGE (new)
+ = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
+ SIBLING_CALL_P (new) = SIBLING_CALL_P (insn);
+ CONST_OR_PURE_CALL_P (new) = CONST_OR_PURE_CALL_P (insn);
+ break;
+
+ default:
+ abort ();
+ }
+
+ /* Update LABEL_NUSES. */
+ mark_jump_label (PATTERN (new), new, 0);
+
+ /* Copy all REG_NOTES except REG_LABEL since mark_jump_label will
+ make them. */
+ for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
+ if (REG_NOTE_KIND (link) != REG_LABEL)
+ {
+ if (GET_CODE (link) == EXPR_LIST)
+ REG_NOTES (new)
+ = copy_insn_1 (gen_rtx_EXPR_LIST (REG_NOTE_KIND (link),
+ XEXP (link, 0),
+ REG_NOTES (new)));
+ else
+ REG_NOTES (new)
+ = copy_insn_1 (gen_rtx_INSN_LIST (REG_NOTE_KIND (link),
+ XEXP (link, 0),
+ REG_NOTES (new)));
+ }
+
+ /* Fix the libcall sequences. */
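+  /* The REG_RETVAL note on the last insn of a libcall sequence points back
+     at the first insn, which carries a REG_LIBCALL note pointing forward;
+     both links must now refer to the copied insns.  */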
+ if ((note1 = find_reg_note (new, REG_RETVAL, NULL_RTX)) != NULL)
+ {
+ rtx p = new;
+ while ((note2 = find_reg_note (p, REG_LIBCALL, NULL_RTX)) == NULL)
+ p = PREV_INSN (p);
+ XEXP (note1, 0) = p;
+ XEXP (note2, 0) = new;
+ }
+ return new;
+}
diff --git a/gcc/output.h b/gcc/output.h
index 67c635b..289bdc0 100644
--- a/gcc/output.h
+++ b/gcc/output.h
@@ -145,6 +145,7 @@ extern int regno_uninitialized PARAMS ((unsigned int));
extern int regno_clobbered_at_setjmp PARAMS ((int));
extern void find_basic_blocks PARAMS ((rtx, int, FILE *));
extern bool cleanup_cfg PARAMS ((int));
+extern bool delete_unreachable_blocks PARAMS ((void));
extern void check_function_return_warnings PARAMS ((void));
#endif
diff --git a/gcc/predict.c b/gcc/predict.c
index 77f1a99..5896c10 100644
--- a/gcc/predict.c
+++ b/gcc/predict.c
@@ -61,6 +61,8 @@ static REAL_VALUE_TYPE real_zero, real_one, real_almost_one, real_br_prob_base,
#define PROB_VERY_LIKELY (REG_BR_PROB_BASE - PROB_VERY_UNLIKELY)
#define PROB_ALWAYS (REG_BR_PROB_BASE)
+static bool predicted_by_p PARAMS ((basic_block,
+ enum br_predictor));
static void combine_predictions_for_insn PARAMS ((rtx, basic_block));
static void dump_prediction PARAMS ((enum br_predictor, int,
basic_block, int));
@@ -68,6 +70,11 @@ static void estimate_loops_at_level PARAMS ((struct loop *loop));
static void propagate_freq PARAMS ((basic_block));
static void estimate_bb_frequencies PARAMS ((struct loops *));
static void counts_to_freqs PARAMS ((void));
+static void process_note_predictions PARAMS ((basic_block, int *, int *,
+ sbitmap *));
+static void process_note_prediction PARAMS ((basic_block, int *, int *,
+ sbitmap *, int, int));
+static bool last_basic_block_p PARAMS ((basic_block));
/* Information we hold about each branch predictor.
Filled using information from predict.def. */
@@ -96,6 +103,23 @@ static const struct predictor_info predictor_info[]= {
{NULL, 0, 0}
};
#undef DEF_PREDICTOR
+/* Return true if one of the outgoing edges is already predicted by
+   PREDICTOR.  */
+
+static bool
+predicted_by_p (bb, predictor)
+ basic_block bb;
+ enum br_predictor predictor;
+{
+ rtx note;
+ if (!INSN_P (bb->end))
+ return false;
+ for (note = REG_NOTES (bb->end); note; note = XEXP (note, 1))
+ if (REG_NOTE_KIND (note) == REG_BR_PRED
+ && INTVAL (XEXP (XEXP (note, 0), 0)) == (int)predictor)
+ return true;
+ return false;
+}
void
predict_insn (insn, predictor, probability)
@@ -333,7 +357,6 @@ estimate_probability (loops_info)
{
sbitmap *dominators, *post_dominators;
int i;
- int found_noreturn = 0;
dominators = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
post_dominators = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
@@ -357,6 +380,13 @@ estimate_probability (loops_info)
int header_found = 0;
edge e;
+      /* Bypass loop heuristics on continue statements.  These
+         statements construct loops via "non-loop" constructs
+         in the source language and are better handled
+         separately.  */
+ if (predicted_by_p (BASIC_BLOCK (j), PRED_CONTINUE))
+ continue;
+
/* Loop branch heuristics - predict an edge back to a
loop's head as taken. */
for (e = BASIC_BLOCK(j)->succ; e; e = e->succ_next)
@@ -389,37 +419,22 @@ estimate_probability (loops_info)
rtx cond, earliest;
edge e;
- /* If block has no successor, predict all possible paths to it as
- improbable, as the block contains a call to a noreturn function and
- thus can be executed only once. */
- if (bb->succ == NULL && !found_noreturn)
- {
- int y;
-
- /* ??? Postdominator claims each noreturn block to be postdominated
- by each, so we need to run only once. This needs to be changed
- once postdominace algorithm is updated to say something more
- sane. */
- found_noreturn = 1;
- for (y = 0; y < n_basic_blocks; y++)
- if (!TEST_BIT (post_dominators[y], i))
- for (e = BASIC_BLOCK (y)->succ; e; e = e->succ_next)
- if (e->dest->index >= 0
- && TEST_BIT (post_dominators[e->dest->index], i))
- predict_edge_def (e, PRED_NORETURN, NOT_TAKEN);
- }
-
if (GET_CODE (last_insn) != JUMP_INSN || ! any_condjump_p (last_insn))
continue;
for (e = bb->succ; e; e = e->succ_next)
{
- /* Predict edges to blocks that return immediately to be
- improbable. These are usually used to signal error states. */
- if (e->dest == EXIT_BLOCK_PTR
- || (e->dest->succ && !e->dest->succ->succ_next
- && e->dest->succ->dest == EXIT_BLOCK_PTR))
- predict_edge_def (e, PRED_ERROR_RETURN, NOT_TAKEN);
+	  /* Predict early returns to be probable, as we've already taken
+	     care of error returns and the others are often used for fast
+	     paths through the function.  */
+ if ((e->dest == EXIT_BLOCK_PTR
+ || (e->dest->succ && !e->dest->succ->succ_next
+ && e->dest->succ->dest == EXIT_BLOCK_PTR))
+ && !predicted_by_p (bb, PRED_NULL_RETURN)
+ && !predicted_by_p (bb, PRED_CONST_RETURN)
+ && !predicted_by_p (bb, PRED_NEGATIVE_RETURN)
+ && !last_basic_block_p (e->dest))
+ predict_edge_def (e, PRED_EARLY_RETURN, TAKEN);
/* Look for block we are guarding (ie we dominate it,
but it doesn't postdominate us). */
@@ -538,7 +553,8 @@ estimate_probability (loops_info)
/* Attach the combined probability to each conditional jump. */
for (i = 0; i < n_basic_blocks; i++)
if (GET_CODE (BLOCK_END (i)) == JUMP_INSN
- && any_condjump_p (BLOCK_END (i)))
+ && any_condjump_p (BLOCK_END (i))
+ && BASIC_BLOCK (i)->succ->succ_next != NULL)
combine_predictions_for_insn (BLOCK_END (i), BASIC_BLOCK (i));
sbitmap_vector_free (post_dominators);
@@ -620,6 +636,181 @@ expected_value_to_br_prob ()
}
}
+/* Check whether this is the last basic block of the function.  Commonly
+   there is one extra cleanup block at the end.  */
+static bool
+last_basic_block_p (bb)
+ basic_block bb;
+{
+ return (bb->index == n_basic_blocks - 1
+ || (bb->index == n_basic_blocks - 2
+ && bb->succ && !bb->succ->succ_next
+ && bb->succ->dest->index == n_basic_blocks - 1));
+}
+
+/* Set branch probabilities according to PRED and FLAGS.  HEADS[bb->index]
+   should be the index of the basic block in which we need to alter the
+   branch predictions (i.e. the first of our dominators that we do not
+   post-dominate); this information is filled in on demand, so -1 may be
+   there when it has not been needed yet.  */
+
+static void
+process_note_prediction (bb, heads, dominators, post_dominators, pred, flags)
+ basic_block bb;
+ int *heads;
+ int *dominators;
+ sbitmap *post_dominators;
+ int pred;
+ int flags;
+{
+ edge e;
+ int y;
+ bool taken;
+
+ taken = flags & IS_TAKEN;
+
+ if (heads[bb->index] < 0)
+ {
+      /* This is the first time we need this field of the heads array, so
+         find the first dominator that we do not post-dominate (making use
+         of the already-known members of the heads array).  */
+ int ai = bb->index;
+ int next_ai = dominators[bb->index];
+ int head;
+
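+      /* Walk up the dominator tree for as long as BB still post-dominates
+         the dominator; the first dominator that escapes this condition is
+         the head.  The visited path is then compressed below so that
+         later queries terminate quickly.  */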
+ while (heads[next_ai] < 0)
+ {
+ if (!TEST_BIT (post_dominators[next_ai], bb->index))
+ break;
+ heads[next_ai] = ai;
+ ai = next_ai;
+ next_ai = dominators[next_ai];
+ }
+ if (!TEST_BIT (post_dominators[next_ai], bb->index))
+ head = next_ai;
+ else
+ head = heads[next_ai];
+ while (next_ai != bb->index)
+ {
+ next_ai = ai;
+ ai = heads[ai];
+ heads[next_ai] = head;
+ }
+ }
+ y = heads[bb->index];
+
+  /* Now find the edge that leads to our branch and apply the prediction.  */
+
+ if (y == n_basic_blocks)
+ return;
+ for (e = BASIC_BLOCK (y)->succ; e; e = e->succ_next)
+ if (e->dest->index >= 0
+ && TEST_BIT (post_dominators[e->dest->index], bb->index))
+ predict_edge_def (e, pred, taken);
+}
+
+/* Gather the NOTE_INSN_PREDICTIONs in the given basic block and turn them
+   into branch probabilities.  For a description of the heads array, see
+   process_note_prediction.  */
+
+static void
+process_note_predictions (bb, heads, dominators, post_dominators)
+ basic_block bb;
+ int *heads;
+ int *dominators;
+ sbitmap *post_dominators;
+{
+ rtx insn;
+ edge e;
+
+  /* Additionally, we check here for blocks with no successors.  */
+ int contained_noreturn_call = 0;
+ int was_bb_head = 0;
+ int noreturn_block = 1;
+
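+  /* Scan the block backwards from its end; prediction notes may sit just
+     before the block's head, so keep walking through notes past the head
+     until a real insn is reached.  */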
+ for (insn = bb->end; insn;
+ was_bb_head |= (insn == bb->head), insn = PREV_INSN (insn))
+ {
+ if (GET_CODE (insn) != NOTE)
+ {
+ if (was_bb_head)
+ break;
+ else
+ {
+	      /* Noreturn calls cause the program to exit, therefore they
+	         are always predicted as not taken.  */
+ if (GET_CODE (insn) == CALL_INSN
+ && find_reg_note (insn, REG_NORETURN, NULL))
+ contained_noreturn_call = 1;
+ continue;
+ }
+ }
+ if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PREDICTION)
+ {
+ int alg = (int) NOTE_PREDICTION_ALG (insn);
+ /* Process single prediction note. */
+ process_note_prediction (bb,
+ heads,
+ dominators,
+ post_dominators,
+ alg, (int) NOTE_PREDICTION_FLAGS (insn));
+ delete_insn (insn);
+ }
+ }
+ for (e = bb->succ; e; e = e->succ_next)
+ if (!(e->flags & EDGE_FAKE))
+ noreturn_block = 0;
+ if (contained_noreturn_call)
+ {
+      /* This block ended for some reason other than a return.  If it
+         ended because of a noreturn call, the path to it should certainly
+         not be taken.  Otherwise it is probably some error recovery.  */
+ process_note_prediction (bb,
+ heads,
+ dominators,
+ post_dominators, PRED_NORETURN, NOT_TAKEN);
+ }
+}
+
+/* Gathers NOTE_INSN_PREDICTIONs and turns them into
+ branch probabilities. */
+
+void
+note_prediction_to_br_prob ()
+{
+ int i;
+ sbitmap *post_dominators;
+ int *dominators, *heads;
+
+ /* To enable handling of noreturn blocks. */
+ add_noreturn_fake_exit_edges ();
+ connect_infinite_loops_to_exit ();
+
+ dominators = xmalloc (sizeof (int) * n_basic_blocks);
+ memset (dominators, -1, sizeof (int) * n_basic_blocks);
+ post_dominators = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
+ calculate_dominance_info (NULL, post_dominators, CDI_POST_DOMINATORS);
+ calculate_dominance_info (dominators, NULL, CDI_DOMINATORS);
+
+ heads = xmalloc (sizeof (int) * n_basic_blocks);
+ memset (heads, -1, sizeof (int) * n_basic_blocks);
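+  /* For the first basic block there is no dominating branch to alter, so
+     use n_basic_blocks as a sentinel; predictions whose head search ends
+     there are dropped in process_note_prediction.  */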
+ heads[0] = n_basic_blocks;
+
+ /* Process all prediction notes. */
+
+ for (i = 0; i < n_basic_blocks; ++i)
+ {
+ basic_block bb = BASIC_BLOCK (i);
+ process_note_predictions (bb, heads, dominators, post_dominators);
+ }
+
+ sbitmap_vector_free (post_dominators);
+ free (dominators);
+ free (heads);
+
+ remove_fake_edges ();
+}
+
/* This is used to carry information about basic blocks. It is
attached to the AUX field of the standard CFG block. */
diff --git a/gcc/predict.def b/gcc/predict.def
index 1201625..0e44131 100644
--- a/gcc/predict.def
+++ b/gcc/predict.def
@@ -63,6 +63,9 @@ DEF_PREDICTOR (PRED_LOOP_ITERATIONS, "loop iterations", PROB_ALWAYS,
DEF_PREDICTOR (PRED_BUILTIN_EXPECT, "__builtin_expect", PROB_VERY_LIKELY,
PRED_FLAG_FIRST_MATCH)
+/* Branch implementing a continue statement of a loop is probably taken.  */
+DEF_PREDICTOR (PRED_CONTINUE, "continue", HITRATE (56), 0)
+
/* Branch to basic block containing call marked by noreturn attribute. */
DEF_PREDICTOR (PRED_NORETURN, "noreturn call", HITRATE (99),
PRED_FLAG_FIRST_MATCH)
@@ -97,4 +100,16 @@ DEF_PREDICTOR (PRED_FPOPCODE, "fp_opcode", HITRATE (90), 0)
DEF_PREDICTOR (PRED_CALL, "call", HITRATE (70), 0)
/* Branch causing function to terminate is probably not taken. */
-DEF_PREDICTOR (PRED_ERROR_RETURN, "error return", HITRATE (52), 0)
+DEF_PREDICTOR (PRED_EARLY_RETURN, "early return", HITRATE (67), 0)
+
+/* Branch containing goto is probably not taken. */
+DEF_PREDICTOR (PRED_GOTO, "goto", HITRATE (70), 0)
+
+/* Branch ending with return constant is probably not taken. */
+DEF_PREDICTOR (PRED_CONST_RETURN, "const return", HITRATE (95), 0)
+
+/* Branch ending with return negative constant is probably not taken. */
+DEF_PREDICTOR (PRED_NEGATIVE_RETURN, "negative return", HITRATE (96), 0)
+
+/* Branch ending with a return of NULL is probably not taken.  */
+DEF_PREDICTOR (PRED_NULL_RETURN, "null return", HITRATE (90), 0)
diff --git a/gcc/predict.h b/gcc/predict.h
index 6af5f8d..a936b56 100644
--- a/gcc/predict.h
+++ b/gcc/predict.h
@@ -34,6 +34,9 @@ enum prediction
TAKEN
};
+/* Flags for NOTE_PREDICTION */
+#define IS_TAKEN 1 /* Predict edges to the block as taken. */
+
extern void predict_insn_def PARAMS ((rtx, enum br_predictor,
enum prediction));
extern void predict_insn PARAMS ((rtx, enum br_predictor, int));
diff --git a/gcc/print-rtl.c b/gcc/print-rtl.c
index 5bdffb4..8cd339a 100644
--- a/gcc/print-rtl.c
+++ b/gcc/print-rtl.c
@@ -284,6 +284,15 @@ print_rtx (in_rtx)
fprintf (outfile, " \"\"");
break;
+ case NOTE_INSN_PREDICTION:
+ if (NOTE_PREDICTION (in_rtx))
+ fprintf (outfile, " [ %d %d ] ",
+	     (int) NOTE_PREDICTION_ALG (in_rtx),
+ (int) NOTE_PREDICTION_FLAGS (in_rtx));
+ else
+ fprintf (outfile, " [ ERROR ]");
+ break;
+
default:
{
const char * const str = X0STR (in_rtx, i);
diff --git a/gcc/rtl.c b/gcc/rtl.c
index ad0bc53..2b816d7 100644
--- a/gcc/rtl.c
+++ b/gcc/rtl.c
@@ -216,7 +216,8 @@ const char * const note_insn_name[NOTE_INSN_MAX - NOTE_INSN_BIAS] =
"NOTE_INSN_EH_REGION_BEG", "NOTE_INSN_EH_REGION_END",
"NOTE_INSN_REPEATED_LINE_NUMBER", "NOTE_INSN_RANGE_BEG",
"NOTE_INSN_RANGE_END", "NOTE_INSN_LIVE",
- "NOTE_INSN_BASIC_BLOCK", "NOTE_INSN_EXPECTED_VALUE"
+ "NOTE_INSN_BASIC_BLOCK", "NOTE_INSN_EXPECTED_VALUE",
+ "NOTE_INSN_PREDICTION"
};
const char * const reg_note_name[] =
diff --git a/gcc/rtl.h b/gcc/rtl.h
index 3a20343..f201903 100644
--- a/gcc/rtl.h
+++ b/gcc/rtl.h
@@ -740,6 +740,7 @@ extern const char * const reg_note_name[];
#define NOTE_LIVE_INFO(INSN) XCEXP (INSN, 3, NOTE)
#define NOTE_BASIC_BLOCK(INSN) XCBBDEF (INSN, 3, NOTE)
#define NOTE_EXPECTED_VALUE(INSN) XCEXP (INSN, 3, NOTE)
+#define NOTE_PREDICTION(INSN) XCINT (INSN, 3, NOTE)
/* In a NOTE that is a line number, this is the line number.
Other kinds of NOTEs are identified by negative numbers here. */
@@ -750,6 +751,11 @@ extern const char * const reg_note_name[];
(GET_CODE (INSN) == NOTE \
&& NOTE_LINE_NUMBER (INSN) == NOTE_INSN_BASIC_BLOCK)
+/* Algorithm and flags for prediction. */
+#define NOTE_PREDICTION_ALG(INSN)   (XCINT (INSN, 3, NOTE) >> 8)
+#define NOTE_PREDICTION_FLAGS(INSN) (XCINT (INSN, 3, NOTE) & 0xff)
+#define NOTE_PREDICT(ALG, FLAGS)    (((ALG) << 8) + (FLAGS))
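+/* For example, NOTE_PREDICT (PRED_CONTINUE, IS_TAKEN) packs the predictor
+   into bits 8 and above and the flags into the low byte, so that
+   NOTE_PREDICTION_ALG and NOTE_PREDICTION_FLAGS recover both parts.  */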
+
/* Codes that appear in the NOTE_LINE_NUMBER field
for kinds of notes that are not line numbers.
@@ -838,6 +844,9 @@ enum insn_note
NOTE_EXPECTED_VALUE; stored as (eq (reg) (const_int)). */
NOTE_INSN_EXPECTED_VALUE,
+ /* Record a prediction. Uses NOTE_PREDICTION. */
+ NOTE_INSN_PREDICTION,
+
NOTE_INSN_MAX
};
@@ -1349,6 +1358,7 @@ extern rtx copy_insn_1 PARAMS ((rtx));
extern rtx copy_insn PARAMS ((rtx));
extern rtx gen_int_mode PARAMS ((HOST_WIDE_INT,
enum machine_mode));
+extern rtx emit_copy_of_insn_after PARAMS ((rtx, rtx));
/* In rtl.c */
extern rtx rtx_alloc PARAMS ((RTX_CODE));
@@ -1437,6 +1447,7 @@ extern rtx emit_label_before PARAMS ((rtx, rtx));
extern rtx emit_note_before PARAMS ((int, rtx));
extern rtx emit_insn_after PARAMS ((rtx, rtx));
extern rtx emit_jump_insn_after PARAMS ((rtx, rtx));
+extern rtx emit_call_insn_after PARAMS ((rtx, rtx));
extern rtx emit_barrier_after PARAMS ((rtx));
extern rtx emit_label_after PARAMS ((rtx, rtx));
extern rtx emit_note_after PARAMS ((int, rtx));
diff --git a/gcc/sibcall.c b/gcc/sibcall.c
index ec13d89..535c827 100644
--- a/gcc/sibcall.c
+++ b/gcc/sibcall.c
@@ -580,16 +580,6 @@ optimize_sibling_and_tail_recursive_calls ()
insns = get_insns ();
- /* We do not perform these calls when flag_exceptions is true, so this
- is probably a NOP at the current time. However, we may want to support
- sibling and tail recursion optimizations in the future, so let's plan
- ahead and find all the EH labels. */
- find_exception_handler_labels ();
-
- rebuild_jump_labels (insns);
- /* We need cfg information to determine which blocks are succeeded
- only by the epilogue. */
- find_basic_blocks (insns, max_reg_num (), 0);
cleanup_cfg (CLEANUP_PRE_SIBCALL | CLEANUP_PRE_LOOP);
/* If there are no basic blocks, then there is nothing to do. */
@@ -776,4 +766,5 @@ optimize_sibling_and_tail_recursive_calls ()
/* This information will be invalid after inline expansion. Kill it now. */
free_basic_block_vars (0);
+ free_EXPR_LIST_list (&tail_recursion_label_list);
}
diff --git a/gcc/stmt.c b/gcc/stmt.c
index 42a9659..639587a 100644
--- a/gcc/stmt.c
+++ b/gcc/stmt.c
@@ -54,6 +54,7 @@ Software Foundation, 59 Temple Place - Suite 330, Boston, MA
#include "output.h"
#include "ggc.h"
#include "langhooks.h"
+#include "predict.h"
#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free
@@ -414,6 +415,7 @@ static tree resolve_operand_names PARAMS ((tree, tree, tree,
const char **));
static char *resolve_operand_name_1 PARAMS ((char *, tree, tree));
static void expand_null_return_1 PARAMS ((rtx));
+static enum br_predictor return_prediction PARAMS ((rtx));
static void expand_value_return PARAMS ((rtx));
static int tail_recursion_args PARAMS ((tree, tree));
static void expand_cleanups PARAMS ((tree, tree, int, int));
@@ -2824,6 +2826,11 @@ int
expand_continue_loop (whichloop)
struct nesting *whichloop;
{
+ /* Emit information for branch prediction. */
+ rtx note;
+
+ note = emit_note (NULL, NOTE_INSN_PREDICTION);
+ NOTE_PREDICTION (note) = NOTE_PREDICT (PRED_CONTINUE, IS_TAKEN);
last_expr_type = 0;
if (whichloop == 0)
whichloop = loop_stack;
@@ -2965,7 +2972,9 @@ expand_exit_something ()
void
expand_null_return ()
{
- rtx last_insn = get_last_insn ();
+ rtx last_insn;
+
+ last_insn = get_last_insn ();
/* If this function was declared to return a value, but we
didn't, clobber the return registers so that they are not
@@ -2975,14 +2984,58 @@ expand_null_return ()
expand_null_return_1 (last_insn);
}
+/* Try to guess whether the returned value VAL indicates an error code.  */
+static enum br_predictor
+return_prediction (val)
+ rtx val;
+{
+ /* Different heuristics for pointers and scalars. */
+ if (POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (current_function_decl))))
+ {
+ /* NULL is usually not returned. */
+ if (val == const0_rtx)
+ return PRED_NULL_RETURN;
+ }
+ else
+ {
+ /* Negative return values are often used to indicate
+ errors. */
+ if (GET_CODE (val) == CONST_INT
+ && INTVAL (val) < 0)
+ return PRED_NEGATIVE_RETURN;
+      /* Constant return values are also usually errors;
+         zero and one often mean booleans, so exclude them from the
+         heuristics.  */
+ if (CONSTANT_P (val)
+ && (val != const0_rtx && val != const1_rtx))
+ return PRED_CONST_RETURN;
+ }
+ return PRED_NO_PREDICTION;
+}
+
/* Generate RTL to return from the current function, with value VAL. */
static void
expand_value_return (val)
rtx val;
{
- rtx last_insn = get_last_insn ();
- rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
+ rtx last_insn;
+ rtx return_reg;
+ enum br_predictor pred;
+
+ if ((pred = return_prediction (val)) != PRED_NO_PREDICTION)
+ {
+ /* Emit information for branch prediction. */
+ rtx note;
+
+ note = emit_note (NULL, NOTE_INSN_PREDICTION);
+
+ NOTE_PREDICTION (note) = NOTE_PREDICT (pred, NOT_TAKEN);
+
+ }
+
+ last_insn = get_last_insn ();
+ return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
/* Copy the value to the return location
unless it's already there. */
diff --git a/gcc/toplev.c b/gcc/toplev.c
index 6984edb..572d4c0 100644
--- a/gcc/toplev.c
+++ b/gcc/toplev.c
@@ -2492,18 +2492,38 @@ rest_of_compilation (decl)
|| errorcount || sorrycount)
goto exit_rest_of_compilation;
+ timevar_push (TV_JUMP);
+ open_dump_file (DFI_sibling, decl);
+ insns = get_insns ();
+ rebuild_jump_labels (insns);
+ find_exception_handler_labels ();
+ find_basic_blocks (insns, max_reg_num (), rtl_dump_file);
+
+ delete_unreachable_blocks ();
+
+ /* Turn NOTE_INSN_PREDICTIONs into branch predictions. */
+ note_prediction_to_br_prob ();
+
/* We may have potential sibling or tail recursion sites. Select one
(of possibly multiple) methods of performing the call. */
if (flag_optimize_sibling_calls)
{
- timevar_push (TV_JUMP);
- open_dump_file (DFI_sibling, decl);
-
+ rtx insn;
optimize_sibling_and_tail_recursive_calls ();
- close_dump_file (DFI_sibling, print_rtl, get_insns ());
- timevar_pop (TV_JUMP);
+      /* There is a pass-ordering problem: we must lower NOTE_INSN_PREDICTION
+         notes before simplifying the CFG, yet the lowering must happen after
+         the sibcall pass, which unhides parts of the RTL chain and cleans up
+         the CFG.
+
+         Until sibcall is replaced by a tree-level optimizer, let's just
+         sweep away the NOTE_INSN_PREDICTION notes that leaked out.  */
+ for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
+ if (GET_CODE (insn) == NOTE
+ && NOTE_LINE_NUMBER (insn) == NOTE_INSN_PREDICTION)
+ delete_insn (insn);
}
+ close_dump_file (DFI_sibling, print_rtl, get_insns ());
+ timevar_pop (TV_JUMP);
/* Complete generation of exception handling code. */
find_exception_handler_labels ();