summaryrefslogtreecommitdiffstats
path: root/contrib/gcc/cfglayout.c
diff options
context:
space:
mode:
authorkan <kan@FreeBSD.org>2003-07-11 03:40:53 +0000
committerkan <kan@FreeBSD.org>2003-07-11 03:40:53 +0000
commitb2a8872fbe1ec1c49094559ac7b78e6ea4ab7180 (patch)
treef6b0610f4a17fd26aa234354f050080f789861a4 /contrib/gcc/cfglayout.c
parent52e69d78eee5612ac195e0701a5cebe40d1ab0e1 (diff)
downloadFreeBSD-src-b2a8872fbe1ec1c49094559ac7b78e6ea4ab7180.zip
FreeBSD-src-b2a8872fbe1ec1c49094559ac7b78e6ea4ab7180.tar.gz
Gcc 3.3.1-pre as of 2003-07-11.
Diffstat (limited to 'contrib/gcc/cfglayout.c')
-rw-r--r--contrib/gcc/cfglayout.c581
1 files changed, 506 insertions, 75 deletions
diff --git a/contrib/gcc/cfglayout.c b/contrib/gcc/cfglayout.c
index 329e9f8..ca5f475 100644
--- a/contrib/gcc/cfglayout.c
+++ b/contrib/gcc/cfglayout.c
@@ -35,7 +35,7 @@ Software Foundation, 59 Temple Place - Suite 330, Boston, MA
extern struct obstack flow_obstack;
/* Holds the interesting trailing notes for the function. */
-static rtx function_tail_eff_head;
+static rtx function_footer;
static rtx skip_insns_after_block PARAMS ((basic_block));
static void record_effective_endpoints PARAMS ((void));
@@ -46,10 +46,31 @@ static void set_block_levels PARAMS ((tree, int));
static void change_scope PARAMS ((rtx, tree, tree));
void verify_insn_chain PARAMS ((void));
+static void cleanup_unconditional_jumps PARAMS ((void));
static void fixup_fallthru_exit_predecessor PARAMS ((void));
-
-/* Map insn uid to lexical block. */
-static varray_type insn_scopes;
+static rtx unlink_insn_chain PARAMS ((rtx, rtx));
+static rtx duplicate_insn_chain PARAMS ((rtx, rtx));
+
+static rtx
+unlink_insn_chain (first, last)
+ rtx first;
+ rtx last;
+{
+ rtx prevfirst = PREV_INSN (first);
+ rtx nextlast = NEXT_INSN (last);
+
+ PREV_INSN (first) = NULL;
+ NEXT_INSN (last) = NULL;
+ if (prevfirst)
+ NEXT_INSN (prevfirst) = nextlast;
+ if (nextlast)
+ PREV_INSN (nextlast) = prevfirst;
+ else
+ set_last_insn (prevfirst);
+ if (!prevfirst)
+ set_first_insn (nextlast);
+ return first;
+}
/* Skip over inter-block insns occurring after BB which are typically
associated with BB (e.g., barriers). If there are any such insns,
@@ -62,8 +83,8 @@ skip_insns_after_block (bb)
rtx insn, last_insn, next_head, prev;
next_head = NULL_RTX;
- if (bb->index + 1 != n_basic_blocks)
- next_head = BASIC_BLOCK (bb->index + 1)->head;
+ if (bb->next_bb != EXIT_BLOCK_PTR)
+ next_head = bb->next_bb->head;
for (last_insn = insn = bb->end; (insn = NEXT_INSN (insn)) != 0; )
{
@@ -103,7 +124,7 @@ skip_insns_after_block (bb)
last_insn = insn;
continue;
}
- break;
+ break;
default:
break;
@@ -113,7 +134,7 @@ skip_insns_after_block (bb)
}
/* It is possible to hit contradictory sequence. For instance:
-
+
jump_insn
NOTE_INSN_LOOP_BEG
barrier
@@ -128,14 +149,14 @@ skip_insns_after_block (bb)
if (GET_CODE (insn) == NOTE)
switch (NOTE_LINE_NUMBER (insn))
{
- case NOTE_INSN_LOOP_END:
- case NOTE_INSN_BLOCK_END:
- case NOTE_INSN_DELETED:
- case NOTE_INSN_DELETED_LABEL:
+ case NOTE_INSN_LOOP_END:
+ case NOTE_INSN_BLOCK_END:
+ case NOTE_INSN_DELETED:
+ case NOTE_INSN_DELETED_LABEL:
continue;
- default:
+ default:
reorder_insns (insn, insn, last_insn);
- }
+ }
}
return last_insn;
@@ -155,8 +176,6 @@ label_for_bb (bb)
fprintf (rtl_dump_file, "Emitting label for block %d\n", bb->index);
label = block_label (bb);
- if (bb->head == PREV_INSN (RBI (bb)->eff_head))
- RBI (bb)->eff_head = label;
}
return label;
@@ -169,20 +188,24 @@ static void
record_effective_endpoints ()
{
rtx next_insn = get_insns ();
- int i;
-
- for (i = 0; i < n_basic_blocks; i++)
+ basic_block bb;
+
+ FOR_EACH_BB (bb)
{
- basic_block bb = BASIC_BLOCK (i);
rtx end;
- RBI (bb)->eff_head = next_insn;
+ if (PREV_INSN (bb->head) && next_insn != bb->head)
+ RBI (bb)->header = unlink_insn_chain (next_insn,
+ PREV_INSN (bb->head));
end = skip_insns_after_block (bb);
- RBI (bb)->eff_end = end;
- next_insn = NEXT_INSN (end);
+ if (NEXT_INSN (bb->end) && bb->end != end)
+ RBI (bb)->footer = unlink_insn_chain (NEXT_INSN (bb->end), end);
+ next_insn = NEXT_INSN (bb->end);
}
- function_tail_eff_head = next_insn;
+ function_footer = next_insn;
+ if (function_footer)
+ function_footer = unlink_insn_chain (function_footer, get_last_insn ());
}
/* Build a varray mapping INSN_UID to lexical block. Return it. */
@@ -193,8 +216,6 @@ scope_to_insns_initialize ()
tree block = NULL;
rtx insn, next;
- VARRAY_TREE_INIT (insn_scopes, get_max_uid (), "insn scopes");
-
for (insn = get_insns (); insn; insn = next)
{
next = NEXT_INSN (insn);
@@ -202,7 +223,7 @@ scope_to_insns_initialize ()
if (active_insn_p (insn)
&& GET_CODE (PATTERN (insn)) != ADDR_VEC
&& GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC)
- VARRAY_TREE (insn_scopes, INSN_UID (insn)) = block;
+ INSN_SCOPE (insn) = block;
else if (GET_CODE (insn) == NOTE)
{
switch (NOTE_LINE_NUMBER (insn))
@@ -220,6 +241,10 @@ scope_to_insns_initialize ()
}
}
}
+
+ /* Tag the blocks with a depth number so that change_scope can find
+ the common parent easily. */
+ set_block_levels (DECL_INITIAL (cfun->decl), 0);
}
/* For each lexical block, set BLOCK_NUMBER to the depth at which it is
@@ -237,7 +262,21 @@ set_block_levels (block, level)
block = BLOCK_CHAIN (block);
}
}
-
+
+/* Return scope resulting from combination of S1 and S2. */
+tree
+choose_inner_scope (s1, s2)
+ tree s1, s2;
+{
+ if (!s1)
+ return s2;
+ if (!s2)
+ return s1;
+ if (BLOCK_NUMBER (s1) > BLOCK_NUMBER (s2))
+ return s1;
+ return s2;
+}
+
/* Emit lexical block notes needed to change scope from S1 to S2. */
static void
@@ -294,17 +333,26 @@ scope_to_insns_finalize ()
tree cur_block = DECL_INITIAL (cfun->decl);
rtx insn, note;
- /* Tag the blocks with a depth number so that change_scope can find
- the common parent easily. */
- set_block_levels (cur_block, 0);
-
- for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
+ insn = get_insns ();
+ if (!active_insn_p (insn))
+ insn = next_active_insn (insn);
+ for (; insn; insn = next_active_insn (insn))
{
tree this_block;
- if ((size_t) INSN_UID (insn) >= insn_scopes->num_elements)
- continue;
- this_block = VARRAY_TREE (insn_scopes, INSN_UID (insn));
+ this_block = INSN_SCOPE (insn);
+ /* For sequences compute scope resulting from merging all scopes
+ of instructions nested inside. */
+ if (GET_CODE (PATTERN (insn)) == SEQUENCE)
+ {
+ int i;
+ rtx body = PATTERN (insn);
+
+ this_block = NULL;
+ for (i = 0; i < XVECLEN (body, 0); i++)
+ this_block = choose_inner_scope (this_block,
+ INSN_SCOPE (XVECEXP (body, 0, i)));
+ }
if (! this_block)
continue;
@@ -315,8 +363,6 @@ scope_to_insns_finalize ()
}
}
- VARRAY_FREE (insn_scopes);
-
/* change_scope emits before the insn, not after. */
note = emit_note (NULL, NOTE_INSN_DELETED);
change_scope (note, cur_block, DECL_INITIAL (cfun->decl));
@@ -330,32 +376,49 @@ scope_to_insns_finalize ()
static void
fixup_reorder_chain ()
{
- basic_block bb, last_bb;
+ basic_block bb, prev_bb;
int index;
- rtx insn;
- int old_n_basic_blocks = n_basic_blocks;
+ rtx insn = NULL;
/* First do the bulk reordering -- rechain the blocks without regard to
the needed changes to jumps and labels. */
- for (last_bb = BASIC_BLOCK (0), bb = RBI (last_bb)->next, index = 1;
+ for (bb = ENTRY_BLOCK_PTR->next_bb, index = 0;
bb != 0;
- last_bb = bb, bb = RBI (bb)->next, index++)
+ bb = RBI (bb)->next, index++)
{
- rtx last_e = RBI (last_bb)->eff_end;
- rtx curr_h = RBI (bb)->eff_head;
-
- NEXT_INSN (last_e) = curr_h;
- PREV_INSN (curr_h) = last_e;
+ if (RBI (bb)->header)
+ {
+ if (insn)
+ NEXT_INSN (insn) = RBI (bb)->header;
+ else
+ set_first_insn (RBI (bb)->header);
+ PREV_INSN (RBI (bb)->header) = insn;
+ insn = RBI (bb)->header;
+ while (NEXT_INSN (insn))
+ insn = NEXT_INSN (insn);
+ }
+ if (insn)
+ NEXT_INSN (insn) = bb->head;
+ else
+ set_first_insn (bb->head);
+ PREV_INSN (bb->head) = insn;
+ insn = bb->end;
+ if (RBI (bb)->footer)
+ {
+ NEXT_INSN (insn) = RBI (bb)->footer;
+ PREV_INSN (RBI (bb)->footer) = insn;
+ while (NEXT_INSN (insn))
+ insn = NEXT_INSN (insn);
+ }
}
if (index != n_basic_blocks)
abort ();
- insn = RBI (last_bb)->eff_end;
- NEXT_INSN (insn) = function_tail_eff_head;
- if (function_tail_eff_head)
- PREV_INSN (function_tail_eff_head) = insn;
+ NEXT_INSN (insn) = function_footer;
+ if (function_footer)
+ PREV_INSN (function_footer) = insn;
while (NEXT_INSN (insn))
insn = NEXT_INSN (insn);
@@ -368,7 +431,7 @@ fixup_reorder_chain ()
/* Now add jumps and labels as needed to match the blocks new
outgoing edges. */
- for (bb = BASIC_BLOCK (0); bb ; bb = RBI (bb)->next)
+ for (bb = ENTRY_BLOCK_PTR->next_bb; bb ; bb = RBI (bb)->next)
{
edge e_fall, e_taken, e;
rtx bb_end_insn;
@@ -397,11 +460,44 @@ fixup_reorder_chain ()
&& e_fall->dest == EXIT_BLOCK_PTR))
continue;
+ if (!e_taken)
+ e_taken = e_fall;
+
+ /* The degenerated case of conditional jump jumping to the next
+ instruction can happen on target having jumps with side
+ effects.
+
+ Create temporarily the duplicated edge representing branch.
+ It will get unidentified by force_nonfallthru_and_redirect
+ that would otherwise get confused by fallthru edge not pointing
+ to the next basic block. */
+ if (!e_taken)
+ {
+ rtx note;
+ edge e_fake;
+
+ e_fake = unchecked_make_edge (bb, e_fall->dest, 0);
+
+ note = find_reg_note (bb->end, REG_BR_PROB, NULL_RTX);
+ if (note)
+ {
+ int prob = INTVAL (XEXP (note, 0));
+
+ e_fake->probability = prob;
+ e_fake->count = e_fall->count * prob / REG_BR_PROB_BASE;
+ e_fall->probability -= e_fall->probability;
+ e_fall->count -= e_fake->count;
+ if (e_fall->probability < 0)
+ e_fall->probability = 0;
+ if (e_fall->count < 0)
+ e_fall->count = 0;
+ }
+ }
/* There is one special case: if *neither* block is next,
such as happens at the very end of a function, then we'll
need to add a new unconditional jump. Choose the taken
edge based on known or assumed probability. */
- if (RBI (bb)->next != e_taken->dest)
+ else if (RBI (bb)->next != e_taken->dest)
{
rtx note = find_reg_note (bb_end_insn, REG_BR_PROB, 0);
@@ -417,7 +513,7 @@ fixup_reorder_chain ()
}
}
- /* Otherwise we can try to invert the jump. This will
+ /* Otherwise we can try to invert the jump. This will
basically never fail, however, keep up the pretense. */
else if (invert_jump (bb_end_insn,
label_for_bb (e_fall->dest), 0))
@@ -470,8 +566,6 @@ fixup_reorder_chain ()
if (nb)
{
alloc_aux_for_block (nb, sizeof (struct reorder_block_def));
- RBI (nb)->eff_head = nb->head;
- RBI (nb)->eff_end = NEXT_INSN (nb->end);
RBI (nb)->visited = 1;
RBI (nb)->next = RBI (bb)->next;
RBI (bb)->next = nb;
@@ -481,23 +575,38 @@ fixup_reorder_chain ()
}
/* Put basic_block_info in the new order. */
- bb = BASIC_BLOCK (0);
- index = 0;
if (rtl_dump_file)
- fprintf (rtl_dump_file, "Reordered sequence:\n");
-
- for (; bb; bb = RBI (bb)->next, index++)
{
- if (rtl_dump_file)
- fprintf (rtl_dump_file, " %i %sbb %i freq %i\n", index,
- bb->index >= old_n_basic_blocks ? "compensation " : "",
- bb->index,
- bb->frequency);
+ fprintf (rtl_dump_file, "Reordered sequence:\n");
+ for (bb = ENTRY_BLOCK_PTR->next_bb, index = 0; bb; bb = RBI (bb)->next, index ++)
+ {
+ fprintf (rtl_dump_file, " %i ", index);
+ if (RBI (bb)->original)
+ fprintf (rtl_dump_file, "duplicate of %i ",
+ RBI (bb)->original->index);
+ else if (forwarder_block_p (bb) && GET_CODE (bb->head) != CODE_LABEL)
+ fprintf (rtl_dump_file, "compensation ");
+ else
+ fprintf (rtl_dump_file, "bb %i ", bb->index);
+ fprintf (rtl_dump_file, " [%i]\n", bb->frequency);
+ }
+ }
+ prev_bb = ENTRY_BLOCK_PTR;
+ bb = ENTRY_BLOCK_PTR->next_bb;
+ index = 0;
+
+ for (; bb; prev_bb = bb, bb = RBI (bb)->next, index ++)
+ {
bb->index = index;
BASIC_BLOCK (index) = bb;
+
+ bb->prev_bb = prev_bb;
+ prev_bb->next_bb = bb;
}
+ prev_bb->next_bb = EXIT_BLOCK_PTR;
+ EXIT_BLOCK_PTR->prev_bb = prev_bb;
}
/* Perform sanity checks on the insn chain.
@@ -530,10 +639,70 @@ verify_insn_chain ()
if (insn_cnt1 != insn_cnt2)
abort ();
}
+
+/* Remove any unconditional jumps and forwarder block creating fallthru
+ edges instead. During BB reordering, fallthru edges are not required
+ to target next basic block in the linear CFG layout, so the unconditional
+ jumps are not needed. */
+
+static void
+cleanup_unconditional_jumps ()
+{
+ basic_block bb;
+
+ FOR_EACH_BB (bb)
+ {
+ if (!bb->succ)
+ continue;
+ if (bb->succ->flags & EDGE_FALLTHRU)
+ continue;
+ if (!bb->succ->succ_next)
+ {
+ rtx insn;
+ if (GET_CODE (bb->head) != CODE_LABEL && forwarder_block_p (bb)
+ && bb->prev_bb != ENTRY_BLOCK_PTR)
+ {
+ basic_block prev = bb->prev_bb;
+
+ if (rtl_dump_file)
+ fprintf (rtl_dump_file, "Removing forwarder BB %i\n",
+ bb->index);
-/* The block falling through to exit must be the last one in the reordered
- chain. Ensure it is. */
+ redirect_edge_succ_nodup (bb->pred, bb->succ->dest);
+ flow_delete_block (bb);
+ bb = prev;
+ }
+ else if (simplejump_p (bb->end))
+ {
+ rtx jump = bb->end;
+
+ if (rtl_dump_file)
+ fprintf (rtl_dump_file, "Removing jump %i in BB %i\n",
+ INSN_UID (jump), bb->index);
+ delete_insn (jump);
+ bb->succ->flags |= EDGE_FALLTHRU;
+ }
+ else
+ continue;
+ insn = NEXT_INSN (bb->end);
+ while (insn
+ && (GET_CODE (insn) != NOTE
+ || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
+ {
+ rtx next = NEXT_INSN (insn);
+
+ if (GET_CODE (insn) == BARRIER)
+ delete_barrier (insn);
+
+ insn = next;
+ }
+ }
+ }
+}
+
+/* The block falling through to exit must be the last one in the
+ reordered chain. Ensure that this condition is met. */
static void
fixup_fallthru_exit_predecessor ()
{
@@ -546,7 +715,7 @@ fixup_fallthru_exit_predecessor ()
if (bb && RBI (bb)->next)
{
- basic_block c = BASIC_BLOCK (0);
+ basic_block c = ENTRY_BLOCK_PTR->next_bb;
while (RBI (c)->next != bb)
c = RBI (c)->next;
@@ -560,15 +729,279 @@ fixup_fallthru_exit_predecessor ()
}
}
-/* Main entry point to this module: initialize the datastructures for CFG
- layout changes. */
+/* Return true in case it is possible to duplicate the basic block BB. */
+
+bool
+cfg_layout_can_duplicate_bb_p (bb)
+ basic_block bb;
+{
+ rtx next;
+ edge s;
+
+ if (bb == EXIT_BLOCK_PTR || bb == ENTRY_BLOCK_PTR)
+ return false;
+
+ /* Duplicating fallthru block to exit would require adding a jump
+ and splitting the real last BB. */
+ for (s = bb->succ; s; s = s->succ_next)
+ if (s->dest == EXIT_BLOCK_PTR && s->flags & EDGE_FALLTHRU)
+ return false;
+
+ /* Do not attempt to duplicate tablejumps, as we need to unshare
+   the dispatch table.  This is difficult to do, as the instructions
+ computing jump destination may be hoisted outside the basic block. */
+ if (GET_CODE (bb->end) == JUMP_INSN && JUMP_LABEL (bb->end)
+ && (next = next_nonnote_insn (JUMP_LABEL (bb->end)))
+ && GET_CODE (next) == JUMP_INSN
+ && (GET_CODE (PATTERN (next)) == ADDR_VEC
+ || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
+ return false;
+ return true;
+}
+
+static rtx
+duplicate_insn_chain (from, to)
+ rtx from, to;
+{
+ rtx insn, last;
+
+ /* Avoid updating of boundaries of previous basic block. The
+ note will get removed from insn stream in fixup. */
+ last = emit_note (NULL, NOTE_INSN_DELETED);
+
+ /* Create copy at the end of INSN chain. The chain will
+ be reordered later. */
+ for (insn = from; insn != NEXT_INSN (to); insn = NEXT_INSN (insn))
+ {
+ rtx new;
+ switch (GET_CODE (insn))
+ {
+ case INSN:
+ case CALL_INSN:
+ case JUMP_INSN:
+ /* Avoid copying of dispatch tables. We never duplicate
+ tablejumps, so this can hit only in case the table got
+ moved far from original jump. */
+ if (GET_CODE (PATTERN (insn)) == ADDR_VEC
+ || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
+ break;
+ new = emit_copy_of_insn_after (insn, get_last_insn ());
+ break;
+
+ case CODE_LABEL:
+ break;
+
+ case BARRIER:
+ emit_barrier ();
+ break;
+
+ case NOTE:
+ switch (NOTE_LINE_NUMBER (insn))
+ {
+ /* In case prologue is empty and function contain label
+ in first BB, we may want to copy the block. */
+ case NOTE_INSN_PROLOGUE_END:
+
+ case NOTE_INSN_LOOP_VTOP:
+ case NOTE_INSN_LOOP_CONT:
+ case NOTE_INSN_LOOP_BEG:
+ case NOTE_INSN_LOOP_END:
+ /* Strip down the loop notes - we don't really want to keep
+ them consistent in loop copies. */
+ case NOTE_INSN_DELETED:
+ case NOTE_INSN_DELETED_LABEL:
+ /* No problem to strip these. */
+ case NOTE_INSN_EPILOGUE_BEG:
+ case NOTE_INSN_FUNCTION_END:
+	    /* Debug code expects these notes to exist just once.
+ Keep them in the master copy.
+ ??? It probably makes more sense to duplicate them for each
+ epilogue copy. */
+ case NOTE_INSN_FUNCTION_BEG:
+ /* There is always just single entry to function. */
+ case NOTE_INSN_BASIC_BLOCK:
+ break;
+
+ /* There is no purpose to duplicate prologue. */
+ case NOTE_INSN_BLOCK_BEG:
+ case NOTE_INSN_BLOCK_END:
+ /* The BLOCK_BEG/BLOCK_END notes should be eliminated when BB
+ reordering is in the progress. */
+ case NOTE_INSN_EH_REGION_BEG:
+ case NOTE_INSN_EH_REGION_END:
+ /* Should never exist at BB duplication time. */
+ abort ();
+ break;
+ case NOTE_INSN_REPEATED_LINE_NUMBER:
+ emit_note (NOTE_SOURCE_FILE (insn), NOTE_LINE_NUMBER (insn));
+ break;
+
+ default:
+ if (NOTE_LINE_NUMBER (insn) < 0)
+ abort ();
+ /* It is possible that no_line_number is set and the note
+ won't be emitted. */
+ emit_note (NOTE_SOURCE_FILE (insn), NOTE_LINE_NUMBER (insn));
+ }
+ break;
+ default:
+ abort ();
+ }
+ }
+ insn = NEXT_INSN (last);
+ delete_insn (last);
+ return insn;
+}
+
+/* Redirect Edge to DEST. */
+void
+cfg_layout_redirect_edge (e, dest)
+ edge e;
+ basic_block dest;
+{
+ basic_block src = e->src;
+ basic_block old_next_bb = src->next_bb;
+
+ /* Redirect_edge_and_branch may decide to turn branch into fallthru edge
+ in the case the basic block appears to be in sequence. Avoid this
+ transformation. */
+
+ src->next_bb = NULL;
+ if (e->flags & EDGE_FALLTHRU)
+ {
+ /* Redirect any branch edges unified with the fallthru one. */
+ if (GET_CODE (src->end) == JUMP_INSN
+ && JUMP_LABEL (src->end) == e->dest->head)
+ {
+ if (!redirect_jump (src->end, block_label (dest), 0))
+ abort ();
+ }
+ /* In case we are redirecting fallthru edge to the branch edge
+ of conditional jump, remove it. */
+ if (src->succ->succ_next
+ && !src->succ->succ_next->succ_next)
+ {
+ edge s = e->succ_next ? e->succ_next : src->succ;
+ if (s->dest == dest
+ && any_condjump_p (src->end)
+ && onlyjump_p (src->end))
+ delete_insn (src->end);
+ }
+ redirect_edge_succ_nodup (e, dest);
+ }
+ else
+ redirect_edge_and_branch (e, dest);
+
+ /* We don't want simplejumps in the insn stream during cfglayout. */
+ if (simplejump_p (src->end))
+ {
+ delete_insn (src->end);
+ delete_barrier (NEXT_INSN (src->end));
+ src->succ->flags |= EDGE_FALLTHRU;
+ }
+ src->next_bb = old_next_bb;
+}
+
+/* Create a duplicate of the basic block BB and redirect edge E into it. */
+
+basic_block
+cfg_layout_duplicate_bb (bb, e)
+ basic_block bb;
+ edge e;
+{
+ rtx insn;
+ edge s, n;
+ basic_block new_bb;
+ gcov_type new_count = e ? e->count : 0;
+
+ if (bb->count < new_count)
+ new_count = bb->count;
+ if (!bb->pred)
+ abort ();
+#ifdef ENABLE_CHECKING
+ if (!cfg_layout_can_duplicate_bb_p (bb))
+ abort ();
+#endif
+
+ insn = duplicate_insn_chain (bb->head, bb->end);
+ new_bb = create_basic_block (insn,
+ insn ? get_last_insn () : NULL,
+ EXIT_BLOCK_PTR->prev_bb);
+ alloc_aux_for_block (new_bb, sizeof (struct reorder_block_def));
+
+ if (RBI (bb)->header)
+ {
+ insn = RBI (bb)->header;
+ while (NEXT_INSN (insn))
+ insn = NEXT_INSN (insn);
+ insn = duplicate_insn_chain (RBI (bb)->header, insn);
+ if (insn)
+ RBI (new_bb)->header = unlink_insn_chain (insn, get_last_insn ());
+ }
+
+ if (RBI (bb)->footer)
+ {
+ insn = RBI (bb)->footer;
+ while (NEXT_INSN (insn))
+ insn = NEXT_INSN (insn);
+ insn = duplicate_insn_chain (RBI (bb)->footer, insn);
+ if (insn)
+ RBI (new_bb)->footer = unlink_insn_chain (insn, get_last_insn ());
+ }
+
+ if (bb->global_live_at_start)
+ {
+ new_bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
+ new_bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
+ COPY_REG_SET (new_bb->global_live_at_start, bb->global_live_at_start);
+ COPY_REG_SET (new_bb->global_live_at_end, bb->global_live_at_end);
+ }
+
+ new_bb->loop_depth = bb->loop_depth;
+ new_bb->flags = bb->flags;
+ for (s = bb->succ; s; s = s->succ_next)
+ {
+ n = make_edge (new_bb, s->dest, s->flags);
+ n->probability = s->probability;
+ if (new_count)
+ /* Take care for overflows! */
+ n->count = s->count * (new_count * 10000 / bb->count) / 10000;
+ else
+ n->count = 0;
+ s->count -= n->count;
+ }
+
+ new_bb->count = new_count;
+ bb->count -= new_count;
+
+ if (e)
+ {
+ new_bb->frequency = EDGE_FREQUENCY (e);
+ bb->frequency -= EDGE_FREQUENCY (e);
+
+ cfg_layout_redirect_edge (e, new_bb);
+ }
+
+ if (bb->count < 0)
+ bb->count = 0;
+ if (bb->frequency < 0)
+ bb->frequency = 0;
+
+ RBI (new_bb)->original = bb;
+ return new_bb;
+}
+
+/* Main entry point to this module - initialize the datastructures for
+ CFG layout changes. It keeps LOOPS up-to-date if not null. */
void
cfg_layout_initialize ()
{
+  /* Our algorithm depends on the fact that there are no dead jumptables
+     around the code. */
alloc_aux_for_blocks (sizeof (struct reorder_block_def));
- scope_to_insns_initialize ();
+ cleanup_unconditional_jumps ();
record_effective_endpoints ();
}
@@ -586,8 +1019,6 @@ cfg_layout_finalize ()
verify_insn_chain ();
#endif
- scope_to_insns_finalize ();
-
free_aux_for_blocks ();
#ifdef ENABLE_CHECKING
OpenPOWER on IntegriCloud