mirror of git://gcc.gnu.org/git/gcc.git
synced 2025-03-15 16:11:24 +08:00
basic-block.h (EDGE_CRITICAL): Remove; renumber other flags.
* basic-block.h (EDGE_CRITICAL): Remove; renumber other flags.
(EDGE_CRITICAL_P): New predicate.
* cfg.c (force_nonfallthru_and_redirect, split_edge): Kill EDGE_CRITICAL
handling.
(insert_insn_on_edge): Use EDGE_CRITICAL_P.
(dump_edge_info): Remove "crit".
* cfganal.c (mark_critical_edges): Kill.
* cfgbuild.c (find_basic_blocks): Remove mark_critical_edges call.
* cfgcleanup.c (cleanup_cfg): Likewise.
* profile.c (instrument_edges): Use EDGE_CRITICAL_P.
(find_spanning_tree): Likewise.
* reg-stack.c (convert_regs_1): Likewise.
* ssa.c (mark_regs_equivalent_over_bad_edges): Likewise.

* basic-block.h (create_basic_block_structure): New.
(create_basic_block): Update prototype.
(force_nonfallthru): New.
* bb-reorder.c (fixup_reorder_chain): Fixup use force_nonfallthru.
* cfg.c (create_basic_block_structure): Rename from create_basic_block;
handle updating of block_for_insn, creating of empty BBs and BBs at
the end of INSN chain.
(create_basic_block): New function.
(split_block): Use create_basic_block.
(force_nonfallthru_and_redirect): Break out from ...; cleanup
(redirect_edge_and_branch_force): ... here.
(force_nonfallthru): New.
(split_edge): Rewrite to use force_nonfallthru and create_block.
* cfgbuild.c (find_basic_blocks_1): Use create_basic_block_structure.
(find_basic_blocks): Free basic_block_for_insn.
* cfgcleanup.c (merge_blocks): Use force_nonfallthru.

* cfg.c: Fix formating.
* cfgcleanup.c: Fix formating.
(merge_blocks, tail_recursion_label_p): Return bool.
(merge_blocks_move_predecessor_nojumps,
merge_blocks_move_successor_nojumps): Return void.

From-SVN: r45549
This commit is contained in:
parent 201556f0e0
commit 4262e6231a
@@ -1,3 +1,42 @@
Tue Sep 11 18:57:47 CEST 2001 Jan Hubicka <jh@suse.cz>

* basic-block.h (EDGE_CRITICAL): Remove; renumber other flags.
(EDGE_CRITICAL_P): New predicate.
* cfg.c (force_nonfallthru_and_redirect, split_edge): Kill EDGE_CRITICAL
handling.
(insert_insn_on_edge): Use EDGE_CRITICAL_P.
(dump_edge_info): Remove "crit".
* cfganal.c (mark_critical_edges): Kill.
* cfgbuild.c (find_basic_blocks): Remove mark_critical_edges call.
* cfgcleanup.c (cleanup_cfg): Likewise.
* profile.c (instrument_edges): Use EDGE_CRITICAL_P.
(find_spanning_tree): Likewise.
* reg-stack.c (convert_regs_1): Likewise.
* ssa.c (mark_regs_equivalent_over_bad_edges): Likewise.

* basic-block.h (create_basic_block_structure): New.
(create_basic_block): Update prototype.
(force_nonfallthru): New.
* bb-reorder.c (fixup_reorder_chain): Fixup use force_nonfallthru.
* cfg.c (create_basic_block_structure): Rename from create_basic_block;
handle updating of block_for_insn, creating of empty BBs and BBs at
the end of INSN chain.
(create_basic_block): New function.
(split_block): Use create_basic_block.
(force_nonfallthru_and_redirect): Break out from ...; cleanup
(redirect_edge_and_branch_force): ... here.
(force_nonfallthru): New.
(split_edge): Rewrite to use force_nonfallthru and create_block.
* cfgbuild.c (find_basic_blocks_1): Use create_basic_block_structure.
(find_basic_blocks): Free basic_block_for_insn.
* cfgcleanup.c (merge_blocks): Use force_nonfallthru.

* cfg.c: Fix formating.
* cfgcleanup.c: Fix formating.
(merge_blocks, tail_recursion_label_p): Return bool.
(merge_blocks_move_predecessor_nojumps,
merge_blocks_move_successor_nojumps): Return void.

2001-09-11  Jakub Jelinek  <jakub@redhat.com>

* configure.in: Check whether assembler supports section merging.
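Editor's note: before the per-file hunks, a minimal, self-contained C sketch of the predicate this commit introduces. The struct layouts are simplified stand-ins for GCC's edge and basic_block types (only the fields the macro touches), and the tiny CFG in main is invented for illustration; the macro body itself is the one added to basic-block.h below. The point of the change is that "critical" is no longer a stored EDGE_CRITICAL flag bit that mark_critical_edges must keep up to date after every CFG change; it is recomputed from the edge lists on demand.

#include <stdio.h>

struct edge_def;

typedef struct basic_block_def
{
  struct edge_def *pred, *succ;             /* heads of the edge lists */
} *basic_block;

typedef struct edge_def
{
  struct edge_def *pred_next, *succ_next;   /* next edge into dest / out of src */
  basic_block src, dest;
} *edge;

/* As in the basic-block.h hunk: an edge is critical iff its source has
   more than one successor and its destination has more than one
   predecessor.  */
#define EDGE_CRITICAL_P(e) ((e)->src->succ->succ_next \
                            && (e)->dest->pred->pred_next)

int
main (void)
{
  /* A branches to B and C; B is also reached from D, so A->B is critical
     while A->C is not.  */
  struct basic_block_def a, b, c, d;
  struct edge_def e_ab, e_ac, e_db;

  a.pred = 0; a.succ = &e_ab;
  b.pred = &e_ab; b.succ = 0;
  c.pred = &e_ac; c.succ = 0;
  d.pred = 0; d.succ = &e_db;

  e_ab.src = &a; e_ab.dest = &b; e_ab.succ_next = &e_ac; e_ab.pred_next = &e_db;
  e_ac.src = &a; e_ac.dest = &c; e_ac.succ_next = 0; e_ac.pred_next = 0;
  e_db.src = &d; e_db.dest = &b; e_db.succ_next = 0; e_db.pred_next = 0;

  printf ("A->B critical: %d\n", EDGE_CRITICAL_P (&e_ab) != 0);   /* prints 1 */
  printf ("A->C critical: %d\n", EDGE_CRITICAL_P (&e_ac) != 0);   /* prints 0 */
  return 0;
}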
@@ -140,12 +140,11 @@ typedef struct edge_def {
} *edge;

#define EDGE_FALLTHRU 1
#define EDGE_CRITICAL 2
#define EDGE_ABNORMAL 4
#define EDGE_ABNORMAL_CALL 8
#define EDGE_EH 16
#define EDGE_FAKE 32
#define EDGE_DFS_BACK 64
#define EDGE_ABNORMAL 2
#define EDGE_ABNORMAL_CALL 4
#define EDGE_EH 8
#define EDGE_FAKE 16
#define EDGE_DFS_BACK 32

#define EDGE_COMPLEX (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL | EDGE_EH)

@@ -315,7 +314,8 @@ extern void remove_edge PARAMS ((edge));
extern void redirect_edge_succ PARAMS ((edge, basic_block));
extern edge redirect_edge_succ_nodup PARAMS ((edge, basic_block));
extern void redirect_edge_pred PARAMS ((edge, basic_block));
extern void create_basic_block PARAMS ((int, rtx, rtx, rtx));
extern basic_block create_basic_block_structure PARAMS ((int, rtx, rtx, rtx));
extern basic_block create_basic_block PARAMS ((int, rtx, rtx));
extern int flow_delete_block PARAMS ((basic_block));
extern void merge_blocks_nomove PARAMS ((basic_block, basic_block));
extern void tidy_fallthru_edge PARAMS ((edge, basic_block,

@@ -536,6 +536,10 @@ struct edge_list
+ REG_BR_PROB_BASE / 2) \
/ REG_BR_PROB_BASE)

/* Return nonzero if edge is critical. */
#define EDGE_CRITICAL_P(e) ((e)->src->succ->succ_next \
&& (e)->dest->pred->pred_next)

struct edge_list * create_edge_list PARAMS ((void));
void free_edge_list PARAMS ((struct edge_list *));
void print_edge_list PARAMS ((FILE *, struct edge_list *));

@@ -629,6 +633,7 @@ extern void allocate_bb_life_data PARAMS ((void));
extern void find_unreachable_blocks PARAMS ((void));
extern void delete_noop_moves PARAMS ((rtx));
extern basic_block redirect_edge_and_branch_force PARAMS ((edge, basic_block));
extern basic_block force_nonfallthru PARAMS ((edge));
extern bool redirect_edge_and_branch PARAMS ((edge, basic_block));
extern rtx block_label PARAMS ((basic_block));
extern bool forwarder_block_p PARAMS ((basic_block));
@@ -610,7 +610,7 @@ fixup_reorder_chain ()
for (bb = BASIC_BLOCK (0); bb ; bb = RBI (bb)->next)
{
edge e_fall, e_taken, e;
rtx jump_insn, barrier_insn, bb_end_insn;
rtx bb_end_insn;
basic_block nb;

if (bb->succ == NULL)

@@ -698,54 +698,24 @@ fixup_reorder_chain ()
/* An fallthru to exit block. */
if (!RBI (bb)->next && e_fall->dest == EXIT_BLOCK_PTR)
continue;

/* We need a new jump insn. If the block has only one outgoing
edge, then we can stuff the new jump insn in directly. */
if (bb->succ->succ_next == NULL)
{
e_fall->flags &= ~EDGE_FALLTHRU;

jump_insn = emit_jump_to_block_after (e_fall->dest, bb_end_insn);
bb->end = jump_insn;
barrier_insn = emit_barrier_after (jump_insn);
RBI (bb)->eff_end = barrier_insn;
continue;
}
}

/* We got here if we need to add a new jump insn in a new block
across the edge e_fall. */
/* We got here if we need to add a new jump insn. */

jump_insn = emit_jump_to_block_after (e_fall->dest, bb_end_insn);
barrier_insn = emit_barrier_after (jump_insn);
nb = force_nonfallthru (e_fall);

VARRAY_GROW (basic_block_info, ++n_basic_blocks);
create_basic_block (n_basic_blocks - 1, jump_insn, jump_insn, NULL);

nb = BASIC_BLOCK (n_basic_blocks - 1);
nb->local_set = 0;
nb->count = e_fall->count;
nb->frequency = EDGE_FREQUENCY (e_fall);

nb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
nb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
COPY_REG_SET (nb->global_live_at_start, bb->global_live_at_start);
COPY_REG_SET (nb->global_live_at_end, bb->global_live_at_start);

nb->aux = xmalloc (sizeof (struct reorder_block_def));
RBI (nb)->eff_head = nb->head;
RBI (nb)->eff_end = barrier_insn;
RBI (nb)->scope = RBI (bb)->scope;
RBI (nb)->visited = 1;
RBI (nb)->next = RBI (bb)->next;
RBI (bb)->next = nb;

/* Link to new block. */
make_single_succ_edge (nb, e_fall->dest, 0);
redirect_edge_succ (e_fall, nb);

/* Don't process this new block. */
bb = nb;
if (nb)
{
nb->aux = xmalloc (sizeof (struct reorder_block_def));
RBI (nb)->eff_head = nb->head;
RBI (nb)->eff_end = NEXT_INSN (nb->end);
RBI (nb)->scope = RBI (bb)->scope;
RBI (nb)->visited = 1;
RBI (nb)->next = RBI (bb)->next;
RBI (bb)->next = nb;
/* Don't process this new block. */
bb = nb;
}
}

/* Put basic_block_info in the new order. */
gcc/cfg.c: 443 changed lines
@@ -40,7 +40,7 @@ Software Foundation, 59 Temple Place - Suite 330, Boston, MA
- High level edge redirection (with updating and optimizing instruction
chain)
block_label, redirect_edge_and_branch,
redirect_edge_and_branch_force, tidy_fallthru_edge
redirect_edge_and_branch_force, tidy_fallthru_edge, force_nonfallthru
- Edge splitting and commiting to edges
split_edge, insert_insn_on_edge, commit_edge_insertions
- Dumpipng and debugging

@@ -147,6 +147,7 @@ static bool try_redirect_by_replacing_jump PARAMS ((edge, basic_block));
static void expunge_block PARAMS ((basic_block));
static rtx last_loop_beg_note PARAMS ((rtx));
static bool back_edge_of_syntactic_loop_p PARAMS ((basic_block, basic_block));
static basic_block force_nonfallthru_and_redirect PARAMS ((edge, basic_block));

/* Called once at intialization time. */

@@ -320,11 +321,16 @@ flow_delete_insn_chain (start, finish)
}

/* Create a new basic block consisting of the instructions between
HEAD and END inclusive. Reuses the note and basic block struct
in BB_NOTE, if any. */
HEAD and END inclusive. This function is designed to allow fast
BB construction - reuses the note and basic block struct
in BB_NOTE, if any and do not grow BASIC_BLOCK chain and should
be used directly only by CFG construction code.
END can be NULL in to create new empty basic block before HEAD.
Both END and HEAD can be NULL to create basic block at the end of
INSN chain. */

void
create_basic_block (index, head, end, bb_note)
basic_block
create_basic_block_structure (index, head, end, bb_note)
int index;
rtx head, end, bb_note;
{

@@ -360,12 +366,23 @@ create_basic_block (index, head, end, bb_note)
bb = (basic_block) obstack_alloc (&flow_obstack, sizeof (*bb));
memset (bb, 0, sizeof (*bb));

if (GET_CODE (head) == CODE_LABEL)
bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
if (!head && !end)
{
head = end = bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK,
get_last_insn ());
}
else if (GET_CODE (head) == CODE_LABEL && end)
{
bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
if (head == end)
end = bb_note;
}
else
{
bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
head = bb_note;
if (!end)
end = head;
}
NOTE_BASIC_BLOCK (bb_note) = bb;
}

@@ -378,10 +395,46 @@ create_basic_block (index, head, end, bb_note)
bb->end = end;
bb->index = index;
BASIC_BLOCK (index) = bb;
if (basic_block_for_insn)
update_bb_for_insn (bb);

/* Tag the block so that we know it has been used when considering
other basic block notes. */
bb->aux = bb;

return bb;
}

/* Create new basic block consisting of instructions in between HEAD and
END and place it to the BB chain at possition INDEX.
END can be NULL in to create new empty basic block before HEAD.
Both END and HEAD can be NULL to create basic block at the end of
INSN chain. */

basic_block
create_basic_block (index, head, end)
int index;
rtx head, end;
{
basic_block bb;
int i;

/* Place the new block just after the block being split. */
VARRAY_GROW (basic_block_info, ++n_basic_blocks);

/* Some parts of the compiler expect blocks to be number in
sequential order so insert the new block immediately after the
block being split.. */
for (i = n_basic_blocks - 1; i > index; --i)
{
basic_block tmp = BASIC_BLOCK (i - 1);
BASIC_BLOCK (i) = tmp;
tmp->index = i;
}

bb = create_basic_block_structure (index, head, end, NULL);
bb->aux = NULL;
return bb;
}

/* Remove block B from the basic block array and compact behind it. */
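Editor's note: a rough, self-contained sketch of the two-level constructor the hunks above introduce. The toy types and the plain realloc'ed array below stand in for GCC's rtx chain and the basic_block_info varray and are invented for illustration; only the shape of the API mirrors the patch: create_basic_block_structure builds the block object for an existing slot without touching the block array (the fast path reserved for the CFG builder), while the new create_basic_block grows the array, shifts later blocks up so numbering stays sequential, and then delegates to it.

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

/* "Insns" are opaque pointers here; the block vector plays the role of
   BASIC_BLOCK (i).  */
typedef struct toy_block
{
  void *head, *end;
  int index;
} *toy_block;

static toy_block *block_array;
static int n_blocks;

/* Low-level constructor, analogous to create_basic_block_structure:
   build the block object itself, assuming slot INDEX already exists.  */
static toy_block
toy_create_block_structure (int index, void *head, void *end)
{
  toy_block bb = malloc (sizeof (*bb));
  memset (bb, 0, sizeof (*bb));
  bb->head = head;
  bb->end = end;
  bb->index = index;
  block_array[index] = bb;
  return bb;
}

/* High-level constructor, analogous to the new create_basic_block: grow
   the array, renumber the blocks that follow, then delegate.  */
static toy_block
toy_create_block (int index, void *head, void *end)
{
  int i;

  block_array = realloc (block_array, ++n_blocks * sizeof (*block_array));
  for (i = n_blocks - 1; i > index; --i)
    {
      block_array[i] = block_array[i - 1];
      block_array[i]->index = i;
    }
  return toy_create_block_structure (index, head, end);
}

int
main (void)
{
  toy_create_block (0, "h0", "e0");
  toy_create_block (0, "h1", "e1");   /* inserted before the first block */
  printf ("%d blocks, block 1 head = %s\n",
          n_blocks, (char *) block_array[1]->head);   /* 2 blocks, h0 */
  return 0;
}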
@@ -570,6 +623,7 @@ set_block_for_new_insns (insn, bb)

/* Create an edge connecting SRC and DST with FLAGS optionally using
edge cache CACHE. Return the new edge, NULL if already exist. */

edge
cached_make_edge (edge_cache, src, dst, flags)
sbitmap *edge_cache;

@@ -777,75 +831,26 @@ split_block (bb, insn)
basic_block new_bb;
edge new_edge;
edge e;
rtx bb_note;
int i, j;

/* There is no point splitting the block after its end. */
if (bb->end == insn)
return 0;

/* Create the new structures. */
new_bb = (basic_block) obstack_alloc (&flow_obstack, sizeof (*new_bb));

memset (new_bb, 0, sizeof (*new_bb));

new_bb->head = NEXT_INSN (insn);
new_bb->end = bb->end;
bb->end = insn;

new_bb->succ = bb->succ;
bb->succ = NULL;
new_bb->pred = NULL;
/* Create the new basic block. */
new_bb = create_basic_block (bb->index + 1, NEXT_INSN (insn), bb->end);
new_bb->count = bb->count;
new_bb->frequency = bb->frequency;
new_bb->loop_depth = bb->loop_depth;
bb->end = insn;

/* Redirect the src of the successor edges of bb to point to new_bb. */
/* Redirect the outgoing edges. */
new_bb->succ = bb->succ;
bb->succ = NULL;
for (e = new_bb->succ; e; e = e->succ_next)
e->src = new_bb;

new_edge = make_single_succ_edge (bb, new_bb, EDGE_FALLTHRU);

/* Place the new block just after the block being split. */
VARRAY_GROW (basic_block_info, ++n_basic_blocks);

/* Some parts of the compiler expect blocks to be number in
sequential order so insert the new block immediately after the
block being split.. */
j = bb->index;
for (i = n_basic_blocks - 1; i > j + 1; --i)
{
basic_block tmp = BASIC_BLOCK (i - 1);
BASIC_BLOCK (i) = tmp;
tmp->index = i;
}

BASIC_BLOCK (i) = new_bb;
new_bb->index = i;

if (GET_CODE (new_bb->head) == CODE_LABEL)
{
/* Create the basic block note. */
bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK,
new_bb->head);
NOTE_BASIC_BLOCK (bb_note) = new_bb;

/* If the only thing in this new block was the label, make sure
the block note gets included. */
if (new_bb->head == new_bb->end)
new_bb->end = bb_note;
}
else
{
/* Create the basic block note. */
bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK,
new_bb->head);
NOTE_BASIC_BLOCK (bb_note) = new_bb;
new_bb->head = bb_note;
}

update_bb_for_insn (new_bb);

if (bb->global_live_at_start)
{
new_bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);

@@ -1110,7 +1115,7 @@ last_loop_beg_note (insn)
{
rtx last = insn;
insn = NEXT_INSN (insn);
while (GET_CODE (insn) == NOTE
while (insn && GET_CODE (insn) == NOTE
&& NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK)
{
if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)

@@ -1222,6 +1227,80 @@ redirect_edge_and_branch (e, target)
return true;
}

/* Like force_nonfallthru bellow, but additionally performs redirection
Used by redirect_edge_and_branch_force. */

static basic_block
force_nonfallthru_and_redirect (e, target)
edge e;
basic_block target;
{
basic_block jump_block, new_bb = NULL;
rtx note;
edge new_edge;
rtx label;

if (e->flags & EDGE_ABNORMAL)
abort ();
if (!(e->flags & EDGE_FALLTHRU))
abort ();
if (e->src->succ->succ_next)
{
/* Create the new structures. */
note = last_loop_beg_note (e->src->end);
jump_block = create_basic_block (e->src->index + 1, NEXT_INSN (note), NULL);
jump_block->count = e->count;
jump_block->frequency = EDGE_FREQUENCY (e);
jump_block->loop_depth = target->loop_depth;

if (target->global_live_at_start)
{
jump_block->global_live_at_start =
OBSTACK_ALLOC_REG_SET (&flow_obstack);
jump_block->global_live_at_end =
OBSTACK_ALLOC_REG_SET (&flow_obstack);
COPY_REG_SET (jump_block->global_live_at_start,
target->global_live_at_start);
COPY_REG_SET (jump_block->global_live_at_end,
target->global_live_at_start);
}

/* Wire edge in. */
new_edge = make_edge (e->src, jump_block, EDGE_FALLTHRU);
new_edge->probability = e->probability;
new_edge->count = e->count;

/* Redirect old edge. */
redirect_edge_pred (e, jump_block);
e->probability = REG_BR_PROB_BASE;

new_bb = jump_block;
}
else
jump_block = e->src;
e->flags &= ~EDGE_FALLTHRU;
label = block_label (target);
jump_block->end = emit_jump_insn_after (gen_jump (label), jump_block->end);
JUMP_LABEL (jump_block->end) = label;
LABEL_NUSES (label)++;
if (basic_block_for_insn)
set_block_for_new_insns (jump_block->end, jump_block);
emit_barrier_after (jump_block->end);
redirect_edge_succ_nodup (e, target);

return new_bb;
}

/* Edge E is assumed to be fallthru edge. Emit needed jump instruction
(and possibly create new basic block) to make edge non-fallthru.
Return newly created BB or NULL if none. */
basic_block
force_nonfallthru (e)
edge e;
{
return force_nonfallthru_and_redirect (e, e->dest);
}

/* Redirect edge even at the expense of creating new jump insn or
basic block. Return new basic block if created, NULL otherwise.
Abort if converison is impossible. */
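Editor's note: a hypothetical caller-side sketch (not GCC code) of the contract force_nonfallthru establishes above and that the bb-reorder.c and cfgcleanup.c hunks rely on: the edge passed in must currently be a fallthru edge, the helper either places the jump in e->src or creates a fresh jump block, and it returns the new block or NULL so the caller only has to handle the "a block was created" case. The toy types and the stub body are invented; only the return convention is taken from the patch.

#include <stdio.h>

typedef struct toy_bb { int index; } *toy_bb;
typedef struct toy_edge { toy_bb src, dest; int fallthru; } *toy_edge;

/* Stub with the same shape as force_nonfallthru: returns the newly
   created jump block, or NULL when the jump fits in e->src.  */
static toy_bb
toy_force_nonfallthru (toy_edge e)
{
  e->fallthru = 0;               /* the edge is no longer a fallthru */
  return NULL;                   /* pretend e->src could hold the jump */
}

int
main (void)
{
  struct toy_bb a = { 0 }, b = { 1 };
  struct toy_edge e;
  toy_bb nb;

  e.src = &a; e.dest = &b; e.fallthru = 1;

  /* The pattern used by the fixup_reorder_chain and merge_blocks hunks:
     call the helper, then do bookkeeping only if a new block appeared.  */
  nb = toy_force_nonfallthru (&e);
  if (nb)
    printf ("new jump block %d created\n", nb->index);
  else
    printf ("jump emitted in existing block %d\n", e.src->index);
  return 0;
}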
@@ -1232,107 +1311,15 @@ redirect_edge_and_branch_force (e, target)
basic_block target;
{
basic_block new_bb;
edge new_edge;
rtx label;
rtx bb_note;
int i, j;

if (redirect_edge_and_branch (e, target))
return NULL;
if (e->dest == target)
return NULL;
if (e->flags & EDGE_ABNORMAL)
abort ();
if (!(e->flags & EDGE_FALLTHRU))
abort ();

e->flags &= ~EDGE_FALLTHRU;
label = block_label (target);
/* Case of the fallthru block. */
if (!e->src->succ->succ_next)
{
e->src->end = emit_jump_insn_after (gen_jump (label),
last_loop_beg_note (e->src->end));
JUMP_LABEL (e->src->end) = label;
LABEL_NUSES (label)++;
if (basic_block_for_insn)
set_block_for_new_insns (e->src->end, e->src);
emit_barrier_after (e->src->end);
if (rtl_dump_file)
fprintf (rtl_dump_file,
"Emitting jump insn %i to redirect edge %i->%i to %i\n",
INSN_UID (e->src->end), e->src->index, e->dest->index,
target->index);
redirect_edge_succ (e, target);
return NULL;
}
/* Redirecting fallthru edge of the conditional needs extra work. */

if (rtl_dump_file)
fprintf (rtl_dump_file,
"Emitting jump insn %i in new BB to redirect edge %i->%i to %i\n",
INSN_UID (e->src->end), e->src->index, e->dest->index,
target->index);

/* Create the new structures. */
new_bb = (basic_block) obstack_alloc (&flow_obstack, sizeof (*new_bb));

memset (new_bb, 0, sizeof (*new_bb));

new_bb->end = new_bb->head = last_loop_beg_note (e->src->end);
new_bb->succ = NULL;
new_bb->pred = NULL;
new_bb->count = e->count;
new_bb->frequency = EDGE_FREQUENCY (e);
new_bb->loop_depth = e->dest->loop_depth;

if (target->global_live_at_start)
{
new_bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
new_bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
COPY_REG_SET (new_bb->global_live_at_start,
target->global_live_at_start);
COPY_REG_SET (new_bb->global_live_at_end, new_bb->global_live_at_start);
}

/* Wire edge in. */
new_edge = make_edge (e->src, new_bb, EDGE_FALLTHRU);
new_edge->probability = e->probability;
new_edge->count = e->count;

/* Redirect old edge. */
redirect_edge_succ (e, target);
redirect_edge_pred (e, new_bb);
e->probability = REG_BR_PROB_BASE;

/* Place the new block just after the block being split. */
VARRAY_GROW (basic_block_info, ++n_basic_blocks);

/* Some parts of the compiler expect blocks to be number in
sequential order so insert the new block immediately after the
block being split.. */
j = new_edge->src->index;
for (i = n_basic_blocks - 1; i > j + 1; --i)
{
basic_block tmp = BASIC_BLOCK (i - 1);
BASIC_BLOCK (i) = tmp;
tmp->index = i;
}

BASIC_BLOCK (i) = new_bb;
new_bb->index = i;

/* Create the basic block note. */
bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, new_bb->head);
NOTE_BASIC_BLOCK (bb_note) = new_bb;
new_bb->head = bb_note;

new_bb->end = emit_jump_insn_after (gen_jump (label), new_bb->head);
JUMP_LABEL (new_bb->end) = label;
LABEL_NUSES (label)++;
if (basic_block_for_insn)
set_block_for_new_insns (new_bb->end, new_bb);
emit_barrier_after (new_bb->end);
/* In case the edge redirection failed, try to force it to be non-fallthru
and redirect newly created simplejump. */
new_bb = force_nonfallthru_and_redirect (e, target);
return new_bb;
}

@@ -1479,111 +1466,27 @@ basic_block
split_edge (edge_in)
edge edge_in;
{
basic_block old_pred, bb, old_succ;
basic_block bb;
edge edge_out;
rtx bb_note;
int i, j;
rtx before;

/* Abnormal edges cannot be split. */
if ((edge_in->flags & EDGE_ABNORMAL) != 0)
abort ();

old_pred = edge_in->src;
old_succ = edge_in->dest;

/* Create the new structures. */
bb = (basic_block) obstack_alloc (&flow_obstack, sizeof (*bb));

memset (bb, 0, sizeof (*bb));

/* ??? This info is likely going to be out of date very soon. */
if (old_succ->global_live_at_start)
{
bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
COPY_REG_SET (bb->global_live_at_start, old_succ->global_live_at_start);
COPY_REG_SET (bb->global_live_at_end, old_succ->global_live_at_start);
}

/* Wire them up. */
bb->succ = NULL;
bb->count = edge_in->count;
bb->frequency = EDGE_FREQUENCY (edge_in);

edge_in->flags &= ~EDGE_CRITICAL;

edge_out = make_single_succ_edge (bb, old_succ, EDGE_FALLTHRU);

/* Tricky case -- if there existed a fallthru into the successor
(and we're not it) we must add a new unconditional jump around
the new block we're actually interested in.

Further, if that edge is critical, this means a second new basic
block must be created to hold it. In order to simplify correct
insn placement, do this before we touch the existing basic block
ordering for the block we were really wanting. */
/* We are going to place the new block in front of edge destination.
Avoid existence of fallthru predecesors. */
if ((edge_in->flags & EDGE_FALLTHRU) == 0)
{
edge e;
for (e = edge_out->pred_next; e; e = e->pred_next)
for (e = edge_in->dest->pred; e; e = e->pred_next)
if (e->flags & EDGE_FALLTHRU)
break;

if (e)
{
basic_block jump_block;
rtx pos;

if ((e->flags & EDGE_CRITICAL) == 0
&& e->src != ENTRY_BLOCK_PTR)
{
/* Non critical -- we can simply add a jump to the end
of the existing predecessor. */
jump_block = e->src;
}
else
{
/* We need a new block to hold the jump. The simplest
way to do the bulk of the work here is to recursively
call ourselves. */
jump_block = split_edge (e);
e = jump_block->succ;
}

/* Now add the jump insn ... */
pos = emit_jump_insn_after (gen_jump (old_succ->head),
last_loop_beg_note (jump_block->end));
jump_block->end = pos;
if (basic_block_for_insn)
set_block_for_new_insns (pos, jump_block);
emit_barrier_after (pos);

/* ... let jump know that label is in use, ... */
JUMP_LABEL (pos) = old_succ->head;
++LABEL_NUSES (old_succ->head);

/* ... and clear fallthru on the outgoing edge. */
e->flags &= ~EDGE_FALLTHRU;

/* Continue splitting the interesting edge. */
}
force_nonfallthru (e);
}

/* Place the new block just in front of the successor. */
VARRAY_GROW (basic_block_info, ++n_basic_blocks);
if (old_succ == EXIT_BLOCK_PTR)
j = n_basic_blocks - 1;
else
j = old_succ->index;
for (i = n_basic_blocks - 1; i > j; --i)
{
basic_block tmp = BASIC_BLOCK (i - 1);
BASIC_BLOCK (i) = tmp;
tmp->index = i;
}
BASIC_BLOCK (i) = bb;
bb->index = i;

/* Create the basic block note.

Where we place the note can have a noticable impact on the generated

@@ -1601,19 +1504,33 @@ split_edge (edge_in)
we want to ensure the instructions we insert are outside of any
loop notes that physically sit between block 0 and block 1. Otherwise
we confuse the loop optimizer into thinking the loop is a phony. */
if (old_succ != EXIT_BLOCK_PTR
&& PREV_INSN (old_succ->head)
&& GET_CODE (PREV_INSN (old_succ->head)) == NOTE
&& NOTE_LINE_NUMBER (PREV_INSN (old_succ->head)) == NOTE_INSN_LOOP_BEG
&& !back_edge_of_syntactic_loop_p (old_succ, old_pred))
bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK,
PREV_INSN (old_succ->head));
else if (old_succ != EXIT_BLOCK_PTR)
bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, old_succ->head);

if (edge_in->dest != EXIT_BLOCK_PTR
&& PREV_INSN (edge_in->dest->head)
&& GET_CODE (PREV_INSN (edge_in->dest->head)) == NOTE
&& NOTE_LINE_NUMBER (PREV_INSN (edge_in->dest->head)) == NOTE_INSN_LOOP_BEG
&& !back_edge_of_syntactic_loop_p (edge_in->dest, edge_in->src))
before = PREV_INSN (edge_in->dest->head);
else if (edge_in->dest != EXIT_BLOCK_PTR)
before = edge_in->dest->head;
else
bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
NOTE_BASIC_BLOCK (bb_note) = bb;
bb->head = bb->end = bb_note;
before = NULL_RTX;

bb = create_basic_block (edge_in->dest == EXIT_BLOCK_PTR ? n_basic_blocks
: edge_in->dest->index, before, NULL);
bb->count = edge_in->count;
bb->frequency = EDGE_FREQUENCY (edge_in);

/* ??? This info is likely going to be out of date very soon. */
if (edge_in->dest->global_live_at_start)
{
bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
COPY_REG_SET (bb->global_live_at_start, edge_in->dest->global_live_at_start);
COPY_REG_SET (bb->global_live_at_end, edge_in->dest->global_live_at_start);
}

edge_out = make_single_succ_edge (bb, edge_in->dest, EDGE_FALLTHRU);

/* For non-fallthry edges, we must adjust the predecessor's
jump instruction to target our new block. */

@@ -1639,8 +1556,7 @@ insert_insn_on_edge (pattern, e)
{
/* We cannot insert instructions on an abnormal critical edge.
It will be easier to find the culprit if we die now. */
if ((e->flags & (EDGE_ABNORMAL|EDGE_CRITICAL))
== (EDGE_ABNORMAL|EDGE_CRITICAL))
if ((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e))
abort ();

if (e->insns == NULL_RTX)

@@ -1917,7 +1833,7 @@ dump_edge_info (file, e, do_succ)
if (e->flags)
{
static const char * const bitnames[] = {
"fallthru", "crit", "ab", "abcall", "eh", "fake", "dfs_back"
"fallthru", "ab", "abcall", "eh", "fake", "dfs_back"
};
int comma = 0;
int i, flags = e->flags;

@@ -2390,7 +2306,6 @@ verify_flow_info ()
free (edge_checksum);
}

/* Assume that the preceeding pass has possibly eliminated jump instructions
or converted the unconditional jumps. Eliminate the edges from CFG.
Return true if any edges are eliminated. */
@@ -89,50 +89,6 @@ can_fallthru (src, target)
return next_active_insn (insn) == insn2;
}

/* Identify critical edges and set the bits appropriately. */

void
mark_critical_edges ()
{
int i, n = n_basic_blocks;
basic_block bb;

/* We begin with the entry block. This is not terribly important now,
but could be if a front end (Fortran) implemented alternate entry
points. */
bb = ENTRY_BLOCK_PTR;
i = -1;

while (1)
{
edge e;

/* (1) Critical edges must have a source with multiple successors. */
if (bb->succ && bb->succ->succ_next)
{
for (e = bb->succ; e; e = e->succ_next)
{
/* (2) Critical edges must have a destination with multiple
predecessors. Note that we know there is at least one
predecessor -- the edge we followed to get here. */
if (e->dest->pred->pred_next)
e->flags |= EDGE_CRITICAL;
else
e->flags &= ~EDGE_CRITICAL;
}
}
else
{
for (e = bb->succ; e; e = e->succ_next)
e->flags &= ~EDGE_CRITICAL;
}

if (++i >= n)
break;
bb = BASIC_BLOCK (i);
}
}

/* Mark the back edges in DFS traversal.
Return non-zero if a loop (natural or otherwise) is present.
Inspired by Depth_First_Search_PP described in:
@@ -452,7 +452,7 @@ find_basic_blocks_1 (f)
to a barrier or some such, no need to do it again. */
if (head != NULL_RTX)
{
create_basic_block (i++, head, end, bb_note);
create_basic_block_structure (i++, head, end, bb_note);
bb_note = NULL_RTX;
}

@@ -523,7 +523,7 @@ find_basic_blocks_1 (f)
end = insn;

new_bb_exclusive:
create_basic_block (i++, head, end, bb_note);
create_basic_block_structure (i++, head, end, bb_note);
head = end = NULL_RTX;
bb_note = NULL_RTX;
break;

@@ -579,7 +579,7 @@ find_basic_blocks_1 (f)
}

if (head != NULL_RTX)
create_basic_block (i++, head, end, bb_note);
create_basic_block_structure (i++, head, end, bb_note);
else if (bb_note)
flow_delete_insn (bb_note);

@@ -604,6 +604,10 @@ find_basic_blocks (f, nregs, file)
int max_uid;
timevar_push (TV_CFG);

if (basic_block_for_insn)
VARRAY_FREE (basic_block_for_insn);
basic_block_for_insn = 0;

/* Flush out existing data. */
if (basic_block_info != NULL)
{

@@ -655,8 +659,6 @@ find_basic_blocks (f, nregs, file)
here and cleanup_cfg, e.g. thread_prologue_and_epilogue_insns. */
tidy_fallthru_edges ();

mark_critical_edges ();

#ifdef ENABLE_CHECKING
verify_flow_info ();
#endif
gcc/cfgcleanup.c: 104 changed lines
@@ -52,12 +52,12 @@ static int flow_find_cross_jump PARAMS ((int, basic_block, basic_block,
rtx *, rtx *));

static bool delete_unreachable_blocks PARAMS ((void));
static int tail_recursion_label_p PARAMS ((rtx));
static int merge_blocks_move_predecessor_nojumps PARAMS ((basic_block,
static bool tail_recursion_label_p PARAMS ((rtx));
static void merge_blocks_move_predecessor_nojumps PARAMS ((basic_block,
basic_block));
static int merge_blocks_move_successor_nojumps PARAMS ((basic_block,
static void merge_blocks_move_successor_nojumps PARAMS ((basic_block,
basic_block));
static int merge_blocks PARAMS ((edge,basic_block,basic_block,
static bool merge_blocks PARAMS ((edge,basic_block,basic_block,
int));
static bool try_optimize_cfg PARAMS ((int));
static bool try_simplify_condjump PARAMS ((basic_block));

@@ -248,7 +248,9 @@ try_forward_edges (mode, b)
return changed;
}

static int
/* Return true if LABEL is used for tail recursion. */

static bool
tail_recursion_label_p (label)
rtx label;
{

@@ -256,16 +258,16 @@ tail_recursion_label_p (label)

for (x = tail_recursion_label_list; x; x = XEXP (x, 1))
if (label == XEXP (x, 0))
return 1;
return true;

return 0;
return false;
}

/* Blocks A and B are to be merged into a single block. A has no incoming
fallthru edge, so it can be moved before B without adding or modifying
any jumps (aside from the jump from A to B). */

static int
static void
merge_blocks_move_predecessor_nojumps (a, b)
basic_block a, b;
{

@@ -307,15 +309,13 @@ merge_blocks_move_predecessor_nojumps (a, b)

/* Now blocks A and B are contiguous. Merge them. */
merge_blocks_nomove (a, b);

return 1;
}

/* Blocks A and B are to be merged into a single block. B has no outgoing
fallthru edge, so it can be moved after A without adding or modifying
any jumps (aside from the jump from A to B). */

static int
static void
merge_blocks_move_successor_nojumps (a, b)
basic_block a, b;
{

@@ -359,14 +359,12 @@ merge_blocks_move_successor_nojumps (a, b)
fprintf (rtl_dump_file, "Moved block %d after %d and merged.\n",
b->index, a->index);
}

return 1;
}

/* Attempt to merge basic blocks that are potentially non-adjacent.
Return true iff the attempt succeeded. */

static int
static bool
merge_blocks (e, b, c, mode)
edge e;
basic_block b, c;

@@ -376,9 +374,10 @@ merge_blocks (e, b, c, mode)
edge recorded from the call_placeholder back to this label, as
that would make optimize_sibling_and_tail_recursive_calls more
complex for no gain. */
if (GET_CODE (c->head) == CODE_LABEL
if ((mode & CLEANUP_PRE_SIBCALL)
&& GET_CODE (c->head) == CODE_LABEL
&& tail_recursion_label_p (c->head))
return 0;
return false;

/* If B has a fallthru edge to C, no need to move anything. */
if (e->flags & EDGE_FALLTHRU)

@@ -391,22 +390,22 @@ merge_blocks (e, b, c, mode)
b->index, c->index);
}

return 1;
return true;
}
/* Otherwise we will need to move code around. Do that only if expensive
transformations are allowed. */
else if (mode & CLEANUP_EXPENSIVE)
{
edge tmp_edge, c_fallthru_edge;
int c_has_outgoing_fallthru;
int b_has_incoming_fallthru;
edge tmp_edge, b_fallthru_edge;
bool c_has_outgoing_fallthru;
bool b_has_incoming_fallthru;

/* Avoid overactive code motion, as the forwarder blocks should be
eliminated by edge redirection instead. One exception might have
been if B is a forwarder block and C has no fallthru edge, but
that should be cleaned up by bb-reorder instead. */
if (forwarder_block_p (b) || forwarder_block_p (c))
return 0;
return false;

/* We must make sure to not munge nesting of lexical blocks,
and loop notes. This is done by squeezing out all the notes

@@ -416,59 +415,37 @@ merge_blocks (e, b, c, mode)
if (tmp_edge->flags & EDGE_FALLTHRU)
break;
c_has_outgoing_fallthru = (tmp_edge != NULL);
c_fallthru_edge = tmp_edge;

for (tmp_edge = b->pred; tmp_edge; tmp_edge = tmp_edge->pred_next)
if (tmp_edge->flags & EDGE_FALLTHRU)
break;
b_has_incoming_fallthru = (tmp_edge != NULL);

/* If B does not have an incoming fallthru, then it can be moved
immediately before C without introducing or modifying jumps.
C cannot be the first block, so we do not have to worry about
accessing a non-existent block. */
if (! b_has_incoming_fallthru)
return merge_blocks_move_predecessor_nojumps (b, c);
b_fallthru_edge = tmp_edge;

/* Otherwise, we're going to try to move C after B. If C does
not have an outgoing fallthru, then it can be moved
immediately after B without introducing or modifying jumps. */
if (! c_has_outgoing_fallthru)
return merge_blocks_move_successor_nojumps (b, c);
{
merge_blocks_move_successor_nojumps (b, c);
return true;
}

/* Otherwise, we'll need to insert an extra jump, and possibly
a new block to contain it. We can't redirect to EXIT_BLOCK_PTR,
as we don't have explicit return instructions before epilogues
are generated, so give up on that case. */
/* If B does not have an incoming fallthru, then it can be moved
immediately before C without introducing or modifying jumps.
C cannot be the first block, so we do not have to worry about
accessing a non-existent block. */

if (c_fallthru_edge->dest != EXIT_BLOCK_PTR
&& merge_blocks_move_successor_nojumps (b, c))
{
basic_block target = c_fallthru_edge->dest;
rtx barrier;
basic_block new;

/* This is a dirty hack to avoid code duplication.

Set edge to point to wrong basic block, so
redirect_edge_and_branch_force will do the trick
and rewire edge back to the original location. */
redirect_edge_succ (c_fallthru_edge, ENTRY_BLOCK_PTR);
new = redirect_edge_and_branch_force (c_fallthru_edge, target);

/* We've just created barrier, but another barrier is
already present in the stream. Avoid the duplicate. */
barrier = next_nonnote_insn (new ? new->end : b->end);
if (GET_CODE (barrier) != BARRIER)
abort ();
flow_delete_insn (barrier);

return 1;
}

return 0;
if (b_has_incoming_fallthru)
{
if (b_fallthru_edge->src == ENTRY_BLOCK_PTR)
return false;
force_nonfallthru (b_fallthru_edge);
}
merge_blocks_move_predecessor_nojumps (b, c);
return true;
}
return 0;
return false;
}

/* Look through the insns at the end of BB1 and BB2 and find the longest

@@ -1191,6 +1168,7 @@ try_optimize_cfg (mode)
}

/* Delete all unreachable basic blocks. */

static bool
delete_unreachable_blocks ()
{

@@ -1215,7 +1193,6 @@ delete_unreachable_blocks ()
tidy_fallthru_edges ();
return changed;
}

/* Tidy the CFG by deleting unreachable code and whatnot. */

@@ -1231,9 +1208,6 @@ cleanup_cfg (mode)
if (try_optimize_cfg (mode))
delete_unreachable_blocks (), changed = true;

if (changed)
mark_critical_edges ();

/* Kill the data we won't maintain. */
free_EXPR_LIST_list (&label_value_list);
free_EXPR_LIST_list (&tail_recursion_label_list);
@@ -152,7 +152,7 @@ instrument_edges (el)
if (rtl_dump_file)
fprintf (rtl_dump_file, "Edge %d to %d instrumented%s\n",
e->src->index, e->dest->index,
e->flags & EDGE_CRITICAL ? " (and split)" : "");
EDGE_CRITICAL_P (e) ? " (and split)" : "");
need_func_profiler = 1;
insert_insn_on_edge (
gen_edge_profiler (total_num_edges_instrumented

@@ -884,7 +884,7 @@ find_spanning_tree (el)
for (i = 0; i < num_edges; i++)
{
edge e = INDEX_EDGE (el, i);
if ((e->flags & EDGE_CRITICAL)
if ((EDGE_CRITICAL_P (e))
&& !EDGE_INFO (e)->ignore
&& (find_group (e->src) != find_group (e->dest)))
{
@@ -2660,9 +2660,10 @@ convert_regs_1 (file, block)
beste = e;
else if (beste->count > e->count)
;
else if ((e->flags & EDGE_CRITICAL) != (beste->flags & EDGE_CRITICAL))
else if ((EDGE_CRITICAL_P (e) != 0)
!= (EDGE_CRITICAL_P (beste) != 0))
{
if (e->flags & EDGE_CRITICAL)
if (EDGE_CRITICAL_P (e))
beste = e;
}
else if (e->src->index < beste->src->index)
@@ -1498,8 +1498,7 @@ make_regs_equivalent_over_bad_edges (bb, reg_partition)

/* Scan incoming abnormal critical edges. */
for (e = b->pred; e; e = e->pred_next)
if ((e->flags & (EDGE_ABNORMAL | EDGE_CRITICAL))
== (EDGE_ABNORMAL | EDGE_CRITICAL))
if ((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e))
{
rtx *alt = phi_alternative (set, e->src->index);
int alt_regno;