Mirror of git://gcc.gnu.org/git/gcc.git (synced 2025-04-16 18:40:57 +08:00)

commit e0082a7265 (parent 17645b154d)

bb-reorder.c (make_reorder_chain, [...]): Use FOR_EACH_BB macros to iterate over basic block chain.

2002-05-23  Zdenek Dvorak  <rakdver@atrey.karlin.mff.cuni.cz>

	* bb-reorder.c (make_reorder_chain, make_reorder_chain_1): Use FOR_EACH_BB macros to iterate over basic block chain.
	* cfg.c (clear_edges, clear_bb_flags, dump_flow_info, alloc_aux_for_blocks, clear_aux_for_blocks, alloc_aux_for_edges): Likewise.
	* cfganal.c (set_edge_can_fallthru_flag, flow_call_edges_add, find_unreachable_blocks, create_edge_list, verify_edge_list, remove_fake_edges, add_noreturn_fake_exit_edges, flow_preorder_transversal_compute, flow_dfs_compute_reverse_execute): Likewise.
	* cfgbuild.c (make_edges, find_basic_blocks, find_many_sub_basic_blocks, find_sub_basic_blocks): Likewise.
	* cfgcleanup.c (try_optimize_cfg, delete_unreachable_blocks): Likewise.
	* cfglayout.c (record_effective_endpoints, cleanup_unconditional_jumps): Likewise.
	* cfgloop.c (flow_loops_cfg_dump, flow_loops_find): Likewise.
	* cfgrtl.c (compute_bb_for_insn, tidy_fallthru_edges, commit_edge_insertions, commit_edge_insertions_watch_calls, print_rtl_with_bb, verify_flow_info, purge_all_dead_edges): Likewise.
	* combine.c (combine_instructions, reg_dead_at_p): Likewise.
	* conflict.c (conflict_graph_compute): Likewise.
	* df.c (df_bitmaps_alloc, df_bitmaps_free, df_alloc, df_analyse_1, df_modified_p, df_refs_unlink, df_dump): Likewise.
	* dominance.c (calc_dfs_tree, calculate_dominance_info): Likewise.
	* final.c (compute_alignments): Likewise.
	* flow.c (update_life_info, update_life_info_in_dirty_blocks, delete_noop_moves, calculate_global_regs_live, allocate_bb_life_data, count_or_remove_death_notes): Likewise.
	* gcse.c (oprs_unchanged_p, record_last_reg_set_info, compute_hash_table, compute_kill_rd, compute_rd, compute_ae_kill, classic_gcse, compute_transp, cprop, compute_pre_data, compute_transpout, invalidate_nonnull_info, delete_null_pointer_checks_1, delete_null_pointer_checks, compute_code_hoist_vbeinout, hoist_code, compute_ld_motion_mems, compute_store_table, build_store_vectors, store_motion): Likewise.
	* global.c (global_conflicts, mark_elimination): Likewise.
	* graph.c (print_rtl_graph_with_bb): Likewise.
	* haifa-sched.c (sched_init): Likewise.
	* ifcvt.c (if_convert): Likewise.
	* lcm.c (compute_antinout_edge, compute_laterin, compute_insert_delete, compute_available, compute_nearerout, compute_rev_insert_delete, optimize_mode_switching): Likewise.
	* local-alloc.c (local_alloc, update_equiv_regs): Likewise.
	* predict.c (estimate_probability, note_prediction_to_br_prob, propagate_freq, counts_to_freqs, expensive_function_p, estimate_bb_frequencies): Likewise.
	* profile.c (instrument_edges, get_exec_counts, compute_branch_probabilities, compute_checksum, branch_prob, find_spanning_tree): Likewise.
	* recog.c (split_all_insns, peephole2_optimize): Likewise.
	* reg-stack.c (reg_to_stack, convert_regs_entry, convert_regs): Likewise.
	* regclass.c (scan_one_insn, regclass): Likewise.
	* regmove.c (mark_flags_life_zones, regmove_optimize, record_stack_memrefs): Likewise.
	* regrename.c (regrename_optimize, copyprop_hardreg_forward): Likewise.
	* reload1.c (reload, reload_combine, fixup_abnormal_edges): Likewise.
	* resource.c (find_basic_block): Likewise.
	* sched-ebb.c (schedule_ebbs): Likewise.
	* sched-rgn.c (is_cfg_nonregular, build_control_flow, find_single_block_region, find_rgns, schedule_insns): Likewise.
	* sibcall.c (optimize_sibling_and_tail_recursive_call): Likewise.
	* ssa-ccp.c (optimize_unexecutable_edges, ssa_ccp_df_delete_unreachable_insns): Likewise.
	* ssa-dce.c (ssa_eliminate_dead_code): Likewise.
	* ssa.c (find_evaluations, compute_dominance_frontiers_1, rename_block, convert_to_ssa, compute_conservative_reg_partition, compute_coalesced_reg_partition, rename_equivalent_regs, convert_from_ssa): Likewise.
	* config/ia64/ia64.c (emit_predicate_relation_info, process_epilogue, process_for_unwind_directive): Likewise.

	* df.c (FOR_ALL_BBS): Removed.
	* gcse.c (struct null_pointer_info): Type of current_block field changed.
	(struct reg_avail_info): Type of last_bb field changed.
	* config/ia64/ia64.c (block_num): Removed.
	(need_copy_state): Type changed.
	(last_block): New.

From-SVN: r53804
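
The FOR_EACH_BB family this commit converts everything to lives in basic-block.h. For orientation, here is a minimal self-contained sketch of the idea; the macro and sentinel names mirror the GCC ones, but the struct layout is simplified for illustration and is not the real definition:

    /* Sketch of the doubly linked basic-block chain the macros walk.
       ENTRY_BLOCK_PTR and EXIT_BLOCK_PTR are sentinel blocks, so the
       "real" blocks live strictly between them.  */
    typedef struct basic_block_def *basic_block;
    struct basic_block_def
    {
      int index;                    /* Block number, still kept for dumps.  */
      basic_block prev_bb, next_bb; /* Chain links.  */
    };

    /* Walk every real block, entry to exit, in chain order.  */
    #define FOR_EACH_BB(BB) \
      for ((BB) = ENTRY_BLOCK_PTR->next_bb; (BB) != EXIT_BLOCK_PTR; \
           (BB) = (BB)->next_bb)

    /* The same walk, backwards.  */
    #define FOR_EACH_BB_REVERSE(BB) \
      for ((BB) = EXIT_BLOCK_PTR->prev_bb; (BB) != ENTRY_BLOCK_PTR; \
           (BB) = (BB)->prev_bb)

    /* Walk from FROM up to (but not including) TO in direction DIR
       (next_bb or prev_bb).  Passing NULL as TO runs off the end of
       the chain, which is how callers below include the EXIT (or
       ENTRY) sentinel in the walk.  */
    #define FOR_BB_BETWEEN(BB, FROM, TO, DIR) \
      for ((BB) = (FROM); (BB) != (TO); (BB) = (BB)->DIR)

The point of the conversion is that iteration no longer depends on blocks being densely numbered 0..n_basic_blocks-1, which later allowed blocks to be created and deleted without renumbering.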
gcc/ChangeLog

@@ -1,3 +1,87 @@
+2002-05-23  Zdenek Dvorak  <rakdver@atrey.karlin.mff.cuni.cz>
+
+	* bb-reorder.c (make_reorder_chain, make_reorder_chain_1):
+	Use FOR_EACH_BB macros to iterate over basic block chain.
+	* cfg.c (clear_edges, clear_bb_flags, dump_flow_info,
+	alloc_aux_for_blocks, clear_aux_for_blocks, alloc_aux_for_edges):
+	Likewise.
+	* cfganal.c (set_edge_can_fallthru_flag, flow_call_edges_add,
+	find_unreachable_blocks, create_edge_list, verify_edge_list,
+	remove_fake_edges, add_noreturn_fake_exit_edges,
+	flow_preorder_transversal_compute, flow_dfs_compute_reverse_execute):
+	Likewise.
+	* cfgbuild.c (make_edges, find_basic_blocks, find_many_sub_basic_blocks,
+	find_sub_basic_blocks): Likewise.
+	* cfgcleanup.c (try_optimize_cfg, delete_unreachable_blocks):
+	Likewise.
+	* cfglayout.c (record_effective_endpoints, cleanup_unconditional_jumps):
+	Likewise.
+	* cfgloop.c (flow_loops_cfg_dump, flow_loops_find):
+	Likewise.
+	* cfgrtl.c (compute_bb_for_insn, tidy_fallthru_edges,
+	commit_edge_insertions, commit_edge_insertions_watch_calls,
+	print_rtl_with_bb, verify_flow_info, purge_all_dead_edges): Likewise.
+	* combine.c (combine_instructions, reg_dead_at_p): Likewise.
+	* conflict.c (conflict_graph_compute): Likewise.
+	* df.c (df_bitmaps_alloc, df_bitmaps_free, df_alloc, df_analyse_1,
+	df_modified_p, df_refs_unlink, df_dump): Likewise.
+	* dominance.c (calc_dfs_tree, calculate_dominance_info): Likewise.
+	* final.c (compute_alignments): Likewise.
+	* flow.c (update_life_info, update_life_info_in_dirty_blocks,
+	delete_noop_moves, calculate_global_regs_live, allocate_bb_life_data,
+	count_or_remove_death_notes): Likewise.
+	* gcse.c (oprs_unchanged_p, record_last_reg_set_info,
+	compute_hash_table, compute_kill_rd, compute_rd, compute_ae_kill,
+	classic_gcse, compute_transp, cprop, compute_pre_data,
+	compute_transpout, invalidate_nonnull_info,
+	delete_null_pointer_checks_1, delete_null_pointer_checks,
+	compute_code_hoist_vbeinout, hoist_code, compute_ld_motion_mems,
+	compute_store_table, build_store_vectors, store_motion): Likewise.
+	* global.c (global_conflicts, mark_elimination): Likewise.
+	* graph.c (print_rtl_graph_with_bb): Likewise.
+	* haifa-sched.c (sched_init): Likewise.
+	* ifcvt.c (if_convert): Likewise.
+	* lcm.c (compute_antinout_edge, compute_laterin, compute_insert_delete,
+	compute_available, compute_nearerout, compute_rev_insert_delete,
+	optimize_mode_switching): Likewise.
+	* local-alloc.c (local_alloc, update_equiv_regs): Likewise.
+	* predict.c (estimate_probability, note_prediction_to_br_prob,
+	propagate_freq, counts_to_freqs, expensive_function_p,
+	estimate_bb_frequencies): Likewise.
+	* profile.c (instrument_edges, get_exec_counts,
+	compute_branch_probabilities, compute_checksum, branch_prob,
+	find_spanning_tree): Likewise.
+	* recog.c (split_all_insns, peephole2_optimize): Likewise.
+	* reg-stack.c (reg_to_stack, convert_regs_entry, convert_regs):
+	Likewise.
+	* regclass.c (scan_one_insn, regclass): Likewise.
+	* regmove.c (mark_flags_life_zones, regmove_optimize,
+	record_stack_memrefs): Likewise.
+	* regrename.c (regrename_optimize, copyprop_hardreg_forward): Likewise.
+	* reload1.c (reload, reload_combine, fixup_abnormal_edges): Likewise.
+	* resource.c (find_basic_block): Likewise.
+	* sched-ebb.c (schedule_ebbs): Likewise.
+	* sched-rgn.c (is_cfg_nonregular, build_control_flow,
+	find_single_block_region, find_rgns, schedule_insns)
+	* sibcall.c (optimize_sibling_and_tail_recursive_call)
+	* ssa-ccp.c (optimize_unexecutable_edges,
+	ssa_ccp_df_delete_unreachable_insns): Likewise.
+	* ssa-dce.c (ssa_eliminate_dead_code): Likewise.
+	* ssa.c (find_evaluations, compute_dominance_frontiers_1,
+	rename_block, convert_to_ssa, compute_conservative_reg_partition,
+	compute_coalesced_reg_partition, rename_equivalent_regs,
+	convert_from_ssa): Likewise.
+	* config/ia64/ia64.c (emit_predicate_relation_info, process_epilogue,
+	process_for_unwind_directive): Likewise.
+
+	* df.c (FOR_ALL_BBS): Removed.
+	* gcse.c (struct null_pointer_info): Type of current_block field
+	changed.
+	(struct reg_avail_info): Type of last_bb field changed.
+	* config/ia64/ia64.c (block_num): Removed.
+	(need_copy_state): Type changed.
+	(last_block): New.
+
 2002-05-23  Neil Booth  <neil@daikokuya.demon.co.uk>
 
 	* cppinit.c (mark_named_operators): Split out from init_builtins.
gcc/bb-reorder.c

@@ -102,14 +102,11 @@ static void
 make_reorder_chain ()
 {
   basic_block prev = NULL;
-  int nbb_m1 = n_basic_blocks - 1;
-  basic_block next;
+  basic_block next, bb;
 
   /* Loop until we've placed every block.  */
   do
     {
-      int i;
-
       next = NULL;
 
       /* Find the next unplaced block.  */
@@ -119,12 +116,13 @@ make_reorder_chain ()
 	 remove from the list as we place.  The head of that list is
 	 what we're looking for here.  */
 
-      for (i = 0; i <= nbb_m1 && !next; ++i)
-	{
-	  basic_block bb = BASIC_BLOCK (i);
-	  if (! RBI (bb)->visited)
-	    next = bb;
-	}
+      FOR_EACH_BB (bb)
+	if (! RBI (bb)->visited)
+	  {
+	    next = bb;
+	    break;
+	  }
 
       if (next)
 	prev = make_reorder_chain_1 (next, prev);
@@ -164,7 +162,7 @@ make_reorder_chain_1 (bb, prev)
 	}
       else
 	{
-	  if (bb->index != 0)
+	  if (bb->prev_bb != ENTRY_BLOCK_PTR)
 	    abort ();
 	}
       RBI (bb)->visited = 1;
gcc/cfg.c

@@ -167,12 +167,11 @@ free_edge (e)
 void
 clear_edges ()
 {
-  int i;
+  basic_block bb;
   edge e;
 
-  for (i = 0; i < n_basic_blocks; ++i)
+  FOR_EACH_BB (bb)
     {
-      basic_block bb = BASIC_BLOCK (i);
       edge e = bb->succ;
 
       while (e)
@@ -480,11 +479,10 @@ redirect_edge_pred (e, new_pred)
 void
 clear_bb_flags ()
 {
-  int i;
-  ENTRY_BLOCK_PTR->flags = 0;
-  EXIT_BLOCK_PTR->flags = 0;
-  for (i = 0; i < n_basic_blocks; i++)
-    BASIC_BLOCK (i)->flags = 0;
+  basic_block bb;
+
+  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
+    bb->flags = 0;
 }
 
 void
@@ -492,6 +490,7 @@ dump_flow_info (file)
 	FILE *file;
 {
   int i;
+  basic_block bb;
   static const char * const reg_class_names[] = REG_CLASS_NAMES;
 
   fprintf (file, "%d registers.\n", max_regno);
@@ -539,9 +538,8 @@ dump_flow_info (file)
     }
 
   fprintf (file, "\n%d basic blocks, %d edges.\n", n_basic_blocks, n_edges);
-  for (i = 0; i < n_basic_blocks; i++)
+  FOR_EACH_BB (bb)
     {
-      basic_block bb = BASIC_BLOCK (i);
       edge e;
       int sum;
       gcov_type lsum;
@@ -709,13 +707,10 @@ alloc_aux_for_blocks (size)
   first_block_aux_obj = (char *) obstack_alloc (&block_aux_obstack, 0);
   if (size)
     {
-      int i;
+      basic_block bb;
 
-      for (i = 0; i < n_basic_blocks; i++)
-	alloc_aux_for_block (BASIC_BLOCK (i), size);
-
-      alloc_aux_for_block (ENTRY_BLOCK_PTR, size);
-      alloc_aux_for_block (EXIT_BLOCK_PTR, size);
+      FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
+	alloc_aux_for_block (bb, size);
     }
 }
 
@@ -724,13 +719,10 @@ alloc_aux_for_blocks (size)
 void
 clear_aux_for_blocks ()
 {
-  int i;
+  basic_block bb;
 
-  for (i = 0; i < n_basic_blocks; i++)
-    BASIC_BLOCK (i)->aux = NULL;
-
-  ENTRY_BLOCK_PTR->aux = NULL;
-  EXIT_BLOCK_PTR->aux = NULL;
+  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
+    bb->aux = NULL;
 }
 
 /* Free data allocated in block_aux_obstack and clear AUX pointers
@@ -784,16 +776,11 @@ alloc_aux_for_edges (size)
   first_edge_aux_obj = (char *) obstack_alloc (&edge_aux_obstack, 0);
   if (size)
     {
-      int i;
-      for (i = -1; i < n_basic_blocks; i++)
-	{
-	  basic_block bb;
-	  edge e;
+      basic_block bb;
 
-	  if (i >= 0)
-	    bb = BASIC_BLOCK (i);
-	  else
-	    bb = ENTRY_BLOCK_PTR;
+      FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
+	{
+	  edge e;
 
 	  for (e = bb->succ; e; e = e->succ_next)
 	    alloc_aux_for_edge (e, size);
@@ -806,18 +793,11 @@ alloc_aux_for_edges (size)
 void
 clear_aux_for_edges ()
 {
-  int i;
+  basic_block bb;
+  edge e;
 
-  for (i = -1; i < n_basic_blocks; i++)
+  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
     {
-      basic_block bb;
-      edge e;
-
-      if (i >= 0)
-	bb = BASIC_BLOCK (i);
-      else
-	bb = ENTRY_BLOCK_PTR;
-
       for (e = bb->succ; e; e = e->succ_next)
 	e->aux = NULL;
     }
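
A pattern worth noting in the cfg.c hunks: clear_bb_flags, alloc_aux_for_blocks and clear_aux_for_blocks want the entry and exit sentinels as well as the real blocks, which FOR_EACH_BB deliberately skips. Using the sketch near the top of this page, passing NULL as the terminator makes FOR_BB_BETWEEN run from the entry sentinel off the end of the chain, covering both:

    basic_block bb;

    /* Entry block, every real block, and exit block: the NULL
       terminator means "walk until the chain runs out".  */
    FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
      bb->flags = 0;

This replaces the old shape of "loop over indices, then handle ENTRY_BLOCK_PTR and EXIT_BLOCK_PTR by hand".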
gcc/cfganal.c

@@ -194,10 +194,10 @@ mark_dfs_back_edges ()
 void
 set_edge_can_fallthru_flag ()
 {
-  int i;
-  for (i = 0; i < n_basic_blocks; i++)
+  basic_block bb;
+
+  FOR_EACH_BB (bb)
     {
-      basic_block bb = BASIC_BLOCK (i);
       edge e;
 
       /* The FALLTHRU edge is also CAN_FALLTHRU edge.  */
@@ -259,7 +259,7 @@ flow_call_edges_add (blocks)
   int i;
   int blocks_split = 0;
   int bb_num = 0;
-  basic_block *bbs;
+  basic_block *bbs, bb;
   bool check_last_block = false;
 
   /* Map bb indices into basic block pointers since split_block
@@ -269,8 +269,8 @@ flow_call_edges_add (blocks)
 
   if (! blocks)
     {
-      for (i = 0; i < n_basic_blocks; i++)
-	bbs[bb_num++] = BASIC_BLOCK (i);
+      FOR_EACH_BB (bb)
+	bbs[bb_num++] = bb;
 
       check_last_block = true;
     }
@@ -386,16 +386,15 @@ void
 find_unreachable_blocks ()
 {
   edge e;
-  int i, n;
-  basic_block *tos, *worklist;
+  basic_block *tos, *worklist, bb;
 
-  n = n_basic_blocks;
-  tos = worklist = (basic_block *) xmalloc (sizeof (basic_block) * n);
+  tos = worklist =
+    (basic_block *) xmalloc (sizeof (basic_block) * n_basic_blocks);
 
   /* Clear all the reachability flags.  */
 
-  for (i = 0; i < n; ++i)
-    BASIC_BLOCK (i)->flags &= ~BB_REACHABLE;
+  FOR_EACH_BB (bb)
+    bb->flags &= ~BB_REACHABLE;
 
   /* Add our starting points to the worklist.  Almost always there will
      be only one.  It isn't inconceivable that we might one day directly
@@ -445,8 +444,8 @@ create_edge_list ()
   struct edge_list *elist;
   edge e;
   int num_edges;
-  int x;
   int block_count;
+  basic_block bb;
 
   block_count = n_basic_blocks + 2; /* Include the entry and exit blocks.  */
 
@@ -454,18 +453,12 @@ create_edge_list ()
 
   /* Determine the number of edges in the flow graph by counting successor
      edges on each basic block.  */
-  for (x = 0; x < n_basic_blocks; x++)
+  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
     {
-      basic_block bb = BASIC_BLOCK (x);
-
       for (e = bb->succ; e; e = e->succ_next)
 	num_edges++;
     }
 
-  /* Don't forget successors of the entry block.  */
-  for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
-    num_edges++;
-
   elist = (struct edge_list *) xmalloc (sizeof (struct edge_list));
   elist->num_blocks = block_count;
   elist->num_edges = num_edges;
@@ -473,18 +466,10 @@ create_edge_list ()
 
   num_edges = 0;
 
-  /* Follow successors of the entry block, and register these edges.  */
-  for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
-    elist->index_to_edge[num_edges++] = e;
-
-  for (x = 0; x < n_basic_blocks; x++)
-    {
-      basic_block bb = BASIC_BLOCK (x);
-
-      /* Follow all successors of blocks, and register these edges.  */
-      for (e = bb->succ; e; e = e->succ_next)
-	elist->index_to_edge[num_edges++] = e;
-    }
+  /* Follow successors of blocks, and register these edges.  */
+  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
+    for (e = bb->succ; e; e = e->succ_next)
+      elist->index_to_edge[num_edges++] = e;
 
   return elist;
 }
@@ -538,13 +523,12 @@ verify_edge_list (f, elist)
 	FILE *f;
 	struct edge_list *elist;
 {
-  int x, pred, succ, index;
+  int pred, succ, index;
   edge e;
+  basic_block bb, p, s;
 
-  for (x = 0; x < n_basic_blocks; x++)
+  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
     {
-      basic_block bb = BASIC_BLOCK (x);
-
       for (e = bb->succ; e; e = e->succ_next)
 	{
 	  pred = e->src->index;
@@ -565,33 +549,12 @@ verify_edge_list (f, elist)
 	}
     }
 
-  for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
-    {
-      pred = e->src->index;
-      succ = e->dest->index;
-      index = EDGE_INDEX (elist, e->src, e->dest);
-      if (index == EDGE_INDEX_NO_EDGE)
-	{
-	  fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
-	  continue;
-	}
-
-      if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
-	fprintf (f, "*p* Pred for index %d should be %d not %d\n",
-		 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
-      if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
-	fprintf (f, "*p* Succ for index %d should be %d not %d\n",
-		 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
-    }
-
-  /* We've verified that all the edges are in the list, no lets make sure
+  /* We've verified that all the edges are in the list, now lets make sure
      there are no spurious edges in the list.  */
 
-  for (pred = 0; pred < n_basic_blocks; pred++)
-    for (succ = 0; succ < n_basic_blocks; succ++)
+  FOR_BB_BETWEEN (p, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
+    FOR_BB_BETWEEN (s, ENTRY_BLOCK_PTR->next_bb, NULL, next_bb)
       {
-	basic_block p = BASIC_BLOCK (pred);
-	basic_block s = BASIC_BLOCK (succ);
 	int found_edge = 0;
 
 	for (e = p->succ; e; e = e->succ_next)
@@ -608,78 +571,15 @@ verify_edge_list (f, elist)
 	      break;
 	    }
 
-	if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
+	if (EDGE_INDEX (elist, p, s)
 	    == EDGE_INDEX_NO_EDGE && found_edge != 0)
 	  fprintf (f, "*** Edge (%d, %d) appears to not have an index\n",
-		   pred, succ);
-	if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
+		   p->index, s->index);
+	if (EDGE_INDEX (elist, p, s)
 	    != EDGE_INDEX_NO_EDGE && found_edge == 0)
 	  fprintf (f, "*** Edge (%d, %d) has index %d, but there is no edge\n",
-		   pred, succ, EDGE_INDEX (elist, BASIC_BLOCK (pred),
-					    BASIC_BLOCK (succ)));
+		   p->index, s->index, EDGE_INDEX (elist, p, s));
       }
-
-  for (succ = 0; succ < n_basic_blocks; succ++)
-    {
-      basic_block p = ENTRY_BLOCK_PTR;
-      basic_block s = BASIC_BLOCK (succ);
-      int found_edge = 0;
-
-      for (e = p->succ; e; e = e->succ_next)
-	if (e->dest == s)
-	  {
-	    found_edge = 1;
-	    break;
-	  }
-
-      for (e = s->pred; e; e = e->pred_next)
-	if (e->src == p)
-	  {
-	    found_edge = 1;
-	    break;
-	  }
-
-      if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
-	  == EDGE_INDEX_NO_EDGE && found_edge != 0)
-	fprintf (f, "*** Edge (entry, %d) appears to not have an index\n",
-		 succ);
-      if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
-	  != EDGE_INDEX_NO_EDGE && found_edge == 0)
-	fprintf (f, "*** Edge (entry, %d) has index %d, but no edge exists\n",
-		 succ, EDGE_INDEX (elist, ENTRY_BLOCK_PTR,
-				   BASIC_BLOCK (succ)));
-    }
-
-  for (pred = 0; pred < n_basic_blocks; pred++)
-    {
-      basic_block p = BASIC_BLOCK (pred);
-      basic_block s = EXIT_BLOCK_PTR;
-      int found_edge = 0;
-
-      for (e = p->succ; e; e = e->succ_next)
-	if (e->dest == s)
-	  {
-	    found_edge = 1;
-	    break;
-	  }
-
-      for (e = s->pred; e; e = e->pred_next)
-	if (e->src == p)
-	  {
-	    found_edge = 1;
-	    break;
-	  }
-
-      if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
-	  == EDGE_INDEX_NO_EDGE && found_edge != 0)
-	fprintf (f, "*** Edge (%d, exit) appears to not have an index\n",
-		 pred);
-      if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
-	  != EDGE_INDEX_NO_EDGE && found_edge == 0)
-	fprintf (f, "*** Edge (%d, exit) has index %d, but no edge exists\n",
-		 pred, EDGE_INDEX (elist, BASIC_BLOCK (pred),
-				   EXIT_BLOCK_PTR));
-    }
 }
 
 /* This routine will determine what, if any, edge there is between
@@ -768,13 +668,10 @@ remove_fake_successors (bb)
 void
 remove_fake_edges ()
 {
-  int x;
+  basic_block bb;
 
-  for (x = 0; x < n_basic_blocks; x++)
-    remove_fake_successors (BASIC_BLOCK (x));
-
-  /* We've handled all successors except the entry block's.  */
-  remove_fake_successors (ENTRY_BLOCK_PTR);
+  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
+    remove_fake_successors (bb);
 }
 
 /* This function will add a fake edge between any block which has no
@@ -784,11 +681,11 @@ remove_fake_edges ()
 void
 add_noreturn_fake_exit_edges ()
 {
-  int x;
+  basic_block bb;
 
-  for (x = 0; x < n_basic_blocks; x++)
-    if (BASIC_BLOCK (x)->succ == NULL)
-      make_single_succ_edge (BASIC_BLOCK (x), EXIT_BLOCK_PTR, EDGE_FAKE);
+  FOR_EACH_BB (bb)
+    if (bb->succ == NULL)
+      make_single_succ_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
 }
 
 /* This function adds a fake edge between any infinite loops to the
@@ -1014,6 +911,7 @@ flow_preorder_transversal_compute (pot_order)
   sbitmap visited;
   struct dfst_node *node;
   struct dfst_node *dfst;
+  basic_block bb;
 
   /* Allocate stack for back-tracking up CFG.  */
   stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
@@ -1023,10 +921,10 @@ flow_preorder_transversal_compute (pot_order)
   dfst = (struct dfst_node *) xcalloc (n_basic_blocks,
 				       sizeof (struct dfst_node));
 
-  for (i = 0; i < n_basic_blocks; i++)
+  FOR_EACH_BB (bb)
     {
       max_successors = 0;
-      for (e = BASIC_BLOCK (i)->succ; e; e = e->succ_next)
+      for (e = bb->succ; e; e = e->succ_next)
 	max_successors++;
 
       dfst[i].node
@@ -1183,7 +1081,6 @@ flow_dfs_compute_reverse_execute (data)
 {
   basic_block bb;
   edge e;
-  int i;
 
   while (data->sp > 0)
     {
@@ -1197,9 +1094,9 @@ flow_dfs_compute_reverse_execute (data)
     }
 
   /* Determine if there are unvisited basic blocks.  */
-  for (i = n_basic_blocks - (INVALID_BLOCK + 1); --i >= 0; )
-    if (!TEST_BIT (data->visited_blocks, i))
-      return BASIC_BLOCK (i + (INVALID_BLOCK + 1));
+  FOR_BB_BETWEEN (bb, EXIT_BLOCK_PTR, NULL, prev_bb)
+    if (!TEST_BIT (data->visited_blocks, bb->index - (INVALID_BLOCK + 1)))
+      return bb;
 
   return NULL;
 }
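
The verify_edge_list rewrite above is where the new iterators pay off most. Because FOR_BB_BETWEEN can start at ENTRY_BLOCK_PTR and stop only at NULL, the entry and exit sentinels are visited by the same loop as ordinary blocks, so the two hand-written follow-up loops over (entry, succ) and (pred, exit) pairs simply disappear. A hedged sketch of the resulting shape, reusing the simplified types from the sketch at the top of the page (check_pair is a hypothetical helper standing in for the verification body):

    /* Every (p, s) pair: p ranges over entry + real blocks, s over
       real blocks + exit -- one loop nest instead of three loops.  */
    basic_block p, s;
    FOR_BB_BETWEEN (p, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
      FOR_BB_BETWEEN (s, ENTRY_BLOCK_PTR->next_bb, NULL, next_bb)
        check_pair (p, s);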
gcc/cfgbuild.c

@@ -284,7 +284,7 @@ make_edges (label_value_list, min, max, update_p)
 	basic_block min, max;
 	int update_p;
 {
-  int i;
+  basic_block bb;
   sbitmap *edge_cache = NULL;
 
   /* Assume no computed jump; revise as we create edges.  */
@@ -299,24 +299,24 @@ make_edges (label_value_list, min, max, update_p)
       sbitmap_vector_zero (edge_cache, n_basic_blocks);
 
       if (update_p)
-	for (i = min->index; i <= max->index; ++i)
+	FOR_BB_BETWEEN (bb, min, max->next_bb, next_bb)
 	  {
 	    edge e;
 
-	    for (e = BASIC_BLOCK (i)->succ; e ; e = e->succ_next)
+	    for (e = bb->succ; e ; e = e->succ_next)
 	      if (e->dest != EXIT_BLOCK_PTR)
-		SET_BIT (edge_cache[i], e->dest->index);
+		SET_BIT (edge_cache[bb->index], e->dest->index);
 	  }
     }
 
-  /* By nature of the way these get numbered, block 0 is always the entry.  */
+  /* By nature of the way these get numbered, ENTRY_BLOCK_PTR->next_bb block
+     is always the entry.  */
   if (min == ENTRY_BLOCK_PTR->next_bb)
     cached_make_edge (edge_cache, ENTRY_BLOCK_PTR, min,
 		      EDGE_FALLTHRU);
 
-  for (i = min->index; i <= max->index; ++i)
+  FOR_BB_BETWEEN (bb, min, max->next_bb, next_bb)
     {
-      basic_block bb = BASIC_BLOCK (i);
       rtx insn, x;
       enum rtx_code code;
       int force_fallthru = 0;
@@ -614,6 +614,8 @@ find_basic_blocks (f, nregs, file)
 	FILE *file ATTRIBUTE_UNUSED;
 {
   int max_uid;
+  basic_block bb;
+
   timevar_push (TV_CFG);
 
   basic_block_for_insn = 0;
@@ -621,15 +623,13 @@ find_basic_blocks (f, nregs, file)
   /* Flush out existing data.  */
   if (basic_block_info != NULL)
     {
-      int i;
-
       clear_edges ();
 
       /* Clear bb->aux on all extant basic blocks.  We'll use this as a
 	 tag for reuse during create_basic_block, just in case some pass
 	 copies around basic block notes improperly.  */
-      for (i = 0; i < n_basic_blocks; ++i)
-	BASIC_BLOCK (i)->aux = NULL;
+      FOR_EACH_BB (bb)
+	bb->aux = NULL;
 
       VARRAY_FREE (basic_block_info);
     }
@@ -794,55 +794,53 @@ void
 find_many_sub_basic_blocks (blocks)
 	sbitmap blocks;
 {
-  int i;
-  int min, max;
+  basic_block bb, min, max;
 
-  for (i = 0; i < n_basic_blocks; i++)
-    SET_STATE (BASIC_BLOCK (i),
-	       TEST_BIT (blocks, i) ? BLOCK_TO_SPLIT : BLOCK_ORIGINAL);
+  FOR_EACH_BB (bb)
+    SET_STATE (bb,
+	       TEST_BIT (blocks, bb->index) ? BLOCK_TO_SPLIT : BLOCK_ORIGINAL);
 
-  for (i = 0; i < n_basic_blocks; i++)
-    if (STATE (BASIC_BLOCK (i)) == BLOCK_TO_SPLIT)
-      find_bb_boundaries (BASIC_BLOCK (i));
+  FOR_EACH_BB (bb)
+    if (STATE (bb) == BLOCK_TO_SPLIT)
+      find_bb_boundaries (bb);
 
-  for (i = 0; i < n_basic_blocks; i++)
-    if (STATE (BASIC_BLOCK (i)) != BLOCK_ORIGINAL)
+  FOR_EACH_BB (bb)
+    if (STATE (bb) != BLOCK_ORIGINAL)
      break;
 
-  min = max = i;
-  for (; i < n_basic_blocks; i++)
-    if (STATE (BASIC_BLOCK (i)) != BLOCK_ORIGINAL)
-      max = i;
+  min = max = bb;
+  for (; bb != EXIT_BLOCK_PTR; bb = bb->next_bb)
+    if (STATE (bb) != BLOCK_ORIGINAL)
+      max = bb;
 
   /* Now re-scan and wire in all edges.  This expect simple (conditional)
      jumps at the end of each new basic blocks.  */
-  make_edges (NULL, BASIC_BLOCK (min), BASIC_BLOCK (max), 1);
+  make_edges (NULL, min, max, 1);
 
   /* Update branch probabilities.  Expect only (un)conditional jumps
     to be created with only the forward edges.  */
-  for (i = min; i <= max; i++)
+  FOR_BB_BETWEEN (bb, min, max->next_bb, next_bb)
     {
      edge e;
-      basic_block b = BASIC_BLOCK (i);
 
-      if (STATE (b) == BLOCK_ORIGINAL)
+      if (STATE (bb) == BLOCK_ORIGINAL)
 	continue;
-      if (STATE (b) == BLOCK_NEW)
+      if (STATE (bb) == BLOCK_NEW)
 	{
-	  b->count = 0;
-	  b->frequency = 0;
-	  for (e = b->pred; e; e=e->pred_next)
+	  bb->count = 0;
+	  bb->frequency = 0;
+	  for (e = bb->pred; e; e=e->pred_next)
 	    {
-	      b->count += e->count;
-	      b->frequency += EDGE_FREQUENCY (e);
+	      bb->count += e->count;
+	      bb->frequency += EDGE_FREQUENCY (e);
 	    }
 	}
 
-      compute_outgoing_frequencies (b);
+      compute_outgoing_frequencies (bb);
     }
 
-  for (i = 0; i < n_basic_blocks; i++)
-    SET_STATE (BASIC_BLOCK (i), 0);
+  FOR_EACH_BB (bb)
+    SET_STATE (bb, 0);
 }
 
 /* Like above but for single basic block only.  */
@@ -851,26 +849,24 @@ void
 find_sub_basic_blocks (bb)
 	basic_block bb;
 {
-  int i;
-  int min, max;
+  basic_block min, max, b;
   basic_block next = bb->next_bb;
 
-  min = bb->index;
+  min = bb;
   find_bb_boundaries (bb);
-  max = next->prev_bb->index;
+  max = next->prev_bb;
 
   /* Now re-scan and wire in all edges.  This expect simple (conditional)
     jumps at the end of each new basic blocks.  */
-  make_edges (NULL, BASIC_BLOCK (min), BASIC_BLOCK (max), 1);
+  make_edges (NULL, min, max, 1);
 
   /* Update branch probabilities.  Expect only (un)conditional jumps
    to be created with only the forward edges.  */
-  for (i = min; i <= max; i++)
+  FOR_BB_BETWEEN (b, min, max->next_bb, next_bb)
    {
      edge e;
-      basic_block b = BASIC_BLOCK (i);
 
-      if (i != min)
+      if (b != min)
	{
	  b->count = 0;
	  b->frequency = 0;
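
One idiom recurs in the cfgbuild.c hunks: find_many_sub_basic_blocks and find_sub_basic_blocks keep a min/max range, now as block pointers instead of indices. Since FOR_BB_BETWEEN takes an exclusive terminator, an inclusive range [min, max] is expressed by passing max->next_bb. A tiny sketch (process is a hypothetical stand-in for the per-block work):

    /* Visit min, ..., max inclusive, in chain order.  */
    basic_block bb;
    FOR_BB_BETWEEN (bb, min, max->next_bb, next_bb)
      process (bb);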
gcc/cfgcleanup.c

@@ -1577,16 +1577,16 @@ static bool
 try_optimize_cfg (mode)
 	int mode;
 {
-  int i;
   bool changed_overall = false;
   bool changed;
   int iterations = 0;
+  basic_block bb, b;
 
   if (mode & CLEANUP_CROSSJUMP)
     add_noreturn_fake_exit_edges ();
 
-  for (i = 0; i < n_basic_blocks; i++)
-    update_forwarder_flag (BASIC_BLOCK (i));
+  FOR_EACH_BB (bb)
+    update_forwarder_flag (bb);
 
   if (mode & CLEANUP_UPDATE_LIFE)
     clear_bb_flags ();
@@ -1606,9 +1606,9 @@ try_optimize_cfg (mode)
 		     "\n\ntry_optimize_cfg iteration %i\n\n",
 		     iterations);
 
-	  for (i = 0; i < n_basic_blocks;)
+	  for (b = ENTRY_BLOCK_PTR->next_bb; b != EXIT_BLOCK_PTR;)
 	    {
-	      basic_block c, b = BASIC_BLOCK (i);
+	      basic_block c;
 	      edge s;
 	      bool changed_here = false;
 
@@ -1721,7 +1721,7 @@ try_optimize_cfg (mode)
 	      /* Don't get confused by the index shift caused by
 		 deleting blocks.  */
 	      if (!changed_here)
-		i = b->index + 1;
+		b = b->next_bb;
 	      else
 		changed = true;
 	    }
@@ -1753,8 +1753,9 @@ try_optimize_cfg (mode)
 bool
 delete_unreachable_blocks ()
 {
-  int i, j;
   bool changed = false;
+  basic_block b, next_bb;
+  int j = 0;
 
   find_unreachable_blocks ();
 
@@ -1762,9 +1763,9 @@ delete_unreachable_blocks ()
      as otherwise we can wind up with O(N^2) behaviour here when we
     have oodles of dead code.  */
 
-  for (i = j = 0; i < n_basic_blocks; ++i)
+  for (b = ENTRY_BLOCK_PTR->next_bb; b != EXIT_BLOCK_PTR; b = next_bb)
     {
-      basic_block b = BASIC_BLOCK (i);
+      next_bb = b->next_bb;
 
       if (!(b->flags & BB_REACHABLE))
	{
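
delete_unreachable_blocks shows the one real subtlety of pointer-chasing iteration: when the loop body may delete the current block, its next_bb link must be saved before the body runs. A minimal sketch of the idiom, with flow_delete_block standing in for whatever actually frees the block:

    basic_block b, next;
    for (b = ENTRY_BLOCK_PTR->next_bb; b != EXIT_BLOCK_PTR; b = next)
      {
        next = b->next_bb;          /* grab the link first ...  */
        if (!(b->flags & BB_REACHABLE))
          flow_delete_block (b);    /* ... so deleting B is safe.  */
      }

With the old index-based loop the equivalent care was the "index shift" bookkeeping that try_optimize_cfg used to need; the pointer form makes the hazard explicit and local.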
gcc/cfglayout.c

@@ -191,11 +191,10 @@ static void
 record_effective_endpoints ()
 {
   rtx next_insn = get_insns ();
-  int i;
+  basic_block bb;
 
-  for (i = 0; i < n_basic_blocks; i++)
+  FOR_EACH_BB (bb)
     {
-      basic_block bb = BASIC_BLOCK (i);
       rtx end;
 
       if (PREV_INSN (bb->head) && next_insn != bb->head)
@@ -597,11 +596,10 @@ verify_insn_chain ()
 static void
 cleanup_unconditional_jumps ()
 {
-  int i;
-  for (i = 0; i < n_basic_blocks; i++)
-    {
-      basic_block bb = BASIC_BLOCK (i);
+  basic_block bb;
 
+  FOR_EACH_BB (bb)
+    {
       if (!bb->succ)
 	continue;
       if (bb->succ->flags & EDGE_FALLTHRU)
@@ -609,12 +607,11 @@ cleanup_unconditional_jumps ()
 	  if (!bb->succ->succ_next)
 	    {
 	      rtx insn;
-	      if (GET_CODE (bb->head) != CODE_LABEL && forwarder_block_p (bb) && i)
+	      if (GET_CODE (bb->head) != CODE_LABEL && forwarder_block_p (bb)
+		  && bb->prev_bb != ENTRY_BLOCK_PTR)
 		{
 		  basic_block prev = bb->prev_bb;
 
-		  i--;
-
 		  if (rtl_dump_file)
 		    fprintf (rtl_dump_file, "Removing forwarder BB %i\n",
 			     bb->index);
gcc/cfgloop.c

@@ -50,16 +50,17 @@ flow_loops_cfg_dump (loops, file)
 	FILE *file;
 {
   int i;
+  basic_block bb;
 
   if (! loops->num || ! file || ! loops->cfg.dom)
     return;
 
-  for (i = 0; i < n_basic_blocks; i++)
+  FOR_EACH_BB (bb)
     {
       edge succ;
 
-      fprintf (file, ";; %d succs { ", i);
-      for (succ = BASIC_BLOCK (i)->succ; succ; succ = succ->succ_next)
+      fprintf (file, ";; %d succs { ", bb->index);
+      for (succ = bb->succ; succ; succ = succ->succ_next)
 	fprintf (file, "%d ", succ->dest->index);
       flow_nodes_print ("} dom", loops->cfg.dom[i], file);
     }
@@ -643,6 +644,7 @@ flow_loops_find (loops, flags)
   sbitmap *dom;
   int *dfs_order;
   int *rc_order;
+  basic_block header;
 
   /* This function cannot be repeatedly called with different
      flags to build up the loop information.  The loop tree
@@ -667,11 +669,8 @@ flow_loops_find (loops, flags)
   /* Count the number of loop edges (back edges).  This should be the
      same as the number of natural loops.  */
   num_loops = 0;
-  for (b = 0; b < n_basic_blocks; b++)
+  FOR_EACH_BB (header)
     {
-      basic_block header;
-
-      header = BASIC_BLOCK (b);
       header->loop_depth = 0;
 
       for (e = header->pred; e; e = e->pred_next)
@@ -684,10 +683,7 @@ flow_loops_find (loops, flags)
 	     loop.  It also has single back edge to the header
 	     from a latch node.  Note that multiple natural loops
 	     may share the same header.  */
-	  if (b != header->index)
-	    abort ();
-
-	  if (latch != ENTRY_BLOCK_PTR && TEST_BIT (dom[latch->index], b))
+	  if (latch != ENTRY_BLOCK_PTR && TEST_BIT (dom[latch->index], header->index))
 	    num_loops++;
 	}
     }
gcc/cfgrtl.c

@@ -448,16 +448,15 @@ void
 compute_bb_for_insn (max)
 	int max;
 {
-  int i;
+  basic_block bb;
 
   if (basic_block_for_insn)
     VARRAY_FREE (basic_block_for_insn);
 
   VARRAY_BB_INIT (basic_block_for_insn, max, "basic_block_for_insn");
 
-  for (i = 0; i < n_basic_blocks; ++i)
+  FOR_EACH_BB (bb)
     {
-      basic_block bb = BASIC_BLOCK (i);
       rtx end = bb->end;
       rtx insn;
 
@@ -1168,14 +1167,17 @@ tidy_fallthru_edge (e, b, c)
 void
 tidy_fallthru_edges ()
 {
-  int i;
+  basic_block b, c;
 
-  for (i = 1; i < n_basic_blocks; i++)
+  if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
+    return;
+
+  FOR_BB_BETWEEN (b, ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR->prev_bb, next_bb)
     {
-      basic_block c = BASIC_BLOCK (i);
-      basic_block b = c->prev_bb;
       edge s;
 
+      c = b->next_bb;
+
       /* We care about simple conditional or unconditional jumps with
 	 a single successor.
 
@@ -1476,16 +1478,13 @@ commit_one_edge_insertion (e, watch_calls)
 void
 commit_edge_insertions ()
 {
-  int i;
   basic_block bb;
 
 #ifdef ENABLE_CHECKING
   verify_flow_info ();
 #endif
 
-  i = -1;
-  bb = ENTRY_BLOCK_PTR;
-  while (1)
+  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
     {
       edge e, next;
 
@@ -1495,10 +1494,6 @@ commit_edge_insertions ()
 	  if (e->insns)
 	    commit_one_edge_insertion (e, false);
 	}
-
-      if (++i >= n_basic_blocks)
-	break;
-      bb = BASIC_BLOCK (i);
     }
 }
 
@@ -1508,16 +1503,13 @@ commit_edge_insertions ()
 void
 commit_edge_insertions_watch_calls ()
 {
-  int i;
   basic_block bb;
 
 #ifdef ENABLE_CHECKING
   verify_flow_info ();
 #endif
 
-  i = -1;
-  bb = ENTRY_BLOCK_PTR;
-  while (1)
+  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
     {
       edge e, next;
 
@@ -1527,10 +1519,6 @@ commit_edge_insertions_watch_calls ()
 	  if (e->insns)
 	    commit_one_edge_insertion (e, true);
 	}
-
-      if (++i >= n_basic_blocks)
-	break;
-      bb = BASIC_BLOCK (i);
     }
 }
 
@@ -1601,7 +1589,6 @@ print_rtl_with_bb (outf, rtx_first)
     fprintf (outf, "(nil)\n");
   else
     {
-      int i;
       enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
       int max_uid = get_max_uid ();
       basic_block *start
@@ -1611,9 +1598,10 @@ print_rtl_with_bb (outf, rtx_first)
       enum bb_state *in_bb_p
 	= (enum bb_state *) xcalloc (max_uid, sizeof (enum bb_state));
+      basic_block bb;
 
-      for (i = n_basic_blocks - 1; i >= 0; i--)
+      FOR_EACH_BB_REVERSE (bb)
 	{
-	  basic_block bb = BASIC_BLOCK (i);
 	  rtx x;
 
 	  start[INSN_UID (bb->head)] = bb;
@@ -1634,7 +1622,6 @@ print_rtl_with_bb (outf, rtx_first)
       for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
 	{
 	  int did_output;
-	  basic_block bb;
 
 	  if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
 	    {
@@ -1721,7 +1708,7 @@ verify_flow_info ()
   basic_block *bb_info, *last_visited;
   size_t *edge_checksum;
   rtx x;
-  int i, last_bb_num_seen, num_bb_notes, err = 0;
+  int i, num_bb_notes, err = 0;
   basic_block bb, last_bb_seen;
 
   bb_info = (basic_block *) xcalloc (max_uid, sizeof (basic_block));
@@ -1765,9 +1752,8 @@ verify_flow_info ()
       last_bb_seen = bb;
     }
 
-  for (i = n_basic_blocks - 1; i >= 0; i--)
+  FOR_EACH_BB_REVERSE (bb)
     {
-      basic_block bb = BASIC_BLOCK (i);
       rtx head = bb->head;
       rtx end = bb->end;
 
@@ -1813,9 +1799,8 @@ verify_flow_info ()
     }
 
   /* Now check the basic blocks (boundaries etc.) */
-  for (i = n_basic_blocks - 1; i >= 0; i--)
+  FOR_EACH_BB_REVERSE (bb)
     {
-      basic_block bb = BASIC_BLOCK (i);
       int n_fallthru = 0, n_eh = 0, n_call = 0, n_abnormal = 0, n_branch = 0;
       edge e;
       rtx note;
@@ -2087,8 +2072,9 @@ verify_flow_info ()
       err = 1;
     }
 
-  last_bb_num_seen = -1;
   num_bb_notes = 0;
+  last_bb_seen = ENTRY_BLOCK_PTR;
+
   for (x = rtx_first; x; x = NEXT_INSN (x))
     {
       if (NOTE_INSN_BASIC_BLOCK_P (x))
@@ -2096,10 +2082,10 @@ verify_flow_info ()
 	  basic_block bb = NOTE_BASIC_BLOCK (x);
 
 	  num_bb_notes++;
-	  if (bb->index != last_bb_num_seen + 1)
+	  if (bb != last_bb_seen->next_bb)
 	    internal_error ("basic blocks not numbered consecutively");
 
-	  last_bb_num_seen = bb->index;
+	  last_bb_seen = bb;
 	}
 
       if (!bb_info[INSN_UID (x)])
@@ -2325,8 +2311,9 @@ bool
 purge_all_dead_edges (update_life_p)
 	int update_life_p;
 {
-  int i, purged = false;
+  int purged = false;
   sbitmap blocks = 0;
+  basic_block bb;
 
   if (update_life_p)
     {
@@ -2334,13 +2321,13 @@ purge_all_dead_edges (update_life_p)
       sbitmap_zero (blocks);
     }
 
-  for (i = 0; i < n_basic_blocks; i++)
+  FOR_EACH_BB (bb)
     {
-      bool purged_here = purge_dead_edges (BASIC_BLOCK (i));
+      bool purged_here = purge_dead_edges (bb);
 
       purged |= purged_here;
       if (purged_here && update_life_p)
-	SET_BIT (blocks, i);
+	SET_BIT (blocks, bb->index);
     }
 
   if (update_life_p && purged)
gcc/combine.c

@@ -610,133 +610,132 @@ combine_instructions (f, nregs)

   /* Now scan all the insns in forward order.  */

-  this_basic_block = ENTRY_BLOCK_PTR;
   label_tick = 1;
   last_call_cuid = 0;
   mem_last_set = 0;
   init_reg_last_arrays ();
   setup_incoming_promotions ();

-  for (insn = f; insn; insn = next ? next : NEXT_INSN (insn))
+  FOR_EACH_BB (this_basic_block)
     {
-      next = 0;
-
-      /* If INSN starts a new basic block, update our basic block number.  */
-      if (this_basic_block->next_bb != EXIT_BLOCK_PTR
-	  && this_basic_block->next_bb->head == insn)
-	this_basic_block = this_basic_block->next_bb;
-
-      if (GET_CODE (insn) == CODE_LABEL)
-	label_tick++;
-
-      else if (INSN_P (insn))
+      for (insn = this_basic_block->head;
+           insn != NEXT_INSN (this_basic_block->end);
+	   insn = next ? next : NEXT_INSN (insn))
 	{
-	  /* See if we know about function return values before this
-	     insn based upon SUBREG flags.  */
-	  check_promoted_subreg (insn, PATTERN (insn));
+	  next = 0;
 
-	  /* Try this insn with each insn it links back to.  */
+	  if (GET_CODE (insn) == CODE_LABEL)
+	    label_tick++;
 
-	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
-	    if ((next = try_combine (insn, XEXP (links, 0),
-				     NULL_RTX, &new_direct_jump_p)) != 0)
-	      goto retry;
-
-	  /* Try each sequence of three linked insns ending with this one.  */
-
-	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
+	  else if (INSN_P (insn))
 	    {
-	      rtx link = XEXP (links, 0);
+	      /* See if we know about function return values before this
+		 insn based upon SUBREG flags.  */
+	      check_promoted_subreg (insn, PATTERN (insn));
 
-	      /* If the linked insn has been replaced by a note, then there
-		 is no point in pursuing this chain any further.  */
-	      if (GET_CODE (link) == NOTE)
-		continue;
+	      /* Try this insn with each insn it links back to.  */
 
-	      for (nextlinks = LOG_LINKS (link);
-		   nextlinks;
-		   nextlinks = XEXP (nextlinks, 1))
-		if ((next = try_combine (insn, link,
-					 XEXP (nextlinks, 0),
-					 &new_direct_jump_p)) != 0)
+	      for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
+		if ((next = try_combine (insn, XEXP (links, 0),
+					 NULL_RTX, &new_direct_jump_p)) != 0)
 		  goto retry;
-	    }
 
-#ifdef HAVE_cc0
-	  /* Try to combine a jump insn that uses CC0
-	     with a preceding insn that sets CC0, and maybe with its
-	     logical predecessor as well.
-	     This is how we make decrement-and-branch insns.
-	     We need this special code because data flow connections
-	     via CC0 do not get entered in LOG_LINKS.  */
+	      /* Try each sequence of three linked insns ending with this one.  */
 
-	  if (GET_CODE (insn) == JUMP_INSN
-	      && (prev = prev_nonnote_insn (insn)) != 0
-	      && GET_CODE (prev) == INSN
-	      && sets_cc0_p (PATTERN (prev)))
-	    {
-	      if ((next = try_combine (insn, prev,
-				       NULL_RTX, &new_direct_jump_p)) != 0)
-		goto retry;
+	      for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
+		{
+		  rtx link = XEXP (links, 0);
 
-	      for (nextlinks = LOG_LINKS (prev); nextlinks;
-		   nextlinks = XEXP (nextlinks, 1))
-		if ((next = try_combine (insn, prev,
-					 XEXP (nextlinks, 0),
-					 &new_direct_jump_p)) != 0)
-		  goto retry;
-	    }
+		  /* If the linked insn has been replaced by a note, then there
+		     is no point in pursuing this chain any further.  */
+		  if (GET_CODE (link) == NOTE)
+		    continue;
+
+		  for (nextlinks = LOG_LINKS (link);
+		       nextlinks;
+		       nextlinks = XEXP (nextlinks, 1))
+		    if ((next = try_combine (insn, link,
+					     XEXP (nextlinks, 0),
+					     &new_direct_jump_p)) != 0)
+		      goto retry;
+		}
+
+#ifdef HAVE_cc0
+	      /* Try to combine a jump insn that uses CC0
+		 with a preceding insn that sets CC0, and maybe with its
+		 logical predecessor as well.
+		 This is how we make decrement-and-branch insns.
+		 We need this special code because data flow connections
+		 via CC0 do not get entered in LOG_LINKS.  */
+
+	      if (GET_CODE (insn) == JUMP_INSN
+		  && (prev = prev_nonnote_insn (insn)) != 0
+		  && GET_CODE (prev) == INSN
+		  && sets_cc0_p (PATTERN (prev)))
+		{
+		  if ((next = try_combine (insn, prev,
+					   NULL_RTX, &new_direct_jump_p)) != 0)
+		    goto retry;
+
+		  for (nextlinks = LOG_LINKS (prev); nextlinks;
+		       nextlinks = XEXP (nextlinks, 1))
+		    if ((next = try_combine (insn, prev,
+					     XEXP (nextlinks, 0),
+					     &new_direct_jump_p)) != 0)
+		      goto retry;
+		}
 
-	  /* Do the same for an insn that explicitly references CC0.  */
-	  if (GET_CODE (insn) == INSN
-	      && (prev = prev_nonnote_insn (insn)) != 0
-	      && GET_CODE (prev) == INSN
-	      && sets_cc0_p (PATTERN (prev))
-	      && GET_CODE (PATTERN (insn)) == SET
-	      && reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (insn))))
-	    {
-	      if ((next = try_combine (insn, prev,
-				       NULL_RTX, &new_direct_jump_p)) != 0)
-		goto retry;
+	      /* Do the same for an insn that explicitly references CC0.  */
+	      if (GET_CODE (insn) == INSN
+		  && (prev = prev_nonnote_insn (insn)) != 0
+		  && GET_CODE (prev) == INSN
+		  && sets_cc0_p (PATTERN (prev))
+		  && GET_CODE (PATTERN (insn)) == SET
+		  && reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (insn))))
+		{
+		  if ((next = try_combine (insn, prev,
+					   NULL_RTX, &new_direct_jump_p)) != 0)
+		    goto retry;
 
-	      for (nextlinks = LOG_LINKS (prev); nextlinks;
-		   nextlinks = XEXP (nextlinks, 1))
-		if ((next = try_combine (insn, prev,
-					 XEXP (nextlinks, 0),
-					 &new_direct_jump_p)) != 0)
-		  goto retry;
-	    }
+		  for (nextlinks = LOG_LINKS (prev); nextlinks;
+		       nextlinks = XEXP (nextlinks, 1))
+		    if ((next = try_combine (insn, prev,
+					     XEXP (nextlinks, 0),
+					     &new_direct_jump_p)) != 0)
+		      goto retry;
+		}
 
-	  /* Finally, see if any of the insns that this insn links to
-	     explicitly references CC0.  If so, try this insn, that insn,
-	     and its predecessor if it sets CC0.  */
-	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
-	    if (GET_CODE (XEXP (links, 0)) == INSN
-		&& GET_CODE (PATTERN (XEXP (links, 0))) == SET
-		&& reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (XEXP (links, 0))))
-		&& (prev = prev_nonnote_insn (XEXP (links, 0))) != 0
-		&& GET_CODE (prev) == INSN
-		&& sets_cc0_p (PATTERN (prev))
-		&& (next = try_combine (insn, XEXP (links, 0),
-					prev, &new_direct_jump_p)) != 0)
-	      goto retry;
-#endif
+	      /* Finally, see if any of the insns that this insn links to
+		 explicitly references CC0.  If so, try this insn, that insn,
+		 and its predecessor if it sets CC0.  */
+	      for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
+		if (GET_CODE (XEXP (links, 0)) == INSN
+		    && GET_CODE (PATTERN (XEXP (links, 0))) == SET
+		    && reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (XEXP (links, 0))))
+		    && (prev = prev_nonnote_insn (XEXP (links, 0))) != 0
+		    && GET_CODE (prev) == INSN
+		    && sets_cc0_p (PATTERN (prev))
+		    && (next = try_combine (insn, XEXP (links, 0),
+					    prev, &new_direct_jump_p)) != 0)
+		  goto retry;
+#endif
 
-	  /* Try combining an insn with two different insns whose results it
-	     uses.  */
-	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
-	    for (nextlinks = XEXP (links, 1); nextlinks;
-		 nextlinks = XEXP (nextlinks, 1))
-	      if ((next = try_combine (insn, XEXP (links, 0),
-				       XEXP (nextlinks, 0),
-				       &new_direct_jump_p)) != 0)
-		goto retry;
+	      /* Try combining an insn with two different insns whose results it
+		 uses.  */
+	      for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
+		for (nextlinks = XEXP (links, 1); nextlinks;
+		     nextlinks = XEXP (nextlinks, 1))
+		  if ((next = try_combine (insn, XEXP (links, 0),
+					   XEXP (nextlinks, 0),
+					   &new_direct_jump_p)) != 0)
+		    goto retry;
 
-	  if (GET_CODE (insn) != NOTE)
-	    record_dead_and_set_regs (insn);
+	      if (GET_CODE (insn) != NOTE)
+		record_dead_and_set_regs (insn);
 
-	retry:
-	  ;
+	    retry:
+	      ;
+	    }
 	}
     }
 
   clear_bb_flags ();
@@ -11666,7 +11665,7 @@ reg_dead_at_p (reg, insn)
 	rtx reg;
 	rtx insn;
 {
-  int block;
+  basic_block block;
   unsigned int i;
 
   /* Set variables for reg_dead_at_p_1.  */
@@ -11699,21 +11698,21 @@ reg_dead_at_p (reg, insn)
       return 1;
     }
 
-  /* Get the basic block number that we were in.  */
+  /* Get the basic block that we were in.  */
   if (insn == 0)
-    block = 0;
+    block = ENTRY_BLOCK_PTR->next_bb;
   else
     {
-      for (block = 0; block < n_basic_blocks; block++)
-	if (insn == BLOCK_HEAD (block))
+      FOR_EACH_BB (block)
+	if (insn == block->head)
 	  break;
 
-      if (block == n_basic_blocks)
+      if (block == EXIT_BLOCK_PTR)
 	return 0;
     }
 
   for (i = reg_dead_regno; i < reg_dead_endregno; i++)
-    if (REGNO_REG_SET_P (BASIC_BLOCK (block)->global_live_at_start, i))
+    if (REGNO_REG_SET_P (block->global_live_at_start, i))
      return 0;
 
   return 1;
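
The combine_instructions change above is the largest restructuring in the patch: one walk over every insn (with manual "did we cross into the next block?" bookkeeping) becomes an explicit per-block walk from bb->head to bb->end. The bounds idiom is worth calling out: iterating until NEXT_INSN (bb->end) makes bb->end itself the last insn visited. Sketched, with the combining body elided:

    FOR_EACH_BB (this_basic_block)
      for (insn = this_basic_block->head;
           insn != NEXT_INSN (this_basic_block->end);
           insn = next ? next : NEXT_INSN (insn))
        {
          next = 0;
          /* ... try to combine INSN with the insns it links back to,
             setting NEXT when try_combine produces a replacement ...  */
        }

Note also that FOR_EACH_BB never visits EXIT_BLOCK_PTR, which is why reg_dead_at_p can use "block == EXIT_BLOCK_PTR" as its loop-fell-through test, replacing the old "block == n_basic_blocks" check.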
gcc/config/ia64/ia64.c

@@ -6566,11 +6566,10 @@ ia64_sched_finish (dump, sched_verbose)
 static void
 emit_predicate_relation_info ()
 {
-  int i;
+  basic_block bb;
 
-  for (i = n_basic_blocks - 1; i >= 0; --i)
+  FOR_EACH_BB_REVERSE (bb)
     {
-      basic_block bb = BASIC_BLOCK (i);
       int r;
       rtx head = bb->head;
 
@@ -6596,9 +6595,8 @@ emit_predicate_relation_info ()
      relations around them.  Otherwise the assembler will assume the call
     returns, and complain about uses of call-clobbered predicates after
     the call.  */
-  for (i = n_basic_blocks - 1; i >= 0; --i)
+  FOR_EACH_BB_REVERSE (bb)
     {
-      basic_block bb = BASIC_BLOCK (i);
       rtx insn = bb->head;
 
       while (1)
@@ -6974,11 +6972,11 @@ ia64_strip_name_encoding (str)
 
 /* The current basic block number.  */
 
-static int block_num;
+static bool last_block;
 
 /* True if we need a copy_state command at the start of the next block.  */
 
-static int need_copy_state;
+static bool need_copy_state;
 
 /* The function emits unwind directives for the start of an epilogue.  */
 
@@ -6988,10 +6986,10 @@ process_epilogue ()
   /* If this isn't the last block of the function, then we need to label the
     current state, and copy it back in at the start of the next block.  */
 
-  if (block_num != n_basic_blocks - 1)
+  if (!last_block)
     {
       fprintf (asm_out_file, "\t.label_state 1\n");
-      need_copy_state = 1;
+      need_copy_state = true;
     }
 
   fprintf (asm_out_file, "\t.restore sp\n");
@@ -7229,14 +7227,14 @@ process_for_unwind_directive (asm_out_file, insn)
   if (GET_CODE (insn) == NOTE
       && NOTE_LINE_NUMBER (insn) == NOTE_INSN_BASIC_BLOCK)
     {
-      block_num = NOTE_BASIC_BLOCK (insn)->index;
+      last_block = NOTE_BASIC_BLOCK (insn)->next_bb == EXIT_BLOCK_PTR;
 
       /* Restore unwind state from immediately before the epilogue.  */
       if (need_copy_state)
 	{
 	  fprintf (asm_out_file, "\t.body\n");
 	  fprintf (asm_out_file, "\t.copy_state 1\n");
-	  need_copy_state = 0;
+	  need_copy_state = false;
 	}
     }
gcc/conflict.c

@@ -447,19 +447,18 @@ conflict_graph_compute (regs, p)
 	regset regs;
 	partition p;
 {
-  int b;
   conflict_graph graph = conflict_graph_new (max_reg_num ());
   regset_head live_head;
   regset live = &live_head;
   regset_head born_head;
   regset born = &born_head;
+  basic_block bb;
 
   INIT_REG_SET (live);
   INIT_REG_SET (born);
 
-  for (b = n_basic_blocks; --b >= 0; )
+  FOR_EACH_BB_REVERSE (bb)
     {
-      basic_block bb = BASIC_BLOCK (b);
       rtx insn;
       rtx head;
 
107
gcc/df.c
107
gcc/df.c
@ -171,12 +171,6 @@ Perhaps there should be a bitmap argument to df_analyse to specify
|
||||
#include "df.h"
|
||||
#include "fibheap.h"
|
||||
|
||||
#define FOR_ALL_BBS(BB, CODE) \
|
||||
do { \
|
||||
int node_; \
|
||||
for (node_ = 0; node_ < n_basic_blocks; node_++) \
|
||||
{(BB) = BASIC_BLOCK (node_); CODE;};} while (0)
|
||||
|
||||
#define FOR_EACH_BB_IN_BITMAP(BITMAP, MIN, BB, CODE) \
|
||||
do { \
|
||||
unsigned int node_; \
|
||||
@ -406,8 +400,8 @@ df_bitmaps_alloc (df, flags)
|
||||
struct df *df;
|
||||
int flags;
|
||||
{
|
||||
unsigned int i;
|
||||
int dflags = 0;
|
||||
basic_block bb;
|
||||
|
||||
/* Free the bitmaps if they need resizing. */
|
||||
if ((flags & DF_LR) && df->n_regs < (unsigned int)max_reg_num ())
|
||||
@ -423,9 +417,8 @@ df_bitmaps_alloc (df, flags)
|
||||
df->n_defs = df->def_id;
|
||||
df->n_uses = df->use_id;
|
||||
|
||||
for (i = 0; i < df->n_bbs; i++)
|
||||
FOR_EACH_BB (bb)
|
||||
{
|
||||
basic_block bb = BASIC_BLOCK (i);
|
||||
struct bb_info *bb_info = DF_BB_INFO (df, bb);
|
||||
|
||||
if (flags & DF_RD && ! bb_info->rd_in)
|
||||
@ -474,11 +467,10 @@ df_bitmaps_free (df, flags)
|
||||
struct df *df ATTRIBUTE_UNUSED;
|
||||
int flags;
|
||||
{
|
||||
unsigned int i;
|
||||
basic_block bb;
|
||||
|
||||
for (i = 0; i < df->n_bbs; i++)
|
||||
FOR_EACH_BB (bb)
|
||||
{
|
||||
basic_block bb = BASIC_BLOCK (i);
|
||||
struct bb_info *bb_info = DF_BB_INFO (df, bb);
|
||||
|
||||
if (!bb_info)
|
||||
@ -534,7 +526,7 @@ df_alloc (df, n_regs)
|
||||
int n_regs;
|
||||
{
|
||||
int n_insns;
|
||||
int i;
|
||||
basic_block bb;
|
||||
|
||||
gcc_obstack_init (&df_ref_obstack);
|
||||
|
||||
@ -572,8 +564,8 @@ df_alloc (df, n_regs)
|
||||
df->bbs = xcalloc (df->n_bbs, sizeof (struct bb_info));
|
||||
|
||||
df->all_blocks = BITMAP_XMALLOC ();
|
||||
for (i = 0; i < n_basic_blocks; i++)
|
||||
bitmap_set_bit (df->all_blocks, i);
|
||||
FOR_EACH_BB (bb)
|
||||
bitmap_set_bit (df->all_blocks, bb->index);
|
||||
}
|
||||
|
||||
|
||||
@ -1946,6 +1938,8 @@ df_analyse_1 (df, blocks, flags, update)
|
||||
int aflags;
|
||||
int dflags;
|
||||
int i;
|
||||
basic_block bb;
|
||||
|
||||
dflags = 0;
|
||||
aflags = flags;
|
||||
if (flags & DF_UD_CHAIN)
|
||||
@@ -2029,17 +2023,16 @@ df_analyse_1 (df, blocks, flags, update)
       /* Compute the sets of gens and kills for the defs of each bb.  */
       df_rd_local_compute (df, df->flags & DF_RD ? blocks : df->all_blocks);
       {
-        int i;
         bitmap *in = xmalloc (sizeof (bitmap) * n_basic_blocks);
         bitmap *out = xmalloc (sizeof (bitmap) * n_basic_blocks);
         bitmap *gen = xmalloc (sizeof (bitmap) * n_basic_blocks);
         bitmap *kill = xmalloc (sizeof (bitmap) * n_basic_blocks);
-        for (i = 0; i < n_basic_blocks; i ++)
+        FOR_EACH_BB (bb)
           {
-            in[i] = DF_BB_INFO (df, BASIC_BLOCK (i))->rd_in;
-            out[i] = DF_BB_INFO (df, BASIC_BLOCK (i))->rd_out;
-            gen[i] = DF_BB_INFO (df, BASIC_BLOCK (i))->rd_gen;
-            kill[i] = DF_BB_INFO (df, BASIC_BLOCK (i))->rd_kill;
+            in[bb->index] = DF_BB_INFO (df, bb)->rd_in;
+            out[bb->index] = DF_BB_INFO (df, bb)->rd_out;
+            gen[bb->index] = DF_BB_INFO (df, bb)->rd_gen;
+            kill[bb->index] = DF_BB_INFO (df, bb)->rd_kill;
           }
         iterative_dataflow_bitmap (in, out, gen, kill, df->all_blocks,
                                    FORWARD, UNION, df_rd_transfer_function,
@@ -2066,17 +2059,16 @@ df_analyse_1 (df, blocks, flags, update)
         uses in each bb.  */
       df_ru_local_compute (df, df->flags & DF_RU ? blocks : df->all_blocks);
       {
-        int i;
         bitmap *in = xmalloc (sizeof (bitmap) * n_basic_blocks);
         bitmap *out = xmalloc (sizeof (bitmap) * n_basic_blocks);
         bitmap *gen = xmalloc (sizeof (bitmap) * n_basic_blocks);
         bitmap *kill = xmalloc (sizeof (bitmap) * n_basic_blocks);
-        for (i = 0; i < n_basic_blocks; i ++)
+        FOR_EACH_BB (bb)
           {
-            in[i] = DF_BB_INFO (df, BASIC_BLOCK (i))->ru_in;
-            out[i] = DF_BB_INFO (df, BASIC_BLOCK (i))->ru_out;
-            gen[i] = DF_BB_INFO (df, BASIC_BLOCK (i))->ru_gen;
-            kill[i] = DF_BB_INFO (df, BASIC_BLOCK (i))->ru_kill;
+            in[bb->index] = DF_BB_INFO (df, bb)->ru_in;
+            out[bb->index] = DF_BB_INFO (df, bb)->ru_out;
+            gen[bb->index] = DF_BB_INFO (df, bb)->ru_gen;
+            kill[bb->index] = DF_BB_INFO (df, bb)->ru_kill;
           }
         iterative_dataflow_bitmap (in, out, gen, kill, df->all_blocks,
                                    BACKWARD, UNION, df_ru_transfer_function,
@@ -2106,17 +2098,16 @@ df_analyse_1 (df, blocks, flags, update)
       /* Compute the sets of defs and uses of live variables.  */
       df_lr_local_compute (df, df->flags & DF_LR ? blocks : df->all_blocks);
       {
-        int i;
         bitmap *in = xmalloc (sizeof (bitmap) * n_basic_blocks);
         bitmap *out = xmalloc (sizeof (bitmap) * n_basic_blocks);
         bitmap *use = xmalloc (sizeof (bitmap) * n_basic_blocks);
         bitmap *def = xmalloc (sizeof (bitmap) * n_basic_blocks);
-        for (i = 0; i < n_basic_blocks; i ++)
+        FOR_EACH_BB (bb)
           {
-            in[i] = DF_BB_INFO (df, BASIC_BLOCK (i))->lr_in;
-            out[i] = DF_BB_INFO (df, BASIC_BLOCK (i))->lr_out;
-            use[i] = DF_BB_INFO (df, BASIC_BLOCK (i))->lr_use;
-            def[i] = DF_BB_INFO (df, BASIC_BLOCK (i))->lr_def;
+            in[bb->index] = DF_BB_INFO (df, bb)->lr_in;
+            out[bb->index] = DF_BB_INFO (df, bb)->lr_out;
+            use[bb->index] = DF_BB_INFO (df, bb)->lr_use;
+            def[bb->index] = DF_BB_INFO (df, bb)->lr_def;
          }
        iterative_dataflow_bitmap (in, out, use, def, df->all_blocks,
                                   BACKWARD, UNION, df_lr_transfer_function,
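The shape of these three conversions is identical and worth calling out: the dataflow vectors are still allocated with one slot per block number, so the chain walk has to subscript them with bb->index where the old loop reused its counter. A minimal sketch of the idiom; the array and field names are taken from the RD hunk above:

  /* Illustration only: per-block results keep living in index-addressed
     vectors, so the chain iterator supplies the subscript explicitly.  */
  bitmap *in = xmalloc (sizeof (bitmap) * n_basic_blocks);
  basic_block bb;

  FOR_EACH_BB (bb)
    in[bb->index] = DF_BB_INFO (df, bb)->rd_in;   /* formerly in[i] */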
@@ -2270,12 +2261,15 @@ df_modified_p (df, blocks)
      struct df *df;
      bitmap blocks;
 {
-  unsigned int j;
   int update = 0;
+  basic_block bb;
 
-  for (j = 0; j < df->n_bbs; j++)
-    if (bitmap_bit_p (df->bbs_modified, j)
-        && (! blocks || (blocks == (bitmap) -1) || bitmap_bit_p (blocks, j)))
+  if (!df->n_bbs)
+    return 0;
+
+  FOR_EACH_BB (bb)
+    if (bitmap_bit_p (df->bbs_modified, bb->index)
+        && (! blocks || (blocks == (bitmap) -1) || bitmap_bit_p (blocks, bb->index)))
       {
         update = 1;
         break;
@@ -2408,7 +2402,7 @@ df_refs_unlink (df, blocks)
     }
   else
     {
-      FOR_ALL_BBS (bb,
+      FOR_EACH_BB (bb,
         {
           df_bb_refs_unlink (df, bb);
         });
@@ -3274,8 +3268,8 @@ df_dump (df, flags, file)
      int flags;
      FILE *file;
 {
-  unsigned int i;
   unsigned int j;
+  basic_block bb;
 
   if (! df || ! file)
     return;
@@ -3286,22 +3280,23 @@ df_dump (df, flags, file)
 
   if (flags & DF_RD)
     {
+      basic_block bb;
+
       fprintf (file, "Reaching defs:\n");
-      for (i = 0; i < df->n_bbs; i++)
+      FOR_EACH_BB (bb)
        {
-         basic_block bb = BASIC_BLOCK (i);
          struct bb_info *bb_info = DF_BB_INFO (df, bb);
 
          if (! bb_info->rd_in)
           continue;
 
-         fprintf (file, "bb %d in \t", i);
+         fprintf (file, "bb %d in \t", bb->index);
          dump_bitmap (file, bb_info->rd_in);
-         fprintf (file, "bb %d gen \t", i);
+         fprintf (file, "bb %d gen \t", bb->index);
          dump_bitmap (file, bb_info->rd_gen);
-         fprintf (file, "bb %d kill\t", i);
+         fprintf (file, "bb %d kill\t", bb->index);
          dump_bitmap (file, bb_info->rd_kill);
-         fprintf (file, "bb %d out \t", i);
+         fprintf (file, "bb %d out \t", bb->index);
          dump_bitmap (file, bb_info->rd_out);
        }
     }
@@ -3329,21 +3324,20 @@ df_dump (df, flags, file)
   if (flags & DF_RU)
     {
       fprintf (file, "Reaching uses:\n");
-      for (i = 0; i < df->n_bbs; i++)
+      FOR_EACH_BB (bb)
        {
-         basic_block bb = BASIC_BLOCK (i);
          struct bb_info *bb_info = DF_BB_INFO (df, bb);
 
          if (! bb_info->ru_in)
           continue;
 
-         fprintf (file, "bb %d in \t", i);
+         fprintf (file, "bb %d in \t", bb->index);
          dump_bitmap (file, bb_info->ru_in);
-         fprintf (file, "bb %d gen \t", i);
+         fprintf (file, "bb %d gen \t", bb->index);
          dump_bitmap (file, bb_info->ru_gen);
-         fprintf (file, "bb %d kill\t", i);
+         fprintf (file, "bb %d kill\t", bb->index);
          dump_bitmap (file, bb_info->ru_kill);
-         fprintf (file, "bb %d out \t", i);
+         fprintf (file, "bb %d out \t", bb->index);
          dump_bitmap (file, bb_info->ru_out);
        }
     }
@@ -3371,21 +3365,20 @@ df_dump (df, flags, file)
   if (flags & DF_LR)
     {
       fprintf (file, "Live regs:\n");
-      for (i = 0; i < df->n_bbs; i++)
+      FOR_EACH_BB (bb)
        {
-         basic_block bb = BASIC_BLOCK (i);
          struct bb_info *bb_info = DF_BB_INFO (df, bb);
 
          if (! bb_info->lr_in)
           continue;
 
-         fprintf (file, "bb %d in \t", i);
+         fprintf (file, "bb %d in \t", bb->index);
          dump_bitmap (file, bb_info->lr_in);
-         fprintf (file, "bb %d use \t", i);
+         fprintf (file, "bb %d use \t", bb->index);
          dump_bitmap (file, bb_info->lr_use);
-         fprintf (file, "bb %d def \t", i);
+         fprintf (file, "bb %d def \t", bb->index);
          dump_bitmap (file, bb_info->lr_def);
-         fprintf (file, "bb %d out \t", i);
+         fprintf (file, "bb %d out \t", bb->index);
          dump_bitmap (file, bb_info->lr_out);
        }
     }
gcc/dominance.c

@@ -326,10 +326,9 @@ calc_dfs_tree (di, reverse)
      They are reverse-unreachable.  In the dom-case we disallow such
      nodes, but in post-dom we have to deal with them, so we simply
      include them in the DFS tree which actually becomes a forest.  */
-  int i;
-  for (i = n_basic_blocks - 1; i >= 0; i--)
+  basic_block b;
+  FOR_EACH_BB_REVERSE (b)
     {
-      basic_block b = BASIC_BLOCK (i);
       if (di->dfs_order[b->index])
         continue;
       di->dfs_order[b->index] = di->dfsnum;
@@ -604,17 +603,17 @@ calculate_dominance_info (idom, doms, reverse)
 
   if (idom)
     {
-      int i;
-      for (i = 0; i < n_basic_blocks; i++)
+      basic_block b;
+
+      FOR_EACH_BB (b)
        {
-         basic_block b = BASIC_BLOCK (i);
          TBB d = di.dom[di.dfs_order[b->index]];
 
          /* The old code didn't modify array elements of nodes having only
            itself as dominator (d==0) or only ENTRY_BLOCK (resp. EXIT_BLOCK)
            (d==1).  */
          if (d > 1)
-           idom[i] = di.dfs_to_bb[d]->index;
+           idom[b->index] = di.dfs_to_bb[d]->index;
        }
     }
   if (doms)
gcc/final.c

@@ -934,8 +934,8 @@ insn_current_reference_address (branch)
 void
 compute_alignments ()
 {
-  int i;
   int log, max_skip, max_log;
+  basic_block bb;
 
   if (label_align)
     {
@@ -952,9 +952,8 @@ compute_alignments ()
   if (! optimize || optimize_size)
     return;
 
-  for (i = 0; i < n_basic_blocks; i++)
+  FOR_EACH_BB (bb)
     {
-      basic_block bb = BASIC_BLOCK (i);
       rtx label = bb->head;
       int fallthru_frequency = 0, branch_frequency = 0, has_fallthru = 0;
       edge e;
gcc/flow.c
@@ -623,6 +623,7 @@ update_life_info (blocks, extent, prop_flags)
   regset_head tmp_head;
   int i;
   int stabilized_prop_flags = prop_flags;
+  basic_block bb;
 
   tmp = INITIALIZE_REG_SET (tmp_head);
   ndead = 0;
@@ -653,10 +654,8 @@ update_life_info (blocks, extent, prop_flags)
 
       /* Removing dead code may allow the CFG to be simplified which
          in turn may allow for further dead code detection / removal.  */
-      for (i = n_basic_blocks - 1; i >= 0; --i)
+      FOR_EACH_BB_REVERSE (bb)
        {
-         basic_block bb = BASIC_BLOCK (i);
-
          COPY_REG_SET (tmp, bb->global_live_at_end);
          changed |= propagate_block (bb, tmp, NULL, NULL,
                                      prop_flags & (PROP_SCAN_DEAD_CODE
@@ -693,7 +692,7 @@ update_life_info (blocks, extent, prop_flags)
     {
       EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
        {
-         basic_block bb = BASIC_BLOCK (i);
+         bb = BASIC_BLOCK (i);
 
          COPY_REG_SET (tmp, bb->global_live_at_end);
          propagate_block (bb, tmp, NULL, NULL, stabilized_prop_flags);
@@ -704,10 +703,8 @@ update_life_info (blocks, extent, prop_flags)
     }
   else
     {
-      for (i = n_basic_blocks - 1; i >= 0; --i)
+      FOR_EACH_BB_REVERSE (bb)
        {
-         basic_block bb = BASIC_BLOCK (i);
-
          COPY_REG_SET (tmp, bb->global_live_at_end);
 
          propagate_block (bb, tmp, NULL, NULL, stabilized_prop_flags);
@@ -762,15 +759,15 @@ update_life_info_in_dirty_blocks (extent, prop_flags)
      int prop_flags;
 {
   sbitmap update_life_blocks = sbitmap_alloc (n_basic_blocks);
-  int block_num;
   int n = 0;
+  basic_block bb;
   int retval = 0;
 
   sbitmap_zero (update_life_blocks);
-  for (block_num = 0; block_num < n_basic_blocks; block_num++)
-    if (BASIC_BLOCK (block_num)->flags & BB_DIRTY)
+  FOR_EACH_BB (bb)
+    if (bb->flags & BB_DIRTY)
      {
-       SET_BIT (update_life_blocks, block_num);
+       SET_BIT (update_life_blocks, bb->index);
        n++;
      }
 
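The two reverse walks converted above follow the same recipe throughout the patch. Side by side, the translation is (process here is a stand-in for the per-block work, not a real function):

  /* Old: relies on block numbers being dense and in layout order.  */
  int i;
  for (i = n_basic_blocks - 1; i >= 0; --i)
    process (BASIC_BLOCK (i));

  /* New: follows the prev_bb links, independent of the numbering.  */
  basic_block bb;
  FOR_EACH_BB_REVERSE (bb)
    process (bb);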
@@ -811,14 +808,12 @@ int
 delete_noop_moves (f)
      rtx f ATTRIBUTE_UNUSED;
 {
-  int i;
   rtx insn, next;
   basic_block bb;
   int nnoops = 0;
 
-  for (i = 0; i < n_basic_blocks; i++)
+  FOR_EACH_BB (bb)
     {
-      bb = BASIC_BLOCK (i);
       for (insn = bb->head; insn != NEXT_INSN (bb->end); insn = next)
        {
          next = NEXT_INSN (insn);
@@ -1065,7 +1060,7 @@ calculate_global_regs_live (blocks_in, blocks_out, flags)
      sbitmap blocks_in, blocks_out;
      int flags;
 {
-  basic_block *queue, *qhead, *qtail, *qend;
+  basic_block *queue, *qhead, *qtail, *qend, bb;
   regset tmp, new_live_at_end, call_used;
   regset_head tmp_head, call_used_head;
   regset_head new_live_at_end_head;
@@ -1074,10 +1069,8 @@ calculate_global_regs_live (blocks_in, blocks_out, flags)
   /* Some passes used to forget clear aux field of basic block causing
      sick behaviour here.  */
 #ifdef ENABLE_CHECKING
-  if (ENTRY_BLOCK_PTR->aux || EXIT_BLOCK_PTR->aux)
-    abort ();
-  for (i = 0; i < n_basic_blocks; i++)
-    if (BASIC_BLOCK (i)->aux)
+  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
+    if (bb->aux)
       abort ();
 #endif
 
@@ -1102,16 +1095,12 @@ calculate_global_regs_live (blocks_in, blocks_out, flags)
      useful work.  We use AUX non-null to flag that the block is queued.  */
   if (blocks_in)
     {
-      /* Clear out the garbage that might be hanging out in bb->aux.  */
-      for (i = n_basic_blocks - 1; i >= 0; --i)
-       BASIC_BLOCK (i)->aux = NULL;
-
-      EXECUTE_IF_SET_IN_SBITMAP (blocks_in, 0, i,
-       {
-         basic_block bb = BASIC_BLOCK (i);
-         *--qhead = bb;
-         bb->aux = bb;
-       });
+      FOR_EACH_BB (bb)
+       if (TEST_BIT (blocks_in, bb->index))
+         {
+           *--qhead = bb;
+           bb->aux = bb;
+         }
     }
   else
     {
@@ -1356,9 +1345,8 @@ calculate_global_regs_live (blocks_in, blocks_out, flags)
     }
   else
     {
-      for (i = n_basic_blocks - 1; i >= 0; --i)
+      FOR_EACH_BB (bb)
        {
-         basic_block bb = BASIC_BLOCK (i);
          FREE_REG_SET (bb->local_set);
          FREE_REG_SET (bb->cond_local_set);
        }
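One conversion in calculate_global_regs_live deserves a note: seeding the worklist from BLOCKS_IN used to visit only the set bits via EXECUTE_IF_SET_IN_SBITMAP; the new code walks the whole chain and tests each bit. A sketch of the new discipline, lifted from the hunk above:

  /* Enqueue exactly the requested blocks; AUX non-null marks "queued".
     Clearing every aux field up front is no longer needed, since the
     ENABLE_CHECKING block at function entry asserts they start NULL.  */
  basic_block bb;

  FOR_EACH_BB (bb)
    if (TEST_BIT (blocks_in, bb->index))
      {
        *--qhead = bb;
        bb->aux = bb;
      }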
@@ -1484,21 +1472,14 @@ initialize_uninitialized_subregs ()
 void
 allocate_bb_life_data ()
 {
-  int i;
+  basic_block bb;
 
-  for (i = 0; i < n_basic_blocks; i++)
+  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
     {
-      basic_block bb = BASIC_BLOCK (i);
-
       bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
       bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
     }
 
-  ENTRY_BLOCK_PTR->global_live_at_end
-    = OBSTACK_ALLOC_REG_SET (&flow_obstack);
-  EXIT_BLOCK_PTR->global_live_at_start
-    = OBSTACK_ALLOC_REG_SET (&flow_obstack);
-
   regs_live_at_setjmp = OBSTACK_ALLOC_REG_SET (&flow_obstack);
 }
@@ -4228,18 +4209,16 @@ count_or_remove_death_notes (blocks, kill)
      sbitmap blocks;
      int kill;
 {
-  int i, count = 0;
+  int count = 0;
+  basic_block bb;
 
-  for (i = n_basic_blocks - 1; i >= 0; --i)
+  FOR_EACH_BB_REVERSE (bb)
     {
-      basic_block bb;
       rtx insn;
 
-      if (blocks && ! TEST_BIT (blocks, i))
+      if (blocks && ! TEST_BIT (blocks, bb->index))
        continue;
 
-      bb = BASIC_BLOCK (i);
-
      for (insn = bb->head;; insn = NEXT_INSN (insn))
        {
          if (INSN_P (insn))
gcc/gcse.c
@@ -541,7 +541,7 @@ static sbitmap *ae_kill, *ae_gen, *ae_in, *ae_out;
 struct null_pointer_info
 {
   /* The basic block being processed.  */
-  int current_block;
+  basic_block current_block;
   /* The first register to be handled in this pass.  */
   unsigned int min_reg;
   /* One greater than the last register to be handled in this pass.  */
@@ -1292,13 +1292,13 @@ compute_sets (f)
 
 struct reg_avail_info
 {
-  int last_bb;
+  basic_block last_bb;
   int first_set;
   int last_set;
 };
 
 static struct reg_avail_info *reg_avail_info;
-static int current_bb;
+static basic_block current_bb;
 
 
 /* See whether X, the source of a set, is something we want to consider for
@@ -1385,7 +1385,7 @@ oprs_unchanged_p (x, insn, avail_p)
     }
 
     case MEM:
-      if (load_killed_in_block_p (BASIC_BLOCK (current_bb), INSN_CUID (insn),
+      if (load_killed_in_block_p (current_bb, INSN_CUID (insn),
                                   x, avail_p))
        return 0;
      else
@@ -2375,7 +2375,7 @@ record_last_reg_set_info (insn, regno)
     {
       info->last_bb = current_bb;
       info->first_set = cuid;
-      SET_BIT (reg_set_in_block[current_bb], regno);
+      SET_BIT (reg_set_in_block[current_bb->index], regno);
     }
 }
 
@@ -2504,9 +2504,9 @@ compute_hash_table (set_p)
     gmalloc (max_gcse_regno * sizeof (struct reg_avail_info));
 
   for (i = 0; i < max_gcse_regno; ++i)
-    reg_avail_info[i].last_bb = NEVER_SET;
+    reg_avail_info[i].last_bb = NULL;
 
-  for (current_bb = 0; current_bb < n_basic_blocks; current_bb++)
+  FOR_EACH_BB (current_bb)
     {
       rtx insn;
       unsigned int regno;
@@ -2517,8 +2517,8 @@ compute_hash_table (set_p)
         ??? hard-reg reg_set_in_block computation
         could be moved to compute_sets since they currently don't change.  */
 
-      for (insn = BLOCK_HEAD (current_bb);
-           insn && insn != NEXT_INSN (BLOCK_END (current_bb));
+      for (insn = current_bb->head;
+           insn && insn != NEXT_INSN (current_bb->end);
           insn = NEXT_INSN (insn))
        {
          if (! INSN_P (insn))
@@ -2546,8 +2546,8 @@ compute_hash_table (set_p)
 
      /* The next pass builds the hash table.  */
 
-      for (insn = BLOCK_HEAD (current_bb), in_libcall_block = 0;
-           insn && insn != NEXT_INSN (BLOCK_END (current_bb));
+      for (insn = current_bb->head, in_libcall_block = 0;
+           insn && insn != NEXT_INSN (current_bb->end);
           insn = NEXT_INSN (insn))
        if (INSN_P (insn))
          {
@@ -2983,9 +2983,10 @@ handle_rd_kill_set (insn, regno, bb)
 static void
 compute_kill_rd ()
 {
-  int bb, cuid;
+  int cuid;
   unsigned int regno;
   int i;
+  basic_block bb;
 
   /* For each block
        For each set bit in `gen' of the block (i.e each insn which
@@ -2995,9 +2996,9 @@ compute_kill_rd ()
           For each setting of regx in the linked list, which is not in
               this block
             Set the bit in `kill' corresponding to that insn.  */
-  for (bb = 0; bb < n_basic_blocks; bb++)
+  FOR_EACH_BB (bb)
     for (cuid = 0; cuid < max_cuid; cuid++)
-      if (TEST_BIT (rd_gen[bb], cuid))
+      if (TEST_BIT (rd_gen[bb->index], cuid))
        {
          rtx insn = CUID_INSN (cuid);
          rtx pat = PATTERN (insn);
@@ -3006,7 +3007,7 @@ compute_kill_rd ()
           {
             for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
               if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
-                handle_rd_kill_set (insn, regno, BASIC_BLOCK (bb));
+                handle_rd_kill_set (insn, regno, bb);
           }
 
          if (GET_CODE (pat) == PARALLEL)
@@ -3019,13 +3020,13 @@ compute_kill_rd ()
                    && GET_CODE (XEXP (XVECEXP (pat, 0, i), 0)) == REG)
                  handle_rd_kill_set (insn,
                                      REGNO (XEXP (XVECEXP (pat, 0, i), 0)),
-                                     BASIC_BLOCK (bb));
+                                     bb);
              }
           }
         else if (GET_CODE (pat) == SET && GET_CODE (SET_DEST (pat)) == REG)
           /* Each setting of this register outside of this block
              must be marked in the set of kills in this block.  */
-          handle_rd_kill_set (insn, REGNO (SET_DEST (pat)), BASIC_BLOCK (bb));
+          handle_rd_kill_set (insn, REGNO (SET_DEST (pat)), bb);
        }
 }
 
@@ -3037,21 +3038,22 @@ compute_kill_rd ()
 static void
 compute_rd ()
 {
-  int bb, changed, passes;
+  int changed, passes;
+  basic_block bb;
 
-  for (bb = 0; bb < n_basic_blocks; bb++)
-    sbitmap_copy (rd_out[bb] /*dst*/, rd_gen[bb] /*src*/);
+  FOR_EACH_BB (bb)
+    sbitmap_copy (rd_out[bb->index] /*dst*/, rd_gen[bb->index] /*src*/);
 
   passes = 0;
   changed = 1;
   while (changed)
     {
       changed = 0;
-      for (bb = 0; bb < n_basic_blocks; bb++)
+      FOR_EACH_BB (bb)
        {
-         sbitmap_union_of_preds (reaching_defs[bb], rd_out, bb);
-         changed |= sbitmap_union_of_diff_cg (rd_out[bb], rd_gen[bb],
-                                              reaching_defs[bb], rd_kill[bb]);
+         sbitmap_union_of_preds (reaching_defs[bb->index], rd_out, bb->index);
+         changed |= sbitmap_union_of_diff_cg (rd_out[bb->index], rd_gen[bb->index],
                                              reaching_defs[bb->index], rd_kill[bb->index]);
        }
       passes++;
     }
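compute_rd above is the classic forward union problem; as an equation, the loop iterates out(b) = gen(b) | ((union of out(p) over predecessors p) & ~kill(b)) until nothing changes. A compressed sketch of the converted solver, using the same pass-local sbitmap vectors:

  /* Round-robin reaching-definitions solver (sketch of the code above).  */
  int changed = 1;
  basic_block bb;

  FOR_EACH_BB (bb)
    sbitmap_copy (rd_out[bb->index], rd_gen[bb->index]);  /* initial guess */

  while (changed)
    {
      changed = 0;
      FOR_EACH_BB (bb)
        {
          /* reaching_defs = union of the predecessors' out-sets ...  */
          sbitmap_union_of_preds (reaching_defs[bb->index], rd_out, bb->index);
          /* ... out = gen | (reaching_defs & ~kill); record growth.  */
          changed |= sbitmap_union_of_diff_cg (rd_out[bb->index],
                                               rd_gen[bb->index],
                                               reaching_defs[bb->index],
                                               rd_kill[bb->index]);
        }
    }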
@@ -3178,20 +3180,20 @@ static void
 compute_ae_kill (ae_gen, ae_kill)
      sbitmap *ae_gen, *ae_kill;
 {
-  int bb;
+  basic_block bb;
   unsigned int i;
   struct expr *expr;
 
-  for (bb = 0; bb < n_basic_blocks; bb++)
+  FOR_EACH_BB (bb)
     for (i = 0; i < expr_hash_table_size; i++)
       for (expr = expr_hash_table[i]; expr; expr = expr->next_same_hash)
        {
          /* Skip EXPR if generated in this block.  */
-         if (TEST_BIT (ae_gen[bb], expr->bitmap_index))
+         if (TEST_BIT (ae_gen[bb->index], expr->bitmap_index))
           continue;
 
-         if (expr_killed_p (expr->expr, BASIC_BLOCK (bb)))
-           SET_BIT (ae_kill[bb], expr->bitmap_index);
+         if (expr_killed_p (expr->expr, bb))
+           SET_BIT (ae_kill[bb->index], expr->bitmap_index);
        }
 }
 
@@ -3607,20 +3609,24 @@ handle_avail_expr (insn, expr)
 static int
 classic_gcse ()
 {
-  int bb, changed;
+  int changed;
   rtx insn;
+  basic_block bb;
 
   /* Note we start at block 1.  */
 
+  if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
+    return 0;
+
   changed = 0;
-  for (bb = 1; bb < n_basic_blocks; bb++)
+  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb, EXIT_BLOCK_PTR, next_bb)
     {
       /* Reset tables used to keep track of what's still valid [since the
         start of the block].  */
       reset_opr_set_tables ();
 
-      for (insn = BLOCK_HEAD (bb);
-           insn != NULL && insn != NEXT_INSN (BLOCK_END (bb));
+      for (insn = bb->head;
+           insn != NULL && insn != NEXT_INSN (bb->end);
           insn = NEXT_INSN (insn))
        {
          /* Is insn of form (set (pseudo-reg) ...)?  */
@@ -3638,7 +3644,7 @@ classic_gcse ()
              && ((expr = lookup_expr (src)) != NULL)
              /* Is the expression available [at the start of the
                 block]?  */
-             && TEST_BIT (ae_in[bb], expr->bitmap_index)
+             && TEST_BIT (ae_in[bb->index], expr->bitmap_index)
             /* Are the operands unchanged since the start of the
                block?  */
             && oprs_not_set_p (src, insn))
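classic_gcse's "Note we start at block 1" comment is where the numbering assumption used to hide: index 1 meant the second real block. Without indices that becomes a hop along the chain, plus an explicit guard for a function with no real blocks at all. A sketch, with the hypothetical scan_block standing in for the per-block scan:

  basic_block bb;

  /* No real blocks: ENTRY links straight to EXIT.  */
  if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
    return 0;

  /* Begin at the successor of the first real block, i.e. the old bb == 1.  */
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb, EXIT_BLOCK_PTR, next_bb)
    scan_block (bb);   /* hypothetical helper */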
@@ -3749,7 +3755,8 @@ compute_transp (x, indx, bmap, set_p)
      sbitmap *bmap;
      int set_p;
 {
-  int bb, i, j;
+  int i, j;
+  basic_block bb;
   enum rtx_code code;
   reg_set *r;
   const char *fmt;
@@ -3769,9 +3776,9 @@ compute_transp (x, indx, bmap, set_p)
        {
          if (REGNO (x) < FIRST_PSEUDO_REGISTER)
           {
-             for (bb = 0; bb < n_basic_blocks; bb++)
-               if (TEST_BIT (reg_set_in_block[bb], REGNO (x)))
-                 SET_BIT (bmap[bb], indx);
+             FOR_EACH_BB (bb)
+               if (TEST_BIT (reg_set_in_block[bb->index], REGNO (x)))
+                 SET_BIT (bmap[bb->index], indx);
           }
          else
           {
@@ -3783,9 +3790,9 @@ compute_transp (x, indx, bmap, set_p)
        {
          if (REGNO (x) < FIRST_PSEUDO_REGISTER)
           {
-             for (bb = 0; bb < n_basic_blocks; bb++)
-               if (TEST_BIT (reg_set_in_block[bb], REGNO (x)))
-                 RESET_BIT (bmap[bb], indx);
+             FOR_EACH_BB (bb)
+               if (TEST_BIT (reg_set_in_block[bb->index], REGNO (x)))
+                 RESET_BIT (bmap[bb->index], indx);
           }
          else
           {
@@ -3797,9 +3804,9 @@ compute_transp (x, indx, bmap, set_p)
       return;
 
     case MEM:
-      for (bb = 0; bb < n_basic_blocks; bb++)
+      FOR_EACH_BB (bb)
        {
-         rtx list_entry = canon_modify_mem_list[bb];
+         rtx list_entry = canon_modify_mem_list[bb->index];
 
          while (list_entry)
           {
@@ -3808,9 +3815,9 @@ compute_transp (x, indx, bmap, set_p)
              if (GET_CODE (XEXP (list_entry, 0)) == CALL_INSN)
               {
                 if (set_p)
-                   SET_BIT (bmap[bb], indx);
+                   SET_BIT (bmap[bb->index], indx);
                 else
-                   RESET_BIT (bmap[bb], indx);
+                   RESET_BIT (bmap[bb->index], indx);
                 break;
               }
             /* LIST_ENTRY must be an INSN of some kind that sets memory.
@@ -3824,9 +3831,9 @@ compute_transp (x, indx, bmap, set_p)
                                        x, rtx_addr_varies_p))
               {
                 if (set_p)
-                   SET_BIT (bmap[bb], indx);
+                   SET_BIT (bmap[bb->index], indx);
                 else
-                   RESET_BIT (bmap[bb], indx);
+                   RESET_BIT (bmap[bb->index], indx);
                 break;
               }
             list_entry = XEXP (list_entry, 1);
@@ -4290,24 +4297,31 @@ static int
 cprop (alter_jumps)
      int alter_jumps;
 {
-  int bb, changed;
+  int changed;
+  basic_block bb;
   rtx insn;
 
   /* Note we start at block 1.  */
+  if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
+    {
+      if (gcse_file != NULL)
+       fprintf (gcse_file, "\n");
+      return 0;
+    }
 
   changed = 0;
-  for (bb = 1; bb < n_basic_blocks; bb++)
+  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb, EXIT_BLOCK_PTR, next_bb)
     {
       /* Reset tables used to keep track of what's still valid [since the
         start of the block].  */
       reset_opr_set_tables ();
 
-      for (insn = BLOCK_HEAD (bb);
-           insn != NULL && insn != NEXT_INSN (BLOCK_END (bb));
+      for (insn = bb->head;
+           insn != NULL && insn != NEXT_INSN (bb->end);
           insn = NEXT_INSN (insn))
        if (INSN_P (insn))
          {
-           changed |= cprop_insn (BASIC_BLOCK (bb), insn, alter_jumps);
+           changed |= cprop_insn (bb, insn, alter_jumps);
 
            /* Keep track of everything modified by this insn.  */
            /* ??? Need to be careful w.r.t. mods done to INSN.  Don't
@@ -4454,7 +4468,7 @@ static void
 compute_pre_data ()
 {
   sbitmap trapping_expr;
-  int i;
+  basic_block bb;
   unsigned int ui;
 
   compute_local_properties (transp, comp, antloc, 0);
@@ -4477,7 +4491,7 @@ compute_pre_data ()
 
     This is significantly faster than compute_ae_kill.  */
 
-  for (i = 0; i < n_basic_blocks; i++)
+  FOR_EACH_BB (bb)
     {
       edge e;
 
@@ -4485,16 +4499,16 @@ compute_pre_data ()
         kill all trapping expressions because we won't be able to properly
         place the instruction on the edge.  So make them neither
         anticipatable nor transparent.  This is fairly conservative.  */
-      for (e = BASIC_BLOCK (i)->pred; e ; e = e->pred_next)
+      for (e = bb->pred; e ; e = e->pred_next)
        if (e->flags & EDGE_ABNORMAL)
          {
-           sbitmap_difference (antloc[i], antloc[i], trapping_expr);
-           sbitmap_difference (transp[i], transp[i], trapping_expr);
+           sbitmap_difference (antloc[bb->index], antloc[bb->index], trapping_expr);
+           sbitmap_difference (transp[bb->index], transp[bb->index], trapping_expr);
           break;
          }
 
-      sbitmap_a_or_b (ae_kill[i], transp[i], comp[i]);
-      sbitmap_not (ae_kill[i], ae_kill[i]);
+      sbitmap_a_or_b (ae_kill[bb->index], transp[bb->index], comp[bb->index]);
+      sbitmap_not (ae_kill[bb->index], ae_kill[bb->index]);
     }
 
   edge_list = pre_edge_lcm (gcse_file, n_exprs, transp, comp, antloc,
@@ -5181,18 +5195,18 @@ add_label_notes (x, insn)
 static void
 compute_transpout ()
 {
-  int bb;
+  basic_block bb;
   unsigned int i;
   struct expr *expr;
 
   sbitmap_vector_ones (transpout, n_basic_blocks);
 
-  for (bb = 0; bb < n_basic_blocks; ++bb)
+  FOR_EACH_BB (bb)
     {
       /* Note that flow inserted a nop a the end of basic blocks that
         end in call instructions for reasons other than abnormal
        control flow.  */
-      if (GET_CODE (BLOCK_END (bb)) != CALL_INSN)
+      if (GET_CODE (bb->end) != CALL_INSN)
        continue;
 
      for (i = 0; i < expr_hash_table_size; i++)
@@ -5206,7 +5220,7 @@ compute_transpout ()
            /* ??? Optimally, we would use interprocedural alias
               analysis to determine if this mem is actually killed
              by this call.  */
-           RESET_BIT (transpout[bb], expr->bitmap_index);
+           RESET_BIT (transpout[bb->index], expr->bitmap_index);
          }
       }
 }
@@ -5239,8 +5253,8 @@ invalidate_nonnull_info (x, setter, data)
 
   regno = REGNO (x) - npi->min_reg;
 
-  RESET_BIT (npi->nonnull_local[npi->current_block], regno);
-  SET_BIT (npi->nonnull_killed[npi->current_block], regno);
+  RESET_BIT (npi->nonnull_local[npi->current_block->index], regno);
+  SET_BIT (npi->nonnull_killed[npi->current_block->index], regno);
 }
 
 /* Do null-pointer check elimination for the registers indicated in
@@ -5255,8 +5269,7 @@ delete_null_pointer_checks_1 (block_reg, nonnull_avin,
      sbitmap *nonnull_avout;
      struct null_pointer_info *npi;
 {
-  int bb;
-  int current_block;
+  basic_block bb, current_block;
   sbitmap *nonnull_local = npi->nonnull_local;
   sbitmap *nonnull_killed = npi->nonnull_killed;
 
@@ -5271,7 +5284,7 @@ delete_null_pointer_checks_1 (block_reg, nonnull_avin,
   sbitmap_vector_zero (nonnull_local, n_basic_blocks);
   sbitmap_vector_zero (nonnull_killed, n_basic_blocks);
 
-  for (current_block = 0; current_block < n_basic_blocks; current_block++)
+  FOR_EACH_BB (current_block)
     {
       rtx insn, stop_insn;
 
@@ -5280,8 +5293,8 @@ delete_null_pointer_checks_1 (block_reg, nonnull_avin,
 
       /* Scan each insn in the basic block looking for memory references and
         register sets.  */
-      stop_insn = NEXT_INSN (BLOCK_END (current_block));
-      for (insn = BLOCK_HEAD (current_block);
+      stop_insn = NEXT_INSN (current_block->end);
+      for (insn = current_block->head;
           insn != stop_insn;
           insn = NEXT_INSN (insn))
        {
@@ -5309,7 +5322,7 @@ delete_null_pointer_checks_1 (block_reg, nonnull_avin,
              && GET_CODE ((reg = XEXP (SET_SRC (set), 0))) == REG
              && REGNO (reg) >= npi->min_reg
              && REGNO (reg) < npi->max_reg)
-           SET_BIT (nonnull_local[current_block],
+           SET_BIT (nonnull_local[current_block->index],
                     REGNO (reg) - npi->min_reg);
 
          /* Now invalidate stuff clobbered by this insn.  */
@@ -5322,7 +5335,7 @@ delete_null_pointer_checks_1 (block_reg, nonnull_avin,
              && GET_CODE ((reg = XEXP (SET_DEST (set), 0))) == REG
              && REGNO (reg) >= npi->min_reg
              && REGNO (reg) < npi->max_reg)
-           SET_BIT (nonnull_local[current_block],
+           SET_BIT (nonnull_local[current_block->index],
                     REGNO (reg) - npi->min_reg);
        }
     }
@@ -5334,17 +5347,17 @@ delete_null_pointer_checks_1 (block_reg, nonnull_avin,
 
   /* Now look at each bb and see if it ends with a compare of a value
      against zero.  */
-  for (bb = 0; bb < n_basic_blocks; bb++)
+  FOR_EACH_BB (bb)
     {
-      rtx last_insn = BLOCK_END (bb);
+      rtx last_insn = bb->end;
       rtx condition, earliest;
       int compare_and_branch;
 
      /* Since MIN_REG is always at least FIRST_PSEUDO_REGISTER, and
        since BLOCK_REG[BB] is zero if this block did not end with a
        comparison against zero, this condition works.  */
-      if (block_reg[bb] < npi->min_reg
-         || block_reg[bb] >= npi->max_reg)
+      if (block_reg[bb->index] < npi->min_reg
+         || block_reg[bb->index] >= npi->max_reg)
        continue;
 
      /* LAST_INSN is a conditional jump.  Get its condition.  */
@@ -5355,7 +5368,7 @@ delete_null_pointer_checks_1 (block_reg, nonnull_avin,
        continue;
 
      /* Is the register known to have a nonzero value?  */
-      if (!TEST_BIT (nonnull_avout[bb], block_reg[bb] - npi->min_reg))
+      if (!TEST_BIT (nonnull_avout[bb->index], block_reg[bb->index] - npi->min_reg))
        continue;
 
      /* Try to compute whether the compare/branch at the loop end is one or
@@ -5383,12 +5396,12 @@ delete_null_pointer_checks_1 (block_reg, nonnull_avin,
       delete_insn (last_insn);
       if (compare_and_branch == 2)
        delete_insn (earliest);
-      purge_dead_edges (BASIC_BLOCK (bb));
+      purge_dead_edges (bb);
 
      /* Don't check this block again.  (Note that BLOCK_END is
        invalid here; we deleted the last instruction in the
       block.)  */
-      block_reg[bb] = 0;
+      block_reg[bb->index] = 0;
     }
 }
 
@@ -5422,7 +5435,7 @@ delete_null_pointer_checks (f)
 {
   sbitmap *nonnull_avin, *nonnull_avout;
   unsigned int *block_reg;
-  int bb;
+  basic_block bb;
   int reg;
   int regs_per_pass;
   int max_reg;
@@ -5458,9 +5471,9 @@ delete_null_pointer_checks (f)
      ends with a conditional branch whose condition is a comparison
      against zero.  Record the register compared in BLOCK_REG.  */
   block_reg = (unsigned int *) xcalloc (n_basic_blocks, sizeof (int));
-  for (bb = 0; bb < n_basic_blocks; bb++)
+  FOR_EACH_BB (bb)
     {
-      rtx last_insn = BLOCK_END (bb);
+      rtx last_insn = bb->end;
       rtx condition, earliest, reg;
 
      /* We only want conditional branches.  */
@@ -5486,7 +5499,7 @@ delete_null_pointer_checks (f)
       if (GET_CODE (reg) != REG)
        continue;
 
-      block_reg[bb] = REGNO (reg);
+      block_reg[bb->index] = REGNO (reg);
     }
 
   /* Go through the algorithm for each block of registers.  */
@@ -5570,7 +5583,8 @@ free_code_hoist_mem ()
 static void
 compute_code_hoist_vbeinout ()
 {
-  int bb, changed, passes;
+  int changed, passes;
+  basic_block bb;
 
   sbitmap_vector_zero (hoist_vbeout, n_basic_blocks);
   sbitmap_vector_zero (hoist_vbein, n_basic_blocks);
@@ -5584,12 +5598,12 @@ compute_code_hoist_vbeinout ()
 
       /* We scan the blocks in the reverse order to speed up
         the convergence.  */
-      for (bb = n_basic_blocks - 1; bb >= 0; bb--)
+      FOR_EACH_BB_REVERSE (bb)
        {
-         changed |= sbitmap_a_or_b_and_c_cg (hoist_vbein[bb], antloc[bb],
-                                             hoist_vbeout[bb], transp[bb]);
-         if (BASIC_BLOCK (bb)->next_bb != EXIT_BLOCK_PTR)
-           sbitmap_intersection_of_succs (hoist_vbeout[bb], hoist_vbein, bb);
+         changed |= sbitmap_a_or_b_and_c_cg (hoist_vbein[bb->index], antloc[bb->index],
+                                             hoist_vbeout[bb->index], transp[bb->index]);
+         if (bb->next_bb != EXIT_BLOCK_PTR)
+           sbitmap_intersection_of_succs (hoist_vbeout[bb->index], hoist_vbein, bb->index);
        }
 
       passes++;
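For reference, the recurrence compute_code_hoist_vbeinout iterates is the usual very-busy-expressions system; sketched in bit-vector form:

  /* Per block b (sketch):

       vbein(b)  = antloc(b) | (vbeout(b) & transp(b))
       vbeout(b) = intersection of vbein(s) over the successors s of b
                   (left empty when b falls into the exit block)

     Walking the chain with FOR_EACH_BB_REVERSE propagates values against
     the edge direction, which is why the patch keeps the reverse order:
     it usually converges in fewer passes.  */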
@@ -5677,7 +5691,7 @@ hoist_expr_reaches_here_p (expr_bb, expr_index, bb, visited)
 static void
 hoist_code ()
 {
-  int bb, dominated;
+  basic_block bb, dominated;
   unsigned int i;
   struct expr **index_map;
   struct expr *expr;
@@ -5694,33 +5708,33 @@ hoist_code ()
 
   /* Walk over each basic block looking for potentially hoistable
      expressions, nothing gets hoisted from the entry block.  */
-  for (bb = 0; bb < n_basic_blocks; bb++)
+  FOR_EACH_BB (bb)
     {
       int found = 0;
       int insn_inserted_p;
 
      /* Examine each expression that is very busy at the exit of this
        block.  These are the potentially hoistable expressions.  */
-      for (i = 0; i < hoist_vbeout[bb]->n_bits; i++)
+      for (i = 0; i < hoist_vbeout[bb->index]->n_bits; i++)
        {
          int hoistable = 0;
 
-         if (TEST_BIT (hoist_vbeout[bb], i) && TEST_BIT (transpout[bb], i))
+         if (TEST_BIT (hoist_vbeout[bb->index], i) && TEST_BIT (transpout[bb->index], i))
           {
             /* We've found a potentially hoistable expression, now
                we look at every block BB dominates to see if it
               computes the expression.  */
-             for (dominated = 0; dominated < n_basic_blocks; dominated++)
+             FOR_EACH_BB (dominated)
               {
                 /* Ignore self dominance.  */
                 if (bb == dominated
-                     || ! TEST_BIT (dominators[dominated], bb))
+                     || ! TEST_BIT (dominators[dominated->index], bb->index))
                   continue;
 
                /* We've found a dominated block, now see if it computes
                   the busy expression and whether or not moving that
                  expression to the "beginning" of that block is safe.  */
-                if (!TEST_BIT (antloc[dominated], i))
+                if (!TEST_BIT (antloc[dominated->index], i))
                   continue;
 
                /* Note if the expression would reach the dominated block
@@ -5728,8 +5742,7 @@ hoist_code ()
 
                   Keep track of how many times this expression is hoistable
                   from a dominated block into BB.  */
-                if (hoist_expr_reaches_here_p (BASIC_BLOCK (bb), i,
-                                               BASIC_BLOCK (dominated), NULL))
+                if (hoist_expr_reaches_here_p (bb, i, dominated, NULL))
                   hoistable++;
               }
 
@@ -5745,7 +5758,7 @@ hoist_code ()
                to nullify any benefit we get from code hoisting.  */
             if (hoistable > 1)
               {
-                 SET_BIT (hoist_exprs[bb], i);
+                 SET_BIT (hoist_exprs[bb->index], i);
                 found = 1;
               }
           }
@@ -5756,29 +5769,29 @@ hoist_code ()
        continue;
 
      /* Loop over all the hoistable expressions.  */
-      for (i = 0; i < hoist_exprs[bb]->n_bits; i++)
+      for (i = 0; i < hoist_exprs[bb->index]->n_bits; i++)
        {
          /* We want to insert the expression into BB only once, so
            note when we've inserted it.  */
         insn_inserted_p = 0;
 
         /* These tests should be the same as the tests above.  */
-         if (TEST_BIT (hoist_vbeout[bb], i))
+         if (TEST_BIT (hoist_vbeout[bb->index], i))
           {
             /* We've found a potentially hoistable expression, now
                we look at every block BB dominates to see if it
               computes the expression.  */
-             for (dominated = 0; dominated < n_basic_blocks; dominated++)
+             FOR_EACH_BB (dominated)
               {
                 /* Ignore self dominance.  */
                 if (bb == dominated
-                     || ! TEST_BIT (dominators[dominated], bb))
+                     || ! TEST_BIT (dominators[dominated->index], bb->index))
                   continue;
 
                /* We've found a dominated block, now see if it computes
                   the busy expression and whether or not moving that
                  expression to the "beginning" of that block is safe.  */
-                if (!TEST_BIT (antloc[dominated], i))
+                if (!TEST_BIT (antloc[dominated->index], i))
                   continue;
 
                /* The expression is computed in the dominated block and
@@ -5786,8 +5799,7 @@ hoist_code ()
                   dominated block.  Now we have to determine if the
                   expression would reach the dominated block if it was
                   placed at the end of BB.  */
-                if (hoist_expr_reaches_here_p (BASIC_BLOCK (bb), i,
-                                               BASIC_BLOCK (dominated), NULL))
+                if (hoist_expr_reaches_here_p (bb, i, dominated, NULL))
                   {
                     struct expr *expr = index_map[i];
                     struct occr *occr = expr->antic_occr;
@@ -5795,7 +5807,7 @@ hoist_code ()
                     rtx set;
 
                    /* Find the right occurrence of this expression.  */
-                    while (BLOCK_NUM (occr->insn) != dominated && occr)
+                    while (BLOCK_FOR_INSN (occr->insn) != dominated && occr)
                       occr = occr->next;
 
                    /* Should never happen.  */
@@ -5829,8 +5841,7 @@ hoist_code ()
                     occr->deleted_p = 1;
                     if (!insn_inserted_p)
                       {
-                         insert_insn_end_bb (index_map[i],
-                                             BASIC_BLOCK (bb), 0);
+                         insert_insn_end_bb (index_map[i], bb, 0);
                         insn_inserted_p = 1;
                       }
                   }
@@ -6110,15 +6121,15 @@ static void
 compute_ld_motion_mems ()
 {
   struct ls_expr * ptr;
-  int bb;
+  basic_block bb;
   rtx insn;
 
   pre_ldst_mems = NULL;
 
-  for (bb = 0; bb < n_basic_blocks; bb++)
+  FOR_EACH_BB (bb)
     {
-      for (insn = BLOCK_HEAD (bb);
-           insn && insn != NEXT_INSN (BLOCK_END (bb));
+      for (insn = bb->head;
+           insn && insn != NEXT_INSN (bb->end);
           insn = NEXT_INSN (insn))
        {
          if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
@@ -6435,7 +6446,8 @@ find_moveable_store (insn)
 static int
 compute_store_table ()
 {
-  int bb, ret;
+  int ret;
+  basic_block bb;
   unsigned regno;
   rtx insn, pat;
 
@@ -6447,11 +6459,11 @@ compute_store_table ()
   pre_ldst_mems = 0;
 
   /* Find all the stores we care about.  */
-  for (bb = 0; bb < n_basic_blocks; bb++)
+  FOR_EACH_BB (bb)
     {
-      regvec = & (reg_set_in_block[bb]);
-      for (insn = BLOCK_END (bb);
-           insn && insn != PREV_INSN (BLOCK_HEAD (bb));
+      regvec = & (reg_set_in_block[bb->index]);
+      for (insn = bb->end;
+           insn && insn != PREV_INSN (bb->end);
           insn = PREV_INSN (insn))
        {
          /* Ignore anything that is not a normal insn.  */
@@ -6470,7 +6482,7 @@ compute_store_table ()
             for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
               if (clobbers_all
                   || TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
-                 SET_BIT (reg_set_in_block[bb], regno);
+                 SET_BIT (reg_set_in_block[bb->index], regno);
           }
 
         pat = PATTERN (insn);
@@ -6636,8 +6648,7 @@ store_killed_before (x, insn, bb)
 static void
 build_store_vectors ()
 {
-  basic_block bb;
-  int b;
+  basic_block bb, b;
   rtx insn, st;
   struct ls_expr * ptr;
 
@@ -6709,9 +6720,9 @@ build_store_vectors ()
   sbitmap_vector_zero (transp, n_basic_blocks);
 
   for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
-    for (b = 0; b < n_basic_blocks; b++)
+    FOR_EACH_BB (b)
       {
-       if (store_killed_after (ptr->pattern, BLOCK_HEAD (b), BASIC_BLOCK (b)))
+       if (store_killed_after (ptr->pattern, b->head, b))
          {
            /* The anticipatable expression is not killed if it's gen'd.  */
            /*
@@ -6729,10 +6740,10 @@ build_store_vectors ()
              If we always kill it in this case, we'll sometimes do
              uneccessary work, but it shouldn't actually hurt anything.
             if (!TEST_BIT (ae_gen[b], ptr->index)).  */
-           SET_BIT (ae_kill[b], ptr->index);
+           SET_BIT (ae_kill[b->index], ptr->index);
          }
        else
-         SET_BIT (transp[b], ptr->index);
+         SET_BIT (transp[b->index], ptr->index);
       }
 
   /* Any block with no exits calls some non-returning function, so
@@ -6941,6 +6952,7 @@ free_store_memory ()
 static void
 store_motion ()
 {
+  basic_block bb;
   int x;
   struct ls_expr * ptr;
   int update_flow = 0;
@@ -6974,9 +6986,9 @@ store_motion ()
   /* Now we want to insert the new stores which are going to be needed.  */
   for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
     {
-      for (x = 0; x < n_basic_blocks; x++)
-       if (TEST_BIT (pre_delete_map[x], ptr->index))
-         delete_store (ptr, BASIC_BLOCK (x));
+      FOR_EACH_BB (bb)
+       if (TEST_BIT (pre_delete_map[bb->index], ptr->index))
+         delete_store (ptr, bb);
 
      for (x = 0; x < NUM_EDGES (edge_list); x++)
        if (TEST_BIT (pre_insert_map[x], ptr->index))
gcc/global.c
@@ -636,7 +636,8 @@ allocno_compare (v1p, v2p)
 static void
 global_conflicts ()
 {
-  int b, i;
+  int i;
+  basic_block b;
   rtx insn;
   int *block_start_allocnos;
 
@@ -645,7 +646,7 @@ global_conflicts ()
 
   block_start_allocnos = (int *) xmalloc (max_allocno * sizeof (int));
 
-  for (b = 0; b < n_basic_blocks; b++)
+  FOR_EACH_BB (b)
     {
       memset ((char *) allocnos_live, 0, allocno_row_words * sizeof (INT_TYPE));
 
@@ -664,7 +665,7 @@ global_conflicts ()
         are explicitly marked in basic_block_live_at_start.  */
 
       {
-       regset old = BASIC_BLOCK (b)->global_live_at_start;
+       regset old = b->global_live_at_start;
        int ax = 0;
 
        REG_SET_TO_HARD_REG_SET (hard_regs_live, old);
@@ -713,7 +714,7 @@ global_conflicts ()
           that is reached by an abnormal edge.  */
 
        edge e;
-       for (e = BASIC_BLOCK (b)->pred; e ; e = e->pred_next)
+       for (e = b->pred; e ; e = e->pred_next)
          if (e->flags & EDGE_ABNORMAL)
           break;
        if (e != NULL)
@@ -723,7 +724,7 @@ global_conflicts ()
 #endif
       }
 
-      insn = BLOCK_HEAD (b);
+      insn = b->head;
 
      /* Scan the code of this basic block, noting which allocnos
        and hard regs are born or die.  When one is born,
@@ -823,7 +824,7 @@ global_conflicts ()
            }
         }
 
-      if (insn == BLOCK_END (b))
+      if (insn == b->end)
        break;
      insn = NEXT_INSN (insn);
     }
@@ -1708,11 +1709,11 @@ void
 mark_elimination (from, to)
      int from, to;
 {
-  int i;
+  basic_block bb;
 
-  for (i = 0; i < n_basic_blocks; i++)
+  FOR_EACH_BB (bb)
     {
-      regset r = BASIC_BLOCK (i)->global_live_at_start;
+      regset r = bb->global_live_at_start;
       if (REGNO_REG_SET_P (r, from))
        {
          CLEAR_REGNO_REG_SET (r, from);
gcc/graph.c

@@ -258,7 +258,6 @@ print_rtl_graph_with_bb (base, suffix, rtx_first)
     fprintf (fp, "(nil)\n");
   else
     {
-      int i;
       enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
       int max_uid = get_max_uid ();
       int *start = (int *) xmalloc (max_uid * sizeof (int));
@@ -266,6 +265,7 @@ print_rtl_graph_with_bb (base, suffix, rtx_first)
       enum bb_state *in_bb_p = (enum bb_state *)
        xmalloc (max_uid * sizeof (enum bb_state));
+      basic_block bb;
+      int i;
 
       for (i = 0; i < max_uid; ++i)
        {
@@ -273,12 +273,11 @@ print_rtl_graph_with_bb (base, suffix, rtx_first)
          in_bb_p[i] = NOT_IN_BB;
        }
 
-      for (i = n_basic_blocks - 1; i >= 0; --i)
+      FOR_EACH_BB_REVERSE (bb)
        {
          rtx x;
-         bb = BASIC_BLOCK (i);
-         start[INSN_UID (bb->head)] = i;
-         end[INSN_UID (bb->end)] = i;
+         start[INSN_UID (bb->head)] = bb->index;
+         end[INSN_UID (bb->end)] = bb->index;
         for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
           {
             in_bb_p[INSN_UID (x)]
gcc/haifa-sched.c

@@ -2303,7 +2303,8 @@ void
 sched_init (dump_file)
      FILE *dump_file;
 {
-  int luid, b;
+  int luid;
+  basic_block b;
   rtx insn;
   int i;
 
@@ -2356,8 +2357,8 @@ sched_init (dump_file)
 
   h_i_d[0].luid = 0;
   luid = 1;
-  for (b = 0; b < n_basic_blocks; b++)
-    for (insn = BLOCK_HEAD (b);; insn = NEXT_INSN (insn))
+  FOR_EACH_BB (b)
+    for (insn = b->head;; insn = NEXT_INSN (insn))
      {
        INSN_LUID (insn) = luid;
 
@@ -2369,7 +2370,7 @@ sched_init (dump_file)
        if (GET_CODE (insn) != NOTE)
          ++luid;
 
-       if (insn == BLOCK_END (b))
+       if (insn == b->end)
          break;
      }
 
@@ -2391,22 +2392,22 @@ sched_init (dump_file)
         predecessor has been scheduled, it is impossible to accurately
         determine the correct line number for the first insn of the block.  */
 
-      for (b = 0; b < n_basic_blocks; b++)
+      FOR_EACH_BB (b)
        {
-         for (line = BLOCK_HEAD (b); line; line = PREV_INSN (line))
+         for (line = b->head; line; line = PREV_INSN (line))
           if (GET_CODE (line) == NOTE && NOTE_LINE_NUMBER (line) > 0)
             {
-               line_note_head[b] = line;
+               line_note_head[b->index] = line;
               break;
             }
         /* Do a forward search as well, since we won't get to see the first
           notes in a basic block.  */
-         for (line = BLOCK_HEAD (b); line; line = NEXT_INSN (line))
+         for (line = b->head; line; line = NEXT_INSN (line))
           {
             if (INSN_P (line))
               break;
             if (GET_CODE (line) == NOTE && NOTE_LINE_NUMBER (line) > 0)
-               line_note_head[b] = line;
+               line_note_head[b->index] = line;
           }
        }
     }
@@ -2420,22 +2421,22 @@ sched_init (dump_file)
   /* ??? Add a NOTE after the last insn of the last basic block.  It is not
      known why this is done.  */
 
-  insn = BLOCK_END (n_basic_blocks - 1);
+  insn = EXIT_BLOCK_PTR->prev_bb->end;
   if (NEXT_INSN (insn) == 0
       || (GET_CODE (insn) != NOTE
          && GET_CODE (insn) != CODE_LABEL
         /* Don't emit a NOTE if it would end up before a BARRIER.  */
         && GET_CODE (NEXT_INSN (insn)) != BARRIER))
     {
-      emit_note_after (NOTE_INSN_DELETED, BLOCK_END (n_basic_blocks - 1));
+      emit_note_after (NOTE_INSN_DELETED, EXIT_BLOCK_PTR->prev_bb->end);
      /* Make insn to appear outside BB.  */
-      BLOCK_END (n_basic_blocks - 1) = PREV_INSN (BLOCK_END (n_basic_blocks - 1));
+      EXIT_BLOCK_PTR->prev_bb->end = PREV_INSN (EXIT_BLOCK_PTR->prev_bb->end);
     }
 
   /* Compute INSN_REG_WEIGHT for all blocks.  We must do this before
      removing death notes.  */
-  for (b = n_basic_blocks - 1; b >= 0; b--)
-    find_insn_reg_weight (b);
+  FOR_EACH_BB_REVERSE (b)
+    find_insn_reg_weight (b->index);
 }
 
 /* Free global data used during insn scheduling.  */
gcc/ifcvt.c
@@ -2685,7 +2685,7 @@ void
 if_convert (x_life_data_ok)
      int x_life_data_ok;
 {
-  int block_num;
+  basic_block bb;
 
   num_possible_if_blocks = 0;
   num_updated_if_blocks = 0;
@@ -2707,18 +2707,13 @@ if_convert (x_life_data_ok)
   clear_bb_flags ();
 
   /* Record initial block numbers.  */
-  for (block_num = 0; block_num < n_basic_blocks; block_num++)
-    SET_ORIG_INDEX (BASIC_BLOCK (block_num), block_num);
+  FOR_EACH_BB (bb)
+    SET_ORIG_INDEX (bb, bb->index);
 
   /* Go through each of the basic blocks looking for things to convert.  */
-  for (block_num = 0; block_num < n_basic_blocks; )
-    {
-      basic_block bb = BASIC_BLOCK (block_num);
-      if (find_if_header (bb))
-       block_num = bb->index;
-      else
-       block_num++;
-    }
+  FOR_EACH_BB (bb)
+    while (find_if_header (bb))
+      continue;
 
   if (post_dominators)
     sbitmap_vector_free (post_dominators);
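The if_convert rewrite is the one place where the driver's control flow changes shape rather than just spelling. The old loop restarted scanning at the merged block's number after a successful conversion; the new one retries the same block until find_if_header stops succeeding, since a successful conversion can expose another convertible header at that block:

  basic_block bb;

  FOR_EACH_BB (bb)
    while (find_if_header (bb))
      continue;   /* retry BB: a merge may reveal another if-header */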
gcc/lcm.c
@@ -106,7 +106,7 @@ compute_antinout_edge (antloc, transp, antin, antout)
      sbitmap *antin;
      sbitmap *antout;
 {
-  int bb;
+  basic_block bb;
   edge e;
   basic_block *worklist, *qin, *qout, *qend;
   unsigned int qlen;
@@ -123,10 +123,10 @@ compute_antinout_edge (antloc, transp, antin, antout)
 
   /* Put every block on the worklist; this is necessary because of the
      optimistic initialization of ANTIN above.  */
-  for (bb = n_basic_blocks - 1; bb >= 0; bb--)
+  FOR_EACH_BB_REVERSE (bb)
     {
-      *qin++ = BASIC_BLOCK (bb);
-      BASIC_BLOCK (bb)->aux = BASIC_BLOCK (bb);
+      *qin++ = bb;
+      bb->aux = bb;
     }
 
   qin = worklist;
@@ -142,32 +142,31 @@ compute_antinout_edge (antloc, transp, antin, antout)
   while (qlen)
     {
       /* Take the first entry off the worklist.  */
-      basic_block b = *qout++;
-      bb = b->index;
+      bb = *qout++;
       qlen--;
 
       if (qout >= qend)
        qout = worklist;
 
-      if (b->aux == EXIT_BLOCK_PTR)
+      if (bb->aux == EXIT_BLOCK_PTR)
        /* Do not clear the aux field for blocks which are predecessors of
          the EXIT block.  That way we never add then to the worklist
          again.  */
-       sbitmap_zero (antout[bb]);
+       sbitmap_zero (antout[bb->index]);
       else
        {
         /* Clear the aux field of this block so that it can be added to
           the worklist again if necessary.  */
-         b->aux = NULL;
-         sbitmap_intersection_of_succs (antout[bb], antin, bb);
+         bb->aux = NULL;
+         sbitmap_intersection_of_succs (antout[bb->index], antin, bb->index);
        }
 
-      if (sbitmap_a_or_b_and_c_cg (antin[bb], antloc[bb],
-                                   transp[bb], antout[bb]))
+      if (sbitmap_a_or_b_and_c_cg (antin[bb->index], antloc[bb->index],
+                                   transp[bb->index], antout[bb->index]))
       /* If the in state of this block changed, then we need
         to add the predecessors of this block to the worklist
         if they are not already on the worklist.  */
-       for (e = b->pred; e; e = e->pred_next)
+       for (e = bb->pred; e; e = e->pred_next)
          if (!e->src->aux && e->src != ENTRY_BLOCK_PTR)
           {
             *qin++ = e->src;
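All the lcm.c workers share one structure the diff only shows in fragments: a fixed-size ring buffer of block pointers used as the worklist. A sketch of the whole discipline; the +1 sizing and ANSI-style helpers are assumptions inferred from how qin, qout and qend are used above, not quoted code:

  /* Sketch: the ring-buffer worklist shared by the lcm solvers.  */
  static basic_block *worklist, *qin, *qout, *qend;
  static unsigned int qlen;

  static void
  queue_block (basic_block bb)
  {
    *qin++ = bb;
    if (qin >= qend)
      qin = worklist;    /* wrap around */
    qlen++;
  }

  static basic_block
  dequeue_block (void)
  {
    basic_block bb = *qout++;
    if (qout >= qend)
      qout = worklist;   /* wrap around */
    qlen--;
    return bb;
  }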
@@ -263,9 +262,9 @@ compute_laterin (edge_list, earliest, antloc, later, laterin)
      struct edge_list *edge_list;
      sbitmap *earliest, *antloc, *later, *laterin;
 {
-  int bb, num_edges, i;
+  int num_edges, i;
   edge e;
-  basic_block *worklist, *qin, *qout, *qend;
+  basic_block *worklist, *qin, *qout, *qend, bb;
   unsigned int qlen;
 
   num_edges = NUM_EDGES (edge_list);
@@ -301,11 +300,10 @@ compute_laterin (edge_list, earliest, antloc, later, laterin)
 
   /* Add all the blocks to the worklist.  This prevents an early exit from
      the loop given our optimistic initialization of LATER above.  */
-  for (bb = 0; bb < n_basic_blocks; bb++)
+  FOR_EACH_BB (bb)
     {
-      basic_block b = BASIC_BLOCK (bb);
-      *qin++ = b;
-      b->aux = b;
+      *qin++ = bb;
+      bb->aux = bb;
     }
   qin = worklist;
   /* Note that we do not use the last allocated element for our queue,
@@ -318,20 +316,19 @@ compute_laterin (edge_list, earliest, antloc, later, laterin)
   while (qlen)
     {
       /* Take the first entry off the worklist.  */
-      basic_block b = *qout++;
-      b->aux = NULL;
+      bb = *qout++;
+      bb->aux = NULL;
       qlen--;
       if (qout >= qend)
        qout = worklist;
 
      /* Compute the intersection of LATERIN for each incoming edge to B.  */
-      bb = b->index;
-      sbitmap_ones (laterin[bb]);
-      for (e = b->pred; e != NULL; e = e->pred_next)
-       sbitmap_a_and_b (laterin[bb], laterin[bb], later[(size_t)e->aux]);
+      sbitmap_ones (laterin[bb->index]);
+      for (e = bb->pred; e != NULL; e = e->pred_next)
+       sbitmap_a_and_b (laterin[bb->index], laterin[bb->index], later[(size_t)e->aux]);
 
      /* Calculate LATER for all outgoing edges.  */
-      for (e = b->succ; e != NULL; e = e->succ_next)
+      for (e = bb->succ; e != NULL; e = e->succ_next)
        if (sbitmap_union_of_diff_cg (later[(size_t) e->aux],
                                      earliest[(size_t) e->aux],
                                      laterin[e->src->index],
@@ -370,9 +367,10 @@ compute_insert_delete (edge_list, antloc, later, laterin,
      sbitmap *antloc, *later, *laterin, *insert, *delete;
 {
   int x;
+  basic_block bb;
 
-  for (x = 0; x < n_basic_blocks; x++)
-    sbitmap_difference (delete[x], antloc[x], laterin[x]);
+  FOR_EACH_BB (bb)
+    sbitmap_difference (delete[bb->index], antloc[bb->index], laterin[bb->index]);
 
   for (x = 0; x < NUM_EDGES (edge_list); x++)
     {
@@ -496,9 +494,8 @@ void
 compute_available (avloc, kill, avout, avin)
      sbitmap *avloc, *kill, *avout, *avin;
 {
-  int bb;
   edge e;
-  basic_block *worklist, *qin, *qout, *qend;
+  basic_block *worklist, *qin, *qout, *qend, bb;
   unsigned int qlen;
 
   /* Allocate a worklist array/queue.  Entries are only added to the
@@ -512,10 +509,10 @@ compute_available (avloc, kill, avout, avin)
 
   /* Put every block on the worklist; this is necessary because of the
      optimistic initialization of AVOUT above.  */
-  for (bb = 0; bb < n_basic_blocks; bb++)
+  FOR_EACH_BB (bb)
     {
-      *qin++ = BASIC_BLOCK (bb);
-      BASIC_BLOCK (bb)->aux = BASIC_BLOCK (bb);
+      *qin++ = bb;
+      bb->aux = bb;
     }
 
   qin = worklist;
@@ -531,8 +528,7 @@ compute_available (avloc, kill, avout, avin)
   while (qlen)
     {
       /* Take the first entry off the worklist.  */
-      basic_block b = *qout++;
-      bb = b->index;
+      bb = *qout++;
       qlen--;
 
       if (qout >= qend)
@@ -541,23 +537,23 @@ compute_available (avloc, kill, avout, avin)
       /* If one of the predecessor blocks is the ENTRY block, then the
         intersection of avouts is the null set.  We can identify such blocks
        by the special value in the AUX field in the block structure.  */
-      if (b->aux == ENTRY_BLOCK_PTR)
+      if (bb->aux == ENTRY_BLOCK_PTR)
       /* Do not clear the aux field for blocks which are successors of the
         ENTRY block.  That way we never add then to the worklist again.  */
-       sbitmap_zero (avin[bb]);
+       sbitmap_zero (avin[bb->index]);
       else
        {
         /* Clear the aux field of this block so that it can be added to
           the worklist again if necessary.  */
-         b->aux = NULL;
-         sbitmap_intersection_of_preds (avin[bb], avout, bb);
+         bb->aux = NULL;
+         sbitmap_intersection_of_preds (avin[bb->index], avout, bb->index);
        }
 
-      if (sbitmap_union_of_diff_cg (avout[bb], avloc[bb], avin[bb], kill[bb]))
+      if (sbitmap_union_of_diff_cg (avout[bb->index], avloc[bb->index], avin[bb->index], kill[bb->index]))
      /* If the out state of this block changed, then we need
        to add the successors of this block to the worklist
        if they are not already on the worklist.  */
-       for (e = b->succ; e; e = e->succ_next)
+       for (e = bb->succ; e; e = e->succ_next)
          if (!e->dest->aux && e->dest != EXIT_BLOCK_PTR)
           {
             *qin++ = e->dest;
@@ -627,9 +623,9 @@ compute_nearerout (edge_list, farthest, st_avloc, nearer, nearerout)
      struct edge_list *edge_list;
      sbitmap *farthest, *st_avloc, *nearer, *nearerout;
 {
-  int bb, num_edges, i;
+  int num_edges, i;
   edge e;
-  basic_block *worklist, *tos;
+  basic_block *worklist, *tos, bb;
 
   num_edges = NUM_EDGES (edge_list);
 
@@ -656,29 +652,27 @@ compute_nearerout (edge_list, farthest, st_avloc, nearer, nearerout)
 
   /* Add all the blocks to the worklist.  This prevents an early exit
      from the loop given our optimistic initialization of NEARER.  */
-  for (bb = 0; bb < n_basic_blocks; bb++)
+  FOR_EACH_BB (bb)
     {
-      basic_block b = BASIC_BLOCK (bb);
-      *tos++ = b;
-      b->aux = b;
+      *tos++ = bb;
+      bb->aux = bb;
    }
 
  /* Iterate until the worklist is empty.  */
  while (tos != worklist)
    {
      /* Take the first entry off the worklist.  */
-      basic_block b = *--tos;
-      b->aux = NULL;
+      bb = *--tos;
+      bb->aux = NULL;
 
      /* Compute the intersection of NEARER for each outgoing edge from B.  */
-      bb = b->index;
-      sbitmap_ones (nearerout[bb]);
-      for (e = b->succ; e != NULL; e = e->succ_next)
-       sbitmap_a_and_b (nearerout[bb], nearerout[bb],
+      sbitmap_ones (nearerout[bb->index]);
+      for (e = bb->succ; e != NULL; e = e->succ_next)
+       sbitmap_a_and_b (nearerout[bb->index], nearerout[bb->index],
                         nearer[(size_t) e->aux]);
 
      /* Calculate NEARER for all incoming edges.  */
-      for (e = b->pred; e != NULL; e = e->pred_next)
+      for (e = bb->pred; e != NULL; e = e->pred_next)
        if (sbitmap_union_of_diff_cg (nearer[(size_t) e->aux],
                                      farthest[(size_t) e->aux],
                                      nearerout[e->dest->index],
@ -714,9 +708,10 @@ compute_rev_insert_delete (edge_list, st_avloc, nearer, nearerout,
|
||||
sbitmap *st_avloc, *nearer, *nearerout, *insert, *delete;
|
||||
{
|
||||
int x;
|
||||
basic_block bb;
|
||||
|
||||
for (x = 0; x < n_basic_blocks; x++)
|
||||
sbitmap_difference (delete[x], st_avloc[x], nearerout[x]);
|
||||
FOR_EACH_BB (bb)
|
||||
sbitmap_difference (delete[bb->index], st_avloc[bb->index], nearerout[bb->index]);
|
||||
|
||||
for (x = 0; x < NUM_EDGES (edge_list); x++)
|
||||
{
|
||||
@ -1019,7 +1014,8 @@ optimize_mode_switching (file)
|
||||
FILE *file;
|
||||
{
|
||||
rtx insn;
|
||||
int bb, e;
|
||||
int e;
|
||||
basic_block bb;
|
||||
int need_commit = 0;
|
||||
sbitmap *kill;
|
||||
struct edge_list *edge_list;
|
||||
@ -1087,16 +1083,16 @@ optimize_mode_switching (file)
|
||||
/* Determine what the first use (if any) need for a mode of entity E is.
|
||||
This will be the mode that is anticipatable for this block.
|
||||
Also compute the initial transparency settings. */
|
||||
for (bb = 0 ; bb < n_basic_blocks; bb++)
|
||||
FOR_EACH_BB (bb)
|
||||
{
|
||||
struct seginfo *ptr;
|
||||
int last_mode = no_mode;
|
||||
HARD_REG_SET live_now;
|
||||
|
||||
REG_SET_TO_HARD_REG_SET (live_now,
|
||||
BASIC_BLOCK (bb)->global_live_at_start);
|
||||
for (insn = BLOCK_HEAD (bb);
|
||||
insn != NULL && insn != NEXT_INSN (BLOCK_END (bb));
|
||||
bb->global_live_at_start);
|
||||
for (insn = bb->head;
|
||||
insn != NULL && insn != NEXT_INSN (bb->end);
|
||||
insn = NEXT_INSN (insn))
|
||||
{
|
||||
if (INSN_P (insn))
|
||||
@ -1107,9 +1103,9 @@ optimize_mode_switching (file)
|
||||
if (mode != no_mode && mode != last_mode)
|
||||
{
|
||||
last_mode = mode;
|
||||
ptr = new_seginfo (mode, insn, bb, live_now);
|
||||
add_seginfo (info + bb, ptr);
|
||||
RESET_BIT (transp[bb], j);
|
||||
ptr = new_seginfo (mode, insn, bb->index, live_now);
|
||||
add_seginfo (info + bb->index, ptr);
|
||||
RESET_BIT (transp[bb->index], j);
|
||||
}
|
||||
|
||||
/* Update LIVE_NOW. */
|
||||
@ -1124,12 +1120,12 @@ optimize_mode_switching (file)
|
||||
}
|
||||
}
|
||||
|
||||
info[bb].computing = last_mode;
|
||||
info[bb->index].computing = last_mode;
|
||||
/* Check for blocks without ANY mode requirements. */
|
||||
if (last_mode == no_mode)
|
||||
{
|
||||
ptr = new_seginfo (no_mode, insn, bb, live_now);
|
||||
add_seginfo (info + bb, ptr);
|
||||
ptr = new_seginfo (no_mode, insn, bb->index, live_now);
|
||||
add_seginfo (info + bb->index, ptr);
|
||||
}
|
||||
}
|
||||
#ifdef NORMAL_MODE
|
||||
@ -1142,13 +1138,13 @@ optimize_mode_switching (file)
|
||||
|
||||
for (eg = ENTRY_BLOCK_PTR->succ; eg; eg = eg->succ_next)
|
||||
{
|
||||
bb = eg->dest->index;
|
||||
bb = eg->dest;
|
||||
|
||||
/* By always making this nontransparent, we save
|
||||
an extra check in make_preds_opaque. We also
|
||||
need this to avoid confusing pre_edge_lcm when
|
||||
antic is cleared but transp and comp are set. */
|
||||
RESET_BIT (transp[bb], j);
|
||||
RESET_BIT (transp[bb->index], j);
|
||||
|
||||
/* If the block already has MODE, pretend it
|
||||
has none (because we don't need to set it),
|
||||
@ -1166,7 +1162,7 @@ optimize_mode_switching (file)
|
||||
}
|
||||
}
|
||||
|
||||
bb = n_basic_blocks - 1;
|
||||
bb = EXIT_BLOCK_PTR;
|
||||
info[bb].seginfo->mode = mode;
|
||||
}
|
||||
}
|
||||
@ -1186,21 +1182,21 @@ optimize_mode_switching (file)
|
||||
int m = current_mode[j] = MODE_PRIORITY_TO_MODE (entity_map[j], i);
|
||||
struct bb_info *info = bb_info[j];
|
||||
|
||||
for (bb = 0 ; bb < n_basic_blocks; bb++)
|
||||
FOR_EACH_BB (bb)
|
||||
{
|
||||
if (info[bb].seginfo->mode == m)
|
||||
SET_BIT (antic[bb], j);
|
||||
if (info[bb->index].seginfo->mode == m)
|
||||
SET_BIT (antic[bb->index], j);
|
||||
|
||||
if (info[bb].computing == m)
|
||||
SET_BIT (comp[bb], j);
|
||||
if (info[bb->index].computing == m)
|
||||
SET_BIT (comp[bb->index], j);
|
||||
}
|
||||
}
|
||||
|
||||
/* Calculate the optimal locations for the
|
||||
placement mode switches to modes with priority I. */
|
||||
|
||||
for (bb = n_basic_blocks - 1; bb >= 0; bb--)
|
||||
sbitmap_not (kill[bb], transp[bb]);
|
||||
FOR_EACH_BB (bb)
|
||||
sbitmap_not (kill[bb->index], transp[bb->index]);
|
||||
edge_list = pre_edge_lcm (file, 1, transp, comp, antic,
|
||||
kill, &insert, &delete);
|
||||
|
||||
@ -1279,12 +1275,12 @@ optimize_mode_switching (file)
|
||||
}
|
||||
}
|
||||
|
||||
for (bb = n_basic_blocks - 1; bb >= 0; bb--)
|
||||
if (TEST_BIT (delete[bb], j))
|
||||
FOR_EACH_BB_REVERSE (bb)
|
||||
if (TEST_BIT (delete[bb->index], j))
|
||||
{
|
||||
make_preds_opaque (BASIC_BLOCK (bb), j);
|
||||
make_preds_opaque (bb, j);
|
||||
/* Cancel the 'deleted' mode set. */
|
||||
bb_info[j][bb].seginfo->mode = no_mode;
|
||||
bb_info[j][bb->index].seginfo->mode = no_mode;
|
||||
}
|
||||
}
|
||||
|
||||
@ -1349,10 +1345,10 @@ optimize_mode_switching (file)
|
||||
}
|
||||
#endif
|
||||
|
||||
for (bb = n_basic_blocks - 1; bb >= 0; bb--)
|
||||
FOR_EACH_BB_REVERSE (bb)
|
||||
{
|
||||
struct seginfo *ptr, *next;
|
||||
for (ptr = bb_info[j][bb].seginfo; ptr; ptr = next)
|
||||
for (ptr = bb_info[j][bb->index].seginfo; ptr; ptr = next)
|
||||
{
|
||||
next = ptr->next;
|
||||
if (ptr->mode != no_mode)
|
||||
|
gcc/local-alloc.c

@@ -336,8 +336,9 @@ alloc_qty (regno, mode, size, birth)
int
local_alloc ()
{
int b, i;
int i;
int max_qty;
basic_block b;

/* We need to keep track of whether or not we recorded a LABEL_REF so
that we know if the jump optimizer needs to be rerun. */
@@ -394,7 +395,7 @@ local_alloc ()

/* Allocate each block's local registers, block by block. */

for (b = 0; b < n_basic_blocks; b++)
FOR_EACH_BB (b)
{
/* NEXT_QTY indicates which elements of the `qty_...'
vectors might need to be initialized because they were used
@@ -426,7 +427,7 @@ local_alloc ()

next_qty = 0;

block_alloc (b);
block_alloc (b->index);
}

free (qty);
@@ -815,7 +816,7 @@ static void
update_equiv_regs ()
{
rtx insn;
int block;
basic_block bb;
int loop_depth;
regset_head cleared_regs;
int clear_regnos = 0;
@@ -828,9 +829,8 @@ update_equiv_regs ()
/* Scan the insns and find which registers have equivalences. Do this
in a separate scan of the insns because (due to -fcse-follow-jumps)
a register can be set below its use. */
for (block = 0; block < n_basic_blocks; block++)
FOR_EACH_BB (bb)
{
basic_block bb = BASIC_BLOCK (block);
loop_depth = bb->loop_depth;

for (insn = bb->head; insn != NEXT_INSN (bb->end); insn = NEXT_INSN (insn))
@@ -1044,10 +1044,8 @@ update_equiv_regs ()
within the same loop (or in an inner loop), then move the register
initialization just before the use, so that they are in the same
basic block. */
for (block = n_basic_blocks - 1; block >= 0; block--)
FOR_EACH_BB_REVERSE (bb)
{
basic_block bb = BASIC_BLOCK (block);

loop_depth = bb->loop_depth;
for (insn = bb->end; insn != PREV_INSN (bb->head); insn = PREV_INSN (insn))
{
@@ -1139,12 +1137,12 @@ update_equiv_regs ()

XEXP (reg_equiv[regno].init_insns, 0) = new_insn;

REG_BASIC_BLOCK (regno) = block >= 0 ? block : 0;
REG_BASIC_BLOCK (regno) = bb->index;
REG_N_CALLS_CROSSED (regno) = 0;
REG_LIVE_LENGTH (regno) = 2;

if (block >= 0 && insn == BLOCK_HEAD (block))
BLOCK_HEAD (block) = PREV_INSN (insn);
if (insn == bb->head)
bb->head = PREV_INSN (insn);

/* Remember to clear REGNO from all basic block's live
info. */
@@ -1159,24 +1157,22 @@ update_equiv_regs ()
/* Clear all dead REGNOs from all basic block's live info. */
if (clear_regnos)
{
int j, l;
int j;
if (clear_regnos > 8)
{
for (l = 0; l < n_basic_blocks; l++)
FOR_EACH_BB (bb)
{
AND_COMPL_REG_SET (BASIC_BLOCK (l)->global_live_at_start,
&cleared_regs);
AND_COMPL_REG_SET (BASIC_BLOCK (l)->global_live_at_end,
&cleared_regs);
AND_COMPL_REG_SET (bb->global_live_at_start, &cleared_regs);
AND_COMPL_REG_SET (bb->global_live_at_end, &cleared_regs);
}
}
else
EXECUTE_IF_SET_IN_REG_SET (&cleared_regs, 0, j,
{
for (l = 0; l < n_basic_blocks; l++)
FOR_EACH_BB (bb)
{
CLEAR_REGNO_REG_SET (BASIC_BLOCK (l)->global_live_at_start, j);
CLEAR_REGNO_REG_SET (BASIC_BLOCK (l)->global_live_at_end, j);
CLEAR_REGNO_REG_SET (bb->global_live_at_start, j);
CLEAR_REGNO_REG_SET (bb->global_live_at_end, j);
}
});
}

gcc/predict.c

@@ -409,6 +409,7 @@ estimate_probability (loops_info)
struct loops *loops_info;
{
sbitmap *dominators, *post_dominators;
basic_block bb;
int i;

dominators = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
@@ -420,15 +421,14 @@ estimate_probability (loops_info)
natural loop. */
for (i = 0; i < loops_info->num; i++)
{
int j;
int exits;
struct loop *loop = &loops_info->array[i];

flow_loop_scan (loops_info, loop, LOOP_EXIT_EDGES);
exits = loop->num_exits;

for (j = loop->first->index; j <= loop->last->index; ++j)
if (TEST_BIT (loop->nodes, j))
FOR_BB_BETWEEN (bb, loop->first, loop->last->next_bb, next_bb)
if (TEST_BIT (loop->nodes, bb->index))
{
int header_found = 0;
edge e;
@@ -437,12 +437,12 @@ estimate_probability (loops_info)
statements construct loops via "non-loop" constructs
in the source language and are better to be handled
separately. */
if (predicted_by_p (BASIC_BLOCK (j), PRED_CONTINUE))
if (predicted_by_p (bb, PRED_CONTINUE))
continue;

/* Loop branch heuristics - predict an edge back to a
loop's head as taken. */
for (e = BASIC_BLOCK(j)->succ; e; e = e->succ_next)
for (e = bb->succ; e; e = e->succ_next)
if (e->dest == loop->header
&& e->src == loop->latch)
{
@@ -453,7 +453,7 @@ estimate_probability (loops_info)
/* Loop exit heuristics - predict an edge exiting the loop if the
conditinal has no loop header successors as not taken. */
if (!header_found)
for (e = BASIC_BLOCK(j)->succ; e; e = e->succ_next)
for (e = bb->succ; e; e = e->succ_next)
if (e->dest->index < 0
|| !TEST_BIT (loop->nodes, e->dest->index))
predict_edge
@@ -465,9 +465,8 @@ estimate_probability (loops_info)
}

/* Attempt to predict conditional jumps using a number of heuristics. */
for (i = 0; i < n_basic_blocks; i++)
FOR_EACH_BB (bb)
{
basic_block bb = BASIC_BLOCK (i);
rtx last_insn = bb->end;
rtx cond, earliest;
edge e;
@@ -604,11 +603,11 @@ estimate_probability (loops_info)
}

/* Attach the combined probability to each conditional jump. */
for (i = 0; i < n_basic_blocks; i++)
if (GET_CODE (BLOCK_END (i)) == JUMP_INSN
&& any_condjump_p (BLOCK_END (i))
&& BASIC_BLOCK (i)->succ->succ_next != NULL)
combine_predictions_for_insn (BLOCK_END (i), BASIC_BLOCK (i));
FOR_EACH_BB (bb)
if (GET_CODE (bb->end) == JUMP_INSN
&& any_condjump_p (bb->end)
&& bb->succ->succ_next != NULL)
combine_predictions_for_insn (bb->end, bb);

sbitmap_vector_free (post_dominators);
sbitmap_vector_free (dominators);
@@ -834,7 +833,7 @@ process_note_predictions (bb, heads, dominators, post_dominators)
void
note_prediction_to_br_prob ()
{
int i;
basic_block bb;
sbitmap *post_dominators;
int *dominators, *heads;

@@ -854,11 +853,8 @@ note_prediction_to_br_prob ()

/* Process all prediction notes. */

for (i = 0; i < n_basic_blocks; ++i)
{
basic_block bb = BASIC_BLOCK (i);
process_note_predictions (bb, heads, dominators, post_dominators);
}
FOR_EACH_BB (bb)
process_note_predictions (bb, heads, dominators, post_dominators);

sbitmap_vector_free (post_dominators);
free (dominators);
@@ -906,17 +902,15 @@ static void
propagate_freq (head)
basic_block head;
{
basic_block bb = head;
basic_block last = bb;
basic_block bb;
basic_block last;
edge e;
basic_block nextbb;
int n;

/* For each basic block we need to visit count number of his predecessors
we need to visit first. */
for (n = 0; n < n_basic_blocks; n++)
FOR_EACH_BB (bb)
{
basic_block bb = BASIC_BLOCK (n);
if (BLOCK_INFO (bb)->tovisit)
{
int count = 0;
@@ -934,7 +928,8 @@ propagate_freq (head)
}

memcpy (&BLOCK_INFO (head)->frequency, &real_one, sizeof (real_one));
for (; bb; bb = nextbb)
last = head;
for (bb = head; bb; bb = nextbb)
{
REAL_VALUE_TYPE cyclic_probability, frequency;

@@ -1077,24 +1072,13 @@ static void
counts_to_freqs ()
{
HOST_WIDEST_INT count_max = 1;
int i;
basic_block bb;

for (i = 0; i < n_basic_blocks; i++)
count_max = MAX (BASIC_BLOCK (i)->count, count_max);
FOR_EACH_BB (bb)
count_max = MAX (bb->count, count_max);

for (i = -2; i < n_basic_blocks; i++)
{
basic_block bb;

if (i == -2)
bb = ENTRY_BLOCK_PTR;
else if (i == -1)
bb = EXIT_BLOCK_PTR;
else
bb = BASIC_BLOCK (i);

bb->frequency = (bb->count * BB_FREQ_MAX + count_max / 2) / count_max;
}
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
bb->frequency = (bb->count * BB_FREQ_MAX + count_max / 2) / count_max;
}

/* Return true if function is likely to be expensive, so there is no point to
@@ -1107,7 +1091,7 @@ expensive_function_p (threshold)
int threshold;
{
unsigned int sum = 0;
int i;
basic_block bb;
unsigned int limit;

/* We can not compute accurately for large thresholds due to scaled
@@ -1123,9 +1107,8 @@ expensive_function_p (threshold)

/* Maximally BB_FREQ_MAX^2 so overflow won't happen. */
limit = ENTRY_BLOCK_PTR->frequency * threshold;
for (i = 0; i < n_basic_blocks; i++)
FOR_EACH_BB (bb)
{
basic_block bb = BASIC_BLOCK (i);
rtx insn;

for (insn = bb->head; insn != NEXT_INSN (bb->end);
@@ -1147,7 +1130,7 @@ static void
estimate_bb_frequencies (loops)
struct loops *loops;
{
int i;
basic_block bb;
REAL_VALUE_TYPE freq_max;
enum machine_mode double_mode = TYPE_MODE (double_type_node);

@@ -1169,13 +1152,13 @@ estimate_bb_frequencies (loops)
mark_dfs_back_edges ();
/* Fill in the probability values in flowgraph based on the REG_BR_PROB
notes. */
for (i = 0; i < n_basic_blocks; i++)
FOR_EACH_BB (bb)
{
rtx last_insn = BLOCK_END (i);
rtx last_insn = bb->end;

if (GET_CODE (last_insn) != JUMP_INSN || !any_condjump_p (last_insn)
/* Avoid handling of conditional jumps jumping to fallthru edge. */
|| BASIC_BLOCK (i)->succ->succ_next == NULL)
|| bb->succ->succ_next == NULL)
{
/* We can predict only conditional jumps at the moment.
Expect each edge to be equally probable.
@@ -1183,14 +1166,14 @@ estimate_bb_frequencies (loops)
int nedges = 0;
edge e;

for (e = BASIC_BLOCK (i)->succ; e; e = e->succ_next)
for (e = bb->succ; e; e = e->succ_next)
{
nedges++;
if (e->probability != 0)
break;
}
if (!e)
for (e = BASIC_BLOCK (i)->succ; e; e = e->succ_next)
for (e = bb->succ; e; e = e->succ_next)
e->probability = (REG_BR_PROB_BASE + nedges / 2) / nedges;
}
}
@@ -1200,22 +1183,13 @@ estimate_bb_frequencies (loops)
/* Set up block info for each basic block. */
alloc_aux_for_blocks (sizeof (struct block_info_def));
alloc_aux_for_edges (sizeof (struct edge_info_def));
for (i = -2; i < n_basic_blocks; i++)
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
{
edge e;
basic_block bb;

if (i == -2)
bb = ENTRY_BLOCK_PTR;
else if (i == -1)
bb = EXIT_BLOCK_PTR;
else
bb = BASIC_BLOCK (i);

BLOCK_INFO (bb)->tovisit = 0;
for (e = bb->succ; e; e = e->succ_next)
{

REAL_VALUE_FROM_INT (EDGE_INFO (e)->back_edge_prob,
e->probability, 0, double_mode);
REAL_ARITHMETIC (EDGE_INFO (e)->back_edge_prob,
@@ -1229,32 +1203,24 @@ estimate_bb_frequencies (loops)
estimate_loops_at_level (loops->tree_root);

/* Now fake loop around whole function to finalize probabilities. */
for (i = 0; i < n_basic_blocks; i++)
BLOCK_INFO (BASIC_BLOCK (i))->tovisit = 1;
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
BLOCK_INFO (bb)->tovisit = 1;

BLOCK_INFO (ENTRY_BLOCK_PTR)->tovisit = 1;
BLOCK_INFO (EXIT_BLOCK_PTR)->tovisit = 1;
propagate_freq (ENTRY_BLOCK_PTR);

memcpy (&freq_max, &real_zero, sizeof (real_zero));
for (i = 0; i < n_basic_blocks; i++)
FOR_EACH_BB (bb)
if (REAL_VALUES_LESS
(freq_max, BLOCK_INFO (BASIC_BLOCK (i))->frequency))
memcpy (&freq_max, &BLOCK_INFO (BASIC_BLOCK (i))->frequency,
(freq_max, BLOCK_INFO (bb)->frequency))
memcpy (&freq_max, &BLOCK_INFO (bb)->frequency,
sizeof (freq_max));

for (i = -2; i < n_basic_blocks; i++)
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
{
basic_block bb;
REAL_VALUE_TYPE tmp;

if (i == -2)
bb = ENTRY_BLOCK_PTR;
else if (i == -1)
bb = EXIT_BLOCK_PTR;
else
bb = BASIC_BLOCK (i);

REAL_ARITHMETIC (tmp, MULT_EXPR, BLOCK_INFO (bb)->frequency,
real_bb_freq_max);
REAL_ARITHMETIC (tmp, RDIV_EXPR, tmp, freq_max);
@@ -1274,14 +1240,14 @@ estimate_bb_frequencies (loops)
static void
compute_function_frequency ()
{
int i;
basic_block bb;

if (!profile_info.count_profiles_merged
|| !flag_branch_probabilities)
return;
cfun->function_frequency = FUNCTION_FREQUENCY_UNLIKELY_EXECUTED;
for (i = 0; i < n_basic_blocks; i++)
FOR_EACH_BB (bb)
{
basic_block bb = BASIC_BLOCK (i);
if (maybe_hot_bb_p (bb))
{
cfun->function_frequency = FUNCTION_FREQUENCY_HOT;

gcc/profile.c

@@ -137,14 +137,13 @@ static void
instrument_edges (el)
struct edge_list *el;
{
int i;
int num_instr_edges = 0;
int num_edges = NUM_EDGES (el);
basic_block bb;
remove_fake_edges ();

for (i = 0; i < n_basic_blocks + 2; i++)
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
{
basic_block bb = GCOV_INDEX_TO_BB (i);
edge e = bb->succ;
while (e)
{
@@ -216,8 +215,8 @@ static gcov_type *
get_exec_counts ()
{
int num_edges = 0;
int i;
int okay = 1;
basic_block bb;
int okay = 1, i;
int mismatch = 0;
gcov_type *profile;
char *function_name_buffer;
@@ -233,15 +232,12 @@ get_exec_counts ()

/* Count the edges to be (possibly) instrumented. */

for (i = 0; i < n_basic_blocks + 2; i++)
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
{
basic_block bb = GCOV_INDEX_TO_BB (i);
edge e;
for (e = bb->succ; e; e = e->succ_next)
if (!EDGE_INFO (e)->ignore && !EDGE_INFO (e)->on_tree)
{
num_edges++;
}
num_edges++;
}

/* now read and combine all matching profiles. */
@@ -382,6 +378,7 @@ get_exec_counts ()
static void
compute_branch_probabilities ()
{
basic_block bb;
int i;
int num_edges = 0;
int changes;
@@ -395,9 +392,8 @@ compute_branch_probabilities ()
/* Attach extra info block to each bb. */

alloc_aux_for_blocks (sizeof (struct bb_info));
for (i = 0; i < n_basic_blocks + 2; i++)
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
{
basic_block bb = GCOV_INDEX_TO_BB (i);
edge e;

for (e = bb->succ; e; e = e->succ_next)
@@ -418,9 +414,8 @@ compute_branch_probabilities ()
/* The first count in the .da file is the number of times that the function
was entered. This is the exec_count for block zero. */

for (i = 0; i < n_basic_blocks + 2; i++)
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
{
basic_block bb = GCOV_INDEX_TO_BB (i);
edge e;
for (e = bb->succ; e; e = e->succ_next)
if (!EDGE_INFO (e)->ignore && !EDGE_INFO (e)->on_tree)
@@ -472,9 +467,8 @@ compute_branch_probabilities ()
{
passes++;
changes = 0;
for (i = n_basic_blocks + 1; i >= 0; i--)
FOR_BB_BETWEEN (bb, EXIT_BLOCK_PTR, NULL, prev_bb)
{
basic_block bb = GCOV_INDEX_TO_BB (i);
struct bb_info *bi = BB_INFO (bb);
if (! bi->count_valid)
{
@@ -569,9 +563,8 @@ compute_branch_probabilities ()

/* If the graph has been correctly solved, every block will have a
succ and pred count of zero. */
for (i = 0; i < n_basic_blocks; i++)
FOR_EACH_BB (bb)
{
basic_block bb = BASIC_BLOCK (i);
if (BB_INFO (bb)->succ_count || BB_INFO (bb)->pred_count)
abort ();
}
@@ -584,9 +577,8 @@ compute_branch_probabilities ()
num_never_executed = 0;
num_branches = 0;

for (i = 0; i <= n_basic_blocks + 1; i++)
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
{
basic_block bb = GCOV_INDEX_TO_BB (i);
edge e;
gcov_type total;
rtx note;
@@ -702,12 +694,10 @@ static long
compute_checksum ()
{
long chsum = 0;
int i;
basic_block bb;


for (i = 0; i < n_basic_blocks ; i++)
FOR_EACH_BB (bb)
{
basic_block bb = BASIC_BLOCK (i);
edge e;

for (e = bb->succ; e; e = e->succ_next)
@@ -740,6 +730,7 @@ compute_checksum ()
void
branch_prob ()
{
basic_block bb;
int i;
int num_edges, ignored_edges;
struct edge_list *el;
@@ -768,11 +759,10 @@ branch_prob ()
We also add fake exit edges for each call and asm statement in the
basic, since it may not return. */

for (i = 0; i < n_basic_blocks ; i++)
FOR_EACH_BB (bb)
{
int need_exit_edge = 0, need_entry_edge = 0;
int have_exit_edge = 0, have_entry_edge = 0;
basic_block bb = BASIC_BLOCK (i);
rtx insn;
edge e;

@@ -797,7 +787,7 @@ branch_prob ()
{
/* We should not get abort here, as call to setjmp should not
be the very first instruction of function. */
if (!i)
if (bb == ENTRY_BLOCK_PTR)
abort ();
make_edge (ENTRY_BLOCK_PTR, bb, EDGE_FAKE);
}
@@ -864,10 +854,8 @@ branch_prob ()
GCOV utility. */
if (flag_test_coverage)
{
int i = 0;
for (i = 0 ; i < n_basic_blocks; i++)
FOR_EACH_BB (bb)
{
basic_block bb = BASIC_BLOCK (i);
rtx insn = bb->head;
static int ignore_next_note = 0;

@@ -976,9 +964,8 @@ branch_prob ()
__write_long (n_basic_blocks + 2, bbg_file, 4);
__write_long (num_edges - ignored_edges + 1, bbg_file, 4);

for (i = 0; i < n_basic_blocks + 1; i++)
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
{
basic_block bb = GCOV_INDEX_TO_BB (i);
edge e;
long count = 0;

@@ -1088,12 +1075,11 @@ find_spanning_tree (el)
{
int i;
int num_edges = NUM_EDGES (el);
basic_block bb;

/* We use aux field for standard union-find algorithm. */
EXIT_BLOCK_PTR->aux = EXIT_BLOCK_PTR;
ENTRY_BLOCK_PTR->aux = ENTRY_BLOCK_PTR;
for (i = 0; i < n_basic_blocks; i++)
BASIC_BLOCK (i)->aux = BASIC_BLOCK (i);
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
bb->aux = bb;

/* Add fake edge exit to entry we can't instrument. */
union_groups (EXIT_BLOCK_PTR, ENTRY_BLOCK_PTR);
@@ -1149,10 +1135,8 @@ find_spanning_tree (el)
}
}

EXIT_BLOCK_PTR->aux = NULL;
ENTRY_BLOCK_PTR->aux = NULL;
for (i = 0; i < n_basic_blocks; i++)
BASIC_BLOCK (i)->aux = NULL;
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
bb->aux = NULL;
}

/* Perform file-level initialization for branch-prob processing. */

gcc/recog.c

@@ -2727,15 +2727,14 @@ split_all_insns (upd_life)
{
sbitmap blocks;
int changed;
int i;
basic_block bb;

blocks = sbitmap_alloc (n_basic_blocks);
sbitmap_zero (blocks);
changed = 0;

for (i = n_basic_blocks - 1; i >= 0; --i)
FOR_EACH_BB_REVERSE (bb)
{
basic_block bb = BASIC_BLOCK (i);
rtx insn, next;
bool finish = false;

@@ -2756,7 +2755,7 @@ split_all_insns (upd_life)

while (GET_CODE (last) == BARRIER)
last = PREV_INSN (last);
SET_BIT (blocks, i);
SET_BIT (blocks, bb->index);
changed = 1;
insn = last;
}
@@ -2999,7 +2998,8 @@ peephole2_optimize (dump_file)
regset_head rs_heads[MAX_INSNS_PER_PEEP2 + 2];
rtx insn, prev;
regset live;
int i, b;
int i;
basic_block bb;
#ifdef HAVE_conditional_execution
sbitmap blocks;
bool changed;
@@ -3020,9 +3020,8 @@ peephole2_optimize (dump_file)
count_or_remove_death_notes (NULL, 1);
#endif

for (b = n_basic_blocks - 1; b >= 0; --b)
FOR_EACH_BB_REVERSE (bb)
{
basic_block bb = BASIC_BLOCK (b);
struct propagate_block_info *pbi;

/* Indicate that all slots except the last holds invalid data. */
@@ -3201,7 +3200,7 @@ peephole2_optimize (dump_file)
death data structures are not so self-contained.
So record that we've made a modification to this
block and update life information at the end. */
SET_BIT (blocks, b);
SET_BIT (blocks, bb->index);
changed = true;

for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
gcc/reg-stack.c

@@ -418,6 +418,7 @@ reg_to_stack (first, file)
rtx first;
FILE *file;
{
basic_block bb;
int i;
int max_uid;

@@ -451,10 +452,9 @@ reg_to_stack (first, file)

/* Set up block info for each basic block. */
alloc_aux_for_blocks (sizeof (struct block_info_def));
for (i = n_basic_blocks - 1; i >= 0; --i)
FOR_EACH_BB_REVERSE (bb)
{
edge e;
basic_block bb = BASIC_BLOCK (i);
for (e = bb->pred; e; e=e->pred_next)
if (!(e->flags & EDGE_DFS_BACK)
&& e->src != ENTRY_BLOCK_PTR)
@@ -2380,12 +2380,12 @@ print_stack (file, s)
static int
convert_regs_entry ()
{
int inserted = 0, i;
int inserted = 0;
edge e;
basic_block block;

for (i = n_basic_blocks - 1; i >= 0; --i)
FOR_EACH_BB_REVERSE (block)
{
basic_block block = BASIC_BLOCK (i);
block_info bi = BLOCK_INFO (block);
int reg;

@@ -2813,7 +2813,8 @@ static int
convert_regs (file)
FILE *file;
{
int inserted, i;
int inserted;
basic_block b;
edge e;

/* Initialize uninitialized registers on function entry. */
@@ -2833,9 +2834,8 @@ convert_regs (file)

/* ??? Process all unreachable blocks. Though there's no excuse
for keeping these even when not optimizing. */
for (i = 0; i < n_basic_blocks; ++i)
FOR_EACH_BB (b)
{
basic_block b = BASIC_BLOCK (i);
block_info bi = BLOCK_INFO (b);

if (! bi->done)
gcc/regclass.c

@@ -1127,10 +1127,10 @@ scan_one_insn (insn, pass)
INSN could not be at the beginning of that block. */
if (previnsn == 0 || GET_CODE (previnsn) == JUMP_INSN)
{
int b;
for (b = 0; b < n_basic_blocks; b++)
if (insn == BLOCK_HEAD (b))
BLOCK_HEAD (b) = newinsn;
basic_block b;
FOR_EACH_BB (b)
if (insn == b->head)
b->head = newinsn;
}

/* This makes one more setting of new insns's dest. */
@@ -1255,7 +1255,7 @@ regclass (f, nregs, dump)

for (pass = 0; pass <= flag_expensive_optimizations; pass++)
{
int index;
basic_block bb;

if (dump)
fprintf (dump, "\n\nPass %i\n\n",pass);
@@ -1277,10 +1277,8 @@ regclass (f, nregs, dump)
insn = scan_one_insn (insn, pass);
}
else
for (index = 0; index < n_basic_blocks; index++)
FOR_EACH_BB (bb)
{
basic_block bb = BASIC_BLOCK (index);

/* Show that an insn inside a loop is likely to be executed three
times more than insns outside a loop. This is much more
aggressive than the assumptions made elsewhere and is being
gcc/regmove.c

@@ -223,7 +223,7 @@ mark_flags_life_zones (flags)
{
int flags_regno;
int flags_nregs;
int block;
basic_block block;

#ifdef HAVE_cc0
/* If we found a flags register on a cc0 host, bail. */
@@ -254,13 +254,13 @@ mark_flags_life_zones (flags)
flags_set_1_rtx = flags;

/* Process each basic block. */
for (block = n_basic_blocks - 1; block >= 0; block--)
FOR_EACH_BB_REVERSE (block)
{
rtx insn, end;
int live;

insn = BLOCK_HEAD (block);
end = BLOCK_END (block);
insn = block->head;
end = block->end;

/* Look out for the (unlikely) case of flags being live across
basic block boundaries. */
@@ -269,7 +269,7 @@ mark_flags_life_zones (flags)
{
int i;
for (i = 0; i < flags_nregs; ++i)
live |= REGNO_REG_SET_P (BASIC_BLOCK (block)->global_live_at_start,
live |= REGNO_REG_SET_P (block->global_live_at_start,
flags_regno + i);
}
#endif
@@ -1061,6 +1061,7 @@ regmove_optimize (f, nregs, regmove_dump_file)
int pass;
int i;
rtx copy_src, copy_dst;
basic_block bb;

/* ??? Hack. Regmove doesn't examine the CFG, and gets mightily
confused by non-call exceptions ending blocks. */
@@ -1076,8 +1077,8 @@ regmove_optimize (f, nregs, regmove_dump_file)

regmove_bb_head = (int *) xmalloc (sizeof (int) * (old_max_uid + 1));
for (i = old_max_uid; i >= 0; i--) regmove_bb_head[i] = -1;
for (i = 0; i < n_basic_blocks; i++)
regmove_bb_head[INSN_UID (BLOCK_HEAD (i))] = i;
FOR_EACH_BB (bb)
regmove_bb_head[INSN_UID (bb->head)] = bb->index;

/* A forward/backward pass. Replace output operands with input operands. */

@@ -1504,9 +1505,8 @@ regmove_optimize (f, nregs, regmove_dump_file)

/* In fixup_match_1, some insns may have been inserted after basic block
ends. Fix that here. */
for (i = 0; i < n_basic_blocks; i++)
FOR_EACH_BB (bb)
{
basic_block bb = BASIC_BLOCK (i);
rtx end = bb->end;
rtx new = end;
rtx next = NEXT_INSN (new);
@@ -2139,10 +2139,10 @@ static int record_stack_memrefs PARAMS ((rtx *, void *));
void
combine_stack_adjustments ()
{
int i;
basic_block bb;

for (i = 0; i < n_basic_blocks; ++i)
combine_stack_adjustments_for_block (BASIC_BLOCK (i));
FOR_EACH_BB (bb)
combine_stack_adjustments_for_block (bb);
}

/* Recognize a MEM of the form (sp) or (plus sp const). */
gcc/regrename.c

@@ -201,7 +201,7 @@ regrename_optimize ()
{
int tick[FIRST_PSEUDO_REGISTER];
int this_tick = 0;
int b;
basic_block bb;
char *first_obj;

memset (tick, 0, sizeof tick);
@@ -209,9 +209,8 @@ regrename_optimize ()
gcc_obstack_init (&rename_obstack);
first_obj = (char *) obstack_alloc (&rename_obstack, 0);

for (b = 0; b < n_basic_blocks; b++)
FOR_EACH_BB (bb)
{
basic_block bb = BASIC_BLOCK (b);
struct du_chain *all_chains = 0;
HARD_REG_SET unavailable;
HARD_REG_SET regs_seen;
@@ -219,7 +218,7 @@ regrename_optimize ()
CLEAR_HARD_REG_SET (unavailable);

if (rtl_dump_file)
fprintf (rtl_dump_file, "\nBasic block %d:\n", b);
fprintf (rtl_dump_file, "\nBasic block %d:\n", bb->index);

all_chains = build_def_use (bb);

@@ -1726,30 +1725,30 @@ copyprop_hardreg_forward ()
{
struct value_data *all_vd;
bool need_refresh;
int b;
basic_block bb, bbp;

need_refresh = false;

all_vd = xmalloc (sizeof (struct value_data) * n_basic_blocks);

for (b = 0; b < n_basic_blocks; b++)
FOR_EACH_BB (bb)
{
basic_block bb = BASIC_BLOCK (b);

/* If a block has a single predecessor, that we've already
processed, begin with the value data that was live at
the end of the predecessor block. */
/* ??? Ought to use more intelligent queueing of blocks. */
if (bb->pred)
for (bbp = bb; bbp && bbp != bb->pred->src; bbp = bbp->prev_bb);
if (bb->pred
&& ! bb->pred->pred_next
&& ! (bb->pred->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
&& bb->pred->src->index != ENTRY_BLOCK
&& bb->pred->src->index < b)
all_vd[b] = all_vd[bb->pred->src->index];
&& bb->pred->src != ENTRY_BLOCK_PTR
&& bbp)
all_vd[bb->index] = all_vd[bb->pred->src->index];
else
init_value_data (all_vd + b);
init_value_data (all_vd + bb->index);

if (copyprop_hardreg_forward_1 (bb, all_vd + b))
if (copyprop_hardreg_forward_1 (bb, all_vd + bb->index))
need_refresh = true;
}

gcc/reload1.c

@@ -676,6 +676,7 @@ reload (first, global)
int i;
rtx insn;
struct elim_table *ep;
basic_block bb;

/* The two pointers used to track the true location of the memory used
for label offsets. */
@@ -1123,8 +1124,8 @@ reload (first, global)
pseudo. */

if (! frame_pointer_needed)
for (i = 0; i < n_basic_blocks; i++)
CLEAR_REGNO_REG_SET (BASIC_BLOCK (i)->global_live_at_start,
FOR_EACH_BB (bb)
CLEAR_REGNO_REG_SET (bb->global_live_at_start,
HARD_FRAME_POINTER_REGNUM);

/* Come here (with failure set nonzero) if we can't get enough spill regs
@@ -8613,6 +8614,7 @@ reload_combine ()
int first_index_reg = -1;
int last_index_reg = 0;
int i;
basic_block bb;
unsigned int r;
int last_label_ruid;
int min_labelno, n_labels;
@@ -8648,17 +8650,17 @@ reload_combine ()
label_live = (HARD_REG_SET *) xmalloc (n_labels * sizeof (HARD_REG_SET));
CLEAR_HARD_REG_SET (ever_live_at_start);

for (i = n_basic_blocks - 1; i >= 0; i--)
FOR_EACH_BB_REVERSE (bb)
{
insn = BLOCK_HEAD (i);
insn = bb->head;
if (GET_CODE (insn) == CODE_LABEL)
{
HARD_REG_SET live;

REG_SET_TO_HARD_REG_SET (live,
BASIC_BLOCK (i)->global_live_at_start);
bb->global_live_at_start);
compute_use_by_pseudos (&live,
BASIC_BLOCK (i)->global_live_at_start);
bb->global_live_at_start);
COPY_HARD_REG_SET (LABEL_LIVE (insn), live);
IOR_HARD_REG_SET (ever_live_at_start, live);
}
@@ -9489,12 +9491,11 @@ copy_eh_notes (insn, x)
void
fixup_abnormal_edges ()
{
int i;
bool inserted = false;
basic_block bb;

for (i = 0; i < n_basic_blocks; i++)
FOR_EACH_BB (bb)
{
basic_block bb = BASIC_BLOCK (i);
edge e;

/* Look for cases we are interested in - an calls or instructions causing
gcc/resource.c

@@ -133,7 +133,7 @@ find_basic_block (insn, search_limit)
rtx insn;
int search_limit;
{
int i;
basic_block bb;

/* Scan backwards to the previous BARRIER. Then see if we can find a
label that starts a basic block. Return the basic block number. */
@@ -156,9 +156,9 @@ find_basic_block (insn, search_limit)
insn && GET_CODE (insn) == CODE_LABEL;
insn = next_nonnote_insn (insn))
{
for (i = 0; i < n_basic_blocks; i++)
if (insn == BLOCK_HEAD (i))
return i;
FOR_EACH_BB (bb)
if (insn == bb->head)
return bb->index;
}

return -1;
gcc/sched-ebb.c

@@ -279,7 +279,7 @@ void
schedule_ebbs (dump_file)
FILE *dump_file;
{
int i;
basic_block bb;

/* Taking care of this degenerate case makes the rest of
this code simpler. */
@@ -296,20 +296,19 @@ schedule_ebbs (dump_file)
compute_bb_for_insn (get_max_uid ());

/* Schedule every region in the subroutine. */
for (i = 0; i < n_basic_blocks; i++)
FOR_EACH_BB (bb)
{
rtx head = BASIC_BLOCK (i)->head;
rtx head = bb->head;
rtx tail;

for (;;)
{
basic_block b = BASIC_BLOCK (i);
edge e;
tail = b->end;
if (b->next_bb == EXIT_BLOCK_PTR
|| GET_CODE (b->next_bb->head) == CODE_LABEL)
tail = bb->end;
if (bb->next_bb == EXIT_BLOCK_PTR
|| GET_CODE (bb->next_bb->head) == CODE_LABEL)
break;
for (e = b->succ; e; e = e->succ_next)
for (e = bb->succ; e; e = e->succ_next)
if ((e->flags & EDGE_FALLTHRU) != 0)
break;
if (! e)
@@ -325,7 +324,7 @@ schedule_ebbs (dump_file)
}
}

i++;
bb = bb->next_bb;
}

/* Blah. We should fix the rest of the code not to get confused by
gcc/sched-rgn.c

@@ -319,7 +319,7 @@ static void free_pending_lists PARAMS ((void));
static int
is_cfg_nonregular ()
{
int b;
basic_block b;
rtx insn;
RTX_CODE code;

@@ -346,8 +346,8 @@ is_cfg_nonregular ()
/* If we have non-jumping insns which refer to labels, then we consider
the cfg not well structured. */
/* Check for labels referred to other thn by jumps. */
for (b = 0; b < n_basic_blocks; b++)
for (insn = BLOCK_HEAD (b);; insn = NEXT_INSN (insn))
FOR_EACH_BB (b)
for (insn = b->head;; insn = NEXT_INSN (insn))
{
code = GET_CODE (insn);
if (GET_RTX_CLASS (code) == 'i' && code != JUMP_INSN)
@@ -361,7 +361,7 @@ is_cfg_nonregular ()
return 1;
}

if (insn == BLOCK_END (b))
if (insn == b->end)
break;
}

@@ -382,6 +382,7 @@ build_control_flow (edge_list)
struct edge_list *edge_list;
{
int i, unreachable, num_edges;
basic_block b;

/* This already accounts for entry/exit edges. */
num_edges = NUM_EDGES (edge_list);
@@ -393,10 +394,8 @@ build_control_flow (edge_list)
test is redundant with the one in find_rgns, but it's much
cheaper to go ahead and catch the trivial case here. */
unreachable = 0;
for (i = 0; i < n_basic_blocks; i++)
FOR_EACH_BB (b)
{
basic_block b = BASIC_BLOCK (i);

if (b->pred == NULL
|| (b->pred->src == b
&& b->pred->pred_next == NULL))
@@ -544,17 +543,19 @@ debug_regions ()
static void
find_single_block_region ()
{
int i;
basic_block bb;

for (i = 0; i < n_basic_blocks; i++)
nr_regions = 0;

FOR_EACH_BB (bb)
{
rgn_bb_table[i] = i;
RGN_NR_BLOCKS (i) = 1;
RGN_BLOCKS (i) = i;
CONTAINING_RGN (i) = i;
BLOCK_TO_BB (i) = 0;
rgn_bb_table[nr_regions] = bb->index;
RGN_NR_BLOCKS (nr_regions) = 1;
RGN_BLOCKS (nr_regions) = nr_regions;
CONTAINING_RGN (bb->index) = nr_regions;
BLOCK_TO_BB (bb->index) = 0;
nr_regions++;
}
nr_regions = n_basic_blocks;
}

/* Update number of blocks and the estimate for number of insns
@@ -631,6 +632,7 @@ find_rgns (edge_list, dom)
int count = 0, sp, idx = 0, current_edge = out_edges[0];
int num_bbs, num_insns, unreachable;
int too_large_failure;
basic_block bb;

/* Note if an edge has been passed. */
sbitmap passed;
@@ -772,8 +774,8 @@ find_rgns (edge_list, dom)
the entry node by placing a nonzero value in dfs_nr. Thus if
dfs_nr is zero for any block, then it must be unreachable. */
unreachable = 0;
for (i = 0; i < n_basic_blocks; i++)
if (dfs_nr[i] == 0)
FOR_EACH_BB (bb)
if (dfs_nr[bb->index] == 0)
{
unreachable = 1;
break;
@@ -783,8 +785,8 @@ find_rgns (edge_list, dom)
to hold degree counts. */
degree = dfs_nr;

for (i = 0; i < n_basic_blocks; i++)
degree[i] = 0;
FOR_EACH_BB (bb)
degree[bb->index] = 0;
for (i = 0; i < num_edges; i++)
{
edge e = INDEX_EDGE (edge_list, i);
@@ -809,12 +811,12 @@ find_rgns (edge_list, dom)

/* Find blocks which are inner loop headers. We still have non-reducible
loops to consider at this point. */
for (i = 0; i < n_basic_blocks; i++)
FOR_EACH_BB (bb)
{
if (TEST_BIT (header, i) && TEST_BIT (inner, i))
if (TEST_BIT (header, bb->index) && TEST_BIT (inner, bb->index))
{
edge e;
int j;
basic_block jbb;

/* Now check that the loop is reducible. We do this separate
from finding inner loops so that we do not find a reducible
@@ -827,15 +829,15 @@ find_rgns (edge_list, dom)
If there exists a block that is not dominated by the loop
header, then the block is reachable from outside the loop
and thus the loop is not a natural loop. */
for (j = 0; j < n_basic_blocks; j++)
FOR_EACH_BB (jbb)
{
/* First identify blocks in the loop, except for the loop
entry block. */
if (i == max_hdr[j] && i != j)
if (bb->index == max_hdr[jbb->index] && bb != jbb)
{
/* Now verify that the block is dominated by the loop
header. */
if (!TEST_BIT (dom[j], i))
if (!TEST_BIT (dom[jbb->index], bb->index))
break;
}
}
@@ -843,25 +845,25 @@ find_rgns (edge_list, dom)
/* If we exited the loop early, then I is the header of
a non-reducible loop and we should quit processing it
now. */
if (j != n_basic_blocks)
if (jbb != EXIT_BLOCK_PTR)
continue;

/* I is a header of an inner loop, or block 0 in a subroutine
with no loops at all. */
head = tail = -1;
too_large_failure = 0;
loop_head = max_hdr[i];
loop_head = max_hdr[bb->index];

/* Decrease degree of all I's successors for topological
ordering. */
for (e = BASIC_BLOCK (i)->succ; e; e = e->succ_next)
for (e = bb->succ; e; e = e->succ_next)
if (e->dest != EXIT_BLOCK_PTR)
--degree[e->dest->index];

/* Estimate # insns, and count # blocks in the region. */
num_bbs = 1;
num_insns = (INSN_LUID (BLOCK_END (i))
- INSN_LUID (BLOCK_HEAD (i)));
num_insns = (INSN_LUID (bb->end)
- INSN_LUID (bb->head));

/* Find all loop latches (blocks with back edges to the loop
header) or all the leaf blocks in the cfg has no loops.
@@ -869,17 +871,17 @@ find_rgns (edge_list, dom)
Place those blocks into the queue. */
if (no_loops)
{
for (j = 0; j < n_basic_blocks; j++)
FOR_EACH_BB (jbb)
/* Leaf nodes have only a single successor which must
be EXIT_BLOCK. */
if (BASIC_BLOCK (j)->succ
&& BASIC_BLOCK (j)->succ->dest == EXIT_BLOCK_PTR
&& BASIC_BLOCK (j)->succ->succ_next == NULL)
if (jbb->succ
&& jbb->succ->dest == EXIT_BLOCK_PTR
&& jbb->succ->succ_next == NULL)
{
queue[++tail] = j;
SET_BIT (in_queue, j);
queue[++tail] = jbb->index;
SET_BIT (in_queue, jbb->index);

if (too_large (j, &num_bbs, &num_insns))
if (too_large (jbb->index, &num_bbs, &num_insns))
{
too_large_failure = 1;
break;
@@ -890,14 +892,14 @@ find_rgns (edge_list, dom)
{
edge e;

for (e = BASIC_BLOCK (i)->pred; e; e = e->pred_next)
for (e = bb->pred; e; e = e->pred_next)
{
if (e->src == ENTRY_BLOCK_PTR)
continue;

node = e->src->index;

if (max_hdr[node] == loop_head && node != i)
if (max_hdr[node] == loop_head && node != bb->index)
{
/* This is a loop latch. */
queue[++tail] = node;
@@ -959,7 +961,7 @@ find_rgns (edge_list, dom)
tail = -1;
break;
}
else if (!TEST_BIT (in_queue, node) && node != i)
else if (!TEST_BIT (in_queue, node) && node != bb->index)
{
queue[++tail] = node;
SET_BIT (in_queue, node);
@@ -976,12 +978,12 @@ find_rgns (edge_list, dom)
if (tail >= 0 && !too_large_failure)
{
/* Place the loop header into list of region blocks. */
degree[i] = -1;
rgn_bb_table[idx] = i;
degree[bb->index] = -1;
rgn_bb_table[idx] = bb->index;
RGN_NR_BLOCKS (nr_regions) = num_bbs;
RGN_BLOCKS (nr_regions) = idx++;
CONTAINING_RGN (i) = nr_regions;
BLOCK_TO_BB (i) = count = 0;
CONTAINING_RGN (bb->index) = nr_regions;
BLOCK_TO_BB (bb->index) = count = 0;

/* Remove blocks from queue[] when their in degree
becomes zero. Repeat until no blocks are left on the
@@ -1020,14 +1022,14 @@ find_rgns (edge_list, dom)

/* Any block that did not end up in a region is placed into a region
by itself. */
for (i = 0; i < n_basic_blocks; i++)
if (degree[i] >= 0)
FOR_EACH_BB (bb)
if (degree[bb->index] >= 0)
{
rgn_bb_table[idx] = i;
rgn_bb_table[idx] = bb->index;
RGN_NR_BLOCKS (nr_regions) = 1;
RGN_BLOCKS (nr_regions) = idx++;
CONTAINING_RGN (i) = nr_regions++;
BLOCK_TO_BB (i) = 0;
CONTAINING_RGN (bb->index) = nr_regions++;
BLOCK_TO_BB (bb->index) = 0;
}

free (max_hdr);
@@ -2980,6 +2982,7 @@ schedule_insns (dump_file)
sbitmap large_region_blocks, blocks;
int rgn;
int any_large_regions;
basic_block bb;

/* Taking care of this degenerate case makes the rest of
this code simpler. */
@@ -3019,7 +3022,9 @@ schedule_insns (dump_file)

any_large_regions = 0;
large_region_blocks = sbitmap_alloc (n_basic_blocks);
sbitmap_ones (large_region_blocks);
sbitmap_zero (large_region_blocks);
FOR_EACH_BB (bb)
SET_BIT (large_region_blocks, bb->index);

blocks = sbitmap_alloc (n_basic_blocks);
sbitmap_zero (blocks);

gcc/sibcall.c

@@ -610,7 +610,7 @@ optimize_sibling_and_tail_recursive_calls ()

/* Walk forwards through the last normal block and see if it
does nothing except fall into the exit block. */
for (insn = BLOCK_HEAD (n_basic_blocks - 1);
for (insn = EXIT_BLOCK_PTR->prev_bb->head;
insn;
insn = NEXT_INSN (insn))
{
gcc/ssa-ccp.c

@@ -740,6 +740,7 @@ optimize_unexecutable_edges (edges, executable_edges)
sbitmap executable_edges;
{
int i;
basic_block bb;

for (i = 0; i < NUM_EDGES (edges); i++)
{
@@ -797,9 +798,8 @@ optimize_unexecutable_edges (edges, executable_edges)
In cases B & C we are removing uses of registers, so make sure
to note those changes for the DF analyzer. */

for (i = 0; i < n_basic_blocks; i++)
FOR_EACH_BB (bb)
{
basic_block bb = BASIC_BLOCK (i);
rtx insn = bb->end;
edge edge = bb->succ;

@@ -929,7 +929,7 @@ ssa_ccp_substitute_constants ()
static void
ssa_ccp_df_delete_unreachable_insns ()
{
int i;
basic_block b;

/* Use the CFG to find all the reachable blocks. */
find_unreachable_blocks ();
@@ -937,10 +937,8 @@ ssa_ccp_df_delete_unreachable_insns ()
/* Now we know what blocks are not reachable. Mark all the insns
in those blocks as deleted for the DF analyzer. We'll let the
normal flow code actually remove the unreachable blocks. */
for (i = n_basic_blocks - 1; i >= 0; --i)
FOR_EACH_BB_REVERSE (b)
{
basic_block b = BASIC_BLOCK (i);

if (!(b->flags & BB_REACHABLE))
{
rtx start = b->head;
gcc/ssa-dce.c

@@ -490,6 +490,7 @@ ssa_eliminate_dead_code ()
{
int i;
rtx insn;
basic_block bb;
/* Necessary instructions with operands to explore. */
varray_type unprocessed_instructions;
/* Map element (b,e) is nonzero if the block is control dependent on
@@ -718,10 +719,8 @@ ssa_eliminate_dead_code ()
/* Find any blocks with no successors and ensure they are followed
by a BARRIER. delete_insn has the nasty habit of deleting barriers
when deleting insns. */
for (i = 0; i < n_basic_blocks; i++)
FOR_EACH_BB (bb)
{
basic_block bb = BASIC_BLOCK (i);

if (bb->succ == NULL)
{
rtx next = NEXT_INSN (bb->end);
82
gcc/ssa.c
82
gcc/ssa.c
@ -470,18 +470,18 @@ find_evaluations (evals, nregs)
|
||||
sbitmap *evals;
|
||||
int nregs;
|
||||
{
|
||||
int bb;
|
||||
basic_block bb;
|
||||
|
||||
sbitmap_vector_zero (evals, nregs);
|
||||
fe_evals = evals;
|
||||
|
||||
for (bb = n_basic_blocks; --bb >= 0; )
|
||||
FOR_EACH_BB_REVERSE (bb)
|
||||
{
|
||||
rtx p, last;
|
||||
|
||||
fe_current_bb = bb;
|
||||
p = BLOCK_HEAD (bb);
|
||||
last = BLOCK_END (bb);
|
||||
fe_current_bb = bb->index;
|
||||
p = bb->head;
|
||||
last = bb->end;
|
||||
while (1)
|
||||
{
|
||||
if (INSN_P (p))
|
||||
@ -520,7 +520,7 @@ compute_dominance_frontiers_1 (frontiers, idom, bb, done)
|
||||
{
|
||||
basic_block b = BASIC_BLOCK (bb);
|
||||
edge e;
|
||||
int c;
|
||||
basic_block c;
|
||||
|
||||
SET_BIT (done, bb);
|
||||
sbitmap_zero (frontiers[bb]);
|
||||
@ -528,9 +528,9 @@ compute_dominance_frontiers_1 (frontiers, idom, bb, done)
|
||||
/* Do the frontier of the children first. Not all children in the
|
||||
dominator tree (blocks dominated by this one) are children in the
|
||||
CFG, so check all blocks. */
|
||||
for (c = 0; c < n_basic_blocks; ++c)
|
||||
if (idom[c] == bb && ! TEST_BIT (done, c))
|
||||
compute_dominance_frontiers_1 (frontiers, idom, c, done);
|
||||
FOR_EACH_BB (c)
|
||||
if (idom[c->index] == bb && ! TEST_BIT (done, c->index))
|
||||
compute_dominance_frontiers_1 (frontiers, idom, c->index, done);
|
||||
|
||||
/* Find blocks conforming to rule (1) above. */
|
||||
for (e = b->succ; e; e = e->succ_next)
|
||||
@ -542,11 +542,11 @@ compute_dominance_frontiers_1 (frontiers, idom, bb, done)
|
||||
}
|
||||
|
||||
/* Find blocks conforming to rule (2). */
|
||||
for (c = 0; c < n_basic_blocks; ++c)
|
||||
if (idom[c] == bb)
|
||||
FOR_EACH_BB (c)
|
||||
if (idom[c->index] == bb)
|
||||
{
|
||||
int x;
|
||||
EXECUTE_IF_SET_IN_SBITMAP (frontiers[c], 0, x,
|
||||
EXECUTE_IF_SET_IN_SBITMAP (frontiers[c->index], 0, x,
|
||||
{
|
||||
if (idom[x] != bb)
|
||||
SET_BIT (frontiers[bb], x);
|
||||
@ -975,7 +975,7 @@ rename_block (bb, idom)
|
||||
edge e;
|
||||
rtx insn, next, last;
|
||||
struct rename_set_data *set_data = NULL;
|
||||
int c;
|
||||
basic_block c;
|
||||
|
||||
/* Step One: Walk the basic block, adding new names for sets and
|
||||
replacing uses. */
|
||||
@ -1078,9 +1078,9 @@ rename_block (bb, idom)
|
||||
/* Step Three: Do the same to the children of this block in
|
||||
dominator order. */
|
||||
|
||||
for (c = 0; c < n_basic_blocks; ++c)
|
||||
if (idom[c] == bb)
|
||||
rename_block (c, idom);
|
||||
FOR_EACH_BB (c)
|
||||
if (idom[c->index] == bb)
|
||||
rename_block (c->index, idom);
|
||||
|
||||
/* Step Four: Update the sets to refer to their new register,
|
||||
and restore ssa_rename_to to its previous state. */
|
||||
@ -1140,6 +1140,8 @@ convert_to_ssa ()
|
||||
|
||||
int nregs;
|
||||
|
||||
basic_block bb;
|
||||
|
||||
/* Don't do it twice. */
|
||||
if (in_ssa_form)
|
||||
abort ();
|
||||
@ -1154,10 +1156,9 @@ convert_to_ssa ()
|
||||
|
||||
if (rtl_dump_file)
|
||||
{
|
||||
int i;
|
||||
fputs (";; Immediate Dominators:\n", rtl_dump_file);
|
||||
for (i = 0; i < n_basic_blocks; ++i)
|
||||
fprintf (rtl_dump_file, ";\t%3d = %3d\n", i, idom[i]);
|
||||
FOR_EACH_BB (bb)
|
||||
fprintf (rtl_dump_file, ";\t%3d = %3d\n", bb->index, idom[bb->index]);
|
||||
fflush (rtl_dump_file);
|
||||
}
|
||||
|
||||
@ -1629,7 +1630,7 @@ make_equivalent_phi_alternatives_equivalent (bb, reg_partition)
|
||||
static partition
|
||||
compute_conservative_reg_partition ()
|
||||
{
|
||||
int bb;
|
||||
basic_block bb;
|
||||
int changed = 0;
|
||||
|
||||
/* We don't actually work with hard registers, but it's easier to
|
||||
@ -1642,8 +1643,8 @@ compute_conservative_reg_partition ()
|
||||
be copied on abnormal critical edges are placed in the same
|
||||
partition. This saves us from having to split abnormal critical
|
||||
edges. */
|
||||
for (bb = n_basic_blocks; --bb >= 0; )
|
||||
changed += make_regs_equivalent_over_bad_edges (bb, p);
|
||||
FOR_EACH_BB_REVERSE (bb)
|
||||
changed += make_regs_equivalent_over_bad_edges (bb->index, p);
|
||||
|
||||
/* Now we have to insure that corresponding arguments of phi nodes
|
||||
assigning to corresponding regs are equivalent. Iterate until
|
||||
@@ -1651,8 +1652,8 @@ compute_conservative_reg_partition ()
   while (changed > 0)
     {
       changed = 0;
-      for (bb = n_basic_blocks; --bb >= 0; )
-        changed += make_equivalent_phi_alternatives_equivalent (bb, p);
+      FOR_EACH_BB_REVERSE (bb)
+        changed += make_equivalent_phi_alternatives_equivalent (bb->index, p);
     }

   return p;
@@ -1848,7 +1849,7 @@ coalesce_regs_in_successor_phi_nodes (bb, p, conflicts)
 static partition
 compute_coalesced_reg_partition ()
 {
-  int bb;
+  basic_block bb;
   int changed = 0;
   regset_head phi_set_head;
   regset phi_set = &phi_set_head;
@@ -1860,8 +1861,8 @@ compute_coalesced_reg_partition ()
      be copied on abnormal critical edges are placed in the same
      partition.  This saves us from having to split abnormal critical
      edges (which can't be done).  */
-  for (bb = n_basic_blocks; --bb >= 0; )
-    make_regs_equivalent_over_bad_edges (bb, p);
+  FOR_EACH_BB_REVERSE (bb)
+    make_regs_equivalent_over_bad_edges (bb->index, p);

   INIT_REG_SET (phi_set);

@@ -1883,12 +1884,11 @@ compute_coalesced_reg_partition ()
          blocks first, so that most frequently executed copies would
          be more likely to be removed by register coalescing.  But any
          order will generate correct, if non-optimal, results.  */
-      for (bb = n_basic_blocks; --bb >= 0; )
+      FOR_EACH_BB_REVERSE (bb)
         {
-          basic_block block = BASIC_BLOCK (bb);
-          changed += coalesce_regs_in_copies (block, p, conflicts);
+          changed += coalesce_regs_in_copies (bb, p, conflicts);
           changed +=
-            coalesce_regs_in_successor_phi_nodes (block, p, conflicts);
+            coalesce_regs_in_successor_phi_nodes (bb, p, conflicts);
         }

       conflict_graph_delete (conflicts);
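The hunk above also shows the second payoff of pointer-based iteration: the `basic_block block = BASIC_BLOCK (bb)` temporary disappears, because the loop variable already is the pointer the callees take. Schematically (`process' is a placeholder, not a function in this file):

    /* Before: fetch the pointer from the index on every iteration.  */
    for (i = n_basic_blocks; --i >= 0; )
      {
        basic_block block = BASIC_BLOCK (i);
        process (block);
      }

    /* After: the loop variable is already the pointer.  */
    FOR_EACH_BB_REVERSE (bb)
      process (bb);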
@@ -2094,11 +2094,10 @@ static void
 rename_equivalent_regs (reg_partition)
      partition reg_partition;
 {
-  int bb;
+  basic_block b;

-  for (bb = n_basic_blocks; --bb >= 0; )
+  FOR_EACH_BB_REVERSE (b)
     {
-      basic_block b = BASIC_BLOCK (bb);
       rtx next = b->head;
       rtx last = b->end;
       rtx insn;
@@ -2141,7 +2140,7 @@ rename_equivalent_regs (reg_partition)
 void
 convert_from_ssa ()
 {
-  int bb;
+  basic_block b, bb;
   partition reg_partition;
   rtx insns = get_insns ();

@@ -2167,9 +2166,8 @@ convert_from_ssa ()
   rename_equivalent_regs (reg_partition);

   /* Eliminate the PHI nodes.  */
-  for (bb = n_basic_blocks; --bb >= 0; )
+  FOR_EACH_BB_REVERSE (b)
     {
-      basic_block b = BASIC_BLOCK (bb);
       edge e;

       for (e = b->pred; e; e = e->pred_next)
@@ -2180,17 +2178,17 @@ convert_from_ssa ()
   partition_delete (reg_partition);

   /* Actually delete the PHI nodes.  */
-  for (bb = n_basic_blocks; --bb >= 0; )
+  FOR_EACH_BB_REVERSE (bb)
     {
-      rtx insn = BLOCK_HEAD (bb);
+      rtx insn = bb->head;

       while (1)
         {
           /* If this is a PHI node delete it.  */
           if (PHI_NODE_P (insn))
             {
-              if (insn == BLOCK_END (bb))
-                BLOCK_END (bb) = PREV_INSN (insn);
+              if (insn == bb->end)
+                bb->end = PREV_INSN (insn);
               insn = delete_insn (insn);
             }
           /* Since all the phi nodes come at the beginning of the
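Likewise, BLOCK_HEAD and BLOCK_END are index-based accessors, so once a basic_block pointer is in hand the same fields can be read and written directly. Roughly (a paraphrase; the exact definitions at this revision may differ):

    /* Paraphrase of the index-based accessors this hunk retires.  */
    #define BLOCK_HEAD(B)  (BASIC_BLOCK (B)->head)
    #define BLOCK_END(B)   (BASIC_BLOCK (B)->end)

    /* With a pointer, bb->head and bb->end reach the same fields
       without the BASIC_BLOCK lookup.  */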
@@ -2199,7 +2197,7 @@ convert_from_ssa ()
           else if (INSN_P (insn))
             break;
           /* If we've reached the end of the block, stop.  */
-          else if (insn == BLOCK_END (bb))
+          else if (insn == bb->end)
             break;
           else
             insn = NEXT_INSN (insn);