mirror of
git://gcc.gnu.org/git/gcc.git
synced 2025-04-16 18:40:57 +08:00
Eliminate FOR_EACH_BB macro.
gcc/ * basic-block.h (FOR_EACH_BB): Eliminate macro. * asan.c (transform_statements, execute_sanopt): Eliminate use of FOR_EACH_BB in favor of FOR_EACH_BB_FN, to make use of cfun explicit. * auto-inc-dec.c (rest_of_handle_auto_inc_dec): Likewise. * bb-reorder.c (find_rarely_executed_basic_blocks_and_crossing_edges, set_edge_can_fallthru_flag, fix_up_fall_thru_edges, fix_crossing_unconditional_branches, add_reg_crossing_jump_notes, insert_section_boundary_note, rest_of_handle_reorder_blocks, duplicate_computed_gotos): Likewise. * cfg.c (clear_edges, compact_blocks, brief_dump_cfg): Likewise. * cfganal.c (find_unreachable_blocks, add_noreturn_fake_exit_edges, compute_dominance_frontiers_1, single_pred_before_succ_order): Likewise. * cfgbuild.c (find_many_sub_basic_blocks): Likewise. * cfgcleanup.c (try_optimize_cfg, delete_dead_jumptables): Likewise. * cfgexpand.c (add_scope_conflicts, discover_nonconstant_array_refs): Likewise. * cfgloop.c (flow_loops_cfg_dump, get_loop_body, record_loop_exits, verify_loop_structure): Likewise. * cfgloopanal.c (mark_loop_exit_edges): Likewise. * cfgrtl.c (compute_bb_for_insn, find_partition_fixes, verify_hot_cold_block_grouping, purge_all_dead_edges, fixup_abnormal_edges, record_effective_endpoints, outof_cfg_layout_mode, fixup_reorder_chain, force_one_exit_fallthru, break_superblocks): Likewise. * cgraphbuild.c (build_cgraph_edges, rebuild_cgraph_edges, cgraph_rebuild_references): Likewise. * combine-stack-adj.c (combine_stack_adjustments): Likewise. * combine.c (delete_noop_moves, create_log_links, combine_instructions): Likewise. * config/arm/arm.c (thumb1_reorg, thumb2_reorg): Likewise. * config/bfin/bfin.c (bfin_gen_bundles, reorder_var_tracking_notes): Likewise. * config/c6x/c6x.c (c6x_gen_bundles, conditionalize_after_sched, c6x_reorg): Likewise. * config/epiphany/resolve-sw-modes.c (resolve_sw_modes): Likewise. * config/frv/frv.c (frv_optimize_membar): Likewise. 
* config/i386/i386.c (ix86_finalize_stack_realign_flags): Likewise. * config/ia64/ia64.c (ia64_reorg): Likewise. * config/mips/mips.c (mips_annotate_pic_calls): Likewise. * config/picochip/picochip.c (reorder_var_tracking_notes): Likewise. * config/rs6000/rs6000.c (rs6000_alloc_sdmode_stack_slot): Likewise. * config/s390/s390.c (s390_regs_ever_clobbered): Likewise. * config/sh/sh_treg_combine.cc (sh_treg_combine::execute): Likewise. * config/spu/spu.c (spu_machine_dependent_reorg): Likewise. * config/tilegx/tilegx.c (tilegx_gen_bundles, reorder_var_tracking_notes): Likewise. * config/tilepro/tilepro.c (tilepro_gen_bundles, reorder_var_tracking_notes): Likewise. * coverage.c (coverage_compute_cfg_checksum): Likewise. * cprop.c (compute_hash_table_work, compute_cprop_data, local_cprop_pass, find_implicit_sets): Likewise. * cse.c (cse_condition_code_reg): Likewise. * dce.c (prescan_insns_for_dce): Likewise. * df-core.c (df_compact_blocks): Likewise. * df-problems.c (df_word_lr_alloc): Likewise. * df-scan.c (df_scan_start_dump, df_scan_blocks, df_insn_rescan_all, df_update_entry_exit_and_calls): Likewise. * dominance.c (calculate_dominance_info, verify_dominators, debug_dominance_info): Likewise. * dse.c (dse_step5_nospill): Likewise. * except.c (finish_eh_generation): Likewise. * final.c (compute_alignments): Likewise. * function.c (thread_prologue_and_epilogue_insns, rest_of_match_asm_constraints): Likewise. * gcse.c (compute_hash_table_work, prune_expressions, compute_pre_data, compute_code_hoist_vbeinout, hoist_code, calculate_bb_reg_pressure, compute_ld_motion_mems): Likewise. * gimple-iterator.c (gsi_commit_edge_inserts): Likewise. * gimple-ssa-isolate-paths.c (find_implicit_erroneous_behaviour, find_explicit_erroneous_behaviour): Likewise. * graphite-sese-to-poly.c (rewrite_reductions_out_of_ssa, rewrite_cross_bb_scalar_deps_out_of_ssa): Likewise. * haifa-sched.c (haifa_sched_init): Likewise. 
* hw-doloop.c (discover_loops, set_bb_indices, reorder_loops): Likewise. * ifcvt.c (if_convert): Likewise. * init-regs.c (initialize_uninitialized_regs): Likewise. * ipa-prop.c (ipcp_transform_function): Likewise. * ipa-pure-const.c (analyze_function): Likewise. * ipa-split.c (find_split_points, execute_split_functions): Likewise. * ira-build.c (form_loop_tree): Likewise. * ira-costs.c (find_costs_and_classes): Likewise. * ira-emit.c (emit_moves, add_ranges_and_copies, ira_emit): Likewise. * ira.c (decrease_live_ranges_number, compute_regs_asm_clobbered, mark_elimination, update_equiv_regs, find_moveable_pseudos, split_live_ranges_for_shrink_wrap, allocate_initial_values): Likewise. * jump.c (mark_all_labels): Likewise. * lcm.c (compute_laterin, compute_insert_delete, compute_available, compute_nearerout, compute_rev_insert_delete): Likewise. * loop-init.c (fix_loop_structure): Likewise. * loop-invariant.c (calculate_loop_reg_pressure): Likewise. * lower-subreg.c (decompose_multiword_subregs, decompose_multiword_subregs): Likewise. * lra-assigns.c (assign_by_spills): Likewise. * lra-coalesce.c (lra_coalesce): Likewise. * lra-constraints.c (lra_inheritance, remove_inheritance_pseudos): Likewise. * lra-eliminations.c (lra_init_elimination): Likewise. * lra-spills.c (assign_spill_hard_regs, spill_pseudos, lra_final_code_change): Likewise. * lra.c (remove_scratches, check_rtl, has_nonexceptional_receiver, update_inc_notes): Likewise. * mcf.c (adjust_cfg_counts): Likewise. * mode-switching.c (optimize_mode_switching): Likewise. * modulo-sched.c (rest_of_handle_sms): Likewise. * omp-low.c (optimize_omp_library_calls, expand_omp_taskreg, expand_omp_target): Likewise. * postreload-gcse.c (alloc_mem, compute_hash_table): Likewise. * postreload.c (reload_cse_regs_1): Likewise. * predict.c (strip_predict_hints, tree_bb_level_predictions, tree_estimate_probability, expensive_function_p, estimate_bb_frequencies, compute_function_frequency): Likewise. 
* profile.c (is_inconsistent, compute_branch_probabilities, branch_prob): Likewise. * ree.c (find_removable_extensions): Likewise. * reg-stack.c (compensate_edges, convert_regs, reg_to_stack): Likewise. * regcprop.c (copyprop_hardreg_forward): Likewise. * reginfo.c (init_subregs_of_mode): Likewise. * regrename.c (regrename_analyze): Likewise. * regstat.c (regstat_compute_ri, regstat_compute_calls_crossed): Likewise. * reload1.c (has_nonexceptional_receiver, reload, calculate_elim_costs_all_insns): Likewise. * resource.c (init_resource_info, free_resource_info): Likewise. * sched-ebb.c (schedule_ebbs): Likewise. * sched-rgn.c (is_cfg_nonregular, find_single_block_region, haifa_find_rgns, sched_rgn_local_init): Likewise. * sel-sched-dump.c (sel_dump_cfg_2): Likewise. * sel-sched-ir.c (init_lv_sets, free_lv_sets, make_regions_from_the_rest): Likewise. * sese.c (build_sese_loop_nests, sese_build_liveouts): Likewise. * stack-ptr-mod.c (notice_stack_pointer_modification): Likewise. * store-motion.c (compute_store_table, build_store_vectors, one_store_motion_pass): Likewise. * tracer.c (tail_duplicate): Likewise. * trans-mem.c (compute_transaction_bits): Likewise. * tree-call-cdce.c (tree_call_cdce): Likewise. * tree-cfg.c (replace_loop_annotate, factor_computed_gotos, fold_cond_expr_cond, make_edges, assign_discriminators, make_abnormal_goto_edges, cleanup_dead_labels, group_case_labels, dump_cfg_stats, gimple_verify_flow_info, print_loop, execute_fixup_cfg): Likewise. * tree-cfgcleanup.c (cleanup_tree_cfg_1, merge_phi_nodes): Likewise. * tree-complex.c (init_dont_simulate_again, tree_lower_complex): Likewise. * tree-dfa.c (collect_dfa_stats, dump_enumerated_decls): Likewise. * tree-eh.c (execute_lower_resx, execute_lower_eh_dispatch, mark_reachable_handlers): Likewise. * tree-emutls.c (lower_emutls_function_body): Likewise. * tree-if-conv.c (main_tree_if_conversion): Likewise. * tree-inline.c (optimize_inline_calls): Likewise. 
* tree-into-ssa.c (rewrite_into_ssa, update_ssa): Likewise. * tree-nrv.c (tree_nrv, execute_return_slot_opt): Likewise. * tree-object-size.c (compute_object_sizes): Likewise. * tree-outof-ssa.c (eliminate_useless_phis, rewrite_trees, insert_backedge_copies, tree_profiling): Likewise. * tree-scalar-evolution.c (scev_const_prop): Likewise. * tree-sra.c (scan_function, sra_modify_function_body, propagate_dereference_distances, ipa_sra_modify_function_body, convert_callers): Likewise. * tree-ssa-ccp.c (ccp_initialize, execute_fold_all_builtins): Likewise. * tree-ssa-coalesce.c (build_ssa_conflict_graph): Likewise. create_outofssa_var_map, coalesce_partitions): Likewise. * tree-ssa-copy.c (init_copy_prop): Likewise. * tree-ssa-copyrename.c (rename_ssa_copies): Likewise. * tree-ssa-dce.c (find_obviously_necessary_stmts, eliminate_unnecessary_stmts): Likewise. * tree-ssa-dom.c (free_all_edge_infos, tree_ssa_dominator_optimize): Likewise. * tree-ssa-forwprop.c (ssa_forward_propagate_and_combine): Likewise. * tree-ssa-live.c (clear_unused_block_pointer, remove_unused_locals, new_tree_live_info, calculate_live_on_exit, dump_live_info, analyze_memory_references, fill_always_executed_in, tree_ssa_lim_finalize): Likewise. * tree-ssa-loop-manip.c (find_uses_to_rename, verify_loop_closed_ssa): Likewise. * tree-ssa-math-opts.c (execute_cse_reciprocals, execute_cse_sincos, execute_optimize_bswap, execute_optimize_widening_mul): Likewise. * tree-ssa-propagate.c (substitute_and_fold): Likewise. * tree-ssa-structalias.c (compute_points_to_sets): Likewise. * tree-ssa-tail-merge.c (find_same_succ, reset_cluster_vectors): Likewise. * tree-ssa-ter.c (find_replaceable_exprs): Likewise. * tree-ssa-threadupdate.c (thread_through_all_blocks): Likewise. * tree-ssa-uncprop.c (associate_equivalences_with_edges, tree_ssa_uncprop): Likewise. * tree-ssa-uninit.c (warn_uninitialized_vars, execute_late_warn_uninitialized): Likewise. * tree-ssa.c (verify_ssa, execute_update_addresses_taken): Likewise. 
* tree-stdarg.c (check_all_va_list_escapes, execute_optimize_stdarg): Likewise. * tree-switch-conversion.c (do_switchconv): Likewise. * tree-vect-generic.c (expand_vector_operations): Likewise. * tree-vectorizer.c (adjust_simduid_builtins, note_simd_array_uses, execute_vect_slp): Likewise. * tree-vrp.c (check_all_array_refs, remove_range_assertions, vrp_initialize, identify_jump_threads, instrument_memory_accesses): Likewise. * ubsan.c (ubsan_pass): Likewise. * value-prof.c (verify_histograms, gimple_value_profile_transformations, gimple_find_values_to_profile): Likewise. * var-tracking.c (vt_find_locations, dump_dataflow_sets, vt_emit_notes, vt_initialize, delete_debug_insns, vt_finalize): Likewise. gcc/testsuite/ * g++.dg/plugin/selfassign.c (execute_warn_self_assign): Eliminate use of FOR_EACH_BB in favor of FOR_EACH_BB_FN, to make use of cfun explicit. * gcc.dg/plugin/selfassign.c (execute_warn_self_assign): Likewise. From-SVN: r205828
This commit is contained in:
parent
8b1c6fd716
commit
11cd3bed28
208
gcc/ChangeLog
208
gcc/ChangeLog
@ -1,3 +1,211 @@
|
||||
2013-12-09 David Malcolm <dmalcolm@redhat.com>
|
||||
|
||||
* basic-block.h (FOR_EACH_BB): Eliminate macro.
|
||||
|
||||
* asan.c (transform_statements, execute_sanopt): Eliminate
|
||||
use of FOR_EACH_BB in favor of FOR_EACH_BB_FN, to make use of cfun
|
||||
explicit.
|
||||
* auto-inc-dec.c (rest_of_handle_auto_inc_dec): Likewise.
|
||||
* bb-reorder.c (find_rarely_executed_basic_blocks_and_crossing_edges,
|
||||
set_edge_can_fallthru_flag, fix_up_fall_thru_edges,
|
||||
fix_crossing_unconditional_branches, add_reg_crossing_jump_notes,
|
||||
insert_section_boundary_note, rest_of_handle_reorder_blocks,
|
||||
duplicate_computed_gotos): Likewise.
|
||||
* cfg.c (clear_edges, compact_blocks, brief_dump_cfg): Likewise.
|
||||
* cfganal.c (find_unreachable_blocks, add_noreturn_fake_exit_edges,
|
||||
compute_dominance_frontiers_1, single_pred_before_succ_order): Likewise.
|
||||
* cfgbuild.c (find_many_sub_basic_blocks): Likewise.
|
||||
* cfgcleanup.c (try_optimize_cfg, delete_dead_jumptables): Likewise.
|
||||
* cfgexpand.c (add_scope_conflicts, discover_nonconstant_array_refs):
|
||||
Likewise.
|
||||
* cfgloop.c (flow_loops_cfg_dump, get_loop_body, record_loop_exits,
|
||||
verify_loop_structure): Likewise.
|
||||
* cfgloopanal.c (mark_loop_exit_edges): Likewise.
|
||||
* cfgrtl.c (compute_bb_for_insn, find_partition_fixes,
|
||||
verify_hot_cold_block_grouping, purge_all_dead_edges,
|
||||
fixup_abnormal_edges, record_effective_endpoints,
|
||||
outof_cfg_layout_mode, fixup_reorder_chain, force_one_exit_fallthru,
|
||||
break_superblocks): Likewise.
|
||||
* cgraphbuild.c (build_cgraph_edges, rebuild_cgraph_edges,
|
||||
cgraph_rebuild_references): Likewise.
|
||||
* combine-stack-adj.c (combine_stack_adjustments): Likewise.
|
||||
* combine.c (delete_noop_moves, create_log_links,
|
||||
combine_instructions): Likewise.
|
||||
* config/arm/arm.c (thumb1_reorg, thumb2_reorg): Likewise.
|
||||
* config/bfin/bfin.c (bfin_gen_bundles, reorder_var_tracking_notes):
|
||||
Likewise.
|
||||
* config/c6x/c6x.c (c6x_gen_bundles, conditionalize_after_sched,
|
||||
c6x_reorg): Likewise.
|
||||
* config/epiphany/resolve-sw-modes.c (resolve_sw_modes): Likewise.
|
||||
* config/frv/frv.c (frv_optimize_membar): Likewise.
|
||||
* config/i386/i386.c (ix86_finalize_stack_realign_flags): Likewise.
|
||||
* config/ia64/ia64.c (ia64_reorg): Likewise.
|
||||
* config/mips/mips.c (mips_annotate_pic_calls): Likewise.
|
||||
* config/picochip/picochip.c (reorder_var_tracking_notes): Likewise.
|
||||
* config/rs6000/rs6000.c (rs6000_alloc_sdmode_stack_slot): Likewise.
|
||||
* config/s390/s390.c (s390_regs_ever_clobbered): Likewise.
|
||||
* config/sh/sh_treg_combine.cc (sh_treg_combine::execute): Likewise.
|
||||
* config/spu/spu.c (spu_machine_dependent_reorg): Likewise.
|
||||
* config/tilegx/tilegx.c (tilegx_gen_bundles,
|
||||
reorder_var_tracking_notes): Likewise.
|
||||
* config/tilepro/tilepro.c (tilepro_gen_bundles,
|
||||
reorder_var_tracking_notes): Likewise.
|
||||
* coverage.c (coverage_compute_cfg_checksum): Likewise.
|
||||
* cprop.c (compute_hash_table_work, compute_cprop_data,
|
||||
local_cprop_pass, find_implicit_sets): Likewise.
|
||||
* cse.c (cse_condition_code_reg): Likewise.
|
||||
* dce.c (prescan_insns_for_dce): Likewise.
|
||||
* df-core.c (df_compact_blocks): Likewise.
|
||||
* df-problems.c (df_word_lr_alloc): Likewise.
|
||||
* df-scan.c (df_scan_start_dump, df_scan_blocks, df_insn_rescan_all,
|
||||
df_update_entry_exit_and_calls): Likewise.
|
||||
* dominance.c (calculate_dominance_info, verify_dominators,
|
||||
debug_dominance_info): Likewise.
|
||||
* dse.c (dse_step5_nospill): Likewise.
|
||||
* except.c (finish_eh_generation): Likewise.
|
||||
* final.c (compute_alignments): Likewise.
|
||||
* function.c (thread_prologue_and_epilogue_insns,
|
||||
rest_of_match_asm_constraints): Likewise.
|
||||
* gcse.c (compute_hash_table_work, prune_expressions,
|
||||
compute_pre_data, compute_code_hoist_vbeinout, hoist_code,
|
||||
calculate_bb_reg_pressure, compute_ld_motion_mems): Likewise.
|
||||
* gimple-iterator.c (gsi_commit_edge_inserts): Likewise.
|
||||
* gimple-ssa-isolate-paths.c (find_implicit_erroneous_behaviour,
|
||||
find_explicit_erroneous_behaviour): Likewise.
|
||||
* graphite-sese-to-poly.c (rewrite_reductions_out_of_ssa,
|
||||
rewrite_cross_bb_scalar_deps_out_of_ssa): Likewise.
|
||||
* haifa-sched.c (haifa_sched_init): Likewise.
|
||||
* hw-doloop.c (discover_loops, set_bb_indices, reorder_loops):
|
||||
Likewise.
|
||||
* ifcvt.c (if_convert): Likewise.
|
||||
* init-regs.c (initialize_uninitialized_regs): Likewise.
|
||||
* ipa-prop.c (ipcp_transform_function): Likewise.
|
||||
* ipa-pure-const.c (analyze_function): Likewise.
|
||||
* ipa-split.c (find_split_points, execute_split_functions): Likewise.
|
||||
* ira-build.c (form_loop_tree): Likewise.
|
||||
* ira-costs.c (find_costs_and_classes): Likewise.
|
||||
* ira-emit.c (emit_moves, add_ranges_and_copies, ira_emit): Likewise.
|
||||
* ira.c (decrease_live_ranges_number, compute_regs_asm_clobbered,
|
||||
mark_elimination, update_equiv_regs, find_moveable_pseudos,
|
||||
split_live_ranges_for_shrink_wrap, allocate_initial_values): Likewise.
|
||||
* jump.c (mark_all_labels): Likewise.
|
||||
* lcm.c (compute_laterin, compute_insert_delete, compute_available,
|
||||
compute_nearerout, compute_rev_insert_delete): Likewise.
|
||||
* loop-init.c (fix_loop_structure): Likewise.
|
||||
* loop-invariant.c (calculate_loop_reg_pressure): Likewise.
|
||||
* lower-subreg.c (decompose_multiword_subregs): Likewise.
|
||||
* lra-assigns.c (assign_by_spills): Likewise.
|
||||
* lra-coalesce.c (lra_coalesce): Likewise.
|
||||
* lra-constraints.c (lra_inheritance, remove_inheritance_pseudos):
|
||||
Likewise.
|
||||
* lra-eliminations.c (lra_init_elimination): Likewise.
|
||||
* lra-spills.c (assign_spill_hard_regs, spill_pseudos,
|
||||
lra_final_code_change): Likewise.
|
||||
* lra.c (remove_scratches, check_rtl, has_nonexceptional_receiver,
|
||||
update_inc_notes): Likewise.
|
||||
* mcf.c (adjust_cfg_counts): Likewise.
|
||||
* mode-switching.c (optimize_mode_switching): Likewise.
|
||||
* modulo-sched.c (rest_of_handle_sms): Likewise.
|
||||
* omp-low.c (optimize_omp_library_calls, expand_omp_taskreg,
|
||||
expand_omp_target): Likewise.
|
||||
* postreload-gcse.c (alloc_mem, compute_hash_table): Likewise.
|
||||
* postreload.c (reload_cse_regs_1): Likewise.
|
||||
* predict.c (strip_predict_hints, tree_bb_level_predictions,
|
||||
tree_estimate_probability, expensive_function_p,
|
||||
estimate_bb_frequencies, compute_function_frequency): Likewise.
|
||||
* profile.c (is_inconsistent, compute_branch_probabilities,
|
||||
branch_prob): Likewise.
|
||||
* ree.c (find_removable_extensions): Likewise.
|
||||
* reg-stack.c (compensate_edges, convert_regs, reg_to_stack): Likewise.
|
||||
* regcprop.c (copyprop_hardreg_forward): Likewise.
|
||||
* reginfo.c (init_subregs_of_mode): Likewise.
|
||||
* regrename.c (regrename_analyze): Likewise.
|
||||
* regstat.c (regstat_compute_ri, regstat_compute_calls_crossed):
|
||||
Likewise.
|
||||
* reload1.c (has_nonexceptional_receiver, reload,
|
||||
calculate_elim_costs_all_insns): Likewise.
|
||||
* resource.c (init_resource_info, free_resource_info): Likewise.
|
||||
* sched-ebb.c (schedule_ebbs): Likewise.
|
||||
* sched-rgn.c (is_cfg_nonregular, find_single_block_region,
|
||||
haifa_find_rgns, sched_rgn_local_init): Likewise.
|
||||
* sel-sched-dump.c (sel_dump_cfg_2): Likewise.
|
||||
* sel-sched-ir.c (init_lv_sets, free_lv_sets,
|
||||
make_regions_from_the_rest): Likewise.
|
||||
* sese.c (build_sese_loop_nests, sese_build_liveouts): Likewise.
|
||||
* stack-ptr-mod.c (notice_stack_pointer_modification): Likewise.
|
||||
* store-motion.c (compute_store_table, build_store_vectors,
|
||||
one_store_motion_pass): Likewise.
|
||||
* tracer.c (tail_duplicate): Likewise.
|
||||
* trans-mem.c (compute_transaction_bits): Likewise.
|
||||
* tree-call-cdce.c (tree_call_cdce): Likewise.
|
||||
* tree-cfg.c (replace_loop_annotate, factor_computed_gotos,
|
||||
fold_cond_expr_cond, make_edges, assign_discriminators,
|
||||
make_abnormal_goto_edges, cleanup_dead_labels, group_case_labels,
|
||||
dump_cfg_stats, gimple_verify_flow_info, print_loop,
|
||||
execute_fixup_cfg): Likewise.
|
||||
* tree-cfgcleanup.c (cleanup_tree_cfg_1, merge_phi_nodes): Likewise.
|
||||
* tree-complex.c (init_dont_simulate_again, tree_lower_complex):
|
||||
Likewise.
|
||||
* tree-dfa.c (collect_dfa_stats, dump_enumerated_decls): Likewise.
|
||||
* tree-eh.c (execute_lower_resx, execute_lower_eh_dispatch,
|
||||
mark_reachable_handlers): Likewise.
|
||||
* tree-emutls.c (lower_emutls_function_body): Likewise.
|
||||
* tree-if-conv.c (main_tree_if_conversion): Likewise.
|
||||
* tree-inline.c (optimize_inline_calls): Likewise.
|
||||
* tree-into-ssa.c (rewrite_into_ssa, update_ssa): Likewise.
|
||||
* tree-nrv.c (tree_nrv, execute_return_slot_opt): Likewise.
|
||||
* tree-object-size.c (compute_object_sizes): Likewise.
|
||||
* tree-outof-ssa.c (eliminate_useless_phis, rewrite_trees,
|
||||
insert_backedge_copies, tree_profiling): Likewise.
|
||||
* tree-scalar-evolution.c (scev_const_prop): Likewise.
|
||||
* tree-sra.c (scan_function, sra_modify_function_body,
|
||||
propagate_dereference_distances, ipa_sra_modify_function_body,
|
||||
convert_callers): Likewise.
|
||||
* tree-ssa-ccp.c (ccp_initialize, execute_fold_all_builtins): Likewise.
|
||||
* tree-ssa-coalesce.c (build_ssa_conflict_graph,
|
||||
create_outofssa_var_map, coalesce_partitions): Likewise.
|
||||
* tree-ssa-copy.c (init_copy_prop): Likewise.
|
||||
* tree-ssa-copyrename.c (rename_ssa_copies): Likewise.
|
||||
* tree-ssa-dce.c (find_obviously_necessary_stmts,
|
||||
eliminate_unnecessary_stmts): Likewise.
|
||||
* tree-ssa-dom.c (free_all_edge_infos, tree_ssa_dominator_optimize):
|
||||
Likewise.
|
||||
* tree-ssa-forwprop.c (ssa_forward_propagate_and_combine): Likewise.
|
||||
* tree-ssa-live.c (clear_unused_block_pointer, remove_unused_locals,
|
||||
new_tree_live_info, calculate_live_on_exit, dump_live_info,
|
||||
analyze_memory_references, fill_always_executed_in,
|
||||
tree_ssa_lim_finalize): Likewise.
|
||||
* tree-ssa-loop-manip.c (find_uses_to_rename, verify_loop_closed_ssa):
|
||||
Likewise.
|
||||
* tree-ssa-math-opts.c (execute_cse_reciprocals, execute_cse_sincos,
|
||||
execute_optimize_bswap, execute_optimize_widening_mul): Likewise.
|
||||
* tree-ssa-propagate.c (substitute_and_fold): Likewise.
|
||||
* tree-ssa-structalias.c (compute_points_to_sets): Likewise.
|
||||
* tree-ssa-tail-merge.c (find_same_succ, reset_cluster_vectors):
|
||||
Likewise.
|
||||
* tree-ssa-ter.c (find_replaceable_exprs): Likewise.
|
||||
* tree-ssa-threadupdate.c (thread_through_all_blocks): Likewise.
|
||||
* tree-ssa-uncprop.c (associate_equivalences_with_edges,
|
||||
tree_ssa_uncprop): Likewise.
|
||||
* tree-ssa-uninit.c (warn_uninitialized_vars,
|
||||
execute_late_warn_uninitialized): Likewise.
|
||||
* tree-ssa.c (verify_ssa, execute_update_addresses_taken): Likewise.
|
||||
* tree-stdarg.c (check_all_va_list_escapes, execute_optimize_stdarg):
|
||||
Likewise.
|
||||
* tree-switch-conversion.c (do_switchconv): Likewise.
|
||||
* tree-vect-generic.c (expand_vector_operations): Likewise.
|
||||
* tree-vectorizer.c (adjust_simduid_builtins, note_simd_array_uses,
|
||||
execute_vect_slp): Likewise.
|
||||
* tree-vrp.c (check_all_array_refs, remove_range_assertions,
|
||||
vrp_initialize, identify_jump_threads, instrument_memory_accesses):
|
||||
Likewise.
|
||||
* ubsan.c (ubsan_pass): Likewise.
|
||||
* value-prof.c (verify_histograms, gimple_value_profile_transformations,
|
||||
gimple_find_values_to_profile): Likewise.
|
||||
* var-tracking.c (vt_find_locations, dump_dataflow_sets, vt_emit_notes,
|
||||
vt_initialize, delete_debug_insns, vt_finalize): Likewise.
|
||||
|
||||
2013-12-09 David Malcolm <dmalcolm@redhat.com>
|
||||
|
||||
* basic-block.h (last_basic_block): Eliminate macro.
|
||||
|
@ -2043,7 +2043,7 @@ transform_statements (void)
|
||||
gimple_stmt_iterator i;
|
||||
int saved_last_basic_block = last_basic_block_for_fn (cfun);
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
basic_block prev_bb = bb;
|
||||
|
||||
@ -2557,7 +2557,7 @@ execute_sanopt (void)
|
||||
{
|
||||
basic_block bb;
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
gimple_stmt_iterator gsi;
|
||||
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
|
||||
|
@ -1480,7 +1480,7 @@ rest_of_handle_auto_inc_dec (void)
|
||||
reg_next_use = XCNEWVEC (rtx, max_reg);
|
||||
reg_next_inc_use = XCNEWVEC (rtx, max_reg);
|
||||
reg_next_def = XCNEWVEC (rtx, max_reg);
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
merge_in_block (max_reg, bb);
|
||||
|
||||
free (reg_next_use);
|
||||
|
@ -333,8 +333,6 @@ struct GTY(()) control_flow_graph {
|
||||
#define FOR_EACH_BB_FN(BB, FN) \
|
||||
FOR_BB_BETWEEN (BB, (FN)->cfg->x_entry_block_ptr->next_bb, (FN)->cfg->x_exit_block_ptr, next_bb)
|
||||
|
||||
#define FOR_EACH_BB(BB) FOR_EACH_BB_FN (BB, cfun)
|
||||
|
||||
#define FOR_EACH_BB_REVERSE_FN(BB, FN) \
|
||||
FOR_BB_BETWEEN (BB, (FN)->cfg->x_exit_block_ptr->prev_bb, (FN)->cfg->x_entry_block_ptr, prev_bb)
|
||||
|
||||
|
@ -1566,7 +1566,7 @@ find_rarely_executed_basic_blocks_and_crossing_edges (void)
|
||||
vec<basic_block> bbs_in_hot_partition = vNULL;
|
||||
|
||||
/* Mark which partition (hot/cold) each basic block belongs in. */
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
bool cold_bb = false;
|
||||
|
||||
@ -1658,7 +1658,7 @@ find_rarely_executed_basic_blocks_and_crossing_edges (void)
|
||||
|
||||
/* Mark every edge that crosses between sections. */
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
FOR_EACH_EDGE (e, ei, bb->succs)
|
||||
{
|
||||
unsigned int flags = e->flags;
|
||||
@ -1691,7 +1691,7 @@ set_edge_can_fallthru_flag (void)
|
||||
{
|
||||
basic_block bb;
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
edge e;
|
||||
edge_iterator ei;
|
||||
@ -1792,7 +1792,7 @@ fix_up_fall_thru_edges (void)
|
||||
rtx old_jump;
|
||||
rtx fall_thru_label;
|
||||
|
||||
FOR_EACH_BB (cur_bb)
|
||||
FOR_EACH_BB_FN (cur_bb, cfun)
|
||||
{
|
||||
fall_thru = NULL;
|
||||
if (EDGE_COUNT (cur_bb->succs) > 0)
|
||||
@ -1992,7 +1992,7 @@ fix_crossing_conditional_branches (void)
|
||||
rtx old_label = NULL_RTX;
|
||||
rtx new_label;
|
||||
|
||||
FOR_EACH_BB (cur_bb)
|
||||
FOR_EACH_BB_FN (cur_bb, cfun)
|
||||
{
|
||||
crossing_edge = NULL;
|
||||
if (EDGE_COUNT (cur_bb->succs) > 0)
|
||||
@ -2123,7 +2123,7 @@ fix_crossing_unconditional_branches (void)
|
||||
rtx cur_insn;
|
||||
edge succ;
|
||||
|
||||
FOR_EACH_BB (cur_bb)
|
||||
FOR_EACH_BB_FN (cur_bb, cfun)
|
||||
{
|
||||
last_insn = BB_END (cur_bb);
|
||||
|
||||
@ -2201,7 +2201,7 @@ add_reg_crossing_jump_notes (void)
|
||||
edge e;
|
||||
edge_iterator ei;
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
FOR_EACH_EDGE (e, ei, bb->succs)
|
||||
if ((e->flags & EDGE_CROSSING)
|
||||
&& JUMP_P (BB_END (e->src))
|
||||
@ -2286,7 +2286,7 @@ insert_section_boundary_note (void)
|
||||
if (!crtl->has_bb_partition)
|
||||
return;
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
if (!current_partition)
|
||||
current_partition = BB_PARTITION (bb);
|
||||
@ -2321,7 +2321,7 @@ rest_of_handle_reorder_blocks (void)
|
||||
reorder_basic_blocks ();
|
||||
cleanup_cfg (CLEANUP_EXPENSIVE);
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
bb->aux = bb->next_bb;
|
||||
cfg_layout_finalize ();
|
||||
@ -2410,7 +2410,7 @@ duplicate_computed_gotos (void)
|
||||
/* Look for blocks that end in a computed jump, and see if such blocks
|
||||
are suitable for unfactoring. If a block is a candidate for unfactoring,
|
||||
mark it in the candidates. */
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
rtx insn;
|
||||
edge e;
|
||||
@ -2457,7 +2457,7 @@ duplicate_computed_gotos (void)
|
||||
goto done;
|
||||
|
||||
/* Duplicate computed gotos. */
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
if (bb->flags & BB_VISITED)
|
||||
continue;
|
||||
|
@ -101,7 +101,7 @@ clear_edges (void)
|
||||
edge e;
|
||||
edge_iterator ei;
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
FOR_EACH_EDGE (e, ei, bb->succs)
|
||||
free_edge (e);
|
||||
@ -163,7 +163,7 @@ compact_blocks (void)
|
||||
basic_block bb;
|
||||
|
||||
i = NUM_FIXED_BLOCKS;
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
SET_BASIC_BLOCK_FOR_FN (cfun, i, bb);
|
||||
bb->index = i;
|
||||
@ -828,7 +828,7 @@ brief_dump_cfg (FILE *file, int flags)
|
||||
{
|
||||
basic_block bb;
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
dump_bb_info (file, bb, 0,
|
||||
flags & (TDF_COMMENT | TDF_DETAILS),
|
||||
|
@ -159,7 +159,7 @@ find_unreachable_blocks (void)
|
||||
|
||||
/* Clear all the reachability flags. */
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
bb->flags &= ~BB_REACHABLE;
|
||||
|
||||
/* Add our starting points to the worklist. Almost always there will
|
||||
@ -554,7 +554,7 @@ add_noreturn_fake_exit_edges (void)
|
||||
{
|
||||
basic_block bb;
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
if (EDGE_COUNT (bb->succs) == 0)
|
||||
make_single_succ_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
|
||||
}
|
||||
@ -1236,7 +1236,7 @@ compute_dominance_frontiers_1 (bitmap_head *frontiers)
|
||||
edge p;
|
||||
edge_iterator ei;
|
||||
basic_block b;
|
||||
FOR_EACH_BB (b)
|
||||
FOR_EACH_BB_FN (b, cfun)
|
||||
{
|
||||
if (EDGE_COUNT (b->preds) >= 2)
|
||||
{
|
||||
@ -1517,7 +1517,7 @@ single_pred_before_succ_order (void)
|
||||
bitmap_clear (visited);
|
||||
|
||||
MARK_VISITED (ENTRY_BLOCK_PTR_FOR_FN (cfun));
|
||||
FOR_EACH_BB (x)
|
||||
FOR_EACH_BB_FN (x, cfun)
|
||||
{
|
||||
if (VISITED_P (x))
|
||||
continue;
|
||||
|
@ -595,15 +595,15 @@ find_many_sub_basic_blocks (sbitmap blocks)
|
||||
{
|
||||
basic_block bb, min, max;
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
SET_STATE (bb,
|
||||
bitmap_bit_p (blocks, bb->index) ? BLOCK_TO_SPLIT : BLOCK_ORIGINAL);
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
if (STATE (bb) == BLOCK_TO_SPLIT)
|
||||
find_bb_boundaries (bb);
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
if (STATE (bb) != BLOCK_ORIGINAL)
|
||||
break;
|
||||
|
||||
@ -640,6 +640,6 @@ find_many_sub_basic_blocks (sbitmap blocks)
|
||||
compute_outgoing_frequencies (bb);
|
||||
}
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
SET_STATE (bb, 0);
|
||||
}
|
||||
|
@ -2613,7 +2613,7 @@ try_optimize_cfg (int mode)
|
||||
|
||||
crossjumps_occured = false;
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
update_forwarder_flag (bb);
|
||||
|
||||
if (! targetm.cannot_modify_jumps_p ())
|
||||
@ -2955,7 +2955,7 @@ delete_dead_jumptables (void)
|
||||
|
||||
/* A dead jump table does not belong to any basic block. Scan insns
|
||||
between two adjacent basic blocks. */
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
rtx insn, next;
|
||||
|
||||
|
@ -520,7 +520,7 @@ add_scope_conflicts (void)
|
||||
}
|
||||
}
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
add_scope_conflicts_1 (bb, work, true);
|
||||
|
||||
free (rpo);
|
||||
@ -5378,7 +5378,7 @@ discover_nonconstant_array_refs (void)
|
||||
basic_block bb;
|
||||
gimple_stmt_iterator gsi;
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
|
||||
{
|
||||
gimple stmt = gsi_stmt (gsi);
|
||||
|
@ -50,7 +50,7 @@ flow_loops_cfg_dump (FILE *file)
|
||||
if (!file)
|
||||
return;
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
edge succ;
|
||||
edge_iterator ei;
|
||||
@ -834,7 +834,7 @@ get_loop_body (const struct loop *loop)
|
||||
gcc_assert (loop->num_nodes == (unsigned) n_basic_blocks_for_fn (cfun));
|
||||
body[tv++] = loop->header;
|
||||
body[tv++] = EXIT_BLOCK_PTR_FOR_FN (cfun);
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
body[tv++] = bb;
|
||||
}
|
||||
else
|
||||
@ -1082,7 +1082,7 @@ record_loop_exits (void)
|
||||
loop_exit_hash, loop_exit_eq,
|
||||
loop_exit_free);
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
FOR_EACH_EDGE (e, ei, bb->succs)
|
||||
{
|
||||
@ -1343,7 +1343,7 @@ verify_loop_structure (void)
|
||||
verify_dominators (CDI_DOMINATORS);
|
||||
|
||||
/* Check the headers. */
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
if (bb_loop_header_p (bb))
|
||||
{
|
||||
if (bb->loop_father->header == NULL)
|
||||
@ -1479,7 +1479,7 @@ verify_loop_structure (void)
|
||||
{
|
||||
/* Record old info. */
|
||||
irreds = sbitmap_alloc (last_basic_block_for_fn (cfun));
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
edge_iterator ei;
|
||||
if (bb->flags & BB_IRREDUCIBLE_LOOP)
|
||||
@ -1495,7 +1495,7 @@ verify_loop_structure (void)
|
||||
mark_irreducible_loops ();
|
||||
|
||||
/* Compare. */
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
edge_iterator ei;
|
||||
|
||||
@ -1578,7 +1578,7 @@ verify_loop_structure (void)
|
||||
|
||||
sizes = XCNEWVEC (unsigned, num);
|
||||
memset (sizes, 0, sizeof (unsigned) * num);
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
edge_iterator ei;
|
||||
if (bb->loop_father == current_loops->tree_root)
|
||||
|
@ -432,7 +432,7 @@ mark_loop_exit_edges (void)
|
||||
if (number_of_loops (cfun) <= 1)
|
||||
return;
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
edge_iterator ei;
|
||||
|
||||
|
22
gcc/cfgrtl.c
22
gcc/cfgrtl.c
@ -416,7 +416,7 @@ compute_bb_for_insn (void)
|
||||
{
|
||||
basic_block bb;
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
rtx end = BB_END (bb);
|
||||
rtx insn;
|
||||
@ -2275,7 +2275,7 @@ find_partition_fixes (bool flag_only)
|
||||
/* Callers check this. */
|
||||
gcc_checking_assert (crtl->has_bb_partition);
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
if ((BB_PARTITION (bb) == BB_COLD_PARTITION))
|
||||
bbs_in_cold_partition.safe_push (bb);
|
||||
|
||||
@ -2372,7 +2372,7 @@ verify_hot_cold_block_grouping (void)
|
||||
|| current_ir_type () != IR_RTL_CFGRTL)
|
||||
return err;
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
if (current_partition != BB_UNPARTITIONED
|
||||
&& BB_PARTITION (bb) != current_partition)
|
||||
@ -3201,7 +3201,7 @@ purge_all_dead_edges (void)
|
||||
int purged = false;
|
||||
basic_block bb;
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
bool purged_here = purge_dead_edges (bb);
|
||||
|
||||
@ -3226,7 +3226,7 @@ fixup_abnormal_edges (void)
|
||||
bool inserted = false;
|
||||
basic_block bb;
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
edge e;
|
||||
edge_iterator ei;
|
||||
@ -3449,7 +3449,7 @@ record_effective_endpoints (void)
|
||||
cfg_layout_function_header = NULL_RTX;
|
||||
|
||||
next_insn = get_insns ();
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
rtx end;
|
||||
|
||||
@ -3479,7 +3479,7 @@ outof_cfg_layout_mode (void)
|
||||
{
|
||||
basic_block bb;
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
bb->aux = bb->next_bb;
|
||||
|
||||
@ -3857,7 +3857,7 @@ fixup_reorder_chain (void)
|
||||
relink_block_chain (/*stay_in_cfglayout_mode=*/false);
|
||||
|
||||
/* Annoying special case - jump around dead jumptables left in the code. */
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
edge e = find_fallthru_edge (bb->succs);
|
||||
|
||||
@ -3868,7 +3868,7 @@ fixup_reorder_chain (void)
|
||||
/* Ensure goto_locus from edges has some instructions with that locus
|
||||
in RTL. */
|
||||
if (!optimize)
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
edge e;
|
||||
edge_iterator ei;
|
||||
@ -4047,7 +4047,7 @@ force_one_exit_fallthru (void)
|
||||
|
||||
/* Fix up the chain of blocks -- make FORWARDER immediately precede the
|
||||
exit block. */
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
if (bb->aux == NULL && bb != forwarder)
|
||||
{
|
||||
@ -4258,7 +4258,7 @@ break_superblocks (void)
|
||||
superblocks = sbitmap_alloc (last_basic_block_for_fn (cfun));
|
||||
bitmap_clear (superblocks);
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
if (bb->flags & BB_SUPERBLOCK)
|
||||
{
|
||||
bb->flags &= ~BB_SUPERBLOCK;
|
||||
|
@ -317,7 +317,7 @@ build_cgraph_edges (void)
|
||||
|
||||
/* Create the callgraph edges and record the nodes referenced by the function.
|
||||
body. */
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
|
||||
{
|
||||
@ -451,7 +451,7 @@ rebuild_cgraph_edges (void)
|
||||
|
||||
node->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
|
||||
{
|
||||
@ -505,7 +505,7 @@ cgraph_rebuild_references (void)
|
||||
|
||||
node->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
|
||||
ipa_record_stmt_references (node, gsi_stmt (gsi));
|
||||
|
@ -95,7 +95,7 @@ combine_stack_adjustments (void)
|
||||
{
|
||||
basic_block bb;
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
combine_stack_adjustments_for_block (bb);
|
||||
}
|
||||
|
||||
|
@ -960,7 +960,7 @@ delete_noop_moves (void)
|
||||
rtx insn, next;
|
||||
basic_block bb;
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb)); insn = next)
|
||||
{
|
||||
@ -997,7 +997,7 @@ create_log_links (void)
|
||||
usage -- these are taken from original flow.c did. Don't ask me why it is
|
||||
done this way; I don't know and if it works, I don't want to know. */
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
FOR_BB_INSNS_REVERSE (bb, insn)
|
||||
{
|
||||
@ -1160,7 +1160,7 @@ combine_instructions (rtx f, unsigned int nregs)
|
||||
last_bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
|
||||
|
||||
create_log_links ();
|
||||
FOR_EACH_BB (this_basic_block)
|
||||
FOR_EACH_BB_FN (this_basic_block, cfun)
|
||||
{
|
||||
optimize_this_for_speed_p = optimize_bb_for_speed_p (this_basic_block);
|
||||
last_call_luid = 0;
|
||||
@ -1211,7 +1211,7 @@ combine_instructions (rtx f, unsigned int nregs)
|
||||
setup_incoming_promotions (first);
|
||||
last_bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
|
||||
|
||||
FOR_EACH_BB (this_basic_block)
|
||||
FOR_EACH_BB_FN (this_basic_block, cfun)
|
||||
{
|
||||
rtx last_combined_insn = NULL_RTX;
|
||||
optimize_this_for_speed_p = optimize_bb_for_speed_p (this_basic_block);
|
||||
|
@ -16673,7 +16673,7 @@ thumb1_reorg (void)
|
||||
{
|
||||
basic_block bb;
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
rtx dest, src;
|
||||
rtx pat, op0, set = NULL;
|
||||
@ -16751,7 +16751,7 @@ thumb2_reorg (void)
|
||||
compute_bb_for_insn ();
|
||||
df_analyze ();
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
rtx insn;
|
||||
|
||||
|
@ -3957,7 +3957,7 @@ static void
|
||||
bfin_gen_bundles (void)
|
||||
{
|
||||
basic_block bb;
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
rtx insn, next;
|
||||
rtx slot[3];
|
||||
@ -4036,7 +4036,7 @@ static void
|
||||
reorder_var_tracking_notes (void)
|
||||
{
|
||||
basic_block bb;
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
rtx insn, next;
|
||||
rtx queue = NULL_RTX;
|
||||
|
@ -4629,7 +4629,7 @@ c6x_gen_bundles (void)
|
||||
basic_block bb;
|
||||
rtx insn, next, last_call;
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
rtx insn, next;
|
||||
/* The machine is eight insns wide. We can have up to six shadow
|
||||
@ -5383,7 +5383,7 @@ conditionalize_after_sched (void)
|
||||
{
|
||||
basic_block bb;
|
||||
rtx insn;
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
FOR_BB_INSNS (bb, insn)
|
||||
{
|
||||
unsigned uid = INSN_UID (insn);
|
||||
@ -5959,7 +5959,7 @@ c6x_reorg (void)
|
||||
|
||||
if (c6x_flag_schedule_insns2)
|
||||
{
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
if ((bb->flags & BB_DISABLE_SCHEDULE) == 0)
|
||||
assign_reservations (BB_HEAD (bb), BB_END (bb));
|
||||
}
|
||||
|
@ -69,7 +69,7 @@ resolve_sw_modes (void)
|
||||
df_note_add_problem ();
|
||||
df_analyze ();
|
||||
}
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
FOR_BB_INSNS (bb, insn)
|
||||
{
|
||||
enum attr_fp_mode selected_mode;
|
||||
|
@ -8070,11 +8070,11 @@ frv_optimize_membar (void)
|
||||
first_io = XCNEWVEC (struct frv_io, last_basic_block_for_fn (cfun));
|
||||
last_membar = XCNEWVEC (rtx, last_basic_block_for_fn (cfun));
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
frv_optimize_membar_local (bb, &first_io[bb->index],
|
||||
&last_membar[bb->index]);
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
if (last_membar[bb->index] != 0)
|
||||
frv_optimize_membar_global (bb, first_io, last_membar[bb->index]);
|
||||
|
||||
|
@ -10500,7 +10500,7 @@ ix86_finalize_stack_realign_flags (void)
|
||||
add_to_hard_reg_set (&set_up_by_prologue, Pmode, ARG_POINTER_REGNUM);
|
||||
add_to_hard_reg_set (&set_up_by_prologue, Pmode,
|
||||
HARD_FRAME_POINTER_REGNUM);
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
rtx insn;
|
||||
FOR_BB_INSNS (bb, insn)
|
||||
|
@ -9688,7 +9688,7 @@ ia64_reorg (void)
|
||||
|
||||
/* We can't let modulo-sched prevent us from scheduling any bbs,
|
||||
since we need the final schedule to produce bundle information. */
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
bb->flags &= ~BB_DISABLE_SCHEDULE;
|
||||
|
||||
initiate_bundle_states ();
|
||||
|
@ -15332,7 +15332,7 @@ mips_annotate_pic_calls (void)
|
||||
basic_block bb;
|
||||
rtx insn;
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
FOR_BB_INSNS (bb, insn)
|
||||
{
|
||||
rtx call, reg, symbol, second_call;
|
||||
|
@ -3174,7 +3174,7 @@ reorder_var_tracking_notes (void)
|
||||
{
|
||||
basic_block bb;
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
rtx insn, next, last_insn = NULL_RTX;
|
||||
rtx queue = NULL_RTX;
|
||||
|
@ -16395,7 +16395,7 @@ rs6000_alloc_sdmode_stack_slot (void)
|
||||
if (TARGET_NO_SDMODE_STACK)
|
||||
return;
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
|
||||
{
|
||||
tree ret = walk_gimple_op (gsi_stmt (gsi), rs6000_check_sdmode, NULL);
|
||||
|
@ -7458,7 +7458,7 @@ s390_regs_ever_clobbered (char regs_ever_clobbered[])
|
||||
if (!call_really_used_regs[i])
|
||||
regs_ever_clobbered[i] = 1;
|
||||
|
||||
FOR_EACH_BB (cur_bb)
|
||||
FOR_EACH_BB_FN (cur_bb, cfun)
|
||||
{
|
||||
FOR_BB_INSNS (cur_bb, cur_insn)
|
||||
{
|
||||
|
@ -1469,7 +1469,7 @@ sh_treg_combine::execute (void)
|
||||
// Look for basic blocks that end with a conditional branch and try to
|
||||
// optimize them.
|
||||
basic_block bb;
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
rtx i = BB_END (bb);
|
||||
if (any_condjump_p (i) && onlyjump_p (i))
|
||||
|
@ -2645,7 +2645,7 @@ spu_machine_dependent_reorg (void)
|
||||
find_many_sub_basic_blocks (blocks);
|
||||
|
||||
/* We have to schedule to make sure alignment is ok. */
|
||||
FOR_EACH_BB (bb) bb->flags &= ~BB_DISABLE_SCHEDULE;
|
||||
FOR_EACH_BB_FN (bb, cfun) bb->flags &= ~BB_DISABLE_SCHEDULE;
|
||||
|
||||
/* The hints need to be scheduled, so call it again. */
|
||||
schedule_insns ();
|
||||
|
@ -4383,7 +4383,7 @@ static void
|
||||
tilegx_gen_bundles (void)
|
||||
{
|
||||
basic_block bb;
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
rtx insn, next;
|
||||
rtx end = NEXT_INSN (BB_END (bb));
|
||||
@ -4709,7 +4709,7 @@ static void
|
||||
reorder_var_tracking_notes (void)
|
||||
{
|
||||
basic_block bb;
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
rtx insn, next;
|
||||
rtx queue = NULL_RTX;
|
||||
|
@ -3988,7 +3988,7 @@ static void
|
||||
tilepro_gen_bundles (void)
|
||||
{
|
||||
basic_block bb;
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
rtx insn, next;
|
||||
rtx end = NEXT_INSN (BB_END (bb));
|
||||
@ -4259,7 +4259,7 @@ static void
|
||||
reorder_var_tracking_notes (void)
|
||||
{
|
||||
basic_block bb;
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
rtx insn, next;
|
||||
rtx queue = NULL_RTX;
|
||||
|
@ -588,7 +588,7 @@ coverage_compute_cfg_checksum (void)
|
||||
basic_block bb;
|
||||
unsigned chksum = n_basic_blocks_for_fn (cfun);
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
edge e;
|
||||
edge_iterator ei;
|
||||
|
@ -400,7 +400,7 @@ compute_hash_table_work (struct hash_table_d *table)
|
||||
/* Allocate vars to track sets of regs. */
|
||||
reg_set_bitmap = ALLOC_REG_SET (NULL);
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
rtx insn;
|
||||
|
||||
@ -649,7 +649,7 @@ compute_cprop_data (void)
|
||||
aren't recorded for the local pass so they cannot be propagated within
|
||||
their basic block by this pass and 2) the global pass would otherwise
|
||||
propagate them only in the successors of their basic block. */
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
int index = implicit_set_indexes[bb->index];
|
||||
if (index != -1)
|
||||
@ -1234,7 +1234,7 @@ local_cprop_pass (void)
|
||||
unsigned i;
|
||||
|
||||
cselib_init (0);
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
FOR_BB_INSNS (bb, insn)
|
||||
{
|
||||
@ -1359,7 +1359,7 @@ find_implicit_sets (void)
|
||||
|
||||
implicit_sets = XCNEWVEC (rtx, implicit_sets_size);
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
/* Check for more than one successor. */
|
||||
if (EDGE_COUNT (bb->succs) <= 1)
|
||||
|
@ -7335,7 +7335,7 @@ cse_condition_code_reg (void)
|
||||
else
|
||||
cc_reg_2 = NULL_RTX;
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
rtx last_insn;
|
||||
rtx cc_reg;
|
||||
|
@ -623,7 +623,7 @@ prescan_insns_for_dce (bool fast)
|
||||
if (!df_in_progress && ACCUMULATE_OUTGOING_ARGS)
|
||||
arg_stores = BITMAP_ALLOC (NULL);
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
FOR_BB_INSNS_REVERSE_SAFE (bb, insn, prev)
|
||||
if (NONDEBUG_INSN_P (insn))
|
||||
|
@ -1543,7 +1543,7 @@ df_compact_blocks (void)
|
||||
bitmap_set_bit (dflow->out_of_date_transfer_functions, EXIT_BLOCK);
|
||||
|
||||
i = NUM_FIXED_BLOCKS;
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
if (bitmap_bit_p (&tmp, bb->index))
|
||||
bitmap_set_bit (dflow->out_of_date_transfer_functions, i);
|
||||
@ -1564,7 +1564,7 @@ df_compact_blocks (void)
|
||||
place in the block_info vector. Null out the copied
|
||||
item. The entry and exit blocks never move. */
|
||||
i = NUM_FIXED_BLOCKS;
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
df_set_bb_info (dflow, i,
|
||||
(char *)problem_temps
|
||||
@ -1590,7 +1590,7 @@ df_compact_blocks (void)
|
||||
bitmap_copy (&tmp, df->blocks_to_analyze);
|
||||
bitmap_clear (df->blocks_to_analyze);
|
||||
i = NUM_FIXED_BLOCKS;
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
if (bitmap_bit_p (&tmp, bb->index))
|
||||
bitmap_set_bit (df->blocks_to_analyze, i);
|
||||
@ -1601,7 +1601,7 @@ df_compact_blocks (void)
|
||||
bitmap_clear (&tmp);
|
||||
|
||||
i = NUM_FIXED_BLOCKS;
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
SET_BASIC_BLOCK_FOR_FN (cfun, i, bb);
|
||||
bb->index = i;
|
||||
|
@ -2427,7 +2427,7 @@ df_word_lr_alloc (bitmap all_blocks ATTRIBUTE_UNUSED)
|
||||
|
||||
bitmap_obstack_initialize (&problem_data->word_lr_bitmaps);
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
bitmap_set_bit (df_word_lr->out_of_date_transfer_functions, bb->index);
|
||||
|
||||
bitmap_set_bit (df_word_lr->out_of_date_transfer_functions, ENTRY_BLOCK);
|
||||
|
@ -449,7 +449,7 @@ df_scan_start_dump (FILE *file ATTRIBUTE_UNUSED)
|
||||
fprintf (file, "} ");
|
||||
}
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
FOR_BB_INSNS (bb, insn)
|
||||
if (INSN_P (insn))
|
||||
{
|
||||
@ -673,7 +673,7 @@ df_scan_blocks (void)
|
||||
df_set_bb_dirty (BASIC_BLOCK_FOR_FN (cfun, EXIT_BLOCK));
|
||||
|
||||
/* Regular blocks */
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
unsigned int bb_index = bb->index;
|
||||
df_bb_refs_record (bb_index, true);
|
||||
@ -1415,7 +1415,7 @@ df_insn_rescan_all (void)
|
||||
bitmap_clear (&df->insns_to_rescan);
|
||||
bitmap_clear (&df->insns_to_notes_rescan);
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
rtx insn;
|
||||
FOR_BB_INSNS (bb, insn)
|
||||
@ -4154,7 +4154,7 @@ df_update_entry_exit_and_calls (void)
|
||||
|
||||
/* The call insns need to be rescanned because there may be changes
|
||||
in the set of registers clobbered across the call. */
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
rtx insn;
|
||||
FOR_BB_INSNS (bb, insn)
|
||||
|
@ -662,7 +662,7 @@ calculate_dominance_info (enum cdi_direction dir)
|
||||
calc_dfs_tree (&di, reverse);
|
||||
calc_idoms (&di, reverse);
|
||||
|
||||
FOR_EACH_BB (b)
|
||||
FOR_EACH_BB_FN (b, cfun)
|
||||
{
|
||||
TBB d = di.dom[di.dfs_order[b->index]];
|
||||
|
||||
@ -1025,7 +1025,7 @@ verify_dominators (enum cdi_direction dir)
|
||||
calc_dfs_tree (&di, reverse);
|
||||
calc_idoms (&di, reverse);
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
imm_bb = get_immediate_dominator (dir, bb);
|
||||
if (!imm_bb)
|
||||
@ -1492,7 +1492,7 @@ DEBUG_FUNCTION void
|
||||
debug_dominance_info (enum cdi_direction dir)
|
||||
{
|
||||
basic_block bb, bb2;
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
if ((bb2 = get_immediate_dominator (dir, bb)))
|
||||
fprintf (stderr, "%i %i\n", bb->index, bb2->index);
|
||||
}
|
||||
|
@ -3507,7 +3507,7 @@ static void
|
||||
dse_step5_nospill (void)
|
||||
{
|
||||
basic_block bb;
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
bb_info_t bb_info = bb_table[bb->index];
|
||||
insn_info_t insn_info = bb_info->last_insn;
|
||||
|
@ -1511,7 +1511,7 @@ finish_eh_generation (void)
|
||||
commit_edge_insertions ();
|
||||
|
||||
/* Redirect all EH edges from the post_landing_pad to the landing pad. */
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
eh_landing_pad lp;
|
||||
edge_iterator ei;
|
||||
|
@ -700,14 +700,14 @@ compute_alignments (void)
|
||||
flow_loops_dump (dump_file, NULL, 1);
|
||||
}
|
||||
loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
if (bb->frequency > freq_max)
|
||||
freq_max = bb->frequency;
|
||||
freq_threshold = freq_max / PARAM_VALUE (PARAM_ALIGN_THRESHOLD);
|
||||
|
||||
if (dump_file)
|
||||
fprintf (dump_file, "freq_max: %i\n",freq_max);
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
rtx label = BB_HEAD (bb);
|
||||
int fallthru_frequency = 0, branch_frequency = 0, has_fallthru = 0;
|
||||
|
@ -6043,7 +6043,7 @@ thread_prologue_and_epilogue_insns (void)
|
||||
max_grow_size = get_uncond_jump_length ();
|
||||
max_grow_size *= PARAM_VALUE (PARAM_MAX_GROW_COPY_BB_INSNS);
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
rtx insn;
|
||||
unsigned size = 0;
|
||||
@ -6120,7 +6120,7 @@ thread_prologue_and_epilogue_insns (void)
|
||||
needing a prologue. */
|
||||
bitmap_clear (&bb_on_list);
|
||||
bitmap_and_compl (&bb_antic_flags, &bb_flags, &bb_tail);
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
if (!bitmap_bit_p (&bb_antic_flags, bb->index))
|
||||
continue;
|
||||
@ -6154,7 +6154,7 @@ thread_prologue_and_epilogue_insns (void)
|
||||
/* Find exactly one edge that leads to a block in ANTIC from
|
||||
a block that isn't. */
|
||||
if (!bitmap_bit_p (&bb_antic_flags, entry_edge->dest->index))
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
if (!bitmap_bit_p (&bb_antic_flags, bb->index))
|
||||
continue;
|
||||
@ -6202,7 +6202,7 @@ thread_prologue_and_epilogue_insns (void)
|
||||
/* Find tail blocks reachable from both blocks needing a
|
||||
prologue and blocks not needing a prologue. */
|
||||
if (!bitmap_empty_p (&bb_tail))
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
bool some_pro, some_no_pro;
|
||||
if (!bitmap_bit_p (&bb_tail, bb->index))
|
||||
@ -6480,7 +6480,7 @@ thread_prologue_and_epilogue_insns (void)
|
||||
we take advantage of cfg_layout_finalize using
|
||||
fixup_fallthru_exit_predecessor. */
|
||||
cfg_layout_initialize (0);
|
||||
FOR_EACH_BB (cur_bb)
|
||||
FOR_EACH_BB_FN (cur_bb, cfun)
|
||||
if (cur_bb->index >= NUM_FIXED_BLOCKS
|
||||
&& cur_bb->next_bb->index >= NUM_FIXED_BLOCKS)
|
||||
cur_bb->aux = cur_bb->next_bb;
|
||||
@ -7192,7 +7192,7 @@ rest_of_match_asm_constraints (void)
|
||||
return 0;
|
||||
|
||||
df_set_flags (DF_DEFER_INSN_RESCAN);
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
FOR_BB_INSNS (bb, insn)
|
||||
{
|
||||
|
16
gcc/gcse.c
16
gcc/gcse.c
@ -1559,7 +1559,7 @@ compute_hash_table_work (struct hash_table_d *table)
|
||||
for (i = 0; i < max_reg_num (); ++i)
|
||||
reg_avail_info[i].last_bb = NULL;
|
||||
|
||||
FOR_EACH_BB (current_bb)
|
||||
FOR_EACH_BB_FN (current_bb, cfun)
|
||||
{
|
||||
rtx insn;
|
||||
unsigned int regno;
|
||||
@ -1899,7 +1899,7 @@ prune_expressions (bool pre_p)
|
||||
}
|
||||
}
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
edge e;
|
||||
edge_iterator ei;
|
||||
@ -2020,7 +2020,7 @@ compute_pre_data (void)
|
||||
~(TRANSP | COMP)
|
||||
*/
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
bitmap_ior (ae_kill[bb->index], transp[bb->index], comp[bb->index]);
|
||||
bitmap_not (ae_kill[bb->index], ae_kill[bb->index]);
|
||||
@ -2855,7 +2855,7 @@ compute_code_hoist_vbeinout (void)
|
||||
{
|
||||
fprintf (dump_file, "hoisting vbeinout computation: %d passes\n", passes);
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
fprintf (dump_file, "vbein (%d): ", bb->index);
|
||||
dump_bitmap_file (dump_file, hoist_vbein[bb->index]);
|
||||
@ -3169,7 +3169,7 @@ hoist_code (void)
|
||||
to_bb_head = XCNEWVEC (int, get_max_uid ());
|
||||
bb_size = XCNEWVEC (int, last_basic_block_for_fn (cfun));
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
rtx insn;
|
||||
int to_head;
|
||||
@ -3512,7 +3512,7 @@ calculate_bb_reg_pressure (void)
|
||||
|
||||
ira_setup_eliminable_regset ();
|
||||
curr_regs_live = BITMAP_ALLOC (®_obstack);
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
curr_bb = bb;
|
||||
BB_DATA (bb)->live_in = BITMAP_ALLOC (NULL);
|
||||
@ -3562,7 +3562,7 @@ calculate_bb_reg_pressure (void)
|
||||
return;
|
||||
|
||||
fprintf (dump_file, "\nRegister Pressure: \n");
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
fprintf (dump_file, " Basic block %d: \n", bb->index);
|
||||
for (i = 0; (int) i < ira_pressure_classes_num; i++)
|
||||
@ -3888,7 +3888,7 @@ compute_ld_motion_mems (void)
|
||||
pre_ldst_mems = NULL;
|
||||
pre_ldst_table.create (13);
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
FOR_BB_INSNS (bb, insn)
|
||||
{
|
||||
|
@ -839,7 +839,7 @@ gsi_commit_edge_inserts (void)
|
||||
gsi_commit_one_edge_insert (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
|
||||
NULL);
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
FOR_EACH_EDGE (e, ei, bb->succs)
|
||||
gsi_commit_one_edge_insert (e, NULL);
|
||||
}
|
||||
|
@ -216,7 +216,7 @@ find_implicit_erroneous_behaviour (void)
|
||||
{
|
||||
basic_block bb;
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
gimple_stmt_iterator si;
|
||||
|
||||
@ -304,7 +304,7 @@ find_explicit_erroneous_behaviour (void)
|
||||
{
|
||||
basic_block bb;
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
gimple_stmt_iterator si;
|
||||
|
||||
|
@ -2295,7 +2295,7 @@ rewrite_reductions_out_of_ssa (scop_p scop)
|
||||
gimple_stmt_iterator psi;
|
||||
sese region = SCOP_REGION (scop);
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
if (bb_in_sese_p (bb, region))
|
||||
for (psi = gsi_start_phis (bb); !gsi_end_p (psi);)
|
||||
{
|
||||
@ -2489,7 +2489,7 @@ rewrite_cross_bb_scalar_deps_out_of_ssa (scop_p scop)
|
||||
/* Create an extra empty BB after the scop. */
|
||||
split_edge (SESE_EXIT (region));
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
if (bb_in_sese_p (bb, region))
|
||||
for (psi = gsi_start_bb (bb); !gsi_end_p (psi); gsi_next (&psi))
|
||||
changed |= rewrite_cross_bb_scalar_deps (scop, &psi);
|
||||
|
@ -6709,7 +6709,7 @@ haifa_sched_init (void)
|
||||
|
||||
sched_init_bbs ();
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
bbs.quick_push (bb);
|
||||
sched_init_luids (bbs);
|
||||
sched_deps_init (true);
|
||||
|
@ -357,7 +357,7 @@ discover_loops (bitmap_obstack *loop_stack, struct hw_doloop_hooks *hooks)
|
||||
/* Find all the possible loop tails. This means searching for every
|
||||
loop_end instruction. For each one found, create a hwloop_info
|
||||
structure and add the head block to the work list. */
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
rtx tail = BB_END (bb);
|
||||
rtx insn, reg;
|
||||
@ -480,7 +480,7 @@ set_bb_indices (void)
|
||||
intptr_t index;
|
||||
|
||||
index = 0;
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
bb->aux = (void *) index++;
|
||||
}
|
||||
|
||||
@ -537,7 +537,7 @@ reorder_loops (hwloop_info loops)
|
||||
loops = loops->next;
|
||||
}
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
bb->aux = bb->next_bb;
|
||||
|
@ -4408,7 +4408,7 @@ if_convert (bool after_combine)
|
||||
fprintf (dump_file, "\n\n========== Pass %d ==========\n", pass);
|
||||
#endif
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
basic_block new_bb;
|
||||
while (!df_get_bb_dirty (bb)
|
||||
|
@ -59,7 +59,7 @@ initialize_uninitialized_regs (void)
|
||||
|
||||
df_analyze ();
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
rtx insn;
|
||||
bitmap lr = DF_LR_IN (bb);
|
||||
|
@ -4726,7 +4726,7 @@ ipcp_transform_function (struct cgraph_node *node)
|
||||
descriptors.safe_grow_cleared (param_count);
|
||||
ipa_populate_param_decls (node, descriptors);
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
|
||||
{
|
||||
struct ipa_agg_replacement_value *v;
|
||||
|
@ -754,7 +754,7 @@ analyze_function (struct cgraph_node *fn, bool ipa)
|
||||
|
||||
push_cfun (DECL_STRUCT_FUNCTION (decl));
|
||||
|
||||
FOR_EACH_BB (this_block)
|
||||
FOR_EACH_BB_FN (this_block, cfun)
|
||||
{
|
||||
gimple_stmt_iterator gsi;
|
||||
struct walk_stmt_info wi;
|
||||
|
@ -1070,7 +1070,7 @@ find_split_points (int overall_time, int overall_size)
|
||||
stack.pop ();
|
||||
}
|
||||
ENTRY_BLOCK_PTR_FOR_FN (cfun)->aux = NULL;
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
bb->aux = NULL;
|
||||
stack.release ();
|
||||
BITMAP_FREE (current.ssa_names_to_pass);
|
||||
@ -1595,7 +1595,7 @@ execute_split_functions (void)
|
||||
/* Compute local info about basic blocks and determine function size/time. */
|
||||
bb_info_vec.safe_grow_cleared (last_basic_block_for_fn (cfun) + 1);
|
||||
memset (&best_split_point, 0, sizeof (best_split_point));
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
int time = 0;
|
||||
int size = 0;
|
||||
|
@ -341,7 +341,7 @@ form_loop_tree (void)
|
||||
/* We can not use loop/bb node access macros because of potential
|
||||
checking and because the nodes are not initialized enough
|
||||
yet. */
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
bb_node = &ira_bb_nodes[bb->index];
|
||||
bb_node->bb = bb;
|
||||
|
@ -1585,7 +1585,7 @@ find_costs_and_classes (FILE *dump_file)
|
||||
{
|
||||
basic_block bb;
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
process_bb_for_costs (bb);
|
||||
}
|
||||
|
||||
|
@ -986,7 +986,7 @@ emit_moves (void)
|
||||
edge e;
|
||||
rtx insns, tmp;
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
if (at_bb_start[bb->index] != NULL)
|
||||
{
|
||||
@ -1203,7 +1203,7 @@ add_ranges_and_copies (void)
|
||||
bitmap live_through;
|
||||
|
||||
live_through = ira_allocate_bitmap ();
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
/* It does not matter what loop_tree_node (of source or
|
||||
destination block) to use for searching allocnos by their
|
||||
@ -1260,7 +1260,7 @@ ira_emit (bool loops_p)
|
||||
ira_free_bitmap (renamed_regno_bitmap);
|
||||
ira_free_bitmap (local_allocno_bitmap);
|
||||
setup_entered_from_non_parent_p ();
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
at_bb_start[bb->index] = NULL;
|
||||
at_bb_end[bb->index] = NULL;
|
||||
@ -1275,15 +1275,15 @@ ira_emit (bool loops_p)
|
||||
memset (allocno_last_set_check, 0, sizeof (int) * max_reg_num ());
|
||||
memset (hard_regno_last_set_check, 0, sizeof (hard_regno_last_set_check));
|
||||
curr_tick = 0;
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
unify_moves (bb, true);
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
unify_moves (bb, false);
|
||||
move_vec.create (ira_allocnos_num);
|
||||
emit_moves ();
|
||||
add_ranges_and_copies ();
|
||||
/* Clean up: */
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
free_move_list (at_bb_start[bb->index]);
|
||||
free_move_list (at_bb_end[bb->index]);
|
||||
@ -1301,7 +1301,7 @@ ira_emit (bool loops_p)
|
||||
reload assumes initial insn codes defined. The insn codes can be
|
||||
invalidated by CFG infrastructure for example in jump
|
||||
redirection. */
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
FOR_BB_INSNS_REVERSE (bb, insn)
|
||||
if (INSN_P (insn))
|
||||
recog_memoized (insn);
|
||||
|
22
gcc/ira.c
22
gcc/ira.c
@ -2135,7 +2135,7 @@ decrease_live_ranges_number (void)
|
||||
if (ira_dump_file)
|
||||
fprintf (ira_dump_file, "Starting decreasing number of live ranges...\n");
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
FOR_BB_INSNS (bb, insn)
|
||||
{
|
||||
set = single_set (insn);
|
||||
@ -2358,7 +2358,7 @@ compute_regs_asm_clobbered (void)
|
||||
{
|
||||
basic_block bb;
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
rtx insn;
|
||||
FOR_BB_INSNS_REVERSE (bb, insn)
|
||||
@ -2951,7 +2951,7 @@ mark_elimination (int from, int to)
|
||||
basic_block bb;
|
||||
bitmap r;
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
r = DF_LR_IN (bb);
|
||||
if (bitmap_bit_p (r, from))
|
||||
@ -3473,7 +3473,7 @@ update_equiv_regs (void)
|
||||
paradoxical subreg. Don't set such reg sequivalent to a mem,
|
||||
because lra will not substitute such equiv memory in order to
|
||||
prevent access beyond allocated memory for paradoxical memory subreg. */
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
FOR_BB_INSNS (bb, insn)
|
||||
if (NONDEBUG_INSN_P (insn))
|
||||
for_each_rtx (&insn, set_paradoxical_subreg, (void *) pdx_subregs);
|
||||
@ -3481,7 +3481,7 @@ update_equiv_regs (void)
|
||||
/* Scan the insns and find which registers have equivalences. Do this
|
||||
in a separate scan of the insns because (due to -fcse-follow-jumps)
|
||||
a register can be set below its use. */
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
loop_depth = bb_loop_depth (bb);
|
||||
|
||||
@ -3905,7 +3905,7 @@ update_equiv_regs (void)
|
||||
|
||||
if (!bitmap_empty_p (cleared_regs))
|
||||
{
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
bitmap_and_compl_into (DF_LR_IN (bb), cleared_regs);
|
||||
bitmap_and_compl_into (DF_LR_OUT (bb), cleared_regs);
|
||||
@ -4532,7 +4532,7 @@ find_moveable_pseudos (void)
|
||||
bitmap_initialize (&used, 0);
|
||||
bitmap_initialize (&set, 0);
|
||||
bitmap_initialize (&unusable_as_input, 0);
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
rtx insn;
|
||||
bitmap transp = bb_transp_live + bb->index;
|
||||
@ -4595,7 +4595,7 @@ find_moveable_pseudos (void)
|
||||
bitmap_clear (&used);
|
||||
bitmap_clear (&set);
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
bitmap local = bb_local + bb->index;
|
||||
rtx insn;
|
||||
@ -4824,7 +4824,7 @@ find_moveable_pseudos (void)
|
||||
}
|
||||
}
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
bitmap_clear (bb_local + bb->index);
|
||||
bitmap_clear (bb_transp_live + bb->index);
|
||||
@ -4921,7 +4921,7 @@ split_live_ranges_for_shrink_wrap (void)
|
||||
bitmap_initialize (&reachable, 0);
|
||||
queue.create (n_basic_blocks_for_fn (cfun));
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
FOR_BB_INSNS (bb, insn)
|
||||
if (CALL_P (insn) && !SIBLING_CALL_P (insn))
|
||||
{
|
||||
@ -5145,7 +5145,7 @@ allocate_initial_values (void)
|
||||
fixed regs are accepted. */
|
||||
SET_REGNO (preg, new_regno);
|
||||
/* Update global register liveness information. */
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
if (REGNO_REG_SET_P (df_get_live_in (bb), regno))
|
||||
SET_REGNO_REG_SET (df_get_live_in (bb), new_regno);
|
||||
|
@ -275,7 +275,7 @@ mark_all_labels (rtx f)
|
||||
if (current_ir_type () == IR_RTL_CFGLAYOUT)
|
||||
{
|
||||
basic_block bb;
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
/* In cfglayout mode, we don't bother with trivial next-insn
|
||||
propagation of LABEL_REFs into JUMP_LABEL. This will be
|
||||
|
10
gcc/lcm.c
10
gcc/lcm.c
@ -281,7 +281,7 @@ compute_laterin (struct edge_list *edge_list, sbitmap *earliest,
|
||||
|
||||
/* Add all the blocks to the worklist. This prevents an early exit from
|
||||
the loop given our optimistic initialization of LATER above. */
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
*qin++ = bb;
|
||||
bb->aux = bb;
|
||||
@ -350,7 +350,7 @@ compute_insert_delete (struct edge_list *edge_list, sbitmap *antloc,
|
||||
int x;
|
||||
basic_block bb;
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
bitmap_and_compl (del[bb->index], antloc[bb->index],
|
||||
laterin[bb->index]);
|
||||
|
||||
@ -497,7 +497,7 @@ compute_available (sbitmap *avloc, sbitmap *kill, sbitmap *avout,
|
||||
|
||||
/* Put every block on the worklist; this is necessary because of the
|
||||
optimistic initialization of AVOUT above. */
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
*qin++ = bb;
|
||||
bb->aux = bb;
|
||||
@ -638,7 +638,7 @@ compute_nearerout (struct edge_list *edge_list, sbitmap *farthest,
|
||||
|
||||
/* Add all the blocks to the worklist. This prevents an early exit
|
||||
from the loop given our optimistic initialization of NEARER. */
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
*tos++ = bb;
|
||||
bb->aux = bb;
|
||||
@ -695,7 +695,7 @@ compute_rev_insert_delete (struct edge_list *edge_list, sbitmap *st_avloc,
|
||||
int x;
|
||||
basic_block bb;
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
bitmap_and_compl (del[bb->index], st_avloc[bb->index],
|
||||
nearerout[bb->index]);
|
||||
|
||||
|
@ -213,7 +213,7 @@ fix_loop_structure (bitmap changed_bbs)
|
||||
/* Remember the depth of the blocks in the loop hierarchy, so that we can
|
||||
recognize blocks whose loop nesting relationship has changed. */
|
||||
if (changed_bbs)
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
bb->aux = (void *) (size_t) loop_depth (bb->loop_father);
|
||||
|
||||
/* Remove the dead loops from structures. We start from the innermost
|
||||
@ -256,7 +256,7 @@ fix_loop_structure (bitmap changed_bbs)
|
||||
/* Mark the blocks whose loop has changed. */
|
||||
if (changed_bbs)
|
||||
{
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
if ((void *) (size_t) loop_depth (bb->loop_father) != bb->aux)
|
||||
bitmap_set_bit (changed_bbs, bb->index);
|
||||
|
@ -1825,7 +1825,7 @@ calculate_loop_reg_pressure (void)
|
||||
}
|
||||
ira_setup_eliminable_regset ();
|
||||
bitmap_initialize (&curr_regs_live, ®_obstack);
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
curr_loop = bb->loop_father;
|
||||
if (curr_loop == current_loops->tree_root)
|
||||
|
@ -1463,7 +1463,7 @@ decompose_multiword_subregs (bool decompose_copies)
|
||||
memset (reg_copy_graph.address (), 0, sizeof (bitmap) * max);
|
||||
|
||||
speed_p = optimize_function_for_speed_p (cfun);
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
rtx insn;
|
||||
|
||||
@ -1543,7 +1543,7 @@ decompose_multiword_subregs (bool decompose_copies)
|
||||
EXECUTE_IF_SET_IN_BITMAP (decomposable_context, 0, regno, iter)
|
||||
decompose_register (regno);
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
rtx insn;
|
||||
|
||||
|
@ -1302,7 +1302,7 @@ assign_by_spills (void)
|
||||
|
||||
/* FIXME: Look up the changed insns in the cached LRA insn data using
|
||||
an EXECUTE_IF_SET_IN_BITMAP over changed_insns. */
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
FOR_BB_INSNS (bb, insn)
|
||||
if (bitmap_bit_p (&changed_insns, INSN_UID (insn)))
|
||||
{
|
||||
|
@ -239,7 +239,7 @@ lra_coalesce (void)
|
||||
mv_num = 0;
|
||||
/* Collect moves. */
|
||||
coalesced_moves = 0;
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
FOR_BB_INSNS_SAFE (bb, insn, next)
|
||||
if (INSN_P (insn)
|
||||
@ -297,7 +297,7 @@ lra_coalesce (void)
|
||||
}
|
||||
}
|
||||
bitmap_initialize (&used_pseudos_bitmap, ®_obstack);
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
update_live_info (df_get_live_in (bb));
|
||||
update_live_info (df_get_live_out (bb));
|
||||
|
@ -5302,7 +5302,7 @@ lra_inheritance (void)
|
||||
bitmap_initialize (&live_regs, ®_obstack);
|
||||
bitmap_initialize (&temp_bitmap, ®_obstack);
|
||||
bitmap_initialize (&ebb_global_regs, ®_obstack);
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
start_bb = bb;
|
||||
if (lra_dump_file != NULL)
|
||||
@ -5403,7 +5403,7 @@ remove_inheritance_pseudos (bitmap remove_pseudos)
|
||||
because we need to marks insns affected by previous
|
||||
inheritance/split pass for processing by the subsequent
|
||||
constraint pass. */
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
fix_bb_live_info (df_get_live_in (bb), remove_pseudos);
|
||||
fix_bb_live_info (df_get_live_out (bb), remove_pseudos);
|
||||
|
@ -1284,7 +1284,7 @@ init_elimination (void)
|
||||
struct elim_table *ep;
|
||||
|
||||
init_elim_table ();
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
curr_sp_change = 0;
|
||||
stop_to_sp_elimination_p = false;
|
||||
|
@ -280,7 +280,7 @@ assign_spill_hard_regs (int *pseudo_regnos, int n)
|
||||
add_to_hard_reg_set (&reserved_hard_regs[p],
|
||||
lra_reg_info[i].biggest_mode, hard_regno);
|
||||
bitmap_initialize (&ok_insn_bitmap, ®_obstack);
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
FOR_BB_INSNS (bb, insn)
|
||||
if (DEBUG_INSN_P (insn)
|
||||
|| ((set = single_set (insn)) != NULL_RTX
|
||||
@ -478,7 +478,7 @@ spill_pseudos (void)
|
||||
bitmap_ior_into (&changed_insns, &lra_reg_info[i].insn_bitmap);
|
||||
}
|
||||
}
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
FOR_BB_INSNS (bb, insn)
|
||||
if (bitmap_bit_p (&changed_insns, INSN_UID (insn)))
|
||||
@ -686,7 +686,7 @@ lra_final_code_change (void)
|
||||
if (lra_reg_info[i].nrefs != 0
|
||||
&& (hard_regno = lra_get_regno_hard_regno (i)) >= 0)
|
||||
SET_REGNO (regno_reg_rtx[i], hard_regno);
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
FOR_BB_INSNS_SAFE (bb, insn, curr)
|
||||
if (INSN_P (insn))
|
||||
{
|
||||
|
@ -1960,7 +1960,7 @@ remove_scratches (void)
|
||||
scratches.create (get_max_uid ());
|
||||
bitmap_initialize (&scratch_bitmap, ®_obstack);
|
||||
bitmap_initialize (&scratch_operand_bitmap, ®_obstack);
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
FOR_BB_INSNS (bb, insn)
|
||||
if (INSN_P (insn))
|
||||
{
|
||||
@ -2049,7 +2049,7 @@ check_rtl (bool final_p)
|
||||
rtx insn;
|
||||
|
||||
lra_assert (! final_p || reload_completed);
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
FOR_BB_INSNS (bb, insn)
|
||||
if (NONDEBUG_INSN_P (insn)
|
||||
&& GET_CODE (PATTERN (insn)) != USE
|
||||
@ -2090,7 +2090,7 @@ has_nonexceptional_receiver (void)
|
||||
/* First determine which blocks can reach exit via normal paths. */
|
||||
tos = worklist = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun) + 1);
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
bb->flags &= ~BB_REACHABLE;
|
||||
|
||||
/* Place the exit block on our worklist. */
|
||||
@ -2165,7 +2165,7 @@ update_inc_notes (void)
|
||||
basic_block bb;
|
||||
rtx insn;
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
FOR_BB_INSNS (bb, insn)
|
||||
if (NONDEBUG_INSN_P (insn))
|
||||
{
|
||||
|
@ -1281,7 +1281,7 @@ adjust_cfg_counts (fixup_graph_type *fixup_graph)
|
||||
{
|
||||
fprintf (dump_file, "\nCheck %s() CFG flow conservation:\n",
|
||||
current_function_name ());
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
if ((bb->count != sum_edge_counts (bb->preds))
|
||||
|| (bb->count != sum_edge_counts (bb->succs)))
|
||||
|
@ -516,7 +516,7 @@ optimize_mode_switching (void)
|
||||
/* Determine what the first use (if any) need for a mode of entity E is.
|
||||
This will be the mode that is anticipatable for this block.
|
||||
Also compute the initial transparency settings. */
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
struct seginfo *ptr;
|
||||
int last_mode = no_mode;
|
||||
@ -624,7 +624,7 @@ optimize_mode_switching (void)
|
||||
int m = current_mode[j] = MODE_PRIORITY_TO_MODE (entity_map[j], i);
|
||||
struct bb_info *info = bb_info[j];
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
if (info[bb->index].seginfo->mode == m)
|
||||
bitmap_set_bit (antic[bb->index], j);
|
||||
@ -637,7 +637,7 @@ optimize_mode_switching (void)
|
||||
/* Calculate the optimal locations for the
|
||||
placement mode switches to modes with priority I. */
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
bitmap_not (kill[bb->index], transp[bb->index]);
|
||||
edge_list = pre_edge_lcm (n_entities, transp, comp, antic,
|
||||
kill, &insert, &del);
|
||||
|
@ -3343,7 +3343,7 @@ rest_of_handle_sms (void)
|
||||
max_regno = max_reg_num ();
|
||||
|
||||
/* Finalize layout changes. */
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
bb->aux = bb->next_bb;
|
||||
free_dominance_info (CDI_DOMINATORS);
|
||||
|
@ -4545,7 +4545,7 @@ optimize_omp_library_calls (gimple entry_stmt)
|
||||
&& find_omp_clause (gimple_omp_task_clauses (entry_stmt),
|
||||
OMP_CLAUSE_UNTIED) != NULL);
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
|
||||
{
|
||||
gimple call = gsi_stmt (gsi);
|
||||
@ -4849,7 +4849,7 @@ expand_omp_taskreg (struct omp_region *region)
|
||||
basic_block bb;
|
||||
bool changed = false;
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
changed |= gimple_purge_dead_eh_edges (bb);
|
||||
if (changed)
|
||||
cleanup_tree_cfg ();
|
||||
@ -7939,7 +7939,7 @@ expand_omp_target (struct omp_region *region)
|
||||
basic_block bb;
|
||||
bool changed = false;
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
changed |= gimple_purge_dead_eh_edges (bb);
|
||||
if (changed)
|
||||
cleanup_tree_cfg ();
|
||||
|
@ -266,7 +266,7 @@ alloc_mem (void)
|
||||
/* Find the largest UID and create a mapping from UIDs to CUIDs. */
|
||||
uid_cuid = XCNEWVEC (int, get_max_uid () + 1);
|
||||
i = 1;
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
FOR_BB_INSNS (bb, insn)
|
||||
{
|
||||
if (INSN_P (insn))
|
||||
@ -828,7 +828,7 @@ compute_hash_table (void)
|
||||
{
|
||||
basic_block bb;
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
rtx insn;
|
||||
|
||||
|
@ -213,7 +213,7 @@ reload_cse_regs_1 (void)
|
||||
cselib_init (CSELIB_RECORD_MEMORY);
|
||||
init_alias_analysis ();
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
FOR_BB_INSNS (bb, insn)
|
||||
{
|
||||
if (INSN_P (insn))
|
||||
|
@ -1955,7 +1955,7 @@ strip_predict_hints (void)
|
||||
gimple ass_stmt;
|
||||
tree var;
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
gimple_stmt_iterator bi;
|
||||
for (bi = gsi_start_bb (bb); !gsi_end_p (bi);)
|
||||
@ -2226,7 +2226,7 @@ tree_bb_level_predictions (void)
|
||||
|
||||
apply_return_prediction ();
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
gimple_stmt_iterator gsi;
|
||||
|
||||
@ -2400,10 +2400,10 @@ tree_estimate_probability (void)
|
||||
if (number_of_loops (cfun) > 1)
|
||||
predict_loops ();
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
tree_estimate_probability_bb (bb);
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
combine_predictions_for_bb (bb);
|
||||
|
||||
#ifdef ENABLE_CHECKING
|
||||
@ -2928,7 +2928,7 @@ expensive_function_p (int threshold)
|
||||
|
||||
/* Maximally BB_FREQ_MAX^2 so overflow won't happen. */
|
||||
limit = ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency * threshold;
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
rtx insn;
|
||||
|
||||
@ -2997,7 +2997,7 @@ estimate_bb_frequencies (bool force)
|
||||
estimate_loops ();
|
||||
|
||||
memcpy (&freq_max, &real_zero, sizeof (real_zero));
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
if (sreal_compare (&freq_max, &BLOCK_INFO (bb)->frequency) < 0)
|
||||
memcpy (&freq_max, &BLOCK_INFO (bb)->frequency, sizeof (freq_max));
|
||||
|
||||
@ -3055,7 +3055,7 @@ compute_function_frequency (void)
|
||||
functions to unlikely and that is most of what we care about. */
|
||||
if (!cfun->after_inlining)
|
||||
node->frequency = NODE_FREQUENCY_UNLIKELY_EXECUTED;
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
if (maybe_hot_bb_p (cfun, bb))
|
||||
{
|
||||
|
@ -354,7 +354,7 @@ is_inconsistent (void)
|
||||
{
|
||||
basic_block bb;
|
||||
bool inconsistent = false;
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
inconsistent |= is_edge_inconsistent (bb->preds);
|
||||
if (!dump_file && inconsistent)
|
||||
@ -692,7 +692,7 @@ compute_branch_probabilities (unsigned cfg_checksum, unsigned lineno_checksum)
|
||||
|
||||
/* If the graph has been correctly solved, every block will have a
|
||||
succ and pred count of zero. */
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
gcc_assert (!BB_INFO (bb)->succ_count && !BB_INFO (bb)->pred_count);
|
||||
}
|
||||
@ -1021,7 +1021,7 @@ branch_prob (void)
|
||||
We also add fake exit edges for each call and asm statement in the
|
||||
basic, since it may not return. */
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
int need_exit_edge = 0, need_entry_edge = 0;
|
||||
int have_exit_edge = 0, have_entry_edge = 0;
|
||||
@ -1260,7 +1260,7 @@ branch_prob (void)
|
||||
/* Initialize the output. */
|
||||
output_location (NULL, 0, NULL, NULL);
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
gimple_stmt_iterator gsi;
|
||||
gcov_position_t offset = 0;
|
||||
|
@ -835,7 +835,7 @@ find_removable_extensions (void)
|
||||
rtx insn, set;
|
||||
unsigned *def_map = XCNEWVEC (unsigned, max_insn_uid);
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
FOR_BB_INSNS (bb, insn)
|
||||
{
|
||||
if (!NONDEBUG_INSN_P (insn))
|
||||
|
@ -2846,7 +2846,7 @@ compensate_edges (void)
|
||||
|
||||
starting_stack_p = false;
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
if (bb != ENTRY_BLOCK_PTR_FOR_FN (cfun))
|
||||
{
|
||||
edge e;
|
||||
@ -3153,7 +3153,7 @@ convert_regs (void)
|
||||
|
||||
/* ??? Process all unreachable blocks. Though there's no excuse
|
||||
for keeping these even when not optimizing. */
|
||||
FOR_EACH_BB (b)
|
||||
FOR_EACH_BB_FN (b, cfun)
|
||||
{
|
||||
block_info bi = BLOCK_INFO (b);
|
||||
|
||||
@ -3212,7 +3212,7 @@ reg_to_stack (void)
|
||||
|
||||
/* Set up block info for each basic block. */
|
||||
alloc_aux_for_blocks (sizeof (struct block_info_def));
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
block_info bi = BLOCK_INFO (bb);
|
||||
edge_iterator ei;
|
||||
|
@ -1076,7 +1076,7 @@ copyprop_hardreg_forward (void)
|
||||
= create_alloc_pool ("debug insn changes pool",
|
||||
sizeof (struct queued_debug_insn_change), 256);
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
bitmap_set_bit (visited, bb->index);
|
||||
|
||||
@ -1112,7 +1112,7 @@ copyprop_hardreg_forward (void)
|
||||
|
||||
if (MAY_HAVE_DEBUG_INSNS)
|
||||
{
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
if (bitmap_bit_p (visited, bb->index)
|
||||
&& all_vd[bb->index].n_debug_insn_changes)
|
||||
{
|
||||
|
@ -1266,7 +1266,7 @@ init_subregs_of_mode (void)
|
||||
bitmap_obstack_initialize (&srom_obstack);
|
||||
subregs_of_mode = BITMAP_ALLOC (&srom_obstack);
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
FOR_BB_INSNS (bb, insn)
|
||||
if (NONDEBUG_INSN_P (insn))
|
||||
find_subregs_of_mode (PATTERN (insn), subregs_of_mode);
|
||||
|
@ -674,7 +674,7 @@ regrename_analyze (bitmap bb_mask)
|
||||
/* Gather some information about the blocks in this function. */
|
||||
rename_info = XCNEWVEC (struct bb_rename_info, n_basic_blocks_for_fn (cfun));
|
||||
i = 0;
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
struct bb_rename_info *ri = rename_info + i;
|
||||
ri->bb = bb;
|
||||
@ -778,7 +778,7 @@ regrename_analyze (bitmap bb_mask)
|
||||
We perform the analysis for both incoming and outgoing edges, but we
|
||||
only need to merge once (in the second part, after verifying outgoing
|
||||
edges). */
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
struct bb_rename_info *bb_ri = (struct bb_rename_info *) bb->aux;
|
||||
unsigned j;
|
||||
@ -843,7 +843,7 @@ regrename_analyze (bitmap bb_mask)
|
||||
}
|
||||
}
|
||||
}
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
struct bb_rename_info *bb_ri = (struct bb_rename_info *) bb->aux;
|
||||
unsigned j;
|
||||
@ -920,7 +920,7 @@ regrename_analyze (bitmap bb_mask)
|
||||
|
||||
free (rename_info);
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
bb->aux = NULL;
|
||||
}
|
||||
|
||||
|
@ -375,7 +375,7 @@ regstat_compute_ri (void)
|
||||
reg_info_p = XCNEWVEC (struct reg_info_t, max_regno);
|
||||
local_live_last_luid = XNEWVEC (int, max_regno);
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
regstat_bb_compute_ri (bb->index, live, artificial_uses,
|
||||
local_live, local_processed,
|
||||
@ -522,7 +522,7 @@ regstat_compute_calls_crossed (void)
|
||||
reg_info_p_size = max_regno;
|
||||
reg_info_p = XCNEWVEC (struct reg_info_t, max_regno);
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
regstat_bb_compute_calls_crossed (bb->index, live);
|
||||
}
|
||||
|
@ -613,7 +613,7 @@ has_nonexceptional_receiver (void)
|
||||
/* First determine which blocks can reach exit via normal paths. */
|
||||
tos = worklist = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun) + 1);
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
bb->flags &= ~BB_REACHABLE;
|
||||
|
||||
/* Place the exit block on our worklist. */
|
||||
@ -641,7 +641,7 @@ has_nonexceptional_receiver (void)
|
||||
|
||||
/* Now see if there's a reachable block with an exceptional incoming
|
||||
edge. */
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
if (bb->flags & BB_REACHABLE && bb_has_abnormal_pred (bb))
|
||||
return true;
|
||||
|
||||
@ -1048,7 +1048,7 @@ reload (rtx first, int global)
|
||||
pseudo. */
|
||||
|
||||
if (! frame_pointer_needed)
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
bitmap_clear_bit (df_get_live_in (bb), HARD_FRAME_POINTER_REGNUM);
|
||||
|
||||
/* Come here (with failure set nonzero) if we can't get enough spill
|
||||
@ -1592,7 +1592,7 @@ calculate_elim_costs_all_insns (void)
|
||||
set_initial_elim_offsets ();
|
||||
set_initial_label_offsets ();
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
rtx insn;
|
||||
elim_bb = bb;
|
||||
|
@ -1219,7 +1219,7 @@ init_resource_info (rtx epilogue_insn)
|
||||
bb_ticks = XCNEWVEC (int, last_basic_block_for_fn (cfun));
|
||||
|
||||
/* Set the BLOCK_FOR_INSN of each label that starts a basic block. */
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
if (LABEL_P (BB_HEAD (bb)))
|
||||
BLOCK_FOR_INSN (BB_HEAD (bb)) = bb;
|
||||
}
|
||||
@ -1258,7 +1258,7 @@ free_resource_info (void)
|
||||
bb_ticks = NULL;
|
||||
}
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
if (LABEL_P (BB_HEAD (bb)))
|
||||
BLOCK_FOR_INSN (BB_HEAD (bb)) = NULL;
|
||||
}
|
||||
|
@ -637,7 +637,7 @@ schedule_ebbs (void)
|
||||
schedule_ebbs_init ();
|
||||
|
||||
/* Schedule every region in the subroutine. */
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
rtx head = BB_HEAD (bb);
|
||||
|
||||
|
@ -272,7 +272,7 @@ is_cfg_nonregular (void)
|
||||
|
||||
/* If we have insns which refer to labels as non-jumped-to operands,
|
||||
then we consider the cfg not well structured. */
|
||||
FOR_EACH_BB (b)
|
||||
FOR_EACH_BB_FN (b, cfun)
|
||||
FOR_BB_INSNS (b, insn)
|
||||
{
|
||||
rtx note, next, set, dest;
|
||||
@ -317,7 +317,7 @@ is_cfg_nonregular (void)
|
||||
Unreachable loops with a single block are detected here. This
|
||||
test is redundant with the one in find_rgns, but it's much
|
||||
cheaper to go ahead and catch the trivial case here. */
|
||||
FOR_EACH_BB (b)
|
||||
FOR_EACH_BB_FN (b, cfun)
|
||||
{
|
||||
if (EDGE_COUNT (b->preds) == 0
|
||||
|| (single_pred_p (b)
|
||||
@ -479,7 +479,7 @@ find_single_block_region (bool ebbs_p)
|
||||
probability_cutoff = PARAM_VALUE (TRACER_MIN_BRANCH_PROBABILITY);
|
||||
probability_cutoff = REG_BR_PROB_BASE / 100 * probability_cutoff;
|
||||
|
||||
FOR_EACH_BB (ebb_start)
|
||||
FOR_EACH_BB_FN (ebb_start, cfun)
|
||||
{
|
||||
RGN_NR_BLOCKS (nr_regions) = 0;
|
||||
RGN_BLOCKS (nr_regions) = i;
|
||||
@ -512,7 +512,7 @@ find_single_block_region (bool ebbs_p)
|
||||
}
|
||||
}
|
||||
else
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
rgn_bb_table[nr_regions] = bb->index;
|
||||
RGN_NR_BLOCKS (nr_regions) = 1;
|
||||
@ -762,7 +762,7 @@ haifa_find_rgns (void)
|
||||
the entry node by placing a nonzero value in dfs_nr. Thus if
|
||||
dfs_nr is zero for any block, then it must be unreachable. */
|
||||
unreachable = 0;
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
if (dfs_nr[bb->index] == 0)
|
||||
{
|
||||
unreachable = 1;
|
||||
@ -773,7 +773,7 @@ haifa_find_rgns (void)
|
||||
to hold degree counts. */
|
||||
degree = dfs_nr;
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
degree[bb->index] = EDGE_COUNT (bb->preds);
|
||||
|
||||
/* Do not perform region scheduling if there are any unreachable
|
||||
@ -807,7 +807,7 @@ haifa_find_rgns (void)
|
||||
|
||||
/* Find blocks which are inner loop headers. We still have non-reducible
|
||||
loops to consider at this point. */
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
if (bitmap_bit_p (header, bb->index) && bitmap_bit_p (inner, bb->index))
|
||||
{
|
||||
@ -826,7 +826,7 @@ haifa_find_rgns (void)
|
||||
If there exists a block that is not dominated by the loop
|
||||
header, then the block is reachable from outside the loop
|
||||
and thus the loop is not a natural loop. */
|
||||
FOR_EACH_BB (jbb)
|
||||
FOR_EACH_BB_FN (jbb, cfun)
|
||||
{
|
||||
/* First identify blocks in the loop, except for the loop
|
||||
entry block. */
|
||||
@ -874,7 +874,7 @@ haifa_find_rgns (void)
|
||||
Place those blocks into the queue. */
|
||||
if (no_loops)
|
||||
{
|
||||
FOR_EACH_BB (jbb)
|
||||
FOR_EACH_BB_FN (jbb, cfun)
|
||||
/* Leaf nodes have only a single successor which must
|
||||
be EXIT_BLOCK. */
|
||||
if (single_succ_p (jbb)
|
||||
@ -1052,7 +1052,7 @@ haifa_find_rgns (void)
|
||||
|
||||
/* Any block that did not end up in a region is placed into a region
|
||||
by itself. */
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
if (degree[bb->index] >= 0)
|
||||
{
|
||||
rgn_bb_table[idx] = bb->index;
|
||||
@ -3281,7 +3281,7 @@ sched_rgn_local_init (int rgn)
|
||||
|
||||
/* Use ->aux to implement EDGE_TO_BIT mapping. */
|
||||
rgn_nr_edges = 0;
|
||||
FOR_EACH_BB (block)
|
||||
FOR_EACH_BB_FN (block, cfun)
|
||||
{
|
||||
if (CONTAINING_RGN (block->index) != rgn)
|
||||
continue;
|
||||
@ -3291,7 +3291,7 @@ sched_rgn_local_init (int rgn)
|
||||
|
||||
rgn_edges = XNEWVEC (edge, rgn_nr_edges);
|
||||
rgn_nr_edges = 0;
|
||||
FOR_EACH_BB (block)
|
||||
FOR_EACH_BB_FN (block, cfun)
|
||||
{
|
||||
if (CONTAINING_RGN (block->index) != rgn)
|
||||
continue;
|
||||
@ -3312,7 +3312,7 @@ sched_rgn_local_init (int rgn)
|
||||
/* Cleanup ->aux used for EDGE_TO_BIT mapping. */
|
||||
/* We don't need them anymore. But we want to avoid duplication of
|
||||
aux fields in the newly created edges. */
|
||||
FOR_EACH_BB (block)
|
||||
FOR_EACH_BB_FN (block, cfun)
|
||||
{
|
||||
if (CONTAINING_RGN (block->index) != rgn)
|
||||
continue;
|
||||
|
@ -750,7 +750,7 @@ sel_dump_cfg_2 (FILE *f, int flags)
|
||||
if (flags & SEL_DUMP_CFG_FUNCTION_NAME)
|
||||
fprintf (f, "function [label = \"%s\"];\n", current_function_name ());
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
insn_t insn = BB_HEAD (bb);
|
||||
insn_t next_tail = NEXT_INSN (BB_END (bb));
|
||||
|
@ -4321,7 +4321,7 @@ init_lv_sets (void)
|
||||
basic_block bb;
|
||||
|
||||
/* Initialize of LV sets. */
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
init_lv_set (bb);
|
||||
|
||||
/* Don't forget EXIT_BLOCK. */
|
||||
@ -4349,7 +4349,7 @@ free_lv_sets (void)
|
||||
free_lv_set (EXIT_BLOCK_PTR_FOR_FN (cfun));
|
||||
|
||||
/* Free LV sets. */
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
if (BB_LV_SET (bb))
|
||||
free_lv_set (bb);
|
||||
}
|
||||
@ -6155,7 +6155,7 @@ make_regions_from_the_rest (void)
|
||||
for (i = 0; i < last_basic_block_for_fn (cfun); i++)
|
||||
loop_hdr[i] = -1;
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
if (bb->loop_father && !bb->loop_father->num == 0
|
||||
&& !(bb->flags & BB_IRREDUCIBLE_LOOP))
|
||||
@ -6165,7 +6165,7 @@ make_regions_from_the_rest (void)
|
||||
/* For each basic block degree is calculated as the number of incoming
|
||||
edges, that are going out of bbs that are not yet scheduled.
|
||||
The basic blocks that are scheduled have degree value of zero. */
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
degree[bb->index] = 0;
|
||||
|
||||
@ -6183,7 +6183,7 @@ make_regions_from_the_rest (void)
|
||||
|
||||
/* Any block that did not end up in a region is placed into a region
|
||||
by itself. */
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
if (degree[bb->index] >= 0)
|
||||
{
|
||||
rgn_bb_table[cur_rgn_blocks] = bb->index;
|
||||
|
@ -156,7 +156,7 @@ build_sese_loop_nests (sese region)
|
||||
basic_block bb;
|
||||
struct loop *loop0, *loop1;
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
if (bb_in_sese_p (bb, region))
|
||||
{
|
||||
struct loop *loop = bb->loop_father;
|
||||
@ -303,10 +303,10 @@ sese_build_liveouts (sese region, bitmap liveouts)
|
||||
{
|
||||
basic_block bb;
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
sese_build_liveouts_bb (region, liveouts, bb);
|
||||
if (MAY_HAVE_DEBUG_STMTS)
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
sese_reset_debug_liveouts_bb (region, liveouts, bb);
|
||||
}
|
||||
|
||||
|
@ -58,7 +58,7 @@ notice_stack_pointer_modification (void)
|
||||
been used. */
|
||||
crtl->sp_is_unchanging = !cfun->calls_alloca;
|
||||
if (crtl->sp_is_unchanging)
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
FOR_BB_INSNS (bb, insn)
|
||||
{
|
||||
if (INSN_P (insn))
|
||||
|
@ -656,7 +656,7 @@ compute_store_table (void)
|
||||
already_set = XNEWVEC (int, max_gcse_regno);
|
||||
|
||||
/* Find all the stores we care about. */
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
/* First compute the registers set in this block. */
|
||||
FOR_BB_INSNS (bb, insn)
|
||||
@ -1061,7 +1061,7 @@ build_store_vectors (void)
|
||||
bitmap_vector_clear (st_transp, last_basic_block_for_fn (cfun));
|
||||
regs_set_in_block = XNEWVEC (int, max_gcse_regno);
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
memset (regs_set_in_block, 0, sizeof (int) * max_gcse_regno);
|
||||
|
||||
@ -1188,7 +1188,7 @@ one_store_motion_pass (void)
|
||||
|
||||
/* Now we want to insert the new stores which are going to be needed. */
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
if (bitmap_bit_p (st_delete_map[bb->index], ptr->index))
|
||||
{
|
||||
delete_store (ptr, bb);
|
||||
|
@ -1,3 +1,10 @@
|
||||
2013-12-09 David Malcolm <dmalcolm@redhat.com>
|
||||
|
||||
* g++.dg/plugin/selfassign.c (execute_warn_self_assign): Eliminate
|
||||
use of FOR_EACH_BB in favor of FOR_EACH_BB_FN, to make use of cfun
|
||||
explicit.
|
||||
* gcc.dg/plugin/selfassign.c (execute_warn_self_assign): Likewise.
|
||||
|
||||
2013-12-09 Richard Earnshaw <rearnsha@arm.com>
|
||||
|
||||
* gcc.target/arm/ldrd-strd-offset.c: New.
|
||||
|
@ -261,7 +261,7 @@ execute_warn_self_assign (void)
|
||||
gimple_stmt_iterator gsi;
|
||||
basic_block bb;
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
|
||||
warn_self_assign (gsi_stmt (gsi));
|
||||
|
@ -261,7 +261,7 @@ execute_warn_self_assign (void)
|
||||
gimple_stmt_iterator gsi;
|
||||
basic_block bb;
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
|
||||
warn_self_assign (gsi_stmt (gsi));
|
||||
|
@ -256,7 +256,7 @@ tail_duplicate (void)
|
||||
branch_ratio_cutoff =
|
||||
(REG_BR_PROB_BASE / 100 * PARAM_VALUE (TRACER_MIN_BRANCH_RATIO));
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
int n = count_insns (bb);
|
||||
if (!ignore_bb_p (bb))
|
||||
|
@ -2656,7 +2656,7 @@ compute_transaction_bits (void)
|
||||
certainly don't need it to calculate CDI_DOMINATOR info. */
|
||||
gate_tm_init ();
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
bb->flags &= ~BB_IN_TRANSACTION;
|
||||
|
||||
for (region = all_tm_regions; region; region = region->next)
|
||||
|
@ -876,7 +876,7 @@ tree_call_cdce (void)
|
||||
gimple_stmt_iterator i;
|
||||
bool something_changed = false;
|
||||
auto_vec<gimple> cond_dead_built_in_calls;
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
/* Collect dead call candidates. */
|
||||
for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
|
||||
|
@ -302,7 +302,7 @@ replace_loop_annotate ()
|
||||
}
|
||||
|
||||
/* Remove IFN_ANNOTATE. Safeguard for the case loop->latch == NULL. */
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
gsi = gsi_last_bb (bb);
|
||||
stmt = gsi_stmt (gsi);
|
||||
@ -456,7 +456,7 @@ factor_computed_gotos (void)
|
||||
Examine the last statement in each basic block to see if the block
|
||||
ends with a computed goto. */
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
gimple_stmt_iterator gsi = gsi_last_bb (bb);
|
||||
gimple last;
|
||||
@ -635,7 +635,7 @@ fold_cond_expr_cond (void)
|
||||
{
|
||||
basic_block bb;
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
gimple stmt = last_stmt (bb);
|
||||
|
||||
@ -682,7 +682,7 @@ make_edges (void)
|
||||
EDGE_FALLTHRU);
|
||||
|
||||
/* Traverse the basic block array placing edges. */
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
gimple last = last_stmt (bb);
|
||||
bool fallthru;
|
||||
@ -836,7 +836,7 @@ assign_discriminators (void)
|
||||
{
|
||||
basic_block bb;
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
edge e;
|
||||
edge_iterator ei;
|
||||
@ -1055,7 +1055,7 @@ make_abnormal_goto_edges (basic_block bb, bool for_call)
|
||||
basic_block target_bb;
|
||||
gimple_stmt_iterator gsi;
|
||||
|
||||
FOR_EACH_BB (target_bb)
|
||||
FOR_EACH_BB_FN (target_bb, cfun)
|
||||
{
|
||||
for (gsi = gsi_start_bb (target_bb); !gsi_end_p (gsi); gsi_next (&gsi))
|
||||
{
|
||||
@ -1235,7 +1235,7 @@ cleanup_dead_labels (void)
|
||||
|
||||
/* Find a suitable label for each block. We use the first user-defined
|
||||
label if there is one, or otherwise just the first label we see. */
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
gimple_stmt_iterator i;
|
||||
|
||||
@ -1271,7 +1271,7 @@ cleanup_dead_labels (void)
|
||||
|
||||
/* Now redirect all jumps/branches to the selected label.
|
||||
First do so for each block ending in a control statement. */
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
gimple stmt = last_stmt (bb);
|
||||
tree label, new_label;
|
||||
@ -1363,7 +1363,7 @@ cleanup_dead_labels (void)
|
||||
/* Finally, purge dead labels. All user-defined labels and labels that
|
||||
can be the target of non-local gotos and labels which have their
|
||||
address taken are preserved. */
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
gimple_stmt_iterator i;
|
||||
tree label_for_this_bb = label_for_bb[bb->index].label;
|
||||
@ -1487,7 +1487,7 @@ group_case_labels (void)
|
||||
{
|
||||
basic_block bb;
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
gimple stmt = last_stmt (bb);
|
||||
if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
|
||||
@ -2160,7 +2160,7 @@ dump_cfg_stats (FILE *file)
|
||||
SCALE (size), LABEL (size));
|
||||
|
||||
num_edges = 0;
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
num_edges += EDGE_COUNT (bb->succs);
|
||||
size = num_edges * sizeof (struct edge_def);
|
||||
total += size;
|
||||
@ -4894,7 +4894,7 @@ gimple_verify_flow_info (void)
|
||||
err = 1;
|
||||
}
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
bool found_ctrl_stmt = false;
|
||||
|
||||
@ -7241,7 +7241,7 @@ print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
|
||||
if (verbosity >= 1)
|
||||
{
|
||||
fprintf (file, "%s{\n", s_indent);
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
if (bb->loop_father == loop)
|
||||
print_loops_bb (file, bb, indent, verbosity);
|
||||
|
||||
@ -8331,7 +8331,7 @@ execute_fixup_cfg (void)
|
||||
FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
|
||||
e->count = apply_scale (e->count, count_scale);
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
bb->count = apply_scale (bb->count, count_scale);
|
||||
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
|
||||
|
@ -640,7 +640,7 @@ cleanup_tree_cfg_1 (void)
|
||||
recording of edge to CASE_LABEL_EXPR. */
|
||||
start_recording_case_labels ();
|
||||
|
||||
/* Start by iterating over all basic blocks. We cannot use FOR_EACH_BB,
|
||||
/* Start by iterating over all basic blocks. We cannot use FOR_EACH_BB_FN,
|
||||
since the basic blocks may get removed. */
|
||||
n = last_basic_block_for_fn (cfun);
|
||||
for (i = NUM_FIXED_BLOCKS; i < n; i++)
|
||||
@ -918,7 +918,7 @@ merge_phi_nodes (void)
|
||||
calculate_dominance_info (CDI_DOMINATORS);
|
||||
|
||||
/* Find all PHI nodes that we may be able to merge. */
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
basic_block dest;
|
||||
|
||||
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
x
Reference in New Issue
Block a user