mirror of git://gcc.gnu.org/git/gcc.git
synced 2025-04-15 10:20:39 +08:00
NEXT_INSN and PREV_INSN take a const rtx_insn *
gcc/
2014-08-28  David Malcolm  <dmalcolm@redhat.com>

	* rtl.h (RTX_PREV): Added checked casts to uses of PREV_INSN and NEXT_INSN.
	(PREV_INSN): Strengthen param from const_rtx to const rtx_insn *.
	(NEXT_INSN): Likewise.
	(JUMP_LABEL_AS_INSN): Add a "const" modifier to param.
	(reg_used_between_p): Strengthen params 2 and 3 from const_rtx to const rtx_insn *.
	(no_labels_between_p): Likewise for both params.
	* config/aarch64/aarch64.c (aarch64_output_casesi): Add a checked cast when using NEXT_INSN on operands[2].
	* config/alpha/alpha.c (alpha_set_memflags): Strengthen local "insn" from rtx to rtx_insn *, adding a checked cast.
	(alpha_handle_trap_shadows): Strengthen locals "i", "n" from rtx to rtx_insn *.
	* config/arc/arc-protos.h (arc_ccfsm_record_condition): Likewise for third param.
	(arc_text_label): Likewise for param "insn".
	* config/arc/arc.c (arc_expand_epilogue): Likewise for local "insn".
	(arc_ccfsm_record_condition): Likewise for param "jump".
	(arc_text_label): Likewise for local "label".
	* config/arc/arc.md (doloop_begin_i): Likewise for local "scan". Introduce a local "seq" via a dyn_cast to rtx_sequence *, and use a method for typesafety. Add a checked cast.
	* config/arc/constraints.md (Clb): Add a checked cast when getting the CODE_LABEL from a LABEL_REF.
	* config/arm/arm.c (require_pic_register): Strengthen locals "seq", "insn" from rtx to rtx_insn *.
	(create_fix_barrier): Likewise for locals "selected", "next".
	(thumb1_reorg): Likewise for locals "prev", "insn".
	(arm_expand_prologue): Likewise for local "last".
	(thumb1_output_casesi): Add a checked cast when using NEXT_INSN on operands[0].
	(thumb2_output_casesi): Likewise for operands[2].
	* config/avr/avr-log.c (avr_log_vadump): Within 'L' case, strengthen local "insn" from rtx to rtx_insn *.
	* config/bfin/bfin.c (find_next_insn_start): Likewise for return type and param "insn".
	(find_prev_insn_start): Likewise.
	(hwloop_optimize): Likewise for locals "insn", "last_insn", "prev".
	(gen_one_bundle): Likewise for local "t".
	(find_load): Likewise for param "insn".
	(workaround_speculation): Likewise for locals "insn", "next", "target", "next_tgt".
	* config/c6x/c6x.c (assign_reservations): Likewise for both params and for locals "insn", "within", "last".
	(count_unit_reqs): Likewise for params "head", "tail" and local "insn".
	(try_rename_operands): Likewise for params "head", "tail".
	(reshuffle_units): Likewise for locals "head", "tail", "insn".
	(struct c6x_sched_context): Likewise for fields "last_scheduled_insn", "last_scheduled_iter0".
	(init_sched_state): Replace NULL_RTX with NULL.
	(reorg_split_calls): Strengthen local "new_cycle_first" from rtx to rtx_insn *.
	(undo_split_delayed_nonbranch): Likewise for param and for local "prev".
	(conditionalize_after_sched): Likewise for local "insn".
	(bb_earliest_end_cycle): Likewise.
	(filter_insns_above): Likewise for locals "insn", "next".
	(hwloop_optimize): Remove redundant checked cast.
	(hwloop_fail): Strengthen local "t" from rtx to rtx_insn *.
	* config/cris/cris.c (cris_initial_frame_pointer_offset): Replace NULL_RTX with NULL.
	(cris_simple_epilogue): Likewise.
	(cris_expand_prologue): Likewise.
	(cris_expand_epilogue): Likewise.
	* config/frv/frv.c (frv_function_contains_far_jump): Strengthen local "insn" from rtx to rtx_insn *.
	(frv_ifcvt_modify_tests): Likewise for locals "last_insn", "insn".
	(struct frv_packet_group): Likewise for the elements within array fields "insns", "sorted", and for field "nop".
	(frv_packet): Likewise for the elements within array field "insns".
	(frv_add_insn_to_packet): Likewise for param "insn".
	(frv_insert_nop_in_packet): Likewise for param "insn" and local "last".
	(frv_for_each_packet): Likewise for locals "insn", "next_insn".
	(frv_sort_insn_group_1): Likewise for local "insn".
	(frv_optimize_membar_local): Likewise.
	(frv_align_label): Likewise for locals "x", "last", "barrier", "label".
	* config/ia64/ia64.c (last_scheduled_insn): Likewise for this local.
	(ia64_sched_init): Likewise for local "insn".
	(scheduled_good_insn): Likewise for param "last".
	(struct _ia64_sched_context): Likewise for field "last_scheduled_insn".
	(ia64_init_sched_context): Replace NULL_RTX with NULL.
	(struct bundle_state): Likewise for field "insn".
	(issue_nops_and_insn): Likewise for param "insn".
	(get_next_important_insn): Likewise for return type and both params.
	(ia64_add_bundle_selector_before): Likewise for param "insn".
	(bundling): Likewise for params "prev_head_insn", "tail" and locals "insn", "next_insn", "b". Eliminate top-level local rtx "nop" in favor of new locals rtx "nop_pat" and rtx_insn *nop;
	* config/iq2000/iq2000-protos.h (iq2000_fill_delay_slot): Strengthen final param from rtx to rtx_insn *.
	(iq2000_move_1word): Likewise for second param.
	* config/iq2000/iq2000.c (iq2000_fill_delay_slot): Likewise for param "cur_insn" and local "next_insn".
	(iq2000_move_1word): Likewise for param "insn".
	* config/iq2000/iq2000.md (insn before ADDR_DIFF_VEC): Add checked casts when using NEXT_INSN on operands[1].
	* config/m32c/m32c.c (m32c_function_needs_enter): Strengthen local "insn" from rtx to rtx_insn *.
	* config/m68k/m68k.c (m68k_jump_table_ref_p): Split out uses of "x", introducing local rtx_insn * "insn" for when working with the CODE_LABEL of the LABEL_REF.
	(m68k_sched_md_init_global): Strengthen local "insn" from rtx to rtx_insn *.
	* config/mcore/mcore-protos.h (mcore_is_dead): Likewise for first param.
	* config/mcore/mcore.c (emit_new_cond_insn): Likewise for return type.
	(conditionalize_block): Likewise for return type and param.
	(mcore_is_dead): Likewise for param "first" and local "insn".
	(emit_new_cond_insn): Likewise for return type.
	(conditionalize_block): Likewise for return type, param, and locals "insn", "blk_1_br", "end_blk_2_insn", "start_blk_3_lab", "newinsn".
	(conditionalize_optimization): Likewise for local "insn".
	* config/mep/mep.c (mep_jmp_return_reorg): Add checked cast when using NEXT_INSN.
	* config/microblaze/microblaze.md: Add checked casts when using NEXT_INSN.
	* config/mips/mips.c (mips_expand_prologue): Eliminate top-level rtx "insn" in favor of various more tightly-scoped rtx "insn" and rtx_insn * "insn".
	* config/mips/mips.md (casesi_internal_mips16_<mode>): Add a checked cast when using NEXT_INSN on operands[2].
	* config/mn10300/mn10300.c (mn10300_insert_setlb_lcc): Strengthen local "insn" from rtx to rtx_insn *.
	* config/nds32/nds32-fp-as-gp.c (nds32_fp_as_gp_check_available): Likewise.
	* config/nds32/nds32-md-auxiliary.c (nds32_output_casesi_pc_relative): Add a checked cast when using NEXT_INSN on operands[1].
	* config/pa/pa-protos.h (pa_following_call): Strengthen param from rtx to rtx_insn *.
	(pa_output_cbranch): Likewise for final param.
	(pa_output_lbranch): Likewise for second param.
	(pa_output_bb): Likewise for third param.
	(pa_output_bvb): Likewise.
	(pa_output_dbra): Likewise for second param.
	(pa_output_movb): Likewise.
	(pa_output_parallel_movb): Likewise.
	(pa_output_parallel_addb): Likewise.
	(pa_output_millicode_call): Likewise for first param.
	(pa_output_mul_insn): Likewise for second param.
	(pa_output_div_insn): Likewise for third param.
	(pa_output_mod_insn): Likewise for second param.
	(pa_jump_in_call_delay): Likewise for param.
	* config/pa/pa.c (pa_output_mul_insn): Likewise for param "insn".
	(pa_output_div_insn): Likewise.
	(pa_output_mod_insn): Likewise.
	(pa_output_cbranch): Likewise.
	(pa_output_lbranch): Likewise.
	(pa_output_bb): Likewise.
	(pa_output_bvb): Likewise.
	(pa_output_dbra): Likewise.
	(pa_output_movb): Likewise.
	(pa_output_millicode_call): Likewise; use method of rtx_sequence * to simplify and for typesafety.
	(pa_output_call): Use method of rtx_sequence *.
	(forward_branch_p): Strengthen param "insn" from rtx to rtx_insn *.
	(pa_jump_in_call_delay): Likewise.
	(pa_output_parallel_movb): Likewise.
	(pa_output_parallel_addb): Likewise.
	(pa_following_call): Likewise.
	(pa_combine_instructions): Likewise for locals "anchor", "floater".
	(pa_can_combine_p): Likewise for params "anchor", "floater" and locals "start", "end".
	* config/picochip/picochip.c (picochip_reset_vliw): Likewise for param "insn" and local "local_insn".
	(picochip_final_prescan_insn): Likewise for local "local_insn".
	* config/rs6000/rs6000.c (compute_save_world_info): Likewise for local "insn".
	(uses_TOC): Likewise.
	* config/s390/s390.c (get_some_local_dynamic_name): Likewise.
	(s390_mainpool_finish): Eliminate top-level local rtx "insn", splitting out to more tightly-scoped locals, 3 as rtx and one as rtx_insn *.
	(s390_optimize_nonescaping_tx): Strengthen local "tmp" from rtx to rtx_insn *.
	(s390_emit_prologue): Introduce a local "insn" to be an rtx_insn * where needed.
	* config/sh/sh-protos.h (barrier_align): Strengthen param from rtx to rtx_insn *.
	(fixup_addr_diff_vecs): Likewise.
	(reg_unused_after): Likewise for param 2.
	(sh_can_redirect_branch): Likewise for both params.
	(check_use_sfunc_addr): Likewise for param 1.
	* config/sh/sh.c (fixup_mova): Likewise for local "worker".
	(find_barrier): Likewise for local "last_got".
	(gen_block_redirect): Likewise for return type, param "jump" and locals "prev", "scan", "next", "insn".
	(struct far_branch): Likewise for fields "near_label", "insert_place", "far_label".
	(gen_far_branch): Likewise for local "jump".
	(fixup_addr_diff_vecs): Likewise for param "first" and locals "insn", "prev".
	(barrier_align): Likewise for param and for locals "prev", "x". Introduce local rtx_sequence * "prev_seq" and use insn method for typesafety and clarity.
	(sh_reorg): Strengthen local "scan" from rtx to rtx_insn *.
	(get_dest_uid): Likewise for local "dest".
	(split_branches): Likewise for locals "next", "beyond", "label", "block", "far_label". Add checked casts when assigning to bp->far_label and "far_label".
	(reg_unused_after): Strengthen param "scan" from rtx to rtx_insn *.
	(sequence_insn_p): Likewise.
	(mark_constant_pool_use): Likewise for locals "insn", "lab". Add a more loop-scoped rtx "insn" when walking LABEL_REFS.
	(sh_can_redirect_branch): Strengthen both params from rtx to rtx_insn *.
	(check_use_sfunc_addr): Likewise for param "insn". Introduce a new local rtx_sequence * "seq" via a dyn_cast, and use a method for clarity and typesafety.
	* config/sh/sh.md (define_expand "epilogue"): Strengthen local "insn" from rtx to rtx_insn *.
	(define_insn "casesi_worker_1"): Add a checked cast to rtx_insn * when using NEXT_INSN on the CODE_LABEL in operands[2].
	(define_insn "casesi_worker_2"): Likewise.
	(define_insn "casesi_shift_media"): Likewise.
	(define_insn "casesi_load_media"): Likewise for the CODE_LABEL in operands[3].
	* config/sh/sh_optimize_sett_clrt.cc (struct ccreg_value): Strengthen field "insn" from rtx to rtx_insn *.
	(sh_optimize_sett_clrt::execute): Likewise for locals "next_i", "i".
	(sh_optimize_sett_clrt::find_last_ccreg_values): Likewise for param "start_insn" and local "start_insn".
	* config/sh/sh_treg_combine.cc (struct set_of_reg): Likewise for field "insn".
	(find_set_of_reg_bb): Likewise for param "insn".
	(trace_reg_uses_1): Likewise for param "start_insn" and local "i".
	(trace_reg_uses): Likewise for param "start_insn".
	(sh_treg_combine::cbranch_trace): Likewise for field "cbranch_insn".
	(sh_treg_combine::cbranch_trace::cbranch_trace): Likewise for param "insn".
	(sh_treg_combine::record_set_of_reg): Likewise for param "start_insn" and local "i".
	(sh_treg_combine::can_remove_cstore): Likewise for local "prev_insn".
	(sh_treg_combine::try_optimize_cbranch): Likewise for param "insn".
	(sh_treg_combine::execute): Likewise for local "i".
	* config/sparc/sparc-protos.h (empty_delay_slot): Likewise for param.
	(sparc_check_64): Likewise for second param.
	* config/sparc/sparc.c (sparc_do_work_around_errata): Likewise for locals "insn", "next". Introduce local rtx_sequence * "seq" via a dyn_cast, using its insn method for typesafety and clarity.
	(empty_delay_slot): Strengthen param "insn" from rtx to rtx_insn *.
	(set_extends): Likewise.
	(sparc_check_64): Likewise.
	* config/stormy16/stormy16.c (xstormy16_split_cbranch): Likewise for locals "seq", "last_insn".
	(combine_bnp): Likewise for param "insn".
	(xstormy16_reorg): Likewise for local "insn".
	* config/v850/v850.c (substitute_ep_register): Likewise for params "first_insn", "last_insn" and local "insn".
	(v850_reorg): Likewise for fields "first_insn", "last_insn" within elements of "regs" array, and local "insn".
	* except.c (emit_note_eh_region_end): Likewise for param "insn".
	* final.c (final_sequence): Strengthen this global from rtx to rtx_sequence *.
	(shorten_branches): Strengthen locals "rel_lab", "prev" from rtx to rtx_insn *.
	(final_scan_insn): Update assignment to "final_sequence" to be from "seq", the cast version of "body", for type-safety.
	* function.c (assign_parm_setup_reg): Strengthen locals "insn", "insns" from rtx to rtx_insn *.
	(thread_prologue_and_epilogue_insns): Likewise for local "seq".
	* genattr.c (main): When writing out generated insn-attr.h, strengthen params 1 and 3 of eligible_for_delay, eligible_for_annul_true, eligible_for_annul_false from rtx to rtx_insn *.
	* genattrtab.c (write_eligible_delay): Likewise when writing out generated insn-attrtab.c; also local "insn" in the generated functions.
	* hw-doloop.c (discover_loops): Strengthen local "insn" from rtx to rtx_insn *.
	* hw-doloop.h (struct GTY hwloop_info_d): Strengthen field "start_label" from rtx to rtx_insn *.
	* ira.c (decrease_live_ranges_number): Likewise for local "p".
	(ira_update_equiv_info_by_shuffle_insn): Likewise for param "insns" and local "insn".
	(validate_equiv_mem): Likewise for param "start" and local "insn".
	(memref_used_between_p): Likewise for params "start", "end" and local "insn".
	* ira.h (ira_update_equiv_info_by_shuffle_insn): Likewise for final param.
	* loop-doloop.c (doloop_optimize): Within region guarded by INSN_P (doloop_pat), introduce a new local rtx_insn * "doloop_insn" via a checked cast, and use it for typesafety, eventually writing the value back into doloop_pat.
	* output.h (final_sequence): Strengthen this global from rtx to rtx_sequence *.
	* recog.c (peep2_attempt): Rename param "insn" to "uncast_insn", reintroducing "insn" as an rtx_insn * via a checked cast. Strengthen param "attempt" and local "new_insn" from rtx to rtx_insn *.
	(peephole2_optimize): Strengthen locals "insn", "attempt" from rtx to rtx_insn *.
	* ree.c (emit_note_eh_region_end): Likewise for local "insn".
	* reload1.c (reload_as_needed): Eliminate top-level locals "x" and "p" in favor of more tightly-scoped replacements, sometimes rtx and sometimes rtx_insn *, as appropriate.
	(delete_output_reload): Eliminate top-level rtx "i1", splitting into two loop-scoped locals, one an rtx, the other an rtx_insn *.
	* reorg.c (delete_scheduled_jump): Add checked cast. Strengthen local "trial" from rtx to rtx_insn *.
	(redirect_with_delay_slots_safe_p): Strengthen param "jump" from rtx to rtx_insn *. Strengthen local "pat" from rtx to rtx_sequence * and use methods for clarity and typesafety.
	(redirect_with_delay_list_safe_p): Strengthen param "jump" from rtx to rtx_insn *. Strengthen local "li" from rtx to rtx_insn_list * and use its methods for clarity and typesafety.
	(steal_delay_list_from_target): Strengthen param "insn" from rtx to rtx_insn *.
	(steal_delay_list_from_fallthrough): Likewise.
	(try_merge_delay_insns): Likewise for param "thread" and locals "trial", "next_trial", "delay_insn".
	(redundant_insn): Likewise for param "target" and local "trial".
	(own_thread_p): Likewise for param "thread" and locals "active_insn", "insn".
	(get_label_before): Likewise for param "insn".
	(fill_simple_delay_slots): Likewise for local "new_label"; use JUMP_LABEL_AS_INSN as necessary when calling own_thread_p.
	(label_before_next_insn): Strengthen return type and local "insn" from rtx to rtx_insn *.
	(relax_delay_slots): Likewise for locals "other", "tmp".
	(make_return_insns): Likewise for param "first" and locals "insn", "jump_insn", "prev". Move declaration of "pat" to its assignment and strengthen from rtx to rtx_sequence *. Use its methods for clarity and typesafety.
	* rtlanal.c (no_labels_between_p): Strengthen params from const_rtx to const rtx_insn *. Strengthen local "p" from rtx to rtx_insn *.
	(reg_used_between_p): Strengthen params "from_insn", "to_insn" from const_rtx to const rtx_insn *.
	(reg_set_between_p): Rename param "from_insn" to "uncast_from_insn", and reintroduce "from_insn" as a const rtx_insn * via a checked cast.
	(modified_between_p): Likewise for param "start" as "uncast_start".
	(tablejump_p): Add a cast when invoking NEXT_INSN on "label".
	* sel-sched-ir.c (get_seqno_by_preds): Strengthen param and locals "tmp", "head" from rtx to rtx_insn *.
	(recompute_rev_top_order): Likewise for local "insn".
	* sel-sched-ir.h (get_seqno_by_preds): Likewise for param.
	* store-motion.c (build_store_vectors): Likewise for local "insn". Strengthen local "st" from rtx to rtx_insn_list * and use methods for clarity and typesafety.
	* tree-ssa-loop-ivopts.c (seq_cost): Strengthen param "seq" from rtx to rtx_insn *.
	(computation_cost): Likewise for local "seq".
	(get_address_cost): Likewise.

/
2014-08-28  David Malcolm  <dmalcolm@redhat.com>

	* rtx-classes-status.txt (TODO): NEXT_INSN/PREV_INSN are done.

From-SVN: r214698
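The patch leans on two casting idioms from gcc/is-a.h: a *checked* cast, as_a <rtx_insn *> (x), which asserts at runtime that the weakly-typed rtx really is an insn, and a *conditional* cast, dyn_cast <rtx_sequence *> (x), which yields null on mismatch. The sketch below is a minimal standalone illustration of both idioms, assuming simplified stand-in types and a hypothetical tag predicate rather than GCC's real rtl.h machinery:

```c++
// Minimal standalone sketch of the as_a / dyn_cast idiom (see gcc/is-a.h).
// rtx_def, rtx_insn, and INSN_CODE_TAG are simplified stand-ins, not GCC's
// actual definitions.
#include <cassert>
#include <cstdio>

struct rtx_def { int code; };          // base: any RTL expression
struct rtx_insn : rtx_def {};          // subclass: instructions only
typedef rtx_def *rtx;

const int INSN_CODE_TAG = 1;           // hypothetical "this is an insn" tag

static bool is_insn_p (const rtx_def *x) { return x->code == INSN_CODE_TAG; }

// Checked cast: asserts the predicate, so a wrong cast fails loudly
// instead of silently misinterpreting the rtx.
static rtx_insn *as_a_insn (rtx x)
{
  assert (is_insn_p (x));
  return static_cast<rtx_insn *> (x);
}

// Conditional cast: returns null on mismatch, useful for
// "if this PATTERN is a SEQUENCE, use its insn method" style tests.
static rtx_insn *dyn_cast_insn (rtx x)
{
  return is_insn_p (x) ? static_cast<rtx_insn *> (x) : nullptr;
}

int main ()
{
  rtx_insn storage;
  storage.code = INSN_CODE_TAG;
  rtx x = &storage;                    // weakly-typed handle, as before

  rtx_insn *insn = as_a_insn (x);      // "strengthen" rtx -> rtx_insn *
  if (rtx_insn *maybe = dyn_cast_insn (x))
    printf ("checked and conditional casts agree: %d\n", insn == maybe);
  return 0;
}
```

Once a value has been strengthened this way, functions such as NEXT_INSN and PREV_INSN can take rtx_insn * (here, const rtx_insn *) and the compiler enforces statically what was previously only a runtime convention.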
commit b32d518945
parent c5241a21a3
ChangeLog
@@ -1,3 +1,7 @@
+2014-08-28  David Malcolm  <dmalcolm@redhat.com>
+
+	* rtx-classes-status.txt (TODO): NEXT_INSN/PREV_INSN are done.
+
 2014-08-27  Sebastian Pop  <s.pop@samsung.com>
 
 	* config/isl.m4 (_ISL_CHECK_CT_PROG): Removed.
gcc/ChangeLog | 366 +++++++++++++++++++++++++++++++++++++++++++++++++++++++++
gcc/config/aarch64/aarch64.c
@@ -4563,7 +4563,7 @@ aarch64_output_casesi (rtx *operands)
 {
   char buf[100];
   char label[100];
-  rtx diff_vec = PATTERN (NEXT_INSN (operands[2]));
+  rtx diff_vec = PATTERN (NEXT_INSN (as_a <rtx_insn *> (operands[2])));
   int index;
   static const char *const patterns[4][2] =
   {
gcc/config/alpha/alpha.c
@@ -1701,7 +1701,7 @@ alpha_set_memflags_1 (rtx *xp, void *data)
 void
 alpha_set_memflags (rtx seq, rtx ref)
 {
-  rtx insn;
+  rtx_insn *insn;
 
   if (!MEM_P (ref))
     return;
@@ -1714,7 +1714,7 @@ alpha_set_memflags (rtx seq, rtx ref)
       && !MEM_READONLY_P (ref))
     return;
 
-  for (insn = seq; insn; insn = NEXT_INSN (insn))
+  for (insn = as_a <rtx_insn *> (seq); insn; insn = NEXT_INSN (insn))
     if (INSN_P (insn))
       for_each_rtx (&PATTERN (insn), alpha_set_memflags_1, (void *) ref);
     else
@@ -8759,7 +8759,7 @@ alpha_handle_trap_shadows (void)
 {
   struct shadow_summary shadow;
   int trap_pending, exception_nesting;
-  rtx i, n;
+  rtx_insn *i, *n;
 
   trap_pending = 0;
   exception_nesting = 0;
gcc/config/arc/arc-protos.h
@@ -88,7 +88,8 @@ extern rtx gen_mlo (void);
 extern rtx gen_mhi (void);
 extern bool arc_branch_size_unknown_p (void);
 struct arc_ccfsm;
-extern void arc_ccfsm_record_condition (rtx, bool, rtx, struct arc_ccfsm *);
+extern void arc_ccfsm_record_condition (rtx, bool, rtx_insn *,
+                                        struct arc_ccfsm *);
 extern void arc_expand_prologue (void);
 extern void arc_expand_epilogue (int);
 extern void arc_init_expanders (void);
@@ -108,7 +109,8 @@ extern bool arc_scheduling_not_expected (void);
 extern bool arc_sets_cc_p (rtx insn);
 extern int arc_label_align (rtx label);
 extern bool arc_need_delay (rtx_insn *insn);
-extern bool arc_text_label (rtx);
+extern bool arc_text_label (rtx_insn *insn);
 
 extern int arc_decl_pretend_args (tree decl);
 extern bool arc_short_comparison_p (rtx, int);
 extern bool arc_epilogue_uses (int regno);
gcc/config/arc/arc.c
@@ -2464,7 +2464,7 @@ arc_expand_epilogue (int sibcall_p)
 epilogue_done:
   if (!TARGET_EPILOGUE_CFI)
     {
-      rtx insn;
+      rtx_insn *insn;
 
       for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
        RTX_FRAME_RELATED_P (insn) = 0;
@@ -3735,7 +3735,7 @@ arc_ccfsm_at_label (const char *prefix, int num, struct arc_ccfsm *state)
    the ccfsm state accordingly.
    REVERSE says branch will branch when the condition is false.  */
 void
-arc_ccfsm_record_condition (rtx cond, bool reverse, rtx jump,
+arc_ccfsm_record_condition (rtx cond, bool reverse, rtx_insn *jump,
                             struct arc_ccfsm *state)
 {
   rtx_insn *seq_insn = NEXT_INSN (PREV_INSN (jump));
@@ -9177,7 +9177,7 @@ arc_label_align (rtx label)
 /* Return true if LABEL is in executable code.  */
 
 bool
-arc_text_label (rtx label)
+arc_text_label (rtx_insn *label)
 {
   rtx_insn *next;
 
gcc/config/arc/arc.md
@@ -4771,7 +4771,7 @@
    (use (match_operand 4 "const_int_operand" "C_0,X,X"))]
   ""
 {
-  rtx scan;
+  rtx_insn *scan;
   int len, size = 0;
   int n_insns = 0;
   rtx loop_start = operands[4];
@@ -4812,8 +4812,8 @@
     {
       if (!INSN_P (scan))
        continue;
-      if (GET_CODE (PATTERN (scan)) == SEQUENCE)
-       scan = XVECEXP (PATTERN (scan), 0, 0);
+      if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (scan)))
+       scan = seq->insn (0);
       if (JUMP_P (scan))
        {
          if (recog_memoized (scan) != CODE_FOR_doloop_end_i)
@@ -4821,7 +4821,7 @@
          n_insns += 2;
          if (simplejump_p (scan))
            {
-             scan = XEXP (SET_SRC (PATTERN (scan)), 0);
+             scan = as_a <rtx_insn *> (XEXP (SET_SRC (PATTERN (scan)), 0));
              continue;
            }
          if (JUMP_LABEL (scan)
gcc/config/arc/constraints.md
@@ -308,7 +308,7 @@
 (define_constraint "Clb"
   "label"
   (and (match_code "label_ref")
-       (match_test "arc_text_label (XEXP (op, 0))")))
+       (match_test "arc_text_label (as_a <rtx_insn *> (XEXP (op, 0)))")))
 
 (define_constraint "Cal"
   "constant for arithmetic/logical operations"
gcc/config/arm/arm.c
@@ -6416,7 +6416,7 @@ require_pic_register (void)
     }
   else
     {
-      rtx seq, insn;
+      rtx_insn *seq, *insn;
 
       if (!cfun->machine->pic_reg)
        cfun->machine->pic_reg = gen_reg_rtx (Pmode);
@@ -16697,7 +16697,7 @@ create_fix_barrier (Mfix *fix, HOST_WIDE_INT max_address)
   rtx_barrier *barrier;
   rtx_insn *from = fix->insn;
   /* The instruction after which we will insert the jump.  */
-  rtx selected = NULL;
+  rtx_insn *selected = NULL;
   int selected_cost;
   /* The address at which the jump instruction will be placed.  */
   HOST_WIDE_INT selected_address;
@@ -16767,7 +16767,7 @@ create_fix_barrier (Mfix *fix, HOST_WIDE_INT max_address)
         CALL_ARG_LOCATION note.  */
      if (CALL_P (selected))
        {
-         rtx next = NEXT_INSN (selected);
+         rtx_insn *next = NEXT_INSN (selected);
          if (next && NOTE_P (next)
              && NOTE_KIND (next) == NOTE_INSN_CALL_ARG_LOCATION)
            selected = next;
@@ -17047,7 +17047,7 @@ thumb1_reorg (void)
     {
       rtx dest, src;
       rtx pat, op0, set = NULL;
-      rtx prev, insn = BB_END (bb);
+      rtx_insn *prev, *insn = BB_END (bb);
       bool insn_clobbered = false;
 
       while (insn != BB_HEAD (bb) && !NONDEBUG_INSN_P (insn))
@@ -21325,7 +21325,7 @@ arm_expand_prologue (void)
        {
          /* This add can produce multiple insns for a large constant, so we
             need to get tricky.  */
-         rtx last = get_last_insn ();
+         rtx_insn *last = get_last_insn ();
 
          amount = GEN_INT (offsets->saved_args + saved_regs
                            - offsets->outgoing_args);
@@ -29739,7 +29739,7 @@ arm_output_iwmmxt_tinsr (rtx *operands)
 const char *
 thumb1_output_casesi (rtx *operands)
 {
-  rtx diff_vec = PATTERN (NEXT_INSN (operands[0]));
+  rtx diff_vec = PATTERN (NEXT_INSN (as_a <rtx_insn *> (operands[0])));
 
   gcc_assert (GET_CODE (diff_vec) == ADDR_DIFF_VEC);
 
@@ -29762,7 +29762,7 @@ thumb1_output_casesi (rtx *operands)
 const char *
 thumb2_output_casesi (rtx *operands)
 {
-  rtx diff_vec = PATTERN (NEXT_INSN (operands[2]));
+  rtx diff_vec = PATTERN (NEXT_INSN (as_a <rtx_insn *> (operands[2])));
 
   gcc_assert (GET_CODE (diff_vec) == ADDR_DIFF_VEC);
 
gcc/config/avr/avr-log.c
@@ -209,7 +209,7 @@ avr_log_vadump (FILE *file, const char *fmt, va_list ap)
 
          case 'L':
            {
-             rtx insn = va_arg (ap, rtx);
+             rtx_insn *insn = safe_as_a <rtx_insn *> (va_arg (ap, rtx));
 
              while (insn)
                {
gcc/config/bfin/bfin.c
@@ -3348,8 +3348,8 @@ bfin_adjust_cost (rtx_insn *insn, rtx link, rtx_insn *dep_insn, int cost)
 /* This function acts like NEXT_INSN, but is aware of three-insn bundles and
    skips all subsequent parallel instructions if INSN is the start of such
    a group.  */
-static rtx
-find_next_insn_start (rtx insn)
+static rtx_insn *
+find_next_insn_start (rtx_insn *insn)
 {
   if (GET_MODE (insn) == SImode)
     {
@@ -3362,8 +3362,8 @@ find_next_insn_start (rtx insn)
 /* This function acts like PREV_INSN, but is aware of three-insn bundles and
    skips all subsequent parallel instructions if INSN is the start of such
    a group.  */
-static rtx
-find_prev_insn_start (rtx insn)
+static rtx_insn *
+find_prev_insn_start (rtx_insn *insn)
 {
   insn = PREV_INSN (insn);
   gcc_assert (GET_MODE (insn) != SImode);
@@ -3439,7 +3439,7 @@ static bool
 hwloop_optimize (hwloop_info loop)
 {
   basic_block bb;
-  rtx insn, last_insn;
+  rtx_insn *insn, *last_insn;
   rtx loop_init, start_label, end_label;
   rtx iter_reg, scratchreg, scratch_init, scratch_init_insn;
   rtx lc_reg, lt_reg, lb_reg;
@@ -3614,7 +3614,7 @@ hwloop_optimize (hwloop_info loop)
        }
      else
        {
-         last_insn = NULL_RTX;
+         last_insn = NULL;
          break;
        }
    }
@@ -3657,7 +3657,7 @@ hwloop_optimize (hwloop_info loop)
      last_insn = emit_insn_after (gen_forced_nop (), last_insn);
    }
 
-  loop->last_insn = safe_as_a <rtx_insn *> (last_insn);
+  loop->last_insn = last_insn;
 
   /* The loop is good for replacement.  */
   start_label = loop->start_label;
@@ -3772,7 +3772,7 @@ hwloop_optimize (hwloop_info loop)
 
   if (loop->incoming_src)
     {
-      rtx prev = BB_END (loop->incoming_src);
+      rtx_insn *prev = BB_END (loop->incoming_src);
       if (vec_safe_length (loop->incoming) > 1
          || !(loop->incoming->last ()->flags & EDGE_FALLTHRU))
        {
@@ -3909,7 +3909,7 @@ gen_one_bundle (rtx_insn *slot[3])
   /* Verify that we really can do the multi-issue.  */
   if (slot[0])
     {
-      rtx t = NEXT_INSN (slot[0]);
+      rtx_insn *t = NEXT_INSN (slot[0]);
       while (t != slot[1])
        {
          if (! NOTE_P (t) || NOTE_KIND (t) != NOTE_INSN_DELETED)
@@ -4230,7 +4230,7 @@ trapping_loads_p (rtx insn, int np_reg, bool after_np_branch)
    a three-insn bundle, see if one of them is a load and return that if so.
    Return NULL_RTX if the insn does not contain loads.  */
 static rtx
-find_load (rtx insn)
+find_load (rtx_insn *insn)
 {
   if (!NONDEBUG_INSN_P (insn))
     return NULL_RTX;
@@ -4285,7 +4285,7 @@ note_np_check_stores (rtx x, const_rtx pat ATTRIBUTE_UNUSED,
 static void
 workaround_speculation (void)
 {
-  rtx insn, next;
+  rtx_insn *insn, *next;
   rtx last_condjump = NULL_RTX;
   int cycles_since_jump = INT_MAX;
   int delay_added = 0;
@@ -4449,9 +4449,9 @@ workaround_speculation (void)
          && (INSN_CODE (insn) == CODE_FOR_cbranch_predicted_taken
              || cbranch_predicted_taken_p (insn)))
        {
-         rtx target = JUMP_LABEL (insn);
+         rtx_insn *target = JUMP_LABEL_AS_INSN (insn);
          rtx label = target;
-         rtx next_tgt;
+         rtx_insn *next_tgt;
 
          cycles_since_jump = 0;
          for (; target && cycles_since_jump < 3; target = next_tgt)
gcc/config/c6x/c6x.c
@@ -3045,13 +3045,13 @@ get_insn_side (rtx insn, enum attr_units units)
 /* After scheduling, walk the insns between HEAD and END and assign unit
    reservations.  */
 static void
-assign_reservations (rtx head, rtx end)
+assign_reservations (rtx_insn *head, rtx_insn *end)
 {
-  rtx insn;
+  rtx_insn *insn;
   for (insn = head; insn != NEXT_INSN (end); insn = NEXT_INSN (insn))
     {
       unsigned int sched_mask, reserved;
-      rtx within, last;
+      rtx_insn *within, *last;
       int pass;
       int rsrv[2];
       int rsrv_count[2][4];
@@ -3061,7 +3061,7 @@ assign_reservations (rtx head, rtx end)
        continue;
 
       reserved = 0;
-      last = NULL_RTX;
+      last = NULL;
      /* Find the last insn in the packet.  It has a state recorded for it,
         which we can use to determine the units we should be using.  */
      for (within = insn;
@@ -3272,9 +3272,9 @@ get_unit_reqs (rtx insn, int *req1, int *side1, int *req2, int *side2)
 /* Walk the insns between and including HEAD and TAIL, and mark the
    resource requirements in the unit_reqs table.  */
 static void
-count_unit_reqs (unit_req_table reqs, rtx head, rtx tail)
+count_unit_reqs (unit_req_table reqs, rtx_insn *head, rtx_insn *tail)
 {
-  rtx insn;
+  rtx_insn *insn;
 
   memset (reqs, 0, sizeof (unit_req_table));
 
@@ -3417,7 +3417,8 @@ get_unit_operand_masks (rtx insn, unsigned int *pmask1, unsigned int *pmask2)
    We recompute this information locally after our transformation, and keep
    it only if we managed to improve the balance.  */
 static void
-try_rename_operands (rtx head, rtx tail, unit_req_table reqs, rtx insn,
+try_rename_operands (rtx_insn *head, rtx_insn *tail, unit_req_table reqs,
+                     rtx insn,
                      insn_rr_info *info, unsigned int op_mask, int orig_side)
 {
   enum reg_class super_class = orig_side == 0 ? B_REGS : A_REGS;
@@ -3520,9 +3521,9 @@ try_rename_operands (rtx head, rtx tail, unit_req_table reqs, rtx insn,
 static void
 reshuffle_units (basic_block loop)
 {
-  rtx head = BB_HEAD (loop);
-  rtx tail = BB_END (loop);
-  rtx insn;
+  rtx_insn *head = BB_HEAD (loop);
+  rtx_insn *tail = BB_END (loop);
+  rtx_insn *insn;
   unit_req_table reqs;
   edge e;
   edge_iterator ei;
@@ -3613,7 +3614,7 @@ typedef struct c6x_sched_context
   int delays_finished_at;
 
   /* The following variable value is the last issued insn.  */
-  rtx last_scheduled_insn;
+  rtx_insn *last_scheduled_insn;
   /* The last issued insn that isn't a shadow of another.  */
   rtx_insn *last_scheduled_iter0;
 
@@ -3844,7 +3845,7 @@ predicate_insn (rtx insn, rtx cond, bool doit)
 static void
 init_sched_state (c6x_sched_context_t sc)
 {
-  sc->last_scheduled_insn = NULL_RTX;
+  sc->last_scheduled_insn = NULL;
   sc->last_scheduled_iter0 = NULL;
   sc->issued_this_cycle = 0;
   memset (sc->jump_cycles, 0, sizeof sc->jump_cycles);
@@ -4954,7 +4955,7 @@ reorg_split_calls (rtx *call_labels)
            = INSN_INFO_ENTRY (INSN_UID (last_same_clock)).unit_mask;
          if (GET_MODE (insn) == TImode)
            {
-             rtx new_cycle_first = NEXT_INSN (insn);
+             rtx_insn *new_cycle_first = NEXT_INSN (insn);
              while (!NONDEBUG_INSN_P (new_cycle_first)
                     || GET_CODE (PATTERN (new_cycle_first)) == USE
                     || GET_CODE (PATTERN (new_cycle_first)) == CLOBBER)
@@ -5332,11 +5333,12 @@ split_delayed_nonbranch (rtx_insn *insn)
 /* Examine if INSN is the result of splitting a load into a real load and a
    shadow, and if so, undo the transformation.  */
 static void
-undo_split_delayed_nonbranch (rtx insn)
+undo_split_delayed_nonbranch (rtx_insn *insn)
 {
   int icode = recog_memoized (insn);
   enum attr_type type;
-  rtx prev_pat, insn_pat, prev;
+  rtx prev_pat, insn_pat;
+  rtx_insn *prev;
 
   if (icode < 0)
     return;
@@ -5388,7 +5390,7 @@ static void
 conditionalize_after_sched (void)
 {
   basic_block bb;
-  rtx insn;
+  rtx_insn *insn;
   FOR_EACH_BB_FN (bb, cfun)
     FOR_BB_INSNS (bb, insn)
       {
@@ -5432,7 +5434,7 @@ static int
 bb_earliest_end_cycle (basic_block bb, rtx ignore)
 {
   int earliest = 0;
-  rtx insn;
+  rtx_insn *insn;
 
   FOR_BB_INSNS (bb, insn)
     {
@@ -5458,7 +5460,7 @@ bb_earliest_end_cycle (basic_block bb, rtx ignore)
 static void
 filter_insns_above (basic_block bb, int max_uid)
 {
-  rtx insn, next;
+  rtx_insn *insn, *next;
   bool prev_ti = false;
   int prev_cycle = -1;
 
@@ -5729,7 +5731,7 @@ hwloop_optimize (hwloop_info loop)
        require.  */
   prev = NULL;
   n_execute_packets = 0;
-  for (insn = as_a <rtx_insn *> (loop->start_label);
+  for (insn = loop->start_label;
       insn != loop->loop_end;
       insn = NEXT_INSN (insn))
    {
@@ -5869,7 +5871,7 @@ hwloop_fail (hwloop_info loop)
     emit_insn_before (insn, loop->loop_end);
   else
     {
-      rtx t = loop->start_label;
+      rtx_insn *t = loop->start_label;
      while (!NOTE_P (t) || NOTE_KIND (t) != NOTE_INSN_BASIC_BLOCK)
        t = NEXT_INSN (t);
      emit_insn_after (insn, t);
gcc/config/cris/cris.c
@@ -1303,7 +1303,7 @@ cris_initial_frame_pointer_offset (void)
       push_topmost_sequence ();
       got_really_used
        = reg_used_between_p (pic_offset_table_rtx, get_insns (),
-                             NULL_RTX);
+                             NULL);
       pop_topmost_sequence ();
     }
 
@@ -2015,7 +2015,7 @@ cris_simple_epilogue (void)
    {
      push_topmost_sequence ();
      got_really_used
-       = reg_used_between_p (pic_offset_table_rtx, get_insns (), NULL_RTX);
+       = reg_used_between_p (pic_offset_table_rtx, get_insns (), NULL);
      pop_topmost_sequence ();
    }
 
@@ -3081,7 +3081,7 @@ cris_expand_prologue (void)
         it's still used.  */
      push_topmost_sequence ();
      got_really_used
-       = reg_used_between_p (pic_offset_table_rtx, get_insns (), NULL_RTX);
+       = reg_used_between_p (pic_offset_table_rtx, get_insns (), NULL);
      pop_topmost_sequence ();
    }
 
@@ -3364,7 +3364,7 @@ cris_expand_epilogue (void)
         it's still used.  */
      push_topmost_sequence ();
      got_really_used
-       = reg_used_between_p (pic_offset_table_rtx, get_insns (), NULL_RTX);
+       = reg_used_between_p (pic_offset_table_rtx, get_insns (), NULL);
      pop_topmost_sequence ();
    }
 
@@ -337,8 +337,8 @@ static void frv_start_packet (void);
 static void frv_start_packet_block (void);
 static void frv_finish_packet (void (*) (void));
 static bool frv_pack_insn_p (rtx);
-static void frv_add_insn_to_packet (rtx);
-static void frv_insert_nop_in_packet (rtx);
+static void frv_add_insn_to_packet (rtx_insn *);
+static void frv_insert_nop_in_packet (rtx_insn *);
 static bool frv_for_each_packet (void (*) (void));
 static bool frv_sort_insn_group_1 (enum frv_insn_group,
 unsigned int, unsigned int,
@@ -1391,7 +1391,7 @@ static int frv_insn_packing_flag;
 static int
 frv_function_contains_far_jump (void)
 {
-rtx insn = get_insns ();
+rtx_insn *insn = get_insns ();
 while (insn != NULL
 && !(JUMP_P (insn)
 && get_attr_far_jump (insn) == FAR_JUMP_YES))
@@ -5368,8 +5368,8 @@ frv_ifcvt_modify_tests (ce_if_block *ce_info, rtx *p_true, rtx *p_false)
 /* Scan all of the blocks for registers that must not be allocated. */
 for (j = 0; j < num_bb; j++)
 {
-rtx last_insn = BB_END (bb[j]);
-rtx insn = BB_HEAD (bb[j]);
+rtx_insn *last_insn = BB_END (bb[j]);
+rtx_insn *insn = BB_HEAD (bb[j]);
 unsigned int regno;

 if (dump_file)
@@ -7101,15 +7101,15 @@ struct frv_packet_group {

 /* A list of the instructions that belong to this group, in the order
 they appear in the rtl stream. */
-rtx insns[ARRAY_SIZE (frv_unit_codes)];
+rtx_insn *insns[ARRAY_SIZE (frv_unit_codes)];

 /* The contents of INSNS after they have been sorted into the correct
 assembly-language order.  Element X issues to unit X.  The list may
 contain extra nops. */
-rtx sorted[ARRAY_SIZE (frv_unit_codes)];
+rtx_insn *sorted[ARRAY_SIZE (frv_unit_codes)];

 /* The member of frv_nops[] to use in sorted[]. */
-rtx nop;
+rtx_insn *nop;
 };

 /* The current state of the packing pass, implemented by frv_pack_insns. */
@@ -7140,7 +7140,7 @@ static struct {
 struct frv_packet_group groups[NUM_GROUPS];

 /* The instructions that make up the current packet. */
-rtx insns[ARRAY_SIZE (frv_unit_codes)];
+rtx_insn *insns[ARRAY_SIZE (frv_unit_codes)];
 unsigned int num_insns;
 } frv_packet;

@@ -7388,7 +7388,7 @@ frv_pack_insn_p (rtx insn)
 /* Add instruction INSN to the current packet. */

 static void
-frv_add_insn_to_packet (rtx insn)
+frv_add_insn_to_packet (rtx_insn *insn)
 {
 struct frv_packet_group *packet_group;

@@ -7405,10 +7405,10 @@ frv_add_insn_to_packet (rtx insn)
 add to the end. */

 static void
-frv_insert_nop_in_packet (rtx insn)
+frv_insert_nop_in_packet (rtx_insn *insn)
 {
 struct frv_packet_group *packet_group;
-rtx last;
+rtx_insn *last;

 packet_group = &frv_packet.groups[frv_unit_groups[frv_insn_unit (insn)]];
 last = frv_packet.insns[frv_packet.num_insns - 1];
@@ -7433,7 +7433,7 @@ frv_insert_nop_in_packet (rtx insn)
 static bool
 frv_for_each_packet (void (*handle_packet) (void))
 {
-rtx insn, next_insn;
+rtx_insn *insn, *next_insn;

 frv_packet.issue_rate = frv_issue_rate ();

@@ -7531,7 +7531,7 @@ frv_sort_insn_group_1 (enum frv_insn_group group,
 unsigned int i;
 state_t test_state;
 size_t dfa_size;
-rtx insn;
+rtx_insn *insn;

 /* Early success if we've filled all the slots. */
 if (lower_slot == upper_slot)
@@ -7870,7 +7870,8 @@ frv_optimize_membar_local (basic_block bb, struct frv_io *next_io,
 rtx *last_membar)
 {
 HARD_REG_SET used_regs;
-rtx next_membar, set, insn;
+rtx next_membar, set;
+rtx_insn *insn;
 bool next_is_end_p;

 /* NEXT_IO is the next I/O operation to be performed after the current
@@ -8074,7 +8075,7 @@ static void
 frv_align_label (void)
 {
 unsigned int alignment, target, nop;
-rtx x, last, barrier, label;
+rtx_insn *x, *last, *barrier, *label;

 /* Walk forward to the start of the next packet.  Set ALIGNMENT to the
 maximum alignment of that packet, LABEL to the last label between

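The frv hunks retype whole containers, not just locals: once frv_packet_group's arrays hold rtx_insn *, anything pulled back out of them can feed NEXT_INSN/PREV_INSN without casts, because the checking happened once at insertion time. A sketch of that shape with hypothetical names (the real struct is frv_packet_group above):

/* Hypothetical container of emitted insns; the element type carries the
   guarantee that every entry really is an insn in the stream.  */
struct insn_bucket
{
  rtx_insn *insns[4];
  unsigned int num_insns;
};

static rtx_insn *
bucket_last (const struct insn_bucket *b)
{
  /* No cast needed: the array element already has insn type.  */
  return b->num_insns ? b->insns[b->num_insns - 1] : NULL;
}
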
@@ -275,14 +275,15 @@ static void initiate_bundle_state_table (void);
 static void finish_bundle_state_table (void);
 static int try_issue_nops (struct bundle_state *, int);
 static int try_issue_insn (struct bundle_state *, rtx);
-static void issue_nops_and_insn (struct bundle_state *, int, rtx, int, int);
+static void issue_nops_and_insn (struct bundle_state *, int, rtx_insn *,
+int, int);
 static int get_max_pos (state_t);
 static int get_template (state_t, int);

-static rtx get_next_important_insn (rtx, rtx);
+static rtx_insn *get_next_important_insn (rtx_insn *, rtx_insn *);
 static bool important_for_bundling_p (rtx);
 static bool unknown_for_bundling_p (rtx);
-static void bundling (FILE *, int, rtx, rtx);
+static void bundling (FILE *, int, rtx_insn *, rtx_insn *);

 static void ia64_output_mi_thunk (FILE *, tree, HOST_WIDE_INT,
 HOST_WIDE_INT, tree);
@@ -7087,7 +7088,7 @@ static rtx_insn *dfa_stop_insn;

 /* The following variable value is the last issued insn. */

-static rtx last_scheduled_insn;
+static rtx_insn *last_scheduled_insn;

 /* The following variable value is pointer to a DFA state used as
 temporary variable. */
@@ -7294,7 +7295,7 @@ ia64_sched_init (FILE *dump ATTRIBUTE_UNUSED,
 int max_ready ATTRIBUTE_UNUSED)
 {
 #ifdef ENABLE_CHECKING
-rtx insn;
+rtx_insn *insn;

 if (!sel_sched_p () && reload_completed)
 for (insn = NEXT_INSN (current_sched_info->prev_head);
@@ -7302,7 +7303,7 @@ ia64_sched_init (FILE *dump ATTRIBUTE_UNUSED,
 insn = NEXT_INSN (insn))
 gcc_assert (!SCHED_GROUP_P (insn));
 #endif
-last_scheduled_insn = NULL_RTX;
+last_scheduled_insn = NULL;
 init_insn_group_barriers ();

 current_cycle = 0;
@@ -7567,7 +7568,7 @@ static rtx_insn *dfa_pre_cycle_insn;
 /* Returns 1 when a meaningful insn was scheduled between the last group
 barrier and LAST. */
 static int
-scheduled_good_insn (rtx last)
+scheduled_good_insn (rtx_insn *last)
 {
 if (last && recog_memoized (last) >= 0)
 return 1;
@@ -7669,7 +7670,7 @@ ia64_h_i_d_extended (void)
 struct _ia64_sched_context
 {
 state_t prev_cycle_state;
-rtx last_scheduled_insn;
+rtx_insn *last_scheduled_insn;
 struct reg_write_state rws_sum[NUM_REGS];
 struct reg_write_state rws_insn[NUM_REGS];
 int first_instruction;
@@ -7697,7 +7698,7 @@ ia64_init_sched_context (void *_sc, bool clean_p)
 if (clean_p)
 {
 state_reset (sc->prev_cycle_state);
-sc->last_scheduled_insn = NULL_RTX;
+sc->last_scheduled_insn = NULL;
 memset (sc->rws_sum, 0, sizeof (rws_sum));
 memset (sc->rws_insn, 0, sizeof (rws_insn));
 sc->first_instruction = 1;
@@ -8458,7 +8459,7 @@ struct bundle_state
 /* Unique bundle state number to identify them in the debugging
 output */
 int unique_num;
-rtx insn; /* corresponding insn, NULL for the 1st and the last state */
+rtx_insn *insn; /* corresponding insn, NULL for the 1st and the last state */
 /* number nops before and after the insn */
 short before_nops_num, after_nops_num;
 int insn_num; /* insn number (0 - for initial state, 1 - for the 1st
@@ -8700,7 +8701,8 @@ try_issue_insn (struct bundle_state *curr_state, rtx insn)

 static void
 issue_nops_and_insn (struct bundle_state *originator, int before_nops_num,
-rtx insn, int try_bundle_end_p, int only_bundle_end_p)
+rtx_insn *insn, int try_bundle_end_p,
+int only_bundle_end_p)
 {
 struct bundle_state *curr_state;

@@ -8914,13 +8916,13 @@ important_for_bundling_p (rtx insn)
 /* The following function returns an insn important for insn bundling
 followed by INSN and before TAIL. */

-static rtx
-get_next_important_insn (rtx insn, rtx tail)
+static rtx_insn *
+get_next_important_insn (rtx_insn *insn, rtx_insn *tail)
 {
 for (; insn && insn != tail; insn = NEXT_INSN (insn))
 if (important_for_bundling_p (insn))
 return insn;
-return NULL_RTX;
+return NULL;
 }

 /* True when INSN is unknown, but important, for bundling. */
@@ -8937,7 +8939,7 @@ unknown_for_bundling_p (rtx insn)
 /* Add a bundle selector TEMPLATE0 before INSN. */

 static void
-ia64_add_bundle_selector_before (int template0, rtx insn)
+ia64_add_bundle_selector_before (int template0, rtx_insn *insn)
 {
 rtx b = gen_bundle_selector (GEN_INT (template0));

@@ -9017,15 +9019,14 @@ ia64_add_bundle_selector_before (int template0, rtx insn)
 EBB. */

 static void
-bundling (FILE *dump, int verbose, rtx prev_head_insn, rtx tail)
+bundling (FILE *dump, int verbose, rtx_insn *prev_head_insn, rtx_insn *tail)
 {
 struct bundle_state *curr_state, *next_state, *best_state;
-rtx insn, next_insn;
+rtx_insn *insn, *next_insn;
 int insn_num;
 int i, bundle_end_p, only_bundle_end_p, asm_p;
 int pos = 0, max_pos, template0, template1;
-rtx b;
-rtx nop;
+rtx_insn *b;
 enum attr_type type;

 insn_num = 0;
@@ -9237,8 +9238,8 @@ bundling (FILE *dump, int verbose, rtx prev_head_insn, rtx tail)
 /* Emit nops after the current insn. */
 for (i = 0; i < curr_state->after_nops_num; i++)
 {
-nop = gen_nop ();
-emit_insn_after (nop, insn);
+rtx nop_pat = gen_nop ();
+rtx_insn *nop = emit_insn_after (nop_pat, insn);
 pos--;
 gcc_assert (pos >= 0);
 if (pos % 3 == 0)
@@ -9281,9 +9282,9 @@ bundling (FILE *dump, int verbose, rtx prev_head_insn, rtx tail)
 /* Emit nops after the current insn. */
 for (i = 0; i < curr_state->before_nops_num; i++)
 {
-nop = gen_nop ();
-ia64_emit_insn_before (nop, insn);
-nop = PREV_INSN (insn);
+rtx nop_pat = gen_nop ();
+ia64_emit_insn_before (nop_pat, insn);
+rtx_insn *nop = PREV_INSN (insn);
 insn = nop;
 pos--;
 gcc_assert (pos >= 0);
@@ -9317,7 +9318,7 @@ bundling (FILE *dump, int verbose, rtx prev_head_insn, rtx tail)
 start_bundle = true;
 else
 {
-rtx next_insn;
+rtx_insn *next_insn;

 for (next_insn = NEXT_INSN (insn);
 next_insn && next_insn != tail;

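The ia64 bundling hunks separate the two roles the old nop variable played: gen_nop () builds a bare pattern (a plain rtx), while the emit functions return the rtx_insn * that actually lands in the stream. A sketch of the split, assuming emit_insn_after's strengthened return type from earlier in this patch series; the helper name is invented:

/* Emit a nop after WHERE and hand back the emitted insn.  */
static rtx_insn *
emit_nop_after (rtx_insn *where)
{
  rtx nop_pat = gen_nop ();                 /* the pattern, not yet an insn */
  return emit_insn_after (nop_pat, where);  /* the rtx_insn * in the stream */
}
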
@@ -22,8 +22,8 @@

 extern int iq2000_check_split (rtx, enum machine_mode);
 extern int iq2000_reg_mode_ok_for_base_p (rtx, enum machine_mode, int);
-extern const char * iq2000_fill_delay_slot (const char *, enum delay_type, rtx *, rtx);
-extern const char * iq2000_move_1word (rtx *, rtx, int);
+extern const char * iq2000_fill_delay_slot (const char *, enum delay_type, rtx *, rtx_insn *);
+extern const char * iq2000_move_1word (rtx *, rtx_insn *, int);
 extern HOST_WIDE_INT iq2000_debugger_offset (rtx, HOST_WIDE_INT);
 extern void final_prescan_insn (rtx_insn *, rtx *, int);
 extern HOST_WIDE_INT compute_frame_size (HOST_WIDE_INT);
@@ -369,11 +369,11 @@ iq2000_legitimate_address_p (enum machine_mode mode, rtx xinsn, bool strict)

 const char *
 iq2000_fill_delay_slot (const char *ret, enum delay_type type, rtx operands[],
-rtx cur_insn)
+rtx_insn *cur_insn)
 {
 rtx set_reg;
 enum machine_mode mode;
-rtx next_insn = cur_insn ? NEXT_INSN (cur_insn) : NULL_RTX;
+rtx_insn *next_insn = cur_insn ? NEXT_INSN (cur_insn) : NULL;
 int num_nops;

 if (type == DELAY_LOAD || type == DELAY_FCMP)
@@ -552,7 +552,7 @@ abort_with_insn (rtx insn, const char * reason)
 /* Return the appropriate instructions to move one operand to another. */

 const char *
-iq2000_move_1word (rtx operands[], rtx insn, int unsignedp)
+iq2000_move_1word (rtx operands[], rtx_insn *insn, int unsignedp)
 {
 const char *ret = 0;
 rtx op0 = operands[0];
@@ -1398,8 +1398,8 @@
 (plus:SI (match_operand:SI 0 "register_operand" "d")
 (label_ref:SI (match_operand 1 "" ""))))
 (use (label_ref:SI (match_dup 1)))]
-"!(Pmode == DImode) && NEXT_INSN (operands[1]) != 0
-&& GET_CODE (PATTERN (NEXT_INSN (operands[1]))) == ADDR_DIFF_VEC"
+"!(Pmode == DImode) && NEXT_INSN (as_a <rtx_insn *> (operands[1])) != 0
+&& GET_CODE (PATTERN (NEXT_INSN (as_a <rtx_insn *> (operands[1])))) == ADDR_DIFF_VEC"
 "*
 {
 return \"j\\t%0\";

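The .md condition above shows the checked-cast idiom this patch uses for operands: operands[] elements are plain rtx, so where an operand is known to be the CODE_LABEL preceding a dispatch table, as_a <rtx_insn *> asserts that before NEXT_INSN is applied. A sketch with an invented helper name:

/* True if LABEL_OP (a CODE_LABEL operand) is immediately followed by an
   ADDR_DIFF_VEC dispatch table.  as_a <rtx_insn *> verifies the cast in
   checked builds.  */
static bool
label_precedes_jump_table_p (rtx label_op)
{
  rtx_insn *label = as_a <rtx_insn *> (label_op);
  return NEXT_INSN (label) != NULL
         && GET_CODE (PATTERN (NEXT_INSN (label))) == ADDR_DIFF_VEC;
}
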
@@ -4072,7 +4072,7 @@ m32c_leaf_function_p (void)
 static bool
 m32c_function_needs_enter (void)
 {
-rtx insn;
+rtx_insn *insn;
 struct sequence_stack *seq;
 rtx sp = gen_rtx_REG (Pmode, SP_REGNO);
 rtx fb = gen_rtx_REG (Pmode, FB_REGNO);
@@ -1933,12 +1933,12 @@ m68k_jump_table_ref_p (rtx x)
 if (GET_CODE (x) != LABEL_REF)
 return false;

-x = XEXP (x, 0);
-if (!NEXT_INSN (x) && !PREV_INSN (x))
+rtx_insn *insn = as_a <rtx_insn *> (XEXP (x, 0));
+if (!NEXT_INSN (insn) && !PREV_INSN (insn))
 return true;

-x = next_nonnote_insn (x);
-return x && JUMP_TABLE_DATA_P (x);
+insn = next_nonnote_insn (insn);
+return insn && JUMP_TABLE_DATA_P (insn);
 }

 /* Return true if X is a legitimate address for values of mode MODE.
@@ -6126,12 +6126,12 @@ m68k_sched_md_init_global (FILE *sched_dump ATTRIBUTE_UNUSED,
 /* Check that all instructions have DFA reservations and
 that all instructions can be issued from a clean state. */
 {
-rtx insn;
+rtx_insn *insn;
 state_t state;

 state = alloca (state_size ());

-for (insn = get_insns (); insn != NULL_RTX; insn = NEXT_INSN (insn))
+for (insn = get_insns (); insn != NULL; insn = NEXT_INSN (insn))
 {
 if (INSN_P (insn) && recog_memoized (insn) >= 0)
 {

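m68k_jump_table_ref_p above shows the low-churn refactoring shape: the rtx parameter is kept, since callers pass arbitrary rtl, and a fresh rtx_insn * local takes over once the code has proved the value is the label insn inside a LABEL_REF. A condensed sketch of that shape, names invented (next_nonnote_insn already returns rtx_insn * at this point in the series):

static bool
ref_is_jump_table_p (rtx x)
{
  if (GET_CODE (x) != LABEL_REF)
    return false;
  /* The target of a LABEL_REF is the CODE_LABEL insn itself.  */
  rtx_insn *insn = as_a <rtx_insn *> (XEXP (x, 0));
  insn = next_nonnote_insn (insn);
  return insn != NULL && JUMP_TABLE_DATA_P (insn);
}
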
@@ -48,7 +48,7 @@ extern const char * mcore_output_bclri (rtx, int);
 extern const char * mcore_output_bseti (rtx, int);
 extern const char * mcore_output_cmov (rtx *, int, const char *);
 extern char * mcore_output_call (rtx *, int);
-extern int mcore_is_dead (rtx, rtx);
+extern int mcore_is_dead (rtx_insn *, rtx);
 extern int mcore_expand_insv (rtx *);
 extern bool mcore_expand_block_move (rtx *);
 extern const char * mcore_output_andn (rtx, rtx *);
@@ -102,8 +102,8 @@ static const char * output_inline_const (enum machine_mode, rtx *);
 static void layout_mcore_frame (struct mcore_frame *);
 static void mcore_setup_incoming_varargs (cumulative_args_t, enum machine_mode, tree, int *, int);
 static cond_type is_cond_candidate (rtx);
-static rtx emit_new_cond_insn (rtx, int);
-static rtx conditionalize_block (rtx);
+static rtx_insn *emit_new_cond_insn (rtx, int);
+static rtx_insn *conditionalize_block (rtx_insn *);
 static void conditionalize_optimization (void);
 static void mcore_reorg (void);
 static rtx handle_structs_in_regs (enum machine_mode, const_tree, int);
@@ -903,9 +903,9 @@ try_constant_tricks (HOST_WIDE_INT value, HOST_WIDE_INT * x, HOST_WIDE_INT * y)
 can ignore subregs by extracting the actual register.  BRC */

 int
-mcore_is_dead (rtx first, rtx reg)
+mcore_is_dead (rtx_insn *first, rtx reg)
 {
-rtx insn;
+rtx_insn *insn;

 /* For mcore, subregs can't live independently of their parent regs. */
 if (GET_CODE (reg) == SUBREG)
@@ -2321,7 +2321,7 @@ is_cond_candidate (rtx insn)
 /* Emit a conditional version of insn and replace the old insn with the
 new one.  Return the new insn if emitted. */

-static rtx
+static rtx_insn *
 emit_new_cond_insn (rtx insn, int cond)
 {
 rtx c_insn = 0;
@@ -2406,7 +2406,7 @@ emit_new_cond_insn (rtx insn, int cond)

 delete_insn (insn);

-return c_insn;
+return as_a <rtx_insn *> (c_insn);
 }

 /* Attempt to change a basic block into a series of conditional insns.  This
@@ -2438,14 +2438,14 @@ emit_new_cond_insn (rtx insn, int cond)
 we can delete the L2 label if NUSES==1 and re-apply the optimization
 starting at the last instruction of block 2.  This may allow an entire
 if-then-else statement to be conditionalized.  BRC */
-static rtx
-conditionalize_block (rtx first)
+static rtx_insn *
+conditionalize_block (rtx_insn *first)
 {
-rtx insn;
+rtx_insn *insn;
 rtx br_pat;
-rtx end_blk_1_br = 0;
-rtx end_blk_2_insn = 0;
-rtx start_blk_3_lab = 0;
+rtx_insn *end_blk_1_br = 0;
+rtx_insn *end_blk_2_insn = 0;
+rtx_insn *start_blk_3_lab = 0;
 int cond;
 int br_lab_num;
 int blk_size = 0;
@@ -2534,7 +2534,7 @@ conditionalize_block (rtx first)
 for (insn = NEXT_INSN (end_blk_1_br); insn != start_blk_3_lab;
 insn = NEXT_INSN (insn))
 {
-rtx newinsn;
+rtx_insn *newinsn;

 if (INSN_DELETED_P (insn))
 continue;
@@ -2582,7 +2582,7 @@ conditionalize_block (rtx first)
 static void
 conditionalize_optimization (void)
 {
-rtx insn;
+rtx_insn *insn;

 for (insn = get_insns (); insn; insn = conditionalize_block (insn))
 continue;

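conditionalize_optimization drives the whole mcore pass through the return value of conditionalize_block, so strengthening that return type to rtx_insn * lets the loop header type-check as-is. A sketch of the driver shape, names invented:

static rtx_insn *
process_block (rtx_insn *first)
{
  /* ... transform insns starting at FIRST ... */
  return first ? NEXT_INSN (first) : NULL;  /* where to resume, or NULL */
}

static void
process_all_blocks (void)
{
  /* The step expression has insn type, so the cursor can too.  */
  for (rtx_insn *insn = get_insns (); insn; insn = process_block (insn))
    continue;
}
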
@@ -5653,7 +5653,7 @@ mep_jmp_return_reorg (rtx_insn *insns)
 && (NOTE_P (ret)
 || LABEL_P (ret)
 || GET_CODE (PATTERN (ret)) == USE))
-ret = NEXT_INSN (ret);
+ret = NEXT_INSN (as_a <rtx_insn *> (ret));

 if (ret)
 {
@@ -1829,8 +1829,8 @@
 (plus:SI (match_operand:SI 0 "register_operand" "d")
 (label_ref:SI (match_operand 1 "" ""))))
 (use (label_ref:SI (match_dup 1)))]
-"NEXT_INSN (operands[1]) != 0
-&& GET_CODE (PATTERN (NEXT_INSN (operands[1]))) == ADDR_DIFF_VEC
+"NEXT_INSN (as_a <rtx_insn *> (operands[1])) != 0
+&& GET_CODE (PATTERN (NEXT_INSN (as_a <rtx_insn *> (operands[1])))) == ADDR_DIFF_VEC
 && flag_pic"
 {
 output_asm_insn ("addk\t%0,%0,r20",operands);
@@ -11079,7 +11079,6 @@ mips_expand_prologue (void)
 const struct mips_frame_info *frame;
 HOST_WIDE_INT size;
 unsigned int nargs;
-rtx insn;

 if (cfun->machine->global_pointer != INVALID_REGNUM)
 {
@@ -11133,8 +11132,8 @@ mips_expand_prologue (void)

 /* Build the save instruction. */
 mask = frame->mask;
-insn = mips16e_build_save_restore (false, &mask, &offset,
-nargs, step1);
+rtx insn = mips16e_build_save_restore (false, &mask, &offset,
+nargs, step1);
 RTX_FRAME_RELATED_P (emit_insn (insn)) = 1;
 mips_frame_barrier ();
 size -= step1;
@@ -11174,8 +11173,8 @@ mips_expand_prologue (void)
 }

 /* Allocate the first part of the frame. */
-insn = gen_add3_insn (stack_pointer_rtx, stack_pointer_rtx,
-GEN_INT (-step1));
+rtx insn = gen_add3_insn (stack_pointer_rtx, stack_pointer_rtx,
+GEN_INT (-step1));
 RTX_FRAME_RELATED_P (emit_insn (insn)) = 1;
 mips_frame_barrier ();
 size -= step1;
@@ -11235,9 +11234,9 @@ mips_expand_prologue (void)
 }
 else
 {
-insn = gen_add3_insn (stack_pointer_rtx,
-stack_pointer_rtx,
-GEN_INT (-step1));
+rtx insn = gen_add3_insn (stack_pointer_rtx,
+stack_pointer_rtx,
+GEN_INT (-step1));
 RTX_FRAME_RELATED_P (emit_insn (insn)) = 1;
 mips_frame_barrier ();
 size -= step1;
@@ -11291,13 +11290,13 @@ mips_expand_prologue (void)
 offset = frame->hard_frame_pointer_offset;
 if (offset == 0)
 {
-insn = mips_emit_move (hard_frame_pointer_rtx, stack_pointer_rtx);
+rtx insn = mips_emit_move (hard_frame_pointer_rtx, stack_pointer_rtx);
 RTX_FRAME_RELATED_P (insn) = 1;
 }
 else if (SMALL_OPERAND (offset))
 {
-insn = gen_add3_insn (hard_frame_pointer_rtx,
-stack_pointer_rtx, GEN_INT (offset));
+rtx insn = gen_add3_insn (hard_frame_pointer_rtx,
+stack_pointer_rtx, GEN_INT (offset));
 RTX_FRAME_RELATED_P (emit_insn (insn)) = 1;
 }
 else
@@ -11338,6 +11337,7 @@ mips_expand_prologue (void)
 /* We need to search back to the last use of K0 or K1. */
 if (cfun->machine->interrupt_handler_p)
 {
+rtx_insn *insn;
 for (insn = get_last_insn (); insn != NULL_RTX; insn = PREV_INSN (insn))
 if (INSN_P (insn)
 && for_each_rtx (&PATTERN (insn), mips_kernel_reg_p, NULL))
@@ -6130,7 +6130,7 @@
 (clobber (reg:SI MIPS16_T_REGNUM))]
 "TARGET_MIPS16_SHORT_JUMP_TABLES"
 {
-rtx diff_vec = PATTERN (NEXT_INSN (operands[2]));
+rtx diff_vec = PATTERN (NEXT_INSN (as_a <rtx_insn *> (operands[2])));

 gcc_assert (GET_CODE (diff_vec) == ADDR_DIFF_VEC);

@@ -3211,7 +3211,7 @@ mn10300_insert_setlb_lcc (rtx label, rtx branch)
 static bool
 mn10300_block_contains_call (basic_block block)
 {
-rtx insn;
+rtx_insn *insn;

 FOR_BB_INSNS (block, insn)
 if (CALL_P (insn))
@@ -153,7 +153,7 @@ nds32_fp_as_gp_check_available (void)
 int symbol_count = 0;

 int threshold;
-rtx insn;
+rtx_insn *insn;

 /* We check if there already requires prologue.
 Note that $gp will be saved in prologue for PIC code generation.
@@ -773,7 +773,7 @@ nds32_output_casesi_pc_relative (rtx *operands)
 enum machine_mode mode;
 rtx diff_vec;

-diff_vec = PATTERN (NEXT_INSN (operands[1]));
+diff_vec = PATTERN (NEXT_INSN (as_a <rtx_insn *> (operands[1])));

 gcc_assert (GET_CODE (diff_vec) == ADDR_DIFF_VEC);

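The mips_expand_prologue hunks replace one function-wide rtx insn with block-local declarations, which makes the pattern/insn distinction visible: gen_add3_insn and friends return a pattern (plain rtx), and it is the rtx_insn * returned by emit_insn that RTX_FRAME_RELATED_P must ultimately be set on. A sketch, helper name invented:

/* Emit DEST = SRC + AMOUNT and mark the emitted insn frame-related.  */
static void
emit_frame_related_add (rtx dest, rtx src, HOST_WIDE_INT amount)
{
  rtx pat = gen_add3_insn (dest, src, GEN_INT (amount));  /* pattern */
  RTX_FRAME_RELATED_P (emit_insn (pat)) = 1;              /* insn */
}
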
@@ -22,7 +22,7 @@ along with GCC; see the file COPYING3.  If not see
 extern rtx pa_eh_return_handler_rtx (void);

 /* Used in insn-*.c. */
-extern int pa_following_call (rtx);
+extern int pa_following_call (rtx_insn *);

 /* Define functions in pa.c and used in insn-output.c. */

@@ -34,20 +34,20 @@ extern const char *pa_output_move_double (rtx *);
 extern const char *pa_output_fp_move_double (rtx *);
 extern const char *pa_output_block_move (rtx *, int);
 extern const char *pa_output_block_clear (rtx *, int);
-extern const char *pa_output_cbranch (rtx *, int, rtx);
-extern const char *pa_output_lbranch (rtx, rtx, int);
-extern const char *pa_output_bb (rtx *, int, rtx, int);
-extern const char *pa_output_bvb (rtx *, int, rtx, int);
-extern const char *pa_output_dbra (rtx *, rtx, int);
-extern const char *pa_output_movb (rtx *, rtx, int, int);
-extern const char *pa_output_parallel_movb (rtx *, rtx);
-extern const char *pa_output_parallel_addb (rtx *, rtx);
+extern const char *pa_output_cbranch (rtx *, int, rtx_insn *);
+extern const char *pa_output_lbranch (rtx, rtx_insn *, int);
+extern const char *pa_output_bb (rtx *, int, rtx_insn *, int);
+extern const char *pa_output_bvb (rtx *, int, rtx_insn *, int);
+extern const char *pa_output_dbra (rtx *, rtx_insn *, int);
+extern const char *pa_output_movb (rtx *, rtx_insn *, int, int);
+extern const char *pa_output_parallel_movb (rtx *, rtx_insn *);
+extern const char *pa_output_parallel_addb (rtx *, rtx_insn *);
 extern const char *pa_output_call (rtx_insn *, rtx, int);
 extern const char *pa_output_indirect_call (rtx_insn *, rtx);
-extern const char *pa_output_millicode_call (rtx, rtx);
-extern const char *pa_output_mul_insn (int, rtx);
-extern const char *pa_output_div_insn (rtx *, int, rtx);
-extern const char *pa_output_mod_insn (int, rtx);
+extern const char *pa_output_millicode_call (rtx_insn *, rtx);
+extern const char *pa_output_mul_insn (int, rtx_insn *);
+extern const char *pa_output_div_insn (rtx *, int, rtx_insn *);
+extern const char *pa_output_mod_insn (int, rtx_insn *);
 extern const char *pa_singlemove_string (rtx *);
 extern void pa_output_addr_vec (rtx, rtx);
 extern void pa_output_addr_diff_vec (rtx, rtx);
@@ -63,7 +63,7 @@ extern void pa_emit_bcond_fp (rtx[]);
 extern int pa_emit_move_sequence (rtx *, enum machine_mode, rtx);
 extern int pa_emit_hpdiv_const (rtx *, int);
 extern int pa_is_function_label_plus_const (rtx);
-extern int pa_jump_in_call_delay (rtx);
+extern int pa_jump_in_call_delay (rtx_insn *);
 extern int pa_fpstore_bypass_p (rtx, rtx);
 extern int pa_attr_length_millicode_call (rtx_insn *);
 extern int pa_attr_length_call (rtx_insn *, int);
@@ -99,8 +99,8 @@ static bool hppa_rtx_costs (rtx, int, int, int, int *, bool);
 static inline rtx force_mode (enum machine_mode, rtx);
 static void pa_reorg (void);
 static void pa_combine_instructions (void);
-static int pa_can_combine_p (rtx, rtx, rtx, int, rtx, rtx, rtx);
-static bool forward_branch_p (rtx);
+static int pa_can_combine_p (rtx, rtx_insn *, rtx_insn *, int, rtx, rtx, rtx);
+static bool forward_branch_p (rtx_insn *);
 static void compute_zdepwi_operands (unsigned HOST_WIDE_INT, unsigned *);
 static void compute_zdepdi_operands (unsigned HOST_WIDE_INT, unsigned *);
 static int compute_movmem_length (rtx);
@@ -5693,7 +5693,7 @@ import_milli (enum millicodes code)
 the proper registers. */

 const char *
-pa_output_mul_insn (int unsignedp ATTRIBUTE_UNUSED, rtx insn)
+pa_output_mul_insn (int unsignedp ATTRIBUTE_UNUSED, rtx_insn *insn)
 {
 import_milli (mulI);
 return pa_output_millicode_call (insn, gen_rtx_SYMBOL_REF (Pmode, "$$mulI"));
@@ -5741,7 +5741,7 @@ pa_emit_hpdiv_const (rtx *operands, int unsignedp)
 }

 const char *
-pa_output_div_insn (rtx *operands, int unsignedp, rtx insn)
+pa_output_div_insn (rtx *operands, int unsignedp, rtx_insn *insn)
 {
 int divisor;

@@ -5795,7 +5795,7 @@ pa_output_div_insn (rtx *operands, int unsignedp, rtx insn)
 /* Output a $$rem millicode to do mod. */

 const char *
-pa_output_mod_insn (int unsignedp, rtx insn)
+pa_output_mod_insn (int unsignedp, rtx_insn *insn)
 {
 if (unsignedp)
 {
@@ -6411,7 +6411,7 @@ use_skip_p (rtx insn)
 parameters. */

 const char *
-pa_output_cbranch (rtx *operands, int negated, rtx insn)
+pa_output_cbranch (rtx *operands, int negated, rtx_insn *insn)
 {
 static char buf[100];
 bool useskip;
@@ -6622,7 +6622,7 @@ pa_output_cbranch (rtx *operands, int negated, rtx insn)
 bytes for the portable runtime, non-PIC and PIC cases, respectively. */

 const char *
-pa_output_lbranch (rtx dest, rtx insn, int xdelay)
+pa_output_lbranch (rtx dest, rtx_insn *insn, int xdelay)
 {
 rtx xoperands[2];

@@ -6737,7 +6737,7 @@ pa_output_lbranch (rtx dest, rtx insn, int xdelay)
 above.  it returns the appropriate output template to emit the branch. */

 const char *
-pa_output_bb (rtx *operands ATTRIBUTE_UNUSED, int negated, rtx insn, int which)
+pa_output_bb (rtx *operands ATTRIBUTE_UNUSED, int negated, rtx_insn *insn, int which)
 {
 static char buf[100];
 bool useskip;
@@ -6922,7 +6922,7 @@ pa_output_bb (rtx *operands ATTRIBUTE_UNUSED, int negated, rtx insn, int which)
 branch. */

 const char *
-pa_output_bvb (rtx *operands ATTRIBUTE_UNUSED, int negated, rtx insn,
+pa_output_bvb (rtx *operands ATTRIBUTE_UNUSED, int negated, rtx_insn *insn,
 int which)
 {
 static char buf[100];
@@ -7105,7 +7105,7 @@ pa_output_bvb (rtx *operands ATTRIBUTE_UNUSED, int negated, rtx insn,
 Note it may perform some output operations on its own before
 returning the final output string. */
 const char *
-pa_output_dbra (rtx *operands, rtx insn, int which_alternative)
+pa_output_dbra (rtx *operands, rtx_insn *insn, int which_alternative)
 {
 int length = get_attr_length (insn);

@@ -7254,7 +7254,7 @@ pa_output_dbra (rtx *operands, rtx insn, int which_alternative)
 Note it may perform some output operations on its own before
 returning the final output string. */
 const char *
-pa_output_movb (rtx *operands, rtx insn, int which_alternative,
+pa_output_movb (rtx *operands, rtx_insn *insn, int which_alternative,
 int reverse_comparison)
 {
 int length = get_attr_length (insn);
@@ -7533,7 +7533,7 @@ pa_attr_length_millicode_call (rtx_insn *insn)
 CALL_DEST is the routine we are calling. */

 const char *
-pa_output_millicode_call (rtx insn, rtx call_dest)
+pa_output_millicode_call (rtx_insn *insn, rtx call_dest)
 {
 int attr_length = get_attr_length (insn);
 int seq_length = dbr_sequence_length ();
@@ -7651,7 +7651,7 @@ pa_output_millicode_call (rtx insn, rtx call_dest)
 sequence insn's address. */
 if (INSN_ADDRESSES_SET_P ())
 {
-seq_insn = NEXT_INSN (PREV_INSN (XVECEXP (final_sequence, 0, 0)));
+seq_insn = NEXT_INSN (PREV_INSN (final_sequence->insn (0)));
 distance = (INSN_ADDRESSES (INSN_UID (JUMP_LABEL (NEXT_INSN (insn))))
 - INSN_ADDRESSES (INSN_UID (seq_insn)) - 8);

@@ -8038,7 +8038,7 @@ pa_output_call (rtx_insn *insn, rtx call_dest, int sibcall)
 sequence insn's address.  This would break the regular call/return
 relationship assumed by the table based eh unwinder, so only do that
 if the call is not possibly throwing. */
-rtx seq_insn = NEXT_INSN (PREV_INSN (XVECEXP (final_sequence, 0, 0)));
+rtx seq_insn = NEXT_INSN (PREV_INSN (final_sequence->insn (0)));
 int distance = (INSN_ADDRESSES (INSN_UID (JUMP_LABEL (NEXT_INSN (insn))))
 - INSN_ADDRESSES (INSN_UID (seq_insn)) - 8);

@@ -8786,7 +8786,7 @@ pa_shadd_constant_p (int val)
 /* Return TRUE if INSN branches forward. */

 static bool
-forward_branch_p (rtx insn)
+forward_branch_p (rtx_insn *insn)
 {
 rtx lab = JUMP_LABEL (insn);

@@ -8809,7 +8809,7 @@ forward_branch_p (rtx insn)

 /* Return 1 if INSN is in the delay slot of a call instruction. */
 int
-pa_jump_in_call_delay (rtx insn)
+pa_jump_in_call_delay (rtx_insn *insn)
 {

 if (! JUMP_P (insn))
@@ -8832,7 +8832,7 @@ pa_jump_in_call_delay (rtx insn)
 /* Output an unconditional move and branch insn. */

 const char *
-pa_output_parallel_movb (rtx *operands, rtx insn)
+pa_output_parallel_movb (rtx *operands, rtx_insn *insn)
 {
 int length = get_attr_length (insn);

@@ -8872,7 +8872,7 @@ pa_output_parallel_movb (rtx *operands, rtx insn)
 /* Output an unconditional add and branch insn. */

 const char *
-pa_output_parallel_addb (rtx *operands, rtx insn)
+pa_output_parallel_addb (rtx *operands, rtx_insn *insn)
 {
 int length = get_attr_length (insn);

@@ -8907,7 +8907,7 @@ pa_output_parallel_addb (rtx *operands, rtx insn)
 the delay slot of the call. */

 int
-pa_following_call (rtx insn)
+pa_following_call (rtx_insn *insn)
 {
 if (! TARGET_JUMP_IN_DELAY)
 return 0;
@@ -8989,7 +8989,8 @@ pa_reorg (void)
 static void
 pa_combine_instructions (void)
 {
-rtx anchor, new_rtx;
+rtx_insn *anchor;
+rtx new_rtx;

 /* This can get expensive since the basic algorithm is on the
 order of O(n^2) (or worse).  Only do it for -O2 or higher
@@ -9023,7 +9024,7 @@ pa_combine_instructions (void)
 || (anchor_attr == PA_COMBINE_TYPE_UNCOND_BRANCH
 && ! forward_branch_p (anchor)))
 {
-rtx floater;
+rtx_insn *floater;

 for (floater = PREV_INSN (anchor);
 floater;
@@ -9038,7 +9039,7 @@ pa_combine_instructions (void)
 /* Anything except a regular INSN will stop our search. */
 if (! NONJUMP_INSN_P (floater))
 {
-floater = NULL_RTX;
+floater = NULL;
 break;
 }

@@ -9098,7 +9099,7 @@ pa_combine_instructions (void)
 /* Anything except a regular INSN will stop our search. */
 if (! NONJUMP_INSN_P (floater))
 {
-floater = NULL_RTX;
+floater = NULL;
 break;
 }

@@ -9171,11 +9172,12 @@ pa_combine_instructions (void)
 }

 static int
-pa_can_combine_p (rtx new_rtx, rtx anchor, rtx floater, int reversed, rtx dest,
+pa_can_combine_p (rtx new_rtx, rtx_insn *anchor, rtx_insn *floater,
+int reversed, rtx dest,
 rtx src1, rtx src2)
 {
 int insn_code_number;
-rtx start, end;
+rtx_insn *start, *end;

 /* Create a PARALLEL with the patterns of ANCHOR and
 FLOATER, try to recognize it, then test constraints

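Two pa hunks replace XVECEXP (final_sequence, 0, 0) with final_sequence->insn (0): once a SEQUENCE is held as rtx_sequence *, its typed accessor returns rtx_insn * directly instead of an untyped vector element. A sketch of the accessor, assuming the rtx_sequence class introduced earlier in this series:

/* The insn occupying slot 0 of a delay-slot SEQUENCE, typically the
   branch or call itself.  Typed form of XVECEXP (seq, 0, 0).  */
static rtx_insn *
sequence_head (rtx_sequence *seq)
{
  return seq->insn (0);
}
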
@@ -3435,9 +3435,9 @@ picochip_get_vliw_alu_id (void)

 /* Reset any information about the current VLIW packing status. */
 static void
-picochip_reset_vliw (rtx insn)
+picochip_reset_vliw (rtx_insn *insn)
 {
-rtx local_insn = insn;
+rtx_insn *local_insn = insn;

 /* Nothing to do if VLIW scheduling isn't being used. */
 if (picochip_schedule_type != DFA_TYPE_SPEED)
@@ -3877,7 +3877,7 @@ void
 picochip_final_prescan_insn (rtx_insn *insn, rtx * opvec ATTRIBUTE_UNUSED,
 int num_operands ATTRIBUTE_UNUSED)
 {
-rtx local_insn;
+rtx_insn *local_insn;

 picochip_current_prescan_insn = insn;

@@ -20857,7 +20857,7 @@ compute_save_world_info (rs6000_stack_t *info_ptr)
 are none.  (This check is expensive, but seldom executed.) */
 if (WORLD_SAVE_P (info_ptr))
 {
-rtx insn;
+rtx_insn *insn;
 for (insn = get_last_insn_anywhere (); insn; insn = PREV_INSN (insn))
 if (CALL_P (insn) && SIBLING_CALL_P (insn))
 {
@@ -22050,7 +22050,7 @@ get_TOC_alias_set (void)
 static int
 uses_TOC (void)
 {
-rtx insn;
+rtx_insn *insn;

 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
 if (INSN_P (insn))
@@ -5226,7 +5226,7 @@ get_some_local_dynamic_name_1 (rtx *px, void *data ATTRIBUTE_UNUSED)
 static const char *
 get_some_local_dynamic_name (void)
 {
-rtx insn;
+rtx_insn *insn;

 if (cfun->machine->some_ld_name)
 return cfun->machine->some_ld_name;
@@ -6739,7 +6739,6 @@ static void
 s390_mainpool_finish (struct constant_pool *pool)
 {
 rtx base_reg = cfun->machine->base_reg;
-rtx insn;

 /* If the pool is empty, we're done. */
 if (pool->size == 0)
@@ -6760,7 +6759,7 @@ s390_mainpool_finish (struct constant_pool *pool)
 located in the .rodata section, so we emit it after the function. */
 if (TARGET_CPU_ZARCH)
 {
-insn = gen_main_base_64 (base_reg, pool->label);
+rtx insn = gen_main_base_64 (base_reg, pool->label);
 insn = emit_insn_after (insn, pool->pool_insn);
 INSN_ADDRESSES_NEW (insn, -1);
 remove_insn (pool->pool_insn);
@@ -6778,7 +6777,7 @@ s390_mainpool_finish (struct constant_pool *pool)
 else if (INSN_ADDRESSES (INSN_UID (pool->emit_pool_after))
 + pool->size + 8 /* alignment slop */ < 4096)
 {
-insn = gen_main_base_31_small (base_reg, pool->label);
+rtx insn = gen_main_base_31_small (base_reg, pool->label);
 insn = emit_insn_after (insn, pool->pool_insn);
 INSN_ADDRESSES_NEW (insn, -1);
 remove_insn (pool->pool_insn);
@@ -6803,7 +6802,7 @@ s390_mainpool_finish (struct constant_pool *pool)
 {
 rtx pool_end = gen_label_rtx ();

-insn = gen_main_base_31_large (base_reg, pool->label, pool_end);
+rtx insn = gen_main_base_31_large (base_reg, pool->label, pool_end);
 insn = emit_jump_insn_after (insn, pool->pool_insn);
 JUMP_LABEL (insn) = pool_end;
 INSN_ADDRESSES_NEW (insn, -1);
@@ -6824,7 +6823,7 @@ s390_mainpool_finish (struct constant_pool *pool)

 /* Replace all literal pool references. */

-for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
+for (rtx_insn *insn = get_insns (); insn; insn = NEXT_INSN (insn))
 {
 if (INSN_P (insn))
 replace_ltrel_base (&PATTERN (insn));
@@ -8002,7 +8001,7 @@ s390_optimize_nonescaping_tx (void)

 if (XINT (SET_SRC (pat), 1) == UNSPECV_TBEGIN)
 {
-rtx tmp;
+rtx_insn *tmp;

 tbegin_insn = insn;

@@ -8955,8 +8954,8 @@ s390_emit_prologue (void)
 {
 rtx_insn *insns = s390_load_got ();

-for (insn = insns; insn; insn = NEXT_INSN (insn))
+for (rtx_insn *insn = insns; insn; insn = NEXT_INSN (insn))
 annotate_constant_pool_refs (&PATTERN (insn));

 emit_insn (insns);

@@ -103,7 +103,7 @@ extern const char *output_movepcrel (rtx, rtx[], enum machine_mode);
 extern const char *output_far_jump (rtx_insn *, rtx);

 extern rtx sfunc_uses_reg (rtx);
-extern int barrier_align (rtx);
+extern int barrier_align (rtx_insn *);
 extern int sh_loop_align (rtx);
 extern bool fp_zero_operand (rtx);
 extern bool fp_one_operand (rtx);
@@ -144,18 +144,18 @@ extern int shl_sext_length (rtx);
 extern bool gen_shl_sext (rtx, rtx, rtx, rtx);
 extern rtx gen_datalabel_ref (rtx);
 extern int regs_used (rtx, int);
-extern void fixup_addr_diff_vecs (rtx);
+extern void fixup_addr_diff_vecs (rtx_insn *);
 extern int get_dest_uid (rtx, int);
 extern void final_prescan_insn (rtx_insn *, rtx *, int);
 extern enum tls_model tls_symbolic_operand (rtx, enum machine_mode);
 extern bool system_reg_operand (rtx, enum machine_mode);
-extern bool reg_unused_after (rtx, rtx);
+extern bool reg_unused_after (rtx, rtx_insn *);
 extern void expand_sf_unop (rtx (*)(rtx, rtx, rtx), rtx *);
 extern void expand_sf_binop (rtx (*)(rtx, rtx, rtx, rtx), rtx *);
 extern void expand_df_unop (rtx (*)(rtx, rtx, rtx), rtx *);
 extern void expand_df_binop (rtx (*)(rtx, rtx, rtx, rtx), rtx *);
 extern int sh_insn_length_adjustment (rtx_insn *);
-extern bool sh_can_redirect_branch (rtx, rtx);
+extern bool sh_can_redirect_branch (rtx_insn *, rtx_insn *);
 extern void sh_expand_unop_v2sf (enum rtx_code, rtx, rtx);
 extern void sh_expand_binop_v2sf (enum rtx_code, rtx, rtx, rtx);
 extern bool sh_expand_t_scc (rtx *);
@@ -207,7 +207,7 @@ extern bool sh_cannot_change_mode_class
 (enum machine_mode, enum machine_mode, enum reg_class);
 extern bool sh_small_register_classes_for_mode_p (enum machine_mode);
 extern void sh_mark_label (rtx, int);
-extern bool check_use_sfunc_addr (rtx, rtx);
+extern bool check_use_sfunc_addr (rtx_insn *, rtx);

 #ifdef HARD_CONST
 extern void fpscr_set_from_mem (int, HARD_REG_SET);
@@ -191,7 +191,7 @@ static bool broken_move (rtx_insn *);
 static bool mova_p (rtx_insn *);
 static rtx_insn *find_barrier (int, rtx_insn *, rtx_insn *);
 static bool noncall_uses_reg (rtx, rtx, rtx *);
-static rtx gen_block_redirect (rtx, int, int);
+static rtx_insn *gen_block_redirect (rtx_insn *, int, int);
 static void sh_reorg (void);
 static void sh_option_override (void);
 static void output_stack_adjust (int, rtx, int, HARD_REG_SET *, bool);
@@ -324,7 +324,7 @@ static void sh_conditional_register_usage (void);
 static bool sh_legitimate_constant_p (enum machine_mode, rtx);
 static int mov_insn_size (enum machine_mode, bool);
 static int mov_insn_alignment_mask (enum machine_mode, bool);
-static bool sequence_insn_p (rtx);
+static bool sequence_insn_p (rtx_insn *);
 static void sh_canonicalize_comparison (int *, rtx *, rtx *, bool);
 static void sh_canonicalize_comparison (enum rtx_code&, rtx&, rtx&,
 enum machine_mode, bool);
@@ -4941,7 +4941,7 @@ fixup_mova (rtx_insn *mova)
 }
 else
 {
-rtx worker = mova;
+rtx_insn *worker = mova;
 rtx lab = gen_label_rtx ();
 rtx wpat, wpat0, wpat1, wsrc, target, base, diff;

@@ -5041,7 +5041,7 @@ find_barrier (int num_mova, rtx_insn *mova, rtx_insn *from)
 int si_limit;
 int hi_limit;
 rtx_insn *orig = from;
-rtx last_got = NULL_RTX;
+rtx_insn *last_got = NULL;
 rtx_insn *last_symoff = NULL;

 /* For HImode: range is 510, add 4 because pc counts from address of
@@ -5139,9 +5139,9 @@ find_barrier (int num_mova, rtx_insn *mova, rtx_insn *from)
 instructions.  (plus add r0,r12).
 Remember if we see one without the other. */
 if (GET_CODE (src) == UNSPEC && PIC_ADDR_P (XVECEXP (src, 0, 0)))
-last_got = last_got ? NULL_RTX : from;
+last_got = last_got ? NULL : from;
 else if (PIC_ADDR_P (src))
-last_got = last_got ? NULL_RTX : from;
+last_got = last_got ? NULL : from;

 /* We must explicitly check the mode, because sometimes the
 front end will generate code to load unsigned constants into
@@ -5562,11 +5562,11 @@ regs_used (rtx x, int is_dest)
 pass 1.  Pass 2 if a definite blocking insn is needed.
 -1 is used internally to avoid deep recursion.
 If a blocking instruction is made or recognized, return it. */
-static rtx
-gen_block_redirect (rtx jump, int addr, int need_block)
+static rtx_insn *
+gen_block_redirect (rtx_insn *jump, int addr, int need_block)
 {
 int dead = 0;
-rtx prev = prev_nonnote_insn (jump);
+rtx_insn *prev = prev_nonnote_insn (jump);
 rtx dest;

 /* First, check if we already have an instruction that satisfies our need. */
@@ -5599,7 +5599,7 @@ gen_block_redirect (rtx jump, int addr, int need_block)
 && (INSN_ADDRESSES (INSN_UID (dest)) - addr + (unsigned) 4092
 > 4092 + 4098))
 {
-rtx scan;
+rtx_insn *scan;
 /* Don't look for the stack pointer as a scratch register,
 it would cause trouble if an interrupt occurred. */
 unsigned attempt = 0x7fff, used;
@@ -5627,7 +5627,7 @@ gen_block_redirect (rtx jump, int addr, int need_block)
 break;
 }
 }
-for (used = dead = 0, scan = JUMP_LABEL (jump);
+for (used = dead = 0, scan = JUMP_LABEL_AS_INSN (jump);
 (scan = NEXT_INSN (scan)); )
 {
 enum rtx_code code;
@@ -5649,7 +5649,7 @@ gen_block_redirect (rtx jump, int addr, int need_block)
 if (code == JUMP_INSN)
 {
 if (jump_left-- && simplejump_p (scan))
-scan = JUMP_LABEL (scan);
+scan = JUMP_LABEL_AS_INSN (scan);
 else
 break;
 }
@@ -5666,7 +5666,7 @@ gen_block_redirect (rtx jump, int addr, int need_block)

 else if (optimize && need_block >= 0)
 {
-rtx next = next_active_insn (next_active_insn (dest));
+rtx_insn *next = next_active_insn (next_active_insn (dest));
 if (next && JUMP_P (next)
 && GET_CODE (PATTERN (next)) == SET
 && recog_memoized (next) == CODE_FOR_jump_compact)
@@ -5690,9 +5690,9 @@ gen_block_redirect (rtx jump, int addr, int need_block)
 it should try to schedule instructions from the target of the
 branch; simplejump_p fails for indirect jumps even if they have
 a JUMP_LABEL. */
-rtx insn = emit_insn_before (gen_indirect_jump_scratch
-(reg, GEN_INT (unspec_bbr_uid++)),
-jump);
+rtx_insn *insn = emit_insn_before (gen_indirect_jump_scratch
+(reg, GEN_INT (unspec_bbr_uid++)),
+jump);
 /* ??? We would like this to have the scope of the jump, but that
 scope will change when a delay slot insn of an inner scope is added.
 Hence, after delay slot scheduling, we'll have to expect
@@ -5718,12 +5718,12 @@ struct far_branch
 {
 /* A label (to be placed) in front of the jump
 that jumps to our ultimate destination. */
-rtx near_label;
+rtx_insn *near_label;
 /* Where we are going to insert it if we cannot move the jump any farther,
 or the jump itself if we have picked up an existing jump. */
-rtx insert_place;
+rtx_insn *insert_place;
 /* The ultimate destination. */
-rtx far_label;
+rtx_insn *far_label;
 struct far_branch *prev;
 /* If the branch has already been created, its address;
 else the address of its first prospective user. */
@@ -5736,7 +5736,7 @@ static void
 gen_far_branch (struct far_branch *bp)
 {
 rtx insn = bp->insert_place;
-rtx jump;
+rtx_insn *jump;
 rtx label = gen_label_rtx ();
 int ok;

@@ -5786,13 +5786,14 @@ gen_far_branch (struct far_branch *bp)

 /* Fix up ADDR_DIFF_VECs. */
 void
-fixup_addr_diff_vecs (rtx first)
+fixup_addr_diff_vecs (rtx_insn *first)
 {
-rtx insn;
+rtx_insn *insn;

 for (insn = first; insn; insn = NEXT_INSN (insn))
 {
-rtx vec_lab, pat, prev, prevpat, x, braf_label;
+rtx vec_lab, pat, prevpat, x, braf_label;
+rtx_insn *prev;

 if (! JUMP_TABLE_DATA_P (insn)
 || GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC)
@@ -5801,7 +5802,7 @@ fixup_addr_diff_vecs (rtx first)
 vec_lab = XEXP (XEXP (pat, 0), 0);

 /* Search the matching casesi_jump_2. */
-for (prev = vec_lab; ; prev = PREV_INSN (prev))
+for (prev = as_a <rtx_insn *> (vec_lab); ; prev = PREV_INSN (prev))
 {
 if (!JUMP_P (prev))
 continue;
@@ -5834,7 +5835,7 @@ fixup_addr_diff_vecs (rtx first)
 /* BARRIER_OR_LABEL is either a BARRIER or a CODE_LABEL immediately following
 a barrier.  Return the base 2 logarithm of the desired alignment. */
 int
-barrier_align (rtx barrier_or_label)
+barrier_align (rtx_insn *barrier_or_label)
 {
 rtx next, pat;

@@ -5899,7 +5900,7 @@ barrier_align (rtx barrier_or_label)

 /* Skip to the insn before the JUMP_INSN before the barrier under
 investigation. */
-rtx prev = prev_real_insn (prev_active_insn (barrier_or_label));
+rtx_insn *prev = prev_real_insn (prev_active_insn (barrier_or_label));

 for (slot = 2, credit = (1 << (CACHE_LOG - 2)) + 2;
 credit >= 0 && prev && NONJUMP_INSN_P (prev);
@@ -5909,9 +5910,9 @@ barrier_align (rtx barrier_or_label)
 if (GET_CODE (PATTERN (prev)) == USE
 || GET_CODE (PATTERN (prev)) == CLOBBER)
 continue;
-if (GET_CODE (PATTERN (prev)) == SEQUENCE)
+if (rtx_sequence *prev_seq = dyn_cast <rtx_sequence *> (PATTERN (prev)))
 {
-prev = XVECEXP (PATTERN (prev), 0, 1);
+prev = prev_seq->insn (1);
 if (INSN_UID (prev) == INSN_UID (next))
 {
 /* Delay slot was filled with insn at jump target. */
@@ -5927,7 +5928,7 @@ barrier_align (rtx barrier_or_label)
 }
 if (prev && jump_to_label_p (prev))
 {
-rtx x;
+rtx_insn *x;
 if (jump_to_next
 || next_real_insn (JUMP_LABEL (prev)) == next
 /* If relax_delay_slots() decides NEXT was redundant
@@ -6247,7 +6248,7 @@ sh_reorg (void)
 || (prev_nonnote_insn (insn)
 == XEXP (MOVA_LABELREF (mova), 0))))
 {
-rtx scan;
+rtx_insn *scan;
 int total;

 num_mova--;
@@ -6448,7 +6449,7 @@ sh_reorg (void)
 int
 get_dest_uid (rtx label, int max_uid)
 {
-rtx dest = next_real_insn (label);
+rtx_insn *dest = next_real_insn (label);
 int dest_uid;
 if (! dest)
 /* This can happen for an undefined label. */
@@ -6501,14 +6502,14 @@ split_branches (rtx_insn *first)
 enum attr_type type = get_attr_type (insn);
 if (type == TYPE_CBRANCH)
 {
-rtx next, beyond;
+rtx_insn *next, *beyond;

 if (get_attr_length (insn) > 4)
 {
 rtx src = SET_SRC (PATTERN (insn));
 rtx olabel = XEXP (XEXP (src, 1), 0);
 int addr = INSN_ADDRESSES (INSN_UID (insn));
-rtx label = 0;
+rtx_insn *label = 0;
 int dest_uid = get_dest_uid (olabel, max_uid);
 struct far_branch *bp = uid_branch[dest_uid];

@@ -6529,8 +6530,9 @@ split_branches (rtx_insn *first)
 uid_branch[dest_uid] = bp;
 bp->prev = far_branch_list;
 far_branch_list = bp;
-bp->far_label
-= XEXP (XEXP (SET_SRC (PATTERN (insn)), 1), 0);
+bp->far_label = as_a <rtx_insn *> (
+XEXP (XEXP (SET_SRC (PATTERN (insn)), 1),
+0));
 LABEL_NUSES (bp->far_label)++;
 }
 else
@@ -6538,7 +6540,7 @@ split_branches (rtx_insn *first)
 label = bp->near_label;
 if (! label && bp->address - addr >= CONDJUMP_MIN)
 {
-rtx block = bp->insert_place;
+rtx_insn *block = bp->insert_place;

 if (GET_CODE (PATTERN (block)) == RETURN)
 block = PREV_INSN (block);
@@ -6611,13 +6613,14 @@ split_branches (rtx_insn *first)
 else if (type == TYPE_JUMP || type == TYPE_RETURN)
 {
 int addr = INSN_ADDRESSES (INSN_UID (insn));
-rtx far_label = 0;
+rtx_insn *far_label = 0;
 int dest_uid = 0;
 struct far_branch *bp;

 if (type == TYPE_JUMP)
 {
-far_label = XEXP (SET_SRC (PATTERN (insn)), 0);
+far_label = as_a <rtx_insn *> (
+XEXP (SET_SRC (PATTERN (insn)), 0));
 dest_uid = get_dest_uid (far_label, max_uid);
 if (! dest_uid)
 {
@@ -9907,7 +9910,7 @@ branch_dest (rtx branch)
 We assume REG is a reload reg, and therefore does
 not live past labels.  It may live past calls or jumps though. */
 bool
-reg_unused_after (rtx reg, rtx insn)
+reg_unused_after (rtx reg, rtx_insn *insn)
 {
 enum rtx_code code;
 rtx set;
@@ -10147,7 +10150,7 @@ fpscr_set_from_mem (int mode, HARD_REG_SET regs_live)
 #endif

 static bool
-sequence_insn_p (rtx insn)
+sequence_insn_p (rtx_insn *insn)
 {
 rtx_insn *prev, *next;

@@ -10668,7 +10671,8 @@ sh_delegitimize_address (rtx orig_x)
 static rtx
 mark_constant_pool_use (rtx x)
 {
-rtx insn, lab, pattern;
+rtx_insn *insn, *lab;
+rtx pattern;

 if (x == NULL_RTX)
 return x;
@@ -10685,8 +10689,8 @@ mark_constant_pool_use (rtx x)

 /* Get the first label in the list of labels for the same constant
 and delete another labels in the list. */
-lab = x;
-for (insn = PREV_INSN (x); insn; insn = PREV_INSN (insn))
+lab = as_a <rtx_insn *> (x);
+for (insn = PREV_INSN (lab); insn; insn = PREV_INSN (insn))
 {
 if (!LABEL_P (insn)
 || LABEL_REFS (insn) != NEXT_INSN (insn))
@@ -10694,11 +10698,11 @@ mark_constant_pool_use (rtx x)
 lab = insn;
 }

-for (insn = LABEL_REFS (lab); insn; insn = LABEL_REFS (insn))
+for (rtx insn = LABEL_REFS (lab); insn; insn = LABEL_REFS (insn))
 INSN_DELETED_P (insn) = 1;

 /* Mark constants in a window. */
-for (insn = NEXT_INSN (x); insn; insn = NEXT_INSN (insn))
+for (insn = NEXT_INSN (as_a <rtx_insn *> (x)); insn; insn = NEXT_INSN (insn))
 {
 if (!NONJUMP_INSN_P (insn))
 continue;
@@ -10732,7 +10736,7 @@ mark_constant_pool_use (rtx x)
 of an unconditional jump BRANCH2.  We only want to do this if the
 resulting branch will have a short displacement. */
 bool
-sh_can_redirect_branch (rtx branch1, rtx branch2)
+sh_can_redirect_branch (rtx_insn *branch1, rtx_insn *branch2)
 {
 if (flag_expensive_optimizations && simplejump_p (branch2))
 {
@@ -12702,7 +12706,7 @@ extract_sfunc_addr (rtx insn)
 INSN is the use_sfunc_addr instruction, and REG is the register it
 guards. */
 bool
-check_use_sfunc_addr (rtx insn, rtx reg)
+check_use_sfunc_addr (rtx_insn *insn, rtx reg)
 {
 /* Search for the sfunc.  It should really come right after INSN. */
 while ((insn = NEXT_INSN (insn)))
@@ -12712,8 +12716,8 @@ check_use_sfunc_addr (rtx insn, rtx reg)
 if (! INSN_P (insn))
 continue;

-if (GET_CODE (PATTERN (insn)) == SEQUENCE)
-insn = XVECEXP (PATTERN (insn), 0, 0);
+if (rtx_sequence *seq = dyn_cast<rtx_sequence *> (PATTERN (insn)))
+insn = seq->insn (0);
 if (GET_CODE (PATTERN (insn)) != PARALLEL
 || get_attr_type (insn) != TYPE_SFUNC)
 continue;

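check_use_sfunc_addr and barrier_align above trade a GET_CODE (...) == SEQUENCE test plus XVECEXP access for a single dyn_cast <rtx_sequence *>, which yields a typed pointer on success and NULL otherwise, so test and use collapse into one conditional. The same idiom, sketched with an invented helper:

/* If INSN carries a filled delay slot, return the insn in slot 0 of its
   SEQUENCE; otherwise return INSN itself.  */
static rtx_insn *
sequence_head_or_self (rtx_insn *insn)
{
  if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
    return seq->insn (0);
  return insn;
}
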
@@ -9996,7 +9996,8 @@ label:
 sh_expand_epilogue (true);
 if (TARGET_SHCOMPACT)
 {
-rtx insn, set;
+rtx_insn *insn;
+rtx set;

 /* If epilogue clobbers r0, preserve it in macl. */
 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
@@ -10878,7 +10879,7 @@ label:
 (clobber (match_scratch:SI 3 "=X,1"))]
 "TARGET_SH1"
 {
-rtx diff_vec = PATTERN (NEXT_INSN (operands[2]));
+rtx diff_vec = PATTERN (NEXT_INSN (as_a <rtx_insn *> (operands[2])));

 gcc_assert (GET_CODE (diff_vec) == ADDR_DIFF_VEC);

@@ -10912,7 +10913,7 @@ label:
 (clobber (match_operand:SI 4 "" "=X,1"))]
 "TARGET_SH2 && reload_completed && flag_pic"
 {
-rtx diff_vec = PATTERN (NEXT_INSN (operands[2]));
+rtx diff_vec = PATTERN (NEXT_INSN (as_a <rtx_insn *> (operands[2])));
 gcc_assert (GET_CODE (diff_vec) == ADDR_DIFF_VEC);

 switch (GET_MODE (diff_vec))
@@ -10950,7 +10951,7 @@ label:
 UNSPEC_CASESI)))]
 "TARGET_SHMEDIA"
 {
-rtx diff_vec = PATTERN (NEXT_INSN (operands[2]));
+rtx diff_vec = PATTERN (NEXT_INSN (as_a <rtx_insn *> (operands[2])));

 gcc_assert (GET_CODE (diff_vec) == ADDR_DIFF_VEC);

@@ -10977,7 +10978,7 @@ label:
 (label_ref:DI (match_operand 3 "" ""))] UNSPEC_CASESI)))]
 "TARGET_SHMEDIA"
 {
-rtx diff_vec = PATTERN (NEXT_INSN (operands[3]));
+rtx diff_vec = PATTERN (NEXT_INSN (as_a <rtx_insn *> (operands[3])));

 gcc_assert (GET_CODE (diff_vec) == ADDR_DIFF_VEC);

@ -88,9 +88,9 @@ private:
|
||||
struct ccreg_value
|
||||
{
|
||||
// The insn at which the ccreg value was determined.
|
||||
// Might be NULL_RTX if e.g. an unknown value is recorded for an
|
||||
// Might be NULL if e.g. an unknown value is recorded for an
|
||||
// empty basic block.
|
||||
rtx insn;
|
||||
rtx_insn *insn;
|
||||
|
||||
// The basic block where the insn was discovered.
|
||||
basic_block bb;
|
||||
@ -111,7 +111,7 @@ private:
|
||||
// Given a start insn and its basic block, recursively determine all
|
||||
// possible ccreg values in all basic block paths that can lead to the
|
||||
// start insn.
|
||||
void find_last_ccreg_values (rtx start_insn, basic_block bb,
|
||||
void find_last_ccreg_values (rtx_insn *start_insn, basic_block bb,
|
||||
std::vector<ccreg_value>& values_out,
|
||||
std::vector<basic_block>& prev_visited_bb) const;
|
||||
|
||||
@ -205,7 +205,7 @@ sh_optimize_sett_clrt::execute (function* fun)
|
||||
// be optimized.
|
||||
basic_block bb;
|
||||
FOR_EACH_BB_REVERSE_FN (bb, fun)
|
||||
for (rtx next_i, i = NEXT_INSN (BB_HEAD (bb));
|
||||
for (rtx_insn *next_i, *i = NEXT_INSN (BB_HEAD (bb));
|
||||
i != NULL_RTX && i != BB_END (bb); i = next_i)
|
||||
{
|
||||
next_i = NEXT_INSN (i);
|
||||
@ -309,7 +309,7 @@ sh_optimize_sett_clrt
|
||||
|
||||
void
|
||||
sh_optimize_sett_clrt
|
||||
::find_last_ccreg_values (rtx start_insn, basic_block bb,
|
||||
::find_last_ccreg_values (rtx_insn *start_insn, basic_block bb,
|
||||
std::vector<ccreg_value>& values_out,
|
||||
std::vector<basic_block>& prev_visited_bb) const
|
||||
{
|
||||
@ -322,7 +322,7 @@ sh_optimize_sett_clrt
|
||||
log_msg ("(prev visited [bb %d])", prev_visited_bb.back ()->index);
|
||||
log_msg ("\n");
|
||||
|
||||
for (rtx i = start_insn; i != NULL_RTX && i != PREV_INSN (BB_HEAD (bb));
|
||||
for (rtx_insn *i = start_insn; i != NULL && i != PREV_INSN (BB_HEAD (bb));
|
||||
i = PREV_INSN (i))
|
||||
{
|
||||
if (!INSN_P (i))
|
||||
|
@@ -250,8 +250,8 @@ In order to handle cases such as above the RTL pass does the following:
 
 struct set_of_reg
 {
-  // The insn where the search stopped or NULL_RTX.
-  rtx insn;
+  // The insn where the search stopped or NULL.
+  rtx_insn *insn;
 
   // The set rtx of the specified reg if found, NULL_RTX otherwise.
   // Notice that the set rtx can also be in a parallel.
@@ -281,14 +281,14 @@ struct set_of_reg
 // Given a reg rtx and a start insn find the insn (in the same basic block)
 // that sets the reg.
 static set_of_reg
-find_set_of_reg_bb (rtx reg, rtx insn)
+find_set_of_reg_bb (rtx reg, rtx_insn *insn)
 {
   set_of_reg result = { insn, NULL_RTX };
 
-  if (!REG_P (reg) || insn == NULL_RTX)
+  if (!REG_P (reg) || insn == NULL)
     return result;
 
-  for (result.insn = insn; result.insn != NULL_RTX;
+  for (result.insn = insn; result.insn != NULL;
        result.insn = prev_nonnote_insn_bb (result.insn))
     {
       if (BARRIER_P (result.insn))
@@ -338,7 +338,7 @@ is_adjacent_bb (basic_block a, basic_block b)
 
 // Internal function of trace_reg_uses.
 static void
-trace_reg_uses_1 (rtx reg, rtx start_insn, basic_block bb, int& count,
+trace_reg_uses_1 (rtx reg, rtx_insn *start_insn, basic_block bb, int& count,
                  std::vector<basic_block>& visited_bb, rtx abort_at_insn)
 {
   if (bb == NULL)
@@ -360,7 +360,7 @@ trace_reg_uses_1 (rtx reg, rtx start_insn, basic_block bb, int& count,
   if (end_insn == NULL_RTX)
     log_return_void ("[bb %d] end_insn is null\n", bb->index);
 
-  for (rtx i = NEXT_INSN (start_insn); i != end_insn; i = NEXT_INSN (i))
+  for (rtx_insn *i = NEXT_INSN (start_insn); i != end_insn; i = NEXT_INSN (i))
     {
       if (INSN_P (i))
        {
@@ -396,7 +396,7 @@ trace_reg_uses_1 (rtx reg, rtx start_insn, basic_block bb, int& count,
 // that insn.  If the insn 'abort_at_insn' uses the specified reg, it is also
 // counted.
 static int
-trace_reg_uses (rtx reg, rtx start_insn, rtx abort_at_insn)
+trace_reg_uses (rtx reg, rtx_insn *start_insn, rtx abort_at_insn)
 {
   log_msg ("\ntrace_reg_uses\nreg = ");
   log_rtx (reg);
@@ -463,7 +463,7 @@ private:
   // A ccreg trace for a conditional branch.
   struct cbranch_trace
   {
-    rtx cbranch_insn;
+    rtx_insn *cbranch_insn;
     branch_condition_type_t cbranch_type;
 
     // The comparison against zero right before the conditional branch.
@@ -473,7 +473,7 @@ private:
     // the BB of the cbranch itself and might be empty.
     std::list<bb_entry> bb_entries;
 
-    cbranch_trace (rtx insn)
+    cbranch_trace (rtx_insn *insn)
     : cbranch_insn (insn),
       cbranch_type (unknown_branch_condition),
       setcc ()
@@ -537,7 +537,8 @@ private:
     set_not_found,
     other_set_found
   };
-  record_return_t record_set_of_reg (rtx reg, rtx start_insn, bb_entry& e);
+  record_return_t record_set_of_reg (rtx reg, rtx_insn *start_insn,
+                                    bb_entry& e);
 
   // Tells whether the cbranch insn of the specified bb_entry can be removed
   // safely without triggering any side effects.
@@ -584,7 +585,7 @@ private:
 
   // Given a branch insn, try to optimize its branch condition.
   // If any insns are modified or added they are added to 'm_touched_insns'.
-  void try_optimize_cbranch (rtx i);
+  void try_optimize_cbranch (rtx_insn *i);
 };
 
 
@@ -670,7 +671,7 @@ sh_treg_combine::is_inverted_ccreg (const_rtx x) const
 }
 
 sh_treg_combine::record_return_t
-sh_treg_combine::record_set_of_reg (rtx reg, rtx start_insn,
+sh_treg_combine::record_set_of_reg (rtx reg, rtx_insn *start_insn,
                                    bb_entry& new_entry)
 {
   log_msg ("\n[bb %d]\n", new_entry.bb->index);
@@ -680,7 +681,7 @@ sh_treg_combine::record_set_of_reg (rtx reg, rtx start_insn,
 
   new_entry.cstore_type = cstore_unknown;
 
-  for (rtx i = start_insn; i != NULL_RTX; )
+  for (rtx_insn *i = start_insn; i != NULL; )
     {
       new_entry.cstore = find_set_of_reg_bb (reg, i);
 
@@ -791,7 +792,7 @@ sh_treg_combine::can_remove_cstore (const bb_entry& e,
   // must not be a usage of the copied regs between the reg-reg copies.
   // Otherwise we assume that the result of the cstore is used in some
   // other way.
-  rtx prev_insn = e.cstore.insn;
+  rtx_insn *prev_insn = e.cstore.insn;
   for (std::vector<set_of_reg>::const_reverse_iterator i =
           e.cstore_reg_reg_copies.rbegin ();
        i != e.cstore_reg_reg_copies.rend (); ++i)
@@ -1262,7 +1263,7 @@ sh_treg_combine::try_eliminate_cstores (cbranch_trace& trace,
 }
 
 void
-sh_treg_combine::try_optimize_cbranch (rtx insn)
+sh_treg_combine::try_optimize_cbranch (rtx_insn *insn)
 {
   cbranch_trace trace (insn);
 
@@ -1468,7 +1469,7 @@ sh_treg_combine::execute (function *fun)
   basic_block bb;
   FOR_EACH_BB_FN (bb, fun)
     {
-      rtx i = BB_END (bb);
+      rtx_insn *i = BB_END (bb);
       if (any_condjump_p (i) && onlyjump_p (i))
        try_optimize_cbranch (i);
     }
@@ -83,7 +83,7 @@ extern void emit_conditional_branch_insn (rtx []);
 extern int registers_ok_for_ldd_peep (rtx, rtx);
 extern int mems_ok_for_ldd_peep (rtx, rtx, rtx);
 extern rtx widen_mem_for_ldd_peep (rtx, rtx, enum machine_mode);
-extern int empty_delay_slot (rtx);
+extern int empty_delay_slot (rtx_insn *);
 extern int emit_cbcond_nop (rtx);
 extern int eligible_for_call_delay (rtx);
 extern int eligible_for_return_delay (rtx);
@@ -99,7 +99,7 @@ extern int memory_ok_for_ldd (rtx);
 extern int v9_regcmp_p (enum rtx_code);
 /* Function used for V8+ code generation.  Returns 1 if the high
    32 bits of REG are 0 before INSN.  */
-extern int sparc_check_64 (rtx, rtx);
+extern int sparc_check_64 (rtx, rtx_insn *);
 extern rtx gen_df_reg (rtx, int);
 extern void sparc_expand_compare_and_swap (rtx op[]);
 extern void sparc_expand_vector_init (rtx, rtx);
@@ -550,7 +550,7 @@ static bool sparc_legitimate_constant_p (enum machine_mode, rtx);
 static rtx sparc_builtin_saveregs (void);
 static int epilogue_renumber (rtx *, int);
 static bool sparc_assemble_integer (rtx, unsigned int, int);
-static int set_extends (rtx);
+static int set_extends (rtx_insn *);
 static void sparc_asm_function_prologue (FILE *, HOST_WIDE_INT);
 static void sparc_asm_function_epilogue (FILE *, HOST_WIDE_INT);
 #ifdef TARGET_SOLARIS
@@ -875,7 +875,7 @@ mem_ref (rtx x)
 static unsigned int
 sparc_do_work_around_errata (void)
 {
-  rtx insn, next;
+  rtx_insn *insn, *next;
 
   /* Force all instructions to be split into their final form.  */
   split_all_insns_noflow ();
@@ -887,8 +887,9 @@ sparc_do_work_around_errata (void)
       rtx set;
 
       /* Look into the instruction in a delay slot.  */
-      if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
-       insn = XVECEXP (PATTERN (insn), 0, 1);
+      if (NONJUMP_INSN_P (insn))
+       if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
+         insn = seq->insn (1);
 
       /* Look for a single-word load into an odd-numbered FP register.  */
       if (sparc_fix_at697f
@@ -3425,7 +3426,7 @@ emit_tfmode_cvt (enum rtx_code code, rtx *operands)
    nop into its delay slot.  */
 
 int
-empty_delay_slot (rtx insn)
+empty_delay_slot (rtx_insn *insn)
 {
   rtx seq;
 
@@ -9613,7 +9614,7 @@ sparc_issue_rate (void)
 }
 
 static int
-set_extends (rtx insn)
+set_extends (rtx_insn *insn)
 {
   register rtx pat = PATTERN (insn);
 
@@ -9779,7 +9780,7 @@ sparc_output_deferred_case_vectors (void)
    unknown.  Return 1 if the high bits are zero, -1 if the register is
    sign extended.  */
 int
-sparc_check_64 (rtx x, rtx insn)
+sparc_check_64 (rtx x, rtx_insn *insn)
 {
   /* If a register is set only once it is safe to ignore insns this
      code does not know how to handle.  The loop will either recognize
@@ -263,7 +263,7 @@ xstormy16_split_cbranch (enum machine_mode mode, rtx label, rtx comparison,
 {
   rtx op0 = XEXP (comparison, 0);
   rtx op1 = XEXP (comparison, 1);
-  rtx seq, last_insn;
+  rtx_insn *seq, *last_insn;
   rtx compare;
 
   start_sequence ();
@@ -2389,7 +2389,7 @@ xstormy16_expand_builtin (tree exp, rtx target,
    patterns.  */
 
 static void
-combine_bnp (rtx insn)
+combine_bnp (rtx_insn *insn)
 {
   int insn_code, regno, need_extend;
   unsigned int mask;
@@ -2606,7 +2606,7 @@ combine_bnp (rtx insn)
 static void
 xstormy16_reorg (void)
 {
-  rtx insn;
+  rtx_insn *insn;
 
   for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
     {
@@ -1114,15 +1114,15 @@ ep_memory_operand (rtx op, enum machine_mode mode, int unsigned_load)
    taking care to save and preserve the ep.  */
 
 static void
-substitute_ep_register (rtx first_insn,
-                       rtx last_insn,
+substitute_ep_register (rtx_insn *first_insn,
+                       rtx_insn *last_insn,
                        int uses,
                        int regno,
                        rtx * p_r1,
                        rtx * p_ep)
 {
   rtx reg = gen_rtx_REG (Pmode, regno);
-  rtx insn;
+  rtx_insn *insn;
 
   if (!*p_r1)
     {
@@ -1227,8 +1227,8 @@ v850_reorg (void)
   struct
   {
     int uses;
-    rtx first_insn;
-    rtx last_insn;
+    rtx_insn *first_insn;
+    rtx_insn *last_insn;
   }
   regs[FIRST_PSEUDO_REGISTER];
 
@@ -1236,7 +1236,7 @@ v850_reorg (void)
   int use_ep = FALSE;
   rtx r1 = NULL_RTX;
   rtx ep = NULL_RTX;
-  rtx insn;
+  rtx_insn *insn;
   rtx pattern;
 
   /* If not ep mode, just return now.  */
@@ -1246,8 +1246,8 @@ v850_reorg (void)
   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
     {
       regs[i].uses = 0;
-      regs[i].first_insn = NULL_RTX;
-      regs[i].last_insn = NULL_RTX;
+      regs[i].first_insn = NULL;
+      regs[i].last_insn = NULL;
     }
 
   for (insn = get_insns (); insn != NULL_RTX; insn = NEXT_INSN (insn))
@@ -1280,8 +1280,8 @@ v850_reorg (void)
              for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
                {
                  regs[i].uses = 0;
-                 regs[i].first_insn = NULL_RTX;
-                 regs[i].last_insn = NULL_RTX;
+                 regs[i].first_insn = NULL;
+                 regs[i].last_insn = NULL;
                }
              break;
 
@@ -1413,8 +1413,8 @@ v850_reorg (void)
                  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
                    {
                      regs[i].uses = 0;
-                     regs[i].first_insn = NULL_RTX;
-                     regs[i].last_insn = NULL_RTX;
+                     regs[i].first_insn = NULL;
+                     regs[i].last_insn = NULL;
                    }
                }
            }
@@ -1422,8 +1422,8 @@ v850_reorg (void)
              for (i = regno; i < endregno; i++)
                {
                  regs[i].uses = 0;
-                 regs[i].first_insn = NULL_RTX;
-                 regs[i].last_insn = NULL_RTX;
+                 regs[i].first_insn = NULL;
+                 regs[i].last_insn = NULL;
                }
            }
        }
@@ -2465,7 +2465,7 @@ add_call_site (rtx landing_pad, int action, int section)
 }
 
 static rtx_note *
-emit_note_eh_region_end (rtx insn)
+emit_note_eh_region_end (rtx_insn *insn)
 {
   rtx_insn *next = NEXT_INSN (insn);
 
@@ -189,7 +189,7 @@ static int app_on;
 /* If we are outputting an insn sequence, this contains the sequence rtx.
    Zero otherwise.  */
 
-rtx final_sequence;
+rtx_sequence *final_sequence;
 
 #ifdef ASSEMBLER_DIALECT
 
@@ -1279,13 +1279,14 @@ shorten_branches (rtx_insn *first)
        {
          rtx body = PATTERN (insn);
          int old_length = insn_lengths[uid];
-         rtx rel_lab = XEXP (XEXP (body, 0), 0);
+         rtx_insn *rel_lab =
+           safe_as_a <rtx_insn *> (XEXP (XEXP (body, 0), 0));
          rtx min_lab = XEXP (XEXP (body, 2), 0);
          rtx max_lab = XEXP (XEXP (body, 3), 0);
          int rel_addr = INSN_ADDRESSES (INSN_UID (rel_lab));
          int min_addr = INSN_ADDRESSES (INSN_UID (min_lab));
          int max_addr = INSN_ADDRESSES (INSN_UID (max_lab));
-         rtx prev;
+         rtx_insn *prev;
          int rel_align = 0;
          addr_diff_vec_flags flags;
          enum machine_mode vec_mode;
@@ -2619,7 +2620,7 @@ final_scan_insn (rtx uncast_insn, FILE *file, int optimize_p ATTRIBUTE_UNUSED,
          /* A delayed-branch sequence */
          int i;
 
-         final_sequence = body;
+         final_sequence = seq;
 
          /* The first insn in this SEQUENCE might be a JUMP_INSN that will
            force the restoration of a comparison that was previously
@@ -3019,7 +3019,8 @@ assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm,
       && insn_operand_matches (icode, 1, op1))
     {
       enum rtx_code code = unsignedp ? ZERO_EXTEND : SIGN_EXTEND;
-      rtx insn, insns, t = op1;
+      rtx_insn *insn, *insns;
+      rtx t = op1;
       HARD_REG_SET hardregs;
 
       start_sequence ();
@@ -3038,8 +3039,9 @@ assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm,
        }
       else
        t = op1;
-      insn = gen_extend_insn (op0, t, promoted_nominal_mode,
-                             data->passed_mode, unsignedp);
+      insn = as_a <rtx_insn *> (
+               gen_extend_insn (op0, t, promoted_nominal_mode,
+                                data->passed_mode, unsignedp));
       emit_insn (insn);
       insns = get_insns ();
 
@@ -177,7 +177,7 @@ main (int argc, char **argv)
          if (! have_delay)
            {
              printf ("extern int num_delay_slots (rtx);\n");
-             printf ("extern int eligible_for_delay (rtx, int, rtx, int);\n\n");
+             printf ("extern int eligible_for_delay (rtx_insn *, int, rtx_insn *, int);\n\n");
              printf ("extern int const_num_delay_slots (rtx);\n\n");
              have_delay = 1;
            }
@@ -187,14 +187,14 @@ main (int argc, char **argv)
              if (XVECEXP (desc, 1, i + 1) && ! have_annul_true)
                {
                  printf ("#define ANNUL_IFTRUE_SLOTS\n");
-                 printf ("extern int eligible_for_annul_true (rtx, int, rtx, int);\n");
+                 printf ("extern int eligible_for_annul_true (rtx_insn *, int, rtx_insn *, int);\n");
                  have_annul_true = 1;
                }
 
              if (XVECEXP (desc, 1, i + 2) && ! have_annul_false)
                {
                  printf ("#define ANNUL_IFFALSE_SLOTS\n");
-                 printf ("extern int eligible_for_annul_false (rtx, int, rtx, int);\n");
+                 printf ("extern int eligible_for_annul_false (rtx_insn *, int, rtx_insn *, int);\n");
                  have_annul_false = 1;
                }
            }
@@ -4456,11 +4456,11 @@ write_eligible_delay (FILE *outf, const char *kind)
   /* Write function prelude.  */
 
   fprintf (outf, "int\n");
-  fprintf (outf, "eligible_for_%s (rtx delay_insn ATTRIBUTE_UNUSED, int slot, \n"
-                "                 rtx candidate_insn, int flags ATTRIBUTE_UNUSED)\n",
+  fprintf (outf, "eligible_for_%s (rtx_insn *delay_insn ATTRIBUTE_UNUSED, int slot, \n"
+                "                 rtx_insn *candidate_insn, int flags ATTRIBUTE_UNUSED)\n",
          kind);
   fprintf (outf, "{\n");
-  fprintf (outf, "  rtx insn;\n");
+  fprintf (outf, "  rtx_insn *insn;\n");
   fprintf (outf, "\n");
   fprintf (outf, "  gcc_assert (slot < %d);\n", max_slots);
   fprintf (outf, "\n");
@@ -360,7 +360,8 @@ discover_loops (bitmap_obstack *loop_stack, struct hw_doloop_hooks *hooks)
   FOR_EACH_BB_FN (bb, cfun)
     {
       rtx_insn *tail = BB_END (bb);
-      rtx insn, reg;
+      rtx_insn *insn;
+      rtx reg;
 
       while (tail && NOTE_P (tail) && tail != BB_HEAD (bb))
        tail = PREV_INSN (tail);
@@ -378,7 +379,7 @@ discover_loops (bitmap_obstack *loop_stack, struct hw_doloop_hooks *hooks)
 
          /* There's a degenerate case we can handle - an empty loop consisting
            of only a back branch.  Handle that by deleting the branch.  */
-         insn = JUMP_LABEL (tail);
+         insn = JUMP_LABEL_AS_INSN (tail);
          while (insn && !NONDEBUG_INSN_P (insn))
            insn = NEXT_INSN (insn);
          if (insn == tail)
@@ -75,7 +75,7 @@ struct GTY (()) hwloop_info_d
   rtx iter_reg;
 
   /* The new label placed at the beginning of the loop. */
-  rtx start_label;
+  rtx_insn *start_label;
 
   /* The new label placed at the end of the loop. */
   rtx end_label;
gcc/ira.c
@@ -2016,7 +2016,8 @@ decrease_live_ranges_number (void)
 {
   basic_block bb;
   rtx_insn *insn;
-  rtx set, src, dest, dest_death, p, q, note;
+  rtx set, src, dest, dest_death, q, note;
+  rtx_insn *p;
   int sregno, dregno;
 
   if (! flag_expensive_optimizations)
@@ -2581,9 +2582,10 @@ setup_reg_equiv_init (void)
    to update equiv info for register shuffles on the region borders
    and for caller save/restore insns.  */
 void
-ira_update_equiv_info_by_shuffle_insn (int to_regno, int from_regno, rtx insns)
+ira_update_equiv_info_by_shuffle_insn (int to_regno, int from_regno, rtx_insn *insns)
 {
-  rtx insn, x, note;
+  rtx_insn *insn;
+  rtx x, note;
 
   if (! ira_reg_equiv[from_regno].defined_p
       && (! ira_reg_equiv[to_regno].defined_p
@@ -2932,9 +2934,9 @@ validate_equiv_mem_from_store (rtx dest, const_rtx set ATTRIBUTE_UNUSED,
 
    Return 1 if MEMREF remains valid.  */
 static int
-validate_equiv_mem (rtx start, rtx reg, rtx memref)
+validate_equiv_mem (rtx_insn *start, rtx reg, rtx memref)
 {
-  rtx insn;
+  rtx_insn *insn;
   rtx note;
 
   equiv_mem = memref;
@@ -3208,9 +3210,9 @@ memref_referenced_p (rtx memref, rtx x)
 /* TRUE if some insn in the range (START, END] references a memory location
    that would be affected by a store to MEMREF.  */
 static int
-memref_used_between_p (rtx memref, rtx start, rtx end)
+memref_used_between_p (rtx memref, rtx_insn *start, rtx_insn *end)
 {
-  rtx insn;
+  rtx_insn *insn;
 
   for (insn = NEXT_INSN (start); insn != NEXT_INSN (end);
        insn = NEXT_INSN (insn))
@@ -183,7 +183,7 @@ extern rtx ira_eliminate_regs (rtx, enum machine_mode);
 extern void ira_set_pseudo_classes (bool, FILE *);
 extern void ira_implicitly_set_insn_hard_regs (HARD_REG_SET *);
 extern void ira_expand_reg_equiv (void);
-extern void ira_update_equiv_info_by_shuffle_insn (int, int, rtx);
+extern void ira_update_equiv_info_by_shuffle_insn (int, int, rtx_insn *);
 
 extern void ira_sort_regnos_for_alter_reg (int *, int, unsigned int *);
 extern void ira_mark_allocation_change (int);
@@ -713,10 +713,12 @@ doloop_optimize (struct loop *loop)
   doloop_pat = doloop_seq;
   if (INSN_P (doloop_pat))
     {
-      while (NEXT_INSN (doloop_pat) != NULL_RTX)
-       doloop_pat = NEXT_INSN (doloop_pat);
-      if (!JUMP_P (doloop_pat))
-       doloop_pat = NULL_RTX;
+      rtx_insn *doloop_insn = as_a <rtx_insn *> (doloop_pat);
+      while (NEXT_INSN (doloop_insn) != NULL_RTX)
+       doloop_insn = NEXT_INSN (doloop_insn);
+      if (!JUMP_P (doloop_insn))
+       doloop_insn = NULL;
+      doloop_pat = doloop_insn;
     }
 
   if (! doloop_pat
@@ -296,7 +296,7 @@ extern void output_quoted_string (FILE *, const char *);
    insn output code.
 
    This variable is defined in final.c.  */
-extern rtx final_sequence;
+extern rtx_sequence *final_sequence;
 
 /* The line number of the beginning of the current function.  Various
    md code needs this so that it can output relative linenumbers.  */
gcc/recog.c
@@ -3161,12 +3161,14 @@ peep2_reinit_state (regset live)
    if the replacement is rejected.  */
 
 static rtx
-peep2_attempt (basic_block bb, rtx insn, int match_len, rtx attempt)
+peep2_attempt (basic_block bb, rtx uncast_insn, int match_len, rtx_insn *attempt)
 {
+  rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
   int i;
   rtx_insn *last, *before_try, *x;
   rtx eh_note, as_note;
-  rtx old_insn, new_insn;
+  rtx old_insn;
+  rtx_insn *new_insn;
   bool was_call = false;
 
   /* If we are splitting an RTX_FRAME_RELATED_P insn, do not allow it to
@@ -3471,7 +3473,7 @@ peep2_fill_buffer (basic_block bb, rtx insn, regset live)
 static void
 peephole2_optimize (void)
 {
-  rtx insn;
+  rtx_insn *insn;
   bitmap live;
   int i;
   basic_block bb;
@@ -3504,7 +3506,8 @@ peephole2_optimize (void)
       insn = BB_HEAD (bb);
       for (;;)
        {
-         rtx attempt, head;
+         rtx_insn *attempt;
+         rtx head;
          int match_len;
 
          if (!past_end && !NONDEBUG_INSN_P (insn))
@@ -3531,7 +3534,8 @@ peephole2_optimize (void)
 
          /* Match the peephole.  */
          head = peep2_insn_data[peep2_current].insn;
-         attempt = peephole2_insns (PATTERN (head), head, &match_len);
+         attempt = safe_as_a <rtx_insn *> (
+                     peephole2_insns (PATTERN (head), head, &match_len));
          if (attempt != NULL)
           {
             rtx last = peep2_attempt (bb, head, match_len, attempt);
@@ -797,7 +797,7 @@ combine_reaching_defs (ext_cand *cand, const_rtx set_pat, ext_state *state)
                                    REGNO (SET_DEST (pat)));
          emit_move_insn (new_dst, new_src);
 
-         rtx insn = get_insns();
+         rtx_insn *insn = get_insns();
          end_sequence ();
          if (NEXT_INSN (insn))
            return false;
@@ -4573,7 +4573,6 @@ reload_as_needed (int live_known)
 #if defined (AUTO_INC_DEC)
   int i;
 #endif
-  rtx x;
   rtx_note *marker;
 
   memset (spill_reg_rtx, 0, sizeof spill_reg_rtx);
@@ -4662,7 +4661,6 @@ reload_as_needed (int live_known)
          if (n_reloads > 0)
            {
              rtx_insn *next = NEXT_INSN (insn);
-             rtx p;
 
              /* ??? PREV can get deleted by reload inheritance.
                 Work around this by emitting a marker note.  */
@@ -4693,7 +4691,7 @@ reload_as_needed (int live_known)
                fixup_eh_region_note (insn, prev, next);
 
              /* Adjust the location of REG_ARGS_SIZE.  */
-             p = find_reg_note (insn, REG_ARGS_SIZE, NULL_RTX);
+             rtx p = find_reg_note (insn, REG_ARGS_SIZE, NULL_RTX);
              if (p)
                {
                  remove_note (insn, p);
@@ -4705,7 +4703,9 @@ reload_as_needed (int live_known)
                 we have generated are valid.  If not, give an error
                 and delete them.  */
              if (asm_noperands (PATTERN (insn)) >= 0)
-               for (p = NEXT_INSN (prev); p != next; p = NEXT_INSN (p))
+               for (rtx_insn *p = NEXT_INSN (prev);
+                    p != next;
+                    p = NEXT_INSN (p))
                  if (p != insn && INSN_P (p)
                      && GET_CODE (PATTERN (p)) != USE
                      && (recog_memoized (p) < 0
@@ -4732,7 +4732,7 @@ reload_as_needed (int live_known)
 
          /* There may have been CLOBBER insns placed after INSN.  So scan
            between INSN and NEXT and use them to forget old reloads.  */
-         for (x = NEXT_INSN (insn); x != old_next; x = NEXT_INSN (x))
+         for (rtx_insn *x = NEXT_INSN (insn); x != old_next; x = NEXT_INSN (x))
           if (NONJUMP_INSN_P (x) && GET_CODE (PATTERN (x)) == CLOBBER)
             note_stores (PATTERN (x), forget_old_reloads_1, NULL);
 
@@ -4764,7 +4764,7 @@ reload_as_needed (int live_known)
               rtx reload_reg = rld[i].reg_rtx;
               enum machine_mode mode = GET_MODE (reload_reg);
               int n = 0;
-              rtx p;
+              rtx_insn *p;
 
               for (p = PREV_INSN (old_next); p != prev; p = PREV_INSN (p))
                 {
@@ -4846,7 +4846,8 @@ reload_as_needed (int live_known)
                           if (TEST_HARD_REG_BIT (reg_reloaded_valid,
                                                  in_hard_regno))
                             {
-                              for (x = old_prev ? NEXT_INSN (old_prev) : insn;
+                              for (rtx_insn *x = (old_prev ?
                                                   NEXT_INSN (old_prev) : insn);
                                   x != old_next;
                                   x = NEXT_INSN (x))
                                if (x == reg_reloaded_insn[in_hard_regno])
@@ -4874,7 +4875,7 @@ reload_as_needed (int live_known)
          /* If a pseudo that got a hard register is auto-incremented,
            we must purge records of copying it into pseudos without
            hard registers.  */
-         for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
+         for (rtx x = REG_NOTES (insn); x; x = XEXP (x, 1))
           if (REG_NOTE_KIND (x) == REG_INC)
             {
               /* See if this pseudo reg was reloaded in this insn.
@@ -8840,7 +8841,6 @@ delete_output_reload (rtx_insn *insn, int j, int last_reload_reg,
   int k;
   int n_occurrences;
   int n_inherited = 0;
-  rtx i1;
   rtx substed;
   unsigned regno;
   int nregs;
@@ -8887,7 +8887,7 @@ delete_output_reload (rtx_insn *insn, int j, int last_reload_reg,
   n_occurrences += count_occurrences (PATTERN (insn),
                                      eliminate_regs (substed, VOIDmode,
                                                      NULL_RTX), 0);
-  for (i1 = reg_equiv_alt_mem_list (REGNO (reg)); i1; i1 = XEXP (i1, 1))
+  for (rtx i1 = reg_equiv_alt_mem_list (REGNO (reg)); i1; i1 = XEXP (i1, 1))
     {
      gcc_assert (!rtx_equal_p (XEXP (i1, 0), substed));
      n_occurrences += count_occurrences (PATTERN (insn), XEXP (i1, 0), 0);
@@ -8906,7 +8906,7 @@ delete_output_reload (rtx_insn *insn, int j, int last_reload_reg,
     and we're within the same basic block, then the value can only
     pass through the reload reg and end up here.
     Otherwise, give up--return.  */
-  for (i1 = NEXT_INSN (output_reload_insn);
+  for (rtx_insn *i1 = NEXT_INSN (output_reload_insn);
       i1 != insn; i1 = NEXT_INSN (i1))
    {
      if (NOTE_INSN_BASIC_BLOCK_P (i1))
gcc/reorg.c
@@ -219,10 +219,10 @@ static int get_jump_flags (rtx, rtx);
 static int mostly_true_jump (rtx);
 static rtx get_branch_condition (rtx, rtx);
 static int condition_dominates_p (rtx, rtx);
-static int redirect_with_delay_slots_safe_p (rtx, rtx, rtx);
-static int redirect_with_delay_list_safe_p (rtx, rtx, rtx);
+static int redirect_with_delay_slots_safe_p (rtx_insn *, rtx, rtx);
+static int redirect_with_delay_list_safe_p (rtx_insn *, rtx, rtx_insn_list *);
 static int check_annul_list_true_false (int, rtx);
-static rtx_insn_list *steal_delay_list_from_target (rtx, rtx,
+static rtx_insn_list *steal_delay_list_from_target (rtx_insn *, rtx,
                                                    rtx_sequence *,
                                                    rtx_insn_list *,
                                                    struct resources *,
@@ -230,16 +230,16 @@ static rtx_insn_list *steal_delay_list_from_target (rtx, rtx,
                                                    struct resources *,
                                                    int, int *, int *,
                                                    rtx_insn **);
-static rtx_insn_list *steal_delay_list_from_fallthrough (rtx, rtx,
+static rtx_insn_list *steal_delay_list_from_fallthrough (rtx_insn *, rtx,
                                                         rtx_sequence *,
                                                         rtx_insn_list *,
                                                         struct resources *,
                                                         struct resources *,
                                                         struct resources *,
                                                         int, int *, int *);
-static void try_merge_delay_insns (rtx, rtx);
-static rtx redundant_insn (rtx, rtx, rtx);
-static int own_thread_p (rtx, rtx, int);
+static void try_merge_delay_insns (rtx, rtx_insn *);
+static rtx redundant_insn (rtx, rtx_insn *, rtx);
+static int own_thread_p (rtx_insn *, rtx, int);
 static void update_block (rtx, rtx);
 static int reorg_redirect_jump (rtx, rtx);
 static void update_reg_dead_notes (rtx, rtx);
@@ -252,7 +252,7 @@ static rtx_insn_list *fill_slots_from_thread (rtx_insn *, rtx,
                                              int *, rtx_insn_list *);
 static void fill_eager_delay_slots (void);
 static void relax_delay_slots (rtx_insn *);
-static void make_return_insns (rtx);
+static void make_return_insns (rtx_insn *);
 
 /* A wrapper around next_active_insn which takes care to return ret_rtx
    unchanged.  */
@@ -978,63 +978,62 @@ condition_dominates_p (rtx condition, rtx insn)
    any insns already in the delay slot of JUMP.  */
 
 static int
-redirect_with_delay_slots_safe_p (rtx jump, rtx newlabel, rtx seq)
+redirect_with_delay_slots_safe_p (rtx_insn *jump, rtx newlabel, rtx seq)
 {
   int flags, i;
-  rtx pat = PATTERN (seq);
+  rtx_sequence *pat = as_a <rtx_sequence *> (PATTERN (seq));
 
   /* Make sure all the delay slots of this jump would still
      be valid after threading the jump.  If they are still
     valid, then return nonzero.  */
 
   flags = get_jump_flags (jump, newlabel);
-  for (i = 1; i < XVECLEN (pat, 0); i++)
+  for (i = 1; i < pat->len (); i++)
    if (! (
 #ifdef ANNUL_IFFALSE_SLOTS
          (INSN_ANNULLED_BRANCH_P (jump)
-          && INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
-         ? eligible_for_annul_false (jump, i - 1,
-                                     XVECEXP (pat, 0, i), flags) :
+          && INSN_FROM_TARGET_P (pat->insn (i)))
+         ? eligible_for_annul_false (jump, i - 1, pat->insn (i), flags) :
 #endif
 #ifdef ANNUL_IFTRUE_SLOTS
          (INSN_ANNULLED_BRANCH_P (jump)
-          && ! INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
-         ? eligible_for_annul_true (jump, i - 1,
-                                    XVECEXP (pat, 0, i), flags) :
+          && ! INSN_FROM_TARGET_P (pat->insn (i)))
+         ? eligible_for_annul_true (jump, i - 1, pat->insn (i), flags) :
 #endif
-         eligible_for_delay (jump, i - 1, XVECEXP (pat, 0, i), flags)))
+         eligible_for_delay (jump, i - 1, pat->insn (i), flags)))
      break;
 
-  return (i == XVECLEN (pat, 0));
+  return (i == pat->len ());
 }
 
 /* Return nonzero if redirecting JUMP to NEWLABEL does not invalidate
    any insns we wish to place in the delay slot of JUMP.  */
 
 static int
-redirect_with_delay_list_safe_p (rtx jump, rtx newlabel, rtx delay_list)
+redirect_with_delay_list_safe_p (rtx_insn *jump, rtx newlabel,
                                 rtx_insn_list *delay_list)
 {
   int flags, i;
-  rtx li;
+  rtx_insn_list *li;
 
   /* Make sure all the insns in DELAY_LIST would still be
     valid after threading the jump.  If they are still
    valid, then return nonzero.  */
 
   flags = get_jump_flags (jump, newlabel);
-  for (li = delay_list, i = 0; li; li = XEXP (li, 1), i++)
+  for (li = delay_list, i = 0; li; li = li->next (), i++)
    if (! (
 #ifdef ANNUL_IFFALSE_SLOTS
          (INSN_ANNULLED_BRANCH_P (jump)
-          && INSN_FROM_TARGET_P (XEXP (li, 0)))
-         ? eligible_for_annul_false (jump, i, XEXP (li, 0), flags) :
+          && INSN_FROM_TARGET_P (li->insn ()))
+         ? eligible_for_annul_false (jump, i, li->insn (), flags) :
 #endif
 #ifdef ANNUL_IFTRUE_SLOTS
          (INSN_ANNULLED_BRANCH_P (jump)
-          && ! INSN_FROM_TARGET_P (XEXP (li, 0)))
-         ? eligible_for_annul_true (jump, i, XEXP (li, 0), flags) :
+          && ! INSN_FROM_TARGET_P (li->insn ()))
+         ? eligible_for_annul_true (jump, i, li->insn (), flags) :
 #endif
-         eligible_for_delay (jump, i, XEXP (li, 0), flags)))
+         eligible_for_delay (jump, i, li->insn (), flags)))
      break;
 
   return (li == NULL);
@@ -1085,7 +1084,7 @@ check_annul_list_true_false (int annul_true_p, rtx delay_list)
    execution should continue.  */
 
 static rtx_insn_list *
-steal_delay_list_from_target (rtx insn, rtx condition, rtx_sequence *seq,
+steal_delay_list_from_target (rtx_insn *insn, rtx condition, rtx_sequence *seq,
                              rtx_insn_list *delay_list, struct resources *sets,
                              struct resources *needed,
                              struct resources *other_needed,
@@ -1226,7 +1225,8 @@ steal_delay_list_from_target (rtx insn, rtx condition, rtx_sequence *seq,
    for INSN since unconditional branches are much easier to fill.  */
 
 static rtx_insn_list *
-steal_delay_list_from_fallthrough (rtx insn, rtx condition, rtx_sequence *seq,
+steal_delay_list_from_fallthrough (rtx_insn *insn, rtx condition,
                                   rtx_sequence *seq,
                                   rtx_insn_list *delay_list,
                                   struct resources *sets,
                                   struct resources *needed,
@@ -1307,10 +1307,10 @@ steal_delay_list_from_fallthrough (rtx insn, rtx condition, rtx_sequence *seq,
    we delete the merged insn.  */
 
 static void
-try_merge_delay_insns (rtx insn, rtx thread)
+try_merge_delay_insns (rtx insn, rtx_insn *thread)
 {
-  rtx trial, next_trial;
-  rtx delay_insn = XVECEXP (PATTERN (insn), 0, 0);
+  rtx_insn *trial, *next_trial;
+  rtx_insn *delay_insn = as_a <rtx_insn *> (XVECEXP (PATTERN (insn), 0, 0));
   int annul_p = JUMP_P (delay_insn) && INSN_ANNULLED_BRANCH_P (delay_insn);
   int slot_number = 1;
   int num_slots = XVECLEN (PATTERN (insn), 0);
@@ -1499,11 +1499,12 @@ try_merge_delay_insns (rtx insn, rtx thread)
    gain in rare cases.  */
 
 static rtx
-redundant_insn (rtx insn, rtx target, rtx delay_list)
+redundant_insn (rtx insn, rtx_insn *target, rtx delay_list)
 {
   rtx target_main = target;
   rtx ipat = PATTERN (insn);
-  rtx trial, pat;
+  rtx_insn *trial;
+  rtx pat;
   struct resources needed, set;
   int i;
   unsigned insns_to_search;
@@ -1714,10 +1715,10 @@ redundant_insn (rtx insn, rtx target, rtx delay_list)
    finding an active insn, we do not own this thread.  */
 
 static int
-own_thread_p (rtx thread, rtx label, int allow_fallthrough)
+own_thread_p (rtx_insn *thread, rtx label, int allow_fallthrough)
 {
-  rtx active_insn;
-  rtx insn;
+  rtx_insn *active_insn;
+  rtx_insn *insn;
 
   /* We don't own the function end.  */
   if (thread == 0 || ANY_RETURN_P (thread))
@@ -1884,7 +1885,7 @@ static vec <rtx> sibling_labels;
    the new label.  */
 
 static rtx_insn *
-get_label_before (rtx insn, rtx sibling)
+get_label_before (rtx_insn *insn, rtx sibling)
 {
   rtx_insn *label;
 
@@ -2247,7 +2248,7 @@ fill_simple_delay_slots (int non_jumps_p)
        {
          /* See comment in relax_delay_slots about necessity of using
            next_real_insn here.  */
-         rtx new_label = next_real_insn (next_trial);
+         rtx_insn *new_label = next_real_insn (next_trial);
 
         if (new_label != 0)
           new_label = get_label_before (new_label, JUMP_LABEL (trial));
@@ -2274,8 +2275,8 @@ fill_simple_delay_slots (int non_jumps_p)
           = fill_slots_from_thread (insn, const_true_rtx,
                                     next_active_insn (JUMP_LABEL (insn)),
                                     NULL, 1, 1,
-                                    own_thread_p (JUMP_LABEL (insn),
-                                                  JUMP_LABEL (insn), 0),
+                                    own_thread_p (JUMP_LABEL_AS_INSN (insn),
+                                                  JUMP_LABEL_AS_INSN (insn), 0),
                                     slots_to_fill, &slots_filled,
                                     delay_list);
 
@@ -3127,15 +3128,15 @@ delete_jump (rtx insn)
     delete_computation (insn);
 }
 
-static rtx
+static rtx_insn *
 label_before_next_insn (rtx x, rtx scan_limit)
 {
-  rtx insn = next_active_insn (x);
+  rtx_insn *insn = next_active_insn (x);
   while (insn)
    {
     insn = PREV_INSN (insn);
     if (insn == scan_limit || insn == NULL_RTX)
-      return NULL_RTX;
+      return NULL;
    if (LABEL_P (insn))
     break;
   }
@@ -3157,7 +3158,7 @@ relax_delay_slots (rtx_insn *first)
  /* Look at every JUMP_INSN and see if we can improve it.  */
  for (insn = first; insn; insn = next)
   {
-      rtx other;
+      rtx_insn *other;
     bool crossing;
 
    next = next_active_insn (insn);
@@ -3346,7 +3347,7 @@ relax_delay_slots (rtx_insn *first)
   {
    /* Figure out where to emit the special USE insn so we don't
      later incorrectly compute register live/death info.  */
-         rtx tmp = next_active_insn (trial);
+         rtx_insn *tmp = next_active_insn (trial);
   if (tmp == 0)
    tmp = find_end_label (simple_return_rtx);
 
@@ -3520,9 +3521,10 @@ relax_delay_slots (rtx_insn *first)
   RETURN as well.  */
 
 static void
-make_return_insns (rtx first)
+make_return_insns (rtx_insn *first)
 {
-  rtx insn, jump_insn, pat;
+  rtx_insn *insn;
+  rtx_insn *jump_insn;
  rtx real_return_label = function_return_label;
  rtx real_simple_return_label = function_simple_return_label;
  int slots, i;
@@ -3577,8 +3579,8 @@ make_return_insns (rtx first)
   else
    continue;
 
-      pat = PATTERN (insn);
-      jump_insn = XVECEXP (pat, 0, 0);
+      rtx_sequence *pat = as_a <rtx_sequence *> (PATTERN (insn));
+      jump_insn = pat->insn (0);
 
  /* If we can't make the jump into a RETURN, try to redirect it to the best
    RETURN and go on to the next insn.  */
@@ -3603,18 +3605,18 @@ make_return_insns (rtx first)
    if (! (
 #ifdef ANNUL_IFFALSE_SLOTS
          (INSN_ANNULLED_BRANCH_P (jump_insn)
-          && INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
+          && INSN_FROM_TARGET_P (pat->insn (i)))
         ? eligible_for_annul_false (jump_insn, i - 1,
-                                    XVECEXP (pat, 0, i), flags) :
+                                    pat->insn (i), flags) :
 #endif
 #ifdef ANNUL_IFTRUE_SLOTS
          (INSN_ANNULLED_BRANCH_P (jump_insn)
-          && ! INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
+          && ! INSN_FROM_TARGET_P (pat->insn (i)))
         ? eligible_for_annul_true (jump_insn, i - 1,
-                                   XVECEXP (pat, 0, i), flags) :
+                                   pat->insn (i), flags) :
 #endif
         eligible_for_delay (jump_insn, i - 1,
-                            XVECEXP (pat, 0, i), flags)))
+                            pat->insn (i), flags)))
      break;
  }
       else
@@ -3629,7 +3631,7 @@ make_return_insns (rtx first)
    insns for its delay slots, if it needs some.  */
       if (ANY_RETURN_P (PATTERN (jump_insn)))
  {
-         rtx prev = PREV_INSN (insn);
+         rtx_insn *prev = PREV_INSN (insn);
 
   delete_related_insns (insn);
   for (i = 1; i < XVECLEN (pat, 0); i++)
gcc/rtl.h
@@ -635,9 +635,9 @@ class GTY(()) rtx_note : public rtx_insn
        || JUMP_TABLE_DATA_P (X) \
        || BARRIER_P (X) \
        || LABEL_P (X)) \
-    && PREV_INSN (X) != NULL \
-    && NEXT_INSN (PREV_INSN (X)) == X \
-    ? PREV_INSN (X) : NULL)
+    && PREV_INSN (as_a <rtx_insn *> (X)) != NULL \
+    && NEXT_INSN (PREV_INSN (as_a <rtx_insn *> (X))) == X \
+    ? PREV_INSN (as_a <rtx_insn *> (X)) : NULL)
 
 /* Define macros to access the `code' field of the rtx.  */
 
@@ -1321,7 +1321,7 @@ inline int& INSN_UID (rtx insn)
    and an lvalue form:
      SET_NEXT_INSN/SET_PREV_INSN.  */
 
-inline rtx_insn *PREV_INSN (const_rtx insn)
+inline rtx_insn *PREV_INSN (const rtx_insn *insn)
 {
   rtx prev = XEXP (insn, 0);
   return safe_as_a <rtx_insn *> (prev);
@@ -1332,7 +1332,7 @@ inline rtx& SET_PREV_INSN (rtx_insn *insn)
   return XEXP (insn, 0);
 }
 
-inline rtx_insn *NEXT_INSN (const_rtx insn)
+inline rtx_insn *NEXT_INSN (const rtx_insn *insn)
 {
   rtx next = XEXP (insn, 1);
   return safe_as_a <rtx_insn *> (next);
@@ -1658,7 +1658,7 @@ enum label_kind
    be decremented and possibly the label can be deleted.  */
 #define JUMP_LABEL(INSN)   XCEXP (INSN, 7, JUMP_INSN)
 
-inline rtx_insn *JUMP_LABEL_AS_INSN (rtx_insn *insn)
+inline rtx_insn *JUMP_LABEL_AS_INSN (const rtx_insn *insn)
 {
   return safe_as_a <rtx_insn *> (JUMP_LABEL (insn));
 }
@@ -2739,12 +2739,12 @@ extern bool unsigned_reg_p (rtx);
 extern int reg_mentioned_p (const_rtx, const_rtx);
 extern int count_occurrences (const_rtx, const_rtx, int);
 extern int reg_referenced_p (const_rtx, const_rtx);
-extern int reg_used_between_p (const_rtx, const_rtx, const_rtx);
+extern int reg_used_between_p (const_rtx, const rtx_insn *, const rtx_insn *);
 extern int reg_set_between_p (const_rtx, const_rtx, const_rtx);
 extern int commutative_operand_precedence (rtx);
 extern bool swap_commutative_operands_p (rtx, rtx);
 extern int modified_between_p (const_rtx, const_rtx, const_rtx);
-extern int no_labels_between_p (const_rtx, const_rtx);
+extern int no_labels_between_p (const rtx_insn *, const rtx_insn *);
 extern int modified_in_p (const_rtx, const_rtx);
 extern int reg_set_p (const_rtx, const_rtx);
 extern rtx single_set_2 (const_rtx, const_rtx);
@@ -835,9 +835,9 @@ reg_mentioned_p (const_rtx reg, const_rtx in)
    no CODE_LABEL insn.  */
 
 int
-no_labels_between_p (const_rtx beg, const_rtx end)
+no_labels_between_p (const rtx_insn *beg, const rtx_insn *end)
 {
-  rtx p;
+  rtx_insn *p;
   if (beg == end)
     return 0;
   for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
@@ -850,7 +850,8 @@ no_labels_between_p (const_rtx beg, const_rtx end)
    FROM_INSN and TO_INSN (exclusive of those two).  */
 
 int
-reg_used_between_p (const_rtx reg, const_rtx from_insn, const_rtx to_insn)
+reg_used_between_p (const_rtx reg, const rtx_insn *from_insn,
+                   const rtx_insn *to_insn)
 {
   rtx_insn *insn;
 
@@ -946,8 +947,10 @@ reg_referenced_p (const_rtx x, const_rtx body)
    FROM_INSN and TO_INSN (exclusive of those two).  */
 
 int
-reg_set_between_p (const_rtx reg, const_rtx from_insn, const_rtx to_insn)
+reg_set_between_p (const_rtx reg, const_rtx uncast_from_insn, const_rtx to_insn)
 {
+  const rtx_insn *from_insn =
+    safe_as_a <const rtx_insn *> (uncast_from_insn);
   const rtx_insn *insn;
 
   if (from_insn == to_insn)
@@ -984,8 +987,10 @@ reg_set_p (const_rtx reg, const_rtx insn)
    X contains a MEM; this routine does use memory aliasing.  */
 
 int
-modified_between_p (const_rtx x, const_rtx start, const_rtx end)
+modified_between_p (const_rtx x, const_rtx uncast_start, const_rtx end)
 {
+  const rtx_insn *start =
+    safe_as_a <const rtx_insn *> (uncast_start);
   const enum rtx_code code = GET_CODE (x);
   const char *fmt;
   int i, j;
@@ -2871,7 +2876,7 @@ tablejump_p (const rtx_insn *insn, rtx *labelp, rtx_jump_table_data **tablep)
 
   label = JUMP_LABEL (insn);
   if (label != NULL_RTX && !ANY_RETURN_P (label)
-      && (table = NEXT_INSN (label)) != NULL_RTX
+      && (table = NEXT_INSN (as_a <rtx_insn *> (label))) != NULL_RTX
      && JUMP_TABLE_DATA_P (table))
    {
     if (labelp)
@@ -4083,10 +4083,10 @@ get_seqno_for_a_jump (insn_t insn, int old_seqno)
 /* Find the proper seqno for inserting at INSN.  Returns -1 if no predecessors
    with positive seqno exist.  */
 int
-get_seqno_by_preds (rtx insn)
+get_seqno_by_preds (rtx_insn *insn)
 {
   basic_block bb = BLOCK_FOR_INSN (insn);
-  rtx tmp = insn, head = BB_HEAD (bb);
+  rtx_insn *tmp = insn, *head = BB_HEAD (bb);
   insn_t *preds;
   int n, i, seqno;
 
@@ -4950,7 +4950,7 @@ recompute_rev_top_order (void)
 void
 clear_outdated_rtx_info (basic_block bb)
 {
-  rtx insn;
+  rtx_insn *insn;
 
   FOR_BB_INSNS (bb, insn)
     if (INSN_P (insn))
@@ -1628,7 +1628,7 @@ extern struct succs_info * compute_succs_info (insn_t, short);
 extern void free_succs_info (struct succs_info *);
 extern bool sel_insn_has_single_succ_p (insn_t, int);
 extern bool sel_num_cfg_preds_gt_1 (insn_t);
-extern int get_seqno_by_preds (rtx);
+extern int get_seqno_by_preds (rtx_insn *);
 
 extern bool bb_ends_ebb_p (basic_block);
 extern bool in_same_ebb_p (insn_t, insn_t);
@@ -1011,7 +1011,8 @@ build_store_vectors (void)
 {
   basic_block bb;
   int *regs_set_in_block;
-  rtx insn, st;
+  rtx_insn *insn;
+  rtx_insn_list *st;
   struct st_expr * ptr;
   unsigned int max_gcse_regno = max_reg_num ();
 
@@ -1027,9 +1028,9 @@ build_store_vectors (void)
 
   for (ptr = first_st_expr (); ptr != NULL; ptr = next_st_expr (ptr))
     {
-      for (st = ptr->avail_stores; st != NULL; st = XEXP (st, 1))
+      for (st = ptr->avail_stores; st != NULL; st = st->next ())
       {
-         insn = XEXP (st, 0);
+         insn = st->insn ();
         bb = BLOCK_FOR_INSN (insn);
 
        /* If we've already seen an available expression in this block,
@@ -1047,9 +1048,9 @@ build_store_vectors (void)
         bitmap_set_bit (st_avloc[bb->index], ptr->index);
       }
 
-      for (st = ptr->antic_stores; st != NULL; st = XEXP (st, 1))
+      for (st = ptr->antic_stores; st != NULL; st = st->next ())
       {
-         insn = XEXP (st, 0);
+         insn = st->insn ();
        bb = BLOCK_FOR_INSN (insn);
        bitmap_set_bit (st_antloc[bb->index], ptr->index);
      }
@@ -2845,7 +2845,7 @@ get_use_iv_cost (struct ivopts_data *data, struct iv_use *use,
 /* Returns estimate on cost of computing SEQ.  */
 
 static unsigned
-seq_cost (rtx seq, bool speed)
+seq_cost (rtx_insn *seq, bool speed)
 {
   unsigned cost = 0;
   rtx set;
@@ -2956,7 +2956,8 @@ prepare_decl_rtl (tree *expr_p, int *ws, void *data)
 static unsigned
 computation_cost (tree expr, bool speed)
 {
-  rtx seq, rslt;
+  rtx_insn *seq;
+  rtx rslt;
   tree type = TREE_TYPE (expr);
   unsigned cost;
   /* Avoid using hard regs in ways which may be unsupported.  */
@@ -3286,7 +3287,8 @@ get_address_cost (bool symbol_present, bool var_present,
   HOST_WIDE_INT rat, off = 0;
   int old_cse_not_expected, width;
   unsigned sym_p, var_p, off_p, rat_p, add_c;
-  rtx seq, addr, base;
+  rtx_insn *seq;
+  rtx addr, base;
   rtx reg0, reg1;
 
   data = (address_cost_data) xcalloc (1, sizeof (*data));
@@ -11,7 +11,3 @@ Phase 3: per-file commits within "config" subdirs: DONE
 Phase 4: removal of "scaffolding": DONE
 Phase 5: additional rtx_def subclasses: DONE
 Phase 6: use extra rtx_def subclasses: IN PROGRESS
-
-TODO: "Scaffolding" to be removed
-=================================
-* SET_NEXT_INSN, SET_PREV_INSN
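
A note on the cast helpers that recur throughout these hunks: as_a <T> is a checked cast that asserts when its operand is not a T, safe_as_a <T> is the same but lets a NULL operand pass through, and dyn_cast <T> returns NULL instead of asserting. All three come from gcc/is-a.h. The stand-alone sketch below only illustrates the idiom; the stub types, the code-based predicate, and the function names are simplified placeholders, not GCC's actual template definitions.

// Minimal sketch of the is-a.h checked-cast idiom (illustrative only).
#include <cassert>
#include <cstddef>

struct rtx_base { int code; };     // stand-in for rtx_def
struct insn_stub : rtx_base {};    // stand-in for the rtx_insn subclass

// Plays the role of is_a <rtx_insn *> (x): a cheap runtime type test.
static bool is_insn (const rtx_base *x) { return x != NULL && x->code == 1; }

// as_a: the caller guarantees the dynamic type; checked builds assert it.
static insn_stub *as_insn (rtx_base *x)
{
  assert (is_insn (x));
  return static_cast<insn_stub *> (x);
}

// safe_as_a: identical, except a NULL operand is returned unchanged.
static insn_stub *safe_as_insn (rtx_base *x)
{
  return x ? as_insn (x) : NULL;
}

// dyn_cast: returns NULL on a type mismatch rather than asserting, which
// is what enables guards like "if (rtx_sequence *seq = dyn_cast <...>)".
static insn_stub *dyn_cast_insn (rtx_base *x)
{
  return is_insn (x) ? static_cast<insn_stub *> (x) : NULL;
}

int main ()
{
  insn_stub i;
  i.code = 1;
  rtx_base *p = &i;
  assert (as_insn (p) == &i);            // checked cast succeeds
  assert (safe_as_insn (NULL) == NULL);  // NULL passes through
  assert (dyn_cast_insn (p) != NULL);    // type test succeeds
  return 0;
}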