except.c (dw2_size_of_call_site_table, [...]): Use vector API for call_site_record.

* except.c (dw2_size_of_call_site_table,
	sjlj_size_of_call_site_table): Use vector API for call_site_record.

	* cgraphbuild.c (build_cgraph_edges): Update.
	* tree-pass.h: Update comment.
	* final.c (leaf_function_p): Update.
	(leaf_renumber_regs): Update.
	(rest_of_clean_state): Update.
	* omp-low.c (expand_omp_parallel): Update.
	* ipa-reference.c (analyze_function): Update.
	* reorg.c (find_end_label): Update.
	(optimize_skip): Update.
	(fill_simple_delay_slots): Update.
	(make_return_insns): Update.
	(dbr_schedule): Update.
	* gimple-low.c (record_vars_into): Update.
	* cfgbuild.c (make_edges): Update.
	* function.c (assign_stack_local): Update.
	(assign_parm_adjust_stack_rtl): Update.
	(locate_and_pad_parm): Update.
	(allocate_struct_function): Do not initialize stack_alignment_needed
	and preferred_stack_boundary here.
	(stack_protect_prologue): Update.
	(stack_protect_epilogue): Update.
	(expand_function_start): Initialize stack_alignment_needed,
	preferred_stack_boundary and max_jumptable_ents.
	(expand_function_end): Update.
	(free_after_compilation): Do not NULLify epilogue_delay_list.
	* function.h (struct rtl_data): Add stack_protect_guard,
	stack_alignment_needed,
	preferred_stack_boundary, epilogue_delay_list.
	(struct function): Remove value_histograms, stack_alignment_needed,
	preferred_stack_boundary, epilogue_delay_list, max_jumptable_ents,
	last_label_uid,
	unexpanded_var_list, stack_protect_guard.
	(current_function_epilogue_delay_list): Remove.
	* ipa-type-escape.c (analyze_function): Update.
	* gimplify.c (pop_gimplify_context): Update comment.
	* calls.c (expand_call): Update.
	(emit_library_call_value_1): Update.
	* except.c (set_nothrow_function_flags): Update.
	* cfgexpand.c (get_decl_align_unit): Update.
	(create_stack_guard): Update.
	(estimated_stack_frame_size): Update.
	(expand_used_vars): Update.
	(tree_expand_cfg): Free histograms earlier, init expansion variables.
	* explow.c (allocate_dynamic_stack_space): Update.
	* tree-ssa-live.c (remove_unused_locals): Update.
	* varasm.c (mark_constant_pool): Update.
	* tree-inline.c (remap_decls): Update.
	(initialize_cfun): Update.
	(declare_return_variable): Update.
	(inline_forbidden_p): Update.
	(expand_call_inline): Update.
	(declare_inline_vars): Update.
	(tree_function_versioning): Update.
	* tree-flow.h (value_histograms): New.
	(VALUE_HISTOGRAMS): New macro.
	* basic-block.h (control_flow_graph): Add max_jumptable_ents,
	last_label_uid.
	* tree-cfg.c (set_bb_for_stmt): Update.
	(replace_by_duplicate_decl): Update.
	(move_block_to_fn): Update.
	(new_label_mapper): Update.
	(dump_function_to_file): Update.
	* ipa-struct-reorg.c (build_data_structure): Update.
	* cfgrtl.c (print_rtl_with_bb): Update.
	* reload1.c (reload): Update.
	* config/i386/i386.c (setup_incoming_varargs_64,
	ix86_compute_frame_layout): Update.
	* config/arc/arc.c (arc_output_function_epilogue): Update.

From-SVN: r134425
Jan Hubicka, 2008-04-18 07:26:12 +02:00 (committed by Jan Hubicka)
parent 799cff46ec, commit cb91fab00f
28 changed files with 206 additions and 136 deletions
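
For orientation, a condensed sketch of the two conversions the patch applies throughout: per-function RTL state is reached through crtl (struct rtl_data) rather than through cfun, and the exception call-site table becomes a GC vector accessed with the VEC_* macros. The sketch simply mirrors the dw2_size_of_call_site_table hunk in except.c below; it assumes GCC's internal headers and globals (vec.h, except.h, cfun/crtl) and is not compilable outside a GCC tree.

/* Sketch only -- mirrors the except.c hunk below; needs GCC-internal
   headers and the crtl global.  */
static int
dw2_size_of_call_site_table (void)
{
  /* The call-site table now lives in a GC vector on crtl,
     not in a plain array on cfun->eh.  */
  int n = VEC_length (call_site_record, crtl->eh.call_site_record);
  int size = n * (4 + 4 + 4);
  int i;

  for (i = 0; i < n; ++i)
    {
      /* VEC_index replaces the old &cfun->eh->call_site_data[i].  */
      struct call_site_record *cs
        = VEC_index (call_site_record, crtl->eh.call_site_record, i);
      size += size_of_uleb128 (cs->action);
    }

  return size;
}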

basic-block.h

@ -397,6 +397,13 @@ struct control_flow_graph GTY(())
/* Number of basic blocks in the dominance tree. */
unsigned x_n_bbs_in_dom_tree[2];
/* Maximal number of entities in the single jumptable. Used to estimate
final flowgraph size. */
int max_jumptable_ents;
/* UIDs for LABEL_DECLs. */
int last_label_uid;
};
/* Defines for accessing the fields of the CFG structure for function FN. */

calls.c

@ -2296,9 +2296,9 @@ expand_call (tree exp, rtx target, int ignore)
/* Ensure current function's preferred stack boundary is at least
what we need. We don't have to increase alignment for recursive
functions. */
if (cfun->preferred_stack_boundary < preferred_stack_boundary
if (crtl->preferred_stack_boundary < preferred_stack_boundary
&& fndecl != current_function_decl)
cfun->preferred_stack_boundary = preferred_stack_boundary;
crtl->preferred_stack_boundary = preferred_stack_boundary;
if (fndecl == current_function_decl)
cfun->recursive_call_emit = true;
@ -2370,7 +2370,7 @@ expand_call (tree exp, rtx target, int ignore)
if (pass && (flags & (ECF_LIBCALL_BLOCK | ECF_MALLOC)))
start_sequence ();
if (pass == 0 && cfun->stack_protect_guard)
if (pass == 0 && crtl->stack_protect_guard)
stack_protect_epilogue ();
adjusted_args_size = args_size;
@ -3346,8 +3346,8 @@ emit_library_call_value_1 (int retval, rtx orgfun, rtx value,
/* Ensure current function's preferred stack boundary is at least
what we need. */
if (cfun->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
cfun->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
if (crtl->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
crtl->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
/* If this kind of value comes back in memory,
decide where in memory it should come back. */

cfgbuild.c

@ -256,7 +256,7 @@ make_edges (basic_block min, basic_block max, int update_p)
/* Heavy use of computed goto in machine-generated code can lead to
nearly fully-connected CFGs. In that case we spend a significant
amount of time searching the edge lists for duplicates. */
if (forced_labels || cfun->max_jumptable_ents > 100)
if (forced_labels || cfun->cfg->max_jumptable_ents > 100)
edge_cache = sbitmap_alloc (last_basic_block);
/* By nature of the way these get numbered, ENTRY_BLOCK_PTR->next_bb block

cfgexpand.c

@ -163,8 +163,8 @@ get_decl_align_unit (tree decl)
align = LOCAL_ALIGNMENT (TREE_TYPE (decl), align);
if (align > PREFERRED_STACK_BOUNDARY)
align = PREFERRED_STACK_BOUNDARY;
if (cfun->stack_alignment_needed < align)
cfun->stack_alignment_needed = align;
if (crtl->stack_alignment_needed < align)
crtl->stack_alignment_needed = align;
return align / BITS_PER_UNIT;
}
@ -978,7 +978,7 @@ create_stack_guard (void)
TREE_THIS_VOLATILE (guard) = 1;
TREE_USED (guard) = 1;
expand_one_stack_var (guard);
cfun->stack_protect_guard = guard;
crtl->stack_protect_guard = guard;
}
/* A subroutine of expand_used_vars. Walk down through the BLOCK tree
@ -1029,8 +1029,8 @@ static void
init_vars_expansion (void)
{
tree t;
/* Set TREE_USED on all variables in the unexpanded_var_list. */
for (t = cfun->unexpanded_var_list; t; t = TREE_CHAIN (t))
/* Set TREE_USED on all variables in the local_decls. */
for (t = cfun->local_decls; t; t = TREE_CHAIN (t))
TREE_USED (TREE_VALUE (t)) = 1;
/* Clear TREE_USED on all variables associated with a block scope. */
@ -1062,9 +1062,9 @@ estimated_stack_frame_size (void)
init_vars_expansion ();
/* At this point all variables on the unexpanded_var_list with TREE_USED
/* At this point all variables on the local_decls with TREE_USED
set are not associated with any block scope. Lay them out. */
for (t = cfun->unexpanded_var_list; t; t = TREE_CHAIN (t))
for (t = cfun->local_decls; t; t = TREE_CHAIN (t))
{
tree var = TREE_VALUE (t);
@ -1113,9 +1113,9 @@ expand_used_vars (void)
init_vars_expansion ();
/* At this point all variables on the unexpanded_var_list with TREE_USED
/* At this point all variables on the local_decls with TREE_USED
set are not associated with any block scope. Lay them out. */
for (t = cfun->unexpanded_var_list; t; t = TREE_CHAIN (t))
for (t = cfun->local_decls; t; t = TREE_CHAIN (t))
{
tree var = TREE_VALUE (t);
bool expand_now = false;
@ -1148,7 +1148,7 @@ expand_used_vars (void)
if (expand_now)
expand_one_var (var, true, true);
}
cfun->unexpanded_var_list = NULL_TREE;
cfun->local_decls = NULL_TREE;
/* At this point, all variables within the block tree with TREE_USED
set are actually used by the optimized function. Lay them out. */
@ -1863,6 +1863,10 @@ tree_expand_cfg (void)
discover_nonconstant_array_refs ();
targetm.expand_to_rtl_hook ();
crtl->stack_alignment_needed = STACK_BOUNDARY;
crtl->preferred_stack_boundary = STACK_BOUNDARY;
cfun->cfg->max_jumptable_ents = 0;
/* Expand the variables recorded during gimple lowering. */
expand_used_vars ();
@ -1873,7 +1877,7 @@ tree_expand_cfg (void)
if (current_function_calls_alloca)
warning (OPT_Wstack_protector,
"not protecting local variables: variable length buffer");
if (has_short_buffer && !cfun->stack_protect_guard)
if (has_short_buffer && !crtl->stack_protect_guard)
warning (OPT_Wstack_protector,
"not protecting function: no buffer at least %d bytes long",
(int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
@ -1891,7 +1895,7 @@ tree_expand_cfg (void)
/* Initialize the stack_protect_guard field. This must happen after the
call to __main (if any) so that the external decl is initialized. */
if (cfun->stack_protect_guard)
if (crtl->stack_protect_guard)
stack_protect_prologue ();
/* Register rtl specific functions for cfg. */
@ -1908,6 +1912,7 @@ tree_expand_cfg (void)
FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb)
bb = expand_gimple_basic_block (bb);
pointer_map_destroy (lab_rtx_for_bb);
free_histograms ();
construct_exit_block ();
set_curr_insn_block (DECL_INITIAL (current_function_decl));
@ -1971,7 +1976,6 @@ tree_expand_cfg (void)
/* After expanding, the return labels are no longer needed. */
return_label = NULL;
naked_return_label = NULL;
free_histograms ();
/* Tag the blocks with a depth number so that change_scope can find
the common parent easily. */
set_block_levels (DECL_INITIAL (cfun->decl), 0);

cfgrtl.c

@ -1651,10 +1651,10 @@ print_rtl_with_bb (FILE *outf, const_rtx rtx_first)
free (in_bb_p);
}
if (current_function_epilogue_delay_list != 0)
if (crtl->epilogue_delay_list != 0)
{
fprintf (outf, "\n;; Insns in epilogue delay list:\n\n");
for (tmp_rtx = current_function_epilogue_delay_list; tmp_rtx != 0;
for (tmp_rtx = crtl->epilogue_delay_list; tmp_rtx != 0;
tmp_rtx = XEXP (tmp_rtx, 1))
print_rtl_single (outf, XEXP (tmp_rtx, 0));
}

cgraphbuild.c

@ -152,7 +152,7 @@ build_cgraph_edges (void)
}
/* Look for initializers of constant variables and private statics. */
for (step = cfun->unexpanded_var_list;
for (step = cfun->local_decls;
step;
step = TREE_CHAIN (step))
{

config/arc/arc.c

@ -1254,7 +1254,7 @@ arc_output_function_prologue (FILE *file, HOST_WIDE_INT size)
static void
arc_output_function_epilogue (FILE *file, HOST_WIDE_INT size)
{
rtx epilogue_delay = current_function_epilogue_delay_list;
rtx epilogue_delay = crtl->epilogue_delay_list;
int noepilogue = FALSE;
enum arc_function_type fn_type = arc_compute_function_type (current_function_decl);

config/i386/i386.c

@ -5060,7 +5060,7 @@ setup_incoming_varargs_64 (CUMULATIVE_ARGS *cum)
We also may end up assuming that only 64bit values are stored in SSE
register let some floating point program work. */
if (ix86_preferred_stack_boundary >= BIGGEST_ALIGNMENT)
cfun->stack_alignment_needed = BIGGEST_ALIGNMENT;
crtl->stack_alignment_needed = BIGGEST_ALIGNMENT;
save_area = frame_pointer_rtx;
set = get_varargs_alias_set ();
@ -6098,8 +6098,8 @@ ix86_compute_frame_layout (struct ix86_frame *frame)
frame->nregs = ix86_nsaved_regs ();
total_size = size;
stack_alignment_needed = cfun->stack_alignment_needed / BITS_PER_UNIT;
preferred_alignment = cfun->preferred_stack_boundary / BITS_PER_UNIT;
stack_alignment_needed = crtl->stack_alignment_needed / BITS_PER_UNIT;
preferred_alignment = crtl->preferred_stack_boundary / BITS_PER_UNIT;
/* During reload iteration the amount of registers saved can change.
Recompute the value as needed. Do not recompute when amount of registers

except.c

@ -2784,7 +2784,7 @@ set_nothrow_function_flags (void)
}
}
for (insn = current_function_epilogue_delay_list; insn;
for (insn = crtl->epilogue_delay_list; insn;
insn = XEXP (insn, 1))
if (can_throw_external (insn))
{
@ -3379,13 +3379,13 @@ push_sleb128 (varray_type *data_area, int value)
static int
dw2_size_of_call_site_table (void)
{
int n = cfun->eh->call_site_data_used;
int n = VEC_length (call_site_record, crtl->eh.call_site_record);
int size = n * (4 + 4 + 4);
int i;
for (i = 0; i < n; ++i)
{
struct call_site_record *cs = &cfun->eh->call_site_data[i];
struct call_site_record *cs = VEC_index (call_site_record, crtl->eh.call_site_record, i);
size += size_of_uleb128 (cs->action);
}
@ -3395,13 +3395,13 @@ dw2_size_of_call_site_table (void)
static int
sjlj_size_of_call_site_table (void)
{
int n = cfun->eh->call_site_data_used;
int n = VEC_length (call_site_record, crtl->eh.call_site_record);
int size = 0;
int i;
for (i = 0; i < n; ++i)
{
struct call_site_record *cs = &cfun->eh->call_site_data[i];
struct call_site_record *cs = VEC_index (call_site_record, crtl->eh.call_site_record, i);
size += size_of_uleb128 (INTVAL (cs->landing_pad));
size += size_of_uleb128 (cs->action);
}

explow.c

@ -1090,7 +1090,7 @@ allocate_dynamic_stack_space (rtx size, rtx target, int known_align)
/* We can't attempt to minimize alignment necessary, because we don't
know the final value of preferred_stack_boundary yet while executing
this code. */
cfun->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
crtl->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
/* We will need to ensure that the address we return is aligned to
BIGGEST_ALIGNMENT. If STACK_DYNAMIC_OFFSET is defined, we don't

expr.c

@ -9923,8 +9923,8 @@ do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
{
rtx temp, vector;
if (INTVAL (range) > cfun->max_jumptable_ents)
cfun->max_jumptable_ents = INTVAL (range);
if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
cfun->cfg->max_jumptable_ents = INTVAL (range);
/* Do an unsigned comparison (in the proper mode) between the index
expression and the value which represents the length of the range.

final.c

@ -3827,7 +3827,7 @@ leaf_function_p (void)
&& ! SIBLING_CALL_P (XVECEXP (PATTERN (insn), 0, 0)))
return 0;
}
for (link = current_function_epilogue_delay_list;
for (link = crtl->epilogue_delay_list;
link;
link = XEXP (link, 1))
{
@ -3911,7 +3911,7 @@ leaf_renumber_regs (rtx first)
for (insn = first; insn; insn = NEXT_INSN (insn))
if (INSN_P (insn))
leaf_renumber_regs_insn (PATTERN (insn));
for (insn = current_function_epilogue_delay_list;
for (insn = crtl->epilogue_delay_list;
insn;
insn = XEXP (insn, 1))
if (INSN_P (XEXP (insn, 0)))
@ -4239,9 +4239,9 @@ rest_of_clean_state (void)
if (targetm.binds_local_p (current_function_decl))
{
int pref = cfun->preferred_stack_boundary;
if (cfun->stack_alignment_needed > cfun->preferred_stack_boundary)
pref = cfun->stack_alignment_needed;
int pref = crtl->preferred_stack_boundary;
if (crtl->stack_alignment_needed > crtl->preferred_stack_boundary)
pref = crtl->stack_alignment_needed;
cgraph_rtl_info (current_function_decl)->preferred_incoming_stack_boundary
= pref;
}

function.c

@ -289,7 +289,6 @@ free_after_compilation (struct function *f)
f->machine = NULL;
f->cfg = NULL;
f->epilogue_delay_list = NULL;
regno_reg_rtx = NULL;
}
@ -379,8 +378,8 @@ assign_stack_local (enum machine_mode mode, HOST_WIDE_INT size, int align)
if (alignment * BITS_PER_UNIT > PREFERRED_STACK_BOUNDARY)
alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
if (cfun->stack_alignment_needed < alignment * BITS_PER_UNIT)
cfun->stack_alignment_needed = alignment * BITS_PER_UNIT;
if (crtl->stack_alignment_needed < alignment * BITS_PER_UNIT)
crtl->stack_alignment_needed = alignment * BITS_PER_UNIT;
/* Calculate how many bytes the start of local variables is off from
stack alignment. */
@ -2379,7 +2378,7 @@ assign_parm_adjust_stack_rtl (struct assign_parm_data_one *data)
/* If stack protection is in effect for this function, don't leave any
pointers in their passed stack slots. */
else if (cfun->stack_protect_guard
else if (crtl->stack_protect_guard
&& (flag_stack_protect == 2
|| data->passed_pointer
|| POINTER_TYPE_P (data->nominal_type)))
@ -3286,8 +3285,8 @@ locate_and_pad_parm (enum machine_mode passed_mode, tree type, int in_regs,
calling function side. */
if (boundary > PREFERRED_STACK_BOUNDARY)
boundary = PREFERRED_STACK_BOUNDARY;
if (cfun->stack_alignment_needed < boundary)
cfun->stack_alignment_needed = boundary;
if (crtl->stack_alignment_needed < boundary)
crtl->stack_alignment_needed = boundary;
#ifdef ARGS_GROW_DOWNWARD
locate->slot_offset.constant = -initial_offset_ptr->constant;
@ -3842,9 +3841,6 @@ allocate_struct_function (tree fndecl, bool abstract_p)
cfun = ggc_alloc_cleared (sizeof (struct function));
cfun->stack_alignment_needed = STACK_BOUNDARY;
cfun->preferred_stack_boundary = STACK_BOUNDARY;
current_function_funcdef_no = get_next_funcdef_no ();
cfun->function_frequency = FUNCTION_FREQUENCY_NORMAL;
@ -4020,9 +4016,9 @@ stack_protect_prologue (void)
/* Avoid expand_expr here, because we don't want guard_decl pulled
into registers unless absolutely necessary. And we know that
cfun->stack_protect_guard is a local stack slot, so this skips
crtl->stack_protect_guard is a local stack slot, so this skips
all the fluff. */
x = validize_mem (DECL_RTL (cfun->stack_protect_guard));
x = validize_mem (DECL_RTL (crtl->stack_protect_guard));
y = validize_mem (DECL_RTL (guard_decl));
/* Allow the target to copy from Y to X without leaking Y into a
@ -4058,9 +4054,9 @@ stack_protect_epilogue (void)
/* Avoid expand_expr here, because we don't want guard_decl pulled
into registers unless absolutely necessary. And we know that
cfun->stack_protect_guard is a local stack slot, so this skips
crtl->stack_protect_guard is a local stack slot, so this skips
all the fluff. */
x = validize_mem (DECL_RTL (cfun->stack_protect_guard));
x = validize_mem (DECL_RTL (crtl->stack_protect_guard));
y = validize_mem (DECL_RTL (guard_decl));
/* Allow the target to compare Y with X without leaking either into
@ -4581,7 +4577,7 @@ expand_function_end (void)
emit_insn (gen_blockage ());
/* If stack protection is enabled for this function, check the guard. */
if (cfun->stack_protect_guard)
if (crtl->stack_protect_guard)
stack_protect_epilogue ();
/* If we had calls to alloca, and this machine needs

function.h

@ -275,6 +275,10 @@ struct rtl_data GTY(())
has_hard_reg_initial_val (see integrate.[hc]). */
struct initial_value_struct *hard_reg_initial_vals;
/* A variable living at the top of the frame that holds a known value.
Used for detecting stack clobbers. */
tree stack_protect_guard;
/* List (chain of EXPR_LIST) of labels heading the current handlers for
nonlocal gotos. */
rtx x_nonlocal_goto_handler_labels;
@ -290,7 +294,7 @@ struct rtl_data GTY(())
rtx x_naked_return_label;
/* List (chain of EXPR_LISTs) of all stack slots in this function.
Made for the sake of unshare_all_crtl-> */
Made for the sake of unshare_all_rtl. */
rtx x_stack_slot_list;
/* Place after which to insert the tail_recursion_label if we need one. */
@ -319,6 +323,17 @@ struct rtl_data GTY(())
/* Current nesting level for temporaries. */
int x_temp_slot_level;
/* The largest alignment of slot allocated on the stack. */
unsigned int stack_alignment_needed;
/* Preferred alignment of the end of stack frame. */
unsigned int preferred_stack_boundary;
/* For reorg. */
/* If some insns can be deferred to the delay slots of the epilogue, the
delay list for them is recorded here. */
rtx epilogue_delay_list;
};
#define return_label (crtl->x_return_label)
@ -378,46 +393,23 @@ struct function GTY(())
/* Function sequence number for profiling, debugging, etc. */
int funcdef_no;
/* List of function local variables, functions, types and constants. */
tree local_decls;
/* For md files. */
/* tm.h can use this to store whatever it likes. */
struct machine_function * GTY ((maybe_undef)) machine;
/* The largest alignment of slot allocated on the stack. */
unsigned int stack_alignment_needed;
/* Preferred alignment of the end of stack frame. */
unsigned int preferred_stack_boundary;
/* Language-specific code can use this to store whatever it likes. */
struct language_function * language;
/* Used types hash table. */
htab_t GTY ((param_is (union tree_node))) used_types_hash;
/* For reorg. */
/* If some insns can be deferred to the delay slots of the epilogue, the
delay list for them is recorded here. */
rtx epilogue_delay_list;
/* Maximal number of entities in the single jumptable. Used to estimate
final flowgraph size. */
int max_jumptable_ents;
/* UIDs for LABEL_DECLs. */
int last_label_uid;
/* Line number of the end of the function. */
location_t function_end_locus;
/* The variables unexpanded so far. */
tree unexpanded_var_list;
/* A variable living at the top of the frame that holds a known value.
Used for detecting stack clobbers. */
tree stack_protect_guard;
/* Properties used by the pass manager. */
unsigned int curr_properties;
unsigned int last_verified;
@ -584,7 +576,6 @@ extern void instantiate_decl_rtl (rtx x);
#define current_function_limit_stack (cfun->limit_stack)
#define current_function_uses_pic_offset_table (cfun->uses_pic_offset_table)
#define current_function_uses_const_pool (cfun->uses_const_pool)
#define current_function_epilogue_delay_list (cfun->epilogue_delay_list)
#define current_function_has_nonlocal_label (cfun->has_nonlocal_label)
#define current_function_saves_all_registers (cfun->saves_all_registers)
#define current_function_has_nonlocal_goto (cfun->has_nonlocal_goto)

gimple-low.c

@ -736,8 +736,8 @@ record_vars_into (tree vars, tree fn)
continue;
/* Record the variable. */
cfun->unexpanded_var_list = tree_cons (NULL_TREE, var,
cfun->unexpanded_var_list);
cfun->local_decls = tree_cons (NULL_TREE, var,
cfun->local_decls);
}
if (fn != current_function_decl)

gimplify.c

@ -175,7 +175,7 @@ push_gimplify_context (void)
/* Tear down a context for the gimplifier. If BODY is non-null, then
put the temporaries into the outer BIND_EXPR. Otherwise, put them
in the unexpanded_var_list. */
in the local_decls. */
void
pop_gimplify_context (tree body)

ipa-reference.c

@ -836,7 +836,7 @@ analyze_function (struct cgraph_node *fn)
if (DECL_STRUCT_FUNCTION (decl))
{
tree step;
for (step = DECL_STRUCT_FUNCTION (decl)->unexpanded_var_list;
for (step = DECL_STRUCT_FUNCTION (decl)->local_decls;
step;
step = TREE_CHAIN (step))
{

ipa-struct-reorg.c

@ -3443,7 +3443,7 @@ build_data_structure (VEC (tree, heap) **unsuitable_types)
add_structure (type);
/* Check function local variables. */
for (var_list = fn->unexpanded_var_list; var_list;
for (var_list = fn->local_decls; var_list;
var_list = TREE_CHAIN (var_list))
{
var = TREE_VALUE (var_list);

ipa-type-escape.c

@ -1752,7 +1752,7 @@ analyze_function (struct cgraph_node *fn)
if (DECL_STRUCT_FUNCTION (decl))
{
tree step;
for (step = DECL_STRUCT_FUNCTION (decl)->unexpanded_var_list;
for (step = DECL_STRUCT_FUNCTION (decl)->local_decls;
step;
step = TREE_CHAIN (step))
{

omp-low.c

@ -2626,7 +2626,7 @@ expand_omp_parallel (struct omp_region *region)
/* Declare local variables needed in CHILD_CFUN. */
block = DECL_INITIAL (child_fn);
BLOCK_VARS (block) = list2chain (child_cfun->unexpanded_var_list);
BLOCK_VARS (block) = list2chain (child_cfun->local_decls);
DECL_SAVED_TREE (child_fn) = bb_stmt_list (single_succ (entry_bb));
/* Reset DECL_CONTEXT on function arguments. */

reload1.c

@ -1012,7 +1012,7 @@ reload (rtx first, int global)
/* If we allocated another stack slot, redo elimination bookkeeping. */
if (starting_frame_size != get_frame_size ())
continue;
if (starting_frame_size && cfun->stack_alignment_needed)
if (starting_frame_size && crtl->stack_alignment_needed)
{
/* If we have a stack frame, we must align it now. The
stack size may be a part of the offset computation for
@ -1022,7 +1022,7 @@ reload (rtx first, int global)
stack frame when none is needed should
STARTING_FRAME_OFFSET not be already aligned to
STACK_BOUNDARY. */
assign_stack_local (BLKmode, 0, cfun->stack_alignment_needed);
assign_stack_local (BLKmode, 0, crtl->stack_alignment_needed);
if (starting_frame_size != get_frame_size ())
continue;
}

reorg.c

@ -428,7 +428,7 @@ find_end_label (void)
epilogue has filled delay-slots; we would have to try and
move the delay-slot fillers to the delay-slots for the new
return insn or in front of the new return insn. */
if (current_function_epilogue_delay_list == NULL
if (crtl->epilogue_delay_list == NULL
&& HAVE_return)
{
/* The return we make may have delay slots too. */
@ -792,7 +792,7 @@ optimize_skip (rtx insn)
In both of these cases, inverting the jump and annulling the delay
slot give the same effect in fewer insns. */
if ((next_trial == next_active_insn (JUMP_LABEL (insn))
&& ! (next_trial == 0 && current_function_epilogue_delay_list != 0))
&& ! (next_trial == 0 && crtl->epilogue_delay_list != 0))
|| (next_trial != 0
&& JUMP_P (next_trial)
&& JUMP_LABEL (insn) == JUMP_LABEL (next_trial)
@ -2410,7 +2410,7 @@ fill_simple_delay_slots (int non_jumps_p)
The only thing we can do is scan backwards from the end of the
function. If we did this in a previous pass, it is incorrect to do it
again. */
if (current_function_epilogue_delay_list)
if (crtl->epilogue_delay_list)
return;
slots_to_fill = DELAY_SLOTS_FOR_EPILOGUE;
@ -2470,9 +2470,9 @@ fill_simple_delay_slots (int non_jumps_p)
/* Here as well we are searching backward, so put the
insns we find on the head of the list. */
current_function_epilogue_delay_list
crtl->epilogue_delay_list
= gen_rtx_INSN_LIST (VOIDmode, trial,
current_function_epilogue_delay_list);
crtl->epilogue_delay_list);
mark_end_of_function_resources (trial, 1);
update_block (trial, trial);
delete_related_insns (trial);
@ -3695,7 +3695,7 @@ make_return_insns (rtx first)
delay slot filler insns. It is also unknown whether such a
transformation would actually be profitable. Note that the existing
code only cares for branches with (some) filled delay slots. */
if (current_function_epilogue_delay_list != NULL)
if (crtl->epilogue_delay_list != NULL)
return;
#endif
@ -4036,7 +4036,7 @@ dbr_schedule (rtx first)
{
rtx link;
for (link = current_function_epilogue_delay_list;
for (link = crtl->epilogue_delay_list;
link;
link = XEXP (link, 1))
INSN_LOCATOR (XEXP (link, 0)) = 0;

tree-cfg.c

@ -2697,7 +2697,7 @@ set_bb_for_stmt (tree t, basic_block bb)
if (uid == -1)
{
unsigned old_len = VEC_length (basic_block, label_to_block_map);
LABEL_DECL_UID (t) = uid = cfun->last_label_uid++;
LABEL_DECL_UID (t) = uid = cfun->cfg->last_label_uid++;
if (old_len <= (unsigned) uid)
{
unsigned new_len = 3 * uid / 2;
@ -5550,8 +5550,7 @@ replace_by_duplicate_decl (tree *tp, struct pointer_map_t *vars_map,
if (SSA_VAR_P (t))
{
new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
f->unexpanded_var_list
= tree_cons (NULL_TREE, new_t, f->unexpanded_var_list);
f->local_decls = tree_cons (NULL_TREE, new_t, f->local_decls);
}
else
{
@ -5844,8 +5843,8 @@ move_block_to_fn (struct function *dest_cfun, basic_block bb,
gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
if (uid >= dest_cfun->last_label_uid)
dest_cfun->last_label_uid = uid + 1;
if (uid >= dest_cfun->cfg->last_label_uid)
dest_cfun->cfg->last_label_uid = uid + 1;
}
else if (TREE_CODE (stmt) == RESX_EXPR && eh_offset != 0)
TREE_OPERAND (stmt, 0) =
@ -5918,8 +5917,8 @@ new_label_mapper (tree decl, void *data)
m->base.from = decl;
m->to = create_artificial_label ();
LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
if (LABEL_DECL_UID (m->to) >= cfun->last_label_uid)
cfun->last_label_uid = LABEL_DECL_UID (m->to) + 1;
if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
gcc_assert (*slot == NULL);
@ -6161,12 +6160,12 @@ dump_function_to_file (tree fn, FILE *file, int flags)
/* When GIMPLE is lowered, the variables are no longer available in
BIND_EXPRs, so display them separately. */
if (cfun && cfun->decl == fn && cfun->unexpanded_var_list)
if (cfun && cfun->decl == fn && cfun->local_decls)
{
ignore_topmost_bind = true;
fprintf (file, "{\n");
for (vars = cfun->unexpanded_var_list; vars; vars = TREE_CHAIN (vars))
for (vars = cfun->local_decls; vars; vars = TREE_CHAIN (vars))
{
var = TREE_VALUE (vars);

tree-inline.c

@ -442,14 +442,14 @@ remap_decls (tree decls, copy_body_data *id)
{
tree new_var;
/* We can not chain the local static declarations into the unexpanded_var_list
/* We can not chain the local static declarations into the local_decls
as we can't duplicate them or break one decl rule. Go ahead and link
them into unexpanded_var_list. */
them into local_decls. */
if (!auto_var_in_fn_p (old_var, id->src_fn)
&& !DECL_EXTERNAL (old_var))
{
cfun->unexpanded_var_list = tree_cons (NULL_TREE, old_var,
cfun->unexpanded_var_list);
cfun->local_decls = tree_cons (NULL_TREE, old_var,
cfun->local_decls);
continue;
}
@ -1277,7 +1277,7 @@ initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count,
*new_cfun = *DECL_STRUCT_FUNCTION (callee_fndecl);
new_cfun->funcdef_no = get_next_funcdef_no ();
VALUE_HISTOGRAMS (new_cfun) = NULL;
new_cfun->unexpanded_var_list = NULL;
new_cfun->local_decls = NULL;
new_cfun->cfg = NULL;
new_cfun->decl = new_fndecl /*= copy_node (callee_fndecl)*/;
DECL_STRUCT_FUNCTION (new_fndecl) = new_cfun;
@ -1811,9 +1811,9 @@ declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
}
DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
DECL_STRUCT_FUNCTION (caller)->unexpanded_var_list
DECL_STRUCT_FUNCTION (caller)->local_decls
= tree_cons (NULL_TREE, var,
DECL_STRUCT_FUNCTION (caller)->unexpanded_var_list);
DECL_STRUCT_FUNCTION (caller)->local_decls);
/* Do not have the rest of GCC warn about this variable as it should
not be visible to the user. */
@ -2040,7 +2040,7 @@ inline_forbidden_p (tree fndecl)
goto egress;
}
for (step = fun->unexpanded_var_list; step; step = TREE_CHAIN (step))
for (step = fun->local_decls; step; step = TREE_CHAIN (step))
{
tree decl = TREE_VALUE (step);
if (TREE_CODE (decl) == VAR_DECL
@ -2831,16 +2831,16 @@ expand_call_inline (basic_block bb, tree stmt, tree *tp, void *data)
copy_body (id, bb->count, bb->frequency, bb, return_block);
/* Add local vars in this inlined callee to caller. */
t_step = id->src_cfun->unexpanded_var_list;
t_step = id->src_cfun->local_decls;
for (; t_step; t_step = TREE_CHAIN (t_step))
{
var = TREE_VALUE (t_step);
if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
cfun->unexpanded_var_list = tree_cons (NULL_TREE, var,
cfun->unexpanded_var_list);
cfun->local_decls = tree_cons (NULL_TREE, var,
cfun->local_decls);
else
cfun->unexpanded_var_list = tree_cons (NULL_TREE, remap_decl (var, id),
cfun->unexpanded_var_list);
cfun->local_decls = tree_cons (NULL_TREE, remap_decl (var, id),
cfun->local_decls);
}
/* Clean up. */
@ -3340,9 +3340,7 @@ declare_inline_vars (tree block, tree vars)
{
DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
cfun->unexpanded_var_list =
tree_cons (NULL_TREE, t,
cfun->unexpanded_var_list);
cfun->local_decls = tree_cons (NULL_TREE, t, cfun->local_decls);
}
if (block)
@ -3615,19 +3613,18 @@ tree_function_versioning (tree old_decl, tree new_decl, varray_type tree_map,
/* Renumber the lexical scoping (non-code) blocks consecutively. */
number_blocks (id.dst_fn);
if (DECL_STRUCT_FUNCTION (old_decl)->unexpanded_var_list != NULL_TREE)
if (DECL_STRUCT_FUNCTION (old_decl)->local_decls != NULL_TREE)
/* Add local vars. */
for (t_step = DECL_STRUCT_FUNCTION (old_decl)->unexpanded_var_list;
for (t_step = DECL_STRUCT_FUNCTION (old_decl)->local_decls;
t_step; t_step = TREE_CHAIN (t_step))
{
tree var = TREE_VALUE (t_step);
if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
cfun->unexpanded_var_list = tree_cons (NULL_TREE, var,
cfun->unexpanded_var_list);
cfun->local_decls = tree_cons (NULL_TREE, var, cfun->local_decls);
else
cfun->unexpanded_var_list =
cfun->local_decls =
tree_cons (NULL_TREE, remap_decl (var, &id),
cfun->unexpanded_var_list);
cfun->local_decls);
}
/* Copy the Function's body. */

tree-pass.h

@ -229,8 +229,8 @@ struct dump_file_info
#define TODO_update_ssa_only_virtuals (1 << 14)
/* Some passes leave unused local variables that can be removed from
cfun->unexpanded_var_list. This reduces the size of dump files and
the memory footprint for VAR_DECLs. */
cfun->local_decls. This reduces the size of dump files
and the memory footprint for VAR_DECLs. */
#define TODO_remove_unused_locals (1 << 15)
/* Internally used for the first in a sequence of passes. It is set

tree-ssa-live.c

@ -616,8 +616,8 @@ remove_unused_locals (void)
}
}
/* Remove unmarked local vars from unexpanded_var_list. */
for (cell = &cfun->unexpanded_var_list; *cell; )
/* Remove unmarked local vars from local_decls. */
for (cell = &cfun->local_decls; *cell; )
{
tree var = TREE_VALUE (*cell);
@ -640,10 +640,10 @@ remove_unused_locals (void)
cell = &TREE_CHAIN (*cell);
}
/* Remove unmarked global vars from unexpanded_var_list. */
/* Remove unmarked global vars from local_decls. */
if (global_unused_vars != NULL)
{
for (t = cfun->unexpanded_var_list; t; t = TREE_CHAIN (t))
for (t = cfun->local_decls; t; t = TREE_CHAIN (t))
{
tree var = TREE_VALUE (t);
@ -654,7 +654,7 @@ remove_unused_locals (void)
mark_all_vars_used (&DECL_INITIAL (var), global_unused_vars);
}
for (cell = &cfun->unexpanded_var_list; *cell; )
for (cell = &cfun->local_decls; *cell; )
{
tree var = TREE_VALUE (*cell);

varasm.c

@ -3809,7 +3809,7 @@ mark_constant_pool (void)
for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
mark_constants (insn);
for (link = current_function_epilogue_delay_list;
for (link = crtl->epilogue_delay_list;
link;
link = XEXP (link, 1))
mark_constants (XEXP (link, 0));