cgraph.h (cgraph_decide_inlining_incrementally): Kill.
        * cgraph.h (cgraph_decide_inlining_incrementally): Kill.
        * tree-pass.h: Reorder to make IPA passes appear together.
        (pass_early_inline, pass_inline_parameters, pass_apply_inline): Declare.
        * cgraphunit.c (cgraph_finalize_function): Do not compute inlining
        parameters, do not call early inliner.
        * ipa-inline.c: Update comments.  Include tree-flow.h.
        (cgraph_decide_inlining): Do not compute inlining parameters.
        (cgraph_decide_inlining_incrementally): Return TODOs; assume to
        be called with function context set up.
        (pass_ipa_inline): Remove unreachable functions before pass.
        (cgraph_early_inlining): Simplify assuming to be called from the
        PM as local pass.
        (pass_early_inline): New pass.
        (cgraph_gate_ipa_early_inlining): New gate.
        (pass_ipa_early_inline): Turn into simple wrapper.
        (compute_inline_parameters): New function.
        (gate_inline_passes): New gate.
        (pass_inline_parameters): New pass.
        (apply_inline): Move here from tree-optimize.c.
        (pass_apply_inline): New pass.
        * ipa.c (cgraph_remove_unreachable_nodes): Verify cgraph after
        transforming.
        * tree-inline.c (optimize_inline_calls): Return TODOs rather than
        doing them by hand.
        (tree_function_versioning): Do not allocate dummy struct function.
        * tree-inline.h (optimize_inline_calls): Update prototype.
        * tree-optimize.c (execute_fixup_cfg): Export.
        (pass_fixup_cfg): Remove.
        (tree_rest_of_compilation): Do not apply inlines.
        * tree-flow.h (execute_fixup_cfg): Declare.
        * Makefile.in (gt-passes.c): New.
        * passes.c: Include gt-passes.h.
        (init_optimization_passes): New passes.
        (nnodes, order): New static vars.
        (do_per_function_toporder): New function.
        (execute_one_pass): Dump current pass here.
        (execute_ipa_pass_list): Don't dump current pass here.

From-SVN: r120835
Parent: 2797f081d4
Commit: 873aa8f548
gcc/ChangeLog

@@ -1,3 +1,43 @@
2007-01-16  Jan Hubicka  <jh@suse.cz>

        * cgraph.h (cgraph_decide_inlining_incrementally): Kill.
        * tree-pass.h: Reorder to make IPA passes appear together.
        (pass_early_inline, pass_inline_parameters, pass_apply_inline): Declare.
        * cgraphunit.c (cgraph_finalize_function): Do not compute inlining
        parameters, do not call early inliner.
        * ipa-inline.c: Update comments.  Include tree-flow.h.
        (cgraph_decide_inlining): Do not compute inlining parameters.
        (cgraph_decide_inlining_incrementally): Return TODOs; assume to
        be called with function context set up.
        (pass_ipa_inline): Remove unreachable functions before pass.
        (cgraph_early_inlining): Simplify assuming to be called from the
        PM as local pass.
        (pass_early_inline): New pass.
        (cgraph_gate_ipa_early_inlining): New gate.
        (pass_ipa_early_inline): Turn into simple wrapper.
        (compute_inline_parameters): New function.
        (gate_inline_passes): New gate.
        (pass_inline_parameters): New pass.
        (apply_inline): Move here from tree-optimize.c.
        (pass_apply_inline): New pass.
        * ipa.c (cgraph_remove_unreachable_nodes): Verify cgraph after
        transforming.
        * tree-inline.c (optimize_inline_calls): Return TODOs rather than
        doing them by hand.
        (tree_function_versioning): Do not allocate dummy struct function.
        * tree-inline.h (optimize_inline_calls): Update prototype.
        * tree-optimize.c (execute_fixup_cfg): Export.
        (pass_fixup_cfg): Remove.
        (tree_rest_of_compilation): Do not apply inlines.
        * tree-flow.h (execute_fixup_cfg): Declare.
        * Makefile.in (gt-passes.c): New.
        * passes.c: Include gt-passes.h.
        (init_optimization_passes): New passes.
        (nnodes, order): New static vars.
        (do_per_function_toporder): New function.
        (execute_one_pass): Dump current pass here.
        (execute_ipa_pass_list): Don't dump current pass here.

2007-01-16  Janis Johnson  <janis187@us.ibm.com>

        * config/dfp-bit.c (dfp_compare_op): Return separate value for NaN.
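As an aside to the ChangeLog above: the patch hinges on the pass-manager pattern of passes chained through a next pointer, with per-function subpasses hanging off a sub pointer (pass_ipa_early_inline becomes a "simple wrapper" in exactly this sense). Below is a small self-contained sketch of that pattern in plain C. It is illustrative only, not GCC code; struct toy_pass, gate_on, exec_noop, and run_pass_list are invented stand-ins for struct tree_opt_pass, the gate callbacks, the execute callbacks, and execute_ipa_pass_list/execute_pass_list.

#include <stdio.h>

/* Toy pass descriptor, loosely modeled on struct tree_opt_pass.  */
struct toy_pass
{
  const char *name;
  int (*gate) (void);                /* NULL means "always run".  */
  unsigned int (*execute) (void);    /* Returns TODO-style flags.  */
  struct toy_pass *sub;              /* Per-function subpasses.  */
  struct toy_pass *next;             /* Next pass at this level.  */
};

static int gate_on (void) { return 1; }
static unsigned int exec_noop (void) { printf ("running\n"); return 0; }

/* Run a list of passes; for each pass whose gate accepts, recurse into
   its subpasses (in GCC the subpasses are run once per function).  */
static void
run_pass_list (struct toy_pass *pass)
{
  for (; pass; pass = pass->next)
    {
      if (pass->gate && !pass->gate ())
        continue;
      printf ("<%s> ", pass->name);
      pass->execute ();
      if (pass->sub)
        run_pass_list (pass->sub);
    }
}

int
main (void)
{
  struct toy_pass early_inline   = { "einline", gate_on, exec_noop, NULL, NULL };
  struct toy_pass inline_params  = { "inline_param", gate_on, exec_noop, NULL, NULL };
  struct toy_pass ipa_early_wrap = { "einline_ipa", gate_on, exec_noop, NULL, NULL };

  /* Wire the wrapper pass to its per-function subpasses, analogous to
     what init_optimization_passes does with NEXT_PASS below.  */
  early_inline.next = &inline_params;
  ipa_early_wrap.sub = &early_inline;
  run_pass_list (&ipa_early_wrap);
  return 0;
}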
gcc/Makefile.in

@@ -2103,7 +2103,8 @@ passes.o : passes.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(TREE_H) \
   $(PARAMS_H) $(TM_P_H) reload.h dwarf2asm.h $(TARGET_H) \
   langhooks.h insn-flags.h $(CFGLAYOUT_H) $(REAL_H) $(CFGLOOP_H) \
   hosthooks.h $(CGRAPH_H) $(COVERAGE_H) tree-pass.h $(TREE_DUMP_H) \
   $(GGC_H) $(INTEGRATE_H) $(CPPLIB_H) opts.h $(TREE_FLOW_H) $(TREE_INLINE_H)
   $(GGC_H) $(INTEGRATE_H) $(CPPLIB_H) opts.h $(TREE_FLOW_H) $(TREE_INLINE_H) \
   gt-passes.h

main.o : main.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) toplev.h

@@ -2819,7 +2820,7 @@ GTFILES = $(srcdir)/input.h $(srcdir)/coretypes.h \
  $(srcdir)/ipa-reference.c $(srcdir)/tree-ssa-structalias.h \
  $(srcdir)/tree-ssa-structalias.c \
  $(srcdir)/c-pragma.h $(srcdir)/omp-low.c $(srcdir)/varpool.c \
  $(srcdir)/targhooks.c $(out_file) \
  $(srcdir)/targhooks.c $(out_file) $(srcdir)/passes.c \
  @all_gtfiles@

GTFILES_FILES_LANGS = @all_gtfiles_files_langs@

@@ -2850,7 +2851,7 @@ gt-tree-profile.h gt-tree-ssa-address.h \
gt-tree-iterator.h gt-gimplify.h \
gt-tree-phinodes.h gt-tree-nested.h \
gt-tree-ssa-propagate.h gt-varpool.h \
gt-tree-ssa-structalias.h gt-ipa-inline.h \
gt-tree-ssa-structalias.h gt-ipa-inline.h gt-passes.h \
gt-stringpool.h gt-targhooks.h gt-omp-low.h : s-gtype ; @true

define echo_quoted_to_gtyp
gcc/cgraph.h

@@ -395,7 +395,6 @@ varpool_next_static_initializer (struct varpool_node *node)
     (node) = varpool_next_static_initializer (node))

/* In ipa-inline.c  */
bool cgraph_decide_inlining_incrementally (struct cgraph_node *, bool);
void cgraph_clone_inlined_nodes (struct cgraph_edge *, bool, bool);
void cgraph_mark_inline_edge (struct cgraph_edge *, bool);
bool cgraph_default_inline_p (struct cgraph_node *, const char **);
gcc/cgraphunit.c

@@ -455,10 +455,7 @@ cgraph_finalize_function (tree decl, bool nested)
  /* If not unit at a time, then we need to create the call graph
     now, so that called functions can be queued and emitted now.  */
  if (!flag_unit_at_a_time)
    {
      cgraph_analyze_function (node);
      cgraph_decide_inlining_incrementally (node, false);
    }
    cgraph_analyze_function (node);

  if (decide_is_function_needed (node, decl))
    cgraph_mark_needed_node (node);
gcc/ipa-inline.c (271 lines changed)

@@ -61,7 +61,64 @@ Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA

   cgraph_decide_inlining implements heuristics taking whole callgraph
   into account, while cgraph_decide_inlining_incrementally considers
   only one function at a time and is used in non-unit-at-a-time mode.  */
   only one function at a time and is used in non-unit-at-a-time mode.

   The inliner itself is split into several passes:

   pass_inline_parameters

     This pass computes local properties of functions that are used by the
     inliner: estimated function body size, whether the function is inlinable
     at all, and stack frame consumption.

     Before executing any of the inliner passes, this local pass has to be
     applied to each function in the callgraph (i.e. run as a subpass of some
     earlier IPA pass).  The results are made out of date by any optimization
     applied to the function body.

   pass_early_inlining

     Simple local inlining pass inlining callees into the current function.
     This pass makes no whole-compilation-unit analysis, so when it is allowed
     to do inlining that expands code size, it might result in unbounded
     growth of the whole unit.

     This is the main inlining pass in non-unit-at-a-time mode.

     With unit-at-a-time the pass is run during conversion into SSA form.
     Only functions already converted into SSA form are inlined, so the
     conversion must happen in topological order on the callgraph (that is
     maintained by the pass manager).  The functions are early optimized
     after inlining, so the early inliner sees the unoptimized function
     itself, but all considered callees are already optimized, allowing it
     to unfold the abstraction penalty on C++ effectively and cheaply.

   pass_ipa_early_inlining

     With profiling, the early inlining is also necessary to reduce
     instrumentation costs on programs with a high abstraction penalty
     (doing many redundant calls).  This can't happen in parallel with early
     optimization and profile instrumentation, because we would end up
     re-instrumenting already instrumented function bodies we brought in via
     inlining.

     To avoid this, this pass is executed as an IPA pass before profiling.
     It is a simple wrapper around pass_early_inlining and ensures the first
     round of inlining.

   pass_ipa_inline

     This is the main pass implementing a simple greedy algorithm to do
     inlining of small functions that results in overall growth of the
     compilation unit, and inlining of functions called once.  The pass
     computes just the so-called inline plan (a representation of the
     inlining to be done in the callgraph) and, unlike early inlining,
     does not perform the inlining itself.

   pass_apply_inline

     This pass performs the actual inlining according to pass_ipa_inline on a
     given function.  Possibly the function body before inlining is saved
     when it is needed for further inlining later.  */

#include "config.h"
#include "system.h"
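To make the "abstraction penalty" mentioned in the overview above concrete, here is a short illustrative C fragment (not part of the patch; the function names are invented). Each accessor is trivial on its own, but only after early inlining and folding can the caller collapse to a constant:

/* A tiny accessor-style abstraction: each function is trivial on its own,
   but without inlining every call costs a frame and an indirection.  */
struct point { int x, y; };

static inline int get_x (const struct point *p) { return p->x; }
static inline int get_y (const struct point *p) { return p->y; }

int
manhattan_from_origin (void)
{
  struct point p = { 3, 4 };
  /* After early inlining and constant folding this whole body can become
     "return 7;" -- the abstraction penalty has been unfolded.  */
  return get_x (&p) + get_y (&p);
}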
@@ -81,6 +138,7 @@ Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
#include "hashtab.h"
#include "coverage.h"
#include "ggc.h"
#include "tree-flow.h"

/* Statistics we collect about inlining algorithm.  */
static int ncalls_inlined;

@@ -931,13 +989,6 @@ cgraph_decide_inlining (void)
    {
      struct cgraph_edge *e;

      /* At the moment, no IPA passes change function bodies before inlining.
         Save some time by not recomputing function body sizes if early
         inlining already did so.  */
      if (!flag_early_inlining)
        node->local.self_insns = node->global.insns
          = estimate_num_insns (node->decl);

      initial_insns += node->local.self_insns;
      gcc_assert (node->local.self_insns == node->global.insns);
      for (e = node->callees; e; e = e->next_callee)
@@ -1088,17 +1139,24 @@ cgraph_decide_inlining (void)
/* Decide on the inlining.  We do so in the topological order to avoid
   expenses on updating data structures.  */

bool
static unsigned int
cgraph_decide_inlining_incrementally (struct cgraph_node *node, bool early)
{
  struct cgraph_edge *e;
  bool inlined = false;
  const char *failed_reason;
  unsigned int todo = 0;

#ifdef ENABLE_CHECKING
  verify_cgraph_node (node);
#endif

  /* First of all look for always inline functions.  */
  for (e = node->callees; e; e = e->next_callee)
    if (e->callee->local.disregard_inline_limits
        && e->inline_failed
        && (gimple_in_ssa_p (DECL_STRUCT_FUNCTION (node->decl))
            == gimple_in_ssa_p (DECL_STRUCT_FUNCTION (e->callee->decl)))
        && !cgraph_recursive_inlining_p (node, e->callee, &e->inline_failed)
        /* ??? It is possible that renaming variable removed the function body
           in duplicate_decls.  See gcc.c-torture/compile/20011119-2.c  */

@@ -1111,6 +1169,13 @@ cgraph_decide_inlining_incrementally (struct cgraph_node *node, bool early)
          fprintf (dump_file, " into %s\n", cgraph_node_name (node));
        }
      cgraph_mark_inline (e);
      /* In order to fully inline always_inline functions at -O0, we need to
         recurse here, since the inlined functions might not be processed by
         incremental inlining at all yet.  */
      if (!flag_unit_at_a_time)
        cgraph_decide_inlining_incrementally (e->callee, early);

      inlined = true;
    }

@@ -1121,6 +1186,8 @@ cgraph_decide_inlining_incrementally (struct cgraph_node *node, bool early)
        && e->inline_failed
        && !e->callee->local.disregard_inline_limits
        && !cgraph_recursive_inlining_p (node, e->callee, &e->inline_failed)
        && (gimple_in_ssa_p (DECL_STRUCT_FUNCTION (node->decl))
            == gimple_in_ssa_p (DECL_STRUCT_FUNCTION (e->callee->decl)))
        && (!early
            || (cgraph_estimate_size_after_inlining (1, e->caller, e->callee)
                <= e->caller->global.insns))

@@ -1142,19 +1209,13 @@ cgraph_decide_inlining_incrementally (struct cgraph_node *node, bool early)
      else if (!early)
        e->inline_failed = failed_reason;
    }
  if (early && inlined)
  if (early && inlined && !node->global.inlined_to)
    {
      timevar_push (TV_INTEGRATION);
      push_cfun (DECL_STRUCT_FUNCTION (node->decl));
      tree_register_cfg_hooks ();
      current_function_decl = node->decl;
      optimize_inline_calls (current_function_decl);
      node->local.self_insns = node->global.insns;
      current_function_decl = NULL;
      pop_cfun ();
      todo = optimize_inline_calls (current_function_decl);
      timevar_pop (TV_INTEGRATION);
    }
  return inlined;
  return todo;
}

/* When inlining shall be performed.  */
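The key interface change in the hunks above is that the incremental inliner now reports follow-up work as a returned bitmask instead of doing it by hand. Here is a self-contained sketch of that protocol; the flag names and functions are invented for illustration and are not the real TODO_* values or GCC APIs:

#include <stdio.h>

/* Invented stand-ins for GCC's TODO_* flags.  */
enum
{
  MY_TODO_UPDATE_SSA  = 1 << 0,
  MY_TODO_CLEANUP_CFG = 1 << 1,
  MY_TODO_DUMP        = 1 << 2
};

/* A local transformation returns the clean-ups it made necessary
   instead of performing them itself.  */
static unsigned int
toy_inline_calls (int inlined_something)
{
  unsigned int todo = 0;
  if (inlined_something)
    todo |= MY_TODO_UPDATE_SSA | MY_TODO_CLEANUP_CFG;
  return todo;
}

/* The caller (in GCC, the pass manager) collects and dispatches them.  */
int
main (void)
{
  unsigned int todo = toy_inline_calls (1) | MY_TODO_DUMP;
  if (todo & MY_TODO_UPDATE_SSA)
    printf ("would update SSA form\n");
  if (todo & MY_TODO_CLEANUP_CFG)
    printf ("would clean up the CFG\n");
  if (todo & MY_TODO_DUMP)
    printf ("would dump the function\n");
  return 0;
}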
@@ -1176,7 +1237,7 @@ struct tree_opt_pass pass_ipa_inline =
  0,                                    /* properties_required */
  PROP_cfg,                             /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_remove_functions,                /* todo_flags_finish */
  TODO_dump_cgraph | TODO_dump_func
  | TODO_remove_functions,              /* todo_flags_finish */
  0                                     /* letter */
@@ -1194,44 +1255,11 @@ static GTY ((length ("nnodes"))) struct cgraph_node **order;
static unsigned int
cgraph_early_inlining (void)
{
  struct cgraph_node *node;
  int i;
  struct cgraph_node *node = cgraph_node (current_function_decl);

  if (sorrycount || errorcount)
    return 0;
#ifdef ENABLE_CHECKING
  for (node = cgraph_nodes; node; node = node->next)
    gcc_assert (!node->aux);
#endif

  order = ggc_alloc (sizeof (*order) * cgraph_n_nodes);
  nnodes = cgraph_postorder (order);
  for (i = nnodes - 1; i >= 0; i--)
    {
      node = order[i];
      if (node->analyzed && (node->needed || node->reachable))
        node->local.self_insns = node->global.insns
          = estimate_num_insns (node->decl);
    }
  for (i = nnodes - 1; i >= 0; i--)
    {
      node = order[i];
      if (node->analyzed && node->local.inlinable
          && (node->needed || node->reachable)
          && node->callers)
        {
          if (cgraph_decide_inlining_incrementally (node, true))
            ggc_collect ();
        }
    }
#ifdef ENABLE_CHECKING
  for (node = cgraph_nodes; node; node = node->next)
    gcc_assert (!node->global.inlined_to);
#endif
  ggc_free (order);
  order = NULL;
  nnodes = 0;
  return 0;
  return cgraph_decide_inlining_incrementally (node, flag_unit_at_a_time);
}

/* When inlining shall be performed.  */
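The hunk above shrinks cgraph_early_inlining from a whole-callgraph loop into a per-function pass body: the pass manager now walks the graph and supplies the function context, and the pass only handles the current node. A toy model of that division of labor follows; it is not GCC code, and the comments map each invented toy_* name to the real mechanism it stands in for:

#include <stdio.h>

/* Toy model: the driver (pass manager) walks the functions and sets the
   "current function"; the pass body only handles that one function.  */
struct toy_node { const char *name; int inlined; };

static struct toy_node *toy_current_node;   /* set by the driver */

static unsigned int
toy_early_inlining (void)
{
  struct toy_node *node = toy_current_node;  /* cgraph_node (current_function_decl) */
  node->inlined = 1;                         /* cgraph_decide_inlining_incrementally (...) */
  return 0;
}

static void
toy_driver (struct toy_node *nodes, int n)
{
  int i;
  for (i = 0; i < n; i++)
    {
      toy_current_node = &nodes[i];          /* push_cfun / current_function_decl */
      toy_early_inlining ();
    }
}

int
main (void)
{
  struct toy_node nodes[2] = { { "foo", 0 }, { "bar", 0 } };
  toy_driver (nodes, 2);
  printf ("%s:%d %s:%d\n", nodes[0].name, nodes[0].inlined,
          nodes[1].name, nodes[1].inlined);
  return 0;
}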
@@ -1241,7 +1269,7 @@ cgraph_gate_early_inlining (void)
  return flag_inline_trees && flag_early_inlining;
}

struct tree_opt_pass pass_early_ipa_inline =
struct tree_opt_pass pass_early_inline =
{
  "einline",                            /* name */
  cgraph_gate_early_inlining,           /* gate */
@@ -1254,8 +1282,137 @@ struct tree_opt_pass pass_early_ipa_inline =
  PROP_cfg,                             /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_cgraph | TODO_dump_func
  | TODO_remove_functions,              /* todo_flags_finish */
  TODO_dump_func,                       /* todo_flags_finish */
  0                                     /* letter */
};

/* When inlining shall be performed.  */
static bool
cgraph_gate_ipa_early_inlining (void)
{
  return (flag_inline_trees && flag_early_inlining
          && (flag_branch_probabilities || flag_test_coverage
              || profile_arc_flag));
}

/* IPA pass wrapper for early inlining pass.  We need to run early inlining
   before tree profiling so we have stand alone IPA pass for doing so.  */
struct tree_opt_pass pass_ipa_early_inline =
{
  "einline_ipa",                        /* name */
  cgraph_gate_ipa_early_inlining,       /* gate */
  NULL,                                 /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_INLINE_HEURISTICS,                 /* tv_id */
  0,                                    /* properties_required */
  PROP_cfg,                             /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_cgraph,                     /* todo_flags_finish */
  0                                     /* letter */
};

/* Compute parameters of functions used by inliner.  */
static unsigned int
compute_inline_parameters (void)
{
  struct cgraph_node *node = cgraph_node (current_function_decl);

  gcc_assert (!node->global.inlined_to);
  node->local.estimated_self_stack_size = estimated_stack_frame_size ();
  node->global.estimated_stack_size = node->local.estimated_self_stack_size;
  node->global.stack_frame_offset = 0;
  node->local.inlinable = tree_inlinable_function_p (current_function_decl);
  node->local.self_insns = estimate_num_insns (current_function_decl);
  if (node->local.inlinable)
    node->local.disregard_inline_limits
      = lang_hooks.tree_inlining.disregard_inline_limits (current_function_decl);
  if (flag_really_no_inline && !node->local.disregard_inline_limits)
    node->local.inlinable = 0;
  /* Inlining characteristics are maintained by the cgraph_mark_inline.  */
  node->global.insns = node->local.self_insns;
  return 0;
}
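compute_inline_parameters above records a per-function summary (estimated size, stack use, inlinability) that the later IPA decisions read. A small self-contained analogue follows; the struct and helpers are invented for illustration, with comments noting which real GCC routine each stand-in corresponds to:

#include <string.h>
#include <stdio.h>

/* Invented per-function summary, standing in for the local/global inline
   info that compute_inline_parameters fills into the cgraph node.  */
struct toy_summary
{
  int self_insns;      /* estimated body size */
  int stack_size;      /* estimated frame size */
  int inlinable;       /* can this body be inlined at all?  */
};

static struct toy_summary
toy_compute_inline_parameters (const char *body, int frame_bytes)
{
  struct toy_summary s;
  s.self_insns = (int) strlen (body);   /* stands in for estimate_num_insns */
  s.stack_size = frame_bytes;           /* stands in for estimated_stack_frame_size */
  s.inlinable = s.self_insns < 40;      /* stands in for tree_inlinable_function_p */
  return s;
}

int
main (void)
{
  struct toy_summary s
    = toy_compute_inline_parameters ("a = b + c; return a;", 16);
  printf ("insns=%d stack=%d inlinable=%d\n",
          s.self_insns, s.stack_size, s.inlinable);
  return 0;
}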
/* When inlining shall be performed.  */
static bool
gate_inline_passes (void)
{
  return flag_inline_trees;
}

struct tree_opt_pass pass_inline_parameters =
{
  NULL,                                 /* name */
  gate_inline_passes,                   /* gate */
  compute_inline_parameters,            /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_INLINE_HEURISTICS,                 /* tv_id */
  0,                                    /* properties_required */
  PROP_cfg,                             /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0,                                    /* todo_flags_finish */
  0                                     /* letter */
};

/* Apply inline plan to the function.  */
static unsigned int
apply_inline (void)
{
  unsigned int todo = 0;
  struct cgraph_edge *e;
  struct cgraph_node *node = cgraph_node (current_function_decl);

  /* Even when not optimizing, ensure that always_inline functions get
     inlined.  */
  if (!optimize)
    cgraph_decide_inlining_incrementally (node, false);

  /* We might need the body of this function so that we can expand
     it inline somewhere else.  */
  if (cgraph_preserve_function_body_p (current_function_decl))
    save_inline_function_body (node);

  for (e = node->callees; e; e = e->next_callee)
    if (!e->inline_failed || warn_inline)
      break;
  if (e)
    {
      timevar_push (TV_INTEGRATION);
      todo = optimize_inline_calls (current_function_decl);
      timevar_pop (TV_INTEGRATION);
    }
  /* In non-unit-at-a-time we must mark all referenced functions as needed.  */
  if (!flag_unit_at_a_time)
    {
      struct cgraph_edge *e;
      for (e = node->callees; e; e = e->next_callee)
        if (e->callee->analyzed)
          cgraph_mark_needed_node (e->callee);
    }
  return todo | execute_fixup_cfg ();
}

struct tree_opt_pass pass_apply_inline =
{
  "apply_inline",                       /* name */
  NULL,                                 /* gate */
  apply_inline,                         /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_INLINE_HEURISTICS,                 /* tv_id */
  0,                                    /* properties_required */
  PROP_cfg,                             /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func | TODO_verify_flow
  | TODO_verify_stmts,                  /* todo_flags_finish */
  0                                     /* letter */
};
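The new apply_inline pass above completes the split described in the file comment: pass_ipa_inline only decides (records the inline plan), and pass_apply_inline later carries the plan out per function. A toy sketch of that two-phase shape follows; all names and the size heuristic are invented, only the decide-then-apply structure mirrors the patch:

#include <stdio.h>

/* Toy split between "decide" (mark edges in the plan) and "apply"
   (rewrite one function's body according to the recorded plan).  */
struct toy_edge { const char *callee; int callee_size; int inline_this; };

/* Phase 1: whole-program heuristic, only records decisions.  */
static void
toy_decide_inlining (struct toy_edge *edges, int n, int size_limit)
{
  int i;
  for (i = 0; i < n; i++)
    edges[i].inline_this = edges[i].callee_size <= size_limit;
}

/* Phase 2: per-function pass that carries the recorded plan out.  */
static void
toy_apply_inline (const char *caller, struct toy_edge *edges, int n)
{
  int i;
  for (i = 0; i < n; i++)
    if (edges[i].inline_this)
      printf ("inlining %s into %s\n", edges[i].callee, caller);
}

int
main (void)
{
  struct toy_edge edges[] = { { "get_x", 3, 0 }, { "parse", 120, 0 } };
  toy_decide_inlining (edges, 2, 40);
  toy_apply_inline ("main_loop", edges, 2);
  return 0;
}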
gcc/ipa.c

@@ -206,6 +206,9 @@ cgraph_remove_unreachable_nodes (bool before_inlining_p, FILE *file)
    node->aux = NULL;
  if (file)
    fprintf (file, "\nReclaimed %i insns", insns);
#ifdef ENABLE_CHECKING
  verify_cgraph ();
#endif
  return changed;
}
gcc/passes.c (73 lines changed)

@@ -437,10 +437,11 @@ init_optimization_passes (void)
  struct tree_opt_pass **p;

#define NEXT_PASS(PASS)  (p = next_pass_1 (p, &PASS))

  /* Interprocedural optimization passes.  */
  p = &all_ipa_passes;
  NEXT_PASS (pass_ipa_function_and_variable_visibility);
  NEXT_PASS (pass_early_ipa_inline);
  NEXT_PASS (pass_ipa_early_inline);
  NEXT_PASS (pass_early_local_passes);
  NEXT_PASS (pass_ipa_increase_alignment);
  NEXT_PASS (pass_ipa_cp);

@@ -451,6 +452,12 @@ init_optimization_passes (void)
  NEXT_PASS (pass_ipa_pta);
  *p = NULL;

  p = &pass_ipa_early_inline.sub;
  NEXT_PASS (pass_early_inline);
  NEXT_PASS (pass_inline_parameters);
  NEXT_PASS (pass_rebuild_cgraph_edges);
  *p = NULL;

  /* All passes needed to lower the function into shape optimizers can
     operate on.  */
  p = &all_lowering_passes;

@@ -464,6 +471,7 @@ init_optimization_passes (void)
  NEXT_PASS (pass_lower_vector);
  NEXT_PASS (pass_warn_function_return);
  NEXT_PASS (pass_build_cgraph_edges);
  NEXT_PASS (pass_inline_parameters);
  *p = NULL;

  p = &pass_early_local_passes.sub;

@@ -473,6 +481,7 @@ init_optimization_passes (void)
  NEXT_PASS (pass_expand_omp);
  NEXT_PASS (pass_all_early_optimizations);
  NEXT_PASS (pass_rebuild_cgraph_edges);
  NEXT_PASS (pass_inline_parameters);
  *p = NULL;

  p = &pass_all_early_optimizations.sub;

@@ -480,6 +489,8 @@ init_optimization_passes (void)
  NEXT_PASS (pass_reset_cc_flags);
  NEXT_PASS (pass_build_ssa);
  NEXT_PASS (pass_early_warn_uninitialized);
  NEXT_PASS (pass_rebuild_cgraph_edges);
  NEXT_PASS (pass_early_inline);
  NEXT_PASS (pass_cleanup_cfg);
  NEXT_PASS (pass_rename_ssa_copies);
  NEXT_PASS (pass_ccp);

@@ -494,7 +505,7 @@ init_optimization_passes (void)
  *p = NULL;

  p = &all_passes;
  NEXT_PASS (pass_fixup_cfg);
  NEXT_PASS (pass_apply_inline);
  NEXT_PASS (pass_all_optimizations);
  NEXT_PASS (pass_warn_function_noreturn);
  NEXT_PASS (pass_free_datastructures);
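The hunks above extend the pass lists built by init_optimization_passes, which appends through a pointer-to-pointer cursor (NEXT_PASS expands to a call that stores at the cursor and advances it). A minimal sketch of that idiom with invented types, not the real next_pass_1:

#include <stdio.h>

struct toy_pass { const char *name; struct toy_pass *next; };

/* Append PASS at the cursor and return the new cursor position,
   mirroring what next_pass_1 does for the NEXT_PASS macro.  */
static struct toy_pass **
toy_next_pass (struct toy_pass **p, struct toy_pass *pass)
{
  *p = pass;
  pass->next = NULL;
  return &pass->next;
}

int
main (void)
{
  struct toy_pass a = { "apply_inline", NULL }, b = { "all_optimizations", NULL };
  struct toy_pass *all_passes = NULL, **p = &all_passes;

  p = toy_next_pass (p, &a);
  p = toy_next_pass (p, &b);
  *p = NULL;                    /* terminate the list, like "*p = NULL;" above */

  for (struct toy_pass *it = all_passes; it; it = it->next)
    printf ("%s\n", it->name);
  return 0;
}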
@@ -749,6 +760,52 @@ do_per_function (void (*callback) (void *data), void *data)
    }
}

/* Because inlining might remove no-longer reachable nodes, we need to
   keep the array visible to garbage collector to avoid reading collected
   out nodes.  */
static int nnodes;
static GTY ((length ("nnodes"))) struct cgraph_node **order;

/* If we are in IPA mode (i.e., current_function_decl is NULL), call
   function CALLBACK for every function in the call graph.  Otherwise,
   call CALLBACK on the current function.  */

static void
do_per_function_toporder (void (*callback) (void *data), void *data)
{
  int i;

  if (current_function_decl)
    callback (data);
  else
    {
      gcc_assert (!order);
      order = ggc_alloc (sizeof (*order) * cgraph_n_nodes);
      nnodes = cgraph_postorder (order);
      for (i = nnodes - 1; i >= 0; i--)
        {
          struct cgraph_node *node = order[i];

          /* Allow possibly removed nodes to be garbage collected.  */
          order[i] = NULL;
          if (node->analyzed && (node->needed || node->reachable))
            {
              push_cfun (DECL_STRUCT_FUNCTION (node->decl));
              current_function_decl = node->decl;
              callback (data);
              free_dominance_info (CDI_DOMINATORS);
              free_dominance_info (CDI_POST_DOMINATORS);
              current_function_decl = NULL;
              pop_cfun ();
              ggc_collect ();
            }
        }
    }
  ggc_free (order);
  order = NULL;
  nnodes = 0;
}

/* Perform all TODO actions that ought to be done on each function.  */

static void
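do_per_function_toporder above visits functions in an order that sees callees before their callers, as required by the early-inliner comment earlier in the patch. The ordering comes from a depth-first postorder over the call graph; here is a self-contained sketch of that idea on a tiny invented graph (not GCC's cgraph or cgraph_postorder, whose edge direction and iteration details differ):

#include <stdio.h>

#define N 4

/* callees[i][j] != 0 means function i calls function j.  */
static const int callees[N][N] = {
  /* main  */ { 0, 1, 1, 0 },
  /* parse */ { 0, 0, 0, 1 },
  /* eval  */ { 0, 0, 0, 1 },
  /* get_x */ { 0, 0, 0, 0 },
};
static const char *names[N] = { "main", "parse", "eval", "get_x" };

static int visited[N], order[N], nnodes;

static void
dfs (int u)
{
  visited[u] = 1;
  for (int v = 0; v < N; v++)
    if (callees[u][v] && !visited[v])
      dfs (v);
  order[nnodes++] = u;          /* postorder: callees recorded first */
}

int
main (void)
{
  for (int u = 0; u < N; u++)
    if (!visited[u])
      dfs (u);
  /* Processing in this recorded order handles every callee before any
     of its callers, so "get_x" comes before "parse"/"eval", "main" last.  */
  for (int i = 0; i < nnodes; i++)
    printf ("%s\n", names[i]);
  return 0;
}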
@@ -903,6 +960,9 @@ execute_one_pass (struct tree_opt_pass *pass)
  if (pass->gate && !pass->gate ())
    return false;

  if (!quiet_flag && !cfun)
    fprintf (stderr, " <%s>", pass->name ? pass->name : "");

  if (pass->todo_flags_start & TODO_set_props)
    cfun->curr_properties = pass->properties_required;

@@ -1012,16 +1072,13 @@ execute_ipa_pass_list (struct tree_opt_pass *pass)
    {
      gcc_assert (!current_function_decl);
      gcc_assert (!cfun);
      if (!quiet_flag)
        {
          fprintf (stderr, " <%s>", pass->name ? pass->name : "");
          fflush (stderr);
        }
      if (execute_one_pass (pass) && pass->sub)
        do_per_function ((void (*)(void *))execute_pass_list, pass->sub);
        do_per_function_toporder ((void (*)(void *))execute_pass_list,
                                  pass->sub);
      if (!current_function_decl)
        cgraph_process_new_functions ();
      pass = pass->next;
    }
  while (pass);
}
#include "gt-passes.h"
gcc/tree-flow.h

@@ -1058,6 +1058,7 @@ void sort_fieldstack (VEC(fieldoff_s,heap) *);

void init_alias_heapvars (void);
void delete_alias_heapvars (void);
unsigned int execute_fixup_cfg (void);

#include "tree-flow-inline.h"
gcc/tree-inline.c

@@ -2613,7 +2613,7 @@ fold_marked_statements (int first, struct pointer_set_t *statements)

/* Expand calls to inline functions in the body of FN.  */

void
unsigned int
optimize_inline_calls (tree fn)
{
  copy_body_data id;

@@ -2624,7 +2624,7 @@ optimize_inline_calls (tree fn)
     occurred -- and we might crash if we try to inline invalid
     code.  */
  if (errorcount || sorrycount)
    return;
    return 0;

  /* Clear out ID.  */
  memset (&id, 0, sizeof (id));
@@ -2679,25 +2679,22 @@ optimize_inline_calls (tree fn)
  if (ENTRY_BLOCK_PTR->count)
    counts_to_freqs ();

  /* We are not going to maintain the cgraph edges up to date.
     Kill it so it won't confuse us.  */
  cgraph_node_remove_callees (id.dst_node);

  fold_marked_statements (last, id.statements_to_fold);
  pointer_set_destroy (id.statements_to_fold);
  if (gimple_in_ssa_p (cfun))
    {
      /* We make no attempts to keep dominance info up-to-date.  */
      free_dominance_info (CDI_DOMINATORS);
      free_dominance_info (CDI_POST_DOMINATORS);
      delete_unreachable_blocks ();
      update_ssa (TODO_update_ssa);
      fold_cond_expr_cond ();
      if (need_ssa_update_p ())
        update_ssa (TODO_update_ssa);
    }
  else
    fold_cond_expr_cond ();
  fold_cond_expr_cond ();
  /* We make no attempts to keep dominance info up-to-date.  */
  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);
  /* It would be nice to check SSA/CFG/statement consistency here, but it is
     not possible yet - the IPA passes might make various functions not
     throw, and they don't care to proactively update local EH info.  This is
     done later in the fixup_cfg pass, which also executes the verification.  */
  return (TODO_update_ssa | TODO_cleanup_cfg
          | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0));
}

/* FN is a function that has a complete body, and CLONE is a function whose
@@ -3194,6 +3191,7 @@ tree_function_versioning (tree old_decl, tree new_decl, varray_type tree_map,
  struct ipa_replace_map *replace_info;
  basic_block old_entry_block;
  tree t_step;
  tree old_current_function_decl = current_function_decl;

  gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
              && TREE_CODE (new_decl) == FUNCTION_DECL);

@@ -3202,10 +3200,6 @@ tree_function_versioning (tree old_decl, tree new_decl, varray_type tree_map,
  old_version_node = cgraph_node (old_decl);
  new_version_node = cgraph_node (new_decl);

  allocate_struct_function (new_decl);
  /* Cfun points to the new allocated function struct at this point.  */
  cfun->function_end_locus = DECL_SOURCE_LOCATION (new_decl);

  DECL_ARTIFICIAL (new_decl) = 1;
  DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);

@@ -3322,7 +3316,9 @@ tree_function_versioning (tree old_decl, tree new_decl, varray_type tree_map,
  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);
  pop_cfun ();
  current_function_decl = NULL;
  current_function_decl = old_current_function_decl;
  gcc_assert (!current_function_decl
              || DECL_STRUCT_FUNCTION (current_function_decl) == cfun);
  return;
}
gcc/tree-inline.h

@@ -98,7 +98,7 @@ typedef struct copy_body_data
extern tree copy_body_r (tree *, int *, void *);
extern void insert_decl_map (copy_body_data *, tree, tree);

void optimize_inline_calls (tree);
unsigned int optimize_inline_calls (tree);
bool tree_inlinable_function_p (tree);
tree copy_tree_r (tree *, int *, void *);
void clone_body (tree, tree, void *);
gcc/tree-optimize.c

@@ -285,9 +285,12 @@ has_abnormal_outgoing_edge_p (basic_block bb)
/* Pass: fixup_cfg.  IPA passes, compilation of earlier functions or inlining
   might have changed some properties, such as marked functions nothrow or
   added calls that can potentially go to non-local labels.  Remove redundant
   edges and basic blocks, and create new ones if necessary.  */
   edges and basic blocks, and create new ones if necessary.

   This pass can't be executed as a standalone pass from the pass manager,
   because in between inlining and this fixup verify_flow_info would fail.  */

static unsigned int
unsigned int
execute_fixup_cfg (void)
{
  basic_block bb;
@@ -310,7 +313,7 @@ execute_fixup_cfg (void)
        {
          if (gimple_in_ssa_p (cfun))
            {
              todo |= TODO_update_ssa;
              todo |= TODO_update_ssa | TODO_cleanup_cfg;
              update_stmt (stmt);
            }
          TREE_SIDE_EFFECTS (call) = 0;

@@ -320,7 +323,8 @@ execute_fixup_cfg (void)
          if (!tree_could_throw_p (stmt) && lookup_stmt_eh_region (stmt))
            remove_stmt_from_eh_region (stmt);
        }
      tree_purge_dead_eh_edges (bb);
      if (tree_purge_dead_eh_edges (bb))
        todo |= TODO_cleanup_cfg;
    }

  if (current_function_has_nonlocal_label)

@@ -358,7 +362,7 @@ execute_fixup_cfg (void)

          for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
            {
              todo |= TODO_update_ssa;
              todo |= TODO_update_ssa | TODO_cleanup_cfg;
              gcc_assert (SSA_NAME_OCCURS_IN_ABNORMAL_PHI
                          (PHI_RESULT (phi)));
              mark_sym_for_renaming

@@ -377,24 +381,6 @@ execute_fixup_cfg (void)
  return todo;
}

struct tree_opt_pass pass_fixup_cfg =
{
  "fixupcfg",                           /* name */
  NULL,                                 /* gate */
  execute_fixup_cfg,                    /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  PROP_cfg,                             /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_cleanup_cfg | TODO_ggc_collect
  | TODO_dump_func | TODO_verify_flow
  | TODO_verify_stmts,                  /* todo_flags_finish */
  0                                     /* letter */ };

/* Do the actions required to initialize internal data structures used
   in tree-ssa optimization passes.  */
@@ -487,13 +473,9 @@ tree_rest_of_compilation (tree fndecl)
  /* Initialize the default bitmap obstack.  */
  bitmap_obstack_initialize (NULL);

  /* We might need the body of this function so that we can expand
     it inline somewhere else.  */
  if (cgraph_preserve_function_body_p (fndecl))
    save_inline_function_body (node);

  /* Initialize the RTL code for the function.  */
  current_function_decl = fndecl;
  cfun = DECL_STRUCT_FUNCTION (fndecl);
  saved_loc = input_location;
  input_location = DECL_SOURCE_LOCATION (fndecl);
  init_function_start (fndecl);

@@ -506,33 +488,6 @@ tree_rest_of_compilation (tree fndecl)

  tree_register_cfg_hooks ();

  if (flag_inline_trees)
    {
      struct cgraph_edge *e;
      for (e = node->callees; e; e = e->next_callee)
        if (!e->inline_failed || warn_inline)
          break;
      if (e)
        {
          timevar_push (TV_INTEGRATION);
          optimize_inline_calls (fndecl);
          timevar_pop (TV_INTEGRATION);
        }
    }
  /* In non-unit-at-a-time we must mark all referenced functions as
     needed.  */
  if (!flag_unit_at_a_time)
    {
      struct cgraph_edge *e;
      for (e = node->callees; e; e = e->next_callee)
        if (e->callee->analyzed)
          cgraph_mark_needed_node (e->callee);
    }

  /* We are not going to maintain the cgraph edges up to date.
     Kill it so it won't confuse us.  */
  cgraph_node_remove_callees (node);

  bitmap_obstack_initialize (&reg_obstack); /* FIXME, only at RTL generation */
  /* Perform all tree transforms and optimizations.  */
  execute_pass_list (all_passes);
gcc/tree-pass.h

@@ -310,13 +310,12 @@ extern struct tree_opt_pass pass_reset_cc_flags;

/* IPA Passes */
extern struct tree_opt_pass pass_ipa_cp;
extern struct tree_opt_pass pass_ipa_inline;
extern struct tree_opt_pass pass_early_ipa_inline;
extern struct tree_opt_pass pass_ipa_early_inline;
extern struct tree_opt_pass pass_ipa_reference;
extern struct tree_opt_pass pass_ipa_pure_const;
extern struct tree_opt_pass pass_ipa_type_escape;
extern struct tree_opt_pass pass_ipa_pta;
extern struct tree_opt_pass pass_early_local_passes;
extern struct tree_opt_pass pass_all_early_optimizations;
extern struct tree_opt_pass pass_ipa_increase_alignment;
extern struct tree_opt_pass pass_ipa_function_and_variable_visibility;

@@ -399,6 +398,10 @@ extern struct tree_opt_pass pass_set_nothrow_function_flags;
extern struct tree_opt_pass pass_final;
extern struct tree_opt_pass pass_rtl_seqabstr;
extern struct tree_opt_pass pass_release_ssa_names;
extern struct tree_opt_pass pass_early_inline;
extern struct tree_opt_pass pass_inline_parameters;
extern struct tree_opt_pass pass_apply_inline;
extern struct tree_opt_pass pass_all_early_optimizations;

/* The root of the compilation pass tree, once constructed.  */
extern struct tree_opt_pass *all_passes, *all_ipa_passes, *all_lowering_passes;