tree-inline.c (optimize_inline_calls): Schedule cleanups only if we inlined something.

2010-09-22  Richard Guenther  <rguenther@suse.de>

	* tree-inline.c (optimize_inline_calls): Schedule cleanups
	only if we inlined something.  Block compaction and conditional
	folding are done by cfg cleanup.  Schedule update-address-taken.
	(tree_function_versioning): Remove redundant call to number_blocks.
	* tree-optimize.c (execute_cleanup_cfg_post_optimizing): Conditional
	folding is done by cfg cleanup.
	* passes.c (init_optimization_passes): Remove update-address-taken
	pass after IPA inlining.

From-SVN: r164525
Author:    Richard Guenther <rguenther@suse.de>
Committer: Richard Biener
Date:      2010-09-22 14:44:13 +0000
Parent:    27d5e204ca
Commit:    5d7b099c0a

4 changed files with 19 additions and 10 deletions

--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,3 +1,14 @@
+2010-09-22  Richard Guenther  <rguenther@suse.de>
+
+	* tree-inline.c (optimize_inline_calls): Schedule cleanups
+	only if we inlined something.  Block compaction and conditional
+	folding are done by cfg cleanup.  Schedule update-address-taken.
+	(tree_function_versioning): Remove redundant call to number_blocks.
+	* tree-optimize.c (execute_cleanup_cfg_post_optimizing): Conditional
+	folding is done by cfg cleanup.
+	* passes.c (init_optimization_passes): Remove update-address-taken
+	pass after IPA inlining.
+
 2010-09-22  Chung-Lin Tang  <cltang@codesourcery.com>
 
 	* postreload.c (move2add_note_store): Add reg_symbol_ref[] checks

--- a/gcc/passes.c
+++ b/gcc/passes.c
@@ -836,7 +836,6 @@ init_optimization_passes (void)
       /* Initial scalar cleanups before alias computation.
 	 They ensure memory accesses are not indirect wherever possible.  */
       NEXT_PASS (pass_strip_predict_hints);
-      NEXT_PASS (pass_update_address_taken);
       NEXT_PASS (pass_rename_ssa_copies);
       NEXT_PASS (pass_complete_unrolli);
       NEXT_PASS (pass_ccp);
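
The standalone update-address-taken pass after IPA inlining can go away because the inliner now requests that work itself, through the TODO flags it returns (see the tree-inline.c hunks below), and only when it actually inlined something.  Below is a minimal sketch of that scheduling idea in plain C; all names and the two-flag TODO set are hypothetical stand-ins, not GCC's real pass manager.

#include <stdio.h>

#define TODO_update_address_taken (1 << 0)
#define TODO_cleanup_cfg          (1 << 1)

/* A transformation pass returns a bitmask naming the cleanups it made
   necessary; an unchanged function requests nothing.  */
static unsigned int
transform_pass (int changed_something)
{
  if (!changed_something)
    return 0;
  return TODO_update_address_taken | TODO_cleanup_cfg;
}

int
main (void)
{
  unsigned int todo = transform_pass (1);
  if (todo & TODO_update_address_taken)
    printf ("running update-address-taken as a requested cleanup\n");
  if (todo & TODO_cleanup_cfg)
    printf ("running cfg cleanup\n");
  return 0;
}

The design point: a cleanup keyed off returned TODO flags runs only when a transformation reports a change, whereas a pass hard-wired into the pipeline costs compile time unconditionally.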

--- a/gcc/tree-inline.c
+++ b/gcc/tree-inline.c
@@ -4162,6 +4162,7 @@ optimize_inline_calls (tree fn)
   basic_block bb;
   int last = n_basic_blocks;
   struct gimplify_ctx gctx;
+  bool inlined_p = false;
 
   /* There is no point in performing inlining if errors have already
      occurred -- and we might crash if we try to inline invalid
@@ -4201,7 +4202,7 @@ optimize_inline_calls (tree fn)
      follow it; we'll trudge through them, processing their CALL_EXPRs
      along the way.  */
   FOR_EACH_BB (bb)
-    gimple_expand_calls_inline (bb, &id);
+    inlined_p |= gimple_expand_calls_inline (bb, &id);
 
   pop_gimplify_context (NULL);
 
@@ -4217,18 +4218,19 @@ optimize_inline_calls (tree fn)
     }
 #endif
 
-  /* Fold the statements before compacting/renumbering the basic blocks.  */
+  /* Fold queued statements.  */
   fold_marked_statements (last, id.statements_to_fold);
   pointer_set_destroy (id.statements_to_fold);
 
   gcc_assert (!id.debug_stmts);
 
-  /* Renumber the (code) basic_blocks consecutively.  */
-  compact_blocks ();
+  /* If we didn't inline into the function there is nothing to do.  */
+  if (!inlined_p)
+    return 0;
+
   /* Renumber the lexical scoping (non-code) blocks consecutively.  */
   number_blocks (fn);
 
-  fold_cond_expr_cond ();
   delete_unreachable_blocks_update_callgraph (&id);
 #ifdef ENABLE_CHECKING
   verify_cgraph_node (id.dst_node);
@@ -4241,6 +4243,7 @@ optimize_inline_calls (tree fn)
   return (TODO_update_ssa
 	  | TODO_cleanup_cfg
 	  | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
+	  | (gimple_in_ssa_p (cfun) ? TODO_update_address_taken : 0)
 	  | (profile_status != PROFILE_ABSENT ? TODO_rebuild_frequencies : 0));
 }
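
Taken together, the hunks above implement the "only if we inlined something" part of the ChangeLog: gimple_expand_calls_inline reports whether it inlined any call, optimize_inline_calls ORs those per-block results into inlined_p, and an unchanged function returns an empty TODO set before any renumbering or callgraph cleanup runs.  A self-contained sketch of the pattern (hypothetical names, not GCC code):

#include <stdbool.h>
#include <stdio.h>

/* Pretend only block 2 contains a call we can inline.  */
static bool
expand_calls_in_block (int bb)
{
  return bb == 2;
}

static unsigned int
optimize_calls (int n_blocks)
{
  bool inlined_p = false;
  int bb;

  /* OR together the per-block "did anything change" results.  */
  for (bb = 0; bb < n_blocks; bb++)
    inlined_p |= expand_calls_in_block (bb);

  /* Nothing inlined: skip all cleanup work and schedule nothing.  */
  if (!inlined_p)
    return 0;

  /* ... renumber blocks, verify the callgraph, etc. ... */
  return 0x3;  /* stand-in for TODO_update_ssa | TODO_cleanup_cfg */
}

int
main (void)
{
  printf ("todo = %#x\n", optimize_calls (5));
  return 0;
}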
@@ -5118,9 +5121,6 @@ tree_function_versioning (tree old_decl, tree new_decl,
   DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
 
-  /* Renumber the lexical scoping (non-code) blocks consecutively.  */
-  number_blocks (id.dst_fn);
-
   declare_inline_vars (DECL_INITIAL (new_decl), vars);
 
   if (!VEC_empty (tree, DECL_STRUCT_FUNCTION (old_decl)->local_decls))

--- a/gcc/tree-optimize.c
+++ b/gcc/tree-optimize.c
@@ -158,7 +158,6 @@ struct gimple_opt_pass pass_all_early_optimizations =
 static unsigned int
 execute_cleanup_cfg_post_optimizing (void)
 {
-  fold_cond_expr_cond ();
   cleanup_tree_cfg ();
   cleanup_dead_labels ();
   group_case_labels ();
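
Both removals of fold_cond_expr_cond (here and in optimize_inline_calls above) rest on the same observation from the ChangeLog: cfg cleanup already folds the condition of every conditional it visits, so a separate pre-folding walk over the function is redundant.  A conceptual sketch of that subsumption, with hypothetical names and a toy IR:

#include <stdio.h>

/* Toy conditional: "if (lhs == rhs)", with -1 meaning "not yet folded".  */
struct cond
{
  int lhs, rhs;
  int folded_to;
};

/* The cleanup walk visits every conditional anyway, so it can fold each
   predicate in place; a prior folding pass over all conditionals adds
   nothing.  */
static void
cleanup_cfg (struct cond *conds, int n)
{
  int i;
  for (i = 0; i < n; i++)
    conds[i].folded_to = (conds[i].lhs == conds[i].rhs);
}

int
main (void)
{
  struct cond conds[2] = { { 1, 1, -1 }, { 1, 2, -1 } };
  int i;

  cleanup_cfg (conds, 2);
  for (i = 0; i < 2; i++)
    printf ("cond %d folds to %d\n", i, conds[i].folded_to);
  return 0;
}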