mirror of git://gcc.gnu.org/git/gcc.git

commit 0a1c58a25a
parent f1fd8077fd

Sibling call optimizations.

Co-Authored-By: Richard Henderson <rth@cygnus.com>
From-SVN: r32612
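In brief: a sibling call is a call in tail position whose stack requirements let the caller's frame be reused, so the call can be emitted as a jump rather than a call followed by a return. A minimal illustration of the kind of source this commit targets (hypothetical code, not taken from the patch):

/* The call to bar is the last thing foo does and its result is
   returned unchanged, so foo can run its sibcall epilogue (popping
   its own frame) and jump straight to bar; bar's return then goes
   directly to foo's caller.  */
extern int bar (int);

int
foo (int x)
{
  return bar (x + 1);	/* candidate sibling call */
}

The machinery added below records such calls as CALL_PLACEHOLDER insns during RTL expansion and later commits each one to a normal call, a sibling call, or a tail-recursive jump.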
@@ -1,3 +1,57 @@
2000-03-17  Jeff Law  <law@cygnus.com>
	    Richard Henderson  <rth@cygnus.com>

	* Makefile.in (OBJS): Add sibcall.o.
	(sibcall.o): New.
	* sibcall.c: New file.
	* calls.c (FUNCTION_OK_FOR_SIBCALL): Provide default.
	(ECF_IS_CONST, ECF_NOTHROW, ECF_SIBCALL): New.
	(emit_call_1): Replace `is_const' and `nothrow' with `ecf_flags'.
	Emit sibcall patterns when requested.  Update all callers.
	(expand_call): Generate CALL_PLACEHOLDER insns when tail call
	elimination seems feasible.
	* final.c (leaf_function_p): Sibling calls don't discount being
	a leaf function.
	* flow.c (HAVE_sibcall_epilogue): Provide default.
	(find_basic_blocks_1): Sibling calls don't throw.
	(make_edges): Make edge from sibling call to EXIT.
	(propagate_block): Don't remove sibcall_epilogue insns.
	* function.c (prologue, epilogue): Turn into varrays.  Update all uses.
	(sibcall_epilogue): New.
	(fixup_var_refs): Scan CALL_PLACEHOLDER sub-sequences.
	(identify_blocks_1): Likewise.  Break out from ...
	(identify_blocks): ... here.
	(reorder_blocks_1): Scan CALL_PLACEHOLDER.  Break out from ...
	(reorder_blocks): ... here.
	(init_function_for_compilation): Zap prologue/epilogue as varrays.
	(record_insns): Extend a varray instead of mallocing new memory.
	(contains): Read a varray not array of ints.
	(sibcall_epilogue_contains): New.
	(thread_prologue_and_epilogue_insns): Emit and record
	sibcall_epilogue patterns.
	(init_function_once): Allocate prologue/epilogue varrays.
	* genflags.c (gen_insn): Treat sibcall patterns as calls.
	* integrate.c (save_parm_insns): Recurse on CALL_PLACEHOLDER patterns.
	Broken out from ...
	(save_for_inline_nocopy): ... here.
	(copy_insn_list): Recurse on CALL_PLACEHOLDER patterns.
	Broken out from ...
	(expand_inline_function): ... here.
	(copy_rtx_and_substitute): Handle NOTE_INSN_DELETED_LABEL.
	(subst_constants): Handle 'n' formats.
	* jump.c (jump_optimize_minimal): New.
	(jump_optimize_1): New arg `minimal'; update callers.  Elide most
	optimizations if it's set.
	* rtl.c (copy_rtx): Do copy jump & call for insns.
	* rtl.h (struct rtx_def): Document use of jump and call for insns.
	(SIBLING_CALL_P): New.
	(sibcall_use_t): New.
	* toplev.c (rest_of_compilation): Do init_EXPR_INSN_LIST_cache earlier.
	Invoke optimize_sibling_and_tail_recursive_calls.
	* tree.c (lang_safe_for_unsave): New.
	(safe_for_unsave): New.
	* tree.h (lang_safe_for_unsave, safe_for_unsave): Declare.

2000-03-17  Mark Mitchell  <mark@codesourcery.com>

	* objc/objc-act.c (encode_method_prototype): Pass types, not
gcc/Makefile.in

@@ -675,7 +675,8 @@ OBJS = diagnostic.o \
insn-opinit.o insn-recog.o insn-extract.o insn-output.o insn-emit.o lcm.o \
profile.o insn-attrtab.o $(out_object_file) $(EXTRA_OBJS) convert.o \
mbchar.o dyn-string.o splay-tree.o graph.o sbitmap.o resource.o hash.o \
predict.o lists.o ggc-common.o $(GGC) simplify-rtx.o ssa.o bb-reorder.o
predict.o lists.o ggc-common.o $(GGC) simplify-rtx.o ssa.o bb-reorder.o \
sibcall.o

# GEN files are listed separately, so they can be built before doing parallel
# makes for cc1 or cc1plus. Otherwise sequent parallel make attempts to load
@@ -1562,6 +1563,8 @@ cse.o : cse.c $(CONFIG_H) system.h $(RTL_H) $(REGS_H) hard-reg-set.h flags.h \
gcse.o : gcse.c $(CONFIG_H) system.h $(RTL_H) $(REGS_H) hard-reg-set.h \
   flags.h real.h insn-config.h $(RECOG_H) $(EXPR_H) $(BASIC_BLOCK_H) \
   function.h output.h toplev.h
sibcall.o : sibcall.c $(CONFIG_H) system.h $(RTL_H) $(REGS_H) function.h \
   hard-reg-set.h flags.h insn-config.h $(RECOG_H) $(BASIC_BLOCK_H)
resource.o : resource.c $(CONFIG_H) $(RTL_H) hard-reg-set.h system.h \
   $(BASIC_BLOCK_H) $(REGS_H) flags.h output.h resource.h function.h toplev.h \
   insn-attr.h
gcc/calls.c (1535 changed lines)
File diff suppressed because it is too large.
gcc/final.c

@@ -4019,7 +4019,8 @@ leaf_function_p ()
	return 0;
      if (GET_CODE (insn) == INSN
	  && GET_CODE (PATTERN (insn)) == SEQUENCE
	  && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == CALL_INSN)
	  && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == CALL_INSN
	  && ! SIBLING_CALL_P (XVECEXP (PATTERN (insn), 0, 0)))
	return 0;
    }
  for (insn = current_function_epilogue_delay_list; insn; insn = XEXP (insn, 1))
@@ -4028,7 +4029,8 @@ leaf_function_p ()
	return 0;
      if (GET_CODE (XEXP (insn, 0)) == INSN
	  && GET_CODE (PATTERN (XEXP (insn, 0))) == SEQUENCE
	  && GET_CODE (XVECEXP (PATTERN (XEXP (insn, 0)), 0, 0)) == CALL_INSN)
	  && GET_CODE (XVECEXP (PATTERN (XEXP (insn, 0)), 0, 0)) == CALL_INSN
	  && ! SIBLING_CALL_P (XVECEXP (PATTERN (XEXP (insn, 0)), 0, 0)))
	return 0;
    }
gcc/flow.c (25 changed lines)
@@ -154,10 +154,12 @@ Boston, MA 02111-1307, USA.  */
#ifndef HAVE_epilogue
#define HAVE_epilogue 0
#endif

#ifndef HAVE_prologue
#define HAVE_prologue 0
#endif
#ifndef HAVE_sibcall_epilogue
#define HAVE_sibcall_epilogue 0
#endif

/* The contents of the current function definition are allocated
   in this obstack, and all are freed at the end of the function.
@@ -592,7 +594,8 @@ find_basic_blocks_1 (f)
	     does not imply an abnormal edge, it will be a bit before
	     everything can be updated.  So continue to emit a noop at
	     the end of such a block.  */
	  if (GET_CODE (end) == CALL_INSN)
	  if (GET_CODE (end) == CALL_INSN
	      && ! SIBLING_CALL_P (end))
	    {
	      rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
	      end = emit_insn_after (nop, end);
@@ -644,7 +647,8 @@ find_basic_blocks_1 (f)
	     imply an abnormal edge, it will be a bit before everything can
	     be updated.  So continue to emit a noop at the end of such a
	     block.  */
	  if (GET_CODE (end) == CALL_INSN)
	  if (GET_CODE (end) == CALL_INSN
	      && ! SIBLING_CALL_P (end))
	    {
	      rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
	      end = emit_insn_after (nop, end);
@@ -973,6 +977,15 @@ make_edges (label_value_list)
	    }
	}

      /* If this is a sibling call insn, then this is in effect a
	 combined call and return, and so we need an edge to the
	 exit block.  No need to worry about EH edges, since we
	 wouldn't have created the sibling call in the first place.  */

      if (code == CALL_INSN && SIBLING_CALL_P (insn))
	make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);
      else

      /* If this is a CALL_INSN, then mark it as reaching the active EH
	 handler for this CALL_INSN.  If we're handling asynchronous
	 exceptions then any insn can reach any of the active handlers.
@@ -3249,8 +3262,10 @@ propagate_block (bb, old, significant, flags)
	 instructions.  Warn about probable compiler losage.  */
      if (insn_is_dead
	  && reload_completed
	  && (HAVE_epilogue || HAVE_prologue)
	  && prologue_epilogue_contains (insn))
	  && (((HAVE_epilogue || HAVE_prologue)
	       && prologue_epilogue_contains (insn))
	      || (HAVE_sibcall_epilogue
		  && sibcall_epilogue_contains (insn))))
	{
	  if (flags & PROP_KILL_DEAD_CODE)
	    {
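The make_edges change above is the CFG-level meaning of a sibling call: it terminates its block the way a return does. A throwaway sketch of how a later pass might recognize such block ends (only SIBLING_CALL_P and the basic_block fields come from the sources shown; the helper name is invented):

/* Hypothetical helper: does basic block BB end in a combined
   call-and-return, i.e. a sibling call?  */
static int
bb_ends_in_sibcall_p (bb)
     basic_block bb;
{
  rtx end = bb->end;
  return GET_CODE (end) == CALL_INSN && SIBLING_CALL_P (end);
}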
gcc/function.c (446 changed lines)
@@ -152,8 +152,12 @@ struct function *cfun = 0;
struct function *all_functions = 0;

/* These arrays record the INSN_UIDs of the prologue and epilogue insns. */
static int *prologue;
static int *epilogue;
static varray_type prologue;
static varray_type epilogue;

/* Array of INSN_UIDs to hold the INSN_UIDs for each sibcall epilogue
   in this function. */
static varray_type sibcall_epilogue;

/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
@@ -271,13 +275,15 @@ static void pad_below PARAMS ((struct args_size *, enum machine_mode,
static tree round_down PARAMS ((tree, int));
#endif
static rtx round_trampoline_addr PARAMS ((rtx));
static tree *identify_blocks_1 PARAMS ((rtx, tree *, tree *, tree *));
static void reorder_blocks_1 PARAMS ((rtx, tree, varray_type *));
static tree blocks_nreverse PARAMS ((tree));
static int all_blocks PARAMS ((tree, tree *));
static tree *get_block_vector PARAMS ((tree, int *));
/* We always define `record_insns' even if its not used so that we
   can always export `prologue_epilogue_contains'. */
static int *record_insns PARAMS ((rtx)) ATTRIBUTE_UNUSED;
static int contains PARAMS ((rtx, int *));
static void record_insns PARAMS ((rtx, varray_type *)) ATTRIBUTE_UNUSED;
static int contains PARAMS ((rtx, varray_type));
#ifdef HAVE_return
static void emit_return_into_block PARAMS ((basic_block));
#endif
@@ -1507,6 +1513,7 @@ fixup_var_refs (var, promoted_mode, unsignedp, ht)
  rtx first_insn = get_insns ();
  struct sequence_stack *stack = seq_stack;
  tree rtl_exps = rtl_expr_chain;
  rtx insn;

  /* Must scan all insns for stack-refs that exceed the limit. */
  fixup_var_refs_insns (var, promoted_mode, unsignedp, first_insn,
@@ -1545,6 +1552,31 @@ fixup_var_refs (var, promoted_mode, unsignedp, ht)
      fixup_var_refs_insns (var, promoted_mode, unsignedp, catch_clauses,
			    0, 0);
      end_sequence ();
    }

  /* Scan sequences saved in CALL_PLACEHOLDERS too. */
  for (insn = first_insn; insn; insn = NEXT_INSN (insn))
    {
      if (GET_CODE (insn) == CALL_INSN
	  && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
	{
	  int i;

	  /* Look at the Normal call, sibling call and tail recursion
	     sequences attached to the CALL_PLACEHOLDER. */
	  for (i = 0; i < 3; i++)
	    {
	      rtx seq = XEXP (PATTERN (insn), i);
	      if (seq)
		{
		  push_to_sequence (seq);
		  fixup_var_refs_insns (var, promoted_mode, unsignedp,
					seq, 0, 0);
		  XEXP (PATTERN (insn), i) = get_insns ();
		  end_sequence ();
		}
	    }
	}
    }
}

/* REPLACEMENTS is a pointer to a list of the struct fixup_replacement and X is
@@ -5494,11 +5526,8 @@ identify_blocks (block, insns)
     rtx insns;
{
  int n_blocks;
  tree *block_vector;
  tree *block_vector, *last_block_vector;
  tree *block_stack;
  int depth = 0;
  int current_block_number = 1;
  rtx insn;

  if (block == 0)
    return;
@@ -5508,37 +5537,85 @@ identify_blocks (block, insns)
  block_vector = get_block_vector (block, &n_blocks);
  block_stack = (tree *) xmalloc (n_blocks * sizeof (tree));

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == NOTE)
      {
	if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
	  {
	    tree b;
  last_block_vector = identify_blocks_1 (insns, block_vector + 1,
					 block_vector + n_blocks, block_stack);

	    /* If there are more block notes than BLOCKs, something
	       is badly wrong. */
	    if (current_block_number == n_blocks)
	      abort ();

	    b = block_vector[current_block_number++];
	    NOTE_BLOCK (insn) = b;
	    block_stack[depth++] = b;
	  }
	else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
	  {
	    if (depth == 0)
	      /* There are more NOTE_INSN_BLOCK_ENDs that
		 NOTE_INSN_BLOCK_BEGs. Something is badly wrong. */
	      abort ();

	    NOTE_BLOCK (insn) = block_stack[--depth];
	  }
      }
  /* If we didn't use all of the subblocks, we've misplaced block notes. */
  /* ??? This appears to happen all the time. Latent bugs elsewhere? */
  if (0 && last_block_vector != block_vector + n_blocks)
    abort ();

  free (block_vector);
  free (block_stack);
}

/* Subroutine of identify_blocks.  Do the block substitution on the
   insn chain beginning with INSNS.  Recurse for CALL_PLACEHOLDER chains.

   BLOCK_STACK is pushed and popped for each BLOCK_BEGIN/BLOCK_END pair.
   BLOCK_VECTOR is incremented for each block seen. */

static tree *
identify_blocks_1 (insns, block_vector, end_block_vector, orig_block_stack)
     rtx insns;
     tree *block_vector;
     tree *end_block_vector;
     tree *orig_block_stack;
{
  rtx insn;
  tree *block_stack = orig_block_stack;

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    {
      if (GET_CODE (insn) == NOTE)
	{
	  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
	    {
	      tree b;

	      /* If there are more block notes than BLOCKs, something
		 is badly wrong. */
	      if (block_vector == end_block_vector)
		abort ();

	      b = *block_vector++;
	      NOTE_BLOCK (insn) = b;
	      *block_stack++ = b;
	    }
	  else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
	    {
	      /* If there are more NOTE_INSN_BLOCK_ENDs than
		 NOTE_INSN_BLOCK_BEGs, something is badly wrong. */
	      if (block_stack == orig_block_stack)
		abort ();

	      NOTE_BLOCK (insn) = *--block_stack;
	    }
	}
      else if (GET_CODE (insn) == CALL_INSN
	       && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
	{
	  rtx cp = PATTERN (insn);

	  block_vector = identify_blocks_1 (XEXP (cp, 0), block_vector,
					    end_block_vector, block_stack);
	  if (XEXP (cp, 1))
	    block_vector = identify_blocks_1 (XEXP (cp, 1), block_vector,
					      end_block_vector, block_stack);
	  if (XEXP (cp, 2))
	    block_vector = identify_blocks_1 (XEXP (cp, 2), block_vector,
					      end_block_vector, block_stack);
	}
    }

  /* If there are more NOTE_INSN_BLOCK_BEGINs than NOTE_INSN_BLOCK_ENDs,
     something is badly wrong. */
  if (block_stack != orig_block_stack)
    abort ();

  return block_vector;
}

/* Given a revised instruction chain, rebuild the tree structure of
   BLOCK nodes to correspond to the new order of RTL.  The new block
   tree is inserted below TOP_BLOCK.  Returns the current top-level
@@ -5550,7 +5627,6 @@ reorder_blocks (block, insns)
     rtx insns;
{
  tree current_block = block;
  rtx insn;
  varray_type block_stack;

  if (block == NULL_TREE)
@@ -5562,35 +5638,7 @@ reorder_blocks (block, insns)
  BLOCK_SUBBLOCKS (current_block) = 0;
  BLOCK_CHAIN (current_block) = 0;

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == NOTE)
      {
	if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
	  {
	    tree block = NOTE_BLOCK (insn);
	    /* If we have seen this block before, copy it. */
	    if (TREE_ASM_WRITTEN (block))
	      {
		block = copy_node (block);
		NOTE_BLOCK (insn) = block;
	      }
	    BLOCK_SUBBLOCKS (block) = 0;
	    TREE_ASM_WRITTEN (block) = 1;
	    BLOCK_SUPERCONTEXT (block) = current_block;
	    BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
	    BLOCK_SUBBLOCKS (current_block) = block;
	    current_block = block;
	    VARRAY_PUSH_TREE (block_stack, block);
	  }
	else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
	  {
	    NOTE_BLOCK (insn) = VARRAY_TOP_TREE (block_stack);
	    VARRAY_POP (block_stack);
	    BLOCK_SUBBLOCKS (current_block)
	      = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
	    current_block = BLOCK_SUPERCONTEXT (current_block);
	  }
      }
  reorder_blocks_1 (insns, current_block, &block_stack);

  BLOCK_SUBBLOCKS (current_block)
    = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
@@ -5600,6 +5648,60 @@ reorder_blocks (block, insns)
  return current_block;
}

/* Helper function for reorder_blocks.  Process the insn chain beginning
   at INSNS.  Recurse for CALL_PLACEHOLDER insns. */

static void
reorder_blocks_1 (insns, current_block, p_block_stack)
     rtx insns;
     tree current_block;
     varray_type *p_block_stack;
{
  rtx insn;

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    {
      if (GET_CODE (insn) == NOTE)
	{
	  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
	    {
	      tree block = NOTE_BLOCK (insn);
	      /* If we have seen this block before, copy it. */
	      if (TREE_ASM_WRITTEN (block))
		{
		  block = copy_node (block);
		  NOTE_BLOCK (insn) = block;
		}
	      BLOCK_SUBBLOCKS (block) = 0;
	      TREE_ASM_WRITTEN (block) = 1;
	      BLOCK_SUPERCONTEXT (block) = current_block;
	      BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
	      BLOCK_SUBBLOCKS (current_block) = block;
	      current_block = block;
	      VARRAY_PUSH_TREE (*p_block_stack, block);
	    }
	  else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
	    {
	      NOTE_BLOCK (insn) = VARRAY_TOP_TREE (*p_block_stack);
	      VARRAY_POP (*p_block_stack);
	      BLOCK_SUBBLOCKS (current_block)
		= blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
	      current_block = BLOCK_SUPERCONTEXT (current_block);
	    }
	}
      else if (GET_CODE (insn) == CALL_INSN
	       && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
	{
	  rtx cp = PATTERN (insn);
	  reorder_blocks_1 (XEXP (cp, 0), current_block, p_block_stack);
	  if (XEXP (cp, 1))
	    reorder_blocks_1 (XEXP (cp, 1), current_block, p_block_stack);
	  if (XEXP (cp, 2))
	    reorder_blocks_1 (XEXP (cp, 2), current_block, p_block_stack);
	}
    }
}

/* Reverse the order of elements in the chain T of blocks,
   and return the new head of the chain (old last element). */

@@ -5757,6 +5859,7 @@ prepare_function_start ()
  cfun->preferred_stack_boundary = STACK_BOUNDARY;
#else
  cfun->stack_alignment_needed = 0;
  cfun->preferred_stack_boundary = 0;
#endif

  /* Set if a call to setjmp is seen. */
@@ -5900,8 +6003,11 @@ void
init_function_for_compilation ()
{
  reg_renumber = 0;

  /* No prologue/epilogue insns yet. */
  prologue = epilogue = 0;
  VARRAY_GROW (prologue, 0);
  VARRAY_GROW (epilogue, 0);
  VARRAY_GROW (sibcall_epilogue, 0);
}

/* Indicate that the current function uses extra args
@@ -6586,30 +6692,32 @@ expand_function_end (filename, line, end_bindings)
      expand_fixups (get_insns ());
    }

/* Create an array that records the INSN_UIDs of INSNS (either a sequence
   or a single insn). */
/* Extend a vector that records the INSN_UIDs of INSNS (either a
   sequence or a single insn). */

static int *
record_insns (insns)
static void
record_insns (insns, vecp)
     rtx insns;
     varray_type *vecp;
{
  int *vec;

  if (GET_CODE (insns) == SEQUENCE)
    {
      int len = XVECLEN (insns, 0);
      vec = (int *) oballoc ((len + 1) * sizeof (int));
      vec[len] = 0;
      int i = VARRAY_SIZE (*vecp);

      VARRAY_GROW (*vecp, i + len);
      while (--len >= 0)
	vec[len] = INSN_UID (XVECEXP (insns, 0, len));
	{
	  VARRAY_INT (*vecp, i) = INSN_UID (XVECEXP (insns, 0, len));
	  ++i;
	}
    }
  else
    {
      vec = (int *) oballoc (2 * sizeof (int));
      vec[0] = INSN_UID (insns);
      vec[1] = 0;
      int i = VARRAY_SIZE (*vecp);
      VARRAY_GROW (*vecp, i + 1);
      VARRAY_INT (*vecp, i) = INSN_UID (insns);
    }
  return vec;
}

/* Determine how many INSN_UIDs in VEC are part of INSN. */
@@ -6617,7 +6725,7 @@ record_insns (insns)
static int
contains (insn, vec)
     rtx insn;
     int *vec;
     varray_type vec;
{
  register int i, j;

@@ -6626,15 +6734,15 @@ contains (insn, vec)
    {
      int count = 0;
      for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
	for (j = 0; vec[j]; j++)
	  if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == vec[j])
	for (j = VARRAY_SIZE (vec) - 1; j >= 0; --j)
	  if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == VARRAY_INT (vec, j))
	    count++;
      return count;
    }
  else
    {
      for (j = 0; vec[j]; j++)
	if (INSN_UID (insn) == vec[j])
      for (j = VARRAY_SIZE (vec) - 1; j >= 0; --j)
	if (INSN_UID (insn) == VARRAY_INT (vec, j))
	  return 1;
    }
  return 0;
@@ -6644,13 +6752,22 @@ int
prologue_epilogue_contains (insn)
     rtx insn;
{
  if (prologue && contains (insn, prologue))
  if (contains (insn, prologue))
    return 1;
  if (epilogue && contains (insn, epilogue))
  if (contains (insn, epilogue))
    return 1;
  return 0;
}

int
sibcall_epilogue_contains (insn)
     rtx insn;
{
  if (sibcall_epilogue)
    return contains (insn, sibcall_epilogue);
  return 0;
}

#ifdef HAVE_return
/* Insert gen_return at the end of block BB.  This also means updating
   block_for_insn appropriately. */
@@ -6698,7 +6815,7 @@ thread_prologue_and_epilogue_insns (f)
      /* Retain a map of the prologue insns. */
      if (GET_CODE (seq) != SEQUENCE)
	seq = get_insns ();
      prologue = record_insns (seq);
      record_insns (seq, &prologue);
      emit_note (NULL, NOTE_INSN_PROLOGUE_END);

      /* GDB handles `break f' by setting a breakpoint on the first
@@ -6875,7 +6992,7 @@ thread_prologue_and_epilogue_insns (f)
      /* Retain a map of the epilogue insns. */
      if (GET_CODE (seq) != SEQUENCE)
	seq = get_insns ();
      epilogue = record_insns (seq);
      record_insns (seq, &epilogue);

      seq = gen_sequence ();
      end_sequence();
@@ -6888,6 +7005,35 @@ epilogue_done:

  if (insertted)
    commit_edge_insertions ();

#ifdef HAVE_sibcall_epilogue
  /* Emit sibling epilogues before any sibling call sites. */
  for (e = EXIT_BLOCK_PTR->pred; e ; e = e->pred_next)
    {
      basic_block bb = e->src;
      rtx insn = bb->end;
      rtx i;

      if (GET_CODE (insn) != CALL_INSN
	  || ! SIBLING_CALL_P (insn))
	continue;

      start_sequence ();
      seq = gen_sibcall_epilogue ();
      end_sequence ();

      i = PREV_INSN (insn);
      emit_insn_before (seq, insn);

      /* Update the UID to basic block map. */
      for (i = NEXT_INSN (i); i != insn; i = NEXT_INSN (i))
	set_block_for_insn (i, bb);

      /* Retain a map of the epilogue insns.  Used in life analysis to
	 avoid getting rid of sibcall epilogue insns. */
      record_insns (seq, &sibcall_epilogue);
    }
#endif
}

/* Reposition the prologue-end and epilogue-begin notes after instruction
@@ -6898,90 +7044,82 @@ reposition_prologue_and_epilogue_notes (f)
     rtx f ATTRIBUTE_UNUSED;
{
#if defined (HAVE_prologue) || defined (HAVE_epilogue)
  /* Reposition the prologue and epilogue notes. */
  if (n_basic_blocks)
  int len;

  if ((len = VARRAY_SIZE (prologue)) > 0)
    {
      int len;
      register rtx insn, note = 0;

      if (prologue)
      /* Scan from the beginning until we reach the last prologue insn.
	 We apparently can't depend on basic_block_{head,end} after
	 reorg has run. */
      for (insn = f; len && insn; insn = NEXT_INSN (insn))
	{
	  register rtx insn, note = 0;

	  /* Scan from the beginning until we reach the last prologue insn.
	     We apparently can't depend on basic_block_{head,end} after
	     reorg has run. */
	  for (len = 0; prologue[len]; len++)
	    ;
	  for (insn = f; len && insn; insn = NEXT_INSN (insn))
	  if (GET_CODE (insn) == NOTE)
	    {
	      if (GET_CODE (insn) == NOTE)
	      if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
		note = insn;
	    }
	  else if ((len -= contains (insn, prologue)) == 0)
	    {
	      rtx next;
	      /* Find the prologue-end note if we haven't already, and
		 move it to just after the last prologue insn. */
	      if (note == 0)
		{
		  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
		    note = insn;
		  for (note = insn; (note = NEXT_INSN (note));)
		    if (GET_CODE (note) == NOTE
			&& NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
		      break;
		}
	      else if ((len -= contains (insn, prologue)) == 0)
		{
		  rtx next;
		  /* Find the prologue-end note if we haven't already, and
		     move it to just after the last prologue insn. */
		  if (note == 0)
		    {
		      for (note = insn; (note = NEXT_INSN (note));)
			if (GET_CODE (note) == NOTE
			    && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
			  break;
		    }

		  next = NEXT_INSN (note);
	      next = NEXT_INSN (note);

		  /* Whether or not we can depend on BLOCK_HEAD,
		     attempt to keep it up-to-date. */
		  if (BLOCK_HEAD (0) == note)
		    BLOCK_HEAD (0) = next;
	      /* Whether or not we can depend on BLOCK_HEAD,
		 attempt to keep it up-to-date. */
	      if (BLOCK_HEAD (0) == note)
		BLOCK_HEAD (0) = next;

		  remove_insn (note);
		  add_insn_after (note, insn);
		}
	      remove_insn (note);
	      add_insn_after (note, insn);
	    }
	}
    }

      if (epilogue)
  if ((len = VARRAY_SIZE (epilogue)) > 0)
    {
      register rtx insn, note = 0;

      /* Scan from the end until we reach the first epilogue insn.
	 We apparently can't depend on basic_block_{head,end} after
	 reorg has run. */
      for (insn = get_last_insn (); len && insn; insn = PREV_INSN (insn))
	{
	  register rtx insn, note = 0;

	  /* Scan from the end until we reach the first epilogue insn.
	     We apparently can't depend on basic_block_{head,end} after
	     reorg has run. */
	  for (len = 0; epilogue[len]; len++)
	    ;
	  for (insn = get_last_insn (); len && insn; insn = PREV_INSN (insn))
	  if (GET_CODE (insn) == NOTE)
	    {
	      if (GET_CODE (insn) == NOTE)
	      if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
		note = insn;
	    }
	  else if ((len -= contains (insn, epilogue)) == 0)
	    {
	      /* Find the epilogue-begin note if we haven't already, and
		 move it to just before the first epilogue insn. */
	      if (note == 0)
		{
		  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
		    note = insn;
		  for (note = insn; (note = PREV_INSN (note));)
		    if (GET_CODE (note) == NOTE
			&& NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
		      break;
		}
	      else if ((len -= contains (insn, epilogue)) == 0)
		{
		  /* Find the epilogue-begin note if we haven't already, and
		     move it to just before the first epilogue insn. */
		  if (note == 0)
		    {
		      for (note = insn; (note = PREV_INSN (note));)
			if (GET_CODE (note) == NOTE
			    && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
			  break;
		    }

		  /* Whether or not we can depend on BLOCK_HEAD,
		     attempt to keep it up-to-date. */
		  if (n_basic_blocks
		      && BLOCK_HEAD (n_basic_blocks-1) == insn)
		    BLOCK_HEAD (n_basic_blocks-1) = note;
	      /* Whether or not we can depend on BLOCK_HEAD,
		 attempt to keep it up-to-date. */
	      if (n_basic_blocks
		  && BLOCK_HEAD (n_basic_blocks-1) == insn)
		BLOCK_HEAD (n_basic_blocks-1) = note;

		  remove_insn (note);
		  add_insn_before (note, insn);
		}
	      remove_insn (note);
	      add_insn_before (note, insn);
	    }
	}
    }
@@ -7095,4 +7233,8 @@ init_function_once ()
{
  ggc_add_root (&all_functions, 1, sizeof all_functions,
		mark_function_chain);

  VARRAY_INT_INIT (prologue, 0, "prologue");
  VARRAY_INT_INIT (epilogue, 0, "epilogue");
  VARRAY_INT_INIT (sibcall_epilogue, 0, "sibcall_epilogue");
}
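The function.c changes above replace zero-terminated int arrays with growable varrays. A sketch of the idiom, built only from the VARRAY macros the diff itself uses (the helper name is invented; this is not a drop-in for the GCC sources):

/* Append INSN's uid to the growable varray *VECP, mirroring the new
   record_insns.  */
static void
append_uid (insn, vecp)
     rtx insn;
     varray_type *vecp;
{
  int i = VARRAY_SIZE (*vecp);		/* varrays carry an explicit size */
  VARRAY_GROW (*vecp, i + 1);		/* and grow in place */
  VARRAY_INT (*vecp, i) = INSN_UID (insn);
}

That explicit size is why the new contains can count down from VARRAY_SIZE (vec) - 1 instead of scanning for a 0 sentinel, and why prologue_epilogue_contains no longer needs to null-check the arrays first.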
gcc/genflags.c

@@ -174,11 +174,15 @@ gen_insn (insn)
     call_value_pop) ignoring the extra arguments that are passed for
     some machines, so by default, turn off the prototype. */

  obstack_ptr = (name[0] == 'c'
  obstack_ptr = ((name[0] == 'c' || name[0] == 's')
		 && (!strcmp (name, "call")
		     || !strcmp (name, "call_value")
		     || !strcmp (name, "call_pop")
		     || !strcmp (name, "call_value_pop")))
		     || !strcmp (name, "call_value_pop")
		     || !strcmp (name, "sibcall")
		     || !strcmp (name, "sibcall_value")
		     || !strcmp (name, "sibcall_pop")
		     || !strcmp (name, "sibcall_value_pop")))
		? &call_obstack : &normal_obstack;

  obstack_grow (obstack_ptr, &insn, sizeof (rtx));
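With this change, genflags files the sibcall patterns alongside the other call patterns, so a target that defines them gets the same unprototyped gen_ declarations. Schematically, its generated insn-flags.h would then contain entries along these lines (illustrative output under that assumption, not copied from a real build; exact declarations depend on the target's .md file):

/* Hypothetical insn-flags.h excerpt for a sibcall-capable target.  */
#define HAVE_sibcall 1
#define HAVE_sibcall_epilogue 1
extern struct rtx_def *gen_sibcall ();		/* prototype deliberately
						   omitted, as for call */
extern struct rtx_def *gen_sibcall_epilogue ();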
gcc/integrate.c (294 changed lines)
@@ -66,19 +66,22 @@ extern struct obstack *function_maybepermanent_obstack;
static rtvec initialize_for_inline PARAMS ((tree));
static void note_modified_parmregs PARAMS ((rtx, rtx, void *));
static void integrate_parm_decls PARAMS ((tree, struct inline_remap *,
					  rtvec));
					  rtvec));
static tree integrate_decl_tree PARAMS ((tree,
					 struct inline_remap *));
					 struct inline_remap *));
static void subst_constants PARAMS ((rtx *, rtx,
				     struct inline_remap *, int));
				     struct inline_remap *, int));
static void set_block_origin_self PARAMS ((tree));
static void set_decl_origin_self PARAMS ((tree));
static void set_block_abstract_flags PARAMS ((tree, int));
static void process_reg_param PARAMS ((struct inline_remap *, rtx,
				       rtx));
				       rtx));
void set_decl_abstract_flags PARAMS ((tree, int));
static rtx expand_inline_function_eh_labelmap PARAMS ((rtx));
static void mark_stores PARAMS ((rtx, rtx, void *));
static void save_parm_insns PARAMS ((rtx, rtx));
static void copy_insn_list PARAMS ((rtx, struct inline_remap *,
				    rtx));
static int compare_blocks PARAMS ((const PTR, const PTR));
static int find_block PARAMS ((const PTR, const PTR));

@@ -423,16 +426,7 @@ save_for_inline_nocopy (fndecl)
     perform constant folding when its incoming value is constant).
     Otherwise, we have to copy its value into a new register and track
     the new register's life. */
  in_nonparm_insns = 0;
  for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
    {
      if (insn == first_nonparm_insn)
	in_nonparm_insns = 1;

      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	/* Record what interesting things happen to our parameters. */
	note_stores (PATTERN (insn), note_modified_parmregs, NULL);
    }
  save_parm_insns (insn, first_nonparm_insn);

  /* We have now allocated all that needs to be allocated permanently
     on the rtx obstack.  Set our high-water mark, so that we
@@ -449,6 +443,48 @@ save_for_inline_nocopy (fndecl)
  /* Clean up. */
  free (parmdecl_map);
}

/* Scan the chain of insns to see what happens to our PARM_DECLs.  If a
   PARM_DECL is used but never modified, we can substitute its rtl directly
   when expanding inline (and perform constant folding when its incoming
   value is constant).  Otherwise, we have to copy its value into a new
   register and track the new register's life. */

static void
save_parm_insns (insn, first_nonparm_insn)
     rtx insn;
     rtx first_nonparm_insn;
{
  in_nonparm_insns = 0;

  if (insn == NULL_RTX)
    return;

  for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
    {
      if (insn == first_nonparm_insn)
	in_nonparm_insns = 1;

      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	{
	  /* Record what interesting things happen to our parameters. */
	  note_stores (PATTERN (insn), note_modified_parmregs, NULL);

	  /* If this is a CALL_PLACEHOLDER insn then we need to look into the
	     three attached sequences: normal call, sibling call and tail
	     recursion. */
	  if (GET_CODE (insn) == CALL_INSN
	      && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
	    {
	      int i;

	      for (i = 0; i < 3; i++)
		save_parm_insns (XEXP (PATTERN (insn), i),
				 first_nonparm_insn);
	    }
	}
    }
}

/* Note whether a parameter is modified or not. */

@@ -577,13 +613,11 @@ expand_inline_function (fndecl, parms, target, ignore, type,
		   : parm_insns);
  tree *arg_trees;
  rtx *arg_vals;
  rtx insn;
  int max_regno;
  register int i;
  int min_labelno = inl_f->emit->x_first_label_num;
  int max_labelno = inl_f->inl_max_label_num;
  int nargs;
  rtx local_return_label = 0;
  rtx loc;
  rtx stack_save = 0;
  rtx temp;
@@ -1089,7 +1123,100 @@ expand_inline_function (fndecl, parms, target, ignore, type,
  if (inl_f->calls_alloca)
    emit_stack_save (SAVE_BLOCK, &stack_save, NULL_RTX);

  /* Now copy the insns one by one.  Do this in two passes, first the insns and
  /* Now copy the insns one by one. */
  copy_insn_list (insns, map, static_chain_value);

  /* Restore the stack pointer if we saved it above. */
  if (inl_f->calls_alloca)
    emit_stack_restore (SAVE_BLOCK, stack_save, NULL_RTX);

  if (! cfun->x_whole_function_mode_p)
    /* In statement-at-a-time mode, we just tell the front-end to add
       this block to the list of blocks at this binding level.  We
       can't do it the way it's done for function-at-a-time mode the
       superblocks have not been created yet. */
    insert_block (block);
  else
    {
      BLOCK_CHAIN (block)
	= BLOCK_CHAIN (DECL_INITIAL (current_function_decl));
      BLOCK_CHAIN (DECL_INITIAL (current_function_decl)) = block;
    }

  /* End the scope containing the copied formal parameter variables
     and copied LABEL_DECLs.  We pass NULL_TREE for the variables list
     here so that expand_end_bindings will not check for unused
     variables.  That's already been checked for when the inlined
     function was defined. */
  expand_end_bindings (NULL_TREE, 1, 1);

  /* Must mark the line number note after inlined functions as a repeat, so
     that the test coverage code can avoid counting the call twice.  This
     just tells the code to ignore the immediately following line note, since
     there already exists a copy of this note before the expanded inline call.
     This line number note is still needed for debugging though, so we can't
     delete it. */
  if (flag_test_coverage)
    emit_note (0, NOTE_REPEATED_LINE_NUMBER);

  emit_line_note (input_filename, lineno);

  /* If the function returns a BLKmode object in a register, copy it
     out of the temp register into a BLKmode memory object. */
  if (target
      && TYPE_MODE (TREE_TYPE (TREE_TYPE (fndecl))) == BLKmode
      && ! aggregate_value_p (TREE_TYPE (TREE_TYPE (fndecl))))
    target = copy_blkmode_from_reg (0, target, TREE_TYPE (TREE_TYPE (fndecl)));

  if (structure_value_addr)
    {
      target = gen_rtx_MEM (TYPE_MODE (type),
			    memory_address (TYPE_MODE (type),
					    structure_value_addr));
      MEM_SET_IN_STRUCT_P (target, 1);
    }

  /* Make sure we free the things we explicitly allocated with xmalloc. */
  if (real_label_map)
    free (real_label_map);
  VARRAY_FREE (map->const_equiv_varray);
  free (map->reg_map);
  VARRAY_FREE (map->block_map);
  free (map->insn_map);
  free (map);
  free (arg_vals);
  free (arg_trees);

  inlining = inlining_previous;

  return target;
}

/* Make copies of each insn in the given list using the mapping
   computed in expand_inline_function.  This function may call itself for
   insns containing sequences.

   Copying is done in two passes, first the insns and then their REG_NOTES,
   just like save_for_inline.

   If static_chain_value is non-zero, it represents the context-pointer
   register for the function. */

static void
copy_insn_list (insns, map, static_chain_value)
     rtx insns;
     struct inline_remap *map;
     rtx static_chain_value;
{
  register int i;
  rtx insn;
  rtx temp;
  rtx local_return_label = NULL_RTX;
#ifdef HAVE_cc0
  rtx cc0_insn = 0;
#endif

  /* Copy the insns one by one.  Do this in two passes, first the insns and
     then their REG_NOTES, just like save_for_inline. */

  /* This loop is very similar to the loop in copy_loop_body in unroll.c. */

@@ -1283,11 +1410,50 @@ expand_inline_function (fndecl, parms, target, ignore, type,
	  break;

	case CALL_INSN:
	  /* If this is a CALL_PLACEHOLDER insn then we need to copy the
	     three attached sequences: normal call, sibling call and tail
	     recursion. */
	  if (GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
	    {
	      rtx sequence[3];
	      rtx tail_label;

	      for (i = 0; i < 3; i++)
		{
		  rtx seq;

		  sequence[i] = NULL_RTX;
		  seq = XEXP (PATTERN (insn), i);
		  if (seq)
		    {
		      start_sequence ();
		      copy_insn_list (seq, map, static_chain_value);
		      sequence[i] = get_insns ();
		      end_sequence ();
		    }
		}

	      /* Find the new tail recursion label.
		 It will already be substituted into sequence[2]. */
	      tail_label = copy_rtx_and_substitute (XEXP (PATTERN (insn), 3),
						    map, 0);

	      copy = emit_call_insn (gen_rtx_CALL_PLACEHOLDER (VOIDmode,
							       sequence[0],
							       sequence[1],
							       sequence[2],
							       tail_label));
	      break;
	    }

	  pattern = copy_rtx_and_substitute (PATTERN (insn), map, 0);
	  copy = emit_call_insn (pattern);

	  SIBLING_CALL_P (copy) = SIBLING_CALL_P (insn);

	  /* Because the USAGE information potentially contains objects other
	     than hard registers, we need to copy it. */

	  CALL_INSN_FUNCTION_USAGE (copy)
	    = copy_rtx_and_substitute (CALL_INSN_FUNCTION_USAGE (insn),
				       map, 0);
@@ -1299,7 +1465,7 @@ expand_inline_function (fndecl, parms, target, ignore, type,
#endif
	  try_constants (copy, map);

	  /* Be lazy and assume CALL_INSNs clobber all hard registers. */
	  /* Be lazy and assume CALL_INSNs clobber all hard registers. */
	  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	    VARRAY_CONST_EQUIV (map->const_equiv_varray, i).rtx = 0;
	  break;
@@ -1316,14 +1482,23 @@ expand_inline_function (fndecl, parms, target, ignore, type,
	  break;

	case NOTE:
	  /* It is important to discard function-end and function-beg notes,
	     so we have only one of each in the current function.
	     Also, NOTE_INSN_DELETED notes aren't useful (save_for_inline
	  /* NOTE_INSN_FUNCTION_END and NOTE_INSN_FUNCTION_BEG are
	     discarded because it is important to have only one of
	     each in the current function.

	     NOTE_INSN_DELETED notes aren't useful (save_for_inline
	     deleted these in the copy used for continuing compilation,
	     not the copy used for inlining). */
	     not the copy used for inlining).

	     NOTE_INSN_BASIC_BLOCK is discarded because the saved bb
	     pointer (which will soon be dangling) confuses flow's
	     attempts to preserve bb structures during the compilation
	     of a function. */

	  if (NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END
	      && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_BEG
	      && NOTE_LINE_NUMBER (insn) != NOTE_INSN_DELETED)
	      && NOTE_LINE_NUMBER (insn) != NOTE_INSN_DELETED
	      && NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK)
	    {
	      copy = emit_note (NOTE_SOURCE_FILE (insn),
				NOTE_LINE_NUMBER (insn));
@@ -1403,71 +1578,6 @@ expand_inline_function (fndecl, parms, target, ignore, type,

  if (local_return_label)
    emit_label (local_return_label);

  /* Restore the stack pointer if we saved it above. */
  if (inl_f->calls_alloca)
    emit_stack_restore (SAVE_BLOCK, stack_save, NULL_RTX);

  if (! cfun->x_whole_function_mode_p)
    /* In statement-at-a-time mode, we just tell the front-end to add
       this block to the list of blocks at this binding level.  We
       can't do it the way it's done for function-at-a-time mode the
       superblocks have not been created yet. */
    insert_block (block);
  else
    {
      BLOCK_CHAIN (block)
	= BLOCK_CHAIN (DECL_INITIAL (current_function_decl));
      BLOCK_CHAIN (DECL_INITIAL (current_function_decl)) = block;
    }

  /* End the scope containing the copied formal parameter variables
     and copied LABEL_DECLs.  We pass NULL_TREE for the variables list
     here so that expand_end_bindings will not check for unused
     variables.  That's already been checked for when the inlined
     function was defined. */
  expand_end_bindings (NULL_TREE, 1, 1);

  /* Must mark the line number note after inlined functions as a repeat, so
     that the test coverage code can avoid counting the call twice.  This
     just tells the code to ignore the immediately following line note, since
     there already exists a copy of this note before the expanded inline call.
     This line number note is still needed for debugging though, so we can't
     delete it. */
  if (flag_test_coverage)
    emit_note (0, NOTE_REPEATED_LINE_NUMBER);

  emit_line_note (input_filename, lineno);

  /* If the function returns a BLKmode object in a register, copy it
     out of the temp register into a BLKmode memory object. */
  if (target
      && TYPE_MODE (TREE_TYPE (TREE_TYPE (fndecl))) == BLKmode
      && ! aggregate_value_p (TREE_TYPE (TREE_TYPE (fndecl))))
    target = copy_blkmode_from_reg (0, target, TREE_TYPE (TREE_TYPE (fndecl)));

  if (structure_value_addr)
    {
      target = gen_rtx_MEM (TYPE_MODE (type),
			    memory_address (TYPE_MODE (type),
					    structure_value_addr));
      MEM_SET_IN_STRUCT_P (target, 1);
    }

  /* Make sure we free the things we explicitly allocated with xmalloc. */
  if (real_label_map)
    free (real_label_map);
  VARRAY_FREE (map->const_equiv_varray);
  free (map->reg_map);
  VARRAY_FREE (map->block_map);
  free (map->insn_map);
  free (map);
  free (arg_vals);
  free (arg_trees);

  inlining = inlining_previous;

  return target;
}

/* Given a chain of PARM_DECLs, ARGS, copy each decl into a VAR_DECL,
@@ -1790,6 +1900,13 @@ copy_rtx_and_substitute (orig, map, for_lhs)
	= LABEL_PRESERVE_P (orig);
      return get_label_from_map (map, CODE_LABEL_NUMBER (orig));

      /* We need to handle "deleted" labels that appear in the DECL_RTL
	 of a LABEL_DECL. */
    case NOTE:
      if (NOTE_LINE_NUMBER (orig) == NOTE_INSN_DELETED_LABEL)
	return map->insn_map[INSN_UID (orig)];
      break;

    case LABEL_REF:
      copy
	= gen_rtx_LABEL_REF
@@ -2348,6 +2465,7 @@ subst_constants (loc, insn, map, memonly)
	case 'i':
	case 's':
	case 'w':
	case 'n':
	case 't':
	  break;
gcc/jump.c (40 changed lines)
@@ -125,13 +125,13 @@ static void delete_from_jump_chain PARAMS ((rtx));
static int delete_labelref_insn PARAMS ((rtx, rtx, int));
static void mark_modified_reg PARAMS ((rtx, rtx, void *));
static void redirect_tablejump PARAMS ((rtx, rtx));
static void jump_optimize_1 PARAMS ((rtx, int, int, int, int));
static void jump_optimize_1 PARAMS ((rtx, int, int, int, int, int));
#if ! defined(HAVE_cc0) && ! defined(HAVE_conditional_arithmetic)
static rtx find_insert_position PARAMS ((rtx, rtx));
#endif
static int returnjump_p_1 PARAMS ((rtx *, void *));
static void delete_prior_computation PARAMS ((rtx, rtx));

/* Main external entry point into the jump optimizer.  See comments before
   jump_optimize_1 for descriptions of the arguments. */
void
@@ -141,7 +141,7 @@ jump_optimize (f, cross_jump, noop_moves, after_regscan)
     int noop_moves;
     int after_regscan;
{
  jump_optimize_1 (f, cross_jump, noop_moves, after_regscan, 0);
  jump_optimize_1 (f, cross_jump, noop_moves, after_regscan, 0, 0);
}

/* Alternate entry into the jump optimizer.  This entry point only rebuilds
@@ -151,9 +151,16 @@ void
rebuild_jump_labels (f)
     rtx f;
{
  jump_optimize_1 (f, 0, 0, 0, 1);
  jump_optimize_1 (f, 0, 0, 0, 1, 0);
}

/* Alternate entry into the jump optimizer.  Do only trivial optimizations. */
void
jump_optimize_minimal (f)
     rtx f;
{
  jump_optimize_1 (f, 0, 0, 0, 0, 1);
}

/* Delete no-op jumps and optimize jumps to jumps
   and jumps around jumps.
@@ -175,15 +182,29 @@ rebuild_jump_labels (f)
   just determine whether control drops off the end of the function.
   This case occurs when we have -W and not -O.
   It works because `delete_insn' checks the value of `optimize'
   and refrains from actually deleting when that is 0. */
   and refrains from actually deleting when that is 0.

   If MINIMAL is nonzero, then we only perform trivial optimizations:

   * Removal of unreachable code after BARRIERs.
   * Removal of unreferenced CODE_LABELs.
   * Removal of a jump to the next instruction.
   * Removal of a conditional jump followed by an unconditional jump
     to the same target as the conditional jump.
   * Simplify a conditional jump around an unconditional jump.
   * Simplify a jump to a jump.
   * Delete extraneous line number notes.
   */

static void
jump_optimize_1 (f, cross_jump, noop_moves, after_regscan, mark_labels_only)
jump_optimize_1 (f, cross_jump, noop_moves, after_regscan,
		 mark_labels_only, minimal)
     rtx f;
     int cross_jump;
     int noop_moves;
     int after_regscan;
     int mark_labels_only;
     int minimal;
{
  register rtx insn, next;
  int changed;
@@ -230,7 +251,8 @@ jump_optimize_1 (f, cross_jump, noop_moves, after_regscan, mark_labels_only)
  if (mark_labels_only)
    goto end;

  exception_optimize ();
  if (! minimal)
    exception_optimize ();

  last_insn = delete_unreferenced_labels (f);

@@ -320,7 +342,7 @@ jump_optimize_1 (f, cross_jump, noop_moves, after_regscan, mark_labels_only)
      if (nlabel != JUMP_LABEL (insn))
	changed |= redirect_jump (insn, nlabel);

      if (! optimize)
      if (! optimize || ! minimal)
	continue;

      /* If a dispatch table always goes to the same place,
@@ -2135,7 +2157,7 @@ jump_optimize_1 (f, cross_jump, noop_moves, after_regscan, mark_labels_only)
     not be cleared.  This is especially true for the case where we
     delete the NOTE_FUNCTION_END note.  CAN_REACH_END is cleared by
     the front-end before compiling each function. */
  if (calculate_can_reach_end (last_insn, optimize != 0))
  if (! minimal && calculate_can_reach_end (last_insn, optimize != 0))
    can_reach_end = 1;

end:
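The new entry point exists because the sibcall pass runs before flow analysis, when the heavier jump optimizations would be unsafe. A one-line usage sketch of the exported function (the toplev.c hunk that actually calls it is not shown on this page, so treat the placement as an assumption):

/* After optimize_sibling_and_tail_recursive_calls (), do only the
   trivial cleanups listed above.  */
jump_optimize_minimal (get_insns ());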
gcc/rtl.c (12 changed lines)
@@ -406,14 +406,12 @@ copy_rtx (orig)
     walks over the RTL. */
  copy->used = 0;

  /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */
  /* We do not copy FRAME_RELATED for INSNs. */
  if (GET_RTX_CLASS (code) == 'i')
    {
      copy->jump = 0;
      copy->call = 0;
      copy->frame_related = 0;
    }

    copy->frame_related = 0;
  copy->jump = orig->jump;
  copy->call = orig->call;

  format_ptr = GET_RTX_FORMAT (GET_CODE (copy));

  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
gcc/rtl.def

@@ -1,7 +1,7 @@
/* This file contains the definitions and documentation for the
   Register Transfer Expressions (rtx's) that make up the
   Register Transfer Language (rtl) used in the Back End of the GNU compiler.
   Copyright (C) 1987, 88, 92, 94, 95, 97, 98, 1999
   Copyright (C) 1987, 88, 92, 94, 95, 97, 98, 1999, 2000
   Free Software Foundation, Inc.

This file is part of GNU CC.
@@ -880,7 +880,10 @@ DEF_RTL_EXPR(CONSTANT_P_RTX, "constant_p_rtx", "e", 'x')
   potential tail recursive calls were found.

   The tail recursion label is needed so that we can clear LABEL_PRESERVE_P
   after we select a call method. */
   after we select a call method.

   This method of tail-call elimination is intended to be replaced by
   tree-based optimizations once front-end conversions are complete. */
DEF_RTL_EXPR(CALL_PLACEHOLDER, "call_placeholder", "uuuu", 'x')

/* The SSA phi operator.
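The "uuuu" format string means a CALL_PLACEHOLDER carries four operands: three alternative insn sequences plus the tail-recursion label. Schematically, the placeholder that expand_call emits looks something like this (hand-drawn RTL, not dumped from a real compile; the label number is invented):

(call_insn (call_placeholder
	     <sequence 0: the conventional call>
	     <sequence 1: the same call via the target's sibcall pattern, or null>
	     <sequence 2: a jump back to the tail recursion label, or null>
	     (code_label 23)))		; tail recursion label

Later, optimize_sibling_and_tail_recursive_calls picks exactly one of the three sequences via replace_call_placeholder and discards the others.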
gcc/rtl.h (22 changed lines)
@@ -119,9 +119,12 @@ typedef struct rtx_def
#else
  enum machine_mode mode : 8;
#endif
  /* LINK_COST_ZERO in an INSN_LIST. */
  /* 1 in an INSN if it can alter flow of control
     within this function.
     LINK_COST_ZERO in an INSN_LIST. */
  unsigned int jump : 1;
  /* LINK_COST_FREE in an INSN_LIST. */
  /* 1 in an INSN if it can call another function.
     LINK_COST_FREE in an INSN_LIST. */
  unsigned int call : 1;
  /* 1 in a MEM or REG if value of this expression will never change
     during the current function, even though it is not
@@ -380,6 +383,9 @@ extern void rtvec_check_failed_bounds PARAMS ((rtvec, int,
/* 1 if insn is a call to a const function. */
#define CONST_CALL_P(INSN) ((INSN)->unchanging)

/* 1 if insn (assumed to be a CALL_INSN) is a sibling call. */
#define SIBLING_CALL_P(INSN) ((INSN)->jump)

/* 1 if insn is a branch that should not unconditionally execute its
   delay slots, i.e., it is an annulled branch. */
#define INSN_ANNULLED_BRANCH_P(INSN) ((INSN)->unchanging)
@@ -1416,6 +1422,7 @@ extern int rtx_renumbered_equal_p PARAMS ((rtx, rtx));
extern int true_regnum PARAMS ((rtx));
extern int redirect_jump PARAMS ((rtx, rtx));
extern void jump_optimize PARAMS ((rtx, int, int, int));
extern void jump_optimize_minimal PARAMS ((rtx));
extern void rebuild_jump_labels PARAMS ((rtx));
extern void thread_jumps PARAMS ((rtx, int, int));
extern int redirect_exp PARAMS ((rtx *, rtx, rtx, rtx));
@@ -1513,6 +1520,7 @@ extern void record_excess_regs PARAMS ((rtx, rtx, rtx *));
extern void reposition_prologue_and_epilogue_notes PARAMS ((rtx));
extern void thread_prologue_and_epilogue_insns PARAMS ((rtx));
extern int prologue_epilogue_contains PARAMS ((rtx));
extern int sibcall_epilogue_contains PARAMS ((rtx));
extern HOST_WIDE_INT get_frame_size PARAMS ((void));
extern void preserve_rtl_expr_result PARAMS ((rtx));
extern void mark_temp_addr_taken PARAMS ((rtx));
@@ -1713,6 +1721,16 @@ extern void record_base_value PARAMS ((int, rtx, int));
extern void record_alias_subset PARAMS ((int, int));
extern rtx addr_side_effect_eval PARAMS ((rtx, int, int));

/* In sibcall.c */
typedef enum {
  sibcall_use_normal = 1,
  sibcall_use_tail_recursion,
  sibcall_use_sibcall
} sibcall_use_t;

extern void optimize_sibling_and_tail_recursive_calls PARAMS ((void));
extern void replace_call_placeholder PARAMS ((rtx, sibcall_use_t));

#ifdef STACK_REGS
extern int stack_regs_mentioned PARAMS ((rtx insn));
#endif
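A hedged sketch of how the sibcall.c entry points above fit together from a caller's point of view. Only the two extern functions, the enum, and the CALL_PLACEHOLDER test come from this commit; the loop itself and the always-normal decision are illustrative:

/* Commit every remaining CALL_PLACEHOLDER to its conventional call
   sequence.  The real decision logic lives in sibcall.c; this only
   shows the calling convention.  */
rtx insn;

for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
  if (GET_CODE (insn) == CALL_INSN
      && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
    replace_call_placeholder (insn, sibcall_use_normal);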
gcc/sibcall.c (new file, 578 lines)
@@ -0,0 +1,578 @@
/* Generic sibling call optimization support
   Copyright (C) 1999, 2000 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA. */

#include "config.h"
#include "system.h"

#include "rtl.h"
#include "regs.h"
#include "function.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "insn-config.h"
#include "recog.h"
#include "basic-block.h"
#include "output.h"
#include "except.h"

static int identify_call_return_value PARAMS ((rtx, rtx *, rtx *));
static rtx skip_copy_to_return_value PARAMS ((rtx, rtx, rtx));
static rtx skip_use_of_return_value PARAMS ((rtx, enum rtx_code));
static rtx skip_stack_adjustment PARAMS ((rtx));
static rtx skip_jump_insn PARAMS ((rtx));
static int uses_addressof PARAMS ((rtx));
static int sequence_uses_addressof PARAMS ((rtx));
static void purge_reg_equiv_notes PARAMS ((void));

/* Examine a CALL_PLACEHOLDER pattern and determine where the call's
   return value is located.  P_HARD_RETURN receives the hard register
   that the function used; P_SOFT_RETURN receives the pseudo register
   that the sequence used.  Return non-zero if the values were located. */

static int
identify_call_return_value (cp, p_hard_return, p_soft_return)
     rtx cp;
     rtx *p_hard_return, *p_soft_return;
{
  rtx insn, set, hard, soft;

  /* Search forward through the "normal" call sequence to the CALL insn. */
  insn = XEXP (cp, 0);
  while (GET_CODE (insn) != CALL_INSN)
    insn = NEXT_INSN (insn);

  /* Assume the pattern is (set (dest) (call ...)), or that the first
     member of a parallel is.  This is the hard return register used
     by the function. */
  if (GET_CODE (PATTERN (insn)) == SET
      && GET_CODE (SET_SRC (PATTERN (insn))) == CALL)
    hard = SET_DEST (PATTERN (insn));
  else if (GET_CODE (PATTERN (insn)) == PARALLEL
	   && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET
	   && GET_CODE (SET_SRC (XVECEXP (PATTERN (insn), 0, 0))) == CALL)
    hard = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
  else
    return 0;

  /* If we didn't get a single hard register (e.g. a parallel), give up. */
  if (GET_CODE (hard) != REG)
    return 0;

  /* If there's nothing after, there's no soft return value. */
  insn = NEXT_INSN (insn);
  if (! insn)
    return 0;

  /* We're looking for a source of the hard return register. */
  set = single_set (insn);
  if (! set || SET_SRC (set) != hard)
    return 0;

  soft = SET_DEST (set);
  insn = NEXT_INSN (insn);

  /* Allow this first destination to be copied to a second register,
     as might happen if the first register wasn't the particular pseudo
     we'd been expecting. */
  if (insn
      && (set = single_set (insn)) != NULL_RTX
      && SET_SRC (set) == soft)
    {
      soft = SET_DEST (set);
      insn = NEXT_INSN (insn);
    }

  /* Don't fool with anything but pseudo registers. */
  if (GET_CODE (soft) != REG || REGNO (soft) < FIRST_PSEUDO_REGISTER)
    return 0;

  /* This value must not be modified before the end of the sequence. */
  if (reg_set_between_p (soft, insn, NULL_RTX))
    return 0;

  *p_hard_return = hard;
  *p_soft_return = soft;

  return 1;
}
|
||||
|
||||
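As a hedged illustration of the insn shape this function walks (the register numbers are invented and real RTL is target-specific):

/* Illustration only -- invented register numbers; real RTL is
   target-specific:

     (call_insn (set (reg:SI 0)
                     (call (mem:QI (symbol_ref "f")) (const_int 0))))
     (insn (set (reg:SI 27) (reg:SI 0)))

   Here HARD is (reg:SI 0) and SOFT is pseudo 27, which must then
   survive unmodified to the end of the sequence.  */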
/* If the first real insn after ORIG_INSN copies to this function's
   return value from RETVAL, then return the insn which performs the
   copy.  Otherwise return ORIG_INSN.  */

static rtx
skip_copy_to_return_value (orig_insn, hardret, softret)
     rtx orig_insn;
     rtx hardret, softret;
{
  rtx insn, set = NULL_RTX;

  insn = next_nonnote_insn (orig_insn);
  if (! insn)
    return orig_insn;

  set = single_set (insn);
  if (! set)
    return orig_insn;

  /* The destination must be the same as the called function's return
     value to ensure that any return value is put in the same place by the
     current function and the function we're calling.

     Further, the source must be the same as the pseudo into which the
     called function's return value was copied.  Otherwise we're returning
     some other value.  */

  if (SET_DEST (set) == current_function_return_rtx
      && REG_P (SET_DEST (set))
      && REGNO (SET_DEST (set)) == REGNO (hardret)
      && SET_SRC (set) == softret)
    return insn;

  /* It did not look like a copy of the return value, so return the
     same insn we were passed.  */
  return orig_insn;
}
/* If the first real insn after ORIG_INSN is a CODE of this function's
   return value, return that insn.  Otherwise return ORIG_INSN.  */

static rtx
skip_use_of_return_value (orig_insn, code)
     rtx orig_insn;
     enum rtx_code code;
{
  rtx insn;

  insn = next_nonnote_insn (orig_insn);

  if (insn
      && GET_CODE (insn) == INSN
      && GET_CODE (PATTERN (insn)) == code
      && (XEXP (PATTERN (insn), 0) == current_function_return_rtx
	  || XEXP (PATTERN (insn), 0) == const0_rtx))
    return insn;

  return orig_insn;
}
/* If the first real insn after ORIG_INSN adjusts the stack pointer
   by a constant, return the insn with the stack pointer adjustment.
   Otherwise return ORIG_INSN.  */

static rtx
skip_stack_adjustment (orig_insn)
     rtx orig_insn;
{
  rtx insn, set = NULL_RTX;

  insn = next_nonnote_insn (orig_insn);

  if (insn)
    set = single_set (insn);

  /* The insn must be a single_set which adds a constant to the stack
     pointer and stores the result back into the stack pointer.  */
  if (insn
      && set
      && GET_CODE (SET_SRC (set)) == PLUS
      && XEXP (SET_SRC (set), 0) == stack_pointer_rtx
      && GET_CODE (XEXP (SET_SRC (set), 1)) == CONST_INT
      && SET_DEST (set) == stack_pointer_rtx)
    return insn;

  /* It did not look like a stack pointer adjustment, so return the
     same insn we were passed.  */
  return orig_insn;
}
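For concreteness, the insn this helper accepts looks something like the following (a sketch only; the constant and the modes vary by target):

/* Sketch only (constant and modes vary by target):
     (insn (set (reg:SI 7 sp)
                (plus:SI (reg:SI 7 sp) (const_int 16))))  */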
/* If the first real insn after ORIG_INSN is a jump, return the JUMP_INSN.
   Otherwise return ORIG_INSN.  */

static rtx
skip_jump_insn (orig_insn)
     rtx orig_insn;
{
  rtx insn;

  insn = next_nonnote_insn (orig_insn);

  if (insn
      && GET_CODE (insn) == JUMP_INSN
      && simplejump_p (insn))
    return insn;

  return orig_insn;
}
/* Scan the rtx X for ADDRESSOF expressions.  Return nonzero if an ADDRESSOF
   expression is found, else return zero.  */

static int
uses_addressof (x)
     rtx x;
{
  RTX_CODE code;
  int i, j;
  const char *fmt;

  if (x == NULL_RTX)
    return 0;

  code = GET_CODE (x);

  if (code == ADDRESSOF)
    return 1;

  /* Scan all subexpressions.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
    {
      if (*fmt == 'e')
	{
	  if (uses_addressof (XEXP (x, i)))
	    return 1;
	}
      else if (*fmt == 'E')
	{
	  for (j = 0; j < XVECLEN (x, i); j++)
	    if (uses_addressof (XVECEXP (x, i, j)))
	      return 1;
	}
    }
  return 0;
}
/* Scan the sequence of insns in SEQ to see if any have an ADDRESSOF
   rtl expression.  If an ADDRESSOF expression is found, return nonzero,
   else return zero.

   This function handles CALL_PLACEHOLDERs which contain multiple sequences
   of insns.  */

static int
sequence_uses_addressof (seq)
     rtx seq;
{
  rtx insn;

  for (insn = seq; insn; insn = NEXT_INSN (insn))
    if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
      {
	/* If this is a CALL_PLACEHOLDER, then recursively call ourselves
	   with each nonempty sequence attached to the CALL_PLACEHOLDER.  */
	if (GET_CODE (insn) == CALL_INSN
	    && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
	  {
	    if (XEXP (PATTERN (insn), 0) != NULL_RTX
		&& sequence_uses_addressof (XEXP (PATTERN (insn), 0)))
	      return 1;
	    if (XEXP (PATTERN (insn), 1) != NULL_RTX
		&& sequence_uses_addressof (XEXP (PATTERN (insn), 1)))
	      return 1;
	    if (XEXP (PATTERN (insn), 2) != NULL_RTX
		&& sequence_uses_addressof (XEXP (PATTERN (insn), 2)))
	      return 1;
	  }
	else if (uses_addressof (PATTERN (insn))
		 || (REG_NOTES (insn) && uses_addressof (REG_NOTES (insn))))
	  return 1;
      }
  return 0;
}
/* Remove all REG_EQUIV notes found in the insn chain.  */

static void
purge_reg_equiv_notes ()
{
  rtx insn;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      while (1)
	{
	  rtx note = find_reg_note (insn, REG_EQUIV, 0);
	  if (note)
	    {
	      /* Remove the note and keep looking at the notes for
		 this insn.  */
	      remove_note (insn, note);
	      continue;
	    }
	  break;
	}
    }
}
/* Replace the CALL_PLACEHOLDER with one of its children.  INSN should be
   the CALL_PLACEHOLDER insn; USE tells which child to use.  */

void
replace_call_placeholder (insn, use)
     rtx insn;
     sibcall_use_t use;
{
  if (use == sibcall_use_tail_recursion)
    emit_insns_before (XEXP (PATTERN (insn), 2), insn);
  else if (use == sibcall_use_sibcall)
    emit_insns_before (XEXP (PATTERN (insn), 1), insn);
  else if (use == sibcall_use_normal)
    emit_insns_before (XEXP (PATTERN (insn), 0), insn);
  else
    abort ();

  /* Turn off LABEL_PRESERVE_P for the tail recursion label if it
     exists.  We only had to set it long enough to keep the jump
     pass above from deleting it as unused.  */
  if (XEXP (PATTERN (insn), 3))
    LABEL_PRESERVE_P (XEXP (PATTERN (insn), 3)) = 0;

  /* "Delete" the placeholder insn.  */
  PUT_CODE (insn, NOTE);
  NOTE_SOURCE_FILE (insn) = 0;
  NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
}
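Collecting the XEXP offsets used above, the placeholder's operand layout can be summarized as follows (a sketch inferred from this function; the pattern itself is constructed in calls.c):

/* Summary (inferred from the code above; see calls.c for construction):

     XEXP (PATTERN (insn), 0)  insn chain for a normal call
     XEXP (PATTERN (insn), 1)  insn chain for a sibling call, or NULL
     XEXP (PATTERN (insn), 2)  insn chain for tail recursion, or NULL
     XEXP (PATTERN (insn), 3)  the tail recursion CODE_LABEL, or NULL  */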
/* Given a (possibly empty) set of potential sibling or tail recursion call
   sites, determine if optimization is possible.

   Potential sibling or tail recursion calls are marked with CALL_PLACEHOLDER
   insns.  The CALL_PLACEHOLDER insn holds chains of insns to implement a
   normal call, sibling call or tail recursive call.

   Replace the CALL_PLACEHOLDER with an appropriate insn chain.  */

void
optimize_sibling_and_tail_recursive_calls ()
{
  rtx insn, insns;
  basic_block alternate_exit = EXIT_BLOCK_PTR;
  int current_function_uses_addressof;
  int successful_sibling_call = 0;
  int replaced_call_placeholder = 0;
  edge e;

  insns = get_insns ();

  /* We do not perform these calls when flag_exceptions is true, so this
     is probably a NOP at the current time.  However, we may want to support
     sibling and tail recursion optimizations in the future, so let's plan
     ahead and find all the EH labels.  */
  find_exception_handler_labels ();

  /* Run a jump optimization pass to clean up the CFG.  We primarily want
     this to thread jumps so that it is obvious which blocks jump to the
     epilogue.  */
  jump_optimize_minimal (insns);

  /* We need cfg information to determine which blocks are succeeded
     only by the epilogue.  */
  find_basic_blocks (insns, max_reg_num (), 0);
  cleanup_cfg (insns);

  /* If there are no basic blocks, then there is nothing to do.  */
  if (n_basic_blocks == 0)
    return;

  /* Find the exit block.

     It is possible that we have blocks which can reach the exit block
     directly.  However, most of the time a block will jump (or fall into)
     N_BASIC_BLOCKS - 1, which in turn falls into the exit block.  */
  for (e = EXIT_BLOCK_PTR->pred;
       e && alternate_exit == EXIT_BLOCK_PTR;
       e = e->pred_next)
    {
      rtx insn;

      if (e->dest != EXIT_BLOCK_PTR || e->succ_next != NULL)
	continue;

      /* Walk forwards through the last normal block and see if it
	 does nothing except fall into the exit block.  */
      for (insn = BLOCK_HEAD (n_basic_blocks - 1);
	   insn;
	   insn = NEXT_INSN (insn))
	{
	  /* This should only happen once, at the start of this block.  */
	  if (GET_CODE (insn) == CODE_LABEL)
	    continue;

	  if (GET_CODE (insn) == NOTE)
	    continue;

	  if (GET_CODE (insn) == INSN
	      && GET_CODE (PATTERN (insn)) == USE)
	    continue;

	  break;
	}

      /* If INSN is zero, then the search walked all the way through the
	 block without hitting anything interesting.  This block is a
	 valid alternate exit block.  */
      if (insn == NULL)
	alternate_exit = e->src;
    }

  /* If the function uses ADDRESSOF, we can't (easily) determine
     at this point if the value will end up on the stack.  */
  current_function_uses_addressof = sequence_uses_addressof (insns);

  /* Walk the insn chain and find any CALL_PLACEHOLDER insns.  We need to
     select one of the insn sequences attached to each CALL_PLACEHOLDER.

     The different sequences represent different ways to implement the call,
     i.e., tail recursion, sibling call or normal call.

     Since we do not create nested CALL_PLACEHOLDERs, the scan
     continues with the insn that was after a replaced CALL_PLACEHOLDER;
     we don't rescan the replacement insns.  */
  for (insn = insns; insn; insn = NEXT_INSN (insn))
    {
      if (GET_CODE (insn) == CALL_INSN
	  && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
	{
	  int sibcall = (XEXP (PATTERN (insn), 1) != NULL_RTX);
	  int tailrecursion = (XEXP (PATTERN (insn), 2) != NULL_RTX);
	  basic_block succ_block, call_block;
	  rtx temp, hardret, softret;

	  /* We must be careful with stack slots which are live at
	     potential optimization sites.

	     ?!? This test is overly conservative and will be replaced.  */
	  if (frame_offset)
	    goto failure;

	  /* alloca (until we have stack slot life analysis) inhibits
	     sibling call optimizations, but not tail recursion.

	     Similarly if we have ADDRESSOF expressions.

	     Similarly if we use varargs or stdarg since they implicitly
	     may take the address of an argument.  */
	  if (current_function_calls_alloca || current_function_uses_addressof
	      || current_function_varargs || current_function_stdarg)
	    sibcall = 0;

	  call_block = BLOCK_FOR_INSN (insn);

	  /* If the block has more than one successor, then we can not
	     perform sibcall or tail recursion optimizations.  */
	  if (call_block->succ == NULL
	      || call_block->succ->succ_next != NULL)
	    goto failure;

	  /* If the single successor is not the exit block, then we can not
	     perform sibcall or tail recursion optimizations.

	     Note that this test combined with the previous is sufficient
	     to prevent tail call optimization in the presence of active
	     exception handlers.  */
	  succ_block = call_block->succ->dest;
	  if (succ_block != EXIT_BLOCK_PTR && succ_block != alternate_exit)
	    goto failure;

	  /* If the call was the end of the block, then we're OK.  */
	  temp = insn;
	  if (temp == call_block->end)
	    goto success;

	  /* Skip over copying from the call's return value pseudo into
	     this function's hard return register.  */
	  if (identify_call_return_value (PATTERN (insn), &hardret, &softret))
	    {
	      temp = skip_copy_to_return_value (temp, hardret, softret);
	      if (temp == call_block->end)
		goto success;
	    }

	  /* Skip any stack adjustment.  */
	  temp = skip_stack_adjustment (temp);
	  if (temp == call_block->end)
	    goto success;

	  /* Skip over a CLOBBER of the return value (as a hard reg).  */
	  temp = skip_use_of_return_value (temp, CLOBBER);
	  if (temp == call_block->end)
	    goto success;

	  /* Skip over a USE of the return value (as a hard reg).  */
	  temp = skip_use_of_return_value (temp, USE);
	  if (temp == call_block->end)
	    goto success;

	  /* Skip over the JUMP_INSN at the end of the block.  */
	  temp = skip_jump_insn (temp);
	  if (GET_CODE (temp) == NOTE)
	    temp = next_nonnote_insn (temp);
	  if (temp == call_block->end)
	    goto success;

	  /* There are operations at the end of the block which we must
	     execute after returning from the function call.  So this call
	     can not be optimized.  */
	failure:
	  sibcall = 0, tailrecursion = 0;
	success:

	  /* Select a set of insns to implement the call and emit them.
	     Tail recursion is the most efficient, so select it over
	     a tail/sibling call.  */
	  if (sibcall)
	    successful_sibling_call = 1;
	  replaced_call_placeholder = 1;
	  replace_call_placeholder (insn,
				    tailrecursion != 0
				      ? sibcall_use_tail_recursion
				      : sibcall != 0
					? sibcall_use_sibcall
					: sibcall_use_normal);
	}
    }

  /* A sibling call sequence invalidates any REG_EQUIV notes made for
     this function's incoming arguments.

     At the start of RTL generation we know the only REG_EQUIV notes
     in the rtl chain are those for incoming arguments, so we can safely
     flush any REG_EQUIV note.

     This is (slight) overkill.  We could keep track of the highest argument
     we clobber and be more selective in removing notes, but it does not
     seem to be worth the effort.  */
  if (successful_sibling_call)
    purge_reg_equiv_notes ();

  /* There may have been NOTE_INSN_BLOCK_{BEGIN,END} notes in the
     CALL_PLACEHOLDER alternatives that we didn't emit.  Rebuild the
     lexical block tree to correspond to the notes that still exist.  */
  if (replaced_call_placeholder)
    unroll_block_trees ();

  /* This information will be invalid after inline expansion.  Kill it now.  */
  free_basic_block_vars (0);
}
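To make the failure conditions above concrete, here is a small, self-contained C example (illustrative only; the names are invented) of calls that the checks force to remain normal calls:

/* Illustrative only: three calls that fail the checks above.  */

extern int g (int *);
extern int h (int);

int
not_sibcall_1 (int n)
{
  return h (n) + 1;		/* work remains after the call returns */
}

int
not_sibcall_2 (void)
{
  int local = 42;
  return g (&local);		/* a stack slot's address escapes */
}

int
not_sibcall_3 (int n)
{
  char *p = __builtin_alloca (n);	/* alloca inhibits sibling calls */
  p[0] = 1;
  return h (p[0]);
}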
gcc/toplev.c
@ -2986,6 +2986,15 @@ rest_of_compilation (decl)
      goto exit_rest_of_compilation;
    }

  /* We may have potential sibling or tail recursion sites.  Select one
     (of possibly multiple) methods of performing the call.  */
  init_EXPR_INSN_LIST_cache ();
  if (optimize)
    optimize_sibling_and_tail_recursive_calls ();

  if (ggc_p)
    ggc_collect ();

  /* Initialize some variables used by the optimizers.  */
  init_function_for_compilation ();
@ -3030,8 +3039,6 @@ rest_of_compilation (decl)

  unshare_all_rtl (current_function_decl, insns);

  init_EXPR_INSN_LIST_cache ();

#ifdef SETJMP_VIA_SAVE_AREA
  /* This must be performed before virtual register instantiation.  */
  if (current_function_calls_alloca)
gcc/tree.c
@ -285,6 +285,9 @@ static void mark_type_hash PARAMS ((void *));
void (*lang_unsave) PARAMS ((tree *));
void (*lang_unsave_expr_now) PARAMS ((tree));

/* If non-null, a language specific version of safe_for_unsave.  */
int (*lang_safe_for_unsave) PARAMS ((tree));

/* The string used as a placeholder instead of a source file name for
   built-in tree nodes.  The variable, which is dynamically allocated,
   should be used; the macro is only used to initialize it.  */
@ -2666,6 +2669,82 @@ unsave_expr_now (expr)

  return expr;
}

/* Return nonzero if it is safe to unsave EXPR, else return zero.
   It is not safe to unsave EXPR if it contains any embedded RTL_EXPRs.  */

int
safe_for_unsave (expr)
     tree expr;
{
  enum tree_code code;
  register int i;
  int first_rtl;

  if (expr == NULL_TREE)
    return 1;

  code = TREE_CODE (expr);
  first_rtl = first_rtl_op (code);
  switch (code)
    {
    case RTL_EXPR:
      return 0;

    case CALL_EXPR:
      if (TREE_OPERAND (expr, 1)
	  && TREE_CODE (TREE_OPERAND (expr, 1)) == TREE_LIST)
	{
	  tree exp = TREE_OPERAND (expr, 1);
	  while (exp)
	    {
	      if (! safe_for_unsave (TREE_VALUE (exp)))
		return 0;
	      exp = TREE_CHAIN (exp);
	    }
	}
      break;

    default:
      if (lang_safe_for_unsave)
	switch ((*lang_safe_for_unsave) (expr))
	  {
	  case -1:
	    break;
	  case 0:
	    return 0;
	  case 1:
	    return 1;
	  default:
	    abort ();
	  }
      break;
    }

  switch (TREE_CODE_CLASS (code))
    {
    case 'c':  /* a constant */
    case 't':  /* a type node */
    case 'x':  /* something random, like an identifier or an ERROR_MARK.  */
    case 'd':  /* A decl node */
    case 'b':  /* A block node */
      return 1;

    case 'e':  /* an expression */
    case 'r':  /* a reference */
    case 's':  /* an expression with side effects */
    case '<':  /* a comparison expression */
    case '2':  /* a binary arithmetic expression */
    case '1':  /* a unary arithmetic expression */
      for (i = first_rtl - 1; i >= 0; i--)
	if (! safe_for_unsave (TREE_OPERAND (expr, i)))
	  return 0;
      return 1;

    default:
      return 0;
    }
}
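The lang_safe_for_unsave switch above fixes the hook's contract: -1 means "no opinion" (fall through to the generic checks), 0 means unsafe, 1 means safe. A hypothetical front-end stub (illustrative only; the name is invented) might look like:

/* Hypothetical front-end hook -- illustrative only.  Returning -1
   defers every decision to the generic code in tree.c.  A front end
   would install it with:  lang_safe_for_unsave = example_safe_for_unsave;  */
static int
example_safe_for_unsave (expr)
     tree expr ATTRIBUTE_UNUSED;
{
  return -1;
}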
/* Return 1 if EXP contains a PLACEHOLDER_EXPR; i.e., if it represents a size
   or offset that depends on a field within a record.  */
gcc/tree.h
@ -1983,6 +1983,11 @@ extern int first_rtl_op PARAMS ((enum tree_code));

extern tree unsave_expr PARAMS ((tree));

/* safe_for_reeval_p (EXP) returns nonzero if it is possible to
   expand EXP multiple times.  */

extern int safe_for_reeval_p PARAMS ((tree));

/* Reset EXP in place so that it can be expanded again.  Does not
   recurse into subtrees.  */

@ -2000,6 +2005,9 @@ extern tree unsave_expr_now PARAMS ((tree));

extern void (*lang_unsave) PARAMS ((tree *));
extern void (*lang_unsave_expr_now) PARAMS ((tree));

/* If non-null, a language specific version of safe_for_unsave.  */
extern int (*lang_safe_for_unsave) PARAMS ((tree));

/* Return 1 if EXP contains a PLACEHOLDER_EXPR; i.e., if it represents a size
   or offset that depends on a field within a record.