Revert Jan Hubicka's patch of Fri Jul 13 14:46:21 CEST 2001.
From-SVN: r43986
commit 998d20d22a (parent 4a9d51090a)

@@ -1,3 +1,7 @@
2001-07-13 Geoffrey Keating <geoffk@redhat.com>

Revert Jan Hubicka's patch of Fri Jul 13 14:46:21 CEST 2001.

2001-07-13 David Edelsohn <edelsohn@gnu.org>

* combine.c (try_combine): Ensure const_int pow2 is positive.
@@ -2908,6 +2908,9 @@ Dump after sibling call optimizations, to @file{@var{file}.01.sibling}.
@item j
@opindex dj
Dump after the first jump optimization, to @file{@var{file}.03.jump}.
@item J
@opindex dJ
Dump after the last jump optimization, to @file{@var{file}.29.jump2}.
@item k
@opindex dk
Dump after conversion from registers to stack, to @file{@var{file}.32.stack}.
@@ -2518,7 +2518,7 @@ try_split (pat, trial, last)
for (i = XVECLEN (seq, 0) - 1; i >= 0; i--)
if (GET_CODE (XVECEXP (seq, 0, i)) == JUMP_INSN)
mark_jump_label (PATTERN (XVECEXP (seq, 0, i)),
XVECEXP (seq, 0, i), 0);
XVECEXP (seq, 0, i), 0, 0);

/* If we are splitting a CALL_INSN, look for the CALL_INSN
in SEQ and copy our CALL_INSN_FUNCTION_USAGE to it. */
@@ -798,7 +798,7 @@ find_sub_basic_blocks (bb)
bb = BASIC_BLOCK (i);
if (GET_CODE (bb->end) == JUMP_INSN)
{
mark_jump_label (PATTERN (bb->end), bb->end, 0);
mark_jump_label (PATTERN (bb->end), bb->end, 0, 0);
make_label_edge (NULL, bb, JUMP_LABEL (bb->end), 0);
}
insn = NEXT_INSN (insn);
505 gcc/jump.c
@@ -36,6 +36,11 @@ Boston, MA 02111-1307, USA. */
formerly used them. The JUMP_LABEL info is sometimes looked
at by later passes.

Optionally, cross-jumping can be done. Currently it is done
only the last time (when after reload and before final).
In fact, the code for cross-jumping now assumes that register
allocation has been done, since it uses `rtx_renumbered_equal_p'.

Jump optimization is done after cse when cse's constant-propagation
causes jumps to become unconditional or to be deleted.

@@ -91,12 +96,23 @@ static rtx *jump_chain;

static int max_jump_chain;

/* Indicates whether death notes are significant in cross jump analysis.
Normally they are not significant, because of A and B jump to C,
and R dies in A, it must die in B. But this might not be true after
stack register conversion, and we must compare death notes in that
case. */

static int cross_jump_death_matters = 0;

static int init_label_info PARAMS ((rtx));
static void delete_barrier_successors PARAMS ((rtx));
static void mark_all_labels PARAMS ((rtx));
static void mark_all_labels PARAMS ((rtx, int));
static rtx delete_unreferenced_labels PARAMS ((rtx));
static void delete_noop_moves PARAMS ((rtx));
static int duplicate_loop_exit_test PARAMS ((rtx));
static void find_cross_jump PARAMS ((rtx, rtx, int, rtx *, rtx *));
static void do_cross_jump PARAMS ((rtx, rtx, rtx));
static int jump_back_p PARAMS ((rtx, rtx));
static int tension_vector_labels PARAMS ((rtx, int));
static void delete_computation PARAMS ((rtx));
static void redirect_exp_1 PARAMS ((rtx *, rtx, rtx, rtx));
@@ -107,19 +123,20 @@ static void delete_from_jump_chain PARAMS ((rtx));
static int delete_labelref_insn PARAMS ((rtx, rtx, int));
static void mark_modified_reg PARAMS ((rtx, rtx, void *));
static void redirect_tablejump PARAMS ((rtx, rtx));
static void jump_optimize_1 PARAMS ((rtx, int, int, int, int));
static void jump_optimize_1 PARAMS ((rtx, int, int, int, int, int));
static int returnjump_p_1 PARAMS ((rtx *, void *));
static void delete_prior_computation PARAMS ((rtx, rtx));

/* Main external entry point into the jump optimizer. See comments before
jump_optimize_1 for descriptions of the arguments. */
void
jump_optimize (f, noop_moves, after_regscan)
jump_optimize (f, cross_jump, noop_moves, after_regscan)
rtx f;
int cross_jump;
int noop_moves;
int after_regscan;
{
jump_optimize_1 (f, noop_moves, after_regscan, 0, 0);
jump_optimize_1 (f, cross_jump, noop_moves, after_regscan, 0, 0);
}

/* Alternate entry into the jump optimizer. This entry point only rebuilds
@@ -129,7 +146,7 @@ void
rebuild_jump_labels (f)
rtx f;
{
jump_optimize_1 (f, 0, 0, 1, 0);
jump_optimize_1 (f, 0, 0, 0, 1, 0);
}

/* Alternate entry into the jump optimizer. Do only trivial optimizations. */
@@ -138,13 +155,17 @@ void
jump_optimize_minimal (f)
rtx f;
{
jump_optimize_1 (f, 0, 0, 0, 1);
jump_optimize_1 (f, 0, 0, 0, 0, 1);
}

/* Delete no-op jumps and optimize jumps to jumps
and jumps around jumps.
Delete unused labels and unreachable code.

If CROSS_JUMP is 1, detect matching code
before a jump and its destination and unify them.
If CROSS_JUMP is 2, do cross-jumping, but pay attention to death notes.

If NOOP_MOVES is nonzero, delete no-op move insns.

If AFTER_REGSCAN is nonzero, then this jump pass is being run immediately
@@ -172,9 +193,10 @@ jump_optimize_minimal (f)
*/

static void
jump_optimize_1 (f, noop_moves, after_regscan,
jump_optimize_1 (f, cross_jump, noop_moves, after_regscan,
mark_labels_only, minimal)
rtx f;
int cross_jump;
int noop_moves;
int after_regscan;
int mark_labels_only;
@@ -190,6 +212,7 @@ jump_optimize_1 (f, noop_moves, after_regscan,
enum rtx_code reversed_code;
#endif

cross_jump_death_matters = (cross_jump == 2);
max_uid = init_label_info (f) + 1;

/* Leave some extra room for labels and duplicate exit test insns
@@ -197,7 +220,7 @@ jump_optimize_1 (f, noop_moves, after_regscan,
max_jump_chain = max_uid * 14 / 10;
jump_chain = (rtx *) xcalloc (max_jump_chain, sizeof (rtx));

mark_all_labels (f);
mark_all_labels (f, cross_jump);

/* Keep track of labels used from static data; we don't track them
closely enough to delete them here, so make sure their reference
@@ -551,6 +574,125 @@ jump_optimize_1 (f, noop_moves, after_regscan,
}
}
#endif
else
{
/* Now that the jump has been tensioned,
try cross jumping: check for identical code
before the jump and before its target label. */

/* First, cross jumping of conditional jumps: */

if (cross_jump && condjump_p (insn))
{
rtx newjpos, newlpos;
rtx x = prev_real_insn (JUMP_LABEL (insn));

/* A conditional jump may be crossjumped
only if the place it jumps to follows
an opposing jump that comes back here. */

if (x != 0 && ! jump_back_p (x, insn))
/* We have no opposing jump;
cannot cross jump this insn. */
x = 0;

newjpos = 0;
/* TARGET is nonzero if it is ok to cross jump
to code before TARGET. If so, see if matches. */
if (x != 0)
find_cross_jump (insn, x, 2,
&newjpos, &newlpos);

if (newjpos != 0)
{
do_cross_jump (insn, newjpos, newlpos);
/* Make the old conditional jump
into an unconditional one. */
PATTERN (insn) = gen_jump (JUMP_LABEL (insn));
INSN_CODE (insn) = -1;
emit_barrier_after (insn);
/* Add to jump_chain unless this is a new label
whose UID is too large. */
if (INSN_UID (JUMP_LABEL (insn)) < max_jump_chain)
{
jump_chain[INSN_UID (insn)]
= jump_chain[INSN_UID (JUMP_LABEL (insn))];
jump_chain[INSN_UID (JUMP_LABEL (insn))] = insn;
}
changed = 1;
next = insn;
}
}

/* Cross jumping of unconditional jumps:
a few differences. */

if (cross_jump && simplejump_p (insn))
{
rtx newjpos, newlpos;
rtx target;

newjpos = 0;

/* TARGET is nonzero if it is ok to cross jump
to code before TARGET. If so, see if matches. */
find_cross_jump (insn, JUMP_LABEL (insn), 1,
&newjpos, &newlpos);

/* If cannot cross jump to code before the label,
see if we can cross jump to another jump to
the same label. */
/* Try each other jump to this label. */
if (INSN_UID (JUMP_LABEL (insn)) < max_uid)
for (target = jump_chain[INSN_UID (JUMP_LABEL (insn))];
target != 0 && newjpos == 0;
target = jump_chain[INSN_UID (target)])
if (target != insn
&& JUMP_LABEL (target) == JUMP_LABEL (insn)
/* Ignore TARGET if it's deleted. */
&& ! INSN_DELETED_P (target))
find_cross_jump (insn, target, 2,
&newjpos, &newlpos);

if (newjpos != 0)
{
do_cross_jump (insn, newjpos, newlpos);
changed = 1;
next = insn;
}
}

/* This code was dead in the previous jump.c! */
if (cross_jump && GET_CODE (PATTERN (insn)) == RETURN)
{
/* Return insns all "jump to the same place"
so we can cross-jump between any two of them. */

rtx newjpos, newlpos, target;

newjpos = 0;

/* If cannot cross jump to code before the label,
see if we can cross jump to another jump to
the same label. */
/* Try each other jump to this label. */
for (target = jump_chain[0];
target != 0 && newjpos == 0;
target = jump_chain[INSN_UID (target)])
if (target != insn
&& ! INSN_DELETED_P (target)
&& GET_CODE (PATTERN (target)) == RETURN)
find_cross_jump (insn, target, 2,
&newjpos, &newlpos);

if (newjpos != 0)
{
do_cross_jump (insn, newjpos, newlpos);
changed = 1;
next = insn;
}
}
}
}

first = 0;
@@ -695,11 +837,16 @@ delete_barrier_successors (f)

For each label, make a chain (using `jump_chain')
of all the *unconditional* jumps that jump to it;
also make a chain of all returns. */
also make a chain of all returns.

CROSS_JUMP indicates whether we are doing cross jumping
and if we are whether we will be paying attention to
death notes or not. */

static void
mark_all_labels (f)
mark_all_labels (f, cross_jump)
rtx f;
int cross_jump;
{
rtx insn;

@@ -709,9 +856,9 @@ mark_all_labels (f)
if (GET_CODE (insn) == CALL_INSN
&& GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
{
mark_all_labels (XEXP (PATTERN (insn), 0));
mark_all_labels (XEXP (PATTERN (insn), 1));
mark_all_labels (XEXP (PATTERN (insn), 2));
mark_all_labels (XEXP (PATTERN (insn), 0), cross_jump);
mark_all_labels (XEXP (PATTERN (insn), 1), cross_jump);
mark_all_labels (XEXP (PATTERN (insn), 2), cross_jump);

/* Canonicalize the tail recursion label attached to the
CALL_PLACEHOLDER insn. */
@@ -719,14 +866,14 @@ mark_all_labels (f)
{
rtx label_ref = gen_rtx_LABEL_REF (VOIDmode,
XEXP (PATTERN (insn), 3));
mark_jump_label (label_ref, insn, 0);
mark_jump_label (label_ref, insn, cross_jump, 0);
XEXP (PATTERN (insn), 3) = XEXP (label_ref, 0);
}

continue;
}

mark_jump_label (PATTERN (insn), insn, 0);
mark_jump_label (PATTERN (insn), insn, cross_jump, 0);
if (! INSN_DELETED_P (insn) && GET_CODE (insn) == JUMP_INSN)
{
/* When we know the LABEL_REF contained in a REG used in
@@ -742,7 +889,7 @@ mark_all_labels (f)
rtx label_ref = gen_rtx_LABEL_REF (VOIDmode,
XEXP (label_note, 0));

mark_jump_label (label_ref, insn, 0);
mark_jump_label (label_ref, insn, cross_jump, 0);
XEXP (label_note, 0) = XEXP (label_ref, 0);
JUMP_LABEL (insn) = XEXP (label_note, 0);
}
@@ -1066,7 +1213,7 @@ duplicate_loop_exit_test (loop_start)
if (reg_map)
replace_regs (PATTERN (copy), reg_map, max_reg, 1);

mark_jump_label (PATTERN (copy), copy, 0);
mark_jump_label (PATTERN (copy), copy, 0, 0);

/* Copy all REG_NOTES except REG_LABEL since mark_jump_label will
make them. */
@@ -1094,7 +1241,7 @@ duplicate_loop_exit_test (loop_start)
loop_start);
if (reg_map)
replace_regs (PATTERN (copy), reg_map, max_reg, 1);
mark_jump_label (PATTERN (copy), copy, 0);
mark_jump_label (PATTERN (copy), copy, 0, 0);
if (REG_NOTES (insn))
{
REG_NOTES (copy) = copy_insn_1 (REG_NOTES (insn));
@@ -1155,7 +1302,7 @@ duplicate_loop_exit_test (loop_start)
if (! first_copy)
first_copy = copy;

mark_jump_label (PATTERN (copy), copy, 0);
mark_jump_label (PATTERN (copy), copy, 0, 0);
if (INSN_UID (copy) < max_jump_chain
&& INSN_UID (JUMP_LABEL (copy)) < max_jump_chain)
{
@@ -1225,6 +1372,248 @@ squeeze_notes (start, end)
return start;
}

/* Compare the instructions before insn E1 with those before E2
to find an opportunity for cross jumping.
(This means detecting identical sequences of insns followed by
jumps to the same place, or followed by a label and a jump
to that label, and replacing one with a jump to the other.)

Assume E1 is a jump that jumps to label E2
(that is not always true but it might as well be).
Find the longest possible equivalent sequences
and store the first insns of those sequences into *F1 and *F2.
Store zero there if no equivalent preceding instructions are found.

We give up if we find a label in stream 1.
Actually we could transfer that label into stream 2. */

static void
find_cross_jump (e1, e2, minimum, f1, f2)
rtx e1, e2;
int minimum;
rtx *f1, *f2;
{
register rtx i1 = e1, i2 = e2;
register rtx p1, p2;
int lose = 0;

rtx last1 = 0, last2 = 0;
rtx afterlast1 = 0, afterlast2 = 0;

*f1 = 0;
*f2 = 0;

while (1)
{
i1 = prev_nonnote_insn (i1);

i2 = PREV_INSN (i2);
while (i2 && (GET_CODE (i2) == NOTE || GET_CODE (i2) == CODE_LABEL))
i2 = PREV_INSN (i2);

if (i1 == 0)
break;

/* Don't allow the range of insns preceding E1 or E2
to include the other (E2 or E1). */
if (i2 == e1 || i1 == e2)
break;

/* If we will get to this code by jumping, those jumps will be
tensioned to go directly to the new label (before I2),
so this cross-jumping won't cost extra. So reduce the minimum. */
if (GET_CODE (i1) == CODE_LABEL)
{
--minimum;
break;
}

if (i2 == 0 || GET_CODE (i1) != GET_CODE (i2))
break;

p1 = PATTERN (i1);
p2 = PATTERN (i2);

/* If this is a CALL_INSN, compare register usage information.
If we don't check this on stack register machines, the two
CALL_INSNs might be merged leaving reg-stack.c with mismatching
numbers of stack registers in the same basic block.
If we don't check this on machines with delay slots, a delay slot may
be filled that clobbers a parameter expected by the subroutine.

??? We take the simple route for now and assume that if they're
equal, they were constructed identically. */

if (GET_CODE (i1) == CALL_INSN
&& ! rtx_equal_p (CALL_INSN_FUNCTION_USAGE (i1),
CALL_INSN_FUNCTION_USAGE (i2)))
lose = 1;

#ifdef STACK_REGS
/* If cross_jump_death_matters is not 0, the insn's mode
indicates whether or not the insn contains any stack-like
regs. */

if (!lose && cross_jump_death_matters && stack_regs_mentioned (i1))
{
/* If register stack conversion has already been done, then
death notes must also be compared before it is certain that
the two instruction streams match. */

rtx note;
HARD_REG_SET i1_regset, i2_regset;

CLEAR_HARD_REG_SET (i1_regset);
CLEAR_HARD_REG_SET (i2_regset);

for (note = REG_NOTES (i1); note; note = XEXP (note, 1))
if (REG_NOTE_KIND (note) == REG_DEAD
&& STACK_REG_P (XEXP (note, 0)))
SET_HARD_REG_BIT (i1_regset, REGNO (XEXP (note, 0)));

for (note = REG_NOTES (i2); note; note = XEXP (note, 1))
if (REG_NOTE_KIND (note) == REG_DEAD
&& STACK_REG_P (XEXP (note, 0)))
SET_HARD_REG_BIT (i2_regset, REGNO (XEXP (note, 0)));

GO_IF_HARD_REG_EQUAL (i1_regset, i2_regset, done);

lose = 1;

done:
;
}
#endif

/* Don't allow old-style asm or volatile extended asms to be accepted
for cross jumping purposes. It is conceptually correct to allow
them, since cross-jumping preserves the dynamic instruction order
even though it is changing the static instruction order. However,
if an asm is being used to emit an assembler pseudo-op, such as
the MIPS `.set reorder' pseudo-op, then the static instruction order
matters and it must be preserved. */
if (GET_CODE (p1) == ASM_INPUT || GET_CODE (p2) == ASM_INPUT
|| (GET_CODE (p1) == ASM_OPERANDS && MEM_VOLATILE_P (p1))
|| (GET_CODE (p2) == ASM_OPERANDS && MEM_VOLATILE_P (p2)))
lose = 1;

if (lose || GET_CODE (p1) != GET_CODE (p2)
|| ! rtx_renumbered_equal_p (p1, p2))
{
/* The following code helps take care of G++ cleanups. */
rtx equiv1;
rtx equiv2;

if (!lose && GET_CODE (p1) == GET_CODE (p2)
&& ((equiv1 = find_reg_note (i1, REG_EQUAL, NULL_RTX)) != 0
|| (equiv1 = find_reg_note (i1, REG_EQUIV, NULL_RTX)) != 0)
&& ((equiv2 = find_reg_note (i2, REG_EQUAL, NULL_RTX)) != 0
|| (equiv2 = find_reg_note (i2, REG_EQUIV, NULL_RTX)) != 0)
/* If the equivalences are not to a constant, they may
reference pseudos that no longer exist, so we can't
use them. */
&& CONSTANT_P (XEXP (equiv1, 0))
&& rtx_equal_p (XEXP (equiv1, 0), XEXP (equiv2, 0)))
{
rtx s1 = single_set (i1);
rtx s2 = single_set (i2);
if (s1 != 0 && s2 != 0
&& rtx_renumbered_equal_p (SET_DEST (s1), SET_DEST (s2)))
{
validate_change (i1, &SET_SRC (s1), XEXP (equiv1, 0), 1);
validate_change (i2, &SET_SRC (s2), XEXP (equiv2, 0), 1);
if (! rtx_renumbered_equal_p (p1, p2))
cancel_changes (0);
else if (apply_change_group ())
goto win;
}
}

/* Insns fail to match; cross jumping is limited to the following
insns. */

#ifdef HAVE_cc0
/* Don't allow the insn after a compare to be shared by
cross-jumping unless the compare is also shared.
Here, if either of these non-matching insns is a compare,
exclude the following insn from possible cross-jumping. */
if (sets_cc0_p (p1) || sets_cc0_p (p2))
last1 = afterlast1, last2 = afterlast2, ++minimum;
#endif

/* If cross-jumping here will feed a jump-around-jump
optimization, this jump won't cost extra, so reduce
the minimum. */
if (GET_CODE (i1) == JUMP_INSN
&& JUMP_LABEL (i1)
&& prev_real_insn (JUMP_LABEL (i1)) == e1)
--minimum;
break;
}

win:
if (GET_CODE (p1) != USE && GET_CODE (p1) != CLOBBER)
{
/* Ok, this insn is potentially includable in a cross-jump here. */
afterlast1 = last1, afterlast2 = last2;
last1 = i1, last2 = i2, --minimum;
}
}

if (minimum <= 0 && last1 != 0 && last1 != e1)
*f1 = last1, *f2 = last2;
}

static void
do_cross_jump (insn, newjpos, newlpos)
rtx insn, newjpos, newlpos;
{
/* Find an existing label at this point
or make a new one if there is none. */
register rtx label = get_label_before (newlpos);

/* Make the same jump insn jump to the new point. */
if (GET_CODE (PATTERN (insn)) == RETURN)
{
/* Remove from jump chain of returns. */
delete_from_jump_chain (insn);
/* Change the insn. */
PATTERN (insn) = gen_jump (label);
INSN_CODE (insn) = -1;
JUMP_LABEL (insn) = label;
LABEL_NUSES (label)++;
/* Add to new the jump chain. */
if (INSN_UID (label) < max_jump_chain
&& INSN_UID (insn) < max_jump_chain)
{
jump_chain[INSN_UID (insn)] = jump_chain[INSN_UID (label)];
jump_chain[INSN_UID (label)] = insn;
}
}
else
redirect_jump (insn, label, 1);

/* Delete the matching insns before the jump. Also, remove any REG_EQUAL
or REG_EQUIV note in the NEWLPOS stream that isn't also present in
the NEWJPOS stream. */

while (newjpos != insn)
{
rtx lnote;

for (lnote = REG_NOTES (newlpos); lnote; lnote = XEXP (lnote, 1))
if ((REG_NOTE_KIND (lnote) == REG_EQUAL
|| REG_NOTE_KIND (lnote) == REG_EQUIV)
&& ! find_reg_note (newjpos, REG_EQUAL, XEXP (lnote, 0))
&& ! find_reg_note (newjpos, REG_EQUIV, XEXP (lnote, 0)))
remove_note (newlpos, lnote);

delete_insn (newjpos);
newjpos = next_real_insn (newjpos);
newlpos = next_real_insn (newlpos);
}
}

/* Return the label before INSN, or put a new label there. */

rtx
@@ -1269,6 +1658,50 @@ get_label_after (insn)
return label;
}

/* Return 1 if INSN is a jump that jumps to right after TARGET
only on the condition that TARGET itself would drop through.
Assumes that TARGET is a conditional jump. */

static int
jump_back_p (insn, target)
rtx insn, target;
{
rtx cinsn, ctarget;
enum rtx_code codei, codet;
rtx set, tset;

if (! any_condjump_p (insn)
|| any_uncondjump_p (target)
|| target != prev_real_insn (JUMP_LABEL (insn)))
return 0;
set = pc_set (insn);
tset = pc_set (target);

cinsn = XEXP (SET_SRC (set), 0);
ctarget = XEXP (SET_SRC (tset), 0);

codei = GET_CODE (cinsn);
codet = GET_CODE (ctarget);

if (XEXP (SET_SRC (set), 1) == pc_rtx)
{
codei = reversed_comparison_code (cinsn, insn);
if (codei == UNKNOWN)
return 0;
}

if (XEXP (SET_SRC (tset), 2) == pc_rtx)
{
codet = reversed_comparison_code (ctarget, target);
if (codei == UNKNOWN)
return 0;
}

return (codei == codet
&& rtx_renumbered_equal_p (XEXP (cinsn, 0), XEXP (ctarget, 0))
&& rtx_renumbered_equal_p (XEXP (cinsn, 1), XEXP (ctarget, 1)));
}

/* Given a comparison (CODE ARG0 ARG1), inside a insn, INSN, return an code
of reversed comparison if it is possible to do so. Otherwise return UNKNOWN.
UNKNOWN may be returned in case we are having CC_MODE compare and we don't
@@ -2038,12 +2471,16 @@ tension_vector_labels (x, idx)
must be kept distinct if we have not yet done loop-optimization,
because the gap between them is where loop-optimize
will want to move invariant code to. CROSS_JUMP tells us
that loop-optimization is done with. */
that loop-optimization is done with.

Once reload has completed (CROSS_JUMP non-zero), we need not consider
two labels distinct if they are separated by only USE or CLOBBER insns. */

void
mark_jump_label (x, insn, in_mem)
mark_jump_label (x, insn, cross_jump, in_mem)
register rtx x;
rtx insn;
int cross_jump;
int in_mem;
{
register RTX_CODE code = GET_CODE (x);
@@ -2072,7 +2509,7 @@ mark_jump_label (x, insn, in_mem)

/* If this is a constant-pool reference, see if it is a label. */
if (CONSTANT_POOL_ADDRESS_P (x))
mark_jump_label (get_pool_constant (x), insn, in_mem);
mark_jump_label (get_pool_constant (x), insn, cross_jump, in_mem);
break;

case LABEL_REF:
@@ -2100,14 +2537,19 @@ mark_jump_label (x, insn, in_mem)
{
if (GET_CODE (next) == CODE_LABEL)
label = next;
else if (cross_jump && GET_CODE (next) == INSN
&& (GET_CODE (PATTERN (next)) == USE
|| GET_CODE (PATTERN (next)) == CLOBBER))
continue;
else if (GET_CODE (next) != NOTE)
break;
else if ((NOTE_LINE_NUMBER (next) == NOTE_INSN_LOOP_BEG
|| NOTE_LINE_NUMBER (next) == NOTE_INSN_FUNCTION_END
/* ??? Optional. Disables some optimizations, but
makes gcov output more accurate with -O. */
|| (flag_test_coverage
&& NOTE_LINE_NUMBER (next) > 0)))
else if (! cross_jump
&& (NOTE_LINE_NUMBER (next) == NOTE_INSN_LOOP_BEG
|| NOTE_LINE_NUMBER (next) == NOTE_INSN_FUNCTION_END
/* ??? Optional. Disables some optimizations, but
makes gcov output more accurate with -O. */
|| (flag_test_coverage
&& NOTE_LINE_NUMBER (next) > 0)))
break;
}

@@ -2163,7 +2605,8 @@ mark_jump_label (x, insn, in_mem)
int eltnum = code == ADDR_DIFF_VEC ? 1 : 0;

for (i = 0; i < XVECLEN (x, eltnum); i++)
mark_jump_label (XVECEXP (x, eltnum, i), NULL_RTX, in_mem);
mark_jump_label (XVECEXP (x, eltnum, i), NULL_RTX,
cross_jump, in_mem);
}
return;

@@ -2175,12 +2618,12 @@ mark_jump_label (x, insn, in_mem)
for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
{
if (fmt[i] == 'e')
mark_jump_label (XEXP (x, i), insn, in_mem);
mark_jump_label (XEXP (x, i), insn, cross_jump, in_mem);
else if (fmt[i] == 'E')
{
register int j;
for (j = 0; j < XVECLEN (x, i); j++)
mark_jump_label (XVECEXP (x, i, j), insn, in_mem);
mark_jump_label (XVECEXP (x, i, j), insn, cross_jump, in_mem);
}
}
}
|
@ -419,9 +419,6 @@ reg_to_stack (first, file)
|
||||
int max_uid;
|
||||
block_info bi;
|
||||
|
||||
if (!optimize)
|
||||
split_all_insns (0);
|
||||
|
||||
/* See if there is something to do. Flow analysis is quite
|
||||
expensive so we might save some compilation time. */
|
||||
for (i = FIRST_STACK_REG; i <= LAST_STACK_REG; i++)
|
||||
@ -432,8 +429,7 @@ reg_to_stack (first, file)
|
||||
|
||||
/* Ok, floating point instructions exist. If not optimizing,
|
||||
build the CFG and run life analysis. */
|
||||
if (!optimize)
|
||||
find_basic_blocks (first, max_reg_num (), file);
|
||||
find_basic_blocks (first, max_reg_num (), file);
|
||||
count_or_remove_death_notes (NULL, 1);
|
||||
life_analysis (first, file, PROP_DEATH_NOTES);
|
||||
|
||||
|
@@ -1285,7 +1285,7 @@ extern enum rtx_code reverse_condition_maybe_unordered PARAMS ((enum rtx_code));
extern enum rtx_code swap_condition PARAMS ((enum rtx_code));
extern enum rtx_code unsigned_condition PARAMS ((enum rtx_code));
extern enum rtx_code signed_condition PARAMS ((enum rtx_code));
extern void mark_jump_label PARAMS ((rtx, rtx, int));
extern void mark_jump_label PARAMS ((rtx, rtx, int, int));

/* In jump.c */
extern rtx squeeze_notes PARAMS ((rtx, rtx));
@@ -1715,7 +1715,7 @@ extern int rtx_renumbered_equal_p PARAMS ((rtx, rtx));
extern int true_regnum PARAMS ((rtx));
extern int redirect_jump_1 PARAMS ((rtx, rtx));
extern int redirect_jump PARAMS ((rtx, rtx, int));
extern void jump_optimize PARAMS ((rtx, int, int));
extern void jump_optimize PARAMS ((rtx, int, int, int));
extern void jump_optimize_minimal PARAMS ((rtx));
extern void rebuild_jump_labels PARAMS ((rtx));
extern void thread_jumps PARAMS ((rtx, int, int));
@@ -1729,6 +1729,8 @@ extern int condjump_in_parallel_p PARAMS ((rtx));
extern void never_reached_warning PARAMS ((rtx));

/* Flags for jump_optimize() */
#define JUMP_CROSS_JUMP 1
#define JUMP_CROSS_JUMP_DEATH_MATTERS 2
#define JUMP_NOOP_MOVES 1
#define JUMP_AFTER_REGSCAN 1
84 gcc/toplev.c
@@ -282,10 +282,11 @@ enum dump_file_index
DFI_rnreg,
DFI_ce2,
DFI_sched2,
DFI_stack,
DFI_bbro,
DFI_jump2,
DFI_mach,
DFI_dbr,
DFI_stack,
DFI_MAX
};

@@ -295,7 +296,7 @@ enum dump_file_index
Remaining -d letters:

" o q u "
" H JK OPQ TUV YZ"
" H K OPQ TUV YZ"
*/

struct dump_file_info dump_file[DFI_MAX] =
@@ -329,6 +330,7 @@ struct dump_file_info dump_file[DFI_MAX] =
{ "ce2", 'E', 1, 0, 0 },
{ "sched2", 'R', 1, 0, 0 },
{ "bbro", 'B', 1, 0, 0 },
{ "jump2", 'J', 1, 0, 0 },
{ "mach", 'M', 1, 0, 0 },
{ "dbr", 'd', 0, 0, 0 },
{ "stack", 'k', 1, 0, 0 },
@@ -2837,7 +2839,8 @@ rest_of_compilation (decl)

optimize = 0;
find_exception_handler_labels ();
jump_optimize (insns, !JUMP_NOOP_MOVES, !JUMP_AFTER_REGSCAN);
jump_optimize (insns, !JUMP_CROSS_JUMP, !JUMP_NOOP_MOVES,
!JUMP_AFTER_REGSCAN);
optimize = saved_optimize;
}

@@ -2944,7 +2947,8 @@ rest_of_compilation (decl)
expected_value_to_br_prob ();

reg_scan (insns, max_reg_num (), 0);
jump_optimize (insns, !JUMP_NOOP_MOVES, JUMP_AFTER_REGSCAN);
jump_optimize (insns, !JUMP_CROSS_JUMP, !JUMP_NOOP_MOVES,
JUMP_AFTER_REGSCAN);

timevar_pop (TV_JUMP);

@@ -3086,7 +3090,8 @@ rest_of_compilation (decl)
if (tem || optimize > 1)
{
timevar_push (TV_JUMP);
jump_optimize (insns, !JUMP_NOOP_MOVES, !JUMP_AFTER_REGSCAN);
jump_optimize (insns, !JUMP_CROSS_JUMP, !JUMP_NOOP_MOVES,
!JUMP_AFTER_REGSCAN);
timevar_pop (TV_JUMP);
}

@@ -3158,7 +3163,8 @@ rest_of_compilation (decl)
{
tem = tem2 = 0;
timevar_push (TV_JUMP);
jump_optimize (insns, !JUMP_NOOP_MOVES, !JUMP_AFTER_REGSCAN);
jump_optimize (insns, !JUMP_CROSS_JUMP, !JUMP_NOOP_MOVES,
!JUMP_AFTER_REGSCAN);
timevar_pop (TV_JUMP);

if (flag_expensive_optimizations)
@@ -3231,7 +3237,8 @@ rest_of_compilation (decl)
delete_trivially_dead_insns (insns, max_reg_num ());

reg_scan (insns, max_reg_num (), 0);
jump_optimize (insns, !JUMP_NOOP_MOVES, JUMP_AFTER_REGSCAN);
jump_optimize (insns, !JUMP_CROSS_JUMP,
!JUMP_NOOP_MOVES, JUMP_AFTER_REGSCAN);

timevar_push (TV_IFCVT);

@@ -3249,7 +3256,8 @@ rest_of_compilation (decl)
if (tem)
{
timevar_push (TV_JUMP);
jump_optimize (insns, !JUMP_NOOP_MOVES, !JUMP_AFTER_REGSCAN);
jump_optimize (insns, !JUMP_CROSS_JUMP,
!JUMP_NOOP_MOVES, !JUMP_AFTER_REGSCAN);
timevar_pop (TV_JUMP);
}
}
@@ -3563,7 +3571,8 @@ rest_of_compilation (decl)
timevar_push (TV_FLOW2);
open_dump_file (DFI_flow2, decl);

jump_optimize (insns, JUMP_NOOP_MOVES, !JUMP_AFTER_REGSCAN);
jump_optimize (insns, !JUMP_CROSS_JUMP,
JUMP_NOOP_MOVES, !JUMP_AFTER_REGSCAN);
find_basic_blocks (insns, max_reg_num (), rtl_dump_file);

/* On some machines, the prologue and epilogue code, or parts thereof,
@@ -3628,10 +3637,6 @@ rest_of_compilation (decl)
close_dump_file (DFI_ce2, print_rtl_with_bb, insns);
timevar_pop (TV_IFCVT2);
}
#ifdef STACK_REGS
if (optimize)
split_all_insns (1);
#endif

#ifdef INSN_SCHEDULING
if (optimize > 0 && flag_schedule_insns_after_reload)
@@ -3658,17 +3663,6 @@ rest_of_compilation (decl)
= optimize > 0 && only_leaf_regs_used () && leaf_function_p ();
#endif

#ifdef STACK_REGS
timevar_push (TV_REG_STACK);
open_dump_file (DFI_stack, decl);

reg_to_stack (insns, rtl_dump_file);

close_dump_file (DFI_stack, print_rtl, insns);
timevar_pop (TV_REG_STACK);

ggc_collect ();
#endif
if (optimize > 0 && flag_reorder_blocks)
{
timevar_push (TV_REORDER_BLOCKS);
@@ -3677,10 +3671,26 @@ rest_of_compilation (decl)
reorder_basic_blocks ();

close_dump_file (DFI_bbro, print_rtl_with_bb, insns);
cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_POST_REGSTACK);
timevar_pop (TV_REORDER_BLOCKS);
}

/* One more attempt to remove jumps to .+1 left by dead-store elimination.
Also do cross-jumping this time and delete no-op move insns. */

if (optimize > 0)
{
timevar_push (TV_JUMP);
open_dump_file (DFI_jump2, decl);

jump_optimize (insns, JUMP_CROSS_JUMP, JUMP_NOOP_MOVES,
!JUMP_AFTER_REGSCAN);

/* CFG no longer kept up to date. */

close_dump_file (DFI_jump2, print_rtl, insns);
timevar_pop (TV_JUMP);
}

/* If a machine dependent reorganization is needed, call it. */
#ifdef MACHINE_DEPENDENT_REORG
open_dump_file (DFI_mach, decl);
@@ -3692,8 +3702,6 @@ rest_of_compilation (decl)
ggc_collect ();
#endif

/* CFG no longer kept up to date. */

/* If a scheduling pass for delayed branches is to be done,
call the scheduling code. */

@@ -3712,10 +3720,28 @@ rest_of_compilation (decl)
}
#endif

#if defined (HAVE_ATTR_length) && !defined (STACK_REGS)
timevar_push (TV_SHORTEN_BRANCH);
split_all_insns (0);
if (0
#ifdef HAVE_ATTR_length
|| 1
#endif
#ifdef STACK_REGS
|| 1
#endif
)
split_all_insns (0);
timevar_pop (TV_SHORTEN_BRANCH);

#ifdef STACK_REGS
timevar_push (TV_REG_STACK);
open_dump_file (DFI_stack, decl);

reg_to_stack (insns, rtl_dump_file);

close_dump_file (DFI_stack, print_rtl, insns);
timevar_pop (TV_REG_STACK);

ggc_collect ();
#endif

convert_to_eh_region_ranges ();