Mirror of git://gcc.gnu.org/git/gcc.git, synced 2025-04-19 21:31:42 +08:00
re PR rtl-optimization/83496 (wrong code generated with -Os -mbranch-cost=1)
	PR rtl-optimization/83496
	* reorg.c (steal_delay_list_from_target): Change REDUNDANT array from
	booleans to RTXes.  Call fix_reg_dead_note on every non-null element.
	(steal_delay_list_from_fallthrough): Call fix_reg_dead_note on a
	redundant insn, if any.
	(relax_delay_slots): Likewise.
	(update_reg_unused_notes): Rename REDUNDANT_INSN to OTHER_INSN.

From-SVN: r257996
This commit is contained in:
parent d99dcb77bb
commit f41881a4ad
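The essence of the fix is to remember which earlier instruction made a delay-slot instruction redundant, not merely that it was redundant, so the stale REG_DEAD bookkeeping around the earlier copy can be corrected once the duplicate is dropped. Below is a minimal standalone sketch of that pattern only; the toy_insn structure, its fields, and the helpers are invented stand-ins for illustration and are not GCC's insn representation or its real redundant_insn / fix_reg_dead_note routines.

#include <stdio.h>
#include <stddef.h>

/* Invented stand-in for an RTL insn: an id plus a flag modelling a
   REG_DEAD note ("the value computed here dies here").  */
struct toy_insn
{
  int id;
  int reg_dead_note;
};

/* Stand-in for redundant_insn (): return the earlier insn that already
   does the same work as TRIAL, or NULL if there is none.  */
static struct toy_insn *
find_prior_equivalent (struct toy_insn *prior, size_t n, struct toy_insn *trial)
{
  for (size_t i = 0; i < n; i++)
    if (prior[i].id == trial->id)
      return &prior[i];
  return NULL;
}

/* Stand-in for fix_reg_dead_note (): once the duplicate is dropped and the
   earlier copy's result is reused later, a note claiming the value dies
   early is stale and must be removed, otherwise liveness goes wrong.  */
static void
toy_fix_reg_dead_note (struct toy_insn *prior)
{
  prior->reg_dead_note = 0;
}

int
main (void)
{
  struct toy_insn stream[] = { { 1, 1 }, { 2, 0 } };
  struct toy_insn delay_slot = { 1, 0 };   /* redundant with stream[0] */

  /* Old pattern: a bare yes/no answer leaves nothing to fix up.  */
  int redundant_p = find_prior_equivalent (stream, 2, &delay_slot) != NULL;

  /* New pattern: keep the prior insn itself so its notes can be corrected,
     mirroring the rtx_insn ** REDUNDANT array in the patch below.  */
  struct toy_insn *prior_insn = find_prior_equivalent (stream, 2, &delay_slot);
  if (prior_insn)
    toy_fix_reg_dead_note (prior_insn);

  printf ("redundant=%d stale-note-cleared=%d\n",
          redundant_p, prior_insn && !prior_insn->reg_dead_note);
  return 0;
}

In the patch itself the same idea appears at three sites: the REDUNDANT array in steal_delay_list_from_target becomes an array of rtx_insn * instead of bool, and the two single-insn sites capture the result of redundant_insn () in prior_insn before calling fix_reg_dead_note () on it.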
10 gcc/ChangeLog
@@ -1,3 +1,13 @@
+2018-02-26  Eric Botcazou  <ebotcazou@adacore.com>
+
+	PR rtl-optimization/83496
+	* reorg.c (steal_delay_list_from_target): Change REDUNDANT array from
+	booleans to RTXes.  Call fix_reg_dead_note on every non-null element.
+	(steal_delay_list_from_fallthrough): Call fix_reg_dead_note on a
+	redundant insn, if any.
+	(relax_delay_slots): Likewise.
+	(update_reg_unused_notes): Rename REDUNDANT_INSN to OTHER_INSN.
+
 2018-02-26  Richard Sandiford  <richard.sandiford@linaro.org>
 
 	PR tree-optimization/83965
38 gcc/reorg.c
@@ -1036,7 +1036,8 @@ check_annul_list_true_false (int annul_true_p,
 
 static void
 steal_delay_list_from_target (rtx_insn *insn, rtx condition, rtx_sequence *seq,
-			      vec<rtx_insn *> *delay_list, resources *sets,
+			      vec<rtx_insn *> *delay_list,
+			      struct resources *sets,
 			      struct resources *needed,
 			      struct resources *other_needed,
 			      int slots_to_fill, int *pslots_filled,
@@ -1049,7 +1050,7 @@ steal_delay_list_from_target (rtx_insn *insn, rtx condition, rtx_sequence *seq,
   int used_annul = 0;
   int i;
   struct resources cc_set;
-  bool *redundant;
+  rtx_insn **redundant;
 
   /* We can't do anything if there are more delay slots in SEQ than we
      can handle, or if we don't know that it will be a taken branch.
@@ -1088,7 +1089,7 @@ steal_delay_list_from_target (rtx_insn *insn, rtx condition, rtx_sequence *seq,
   if (! targetm.can_follow_jump (insn, seq->insn (0)))
     return;
 
-  redundant = XALLOCAVEC (bool, XVECLEN (seq, 0));
+  redundant = XALLOCAVEC (rtx_insn *, XVECLEN (seq, 0));
   for (i = 1; i < seq->len (); i++)
     {
       rtx_insn *trial = seq->insn (i);
@@ -1152,7 +1153,10 @@ steal_delay_list_from_target (rtx_insn *insn, rtx condition, rtx_sequence *seq,
      we therefore decided not to copy.  */
   for (i = 1; i < seq->len (); i++)
     if (redundant[i])
-      update_block (seq->insn (i), insn);
+      {
+	fix_reg_dead_note (redundant[i], insn);
+	update_block (seq->insn (i), insn);
+      }
 
   /* Show the place to which we will be branching.  */
   *pnew_thread = first_active_target_insn (JUMP_LABEL (seq->insn (0)));
@@ -1199,6 +1203,7 @@ steal_delay_list_from_fallthrough (rtx_insn *insn, rtx condition,
   for (i = 1; i < seq->len (); i++)
     {
       rtx_insn *trial = seq->insn (i);
+      rtx_insn *prior_insn;
 
       /* If TRIAL sets CC0, stealing it will move it too far from the use
	 of CC0.  */
@@ -1210,8 +1215,9 @@ steal_delay_list_from_fallthrough (rtx_insn *insn, rtx condition,
	break;
 
       /* If this insn was already done, we don't need it.  */
-      if (redundant_insn (trial, insn, *delay_list))
+      if ((prior_insn = redundant_insn (trial, insn, *delay_list)))
	{
+	  fix_reg_dead_note (prior_insn, insn);
	  update_block (trial, insn);
	  delete_from_delay_slot (trial);
	  continue;
@@ -1791,15 +1797,14 @@ fix_reg_dead_note (rtx_insn *start_insn, rtx stop_insn)
     }
 }
 
-/* Delete any REG_UNUSED notes that exist on INSN but not on REDUNDANT_INSN.
+/* Delete any REG_UNUSED notes that exist on INSN but not on OTHER_INSN.
 
    This handles the case of udivmodXi4 instructions which optimize their
-   output depending on whether any REG_UNUSED notes are present.
-   we must make sure that INSN calculates as many results as REDUNDANT_INSN
-   does.  */
+   output depending on whether any REG_UNUSED notes are present.  We must
+   make sure that INSN calculates as many results as OTHER_INSN does.  */
 
 static void
-update_reg_unused_notes (rtx_insn *insn, rtx redundant_insn)
+update_reg_unused_notes (rtx_insn *insn, rtx other_insn)
 {
   rtx link, next;
 
@@ -1811,8 +1816,7 @@ update_reg_unused_notes (rtx_insn *insn, rtx redundant_insn)
	  || !REG_P (XEXP (link, 0)))
	continue;
 
-      if (! find_regno_note (redundant_insn, REG_UNUSED,
-			     REGNO (XEXP (link, 0))))
+      if (!find_regno_note (other_insn, REG_UNUSED, REGNO (XEXP (link, 0))))
	remove_note (insn, link);
     }
 }
@@ -2325,9 +2329,8 @@ follow_jumps (rtx label, rtx_insn *jump, bool *crossing)
    taken and THREAD_IF_TRUE is set.  This is used for the branch at the
    end of a loop back up to the top.
 
-   OWN_THREAD and OWN_OPPOSITE_THREAD are true if we are the only user of the
-   thread.  I.e., it is the fallthrough code of our jump or the target of the
-   jump when we are the only jump going there.
+   OWN_THREAD is true if we are the only user of the thread, i.e. it is
+   the target of the jump when we are the only jump going there.
 
    If OWN_THREAD is false, it must be the "true" thread of a jump.  In that
    case, we can only take insns from the head of the thread for our delay
@@ -3118,7 +3121,7 @@ relax_delay_slots (rtx_insn *first)
   /* Look at every JUMP_INSN and see if we can improve it.  */
   for (insn = first; insn; insn = next)
     {
-      rtx_insn *other;
+      rtx_insn *other, *prior_insn;
       bool crossing;
 
       next = next_active_insn (insn);
@@ -3224,8 +3227,9 @@ relax_delay_slots (rtx_insn *first)
       /* See if the first insn in the delay slot is redundant with some
	 previous insn.  Remove it from the delay slot if so; then set up
	 to reprocess this insn.  */
-      if (redundant_insn (pat->insn (1), delay_insn, vNULL))
+      if ((prior_insn = redundant_insn (pat->insn (1), delay_insn, vNULL)))
	{
+	  fix_reg_dead_note (prior_insn, insn);
	  update_block (pat->insn (1), insn);
	  delete_from_delay_slot (pat->insn (1));
	  next = prev_active_insn (next);
4 gcc/testsuite/ChangeLog
@@ -1,3 +1,7 @@
+2018-02-26  Eric Botcazou  <ebotcazou@adacore.com>
+
+	* gcc.c-torture/execute/20180226-1.c: New test.
+
 2018-02-26  Richard Sandiford  <richard.sandiford@linaro.org>
 
 	PR tree-optimization/83965
1 gcc/testsuite/gcc.c-torture/execute/20180226-1.c (symbolic link)
@@ -0,0 +1 @@
+/home/eric/build/gcc/mips-linux/pr83496.c