loop.c (scan_loop): Add USEs inside PARALLELs into dependencies of the movable.

* loop.c (scan_loop): Add USEs inside PARALLELs into dependencies
	of the movable.

	* toplev.c (rest_of_compilation): Allow new pseudos for mode switching.

	* i386-protos.h (ix86_split_fp_branch): Update prototype.
	(ix86_fp_jump_nontrivial_p): New.
	* i386.md (fp_jcc_?): Update call of split_fp_branch;
	use ix86_fp_jump_nontrivial_p.
	* i386.c (ix86_fp_jump_nontrivial_p): New.
	(ix86_split_fp_branch): Accept code instead of rtx.
	(ix86_expand_compare): Expand comparison early in case
	doing so is reasonably cheap.

From-SVN: r44019
Author: Jan Hubicka
Date: 2001-07-15 14:00:34 +02:00
Commit: 03598deac8 (parent: b8313db6c0)
6 changed files with 104 additions and 35 deletions

gcc/ChangeLog

@ -1,3 +1,19 @@
+Sun Jul 15 00:53:35 CEST 2001 Jan Hubicka <jh@suse.cz>
+* loop.c (scan_loop): Add USEs inside PARALLELs into dependencies
+of the movable.
+* toplev.c (rest_of_compilation): Allow new pseudos for mode switching.
+* i386-protos.h (ix86_split_fp_branch): Update prototype.
+(ix86_fp_jump_nontrivial_p): New.
+* i386.md (fp_jcc_?): Update call of split_fp_branch;
+use ix86_fp_jump_nontrivial_p.
+* i386.c (ix86_fp_jump_nontrivial_p): New.
+(ix86_split_fp_branch): Accept code instead of rtx.
+(ix86_expand_compare): Expand comparison early in case
+doing so is reasonably cheap.
2001-07-15 Nick Clifton <nickc@cambridge.redhat.com>
* config/rs6000/sysv4.h (CPP_ENDIAN_BIG_SPEC): Assert 'endian'

gcc/config/i386/i386-protos.h

@ -142,7 +142,8 @@ extern int x86_64_zero_extended_value PARAMS ((rtx));
extern rtx ix86_force_to_memory PARAMS ((enum machine_mode, rtx));
extern void ix86_free_from_memory PARAMS ((enum machine_mode));
-extern void ix86_split_fp_branch PARAMS ((rtx, rtx, rtx, rtx, rtx, rtx));
+extern void ix86_split_fp_branch PARAMS ((enum rtx_code code, rtx,
+rtx, rtx, rtx, rtx));
extern int ix86_hard_regno_mode_ok PARAMS ((int, enum machine_mode));
extern int ix86_register_move_cost PARAMS ((enum machine_mode, enum reg_class,
enum reg_class));
@ -155,6 +156,7 @@ extern int ix86_memory_move_cost PARAMS ((enum machine_mode, enum reg_class,
int));
extern void ix86_set_move_mem_attrs PARAMS ((rtx, rtx, rtx, rtx, rtx));
extern void emit_i387_cw_initialization PARAMS ((rtx, rtx));
+extern bool ix86_fp_jump_nontrivial_p PARAMS ((enum rtx_code));
#ifdef TREE_CODE

gcc/config/i386/i386.c

@ -6060,6 +6060,18 @@ ix86_expand_compare (code, second_test, bypass_test)
return ret;
}
+/* Return true if the CODE will result in nontrivial jump sequence. */
+bool
+ix86_fp_jump_nontrivial_p (code)
+enum rtx_code code;
+{
+enum rtx_code bypass_code, first_code, second_code;
+if (!TARGET_CMOVE)
+return true;
+ix86_fp_comparison_codes (code, &bypass_code, &first_code, &second_code);
+return bypass_code != NIL || second_code != NIL;
+}
void
ix86_expand_branch (code, label)
enum rtx_code code;
@ -6084,34 +6096,48 @@ ix86_expand_branch (code, label)
case DFmode:
case XFmode:
case TFmode:
-/* Don't expand the comparison early, so that we get better code
-when jump or whoever decides to reverse the comparison. */
{
rtvec vec;
int use_fcomi;
+enum rtx_code bypass_code, first_code, second_code;
code = ix86_prepare_fp_compare_args (code, &ix86_compare_op0,
&ix86_compare_op1);
+ix86_fp_comparison_codes (code, &bypass_code, &first_code, &second_code);
-tmp = gen_rtx_fmt_ee (code, VOIDmode,
-ix86_compare_op0, ix86_compare_op1);
-tmp = gen_rtx_IF_THEN_ELSE (VOIDmode, tmp,
-gen_rtx_LABEL_REF (VOIDmode, label),
-pc_rtx);
-tmp = gen_rtx_SET (VOIDmode, pc_rtx, tmp);
+/* Check whether we will use the natural sequence with one jump. If
+so, we can expand jump early. Otherwise delay expansion by
+creating compound insn to not confuse optimizers. */
+if (bypass_code == NIL && second_code == NIL
+&& TARGET_CMOVE)
+{
+ix86_split_fp_branch (code, ix86_compare_op0, ix86_compare_op1,
+gen_rtx_LABEL_REF (VOIDmode, label),
+pc_rtx, NULL_RTX);
+}
+else
+{
+tmp = gen_rtx_fmt_ee (code, VOIDmode,
+ix86_compare_op0, ix86_compare_op1);
+tmp = gen_rtx_IF_THEN_ELSE (VOIDmode, tmp,
+gen_rtx_LABEL_REF (VOIDmode, label),
+pc_rtx);
+tmp = gen_rtx_SET (VOIDmode, pc_rtx, tmp);
-use_fcomi = ix86_use_fcomi_compare (code);
-vec = rtvec_alloc (3 + !use_fcomi);
-RTVEC_ELT (vec, 0) = tmp;
-RTVEC_ELT (vec, 1)
-= gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (CCFPmode, 18));
-RTVEC_ELT (vec, 2)
-= gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (CCFPmode, 17));
-if (! use_fcomi)
-RTVEC_ELT (vec, 3)
-= gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (HImode));
+use_fcomi = ix86_use_fcomi_compare (code);
+vec = rtvec_alloc (3 + !use_fcomi);
+RTVEC_ELT (vec, 0) = tmp;
+RTVEC_ELT (vec, 1)
+= gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (CCFPmode, 18));
+RTVEC_ELT (vec, 2)
+= gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (CCFPmode, 17));
+if (! use_fcomi)
+RTVEC_ELT (vec, 3)
+= gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (HImode));
-emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, vec));
+emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, vec));
+}
return;
}
@ -6235,12 +6261,13 @@ ix86_expand_branch (code, label)
/* Split branch based on floating point condition. */
void
-ix86_split_fp_branch (condition, op1, op2, target1, target2, tmp)
-rtx condition, op1, op2, target1, target2, tmp;
+ix86_split_fp_branch (code, op1, op2, target1, target2, tmp)
+enum rtx_code code;
+rtx op1, op2, target1, target2, tmp;
{
rtx second, bypass;
rtx label = NULL_RTX;
-enum rtx_code code = GET_CODE (condition);
+rtx condition;
if (target2 != pc_rtx)
{

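A short note on how the pieces above fit together: ix86_fp_jump_nontrivial_p (code) is true exactly when the bypass_code/second_code test in ix86_expand_branch fails, i.e. when !TARGET_CMOVE or an extra bypass/second jump would be needed. The following is a minimal sketch, not part of the patch, assuming GCC's internal headers; expand_fp_branch_sketch and emit_compound_fp_jcc are hypothetical names.

    /* Sketch of the decision made in ix86_expand_branch for FP modes.  */
    static void
    expand_fp_branch_sketch (code, op0, op1, label)
         enum rtx_code code;
         rtx op0, op1, label;
    {
      if (!ix86_fp_jump_nontrivial_p (code))
        /* Natural single-jump sequence: split the branch right away.  */
        ix86_split_fp_branch (code, op0, op1,
                              gen_rtx_LABEL_REF (VOIDmode, label), pc_rtx,
                              NULL_RTX);
      else
        /* A bypass and/or second jump is needed: emit the compound fp_jcc_*
           insn and leave it to the post-reload splitter (hypothetical helper
           standing in for the rtvec-building else branch above).  */
        emit_compound_fp_jcc (code, op0, op1, label);
    }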
gcc/config/i386/i386.md

@ -12686,7 +12686,8 @@
"TARGET_CMOVE && TARGET_80387
&& !SSE_FLOAT_MODE_P (GET_MODE (operands[1]))
&& FLOAT_MODE_P (GET_MODE (operands[1]))
-&& GET_MODE (operands[1]) == GET_MODE (operands[2])"
+&& GET_MODE (operands[1]) == GET_MODE (operands[2])
+&& ix86_fp_jump_nontrivial_p (GET_CODE (operands[0]))"
"#")
(define_insn "*fp_jcc_1_sse"
@ -12700,7 +12701,8 @@
(clobber (reg:CCFP 17))]
"TARGET_80387
&& SSE_FLOAT_MODE_P (GET_MODE (operands[1]))
-&& GET_MODE (operands[1]) == GET_MODE (operands[2])"
+&& GET_MODE (operands[1]) == GET_MODE (operands[2])
+&& ix86_fp_jump_nontrivial_p (GET_CODE (operands[0]))"
"#")
(define_insn "*fp_jcc_1_sse_only"
@ -12713,7 +12715,8 @@
(clobber (reg:CCFP 18))
(clobber (reg:CCFP 17))]
"SSE_FLOAT_MODE_P (GET_MODE (operands[1]))
-&& GET_MODE (operands[1]) == GET_MODE (operands[2])"
+&& GET_MODE (operands[1]) == GET_MODE (operands[2])
+&& ix86_fp_jump_nontrivial_p (GET_CODE (operands[0]))"
"#")
(define_insn "*fp_jcc_2"
@ -12728,7 +12731,8 @@
"TARGET_CMOVE && TARGET_80387
&& !SSE_FLOAT_MODE_P (GET_MODE (operands[1]))
&& FLOAT_MODE_P (GET_MODE (operands[1]))
-&& GET_MODE (operands[1]) == GET_MODE (operands[2])"
+&& GET_MODE (operands[1]) == GET_MODE (operands[2])
+&& ix86_fp_jump_nontrivial_p (GET_CODE (operands[0]))"
"#")
(define_insn "*fp_jcc_2_sse"
@ -12742,7 +12746,8 @@
(clobber (reg:CCFP 17))]
"TARGET_80387
&& SSE_FLOAT_MODE_P (GET_MODE (operands[1]))
-&& GET_MODE (operands[1]) == GET_MODE (operands[2])"
+&& GET_MODE (operands[1]) == GET_MODE (operands[2])
+&& ix86_fp_jump_nontrivial_p (GET_CODE (operands[0]))"
"#")
(define_insn "*fp_jcc_2_sse_only"
@ -12755,7 +12760,8 @@
(clobber (reg:CCFP 18))
(clobber (reg:CCFP 17))]
"SSE_FLOAT_MODE_P (GET_MODE (operands[1]))
-&& GET_MODE (operands[1]) == GET_MODE (operands[2])"
+&& GET_MODE (operands[1]) == GET_MODE (operands[2])
+&& ix86_fp_jump_nontrivial_p (GET_CODE (operands[0]))"
"#")
(define_insn "*fp_jcc_3"
@ -12773,7 +12779,8 @@
&& GET_MODE (operands[1]) == GET_MODE (operands[2])
&& !ix86_use_fcomi_compare (GET_CODE (operands[0]))
&& SELECT_CC_MODE (GET_CODE (operands[0]),
-operands[1], operands[2]) == CCFPmode"
+operands[1], operands[2]) == CCFPmode
+&& ix86_fp_jump_nontrivial_p (GET_CODE (operands[0]))"
"#")
(define_insn "*fp_jcc_4"
@ -12791,7 +12798,8 @@
&& GET_MODE (operands[1]) == GET_MODE (operands[2])
&& !ix86_use_fcomi_compare (GET_CODE (operands[0]))
&& SELECT_CC_MODE (GET_CODE (operands[0]),
-operands[1], operands[2]) == CCFPmode"
+operands[1], operands[2]) == CCFPmode
+&& ix86_fp_jump_nontrivial_p (GET_CODE (operands[0]))"
"#")
(define_insn "*fp_jcc_5"
@ -12806,7 +12814,8 @@
(clobber (match_scratch:HI 4 "=a"))]
"TARGET_80387
&& FLOAT_MODE_P (GET_MODE (operands[1]))
-&& GET_MODE (operands[1]) == GET_MODE (operands[2])"
+&& GET_MODE (operands[1]) == GET_MODE (operands[2])
+&& ix86_fp_jump_nontrivial_p (GET_CODE (operands[0]))"
"#")
(define_insn "*fp_jcc_6"
@ -12821,7 +12830,8 @@
(clobber (match_scratch:HI 4 "=a"))]
"TARGET_80387
&& FLOAT_MODE_P (GET_MODE (operands[1]))
-&& GET_MODE (operands[1]) == GET_MODE (operands[2])"
+&& GET_MODE (operands[1]) == GET_MODE (operands[2])
+&& ix86_fp_jump_nontrivial_p (GET_CODE (operands[0]))"
"#")
(define_split
@ -12836,7 +12846,7 @@
"reload_completed"
[(const_int 0)]
{
-ix86_split_fp_branch (operands[0], operands[1], operands[2],
+ix86_split_fp_branch (GET_CODE (operands[0]), operands[1], operands[2],
operands[3], operands[4], NULL_RTX);
DONE;
})
@ -12857,7 +12867,7 @@
(match_dup 3)
(match_dup 4)))]
{
-ix86_split_fp_branch (operands[0], operands[1], operands[2],
+ix86_split_fp_branch (GET_CODE (operands[0]), operands[1], operands[2],
operands[3], operands[4], operands[5]);
DONE;
})

gcc/loop.c

@ -705,6 +705,18 @@ scan_loop (loop, flags)
}
}
+/* For parallels, add any possible uses to the dependencies, as we can't move
+the insn without resolving them first. */
+if (GET_CODE (PATTERN (p)) == PARALLEL)
+{
+for (i = 0; i < XVECLEN (PATTERN (p), 0); i++)
+{
+rtx x = XVECEXP (PATTERN (p), 0, i);
+if (GET_CODE (x) == USE)
+dependencies = gen_rtx_EXPR_LIST (VOIDmode, XEXP (x, 0), dependencies);
+}
+}
/* Don't try to optimize a register that was made
by loop-optimization for an inner loop.
We don't know its life-span, so we can't compute the benefit. */

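For illustration, a movable candidate whose pattern is a PARALLEL can carry USEs of other registers; the hypothetical insn and helper below (a sketch assuming GCC's rtl.h macros, not code from the patch) show exactly what the new scan_loop code chains onto `dependencies'.

    /* For a pattern such as
         (parallel [(set (reg:SI 60) (plus:SI (reg:SI 61) (reg:SI 62)))
                    (use (reg:SI 63))])
       the USE of (reg:SI 63) is recorded, so the movable is not hoisted above
       the insn that sets register 63.  */
    static rtx
    add_parallel_use_dependencies (pat, dependencies)
         rtx pat, dependencies;
    {
      int i;

      if (GET_CODE (pat) == PARALLEL)
        for (i = 0; i < XVECLEN (pat, 0); i++)
          {
            rtx x = XVECEXP (pat, 0, i);

            if (GET_CODE (x) == USE)
              dependencies = gen_rtx_EXPR_LIST (VOIDmode, XEXP (x, 0),
                                                dependencies);
          }
      return dependencies;
    }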
gcc/toplev.c

@ -3422,6 +3422,7 @@ rest_of_compilation (decl)
#ifdef OPTIMIZE_MODE_SWITCHING
timevar_push (TV_GCSE);
+no_new_pseudos = 0;
if (optimize_mode_switching (NULL))
{
/* We did work, and so had to regenerate global life information.
@ -3429,6 +3430,7 @@ rest_of_compilation (decl)
information below. */
register_life_up_to_date = 1;
}
+no_new_pseudos = 1;
timevar_pop (TV_GCSE);
#endif
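The intent of the two toplev.c hunks is the usual bracketing of no_new_pseudos: mode switching (on i386, emitting the i387 control word initialization) may need fresh pseudo registers, so the flag is cleared for the duration of the pass and restored afterwards. A minimal sketch of the resulting sequence, assuming the flag is set on entry:

    /* Permit gen_reg_rtx during mode switching only.  */
    no_new_pseudos = 0;

    if (optimize_mode_switching (NULL))
      /* The pass regenerated global life information.  */
      register_life_up_to_date = 1;

    no_new_pseudos = 1;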