optabs.h (emit_unop_insn, [...]): Change insn code parameter from "int" to "enum insn_code".

gcc/
	* optabs.h (emit_unop_insn, maybe_emit_unop_insn): Change insn code
	parameter from "int" to "enum insn_code".
	(expand_operand_type): New enum.
	(expand_operand): New structure.
	(create_expand_operand): New function.
	(create_fixed_operand, create_output_operand): Likewise.
	(create_input_operand, create_convert_operand_to): Likewise.
	(create_convert_operand_from, create_address_operand): Likewise.
	(create_integer_operand): Likewise.
	(create_convert_operand_from_type, maybe_legitimize_operands): Declare.
	(maybe_gen_insn, maybe_expand_insn, maybe_expand_jump_insn): Likewise.
	(expand_insn, expand_jump_insn): Likewise.
	* builtins.c (expand_builtin_prefetch): Use the new interfaces.
	(expand_builtin_interclass_mathfn, expand_builtin_strlen): Likewise.
	(expand_movstr, expand_builtin___clear_cache): Likewise.
	(expand_builtin_lock_release): Likewise.
	* explow.c (allocate_dynamic_stack_space): Likewise.
	(probe_stack_range): Likewise.  Allow check_stack to FAIL,
	and use the default handling in that case.
	* expmed.c (check_predicate_volatile_ok): Delete.
	(store_bit_field_1, extract_bit_field_1): Use the new interfaces.
	(emit_cstore): Likewise.
	* expr.c (emit_block_move_via_movmem): Likewise.
	(set_storage_via_setmem, expand_assignment): Likewise.
	(emit_storent_insn, try_casesi): Likewise.
	(emit_single_push_insn): Likewise.  Allow the expansion to fail.
	* optabs.c (expand_widen_pattern_expr, expand_ternary_op): Likewise.
	(expand_vec_shift_expr, expand_binop_directly): Likewise.
	(expand_twoval_unop, expand_twoval_binop): Likewise.
	(expand_unop_direct, emit_indirect_jump): Likewise.
	(emit_conditional_move, vector_compare_rtx): Likewise.
	(expand_vec_cond_expr, expand_val_compare_and_swap_1): Likewise.
	(expand_sync_operation, expand_sync_fetch_operation): Likewise.
	(expand_sync_lock_test_and_set): Likewise.
	(maybe_emit_unop_insn): Likewise.  Change icode to an insn_code.
	(emit_unop_insn): Likewise.
	(expand_copysign_absneg): Change icode to an insn_code.
	(create_convert_operand_from_type): New function.
	(maybe_legitimize_operand, maybe_legitimize_operands): Likewise.
	(maybe_gen_insn, maybe_expand_insn, maybe_expand_jump_insn): Likewise.
	(expand_insn, expand_jump_insn): Likewise.
	* config/i386/i386.md (setmem<mode>): Use nonmemory_operand rather
	than const_int_operand for operand 2.

From-SVN: r171341
This commit is contained in:
Richard Sandiford 2011-03-23 09:30:58 +00:00 committed by Richard Sandiford
parent 78fadbabe3
commit a5c7d693b9
8 changed files with 762 additions and 939 deletions

View File

@ -1,3 +1,49 @@
2011-03-23 Richard Sandiford <richard.sandiford@linaro.org>
* optabs.h (emit_unop_insn, maybe_emit_unop_insn): Change insn code
parameter from "int" to "enum insn_code".
(expand_operand_type): New enum.
(expand_operand): New structure.
(create_expand_operand): New function.
(create_fixed_operand, create_output_operand): Likewise.
(create_input_operand, create_convert_operand_to): Likewise.
(create_convert_operand_from, create_address_operand): Likewise.
(create_integer_operand): Likewise.
(create_convert_operand_from_type, maybe_legitimize_operands): Declare.
(maybe_gen_insn, maybe_expand_insn, maybe_expand_jump_insn): Likewise.
(expand_insn, expand_jump_insn): Likewise.
* builtins.c (expand_builtin_prefetch): Use the new interfaces.
(expand_builtin_interclass_mathfn, expand_builtin_strlen): Likewise.
(expand_movstr, expand_builtin___clear_cache): Likewise.
(expand_builtin_lock_release): Likewise.
* explow.c (allocate_dynamic_stack_space): Likewise.
(probe_stack_range): Likewise. Allow check_stack to FAIL,
and use the default handling in that case.
* expmed.c (check_predicate_volatile_ok): Delete.
(store_bit_field_1, extract_bit_field_1): Use the new interfaces.
(emit_cstore): Likewise.
* expr.c (emit_block_move_via_movmem): Likewise.
(set_storage_via_setmem, expand_assignment): Likewise.
(emit_storent_insn, try_casesi): Likewise.
(emit_single_push_insn): Likewise. Allow the expansion to fail.
* optabs.c (expand_widen_pattern_expr, expand_ternary_op): Likewise.
(expand_vec_shift_expr, expand_binop_directly): Likewise.
(expand_twoval_unop, expand_twoval_binop): Likewise.
(expand_unop_direct, emit_indirect_jump): Likewise.
(emit_conditional_move, vector_compare_rtx): Likewise.
(expand_vec_cond_expr, expand_val_compare_and_swap_1): Likewise.
(expand_sync_operation, expand_sync_fetch_operation): Likewise.
(expand_sync_lock_test_and_set): Likewise.
(maybe_emit_unop_insn): Likewise. Change icode to an insn_code.
(emit_unop_insn): Likewise.
(expand_copysign_absneg): Change icode to an insn_code.
(create_convert_operand_from_type): New function.
(maybe_legitimize_operand, maybe_legitimize_operands): Likewise.
(maybe_gen_insn, maybe_expand_insn, maybe_expand_jump_insn): Likewise.
(expand_insn, expand_jump_insn): Likewise.
* config/i386/i386.md (setmem<mode>): Use nonmemory_operand rather
than const_int_operand for operand 2.
2011-03-23 Andreas Krebbel <Andreas.Krebbel@de.ibm.com> 2011-03-23 Andreas Krebbel <Andreas.Krebbel@de.ibm.com>
* dwarf2out.c (const_ok_for_output_1): Print the unspec enum name * dwarf2out.c (const_ok_for_output_1): Print the unspec enum name

View File

@ -1143,15 +1143,13 @@ expand_builtin_prefetch (tree exp)
#ifdef HAVE_prefetch #ifdef HAVE_prefetch
if (HAVE_prefetch) if (HAVE_prefetch)
{ {
if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate) struct expand_operand ops[3];
(op0,
insn_data[(int) CODE_FOR_prefetch].operand[0].mode)) create_address_operand (&ops[0], op0);
|| (GET_MODE (op0) != Pmode)) create_integer_operand (&ops[1], INTVAL (op1));
{ create_integer_operand (&ops[2], INTVAL (op2));
op0 = convert_memory_address (Pmode, op0); if (maybe_expand_insn (CODE_FOR_prefetch, 3, ops))
op0 = force_reg (Pmode, op0); return;
}
emit_insn (gen_prefetch (op0, op1, op2));
} }
#endif #endif
@ -2431,16 +2429,9 @@ expand_builtin_interclass_mathfn (tree exp, rtx target)
if (icode != CODE_FOR_nothing) if (icode != CODE_FOR_nothing)
{ {
struct expand_operand ops[1];
rtx last = get_last_insn (); rtx last = get_last_insn ();
tree orig_arg = arg; tree orig_arg = arg;
/* Make a suitable register to place result in. */
if (!target
|| GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp))
|| !insn_data[icode].operand[0].predicate (target, GET_MODE (target)))
target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
gcc_assert (insn_data[icode].operand[0].predicate
(target, GET_MODE (target)));
/* Wrap the computation of the argument in a SAVE_EXPR, as we may /* Wrap the computation of the argument in a SAVE_EXPR, as we may
need to expand the argument again. This way, we will not perform need to expand the argument again. This way, we will not perform
@ -2452,10 +2443,11 @@ expand_builtin_interclass_mathfn (tree exp, rtx target)
if (mode != GET_MODE (op0)) if (mode != GET_MODE (op0))
op0 = convert_to_mode (mode, op0, 0); op0 = convert_to_mode (mode, op0, 0);
/* Compute into TARGET. create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
Set TARGET to wherever the result comes back. */ if (maybe_legitimize_operands (icode, 0, 1, ops)
if (maybe_emit_unop_insn (icode, target, op0, UNKNOWN)) && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
return target; return ops[0].value;
delete_insns_since (last); delete_insns_since (last);
CALL_EXPR_ARG (exp, 0) = orig_arg; CALL_EXPR_ARG (exp, 0) = orig_arg;
} }
@ -3362,11 +3354,12 @@ expand_builtin_strlen (tree exp, rtx target,
return NULL_RTX; return NULL_RTX;
else else
{ {
struct expand_operand ops[4];
rtx pat; rtx pat;
tree len; tree len;
tree src = CALL_EXPR_ARG (exp, 0); tree src = CALL_EXPR_ARG (exp, 0);
rtx result, src_reg, char_rtx, before_strlen; rtx src_reg, before_strlen;
enum machine_mode insn_mode = target_mode, char_mode; enum machine_mode insn_mode = target_mode;
enum insn_code icode = CODE_FOR_nothing; enum insn_code icode = CODE_FOR_nothing;
unsigned int align; unsigned int align;
@ -3405,14 +3398,6 @@ expand_builtin_strlen (tree exp, rtx target,
if (insn_mode == VOIDmode) if (insn_mode == VOIDmode)
return NULL_RTX; return NULL_RTX;
/* Make a place to write the result of the instruction. */
result = target;
if (! (result != 0
&& REG_P (result)
&& GET_MODE (result) == insn_mode
&& REGNO (result) >= FIRST_PSEUDO_REGISTER))
result = gen_reg_rtx (insn_mode);
/* Make a place to hold the source address. We will not expand /* Make a place to hold the source address. We will not expand
the actual source until we are sure that the expansion will the actual source until we are sure that the expansion will
not fail -- there are trees that cannot be expanded twice. */ not fail -- there are trees that cannot be expanded twice. */
@ -3422,17 +3407,12 @@ expand_builtin_strlen (tree exp, rtx target,
source operand later. */ source operand later. */
before_strlen = get_last_insn (); before_strlen = get_last_insn ();
char_rtx = const0_rtx; create_output_operand (&ops[0], target, insn_mode);
char_mode = insn_data[(int) icode].operand[2].mode; create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx, create_integer_operand (&ops[2], 0);
char_mode)) create_integer_operand (&ops[3], align);
char_rtx = copy_to_mode_reg (char_mode, char_rtx); if (!maybe_expand_insn (icode, 4, ops))
pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
char_rtx, GEN_INT (align));
if (! pat)
return NULL_RTX; return NULL_RTX;
emit_insn (pat);
/* Now that we are assured of success, expand the source. */ /* Now that we are assured of success, expand the source. */
start_sequence (); start_sequence ();
@ -3448,12 +3428,12 @@ expand_builtin_strlen (tree exp, rtx target,
emit_insn_before (pat, get_insns ()); emit_insn_before (pat, get_insns ());
/* Return the value in the proper mode for this function. */ /* Return the value in the proper mode for this function. */
if (GET_MODE (result) == target_mode) if (GET_MODE (ops[0].value) == target_mode)
target = result; target = ops[0].value;
else if (target != 0) else if (target != 0)
convert_move (target, result, 0); convert_move (target, ops[0].value, 0);
else else
target = convert_to_mode (target_mode, result, 0); target = convert_to_mode (target_mode, ops[0].value, 0);
return target; return target;
} }
@ -3674,56 +3654,39 @@ expand_builtin_mempcpy_args (tree dest, tree src, tree len,
static rtx static rtx
expand_movstr (tree dest, tree src, rtx target, int endp) expand_movstr (tree dest, tree src, rtx target, int endp)
{ {
struct expand_operand ops[3];
rtx end; rtx end;
rtx dest_mem; rtx dest_mem;
rtx src_mem; rtx src_mem;
rtx insn;
const struct insn_data_d * data;
if (!HAVE_movstr) if (!HAVE_movstr)
return NULL_RTX; return NULL_RTX;
dest_mem = get_memory_rtx (dest, NULL); dest_mem = get_memory_rtx (dest, NULL);
src_mem = get_memory_rtx (src, NULL); src_mem = get_memory_rtx (src, NULL);
data = insn_data + CODE_FOR_movstr;
if (!endp) if (!endp)
{ {
target = force_reg (Pmode, XEXP (dest_mem, 0)); target = force_reg (Pmode, XEXP (dest_mem, 0));
dest_mem = replace_equiv_address (dest_mem, target); dest_mem = replace_equiv_address (dest_mem, target);
end = gen_reg_rtx (Pmode);
} }
else
create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
create_fixed_operand (&ops[1], dest_mem);
create_fixed_operand (&ops[2], src_mem);
expand_insn (CODE_FOR_movstr, 3, ops);
if (endp && target != const0_rtx)
{ {
if (target == 0 target = ops[0].value;
|| target == const0_rtx
|| ! (*data->operand[0].predicate) (target, Pmode))
{
end = gen_reg_rtx (Pmode);
if (target != const0_rtx)
target = end;
}
else
end = target;
}
if (data->operand[0].mode != VOIDmode)
end = gen_lowpart (data->operand[0].mode, end);
insn = data->genfun (end, dest_mem, src_mem);
gcc_assert (insn);
emit_insn (insn);
/* movstr is supposed to set end to the address of the NUL /* movstr is supposed to set end to the address of the NUL
terminator. If the caller requested a mempcpy-like return value, terminator. If the caller requested a mempcpy-like return value,
adjust it. */ adjust it. */
if (endp == 1 && target != const0_rtx) if (endp == 1)
{ {
rtx tem = plus_constant (gen_lowpart (GET_MODE (target), end), 1); rtx tem = plus_constant (gen_lowpart (GET_MODE (target), end), 1);
emit_move_insn (target, force_operand (tem, NULL_RTX)); emit_move_insn (target, force_operand (tem, NULL_RTX));
} }
}
return target; return target;
} }
@ -5223,7 +5186,6 @@ expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
/* We have a "clear_cache" insn, and it will handle everything. */ /* We have a "clear_cache" insn, and it will handle everything. */
tree begin, end; tree begin, end;
rtx begin_rtx, end_rtx; rtx begin_rtx, end_rtx;
enum insn_code icode;
/* We must not expand to a library call. If we did, any /* We must not expand to a library call. If we did, any
fallback library function in libgcc that might contain a call to fallback library function in libgcc that might contain a call to
@ -5236,21 +5198,18 @@ expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
if (HAVE_clear_cache) if (HAVE_clear_cache)
{ {
icode = CODE_FOR_clear_cache; struct expand_operand ops[2];
begin = CALL_EXPR_ARG (exp, 0); begin = CALL_EXPR_ARG (exp, 0);
begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL); begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
begin_rtx = convert_memory_address (Pmode, begin_rtx);
if (!insn_data[icode].operand[0].predicate (begin_rtx, Pmode))
begin_rtx = copy_to_mode_reg (Pmode, begin_rtx);
end = CALL_EXPR_ARG (exp, 1); end = CALL_EXPR_ARG (exp, 1);
end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL); end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
end_rtx = convert_memory_address (Pmode, end_rtx);
if (!insn_data[icode].operand[1].predicate (end_rtx, Pmode))
end_rtx = copy_to_mode_reg (Pmode, end_rtx);
emit_insn (gen_clear_cache (begin_rtx, end_rtx)); create_address_operand (&ops[0], begin_rtx);
create_address_operand (&ops[1], end_rtx);
if (maybe_expand_insn (CODE_FOR_clear_cache, 2, ops))
return const0_rtx;
} }
return const0_rtx; return const0_rtx;
#endif /* HAVE_clear_cache */ #endif /* HAVE_clear_cache */
@ -5748,9 +5707,9 @@ expand_builtin_synchronize (void)
static void static void
expand_builtin_lock_release (enum machine_mode mode, tree exp) expand_builtin_lock_release (enum machine_mode mode, tree exp)
{ {
struct expand_operand ops[2];
enum insn_code icode; enum insn_code icode;
rtx mem, insn; rtx mem;
rtx val = const0_rtx;
/* Expand the operands. */ /* Expand the operands. */
mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode); mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
@ -5759,21 +5718,16 @@ expand_builtin_lock_release (enum machine_mode mode, tree exp)
icode = direct_optab_handler (sync_lock_release_optab, mode); icode = direct_optab_handler (sync_lock_release_optab, mode);
if (icode != CODE_FOR_nothing) if (icode != CODE_FOR_nothing)
{ {
if (!insn_data[icode].operand[1].predicate (val, mode)) create_fixed_operand (&ops[0], mem);
val = force_reg (mode, val); create_input_operand (&ops[1], const0_rtx, mode);
if (maybe_expand_insn (icode, 2, ops))
insn = GEN_FCN (icode) (mem, val);
if (insn)
{
emit_insn (insn);
return; return;
} }
}
/* Otherwise we can implement this operation by emitting a barrier /* Otherwise we can implement this operation by emitting a barrier
followed by a store of zero. */ followed by a store of zero. */
expand_builtin_synchronize (); expand_builtin_synchronize ();
emit_move_insn (mem, val); emit_move_insn (mem, const0_rtx);
} }
/* Expand an expression EXP that calls a built-in function, /* Expand an expression EXP that calls a built-in function,

View File

@ -15793,7 +15793,7 @@
(define_expand "setmem<mode>" (define_expand "setmem<mode>"
[(use (match_operand:BLK 0 "memory_operand" "")) [(use (match_operand:BLK 0 "memory_operand" ""))
(use (match_operand:SWI48 1 "nonmemory_operand" "")) (use (match_operand:SWI48 1 "nonmemory_operand" ""))
(use (match_operand 2 "const_int_operand" "")) (use (match_operand:QI 2 "nonmemory_operand" ""))
(use (match_operand 3 "const_int_operand" "")) (use (match_operand 3 "const_int_operand" ""))
(use (match_operand:SI 4 "const_int_operand" "")) (use (match_operand:SI 4 "const_int_operand" ""))
(use (match_operand:SI 5 "const_int_operand" ""))] (use (match_operand:SI 5 "const_int_operand" ""))]

View File

@ -1379,21 +1379,13 @@ allocate_dynamic_stack_space (rtx size, unsigned size_align,
#ifdef HAVE_allocate_stack #ifdef HAVE_allocate_stack
if (HAVE_allocate_stack) if (HAVE_allocate_stack)
{ {
enum machine_mode mode = STACK_SIZE_MODE; struct expand_operand ops[2];
insn_operand_predicate_fn pred;
/* We don't have to check against the predicate for operand 0 since /* We don't have to check against the predicate for operand 0 since
TARGET is known to be a pseudo of the proper mode, which must TARGET is known to be a pseudo of the proper mode, which must
be valid for the operand. For operand 1, convert to the be valid for the operand. */
proper mode and validate. */ create_fixed_operand (&ops[0], target);
if (mode == VOIDmode) create_convert_operand_to (&ops[1], size, STACK_SIZE_MODE, true);
mode = insn_data[(int) CODE_FOR_allocate_stack].operand[1].mode; expand_insn (CODE_FOR_allocate_stack, 2, ops);
pred = insn_data[(int) CODE_FOR_allocate_stack].operand[1].predicate;
if (pred && ! ((*pred) (size, mode)))
size = copy_to_mode_reg (mode, convert_to_mode (mode, size, 1));
emit_insn (gen_allocate_stack (target, size));
} }
else else
#endif #endif
@ -1544,22 +1536,22 @@ probe_stack_range (HOST_WIDE_INT first, rtx size)
plus_constant (size, first))); plus_constant (size, first)));
emit_library_call (stack_check_libfunc, LCT_NORMAL, VOIDmode, 1, addr, emit_library_call (stack_check_libfunc, LCT_NORMAL, VOIDmode, 1, addr,
Pmode); Pmode);
return;
} }
/* Next see if we have an insn to check the stack. */ /* Next see if we have an insn to check the stack. */
#ifdef HAVE_check_stack #ifdef HAVE_check_stack
else if (HAVE_check_stack) if (HAVE_check_stack)
{ {
struct expand_operand ops[1];
rtx addr = memory_address (Pmode, rtx addr = memory_address (Pmode,
gen_rtx_fmt_ee (STACK_GROW_OP, Pmode, gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
stack_pointer_rtx, stack_pointer_rtx,
plus_constant (size, first))); plus_constant (size, first)));
insn_operand_predicate_fn pred
= insn_data[(int) CODE_FOR_check_stack].operand[0].predicate;
if (pred && !((*pred) (addr, Pmode)))
addr = copy_to_mode_reg (Pmode, addr);
emit_insn (gen_check_stack (addr)); create_input_operand (&ops[0], addr, Pmode);
if (maybe_expand_insn (CODE_FOR_check_stack, 1, ops))
return;
} }
#endif #endif

View File

@ -323,22 +323,6 @@ mode_for_extraction (enum extraction_pattern pattern, int opno)
return word_mode; return word_mode;
return data->operand[opno].mode; return data->operand[opno].mode;
} }
/* Return true if X, of mode MODE, matches the predicate for operand
OPNO of instruction ICODE. Allow volatile memories, regardless of
the ambient volatile_ok setting. */
static bool
check_predicate_volatile_ok (enum insn_code icode, int opno,
rtx x, enum machine_mode mode)
{
bool save_volatile_ok, result;
save_volatile_ok = volatile_ok;
result = insn_data[(int) icode].operand[opno].predicate (x, mode);
volatile_ok = save_volatile_ok;
return result;
}
/* A subroutine of store_bit_field, with the same arguments. Return true /* A subroutine of store_bit_field, with the same arguments. Return true
if the operation could be implemented. if the operation could be implemented.
@ -405,41 +389,18 @@ store_bit_field_1 (rtx str_rtx, unsigned HOST_WIDE_INT bitsize,
&& bitsize == GET_MODE_BITSIZE (GET_MODE_INNER (GET_MODE (op0))) && bitsize == GET_MODE_BITSIZE (GET_MODE_INNER (GET_MODE (op0)))
&& !(bitnum % GET_MODE_BITSIZE (GET_MODE_INNER (GET_MODE (op0))))) && !(bitnum % GET_MODE_BITSIZE (GET_MODE_INNER (GET_MODE (op0)))))
{ {
struct expand_operand ops[3];
enum machine_mode outermode = GET_MODE (op0); enum machine_mode outermode = GET_MODE (op0);
enum machine_mode innermode = GET_MODE_INNER (outermode); enum machine_mode innermode = GET_MODE_INNER (outermode);
int icode = (int) optab_handler (vec_set_optab, outermode); enum insn_code icode = optab_handler (vec_set_optab, outermode);
int pos = bitnum / GET_MODE_BITSIZE (innermode); int pos = bitnum / GET_MODE_BITSIZE (innermode);
rtx rtxpos = GEN_INT (pos);
rtx src = value;
rtx dest = op0;
rtx pat, seq;
enum machine_mode mode0 = insn_data[icode].operand[0].mode;
enum machine_mode mode1 = insn_data[icode].operand[1].mode;
enum machine_mode mode2 = insn_data[icode].operand[2].mode;
start_sequence (); create_fixed_operand (&ops[0], op0);
create_input_operand (&ops[1], value, innermode);
if (! (*insn_data[icode].operand[1].predicate) (src, mode1)) create_integer_operand (&ops[2], pos);
src = copy_to_mode_reg (mode1, src); if (maybe_expand_insn (icode, 3, ops))
if (! (*insn_data[icode].operand[2].predicate) (rtxpos, mode2))
rtxpos = copy_to_mode_reg (mode1, rtxpos);
/* We could handle this, but we should always be called with a pseudo
for our targets and all insns should take them as outputs. */
gcc_assert ((*insn_data[icode].operand[0].predicate) (dest, mode0)
&& (*insn_data[icode].operand[1].predicate) (src, mode1)
&& (*insn_data[icode].operand[2].predicate) (rtxpos, mode2));
pat = GEN_FCN (icode) (dest, src, rtxpos);
seq = get_insns ();
end_sequence ();
if (pat)
{
emit_insn (seq);
emit_insn (pat);
return true; return true;
} }
}
/* If the target is a register, overwriting the entire object, or storing /* If the target is a register, overwriting the entire object, or storing
a full-word or multi-word field can be done with just a SUBREG. a full-word or multi-word field can be done with just a SUBREG.
@ -515,45 +476,31 @@ store_bit_field_1 (rtx str_rtx, unsigned HOST_WIDE_INT bitsize,
&& bitsize == GET_MODE_BITSIZE (fieldmode) && bitsize == GET_MODE_BITSIZE (fieldmode)
&& optab_handler (movstrict_optab, fieldmode) != CODE_FOR_nothing) && optab_handler (movstrict_optab, fieldmode) != CODE_FOR_nothing)
{ {
int icode = optab_handler (movstrict_optab, fieldmode); struct expand_operand ops[2];
rtx insn; enum insn_code icode = optab_handler (movstrict_optab, fieldmode);
rtx start = get_last_insn ();
rtx arg0 = op0; rtx arg0 = op0;
/* Get appropriate low part of the value being stored. */ if (GET_CODE (arg0) == SUBREG)
if (CONST_INT_P (value) || REG_P (value))
value = gen_lowpart (fieldmode, value);
else if (!(GET_CODE (value) == SYMBOL_REF
|| GET_CODE (value) == LABEL_REF
|| GET_CODE (value) == CONST))
value = convert_to_mode (fieldmode, value, 0);
if (! (*insn_data[icode].operand[1].predicate) (value, fieldmode))
value = copy_to_mode_reg (fieldmode, value);
if (GET_CODE (op0) == SUBREG)
{ {
/* Else we've got some float mode source being extracted into /* Else we've got some float mode source being extracted into
a different float mode destination -- this combination of a different float mode destination -- this combination of
subregs results in Severe Tire Damage. */ subregs results in Severe Tire Damage. */
gcc_assert (GET_MODE (SUBREG_REG (op0)) == fieldmode gcc_assert (GET_MODE (SUBREG_REG (arg0)) == fieldmode
|| GET_MODE_CLASS (fieldmode) == MODE_INT || GET_MODE_CLASS (fieldmode) == MODE_INT
|| GET_MODE_CLASS (fieldmode) == MODE_PARTIAL_INT); || GET_MODE_CLASS (fieldmode) == MODE_PARTIAL_INT);
arg0 = SUBREG_REG (op0); arg0 = SUBREG_REG (arg0);
} }
insn = (GEN_FCN (icode) arg0 = gen_rtx_SUBREG (fieldmode, arg0,
(gen_rtx_SUBREG (fieldmode, arg0,
(bitnum % BITS_PER_WORD) / BITS_PER_UNIT (bitnum % BITS_PER_WORD) / BITS_PER_UNIT
+ (offset * UNITS_PER_WORD)), + (offset * UNITS_PER_WORD));
value));
if (insn) create_fixed_operand (&ops[0], arg0);
{ /* Shrink the source operand to FIELDMODE. */
emit_insn (insn); create_convert_operand_to (&ops[1], value, fieldmode, false);
if (maybe_expand_insn (icode, 2, ops))
return true; return true;
} }
delete_insns_since (start);
}
/* Handle fields bigger than a word. */ /* Handle fields bigger than a word. */
@ -653,16 +600,13 @@ store_bit_field_1 (rtx str_rtx, unsigned HOST_WIDE_INT bitsize,
&& bitsize > 0 && bitsize > 0
&& GET_MODE_BITSIZE (op_mode) >= bitsize && GET_MODE_BITSIZE (op_mode) >= bitsize
&& ! ((REG_P (op0) || GET_CODE (op0) == SUBREG) && ! ((REG_P (op0) || GET_CODE (op0) == SUBREG)
&& (bitsize + bitpos > GET_MODE_BITSIZE (op_mode))) && (bitsize + bitpos > GET_MODE_BITSIZE (op_mode))))
&& insn_data[CODE_FOR_insv].operand[1].predicate (GEN_INT (bitsize),
VOIDmode)
&& check_predicate_volatile_ok (CODE_FOR_insv, 0, op0, VOIDmode))
{ {
struct expand_operand ops[4];
int xbitpos = bitpos; int xbitpos = bitpos;
rtx value1; rtx value1;
rtx xop0 = op0; rtx xop0 = op0;
rtx last = get_last_insn (); rtx last = get_last_insn ();
rtx pat;
bool copy_back = false; bool copy_back = false;
/* Add OFFSET into OP0's address. */ /* Add OFFSET into OP0's address. */
@ -743,17 +687,12 @@ store_bit_field_1 (rtx str_rtx, unsigned HOST_WIDE_INT bitsize,
gcc_assert (CONSTANT_P (value)); gcc_assert (CONSTANT_P (value));
} }
/* If this machine's insv insists on a register, create_fixed_operand (&ops[0], xop0);
get VALUE1 into a register. */ create_integer_operand (&ops[1], bitsize);
if (! ((*insn_data[(int) CODE_FOR_insv].operand[3].predicate) create_integer_operand (&ops[2], xbitpos);
(value1, op_mode))) create_input_operand (&ops[3], value1, op_mode);
value1 = force_reg (op_mode, value1); if (maybe_expand_insn (CODE_FOR_insv, 4, ops))
pat = gen_insv (xop0, GEN_INT (bitsize), GEN_INT (xbitpos), value1);
if (pat)
{ {
emit_insn (pat);
if (copy_back) if (copy_back)
convert_move (op0, xop0, true); convert_move (op0, xop0, true);
return true; return true;
@ -1235,50 +1174,21 @@ extract_bit_field_1 (rtx str_rtx, unsigned HOST_WIDE_INT bitsize,
&& ((bitnum + bitsize - 1) / GET_MODE_BITSIZE (GET_MODE_INNER (GET_MODE (op0))) && ((bitnum + bitsize - 1) / GET_MODE_BITSIZE (GET_MODE_INNER (GET_MODE (op0)))
== bitnum / GET_MODE_BITSIZE (GET_MODE_INNER (GET_MODE (op0))))) == bitnum / GET_MODE_BITSIZE (GET_MODE_INNER (GET_MODE (op0)))))
{ {
struct expand_operand ops[3];
enum machine_mode outermode = GET_MODE (op0); enum machine_mode outermode = GET_MODE (op0);
enum machine_mode innermode = GET_MODE_INNER (outermode); enum machine_mode innermode = GET_MODE_INNER (outermode);
int icode = (int) optab_handler (vec_extract_optab, outermode); enum insn_code icode = optab_handler (vec_extract_optab, outermode);
unsigned HOST_WIDE_INT pos = bitnum / GET_MODE_BITSIZE (innermode); unsigned HOST_WIDE_INT pos = bitnum / GET_MODE_BITSIZE (innermode);
rtx rtxpos = GEN_INT (pos);
rtx src = op0;
rtx dest = NULL, pat, seq;
enum machine_mode mode0 = insn_data[icode].operand[0].mode;
enum machine_mode mode1 = insn_data[icode].operand[1].mode;
enum machine_mode mode2 = insn_data[icode].operand[2].mode;
if (innermode == tmode || innermode == mode) create_output_operand (&ops[0], target, innermode);
dest = target; create_input_operand (&ops[1], op0, outermode);
create_integer_operand (&ops[2], pos);
if (!dest) if (maybe_expand_insn (icode, 3, ops))
dest = gen_reg_rtx (innermode);
start_sequence ();
if (! (*insn_data[icode].operand[0].predicate) (dest, mode0))
dest = copy_to_mode_reg (mode0, dest);
if (! (*insn_data[icode].operand[1].predicate) (src, mode1))
src = copy_to_mode_reg (mode1, src);
if (! (*insn_data[icode].operand[2].predicate) (rtxpos, mode2))
rtxpos = copy_to_mode_reg (mode1, rtxpos);
/* We could handle this, but we should always be called with a pseudo
for our targets and all insns should take them as outputs. */
gcc_assert ((*insn_data[icode].operand[0].predicate) (dest, mode0)
&& (*insn_data[icode].operand[1].predicate) (src, mode1)
&& (*insn_data[icode].operand[2].predicate) (rtxpos, mode2));
pat = GEN_FCN (icode) (dest, src, rtxpos);
seq = get_insns ();
end_sequence ();
if (pat)
{ {
emit_insn (seq); target = ops[0].value;
emit_insn (pat); if (GET_MODE (target) != mode)
if (mode0 != mode) return gen_lowpart (tmode, target);
return gen_lowpart (tmode, dest); return target;
return dest;
} }
} }
@ -1517,17 +1427,14 @@ extract_bit_field_1 (rtx str_rtx, unsigned HOST_WIDE_INT bitsize,
acceptable to the format of ext(z)v. */ acceptable to the format of ext(z)v. */
&& !(GET_CODE (op0) == SUBREG && GET_MODE (op0) != ext_mode) && !(GET_CODE (op0) == SUBREG && GET_MODE (op0) != ext_mode)
&& !((REG_P (op0) || GET_CODE (op0) == SUBREG) && !((REG_P (op0) || GET_CODE (op0) == SUBREG)
&& (bitsize + bitpos > GET_MODE_BITSIZE (ext_mode))) && (bitsize + bitpos > GET_MODE_BITSIZE (ext_mode))))
&& check_predicate_volatile_ok (icode, 1, op0, GET_MODE (op0)))
{ {
struct expand_operand ops[4];
unsigned HOST_WIDE_INT xbitpos = bitpos, xoffset = offset; unsigned HOST_WIDE_INT xbitpos = bitpos, xoffset = offset;
rtx bitsize_rtx, bitpos_rtx;
rtx last = get_last_insn ();
rtx xop0 = op0; rtx xop0 = op0;
rtx xtarget = target; rtx xtarget = target;
rtx xspec_target = target; rtx xspec_target = target;
rtx xspec_target_subreg = 0; rtx xspec_target_subreg = 0;
rtx pat;
/* If op0 is a register, we need it in EXT_MODE to make it /* If op0 is a register, we need it in EXT_MODE to make it
acceptable to the format of ext(z)v. */ acceptable to the format of ext(z)v. */
@ -1570,27 +1477,20 @@ extract_bit_field_1 (rtx str_rtx, unsigned HOST_WIDE_INT bitsize,
xtarget = gen_reg_rtx (ext_mode); xtarget = gen_reg_rtx (ext_mode);
} }
/* If this machine's ext(z)v insists on a register target, create_output_operand (&ops[0], xtarget, ext_mode);
make sure we have one. */ create_fixed_operand (&ops[1], xop0);
if (!insn_data[(int) icode].operand[0].predicate (xtarget, ext_mode)) create_integer_operand (&ops[2], bitsize);
xtarget = gen_reg_rtx (ext_mode); create_integer_operand (&ops[3], xbitpos);
if (maybe_expand_insn (unsignedp ? CODE_FOR_extzv : CODE_FOR_extv,
bitsize_rtx = GEN_INT (bitsize); 4, ops))
bitpos_rtx = GEN_INT (xbitpos);
pat = (unsignedp
? gen_extzv (xtarget, xop0, bitsize_rtx, bitpos_rtx)
: gen_extv (xtarget, xop0, bitsize_rtx, bitpos_rtx));
if (pat)
{ {
emit_insn (pat); xtarget = ops[0].value;
if (xtarget == xspec_target) if (xtarget == xspec_target)
return xtarget; return xtarget;
if (xtarget == xspec_target_subreg) if (xtarget == xspec_target_subreg)
return xspec_target; return xspec_target;
return convert_extracted_bit_field (xtarget, mode, tmode, unsignedp); return convert_extracted_bit_field (xtarget, mode, tmode, unsignedp);
} }
delete_insns_since (last);
} }
/* If OP0 is a memory, try copying it to a register and seeing if a /* If OP0 is a memory, try copying it to a register and seeing if a
@ -5101,19 +5001,14 @@ emit_cstore (rtx target, enum insn_code icode, enum rtx_code code,
int unsignedp, rtx x, rtx y, int normalizep, int unsignedp, rtx x, rtx y, int normalizep,
enum machine_mode target_mode) enum machine_mode target_mode)
{ {
rtx op0, last, comparison, subtarget, pattern; struct expand_operand ops[4];
rtx op0, last, comparison, subtarget;
enum machine_mode result_mode = insn_data[(int) icode].operand[0].mode; enum machine_mode result_mode = insn_data[(int) icode].operand[0].mode;
last = get_last_insn (); last = get_last_insn ();
x = prepare_operand (icode, x, 2, mode, compare_mode, unsignedp); x = prepare_operand (icode, x, 2, mode, compare_mode, unsignedp);
y = prepare_operand (icode, y, 3, mode, compare_mode, unsignedp); y = prepare_operand (icode, y, 3, mode, compare_mode, unsignedp);
comparison = gen_rtx_fmt_ee (code, result_mode, x, y); if (!x || !y)
if (!x || !y
|| !insn_data[icode].operand[2].predicate
(x, insn_data[icode].operand[2].mode)
|| !insn_data[icode].operand[3].predicate
(y, insn_data[icode].operand[3].mode)
|| !insn_data[icode].operand[1].predicate (comparison, VOIDmode))
{ {
delete_insns_since (last); delete_insns_since (last);
return NULL_RTX; return NULL_RTX;
@ -5124,16 +5019,18 @@ emit_cstore (rtx target, enum insn_code icode, enum rtx_code code,
if (!target) if (!target)
target = gen_reg_rtx (target_mode); target = gen_reg_rtx (target_mode);
if (optimize comparison = gen_rtx_fmt_ee (code, result_mode, x, y);
|| !(insn_data[(int) icode].operand[0].predicate (target, result_mode)))
subtarget = gen_reg_rtx (result_mode);
else
subtarget = target;
pattern = GEN_FCN (icode) (subtarget, comparison, x, y); create_output_operand (&ops[0], optimize ? NULL_RTX : target, result_mode);
if (!pattern) create_fixed_operand (&ops[1], comparison);
create_fixed_operand (&ops[2], x);
create_fixed_operand (&ops[3], y);
if (!maybe_expand_insn (icode, 4, ops))
{
delete_insns_since (last);
return NULL_RTX; return NULL_RTX;
emit_insn (pattern); }
subtarget = ops[0].value;
/* If we are converting to a wider mode, first convert to /* If we are converting to a wider mode, first convert to
TARGET_MODE, then normalize. This produces better combining TARGET_MODE, then normalize. This produces better combining

View File

@ -1258,7 +1258,6 @@ static bool
emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align, emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
unsigned int expected_align, HOST_WIDE_INT expected_size) unsigned int expected_align, HOST_WIDE_INT expected_size)
{ {
rtx opalign = GEN_INT (align / BITS_PER_UNIT);
int save_volatile_ok = volatile_ok; int save_volatile_ok = volatile_ok;
enum machine_mode mode; enum machine_mode mode;
@ -1276,7 +1275,6 @@ emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
mode = GET_MODE_WIDER_MODE (mode)) mode = GET_MODE_WIDER_MODE (mode))
{ {
enum insn_code code = direct_optab_handler (movmem_optab, mode); enum insn_code code = direct_optab_handler (movmem_optab, mode);
insn_operand_predicate_fn pred;
if (code != CODE_FOR_nothing if (code != CODE_FOR_nothing
/* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
@ -1286,43 +1284,32 @@ emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
&& ((CONST_INT_P (size) && ((CONST_INT_P (size)
&& ((unsigned HOST_WIDE_INT) INTVAL (size) && ((unsigned HOST_WIDE_INT) INTVAL (size)
<= (GET_MODE_MASK (mode) >> 1))) <= (GET_MODE_MASK (mode) >> 1)))
|| GET_MODE_BITSIZE (mode) >= BITS_PER_WORD) || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD))
&& ((pred = insn_data[(int) code].operand[0].predicate) == 0
|| (*pred) (x, BLKmode))
&& ((pred = insn_data[(int) code].operand[1].predicate) == 0
|| (*pred) (y, BLKmode))
&& ((pred = insn_data[(int) code].operand[3].predicate) == 0
|| (*pred) (opalign, VOIDmode)))
{ {
rtx op2; struct expand_operand ops[6];
rtx last = get_last_insn (); unsigned int nops;
rtx pat;
op2 = convert_to_mode (mode, size, 1);
pred = insn_data[(int) code].operand[2].predicate;
if (pred != 0 && ! (*pred) (op2, mode))
op2 = copy_to_mode_reg (mode, op2);
/* ??? When called via emit_block_move_for_call, it'd be /* ??? When called via emit_block_move_for_call, it'd be
nice if there were some way to inform the backend, so nice if there were some way to inform the backend, so
that it doesn't fail the expansion because it thinks that it doesn't fail the expansion because it thinks
emitting the libcall would be more efficient. */ emitting the libcall would be more efficient. */
nops = insn_data[(int) code].n_operands;
if (insn_data[(int) code].n_operands == 4) create_fixed_operand (&ops[0], x);
pat = GEN_FCN ((int) code) (x, y, op2, opalign); create_fixed_operand (&ops[1], y);
else /* The check above guarantees that this size conversion is valid. */
pat = GEN_FCN ((int) code) (x, y, op2, opalign, create_convert_operand_to (&ops[2], size, mode, true);
GEN_INT (expected_align create_integer_operand (&ops[3], align / BITS_PER_UNIT);
/ BITS_PER_UNIT), if (nops != 4)
GEN_INT (expected_size)); {
if (pat) create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
create_integer_operand (&ops[5], expected_size);
nops = 6;
}
if (maybe_expand_insn (code, nops, ops))
{ {
emit_insn (pat);
volatile_ok = save_volatile_ok; volatile_ok = save_volatile_ok;
return true; return true;
} }
else
delete_insns_since (last);
} }
} }
@ -2705,7 +2692,6 @@ set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
including more than one in the machine description unless including more than one in the machine description unless
the more limited one has some advantage. */ the more limited one has some advantage. */
rtx opalign = GEN_INT (align / BITS_PER_UNIT);
enum machine_mode mode; enum machine_mode mode;
if (expected_align < align) if (expected_align < align)
@ -2715,7 +2701,6 @@ set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
mode = GET_MODE_WIDER_MODE (mode)) mode = GET_MODE_WIDER_MODE (mode))
{ {
enum insn_code code = direct_optab_handler (setmem_optab, mode); enum insn_code code = direct_optab_handler (setmem_optab, mode);
insn_operand_predicate_fn pred;
if (code != CODE_FOR_nothing if (code != CODE_FOR_nothing
/* We don't need MODE to be narrower than /* We don't need MODE to be narrower than
@ -2725,47 +2710,26 @@ set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
&& ((CONST_INT_P (size) && ((CONST_INT_P (size)
&& ((unsigned HOST_WIDE_INT) INTVAL (size) && ((unsigned HOST_WIDE_INT) INTVAL (size)
<= (GET_MODE_MASK (mode) >> 1))) <= (GET_MODE_MASK (mode) >> 1)))
|| GET_MODE_BITSIZE (mode) >= BITS_PER_WORD) || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD))
&& ((pred = insn_data[(int) code].operand[0].predicate) == 0
|| (*pred) (object, BLKmode))
&& ((pred = insn_data[(int) code].operand[3].predicate) == 0
|| (*pred) (opalign, VOIDmode)))
{ {
rtx opsize, opchar; struct expand_operand ops[6];
enum machine_mode char_mode; unsigned int nops;
rtx last = get_last_insn ();
rtx pat;
opsize = convert_to_mode (mode, size, 1); nops = insn_data[(int) code].n_operands;
pred = insn_data[(int) code].operand[1].predicate; create_fixed_operand (&ops[0], object);
if (pred != 0 && ! (*pred) (opsize, mode)) /* The check above guarantees that this size conversion is valid. */
opsize = copy_to_mode_reg (mode, opsize); create_convert_operand_to (&ops[1], size, mode, true);
create_convert_operand_from (&ops[2], val, byte_mode, true);
opchar = val; create_integer_operand (&ops[3], align / BITS_PER_UNIT);
char_mode = insn_data[(int) code].operand[2].mode; if (nops != 4)
if (char_mode != VOIDmode)
{ {
opchar = convert_to_mode (char_mode, opchar, 1); create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
pred = insn_data[(int) code].operand[2].predicate; create_integer_operand (&ops[5], expected_size);
if (pred != 0 && ! (*pred) (opchar, char_mode)) nops = 6;
opchar = copy_to_mode_reg (char_mode, opchar);
} }
if (maybe_expand_insn (code, nops, ops))
if (insn_data[(int) code].n_operands == 4)
pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign);
else
pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign,
GEN_INT (expected_align
/ BITS_PER_UNIT),
GEN_INT (expected_size));
if (pat)
{
emit_insn (pat);
return true; return true;
} }
else
delete_insns_since (last);
}
} }
return false; return false;
@ -3547,7 +3511,6 @@ emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode)); unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
rtx dest; rtx dest;
enum insn_code icode; enum insn_code icode;
insn_operand_predicate_fn pred;
stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode)); stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
/* If there is push pattern, use it. Otherwise try old way of throwing /* If there is push pattern, use it. Otherwise try old way of throwing
@ -3555,10 +3518,10 @@ emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
icode = optab_handler (push_optab, mode); icode = optab_handler (push_optab, mode);
if (icode != CODE_FOR_nothing) if (icode != CODE_FOR_nothing)
{ {
if (((pred = insn_data[(int) icode].operand[0].predicate) struct expand_operand ops[1];
&& !((*pred) (x, mode))))
x = force_reg (mode, x); create_input_operand (&ops[0], x, mode);
emit_insn (GEN_FCN (icode) (x)); if (maybe_expand_insn (icode, 1, ops))
return; return;
} }
if (GET_MODE_SIZE (mode) == rounded_size) if (GET_MODE_SIZE (mode) == rounded_size)
@ -4147,7 +4110,8 @@ expand_assignment (tree to, tree from, bool nontemporal)
rtx to_rtx = 0; rtx to_rtx = 0;
rtx result; rtx result;
enum machine_mode mode; enum machine_mode mode;
int align, icode; int align;
enum insn_code icode;
/* Don't crash if the lhs of the assignment was erroneous. */ /* Don't crash if the lhs of the assignment was erroneous. */
if (TREE_CODE (to) == ERROR_MARK) if (TREE_CODE (to) == ERROR_MARK)
@ -4170,8 +4134,9 @@ expand_assignment (tree to, tree from, bool nontemporal)
&& ((icode = optab_handler (movmisalign_optab, mode)) && ((icode = optab_handler (movmisalign_optab, mode))
!= CODE_FOR_nothing)) != CODE_FOR_nothing))
{ {
enum machine_mode address_mode, op_mode1; struct expand_operand ops[2];
rtx insn, reg, op0, mem; enum machine_mode address_mode;
rtx reg, op0, mem;
reg = expand_expr (from, NULL_RTX, VOIDmode, EXPAND_NORMAL); reg = expand_expr (from, NULL_RTX, VOIDmode, EXPAND_NORMAL);
reg = force_not_mem (reg); reg = force_not_mem (reg);
@ -4212,16 +4177,11 @@ expand_assignment (tree to, tree from, bool nontemporal)
if (TREE_THIS_VOLATILE (to)) if (TREE_THIS_VOLATILE (to))
MEM_VOLATILE_P (mem) = 1; MEM_VOLATILE_P (mem) = 1;
op_mode1 = insn_data[icode].operand[1].mode; create_fixed_operand (&ops[0], mem);
if (! (*insn_data[icode].operand[1].predicate) (reg, op_mode1) create_input_operand (&ops[1], reg, mode);
&& op_mode1 != VOIDmode)
reg = copy_to_mode_reg (op_mode1, reg);
insn = GEN_FCN (icode) (mem, reg);
/* The movmisalign<mode> pattern cannot fail, else the assignment would /* The movmisalign<mode> pattern cannot fail, else the assignment would
silently be omitted. */ silently be omitted. */
gcc_assert (insn != NULL_RTX); expand_insn (icode, 2, ops);
emit_insn (insn);
return; return;
} }
@ -4483,31 +4443,16 @@ expand_assignment (tree to, tree from, bool nontemporal)
bool bool
emit_storent_insn (rtx to, rtx from) emit_storent_insn (rtx to, rtx from)
{ {
enum machine_mode mode = GET_MODE (to), imode; struct expand_operand ops[2];
enum machine_mode mode = GET_MODE (to);
enum insn_code code = optab_handler (storent_optab, mode); enum insn_code code = optab_handler (storent_optab, mode);
rtx pattern;
if (code == CODE_FOR_nothing) if (code == CODE_FOR_nothing)
return false; return false;
imode = insn_data[code].operand[0].mode; create_fixed_operand (&ops[0], to);
if (!insn_data[code].operand[0].predicate (to, imode)) create_input_operand (&ops[1], from, mode);
return false; return maybe_expand_insn (code, 2, ops);
imode = insn_data[code].operand[1].mode;
if (!insn_data[code].operand[1].predicate (from, imode))
{
from = copy_to_mode_reg (imode, from);
if (!insn_data[code].operand[1].predicate (from, imode))
return false;
}
pattern = GEN_FCN (code) (to, from);
if (pattern == NULL_RTX)
return false;
emit_insn (pattern);
return true;
} }
/* Generate code for computing expression EXP, /* Generate code for computing expression EXP,
@ -10120,10 +10065,10 @@ try_casesi (tree index_type, tree index_expr, tree minval, tree range,
rtx table_label ATTRIBUTE_UNUSED, rtx default_label, rtx table_label ATTRIBUTE_UNUSED, rtx default_label,
rtx fallback_label ATTRIBUTE_UNUSED) rtx fallback_label ATTRIBUTE_UNUSED)
{ {
struct expand_operand ops[5];
enum machine_mode index_mode = SImode; enum machine_mode index_mode = SImode;
int index_bits = GET_MODE_BITSIZE (index_mode); int index_bits = GET_MODE_BITSIZE (index_mode);
rtx op1, op2, index; rtx op1, op2, index;
enum machine_mode op_mode;
if (! HAVE_casesi) if (! HAVE_casesi)
return 0; return 0;
@ -10158,32 +10103,17 @@ try_casesi (tree index_type, tree index_expr, tree minval, tree range,
do_pending_stack_adjust (); do_pending_stack_adjust ();
op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
(index, op_mode))
index = copy_to_mode_reg (op_mode, index);
op1 = expand_normal (minval); op1 = expand_normal (minval);
op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
(op1, op_mode))
op1 = copy_to_mode_reg (op_mode, op1);
op2 = expand_normal (range); op2 = expand_normal (range);
op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode; create_input_operand (&ops[0], index, index_mode);
op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)), create_convert_operand_from_type (&ops[1], op1, TREE_TYPE (minval));
op2, TYPE_UNSIGNED (TREE_TYPE (range))); create_convert_operand_from_type (&ops[2], op2, TREE_TYPE (range));
if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate) create_fixed_operand (&ops[3], table_label);
(op2, op_mode)) create_fixed_operand (&ops[4], (default_label
op2 = copy_to_mode_reg (op_mode, op2); ? default_label
: fallback_label));
emit_jump_insn (gen_casesi (index, op1, op2, expand_jump_insn (CODE_FOR_casesi, 5, ops);
table_label, !default_label
? fallback_label : default_label));
return 1; return 1;
} }

File diff suppressed because it is too large Load Diff

View File

@ -791,8 +791,8 @@ extern rtx expand_copysign (rtx, rtx, rtx);
/* Generate an instruction with a given INSN_CODE with an output and /* Generate an instruction with a given INSN_CODE with an output and
an input. */ an input. */
extern void emit_unop_insn (int, rtx, rtx, enum rtx_code); extern void emit_unop_insn (enum insn_code, rtx, rtx, enum rtx_code);
extern bool maybe_emit_unop_insn (int, rtx, rtx, enum rtx_code); extern bool maybe_emit_unop_insn (enum insn_code, rtx, rtx, enum rtx_code);
/* An extra flag to control optab_for_tree_code's behavior. This is needed to /* An extra flag to control optab_for_tree_code's behavior. This is needed to
distinguish between machines with a vector shift that takes a scalar for the distinguish between machines with a vector shift that takes a scalar for the
@ -926,6 +926,148 @@ extern rtx convert_optab_libfunc (convert_optab optab, enum machine_mode mode1,
extern bool insn_operand_matches (enum insn_code icode, unsigned int opno, extern bool insn_operand_matches (enum insn_code icode, unsigned int opno,
rtx operand); rtx operand);
/* Describes the type of an expand_operand.  Each value is associated
   with a create_*_operand function; see the comments above those
   functions for details.  */
enum expand_operand_type {
  /* The operand's rtx must be used exactly as given, even if it is
     volatile (create_fixed_operand).  */
  EXPAND_FIXED,

  /* An output operand; any rtx supplied is only a suggestion for where
     the result should go (create_output_operand).  */
  EXPAND_OUTPUT,

  /* An input operand that must already have the required mode
     (create_input_operand).  */
  EXPAND_INPUT,

  /* An input operand that must first be converted to the required mode
     (create_convert_operand_to).  */
  EXPAND_CONVERT_TO,

  /* An input operand whose value is fixed but whose mode the backend
     may change via conversion (create_convert_operand_from).  */
  EXPAND_CONVERT_FROM,

  /* An input Pmode address operand (create_address_operand).  */
  EXPAND_ADDRESS,

  /* An expand-time integer constant with no inherent mode
     (create_integer_operand).  */
  EXPAND_INTEGER
};
/* Information about an operand for instruction expansion.  */
struct expand_operand {
  /* The type of operand; one of the EXPAND_* values above.  */
  ENUM_BITFIELD (expand_operand_type) type : 8;

  /* True if any conversion should treat VALUE as being unsigned
     rather than signed.  Only meaningful for certain types;
     the create_convert_operand_* functions take it as a parameter.  */
  unsigned int unsigned_p : 1;

  /* Unused; available for future use.  */
  unsigned int unused : 7;

  /* The mode passed to the convert_*_operand function.  It has a
     type-dependent meaning.  */
  ENUM_BITFIELD (machine_mode) mode : 16;

  /* The value of the operand.  */
  rtx value;
};
/* Initialize OP with the given fields.  Initialize the other fields
   to their default values.  This is the common worker for all the
   create_*_operand functions below.  */
static inline void
create_expand_operand (struct expand_operand *op,
		       enum expand_operand_type type,
		       rtx value, enum machine_mode mode,
		       bool unsigned_p)
{
  op->type = type;
  op->unsigned_p = unsigned_p;
  /* Reserved bits; always clear them so future fields start out zero.  */
  op->unused = 0;
  op->mode = mode;
  op->value = value;
}
/* Make OP describe an operand that must use rtx X, even if X is volatile.
   No mode or signedness applies, so pass the defaults.  */
static inline void
create_fixed_operand (struct expand_operand *op, rtx x)
{
  create_expand_operand (op, EXPAND_FIXED, x, VOIDmode, false);
}
/* Make OP describe an output operand that must have mode MODE.
   X, if nonnull, is a suggestion for where the output should be stored.
   It is OK for VALUE to be inconsistent with MODE, although it will just
   be ignored in that case.  */
static inline void
create_output_operand (struct expand_operand *op, rtx x,
		       enum machine_mode mode)
{
  create_expand_operand (op, EXPAND_OUTPUT, x, mode, false);
}
/* Make OP describe an input operand that must have mode MODE and
   value VALUE; MODE cannot be VOIDmode.  The backend may request that
   VALUE be copied into a different kind of rtx before being passed
   as an operand.  */
static inline void
create_input_operand (struct expand_operand *op, rtx value,
		      enum machine_mode mode)
{
  create_expand_operand (op, EXPAND_INPUT, value, mode, false);
}
/* Like create_input_operand, except that VALUE must first be converted
   to mode MODE.  UNSIGNED_P says whether VALUE is unsigned, i.e. whether
   the conversion should be zero- rather than sign-extending.  */
static inline void
create_convert_operand_to (struct expand_operand *op, rtx value,
			   enum machine_mode mode, bool unsigned_p)
{
  create_expand_operand (op, EXPAND_CONVERT_TO, value, mode, unsigned_p);
}
/* Make OP describe an input operand that should have the same value
   as VALUE, after any mode conversion that the backend might request.
   If VALUE is a CONST_INT, it should be treated as having mode MODE.
   UNSIGNED_P says whether VALUE is unsigned.  */
static inline void
create_convert_operand_from (struct expand_operand *op, rtx value,
			     enum machine_mode mode, bool unsigned_p)
{
  create_expand_operand (op, EXPAND_CONVERT_FROM, value, mode, unsigned_p);
}
/* Like create_convert_operand_from, but taking the mode and signedness
   from tree type TYPE rather than as explicit arguments (out of line
   because it needs tree macros; presumably defined in optabs.c).  */
extern void create_convert_operand_from_type (struct expand_operand *op,
					      rtx value, tree type);

/* Make OP describe an input Pmode address operand.  VALUE is the value
   of the address, but it may need to be converted to Pmode first.  */
static inline void
create_address_operand (struct expand_operand *op, rtx value)
{
  create_expand_operand (op, EXPAND_ADDRESS, value, Pmode, false);
}
/* Make OP describe an input operand that has value INTVAL and that has
   no inherent mode.  This function should only be used for operands that
   are always expand-time constants.  The backend may request that INTVAL
   be copied into a different kind of rtx, but it must specify the mode
   of that rtx if so.  */
static inline void
create_integer_operand (struct expand_operand *op, HOST_WIDE_INT intval)
{
  create_expand_operand (op, EXPAND_INTEGER, GEN_INT (intval), VOIDmode, false);
}
/* Try to make operands [OPNO, OPNO + NOPS) of instruction ICODE acceptable
   to its predicates, copying or converting values as the operand types
   above require.  Returns a success/failure flag; presumably defined in
   optabs.c.  */
extern bool maybe_legitimize_operands (enum insn_code icode,
				       unsigned int opno, unsigned int nops,
				       struct expand_operand *ops);

/* Try to generate instruction ICODE from operands [0, NOPS) of OPS.
   Returns the instruction pattern on success and null on failure;
   NOTE(review): presumably does not emit — confirm against optabs.c.  */
extern rtx maybe_gen_insn (enum insn_code icode, unsigned int nops,
			   struct expand_operand *ops);

/* Like maybe_gen_insn, but also emit the instruction on success and
   return true.  On failure nothing useful is emitted and callers fall
   back to another strategy (typically after delete_insns_since; see
   e.g. emit_cstore and emit_block_move_via_movmem).  On success the
   legitimized operand values, including the chosen output rtx, are
   left in OPS (callers read ops[0].value).  */
extern bool maybe_expand_insn (enum insn_code icode, unsigned int nops,
			       struct expand_operand *ops);

/* Jump-insn counterpart of maybe_expand_insn (by naming convention;
   confirm in optabs.c).  */
extern bool maybe_expand_jump_insn (enum insn_code icode, unsigned int nops,
				    struct expand_operand *ops);

/* Like maybe_expand_insn, but for expansions that must not fail —
   used where a silent failure would drop code, e.g. the movmisalign
   store in expand_assignment.  */
extern void expand_insn (enum insn_code icode, unsigned int nops,
			 struct expand_operand *ops);

/* Jump-insn counterpart of expand_insn; used e.g. for casesi in
   try_casesi, replacing emit_jump_insn (gen_casesi (...)).  */
extern void expand_jump_insn (enum insn_code icode, unsigned int nops,
			      struct expand_operand *ops);
extern rtx prepare_operand (enum insn_code, rtx, int, enum machine_mode, extern rtx prepare_operand (enum insn_code, rtx, int, enum machine_mode,
enum machine_mode, int); enum machine_mode, int);