re PR target/50931 ([avr] Support a 24-bit scalar integer mode)

gcc/
	PR target/50931
	* config/avr/avr-modes.def: New file defining PSImode.
	* config/avr/avr-c.c (__INT24_MAX__, __INT24_MIN__,
	__UINT24_MAX__): New built-in defines.
	* config/avr/avr.md (adjust_len): Add tstpsi, mov24, reload_in24,
	ashlpsi, ashrpsi, lshrpsi.
	(QISI, QIDI, HISI, HIDI, MPUSH, rotx, rotsmode): Add PSI.
	(MOVMODE): New mode iterator.
	(movpsi): New expander.
	(movqi, movhi, movsi, movsf, movpsi): Write as one using MOVMODE.
	(*reload_inpsi, *movpsi): New insns.
	(*reload_inpsi): New RTL peephole.
	(addpsi3, *addpsi3_zero_extend.qi, *addpsi3_zero_extend.hi,
	*addpsi3_sign_extend.hi): New insns.
	(subpsi3, *subpsi3_zero_extend.qi, *subpsi3_zero_extend.hi,
	*subpsi3_sign_extend.hi): New insns.
	(divmodpsi4, udivmodpsi4): New define insn-and-split.
	(*divmodpsi4_call, *udivmodpsi4_call): New insns.
	(andpsi3, iorpsi3, xorpsi3): New insns.
	(*rotlpsi2.1, *rotlpsi2.23): New insns.
	(*rotw<mode>): Insn condition only allows even-sized modes.
	(*rotb<mode>): Insn condition allows odd-sized modes.
	(ashlpsi3, ashrpsi3, lshrpsi3, *addpsi3.lt0): New insns.
	(negpsi2, one_cmplpsi2): New insns.
	(extendqipsi2, extendhipsi2, extendpsisi2): New insns.
	(zero_extendqipsi2, zero_extendhipsi2, zero_extendpsisi2): New
	insn-and-splits.
	(*cmppsi, *negated_tstpsi, *reversed_tstpsi): New insns.
	(cbranchpsi4): New expander.
	* config/avr/constraints.md (Ca3, Co3, Cx3): New constraints.
	* config/avr/avr-protos.h (avr_out_tstpsi, avr_out_movpsi,
	avr_out_ashlpsi3, avr_out_ashrpsi3, avr_out_lshrpsi3,
	avr_out_reload_inpsi): New prototypes.

	* config/avr/avr.c (TARGET_SCALAR_MODE_SUPPORTED_P): Define to...
	(avr_scalar_mode_supported_p): ...this new static function.
	(avr_asm_len): Always return "".
	(avr_out_load_psi, avr_out_store_psi): New static functions.
	(avr_out_movpsi, avr_out_reload_inpsi): New functions.
	(avr_out_tstpsi): New function.
	(avr_out_ashlpsi3, avr_out_ashrpsi3, avr_out_lshrpsi3): New functions.
	(avr_out_plus_1, output_reload_in_const): Handle 3-byte types.
	(avr_simplify_comparison_p): Ditto.
	(adjust_insn_length): Handle ADJUST_LEN_RELOAD_IN24,
	ADJUST_LEN_MOV24, ADJUST_LEN_TSTPSI, ADJUST_LEN_ASHLPSI,
	ADJUST_LEN_ASHRPSI, ADJUST_LEN_LSHRPSI.
	(avr_rtx_costs_1): Report PSI costs.
	(avr_libcall_value): Handle odd-sized parameters.
	(avr_init_builtin_int24): New static function to define built-in
	24-bit types __int24 and __uint24.
	(avr_init_builtins): Use it.

libgcc/
	PR target/50931
	* config/t-avr (LIB1ASMFUNCS): Add _divmodpsi4, _udivmodpsi4.
	* config/lib1funcs.S (__udivmodpsi4, __divmodpsi4): New functions.

From-SVN: r180962
Author: Georg-Johann Lay  2011-11-04 16:20:18 +00:00
Commit: e4fe948a6e (parent: 2fcc5e6474)
10 changed files with 1425 additions and 95 deletions

View File

@ -1,4 +1,58 @@
2011-11-04 Thomas Doerfler <thomas.doerfler@embedded-brains.de>
2011-11-04 Georg-Johann Lay <avr@gjlay.de>
PR target/50931
* config/avr/avr-modes.def: New file defining PSImode.
* config/avr/avr-c.c (__INT24_MAX__, __INT24_MIN__,
__UINT24_MAX__): New built-in defines.
* config/avr/avr.md (adjust_len): Add tstpsi, mov24, reload_in24,
ashlpsi, ashrpsi, lshrpsi.
(QISI, QIDI, HISI, HIDI, MPUSH, rotx, rotsmode): Add PSI.
(MOVMODE): New mode iterator.
(movpsi): New expander.
(movqi, movhi, movsi, movsf, movpsi): Write as one using MOVMODE.
(*reload_inpsi, *movpsi): New insns.
(*reload_inpsi): New RTL peephole.
(addpsi3, *addpsi3_zero_extend.qi, *addpsi3_zero_extend.hi,
*addpsi3_sign_extend.hi): New insns.
(subpsi3, *subpsi3_zero_extend.qi, *subpsi3_zero_extend.hi,
*subpsi3_sign_extend.hi): New insns.
(divmodpsi4, udivmodpsi4): New define insn-and-split.
(*divmodpsi4_call, *udivmodpsi4_call): New insns.
(andpsi3, iorpsi3, xorpsi3): New insns.
(*rotlpsi2.1, *rotlpsi2.23): New insns.
(*rotw<mode>): Insn condition only allows even-sized modes.
(*rotb<mode>): Insn condition allows odd-sized modes.
(ashlpsi3, ashrpsi3, lshrpsi3, *addpsi3.lt0): New insns.
(negpsi2, one_cmplpsi2): New insns.
(extendqipsi2, extendhipsi2, extendpsisi2): New insns.
(zero_extendqipsi2, zero_extendhipsi2, zero_extendpsisi2): New
insn-and-splits.
(*cmppsi, *negated_tstpsi, *reversed_tstpsi): New insns.
(cbranchpsi4): New expander.
* config/avr/constraints.md (Ca3, Co3, Cx3): New constraints.
* config/avr/avr-protos.h (avr_out_tstpsi, avr_out_movpsi,
avr_out_ashlpsi3, avr_out_ashrpsi3, avr_out_lshrpsi3,
avr_out_reload_inpsi): New prototypes.
* config/avr/avr.c (TARGET_SCALAR_MODE_SUPPORTED_P): Define to...
(avr_scalar_mode_supported_p): ...this new static function.
(avr_asm_len): Always return "".
(avr_out_load_psi, avr_out_store_psi): New static functions.
(avr_out_movpsi, avr_out_reload_inpsi): New functions.
(avr_out_tstpsi): New function.
(avr_out_ashlpsi3, avr_out_ashrpsi3, avr_out_lshrpsi3): New functions.
(avr_out_plus_1, output_reload_in_const): Handle 3-byte types.
(avr_simplify_comparison_p): Ditto.
(adjust_insn_length): Handle ADJUST_LEN_RELOAD_IN24,
ADJUST_LEN_MOV24, ADJUST_LEN_TSTPSI, ADJUST_LEN_ASHLPSI,
ADJUST_LEN_ASHRPSI, ADJUST_LEN_LSHRPSI.
(avr_rtx_costs_1): Report PSI costs.
(avr_libcall_value): Handle odd-sized parameters.
(avr_init_builtin_int24): New static function to define built-in
24-bit types __int24 and __uint24.
(avr_init_builtins): Use it.
2011-11-04 Thomas Doerfler <thomas.doerfler@embedded-brains.de>
PR target/50989
* config/arm/rtems-elf.h, config/arm/t-rtems: Add optional

View File

@ -105,4 +105,8 @@ avr_cpu_cpp_builtins (struct cpp_reader *pfile)
cpp_define (pfile, "__BUILTIN_AVR_FMUL");
cpp_define (pfile, "__BUILTIN_AVR_FMULS");
cpp_define (pfile, "__BUILTIN_AVR_FMULSU");
cpp_define (pfile, "__INT24_MAX__=8388607L");
cpp_define (pfile, "__INT24_MIN__=(-__INT24_MAX__-1)");
cpp_define (pfile, "__UINT24_MAX__=16777215UL");
}
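
As an aside, with an avr-gcc that provides these definitions, the new limit macros and the built-in 24-bit types can be used directly from C; a minimal sketch (variable names are made up):

    /* Hypothetical usage of the new 24-bit built-ins and limit macros.  */
    __uint24 ticks;                    /* 3-byte unsigned counter */
    __int24  offset = __INT24_MIN__;   /* -8388608 */

    unsigned char wrapped (void)
    {
        /* Unsigned 24-bit arithmetic wraps at __UINT24_MAX__ (16777215).  */
        return ++ticks == 0;
    }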

View File

@ -0,0 +1 @@
FRACTIONAL_INT_MODE (PSI, 24, 3);
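
FRACTIONAL_INT_MODE (PSI, 24, 3) declares a 24-bit integer mode stored in 3 bytes, so the corresponding C types really occupy three bytes rather than a 32-bit container; a minimal sketch (assuming a compiler with C11 _Static_assert and this patch):

    _Static_assert (sizeof (__int24)  == 3, "PSImode occupies 3 bytes");
    _Static_assert (sizeof (__uint24) == 3, "PSImode occupies 3 bytes");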

View File

@ -59,8 +59,10 @@ extern const char *out_movsi_mr_r (rtx insn, rtx op[], int *l);
extern const char *output_movsisf (rtx insn, rtx operands[], int *l);
extern const char *avr_out_tstsi (rtx, rtx*, int*);
extern const char *avr_out_tsthi (rtx, rtx*, int*);
extern const char *avr_out_tstpsi (rtx, rtx*, int*);
extern const char *avr_out_compare (rtx, rtx*, int*);
extern const char *ret_cond_branch (rtx x, int len, int reverse);
extern const char *avr_out_movpsi (rtx, rtx*, int*);
extern const char *ashlqi3_out (rtx insn, rtx operands[], int *len);
extern const char *ashlhi3_out (rtx insn, rtx operands[], int *len);
@ -73,6 +75,11 @@ extern const char *ashrsi3_out (rtx insn, rtx operands[], int *len);
extern const char *lshrqi3_out (rtx insn, rtx operands[], int *len);
extern const char *lshrhi3_out (rtx insn, rtx operands[], int *len);
extern const char *lshrsi3_out (rtx insn, rtx operands[], int *len);
extern const char *avr_out_ashlpsi3 (rtx, rtx*, int*);
extern const char *avr_out_ashrpsi3 (rtx, rtx*, int*);
extern const char *avr_out_lshrpsi3 (rtx, rtx*, int*);
extern bool avr_rotate_bytes (rtx operands[]);
extern void expand_prologue (void);
@ -93,6 +100,7 @@ extern int extra_constraint_Q (rtx x);
extern int adjust_insn_length (rtx insn, int len);
extern const char* output_reload_inhi (rtx*, rtx, int*);
extern const char* output_reload_insisf (rtx*, rtx, int*);
extern const char* avr_out_reload_inpsi (rtx*, rtx, int*);
extern void notice_update_cc (rtx body, rtx insn);
extern void print_operand (FILE *file, rtx x, int code);
extern void print_operand_address (FILE *file, rtx addr);

View File

@ -217,6 +217,9 @@ bool avr_need_copy_data_p = false;
#undef TARGET_ASM_FUNCTION_RODATA_SECTION
#define TARGET_ASM_FUNCTION_RODATA_SECTION avr_asm_function_rodata_section
#undef TARGET_SCALAR_MODE_SUPPORTED_P
#define TARGET_SCALAR_MODE_SUPPORTED_P avr_scalar_mode_supported_p
/* Custom function to replace string prefix.
@ -369,6 +372,17 @@ avr_regno_reg_class (int r)
return ALL_REGS;
}
static bool
avr_scalar_mode_supported_p (enum machine_mode mode)
{
if (PSImode == mode)
return true;
return default_scalar_mode_supported_p (mode);
}
/* A helper for the subsequent function attribute used to dig for
attribute 'name' in a FUNCTION_DECL or FUNCTION_TYPE */
@ -1487,7 +1501,7 @@ avr_legitimize_reload_address (rtx *px, enum machine_mode mode,
/* Helper function to print assembler resp. track instruction
sequence lengths.
sequence lengths. Always return "".
If PLEN == NULL:
Output assembler code from template TPL with operands supplied
@ -1499,7 +1513,7 @@ avr_legitimize_reload_address (rtx *px, enum machine_mode mode,
Don't output anything.
*/
static void
static const char*
avr_asm_len (const char* tpl, rtx* operands, int* plen, int n_words)
{
if (NULL == plen)
@ -1513,6 +1527,8 @@ avr_asm_len (const char* tpl, rtx* operands, int* plen, int n_words)
else
*plen += n_words;
}
return "";
}
@ -1562,6 +1578,8 @@ cond_string (enum rtx_code code)
default:
gcc_unreachable ();
}
return "";
}
/* Output ADDR to FILE as address. */
@ -1968,6 +1986,7 @@ avr_simplify_comparison_p (enum machine_mode mode, RTX_CODE op, rtx x)
{
unsigned int max = (mode == QImode ? 0xff :
mode == HImode ? 0xffff :
mode == PSImode ? 0xffffff :
mode == SImode ? 0xffffffff : 0);
if (max && op && GET_CODE (x) == CONST_INT)
{
@ -2956,6 +2975,306 @@ output_movsisf (rtx insn, rtx operands[], int *l)
return "";
}
/* Handle loads of 24-bit types from memory to register. */
static const char*
avr_out_load_psi (rtx insn, rtx *op, int *plen)
{
rtx dest = op[0];
rtx src = op[1];
rtx base = XEXP (src, 0);
int reg_dest = true_regnum (dest);
int reg_base = true_regnum (base);
if (reg_base > 0)
{
if (reg_base == REG_X) /* (R26) */
{
if (reg_dest == REG_X)
/* "ld r26,-X" is undefined */
return avr_asm_len ("adiw r26,2" CR_TAB
"ld r28,X" CR_TAB
"ld __tmp_reg__,-X" CR_TAB
"sbiw r26,1" CR_TAB
"ld r26,X" CR_TAB
"mov r27,__tmp_reg__", op, plen, -6);
else
{
avr_asm_len ("ld %A0,X+" CR_TAB
"ld %B0,X+" CR_TAB
"ld %C0,X", op, plen, -3);
if (reg_dest != REG_X - 2
&& !reg_unused_after (insn, base))
{
avr_asm_len ("sbiw r26,2", op, plen, 1);
}
return "";
}
}
else /* reg_base != REG_X */
{
if (reg_dest == reg_base)
return avr_asm_len ("ldd %C0,%1+2" CR_TAB
"ldd __tmp_reg__,%1+1" CR_TAB
"ld %A0,%1" CR_TAB
"mov %B0,__tmp_reg__", op, plen, -4);
else
return avr_asm_len ("ld %A0,%1" CR_TAB
"ldd %B0,%1+1" CR_TAB
"ldd %C0,%1+2", op, plen, -3);
}
}
else if (GET_CODE (base) == PLUS) /* (R + i) */
{
int disp = INTVAL (XEXP (base, 1));
if (disp > MAX_LD_OFFSET (GET_MODE (src)))
{
if (REGNO (XEXP (base, 0)) != REG_Y)
fatal_insn ("incorrect insn:",insn);
if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
return avr_asm_len ("adiw r28,%o1-61" CR_TAB
"ldd %A0,Y+61" CR_TAB
"ldd %B0,Y+62" CR_TAB
"ldd %C0,Y+63" CR_TAB
"sbiw r28,%o1-61", op, plen, -5);
return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
"sbci r29,hi8(-%o1)" CR_TAB
"ld %A0,Y" CR_TAB
"ldd %B0,Y+1" CR_TAB
"ldd %C0,Y+2" CR_TAB
"subi r28,lo8(%o1)" CR_TAB
"sbci r29,hi8(%o1)", op, plen, -7);
}
reg_base = true_regnum (XEXP (base, 0));
if (reg_base == REG_X)
{
/* R = (X + d) */
if (reg_dest == REG_X)
{
/* "ld r26,-X" is undefined */
return avr_asm_len ("adiw r26,%o1+2" CR_TAB
"ld r28,X" CR_TAB
"ld __tmp_reg__,-X" CR_TAB
"sbiw r26,1" CR_TAB
"ld r26,X" CR_TAB
"mov r27,__tmp_reg__", op, plen, -6);
}
avr_asm_len ("adiw r26,%o1" CR_TAB
"ld r24,X+" CR_TAB
"ld r25,X+" CR_TAB
"ld r26,X", op, plen, -4);
if (reg_dest != REG_X - 2)
avr_asm_len ("sbiw r26,%o1+2", op, plen, 1);
return "";
}
if (reg_dest == reg_base)
return avr_asm_len ("ldd %C0,%C1" CR_TAB
"ldd __tmp_reg__,%B1" CR_TAB
"ldd %A0,%A1" CR_TAB
"mov %B0,__tmp_reg__", op, plen, -4);
return avr_asm_len ("ldd %A0,%A1" CR_TAB
"ldd %B0,%B1" CR_TAB
"ldd %C0,%C1", op, plen, -3);
}
else if (GET_CODE (base) == PRE_DEC) /* (--R) */
return avr_asm_len ("ld %C0,%1" CR_TAB
"ld %B0,%1" CR_TAB
"ld %A0,%1", op, plen, -3);
else if (GET_CODE (base) == POST_INC) /* (R++) */
return avr_asm_len ("ld %A0,%1" CR_TAB
"ld %B0,%1" CR_TAB
"ld %C0,%1", op, plen, -3);
else if (CONSTANT_ADDRESS_P (base))
return avr_asm_len ("lds %A0,%m1" CR_TAB
"lds %B0,%m1+1" CR_TAB
"lds %C0,%m1+2", op, plen , -6);
fatal_insn ("unknown move insn:",insn);
return "";
}
/* Handle store of 24-bit type from register or zero to memory. */
static const char*
avr_out_store_psi (rtx insn, rtx *op, int *plen)
{
rtx dest = op[0];
rtx src = op[1];
rtx base = XEXP (dest, 0);
int reg_base = true_regnum (base);
if (CONSTANT_ADDRESS_P (base))
return avr_asm_len ("sts %m0,%A1" CR_TAB
"sts %m0+1,%B1" CR_TAB
"sts %m0+2,%C1", op, plen, -6);
if (reg_base > 0) /* (r) */
{
if (reg_base == REG_X) /* (R26) */
{
gcc_assert (!reg_overlap_mentioned_p (base, src));
avr_asm_len ("st %0+,%A1" CR_TAB
"st %0+,%B1" CR_TAB
"st %0,%C1", op, plen, -3);
if (!reg_unused_after (insn, base))
avr_asm_len ("sbiw r26,2", op, plen, 1);
return "";
}
else
return avr_asm_len ("st %0,%A1" CR_TAB
"std %0+1,%B1" CR_TAB
"std %0+2,%C1", op, plen, -3);
}
else if (GET_CODE (base) == PLUS) /* (R + i) */
{
int disp = INTVAL (XEXP (base, 1));
reg_base = REGNO (XEXP (base, 0));
if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
{
if (reg_base != REG_Y)
fatal_insn ("incorrect insn:",insn);
if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
return avr_asm_len ("adiw r28,%o0-61" CR_TAB
"std Y+61,%A1" CR_TAB
"std Y+62,%B1" CR_TAB
"std Y+63,%C1" CR_TAB
"sbiw r28,%o0-60", op, plen, -5);
return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
"sbci r29,hi8(-%o0)" CR_TAB
"st Y,%A1" CR_TAB
"std Y+1,%B1" CR_TAB
"std Y+2,%C1" CR_TAB
"subi r28,lo8(%o0)" CR_TAB
"sbci r29,hi8(%o0)", op, plen, -7);
}
if (reg_base == REG_X)
{
/* (X + d) = R */
gcc_assert (!reg_overlap_mentioned_p (XEXP (base, 0), src));
avr_asm_len ("adiw r26,%o0" CR_TAB
"st X+,%A1" CR_TAB
"st X+,%B1" CR_TAB
"st X,%C1", op, plen, -4);
if (!reg_unused_after (insn, XEXP (base, 0)))
avr_asm_len ("sbiw r26,%o0+2", op, plen, 1);
return "";
}
return avr_asm_len ("std %A0,%A1" CR_TAB
"std %B0,%B1" CR_TAB
"std %C0,%C1", op, plen, -3);
}
else if (GET_CODE (base) == PRE_DEC) /* (--R) */
return avr_asm_len ("st %0,%C1" CR_TAB
"st %0,%B1" CR_TAB
"st %0,%A1", op, plen, -3);
else if (GET_CODE (base) == POST_INC) /* (R++) */
return avr_asm_len ("st %0,%A1" CR_TAB
"st %0,%B1" CR_TAB
"st %0,%C1", op, plen, -3);
fatal_insn ("unknown move insn:",insn);
return "";
}
/* Move around 24-bit stuff. */
const char *
avr_out_movpsi (rtx insn, rtx *op, int *plen)
{
rtx dest = op[0];
rtx src = op[1];
if (register_operand (dest, VOIDmode))
{
if (register_operand (src, VOIDmode)) /* mov r,r */
{
if (true_regnum (dest) > true_regnum (src))
{
avr_asm_len ("mov %C0,%C1", op, plen, -1);
if (AVR_HAVE_MOVW)
return avr_asm_len ("movw %A0,%A1", op, plen, 1);
else
return avr_asm_len ("mov %B0,%B1" CR_TAB
"mov %A0,%A1", op, plen, 2);
}
else
{
if (AVR_HAVE_MOVW)
avr_asm_len ("movw %A0,%A1", op, plen, -1);
else
avr_asm_len ("mov %A0,%A1" CR_TAB
"mov %B0,%B1", op, plen, -2);
return avr_asm_len ("mov %C0,%C1", op, plen, 1);
}
}
else if (CONST_INT_P (src))
{
return avr_out_reload_inpsi (op, NULL_RTX, plen);
}
else if (CONSTANT_P (src))
{
if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
{
return avr_asm_len ("ldi %A0,lo8(%1)" CR_TAB
"ldi %B0,hi8(%1)" CR_TAB
"ldi %C0,hh8(%1)", op, plen, -3);
}
/* Last resort, better than loading from memory. */
return avr_asm_len ("mov __tmp_reg__,r31" CR_TAB
"ldi r31,lo8(%1)" CR_TAB
"mov %A0,r31" CR_TAB
"ldi r31,hi8(%1)" CR_TAB
"mov %B0,r31" CR_TAB
"ldi r31,hh8(%1)" CR_TAB
"mov %C0,r31" CR_TAB
"mov r31,__tmp_reg__", op, plen, -8);
}
else if (MEM_P (src))
return avr_out_load_psi (insn, op, plen); /* mov r,m */
}
else if (MEM_P (dest))
{
if (src == CONST0_RTX (GET_MODE (dest)))
op[1] = zero_reg_rtx;
avr_out_store_psi (insn, op, plen);
op[1] = src;
return "";
}
fatal_insn ("invalid insn:", insn);
return "";
}
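
To see these helpers at work, a hedged sketch: accesses to a 3-byte object in static storage go through the movpsi pattern and hence avr_out_load_psi / avr_out_store_psi; for a constant address one would expect three lds resp. three sts instructions. The variable name below is hypothetical.

    __uint24 rgb;   /* hypothetical packed 24-bit colour value in RAM */

    __uint24 get_rgb (void)       { return rgb; }
    void     set_rgb (__uint24 v) { rgb = v; }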
const char *
out_movqi_mr_r (rtx insn, rtx op[], int *l)
{
@ -3280,22 +3599,24 @@ avr_out_compare (rtx insn, rtx *xop, int *plen)
avr_asm_len ("dec %A0" CR_TAB
"or %A0,%B0", xop, plen, 2);
if (n_bytes == 4)
avr_asm_len ("or %A0,%C0" CR_TAB
"or %A0,%D0", xop, plen, 2);
if (n_bytes >= 3)
avr_asm_len ("or %A0,%C0", xop, plen, 1);
if (n_bytes >= 4)
avr_asm_len ("or %A0,%D0", xop, plen, 1);
return "";
}
else if (xval == constm1_rtx)
{
if (n_bytes == 4)
avr_asm_len ("and %A0,%D0" CR_TAB
"and %A0,%C0", xop, plen, 2);
if (n_bytes >= 4)
avr_asm_len ("and %A0,%D0", xop, plen, 1);
avr_asm_len ("and %A0,%B0" CR_TAB
"com %A0", xop, plen, 2);
if (n_bytes >= 3)
avr_asm_len ("and %A0,%C0", xop, plen, 1);
return "";
return avr_asm_len ("and %A0,%B0" CR_TAB
"com %A0", xop, plen, 2);
}
}
@ -3335,8 +3656,7 @@ avr_out_compare (rtx insn, rtx *xop, int *plen)
&& compare_eq_p (insn)
&& reg_unused_after (insn, xreg))
{
avr_asm_len ("adiw %0,%n1", xop, plen, 1);
break;
return avr_asm_len ("adiw %0,%n1", xop, plen, 1);
}
}
@ -3410,6 +3730,31 @@ avr_out_tsthi (rtx insn, rtx *op, int *plen)
}
/* Output test instruction for PSImode. */
const char*
avr_out_tstpsi (rtx insn, rtx *op, int *plen)
{
if (compare_sign_p (insn))
{
avr_asm_len ("tst %C0", op, plen, -1);
}
else if (reg_unused_after (insn, op[0])
&& compare_eq_p (insn))
{
/* Faster than sbiw if we can clobber the operand. */
avr_asm_len ("or %A0,%B0" CR_TAB
"or %A0,%C0", op, plen, -2);
}
else
{
avr_out_compare (insn, op, plen);
}
return "";
}
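
The two shortcuts in avr_out_tstpsi roughly correspond to the following source-level tests; a hedged sketch:

    __int24 val;

    char is_negative (void) { return val <  0; }  /* sign test: only the high byte matters      */
    char is_zero     (void) { return val == 0; }  /* test vs. 0: OR the bytes together if val dies */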
/* Output test instruction for SImode. */
const char*
@ -3938,6 +4283,69 @@ ashlhi3_out (rtx insn, rtx operands[], int *len)
}
/* 24-bit shift left */
const char*
avr_out_ashlpsi3 (rtx insn, rtx *op, int *plen)
{
if (plen)
*plen = 0;
if (CONST_INT_P (op[2]))
{
switch (INTVAL (op[2]))
{
default:
if (INTVAL (op[2]) < 24)
break;
return avr_asm_len ("clr %A0" CR_TAB
"clr %B0" CR_TAB
"clr %C0", op, plen, 3);
case 8:
{
int reg0 = REGNO (op[0]);
int reg1 = REGNO (op[1]);
if (reg0 >= reg1)
return avr_asm_len ("mov %C0,%B1" CR_TAB
"mov %B0,%A1" CR_TAB
"clr %A0", op, plen, 3);
else
return avr_asm_len ("clr %A0" CR_TAB
"mov %B0,%A1" CR_TAB
"mov %C0,%B1", op, plen, 3);
}
case 16:
{
int reg0 = REGNO (op[0]);
int reg1 = REGNO (op[1]);
if (reg0 + 2 != reg1)
avr_asm_len ("mov %C0,%A0", op, plen, 1);
return avr_asm_len ("clr %B0" CR_TAB
"clr %A0", op, plen, 2);
}
case 23:
return avr_asm_len ("clr %C0" CR_TAB
"lsr %A0" CR_TAB
"ror %C0" CR_TAB
"clr %B0" CR_TAB
"clr %A0", op, plen, 5);
}
}
out_shift_with_cnt ("lsl %A0" CR_TAB
"rol %B0" CR_TAB
"rol %C0", insn, op, plen, 3);
return "";
}
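
The special-cased shift counts above (8, 16 and 23) reduce to byte moves, clears, or isolating a single bit; everything else goes through the generic loop emitted by out_shift_with_cnt. A hedged sketch of sources that would hit those paths:

    __uint24 by_one_byte  (__uint24 x) { return x << 8;  }  /* byte moves plus clr       */
    __uint24 by_two_bytes (__uint24 x) { return x << 16; }  /* move high byte, clear rest */
    __uint24 by_count     (__uint24 x, unsigned char n) { return x << n; }  /* shift loop */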
/* 32bit shift left ((long)x << i) */
const char *
@ -4264,6 +4672,65 @@ ashrhi3_out (rtx insn, rtx operands[], int *len)
}
/* 24-bit arithmetic shift right */
const char*
avr_out_ashrpsi3 (rtx insn, rtx *op, int *plen)
{
int dest = REGNO (op[0]);
int src = REGNO (op[1]);
if (CONST_INT_P (op[2]))
{
if (plen)
*plen = 0;
switch (INTVAL (op[2]))
{
case 8:
if (dest <= src)
return avr_asm_len ("mov %A0,%B1" CR_TAB
"mov %B0,%C1" CR_TAB
"clr %C0" CR_TAB
"sbrc %B0,7" CR_TAB
"dec %C0", op, plen, 5);
else
return avr_asm_len ("clr %C0" CR_TAB
"sbrc %C1,7" CR_TAB
"dec %C0" CR_TAB
"mov %B0,%C1" CR_TAB
"mov %A0,%B1", op, plen, 5);
case 16:
if (dest != src + 2)
avr_asm_len ("mov %A0,%C1", op, plen, 1);
return avr_asm_len ("clr %B0" CR_TAB
"sbrc %A0,7" CR_TAB
"com %B0" CR_TAB
"mov %C0,%B0", op, plen, 4);
default:
if (INTVAL (op[2]) < 24)
break;
/* fall through */
case 31:
return avr_asm_len ("lsl %C0" CR_TAB
"sbc %A0,%A0" CR_TAB
"mov %B0,%A0" CR_TAB
"mov %C0,%A0", op, plen, 4);
} /* switch */
}
out_shift_with_cnt ("asr %C0" CR_TAB
"ror %B0" CR_TAB
"ror %A0", insn, op, plen, 3);
return "";
}
/* 32bit arithmetic shift right ((signed long)x >> i) */
const char *
@ -4714,6 +5181,61 @@ lshrhi3_out (rtx insn, rtx operands[], int *len)
return "";
}
/* 24-bit logic shift right */
const char*
avr_out_lshrpsi3 (rtx insn, rtx *op, int *plen)
{
int dest = REGNO (op[0]);
int src = REGNO (op[1]);
if (CONST_INT_P (op[2]))
{
if (plen)
*plen = 0;
switch (INTVAL (op[2]))
{
case 8:
if (dest <= src)
return avr_asm_len ("mov %A0,%B1" CR_TAB
"mov %B0,%C1" CR_TAB
"clr %C0", op, plen, 3);
else
return avr_asm_len ("clr %C0" CR_TAB
"mov %B0,%C1" CR_TAB
"mov %A0,%B1", op, plen, 3);
case 16:
if (dest != src + 2)
avr_asm_len ("mov %A0,%C1", op, plen, 1);
return avr_asm_len ("clr %B0" CR_TAB
"clr %C0", op, plen, 2);
default:
if (INTVAL (op[2]) < 24)
break;
/* fall through */
case 23:
return avr_asm_len ("clr %A0" CR_TAB
"sbrc %C0,7" CR_TAB
"inc %A0" CR_TAB
"clr %B0" CR_TAB
"clr %C0", op, plen, 5);
} /* switch */
}
out_shift_with_cnt ("lsr %C0" CR_TAB
"ror %B0" CR_TAB
"ror %A0", insn, op, plen, 3);
return "";
}
/* 32bit logic shift right ((unsigned int)x >> i) */
const char *
@ -4874,7 +5396,9 @@ avr_out_plus_1 (rtx *xop, int *plen, enum rtx_code code, int *pcc)
if (i && !started)
*pcc = CC_CLOBBER;
if (!started && i % 2 == 0
if (!started
&& i % 2 == 0
&& i + 2 <= n_bytes
&& test_hard_reg_class (ADDW_REGS, reg8))
{
rtx xval16 = simplify_gen_subreg (HImode, xval, mode, i);
@ -4911,11 +5435,11 @@ avr_out_plus_1 (rtx *xop, int *plen, enum rtx_code code, int *pcc)
else if ((val8 == 1 || val8 == 0xff)
&& !started
&& i == n_bytes - 1)
{
{
avr_asm_len ((code == PLUS) ^ (val8 == 1) ? "dec %0" : "inc %0",
op, plen, 1);
break;
}
}
switch (code)
{
@ -5399,6 +5923,7 @@ adjust_insn_length (rtx insn, int len)
switch (adjust_len)
{
case ADJUST_LEN_RELOAD_IN16: output_reload_inhi (op, op[2], &len); break;
case ADJUST_LEN_RELOAD_IN24: avr_out_reload_inpsi (op, op[2], &len); break;
case ADJUST_LEN_RELOAD_IN32: output_reload_insisf (op, op[2], &len); break;
case ADJUST_LEN_OUT_BITOP: avr_out_bitop (insn, op, &len); break;
@ -5411,9 +5936,11 @@ adjust_insn_length (rtx insn, int len)
case ADJUST_LEN_MOV8: output_movqi (insn, op, &len); break;
case ADJUST_LEN_MOV16: output_movhi (insn, op, &len); break;
case ADJUST_LEN_MOV24: avr_out_movpsi (insn, op, &len); break;
case ADJUST_LEN_MOV32: output_movsisf (insn, op, &len); break;
case ADJUST_LEN_TSTHI: avr_out_tsthi (insn, op, &len); break;
case ADJUST_LEN_TSTPSI: avr_out_tstpsi (insn, op, &len); break;
case ADJUST_LEN_TSTSI: avr_out_tstsi (insn, op, &len); break;
case ADJUST_LEN_COMPARE: avr_out_compare (insn, op, &len); break;
@ -5429,6 +5956,10 @@ adjust_insn_length (rtx insn, int len)
case ADJUST_LEN_ASHLHI: ashlhi3_out (insn, op, &len); break;
case ADJUST_LEN_ASHLSI: ashlsi3_out (insn, op, &len); break;
case ADJUST_LEN_ASHLPSI: avr_out_ashlpsi3 (insn, op, &len); break;
case ADJUST_LEN_ASHRPSI: avr_out_ashrpsi3 (insn, op, &len); break;
case ADJUST_LEN_LSHRPSI: avr_out_lshrpsi3 (insn, op, &len); break;
case ADJUST_LEN_CALL: len = AVR_HAVE_JMP_CALL ? 2 : 1; break;
default:
@ -6228,13 +6759,11 @@ avr_rtx_costs_1 (rtx x, int codearg, int outer_code ATTRIBUTE_UNUSED,
*total = COSTS_N_INSNS (1);
break;
case HImode:
*total = COSTS_N_INSNS (3);
break;
case SImode:
*total = COSTS_N_INSNS (7);
break;
case HImode:
case PSImode:
case SImode:
*total = COSTS_N_INSNS (2 * GET_MODE_SIZE (mode) - 1);
break;
default:
return false;
@ -6320,6 +6849,19 @@ avr_rtx_costs_1 (rtx x, int codearg, int outer_code ATTRIBUTE_UNUSED,
*total = COSTS_N_INSNS (2);
break;
case PSImode:
if (!CONST_INT_P (XEXP (x, 1)))
{
*total = COSTS_N_INSNS (3);
*total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
speed);
}
else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
*total = COSTS_N_INSNS (2);
else
*total = COSTS_N_INSNS (3);
break;
case SImode:
if (GET_CODE (XEXP (x, 1)) != CONST_INT)
{
@ -6367,6 +6909,7 @@ avr_rtx_costs_1 (rtx x, int codearg, int outer_code ATTRIBUTE_UNUSED,
*total = COSTS_N_INSNS (1) + *total;
return true;
}
/* FALLTHRU */
case AND:
case IOR:
*total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
@ -6437,6 +6980,13 @@ avr_rtx_costs_1 (rtx x, int codearg, int outer_code ATTRIBUTE_UNUSED,
return false;
break;
case PSImode:
if (!speed)
*total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
else
*total = 10;
break;
case SImode:
if (AVR_HAVE_MUL)
{
@ -6611,6 +7161,31 @@ avr_rtx_costs_1 (rtx x, int codearg, int outer_code ATTRIBUTE_UNUSED,
}
break;
case PSImode:
if (!CONST_INT_P (XEXP (x, 1)))
{
*total = COSTS_N_INSNS (!speed ? 6 : 73);
}
else
switch (INTVAL (XEXP (x, 1)))
{
case 0:
*total = 0;
break;
case 1:
case 8:
case 16:
*total = COSTS_N_INSNS (3);
break;
case 23:
*total = COSTS_N_INSNS (5);
break;
default:
*total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
break;
}
break;
case SImode:
if (GET_CODE (XEXP (x, 1)) != CONST_INT)
{
@ -6721,6 +7296,33 @@ avr_rtx_costs_1 (rtx x, int codearg, int outer_code ATTRIBUTE_UNUSED,
}
break;
case PSImode:
if (!CONST_INT_P (XEXP (x, 1)))
{
*total = COSTS_N_INSNS (!speed ? 6 : 73);
}
else
switch (INTVAL (XEXP (x, 1)))
{
case 0:
*total = 0;
break;
case 1:
*total = COSTS_N_INSNS (3);
break;
case 16:
case 8:
*total = COSTS_N_INSNS (5);
break;
case 23:
*total = COSTS_N_INSNS (4);
break;
default:
*total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
break;
}
break;
case SImode:
if (GET_CODE (XEXP (x, 1)) != CONST_INT)
{
@ -6832,6 +7434,31 @@ avr_rtx_costs_1 (rtx x, int codearg, int outer_code ATTRIBUTE_UNUSED,
}
break;
case PSImode:
if (!CONST_INT_P (XEXP (x, 1)))
{
*total = COSTS_N_INSNS (!speed ? 6 : 73);
}
else
switch (INTVAL (XEXP (x, 1)))
{
case 0:
*total = 0;
break;
case 1:
case 8:
case 16:
*total = COSTS_N_INSNS (3);
break;
case 23:
*total = COSTS_N_INSNS (5);
break;
default:
*total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
break;
}
break;
case SImode:
if (GET_CODE (XEXP (x, 1)) != CONST_INT)
{
@ -6889,6 +7516,12 @@ avr_rtx_costs_1 (rtx x, int codearg, int outer_code ATTRIBUTE_UNUSED,
*total += COSTS_N_INSNS (1);
break;
case PSImode:
*total = COSTS_N_INSNS (3);
if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) != 0)
*total += COSTS_N_INSNS (2);
break;
case SImode:
*total = COSTS_N_INSNS (4);
if (GET_CODE (XEXP (x, 1)) != CONST_INT)
@ -7310,8 +7943,10 @@ avr_libcall_value (enum machine_mode mode,
const_rtx func ATTRIBUTE_UNUSED)
{
int offs = GET_MODE_SIZE (mode);
if (offs < 2)
offs = 2;
if (offs <= 4)
offs = (offs + 1) & ~1;
return gen_rtx_REG (mode, avr_ret_register () + 2 - offs);
}
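
For PSImode the new rounding works out as follows: GET_MODE_SIZE is 3, which is rounded up to offs = (3 + 1) & ~1 = 4, so the value is returned starting at register 24 + 2 - 4 = 22, i.e. in R24:R22 (assuming avr_ret_register () designates R24, which matches the register convention stated for __divmodpsi4/__udivmodpsi4 in the libgcc part below).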
@ -7537,10 +8172,11 @@ output_reload_in_const (rtx *op, rtx clobber_reg, int *len, bool clear_p)
/* (REG:SI 14) is special: It's neither in LD_REGS nor in NO_LD_REGS
but has some subregs that are in LD_REGS. Use the MSB (REG:QI 17). */
if (14 == REGNO (dest)
&& 4 == GET_MODE_SIZE (mode))
if (REGNO (dest) < 16
&& REGNO (dest) + GET_MODE_SIZE (mode) > 16)
{
clobber_reg = gen_rtx_REG (QImode, 17);
clobber_reg = gen_rtx_REG (QImode,
REGNO (dest) + GET_MODE_SIZE (mode) - 1);
}
/* We might need a clobber reg but don't have one. Look at the value
@ -7577,6 +8213,7 @@ output_reload_in_const (rtx *op, rtx clobber_reg, int *len, bool clear_p)
/* Look if we can reuse the low word by means of MOVW. */
if (n == 2
&& GET_MODE_SIZE (mode) >= 4
&& AVR_HAVE_MOVW)
{
rtx lo16 = simplify_gen_subreg (HImode, src, mode, 0);
@ -7820,6 +8457,16 @@ output_reload_insisf (rtx *op, rtx clobber_reg, int *len)
return "";
}
const char *
avr_out_reload_inpsi (rtx *op, rtx clobber_reg, int *len)
{
gcc_assert (REG_P (op[0])
&& CONST_INT_P (op[1]));
output_reload_in_const (op, clobber_reg, len, false);
return "";
}
void
avr_output_bld (rtx operands[], int bit_nr)
{
@ -8078,6 +8725,16 @@ enum avr_builtin_id
AVR_BUILTIN_DELAY_CYCLES
};
static void
avr_init_builtin_int24 (void)
{
tree int24_type = make_signed_type (GET_MODE_BITSIZE (PSImode));
tree uint24_type = make_unsigned_type (GET_MODE_BITSIZE (PSImode));
(*lang_hooks.types.register_builtin_type) (int24_type, "__int24");
(*lang_hooks.types.register_builtin_type) (uint24_type, "__uint24");
}
#define DEF_BUILTIN(NAME, TYPE, CODE) \
do \
{ \
@ -8133,6 +8790,8 @@ avr_init_builtins (void)
AVR_BUILTIN_FMULS);
DEF_BUILTIN ("__builtin_avr_fmulsu", int_ftype_char_uchar,
AVR_BUILTIN_FMULSU);
avr_init_builtin_int24 ();
}
#undef DEF_BUILTIN

View File

@ -125,11 +125,12 @@
(define_attr "adjust_len"
"out_bitop, out_plus, out_plus_noclobber, addto_sp,
tsthi, tstsi, compare, call,
mov8, mov16, mov32, reload_in16, reload_in32,
tsthi, tstpsi, tstsi, compare, call,
mov8, mov16, mov24, mov32, reload_in16, reload_in24, reload_in32,
ashlqi, ashrqi, lshrqi,
ashlhi, ashrhi, lshrhi,
ashlsi, ashrsi, lshrsi,
ashlpsi, ashrpsi, lshrpsi,
no"
(const_string "no"))
@ -180,10 +181,13 @@
;; Define mode iterators
(define_mode_iterator QIHI [(QI "") (HI "")])
(define_mode_iterator QIHI2 [(QI "") (HI "")])
(define_mode_iterator QISI [(QI "") (HI "") (SI "")])
(define_mode_iterator QIDI [(QI "") (HI "") (SI "") (DI "")])
(define_mode_iterator HIDI [(HI "") (SI "") (DI "")])
(define_mode_iterator HISI [(HI "") (SI "")])
(define_mode_iterator QISI [(QI "") (HI "") (PSI "") (SI "")])
(define_mode_iterator QIDI [(QI "") (HI "") (PSI "") (SI "") (DI "")])
(define_mode_iterator HIDI [(HI "") (PSI "") (SI "") (DI "")])
(define_mode_iterator HISI [(HI "") (PSI "") (SI "")])
;; All supported move-modes
(define_mode_iterator MOVMODE [(QI "") (HI "") (SI "") (SF "") (PSI "")])
;; Define code iterators
;; Define two incarnations so that we can build the cross product.
@ -279,6 +283,7 @@
(define_mode_iterator MPUSH
[(CQI "")
(HI "") (CHI "")
(PSI "")
(SI "") (CSI "")
(DI "") (CDI "")
(SF "") (SC "")])
@ -309,6 +314,28 @@
[(set (reg:HI REG_SP) (reg:HI REG_Y))]
"")
;;========================================================================
;; "movqi"
;; "movhi"
;; "movsi"
;; "movsf"
;; "movpsi"
(define_expand "mov<mode>"
[(set (match_operand:MOVMODE 0 "nonimmediate_operand" "")
(match_operand:MOVMODE 1 "general_operand" ""))]
""
{
/* One of the ops has to be in a register. */
if (!register_operand (operands[0], <MODE>mode)
&& !(register_operand (operands[1], <MODE>mode)
|| CONST0_RTX (<MODE>mode) == operands[1]))
{
operands[1] = copy_to_mode_reg (<MODE>mode, operands[1]);
}
})
;;========================================================================
;; move byte
;; The last alternative (any immediate constant to any register) is
@ -318,16 +345,6 @@
;; are call-saved registers, and most of LD_REGS are call-used registers,
;; so this may still be a win for registers live across function calls.
(define_expand "movqi"
[(set (match_operand:QI 0 "nonimmediate_operand" "")
(match_operand:QI 1 "general_operand" ""))]
""
"/* One of the ops has to be in a register. */
if (!register_operand(operand0, QImode)
&& ! (register_operand(operand1, QImode) || const0_rtx == operand1))
operands[1] = copy_to_mode_reg(QImode, operand1);
")
(define_insn "movqi_insn"
[(set (match_operand:QI 0 "nonimmediate_operand" "=r,d,Qm,r,q,r,*r")
(match_operand:QI 1 "general_operand" "rL,i,rL,Qm,r,q,i"))]
@ -365,21 +382,6 @@
;;============================================================================
;; move word (16 bit)
(define_expand "movhi"
[(set (match_operand:HI 0 "nonimmediate_operand" "")
(match_operand:HI 1 "general_operand" ""))]
""
"
{
/* One of the ops has to be in a register. */
if (!register_operand(operand0, HImode)
&& !(register_operand(operand1, HImode) || const0_rtx == operands[1]))
{
operands[1] = copy_to_mode_reg(HImode, operand1);
}
}")
;; Move register $1 to the Stack Pointer register SP.
;; This insn is emit during function prologue/epilogue generation.
;; $2 = 0: We know that IRQs are off
@ -460,25 +462,50 @@
operands[5] = gen_rtx_REG (HImode, REGNO (operands[3]));
})
;;==========================================================================
;; xpointer move (24 bit)
(define_peephole2 ; *reload_inpsi
[(match_scratch:QI 2 "d")
(set (match_operand:PSI 0 "l_register_operand" "")
(match_operand:PSI 1 "immediate_operand" ""))
(match_dup 2)]
"operands[1] != const0_rtx
&& operands[1] != constm1_rtx"
[(parallel [(set (match_dup 0)
(match_dup 1))
(clobber (match_dup 2))])]
"")
;; '*' because it is not used in rtl generation.
(define_insn "*reload_inpsi"
[(set (match_operand:PSI 0 "register_operand" "=r")
(match_operand:PSI 1 "immediate_operand" "i"))
(clobber (match_operand:QI 2 "register_operand" "=&d"))]
"reload_completed"
{
return avr_out_reload_inpsi (operands, operands[2], NULL);
}
[(set_attr "length" "6")
(set_attr "adjust_len" "reload_in24")
(set_attr "cc" "clobber")])
(define_insn "*movpsi"
[(set (match_operand:PSI 0 "nonimmediate_operand" "=r,r,r ,Qm,!d,r")
(match_operand:PSI 1 "general_operand" "r,L,Qm,rL,i ,i"))]
"register_operand (operands[0], PSImode)
|| register_operand (operands[1], PSImode)
|| const0_rtx == operands[1]"
{
return avr_out_movpsi (insn, operands, NULL);
}
[(set_attr "length" "3,3,8,9,4,10")
(set_attr "adjust_len" "mov24")
(set_attr "cc" "none,set_zn,clobber,clobber,clobber,clobber")])
;;==========================================================================
;; move double word (32 bit)
(define_expand "movsi"
[(set (match_operand:SI 0 "nonimmediate_operand" "")
(match_operand:SI 1 "general_operand" ""))]
""
"
{
/* One of the ops has to be in a register. */
if (!register_operand (operand0, SImode)
&& !(register_operand (operand1, SImode) || const0_rtx == operand1))
{
operands[1] = copy_to_mode_reg (SImode, operand1);
}
}")
(define_peephole2 ; *reload_insi
[(match_scratch:QI 2 "d")
(set (match_operand:SI 0 "l_register_operand" "")
@ -519,20 +546,6 @@
;; fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff
;; move floating point numbers (32 bit)
(define_expand "movsf"
[(set (match_operand:SF 0 "nonimmediate_operand" "")
(match_operand:SF 1 "general_operand" ""))]
""
"
{
/* One of the ops has to be in a register. */
if (!register_operand (operand1, SFmode)
&& !register_operand (operand0, SFmode))
{
operands[1] = copy_to_mode_reg (SFmode, operand1);
}
}")
(define_insn "*movsf"
[(set (match_operand:SF 0 "nonimmediate_operand" "=r,r,r,Qm,!d,r")
(match_operand:SF 1 "general_operand" "r,G,Qm,rG,F,F"))]
@ -914,6 +927,33 @@
(set_attr "adjust_len" "*,*,out_plus,out_plus")
(set_attr "cc" "set_n,set_czn,out_plus,out_plus")])
(define_insn "*addpsi3_zero_extend.qi"
[(set (match_operand:PSI 0 "register_operand" "=r")
(plus:PSI (zero_extend:PSI (match_operand:QI 1 "register_operand" "r"))
(match_operand:PSI 2 "register_operand" "0")))]
""
"add %A0,%A1\;adc %B0,__zero_reg__\;adc %C0,__zero_reg__"
[(set_attr "length" "3")
(set_attr "cc" "set_n")])
(define_insn "*addpsi3_zero_extend.hi"
[(set (match_operand:PSI 0 "register_operand" "=r")
(plus:PSI (zero_extend:PSI (match_operand:HI 1 "register_operand" "r"))
(match_operand:PSI 2 "register_operand" "0")))]
""
"add %A0,%A1\;adc %B0,%B1\;adc %C0,__zero_reg__"
[(set_attr "length" "3")
(set_attr "cc" "set_n")])
(define_insn "*addpsi3_sign_extend.hi"
[(set (match_operand:PSI 0 "register_operand" "=r")
(plus:PSI (sign_extend:PSI (match_operand:HI 1 "register_operand" "r"))
(match_operand:PSI 2 "register_operand" "0")))]
""
"add %A0,%1\;adc %B0,%B1\;adc %C0,__zero_reg__\;sbrc %B1,7\;dec %C0"
[(set_attr "length" "5")
(set_attr "cc" "set_n")])
(define_insn "*addsi3_zero_extend"
[(set (match_operand:SI 0 "register_operand" "=r")
(plus:SI (zero_extend:SI (match_operand:QI 1 "register_operand" "r"))
@ -932,6 +972,66 @@
[(set_attr "length" "4")
(set_attr "cc" "set_n")])
(define_insn "addpsi3"
[(set (match_operand:PSI 0 "register_operand" "=r,d ,d,r")
(plus:PSI (match_operand:PSI 1 "register_operand" "%0,0 ,0,0")
(match_operand:PSI 2 "nonmemory_operand" "r,s ,n,n")))
(clobber (match_scratch:QI 3 "=X,X ,X,&d"))]
""
{
static const char * const asm_code[] =
{
"add %A0,%A2\;adc %B0,%B2\;adc %C0,%C2",
"subi %0,lo8(-(%2))\;sbci %B0,hi8(-(%2))\;sbci %C0,hlo8(-(%2))",
"",
""
};
if (*asm_code[which_alternative])
return asm_code [which_alternative];
return avr_out_plus (operands, NULL, NULL);
}
[(set_attr "length" "3,3,3,6")
(set_attr "adjust_len" "*,*,out_plus,out_plus")
(set_attr "cc" "set_n,set_czn,out_plus,out_plus")])
(define_insn "subpsi3"
[(set (match_operand:PSI 0 "register_operand" "=r")
(minus:PSI (match_operand:PSI 1 "register_operand" "0")
(match_operand:PSI 2 "register_operand" "r")))]
""
"sub %0,%2\;sbc %B0,%B2\;sbc %C0,%C2"
[(set_attr "length" "3")
(set_attr "cc" "set_czn")])
(define_insn "*subpsi3_zero_extend.qi"
[(set (match_operand:PSI 0 "register_operand" "=r")
(minus:PSI (match_operand:SI 1 "register_operand" "0")
(zero_extend:PSI (match_operand:QI 2 "register_operand" "r"))))]
""
"sub %A0,%2\;sbc %B0,__zero_reg__\;sbc %C0,__zero_reg__"
[(set_attr "length" "3")
(set_attr "cc" "set_czn")])
(define_insn "*subpsi3_zero_extend.hi"
[(set (match_operand:PSI 0 "register_operand" "=r")
(minus:PSI (match_operand:PSI 1 "register_operand" "0")
(zero_extend:PSI (match_operand:HI 2 "register_operand" "r"))))]
""
"sub %A0,%2\;sbc %B0,%B2\;sbc %C0,__zero_reg__"
[(set_attr "length" "3")
(set_attr "cc" "set_czn")])
(define_insn "*subpsi3_sign_extend.hi"
[(set (match_operand:PSI 0 "register_operand" "=r")
(minus:PSI (match_operand:PSI 1 "register_operand" "0")
(sign_extend:PSI (match_operand:HI 2 "register_operand" "r"))))]
""
"sub %A0,%A2\;sbc %B0,%B2\;sbc %C0,__zero_reg__\;sbrc %B2,7\;inc %C0"
[(set_attr "length" "5")
(set_attr "cc" "set_czn")])
;-----------------------------------------------------------------------------
; sub bytes
(define_insn "subqi3"
@ -1099,6 +1199,17 @@
[(set_attr "length" "2,3")
(set_attr "cc" "clobber")])
(define_insn "*addpsi3.lt0"
[(set (match_operand:PSI 0 "register_operand" "=r")
(plus:PSI (lshiftrt:PSI (match_operand:PSI 1 "register_operand" "r")
(const_int 23))
(match_operand:PSI 2 "register_operand" "0")))]
""
"mov __tmp_reg__,%C1\;lsl __tmp_reg__
adc %A0,__zero_reg__\;adc %B0,__zero_reg__\;adc %C0,__zero_reg__"
[(set_attr "length" "5")
(set_attr "cc" "clobber")])
(define_insn "*addsi3.lt0"
[(set (match_operand:SI 0 "register_operand" "=r")
(plus:SI (lshiftrt:SI (match_operand:SI 1 "register_operand" "r")
@ -2062,7 +2173,7 @@
; / % / % / % / % / % / % / % / % / % / % / % / % / % / % / % / % / % / % / %
; divmod
;; Generate libgcc.S calls ourselves, because:
;; Generate lib1funcs.S calls ourselves, because:
;; - we know exactly which registers are clobbered (for QI and HI
;; modes, some of the call-used registers are preserved)
;; - we get both the quotient and the remainder at no extra cost
@ -2199,6 +2310,80 @@
[(set_attr "type" "xcall")
(set_attr "cc" "clobber")])
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; 24-bit signed/unsigned division and modulo.
;; Notice that the libgcc implementation returns the quotient in R22
;; and the remainder in R18 whereas the 32-bit [u]divmodsi4
;; implementation works the other way round.
(define_insn_and_split "divmodpsi4"
[(parallel [(set (match_operand:PSI 0 "pseudo_register_operand" "")
(div:PSI (match_operand:PSI 1 "pseudo_register_operand" "")
(match_operand:PSI 2 "pseudo_register_operand" "")))
(set (match_operand:PSI 3 "pseudo_register_operand" "")
(mod:PSI (match_dup 1)
(match_dup 2)))
(clobber (reg:DI 18))
(clobber (reg:QI 26))])]
""
{ gcc_unreachable(); }
""
[(set (reg:PSI 22) (match_dup 1))
(set (reg:PSI 18) (match_dup 2))
(parallel [(set (reg:PSI 22) (div:PSI (reg:PSI 22) (reg:PSI 18)))
(set (reg:PSI 18) (mod:PSI (reg:PSI 22) (reg:PSI 18)))
(clobber (reg:QI 21))
(clobber (reg:QI 25))
(clobber (reg:QI 26))])
(set (match_dup 0) (reg:PSI 22))
(set (match_dup 3) (reg:PSI 18))])
(define_insn "*divmodpsi4_call"
[(set (reg:PSI 22) (div:PSI (reg:PSI 22) (reg:PSI 18)))
(set (reg:PSI 18) (mod:PSI (reg:PSI 22) (reg:PSI 18)))
(clobber (reg:QI 21))
(clobber (reg:QI 25))
(clobber (reg:QI 26))]
""
"%~call __divmodpsi4"
[(set_attr "type" "xcall")
(set_attr "cc" "clobber")])
(define_insn_and_split "udivmodpsi4"
[(parallel [(set (match_operand:PSI 0 "pseudo_register_operand" "")
(udiv:PSI (match_operand:PSI 1 "pseudo_register_operand" "")
(match_operand:PSI 2 "pseudo_register_operand" "")))
(set (match_operand:PSI 3 "pseudo_register_operand" "")
(umod:PSI (match_dup 1)
(match_dup 2)))
(clobber (reg:DI 18))
(clobber (reg:QI 26))])]
""
{ gcc_unreachable(); }
""
[(set (reg:PSI 22) (match_dup 1))
(set (reg:PSI 18) (match_dup 2))
(parallel [(set (reg:PSI 22) (udiv:PSI (reg:PSI 22) (reg:PSI 18)))
(set (reg:PSI 18) (umod:PSI (reg:PSI 22) (reg:PSI 18)))
(clobber (reg:QI 21))
(clobber (reg:QI 25))
(clobber (reg:QI 26))])
(set (match_dup 0) (reg:PSI 22))
(set (match_dup 3) (reg:PSI 18))])
(define_insn "*udivmodpsi4_call"
[(set (reg:PSI 22) (udiv:PSI (reg:PSI 22) (reg:PSI 18)))
(set (reg:PSI 18) (umod:PSI (reg:PSI 22) (reg:PSI 18)))
(clobber (reg:QI 21))
(clobber (reg:QI 25))
(clobber (reg:QI 26))]
""
"%~call __udivmodpsi4"
[(set_attr "type" "xcall")
(set_attr "cc" "clobber")])
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(define_insn_and_split "divmodsi4"
[(parallel [(set (match_operand:SI 0 "pseudo_register_operand" "")
(div:SI (match_operand:SI 1 "pseudo_register_operand" "")
@ -2297,6 +2482,24 @@
(set_attr "adjust_len" "*,*,out_bitop,out_bitop,out_bitop")
(set_attr "cc" "set_n,set_n,clobber,clobber,clobber")])
(define_insn "andpsi3"
[(set (match_operand:PSI 0 "register_operand" "=r,d,r ,r")
(and:PSI (match_operand:PSI 1 "register_operand" "%0,0,0 ,0")
(match_operand:PSI 2 "nonmemory_operand" "r,n,Ca3,n")))
(clobber (match_scratch:QI 3 "=X,X,X ,&d"))]
""
{
if (which_alternative == 0)
return "and %A0,%A2" CR_TAB
"and %B0,%B2" CR_TAB
"and %C0,%C2";
return avr_out_bitop (insn, operands, NULL);
}
[(set_attr "length" "3,3,6,6")
(set_attr "adjust_len" "*,out_bitop,out_bitop,out_bitop")
(set_attr "cc" "set_n,clobber,clobber,clobber")])
(define_insn "andsi3"
[(set (match_operand:SI 0 "register_operand" "=r,d,r ,r")
(and:SI (match_operand:SI 1 "register_operand" "%0,0,0 ,0")
@ -2361,6 +2564,24 @@
(set_attr "adjust_len" "*,*,out_bitop,out_bitop,out_bitop")
(set_attr "cc" "set_n,set_n,clobber,clobber,clobber")])
(define_insn "iorpsi3"
[(set (match_operand:PSI 0 "register_operand" "=r,d,r ,r")
(ior:PSI (match_operand:PSI 1 "register_operand" "%0,0,0 ,0")
(match_operand:PSI 2 "nonmemory_operand" "r,n,Co3,n")))
(clobber (match_scratch:QI 3 "=X,X,X ,&d"))]
""
{
if (which_alternative == 0)
return "or %A0,%A2" CR_TAB
"or %B0,%B2" CR_TAB
"or %C0,%C2";
return avr_out_bitop (insn, operands, NULL);
}
[(set_attr "length" "3,3,6,6")
(set_attr "adjust_len" "*,out_bitop,out_bitop,out_bitop")
(set_attr "cc" "set_n,clobber,clobber,clobber")])
(define_insn "iorsi3"
[(set (match_operand:SI 0 "register_operand" "=r,d,r ,r")
(ior:SI (match_operand:SI 1 "register_operand" "%0,0,0 ,0")
@ -2408,6 +2629,24 @@
(set_attr "adjust_len" "*,out_bitop,out_bitop")
(set_attr "cc" "set_n,clobber,clobber")])
(define_insn "xorpsi3"
[(set (match_operand:PSI 0 "register_operand" "=r,r ,r")
(xor:PSI (match_operand:PSI 1 "register_operand" "%0,0 ,0")
(match_operand:PSI 2 "nonmemory_operand" "r,Cx3,n")))
(clobber (match_scratch:QI 3 "=X,X ,&d"))]
""
{
if (which_alternative == 0)
return "eor %A0,%A2" CR_TAB
"eor %B0,%B2" CR_TAB
"eor %C0,%C2";
return avr_out_bitop (insn, operands, NULL);
}
[(set_attr "length" "3,6,6")
(set_attr "adjust_len" "*,out_bitop,out_bitop")
(set_attr "cc" "set_n,clobber,clobber")])
(define_insn "xorsi3"
[(set (match_operand:SI 0 "register_operand" "=r,r ,r")
(xor:SI (match_operand:SI 1 "register_operand" "%0,0 ,0")
@ -2472,10 +2711,11 @@
;; HImode does not need scratch. Use attribute for this constraint.
;; Use QI scratch for DI mode as this is often split into byte sized operands.
(define_mode_attr rotx [(DI "&r,&r,X") (SI "&r,&r,X") (HI "X,X,X")])
(define_mode_attr rotsmode [(DI "QI") (SI "HI") (HI "QI")])
(define_mode_attr rotx [(DI "&r,&r,X") (SI "&r,&r,X") (PSI "&r,&r,X") (HI "X,X,X")])
(define_mode_attr rotsmode [(DI "QI") (SI "HI") (PSI "QI") (HI "QI")])
;; "rotlhi3"
;; "rotlpsi3"
;; "rotlsi3"
;; "rotldi3"
(define_expand "rotl<mode>3"
@ -2531,6 +2771,24 @@
[(set_attr "length" "3")
(set_attr "cc" "clobber")])
(define_insn "*rotlpsi2.1"
[(set (match_operand:PSI 0 "register_operand" "=r")
(rotate:PSI (match_operand:PSI 1 "register_operand" "0")
(const_int 1)))]
""
"lsl %A0\;rol %B0\;rol %C0\;adc %A0,__zero_reg__"
[(set_attr "length" "4")
(set_attr "cc" "clobber")])
(define_insn "*rotlpsi2.23"
[(set (match_operand:PSI 0 "register_operand" "=r")
(rotate:PSI (match_operand:PSI 1 "register_operand" "0")
(const_int 23)))]
""
"bst %A0,0\;ror %C0\;ror %B0\;ror %A0\;bld %C0,7"
[(set_attr "length" "5")
(set_attr "cc" "clobber")])
(define_insn "*rotlsi2.1"
[(set (match_operand:SI 0 "register_operand" "=r")
(rotate:SI (match_operand:SI 1 "register_operand" "0")
@ -2567,6 +2825,7 @@
(clobber (match_scratch:<rotsmode> 3 "=<rotx>"))]
"AVR_HAVE_MOVW
&& CONST_INT_P (operands[2])
&& GET_MODE_SIZE (<MODE>mode) % 2 == 0
&& 0 == INTVAL (operands[2]) % 16"
"#"
"&& (reload_completed || <MODE>mode == DImode)"
@ -2580,6 +2839,7 @@
;; Split byte aligned rotates using scratch that is always QI mode.
;; "*rotbhi"
;; "*rotbpsi"
;; "*rotbsi"
;; "*rotbdi"
(define_insn_and_split "*rotb<mode>"
@ -2589,7 +2849,8 @@
(clobber (match_scratch:QI 3 "=<rotx>"))]
"CONST_INT_P (operands[2])
&& (8 == INTVAL (operands[2]) % 16
|| (!AVR_HAVE_MOVW
|| ((!AVR_HAVE_MOVW
|| GET_MODE_SIZE (<MODE>mode) % 2 != 0)
&& 0 == INTVAL (operands[2]) % 16))"
"#"
"&& (reload_completed || <MODE>mode == DImode)"
@ -2830,6 +3091,18 @@
(set_attr "adjust_len" "ashlsi")
(set_attr "cc" "none,set_n,clobber,clobber")])
(define_insn "ashlpsi3"
[(set (match_operand:PSI 0 "register_operand" "=r,r,r,r")
(ashift:PSI (match_operand:PSI 1 "register_operand" "0,0,r,0")
(match_operand:QI 2 "nonmemory_operand" "r,P,O,n")))
(clobber (match_scratch:QI 3 "=X,X,X,&d"))]
""
{
return avr_out_ashlpsi3 (insn, operands, NULL);
}
[(set_attr "adjust_len" "ashlpsi")
(set_attr "cc" "clobber")])
;; >> >> >> >> >> >> >> >> >> >> >> >> >> >> >> >> >> >> >> >> >> >> >> >> >>
;; arithmetic shift right
@ -2853,6 +3126,18 @@
(set_attr "adjust_len" "ashrhi")
(set_attr "cc" "clobber,none,clobber,set_n,clobber,clobber,clobber")])
(define_insn "ashrpsi3"
[(set (match_operand:PSI 0 "register_operand" "=r,r,r,r,r")
(ashiftrt:PSI (match_operand:PSI 1 "register_operand" "0,0,0,r,0")
(match_operand:QI 2 "nonmemory_operand" "r,P,K,O,n")))
(clobber (match_scratch:QI 3 "=X,X,X,X,&d"))]
""
{
return avr_out_ashrpsi3 (insn, operands, NULL);
}
[(set_attr "adjust_len" "ashrpsi")
(set_attr "cc" "clobber")])
(define_insn "ashrsi3"
[(set (match_operand:SI 0 "register_operand" "=r,r,r,r,r,r,r")
(ashiftrt:SI (match_operand:SI 1 "register_operand" "0,0,0,r,0,0,0")
@ -2966,6 +3251,18 @@
(set_attr "adjust_len" "lshrhi")
(set_attr "cc" "clobber,none,clobber,clobber,clobber,clobber,clobber")])
(define_insn "lshrpsi3"
[(set (match_operand:PSI 0 "register_operand" "=r,r,r,r,r")
(lshiftrt:PSI (match_operand:PSI 1 "register_operand" "0,0,r,0,0")
(match_operand:QI 2 "nonmemory_operand" "r,P,O,K,n")))
(clobber (match_scratch:QI 3 "=X,X,X,X,&d"))]
""
{
return avr_out_lshrpsi3 (insn, operands, NULL);
}
[(set_attr "adjust_len" "lshrpsi")
(set_attr "cc" "clobber")])
(define_insn "lshrsi3"
[(set (match_operand:SI 0 "register_operand" "=r,r,r,r,r,r,r")
(lshiftrt:SI (match_operand:SI 1 "register_operand" "0,0,0,r,0,0,0")
@ -3100,6 +3397,17 @@
[(set_attr "length" "3,4,4")
(set_attr "cc" "set_czn,set_n,set_czn")])
(define_insn "negpsi2"
[(set (match_operand:PSI 0 "register_operand" "=!d,r,&r")
(neg:PSI (match_operand:PSI 1 "register_operand" "0,0,r")))]
""
"@
com %C0\;com %B0\;neg %A0\;sbci %B0,-1\;sbci %C0,-1
com %C0\;com %B0\;com %A0\;adc %A0,__zero_reg__\;adc %B0,__zero_reg__\;adc %C0,__zero_reg__
clr %A0\;clr %B0\;clr %C0\;sub %A0,%A1\;sbc %B0,%B1\;sbc %C0,%C1"
[(set_attr "length" "5,6,6")
(set_attr "cc" "set_czn,set_n,set_czn")])
(define_insn "negsi2"
[(set (match_operand:SI 0 "register_operand" "=!d,r,&r,&r")
(neg:SI (match_operand:SI 1 "register_operand" "0,0,r ,r")))]
@ -3143,6 +3451,14 @@
[(set_attr "length" "2")
(set_attr "cc" "set_n")])
(define_insn "one_cmplpsi2"
[(set (match_operand:PSI 0 "register_operand" "=r")
(not:PSI (match_operand:PSI 1 "register_operand" "0")))]
""
"com %0\;com %B0\;com %C0"
[(set_attr "length" "3")
(set_attr "cc" "set_n")])
(define_insn "one_cmplsi2"
[(set (match_operand:SI 0 "register_operand" "=r")
(not:SI (match_operand:SI 1 "register_operand" "0")))]
@ -3174,6 +3490,16 @@
[(set_attr "length" "3,4")
(set_attr "cc" "set_n,set_n")])
(define_insn "extendqipsi2"
[(set (match_operand:PSI 0 "register_operand" "=r,r")
(sign_extend:PSI (match_operand:QI 1 "combine_pseudo_register_operand" "0,*r")))]
""
"@
clr %B0\;sbrc %A0,7\;com %B0\;mov %C0,%B0
mov %A0,%A1\;clr %B0\;sbrc %A0,7\;com %B0\;mov %C0,%B0"
[(set_attr "length" "4,5")
(set_attr "cc" "set_n,set_n")])
(define_insn "extendqisi2"
[(set (match_operand:SI 0 "register_operand" "=r,r")
(sign_extend:SI (match_operand:QI 1 "combine_pseudo_register_operand" "0,*r")))]
@ -3184,6 +3510,18 @@
[(set_attr "length" "5,6")
(set_attr "cc" "set_n,set_n")])
(define_insn "extendhipsi2"
[(set (match_operand:PSI 0 "register_operand" "=r,r ,r")
(sign_extend:PSI (match_operand:HI 1 "combine_pseudo_register_operand" "0,*r,*r")))]
""
"@
clr %C0\;sbrc %B0,7\;com %C0
mov %A0,%A1\;mov %B0,%B1\;clr %C0\;sbrc %B0,7\;com %C0
movw %A0,%A1\;clr %C0\;sbrc %B0,7\;com %C0"
[(set_attr "length" "3,5,4")
(set_attr "isa" "*,mov,movw")
(set_attr "cc" "set_n")])
(define_insn "extendhisi2"
[(set (match_operand:SI 0 "register_operand" "=r,r ,r")
(sign_extend:SI (match_operand:HI 1 "combine_pseudo_register_operand" "0,*r,*r")))]
@ -3196,6 +3534,14 @@
(set_attr "isa" "*,mov,movw")
(set_attr "cc" "set_n")])
(define_insn "extendpsisi2"
[(set (match_operand:SI 0 "register_operand" "=r")
(sign_extend:SI (match_operand:PSI 1 "combine_pseudo_register_operand" "0")))]
""
"clr %D0\;sbrc %C0,7\;com %D0"
[(set_attr "length" "3")
(set_attr "cc" "set_n")])
;; xx<---x xx<---x xx<---x xx<---x xx<---x xx<---x xx<---x xx<---x xx<---x
;; zero extend
@ -3215,6 +3561,21 @@
operands[3] = simplify_gen_subreg (QImode, operands[0], HImode, high_off);
})
(define_insn_and_split "zero_extendqipsi2"
[(set (match_operand:PSI 0 "register_operand" "=r")
(zero_extend:PSI (match_operand:QI 1 "combine_pseudo_register_operand" "r")))]
""
"#"
"reload_completed"
[(set (match_dup 2) (match_dup 1))
(set (match_dup 3) (const_int 0))
(set (match_dup 4) (const_int 0))]
{
operands[2] = simplify_gen_subreg (QImode, operands[0], PSImode, 0);
operands[3] = simplify_gen_subreg (QImode, operands[0], PSImode, 1);
operands[4] = simplify_gen_subreg (QImode, operands[0], PSImode, 2);
})
(define_insn_and_split "zero_extendqisi2"
[(set (match_operand:SI 0 "register_operand" "=r")
(zero_extend:SI (match_operand:QI 1 "combine_pseudo_register_operand" "r")))]
@ -3231,6 +3592,19 @@
operands[3] = simplify_gen_subreg (HImode, operands[0], SImode, high_off);
})
(define_insn_and_split "zero_extendhipsi2"
[(set (match_operand:PSI 0 "register_operand" "=r")
(zero_extend:PSI (match_operand:HI 1 "combine_pseudo_register_operand" "r")))]
""
"#"
"reload_completed"
[(set (match_dup 2) (match_dup 1))
(set (match_dup 3) (const_int 0))]
{
operands[2] = simplify_gen_subreg (HImode, operands[0], PSImode, 0);
operands[3] = simplify_gen_subreg (QImode, operands[0], PSImode, 2);
})
(define_insn_and_split "zero_extendhisi2"
[(set (match_operand:SI 0 "register_operand" "=r")
(zero_extend:SI (match_operand:HI 1 "combine_pseudo_register_operand" "r")))]
@ -3247,6 +3621,19 @@
operands[3] = simplify_gen_subreg (HImode, operands[0], SImode, high_off);
})
(define_insn_and_split "zero_extendpsisi2"
[(set (match_operand:SI 0 "register_operand" "=r")
(zero_extend:SI (match_operand:PSI 1 "combine_pseudo_register_operand" "r")))]
""
"#"
"reload_completed"
[(set (match_dup 2) (match_dup 1))
(set (match_dup 3) (const_int 0))]
{
operands[2] = simplify_gen_subreg (PSImode, operands[0], SImode, 0);
operands[3] = simplify_gen_subreg (QImode, operands[0], SImode, 3);
})
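
The new extension patterns cover conversions into and out of the 24-bit types; a hedged sketch of sources that should map onto them (long is 32 bits and int 16 bits on AVR):

    long          widen_s  (__int24  x)     { return x; }  /* likely extendpsisi2      */
    unsigned long widen_u  (__uint24 x)     { return x; }  /* likely zero_extendpsisi2 */
    __int24       from_chr (signed char c)  { return c; }  /* likely extendqipsi2      */
    __uint24      from_u16 (unsigned int u) { return u; }  /* likely zero_extendhipsi2 */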
(define_insn_and_split "zero_extendqidi2"
[(set (match_operand:DI 0 "register_operand" "=r")
(zero_extend:DI (match_operand:QI 1 "register_operand" "r")))]
@ -3340,6 +3727,25 @@
[(set_attr "cc" "compare")
(set_attr "length" "2")])
(define_insn "*negated_tstpsi"
[(set (cc0)
(compare (neg:PSI (match_operand:PSI 0 "register_operand" "r"))
(const_int 0)))]
"!flag_wrapv && !flag_trapv && flag_strict_overflow"
"cp __zero_reg__,%A0\;cpc __zero_reg__,%B0\;cpc __zero_reg__,%C0"
[(set_attr "cc" "compare")
(set_attr "length" "3")])
(define_insn "*reversed_tstpsi"
[(set (cc0)
(compare (const_int 0)
(match_operand:PSI 0 "register_operand" "r")))
(clobber (match_scratch:QI 1 "=X"))]
""
"cp __zero_reg__,%A0\;cpc __zero_reg__,%B0\;cpc __zero_reg__,%C0"
[(set_attr "cc" "compare")
(set_attr "length" "3")])
(define_insn "*negated_tstsi"
[(set (cc0)
(compare (neg:SI (match_operand:SI 0 "register_operand" "r"))
@ -3418,6 +3824,35 @@
(set_attr "length" "1,2,2,3,4,2,4")
(set_attr "adjust_len" "tsthi,tsthi,*,*,*,compare,compare")])
(define_insn "*cmppsi"
[(set (cc0)
(compare (match_operand:PSI 0 "register_operand" "r,r,d ,r ,d,r")
(match_operand:PSI 1 "nonmemory_operand" "L,r,s ,s ,M,n")))
(clobber (match_scratch:QI 2 "=X,X,&d,&d ,X,&d"))]
""
{
switch (which_alternative)
{
case 0:
return avr_out_tstpsi (insn, operands, NULL);
case 1:
return "cp %A0,%A1\;cpc %B0,%B1\;cpc %C0,%C1";
case 2:
return reg_unused_after (insn, operands[0])
? "subi %A0,lo8(%1)\;sbci %B0,hi8(%1)\;sbci %C0,hh8(%1)"
: "cpi %A0,lo8(%1)\;ldi %2,hi8(%1)\;cpc %B0,%2\;ldi %2,hh8(%1)\;cpc %C0,%2";
case 3:
return "ldi %2,lo8(%1)\;cp %A0,%2\;ldi %2,hi8(%1)\;cpc %B0,%2\;ldi %2,hh8(%1)\;cpc %C0,%2";
}
return avr_out_compare (insn, operands, NULL);
}
[(set_attr "cc" "compare")
(set_attr "length" "3,3,5,6,3,7")
(set_attr "adjust_len" "tstpsi,*,*,*,compare,compare")])
(define_insn "*cmpsi"
[(set (cc0)
@ -3456,6 +3891,18 @@
(pc)))]
"")
(define_expand "cbranchpsi4"
[(parallel [(set (cc0)
(compare (match_operand:PSI 1 "register_operand" "")
(match_operand:PSI 2 "nonmemory_operand" "")))
(clobber (match_scratch:QI 4 ""))])
(set (pc)
(if_then_else (match_operator 0 "ordered_comparison_operator" [(cc0)
(const_int 0)])
(label_ref (match_operand 3 "" ""))
(pc)))]
"")
(define_expand "cbranchhi4"
[(parallel [(set (cc0)
(compare (match_operand:HI 1 "register_operand" "")

View File

@ -133,6 +133,11 @@
(and (match_code "const_int")
(match_test "avr_popcount_each_byte (op, 2, (1<<0) | (1<<7) | (1<<8))")))
(define_constraint "Ca3"
"Constant 3-byte integer that allows AND without clobber register."
(and (match_code "const_int")
(match_test "avr_popcount_each_byte (op, 3, (1<<0) | (1<<7) | (1<<8))")))
(define_constraint "Ca4"
"Constant 4-byte integer that allows AND without clobber register."
(and (match_code "const_int")
@ -143,6 +148,11 @@
(and (match_code "const_int")
(match_test "avr_popcount_each_byte (op, 2, (1<<0) | (1<<1) | (1<<8))")))
(define_constraint "Co3"
"Constant 3-byte integer that allows OR without clobber register."
(and (match_code "const_int")
(match_test "avr_popcount_each_byte (op, 3, (1<<0) | (1<<1) | (1<<8))")))
(define_constraint "Co4"
"Constant 4-byte integer that allows OR without clobber register."
(and (match_code "const_int")
@ -153,6 +163,11 @@
(and (match_code "const_int")
(match_test "avr_popcount_each_byte (op, 2, (1<<0) | (1<<8))")))
(define_constraint "Cx3"
"Constant 3-byte integer that allows XOR without clobber register."
(and (match_code "const_int")
(match_test "avr_popcount_each_byte (op, 3, (1<<0) | (1<<8))")))
(define_constraint "Cx4"
"Constant 4-byte integer that allows XOR without clobber register."
(and (match_code "const_int")

View File

@ -1,3 +1,9 @@
2011-11-04 Georg-Johann Lay <avr@gjlay.de>
PR target/50931
* config/t-avr (LIB1ASMFUNCS): Add _divmodpsi4, _udivmodpsi4.
* config/lib1funcs.S (__udivmodpsi4, __divmodpsi4): New functions.
2011-11-04 Joel Sherrill <joel.sherrill@oarcorp.com>
PR target/50989

View File

@ -599,7 +599,142 @@ ENDF __divmodhi4
#undef r_arg2L
#undef r_cnt
/*******************************************************
Division 24 / 24 => (result + remainder)
*******************************************************/
;; A[0..2]: In: Dividend; Out: Quotient
#define A0 22
#define A1 A0+1
#define A2 A0+2
;; B[0..2]: In: Divisor; Out: Remainder
#define B0 18
#define B1 B0+1
#define B2 B0+2
;; C[0..2]: Expand remainder
#define C0 __zero_reg__
#define C1 26
#define C2 25
;; Loop counter
#define r_cnt 21
#if defined (L_udivmodpsi4)
;; R24:R22 = R24:R22 udiv R20:R18
;; R20:R18 = R24:R22 umod R20:R18
;; Clobbers: R21, R25, R26
DEFUN __udivmodpsi4
; init loop counter
ldi r_cnt, 24+1
; Clear remainder and carry. C0 is already 0
clr C1
sub C2, C2
; jump to entry point
rjmp __udivmodpsi4_start
__udivmodpsi4_loop:
; shift dividend into remainder
rol C0
rol C1
rol C2
; compare remainder & divisor
cp C0, B0
cpc C1, B1
cpc C2, B2
brcs __udivmodpsi4_start ; remainder <= divisor
sub C0, B0 ; restore remainder
sbc C1, B1
sbc C2, B2
__udivmodpsi4_start:
; shift dividend (with CARRY)
rol A0
rol A1
rol A2
; decrement loop counter
dec r_cnt
brne __udivmodpsi4_loop
com A0
com A1
com A2
; div/mod results to return registers
; remainder
mov B0, C0
mov B1, C1
mov B2, C2
clr __zero_reg__ ; C0
ret
ENDF __udivmodpsi4
#endif /* defined (L_udivmodpsi4) */
#if defined (L_divmodpsi4)
;; R24:R22 = R24:R22 div R20:R18
;; R20:R18 = R24:R22 mod R20:R18
;; Clobbers: T, __tmp_reg__, R21, R25, R26
DEFUN __divmodpsi4
; R0.7 will contain the sign of the result:
; R0.7 = A.sign ^ B.sign
mov __tmp_reg__, B2
; T-flag = sign of dividend
bst A2, 7
brtc 0f
com __tmp_reg__
; Adjust dividend's sign
rcall __divmodpsi4_negA
0:
; Adjust divisor's sign
sbrc B2, 7
rcall __divmodpsi4_negB
; Do the unsigned div/mod
XCALL __udivmodpsi4
; Adjust quotient's sign
sbrc __tmp_reg__, 7
rcall __divmodpsi4_negA
; Adjust remainder's sign
brtc __divmodpsi4_end
__divmodpsi4_negB:
; Correct divisor/remainder sign
com B2
com B1
neg B0
sbci B1, -1
sbci B2, -1
ret
; Correct dividend/quotient sign
__divmodpsi4_negA:
com A2
com A1
neg A0
sbci A1, -1
sbci A2, -1
__divmodpsi4_end:
ret
ENDF __divmodpsi4
#endif /* defined (L_divmodpsi4) */
#undef A0
#undef A1
#undef A2
#undef B0
#undef B1
#undef B2
#undef C0
#undef C1
#undef C2
#undef r_cnt
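
The loop above is a classic shift-and-subtract division over 24+1 iterations, with the quotient bits collected in complemented form and fixed up by the three com instructions at the end. A behavioural C sketch of the same idea (not a transcription of the register-level code; arguments are assumed to fit in 24 bits):

    #include <stdint.h>

    /* Behavioural sketch of restoring shift-and-subtract division as
       used by __udivmodpsi4 above.  */
    static void
    udivmod24_model (uint32_t a, uint32_t b, uint32_t *quot, uint32_t *rem)
    {
        uint32_t q = 0, r = 0;

        for (int i = 23; i >= 0; i--)
        {
            r = (r << 1) | ((a >> i) & 1);  /* shift next dividend bit into the remainder */
            if (r >= b)
            {
                r -= b;                     /* remainder >= divisor: subtract ...      */
                q |= (uint32_t) 1 << i;     /* ... and record a 1 in the quotient      */
            }
        }

        *quot = q;
        *rem  = r;
    }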
/*******************************************************
Division 32 / 32 => (result + remainder)
*******************************************************/

View File

@ -12,6 +12,7 @@ LIB1ASMFUNCS = \
_divmodqi4 \
_udivmodhi4 \
_divmodhi4 \
_divmodpsi4 _udivmodpsi4 \
_udivmodsi4 \
_divmodsi4 \
_prologue \