alpha.c (aligned_memory_operand): Handle out of range stack slots.

        * alpha.c (aligned_memory_operand): Handle out of range stack slots.
        Take a new SCRATCH argument for the occasion.  Update all callers.
        (get_unaligned_address): Abort on out of range stack slots.
        * alpha.md (adddi3 splitter): Check s_p_rtx not REGNO.
        (reload_inqi): Check for aligned mems before unaligned.
        (reload_inhi): Likewise.

From-SVN: r26362
Richard Henderson 1999-04-11 20:11:37 -07:00 committed by Richard Henderson
parent 55a98783c8
commit 96043e7e1c
3 changed files with 142 additions and 48 deletions

ChangeLog

@@ -1,3 +1,12 @@
Mon Apr 12 03:07:44 1999 Richard Henderson <rth@cygnus.com>
* alpha.c (aligned_memory_operand): Handle out of range stack slots.
Take a new SCRATCH argument for the occasion. Update all callers.
(get_unaligned_address): Abort on out of range stack slots.
* alpha.md (adddi3 splitter): Check s_p_rtx not REGNO.
(reload_inqi): Check for aligned mems before unaligned.
(reload_inhi): Likewise.
Mon Apr 12 03:11:30 1999 Jeffrey A Law (law@cygnus.com)
* flow.c (flow_delete_insn): If we delete a CODE_LABEL, also remove

alpha.c

@@ -728,11 +728,7 @@ divmod_operator (op, mode)
a constant. It must be a valid address. This means that we can do
this as an aligned reference plus some offset.
Take into account what reload will do.
We could say that out-of-range stack slots are alignable, but that would
complicate get_aligned_mem and it isn't worth the trouble since few
functions have large stack space. */
Take into account what reload will do. */
int
aligned_memory_operand (op, mode)
@@ -747,11 +743,18 @@ aligned_memory_operand (op, mode)
mode = GET_MODE (op);
}
if (reload_in_progress && GET_CODE (op) == REG
&& REGNO (op) >= FIRST_PSEUDO_REGISTER)
op = reg_equiv_mem[REGNO (op)];
if (reload_in_progress)
{
/* This is a stack slot. The stack pointer is always aligned.
We may have to jump through hoops to get a valid address,
but we can do it. */
if (GET_CODE (op) == REG
&& REGNO (op) >= FIRST_PSEUDO_REGISTER)
return 1;
}
if (GET_CODE (op) != MEM || GET_MODE (op) != mode
if (GET_CODE (op) != MEM
|| GET_MODE (op) != mode
|| ! memory_address_p (mode, XEXP (op, 0)))
return 0;
@@ -899,11 +902,12 @@ direct_return ()
/* REF is an alignable memory location. Place an aligned SImode
reference into *PALIGNED_MEM and the number of bits to shift into
*PBITNUM. */
*PBITNUM. SCRATCH is a free register for use in reloading out
of range stack slots. */
void
get_aligned_mem (ref, paligned_mem, pbitnum)
rtx ref;
get_aligned_mem (ref, scratch, paligned_mem, pbitnum)
rtx ref, scratch;
rtx *paligned_mem, *pbitnum;
{
rtx base;
@@ -919,13 +923,48 @@ get_aligned_mem (ref, paligned_mem, pbitnum)
ref = SUBREG_REG (ref);
}
if (GET_CODE (ref) == REG)
ref = reg_equiv_mem[REGNO (ref)];
if (reload_in_progress)
base = find_replacement (&XEXP (ref, 0));
{
if (GET_CODE (ref) == REG)
{
/* The "simple" case is where the stack slot is in range. */
if (reg_equiv_mem[REGNO (ref)])
{
ref = reg_equiv_mem[REGNO (ref)];
base = find_replacement (&XEXP (ref, 0));
}
else
{
/* The stack slot isn't in range. Fix it up as needed. */
HOST_WIDE_INT hi, lo;
base = reg_equiv_address[REGNO (ref)];
if (GET_CODE (base) != PLUS)
abort ();
offset += INTVAL (XEXP (base, 1));
base = XEXP (base, 0);
lo = ((offset & 0xFFFF) ^ 0x8000) - 0x8000;
hi = (((offset - lo) & 0xFFFFFFFF) ^ 0x80000000) - 0x80000000;
if (hi + lo != offset)
abort ();
if (scratch == NULL)
abort ();
emit_insn (gen_adddi3 (scratch, base, GEN_INT (hi)));
base = scratch;
offset = lo;
}
}
else
base = find_replacement (&XEXP (ref, 0));
}
else
base = XEXP (ref, 0);
{
if (GET_CODE (ref) != MEM)
abort ();
base = XEXP (ref, 0);
}
if (GET_CODE (base) == PLUS)
offset += INTVAL (XEXP (base, 1)), base = XEXP (base, 0);
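
As a side note, the hi/lo computation in the new get_aligned_mem code splits an
out-of-range frame offset into a sign-extended low 16 bits (small enough for an
Alpha memory displacement) and a high remainder that is first added into the
scratch register.  A minimal standalone C sketch of that arithmetic, not part of
the patch and using a made-up offset value, is:

    #include <assert.h>
    #include <stdio.h>

    /* Sketch of the offset split above: `lo' is the low 16 bits,
       sign-extended; `hi' is the remainder, sign-extended from 32 bits.
       Adding `hi' into the scratch register leaves only the small `lo'
       displacement in the final address.  The offset value is made up.  */
    int main (void)
    {
      long offset = 0x12345678;                        /* hypothetical out-of-range offset */
      long lo = ((offset & 0xFFFF) ^ 0x8000) - 0x8000; /* sign-extend low 16 bits */
      long hi = (((offset - lo) & 0xFFFFFFFF) ^ 0x80000000) - 0x80000000;

      assert (hi + lo == offset);   /* mirrors the abort () check in the patch */
      printf ("hi = %#lx, lo = %ld\n", hi, lo);
      return 0;
    }

For the sample value this prints hi = 0x12340000 and lo = 22136, and the same
identity hi + lo == offset is what the patch verifies before emitting the adddi3.
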
@@ -962,13 +1001,27 @@ get_unaligned_address (ref, extra_offset)
ref = SUBREG_REG (ref);
}
if (GET_CODE (ref) == REG)
ref = reg_equiv_mem[REGNO (ref)];
if (reload_in_progress)
base = find_replacement (&XEXP (ref, 0));
{
if (GET_CODE (ref) == REG)
{
if (reg_equiv_mem[REGNO (ref)])
ref = reg_equiv_mem[REGNO (ref)];
else
{
/* The stack slot is out of range. We should have handled
this as an aligned access -- I wonder why we didn't? */
abort ();
}
}
base = find_replacement (&XEXP (ref, 0));
}
else
base = XEXP (ref, 0);
{
if (GET_CODE (ref) != MEM)
abort ();
base = XEXP (ref, 0);
}
if (GET_CODE (base) == PLUS)
offset += INTVAL (XEXP (base, 1)), base = XEXP (base, 0);

alpha.md

@@ -547,7 +547,7 @@
(plus:DI (match_operand:DI 1 "register_operand" "")
(match_operand:DI 2 "const_int_operand" "")))]
"! add_operand (operands[2], DImode)
&& REGNO (operands[0]) != STACK_POINTER_REGNUM"
&& operands[0] != stack_pointer_rtx"
[(set (match_dup 0) (plus:DI (match_dup 1) (match_dup 3)))
(set (match_dup 0) (plus:DI (match_dup 0) (match_dup 4)))]
"
@@ -4521,7 +4521,7 @@
? gen_rtx_REG (SImode, REGNO (operands[0]))
: gen_reg_rtx (SImode));
get_aligned_mem (operands[1], &aligned_mem, &bitnum);
get_aligned_mem (operands[1], scratch, &aligned_mem, &bitnum);
emit_insn (gen_aligned_loadqi (operands[0], aligned_mem, bitnum,
scratch));
@@ -4561,7 +4561,7 @@
rtx temp1 = gen_reg_rtx (SImode);
rtx temp2 = gen_reg_rtx (SImode);
get_aligned_mem (operands[0], &aligned_mem, &bitnum);
get_aligned_mem (operands[0], NULL_RTX, &aligned_mem, &bitnum);
emit_insn (gen_aligned_store (aligned_mem, operands[1], bitnum,
temp1, temp2));
@@ -4632,7 +4632,7 @@
? gen_rtx_REG (SImode, REGNO (operands[0]))
: gen_reg_rtx (SImode));
get_aligned_mem (operands[1], &aligned_mem, &bitnum);
get_aligned_mem (operands[1], scratch, &aligned_mem, &bitnum);
emit_insn (gen_aligned_loadhi (operands[0], aligned_mem, bitnum,
scratch));
@@ -4672,7 +4672,7 @@
rtx temp1 = gen_reg_rtx (SImode);
rtx temp2 = gen_reg_rtx (SImode);
get_aligned_mem (operands[0], &aligned_mem, &bitnum);
get_aligned_mem (operands[0], NULL_RTX, &aligned_mem, &bitnum);
emit_insn (gen_aligned_store (aligned_mem, operands[1], bitnum,
temp1, temp2));
@@ -4706,18 +4706,34 @@
"! TARGET_BWX"
"
{
rtx addr = get_unaligned_address (operands[1], 0);
rtx scratch, seq;
/* It is possible that one of the registers we got for operands[2]
might coincide with that of operands[0] (which is why we made
it TImode). Pick the other one to use as our scratch. */
rtx scratch = gen_rtx_REG (DImode,
REGNO (operands[0]) == REGNO (operands[2])
? REGNO (operands[2]) + 1 : REGNO (operands[2]));
if (aligned_memory_operand (operands[1], QImode))
{
rtx aligned_mem, bitnum;
rtx seq = gen_unaligned_loadqi (operands[0], addr, scratch,
get_aligned_mem (operands[1],
gen_rtx_REG (DImode, REGNO (operands[2]) + 1),
&aligned_mem, &bitnum);
seq = gen_aligned_loadqi (operands[0], aligned_mem, bitnum,
gen_rtx_REG (SImode, REGNO (operands[2])));
}
else
{
rtx addr;
/* It is possible that one of the registers we got for operands[2]
might coincide with that of operands[0] (which is why we made
it TImode). Pick the other one to use as our scratch. */
if (REGNO (operands[0]) == REGNO (operands[2]))
scratch = gen_rtx_REG (DImode, REGNO (operands[2]) + 1);
else
scratch = gen_rtx_REG (DImode, REGNO (operands[2]));
addr = get_unaligned_address (operands[1], 0);
seq = gen_unaligned_loadqi (operands[0], addr, scratch,
gen_rtx_REG (DImode, REGNO (operands[0])));
}
alpha_set_memflags (seq, operands[1]);
emit_insn (seq);
DONE;
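
As an aside, the unaligned branch above picks its DImode scratch so that it can
never collide with the output register: operands[2] is TImode precisely so that
it spans two consecutive hard registers, and whichever half differs from
operands[0] is used.  A tiny standalone sketch of that choice (hypothetical
helper, register-number arithmetic only, not part of the patch):

    /* Hypothetical illustration of the scratch choice made above.
       `pair_regno' is the first of the two consecutive hard registers
       backing the TImode operands[2]; if the reload output register
       collides with it, fall back to the second register of the pair.  */
    static unsigned int
    pick_scratch_regno (unsigned int out_regno, unsigned int pair_regno)
    {
      return out_regno == pair_regno ? pair_regno + 1 : pair_regno;
    }
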
@@ -4725,23 +4741,39 @@
(define_expand "reload_inhi"
[(parallel [(match_operand:HI 0 "register_operand" "=r")
(match_operand:HI 1 "unaligned_memory_operand" "m")
(match_operand:HI 1 "any_memory_operand" "m")
(match_operand:TI 2 "register_operand" "=&r")])]
"! TARGET_BWX"
"
{
rtx addr = get_unaligned_address (operands[1], 0);
rtx scratch, seq;
/* It is possible that one of the registers we got for operands[2]
might coincide with that of operands[0] (which is why we made
it TImode). Pick the other one to use as our scratch. */
rtx scratch = gen_rtx_REG (DImode,
REGNO (operands[0]) == REGNO (operands[2])
? REGNO (operands[2]) + 1 : REGNO (operands[2]));
if (aligned_memory_operand (operands[1], HImode))
{
rtx aligned_mem, bitnum;
rtx seq = gen_unaligned_loadhi (operands[0], addr, scratch,
get_aligned_mem (operands[1],
gen_rtx_REG (DImode, REGNO (operands[2]) + 1),
&aligned_mem, &bitnum);
seq = gen_aligned_loadhi (operands[0], aligned_mem, bitnum,
gen_rtx_REG (SImode, REGNO (operands[2])));
}
else
{
rtx addr;
/* It is possible that one of the registers we got for operands[2]
might coincide with that of operands[0] (which is why we made
it TImode). Pick the other one to use as our scratch. */
if (REGNO (operands[0]) == REGNO (operands[2]))
scratch = gen_rtx_REG (DImode, REGNO (operands[2]) + 1);
else
scratch = gen_rtx_REG (DImode, REGNO (operands[2]));
addr = get_unaligned_address (operands[1], 0);
seq = gen_unaligned_loadhi (operands[0], addr, scratch,
gen_rtx_REG (DImode, REGNO (operands[0])));
}
alpha_set_memflags (seq, operands[1]);
emit_insn (seq);
DONE;
@@ -4758,7 +4790,7 @@
{
rtx aligned_mem, bitnum;
get_aligned_mem (operands[0], &aligned_mem, &bitnum);
get_aligned_mem (operands[0], NULL_RTX, &aligned_mem, &bitnum);
emit_insn (gen_aligned_store (aligned_mem, operands[1], bitnum,
gen_rtx_REG (SImode, REGNO (operands[2])),
@@ -4796,7 +4828,7 @@
{
rtx aligned_mem, bitnum;
get_aligned_mem (operands[0], &aligned_mem, &bitnum);
get_aligned_mem (operands[0], NULL_RTX, &aligned_mem, &bitnum);
emit_insn (gen_aligned_store (aligned_mem, operands[1], bitnum,
gen_rtx_REG (SImode, REGNO (operands[2])),