Mirror of git://gcc.gnu.org/git/gcc.git (synced 2025-04-10 17:11:01 +08:00)
Remove trailing white spaces
	* lra-assigns.c: Remove trailing white spaces.
	* lra-coalesce.c: Likewise.
	* lra-constraints.c: Likewise.
	* lra-eliminations.c: Likewise.
	* lra-int.h: Likewise.
	* lra-spills.c: Likewise.
	* lra.c: Likewise.

From-SVN: r192966
parent 1ea58d3499
commit f4eafc3059
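Every hunk in the diff below is mechanical: it only deletes trailing blanks and tabs, leaving the code otherwise unchanged. As an illustration only, and not the tool actually used for this commit, a small C filter along the following lines performs the same cleanup on a file piped through it; the fixed 4096-byte line buffer is an assumption of the sketch.

/* Illustrative sketch only (not part of this commit): strip trailing
   spaces and tabs from each line of stdin and write the result to
   stdout.  Assumes input lines shorter than the 4096-byte buffer.  */
#include <stdio.h>
#include <string.h>

int
main (void)
{
  char buf[4096];

  while (fgets (buf, sizeof buf, stdin) != NULL)
    {
      size_t len = strlen (buf);
      int had_newline = len > 0 && buf[len - 1] == '\n';

      /* Drop the newline, trim the trailing blanks and tabs, then put
         the newline back so the line structure is preserved.  */
      if (had_newline)
        buf[--len] = '\0';
      while (len > 0 && (buf[len - 1] == ' ' || buf[len - 1] == '\t'))
        buf[--len] = '\0';
      fputs (buf, stdout);
      if (had_newline)
        fputc ('\n', stdout);
    }
  return 0;
}

Feeding each file named in the ChangeLog entry through such a filter and writing the result back yields exactly the kind of whitespace-only hunks listed below.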
@@ -1,3 +1,13 @@
+2012-10-29  H.J. Lu  <hongjiu.lu@intel.com>
+
+	* lra-assigns.c: Remove trailing white spaces.
+	* lra-coalesce.c: Likewise.
+	* lra-constraints.c: Likewise.
+	* lra-eliminations.c: Likewise.
+	* lra-int.h: Likewise.
+	* lra-spills.c: Likewise.
+	* lra.c: Likewise.
+
 2012-10-29  Manuel López-Ibáñez  <manu@gcc.gnu.org>
 
 	PR c/53066
@@ -149,7 +149,7 @@ init_regno_assign_info (void)
@@ -185,10 +185,10 @@ reload_pseudo_compare_func (const void *v1p, const void *v2p)
@@ -217,7 +217,7 @@ pseudo_compare_func (const void *v1p, const void *v2p)
@@ -378,7 +378,7 @@ init_live_reload_and_inheritance_pseudos (void)
@@ -470,7 +470,7 @@ find_hard_regno_for (int regno, int *cost, int try_only_hard_regno)
@@ -511,7 +511,7 @@ find_hard_regno_for (int regno, int *cost, int try_only_hard_regno)
@@ -810,7 +810,7 @@ spill_for (int regno, bitmap spilled_pseudo_bitmap)
@@ -867,7 +867,7 @@ spill_for (int regno, bitmap spilled_pseudo_bitmap)
@@ -913,7 +913,7 @@ spill_for (int regno, bitmap spilled_pseudo_bitmap)
@@ -1038,7 +1038,7 @@ setup_live_pseudos_and_spill_after_risky_transforms (bitmap
@@ -1239,7 +1239,7 @@ assign_by_spills (void)
@@ -79,7 +79,7 @@ move_freq_compare_func (const void *v1p, const void *v2p)
@@ -87,7 +87,7 @@
@@ -250,14 +250,14 @@ in_class_p (rtx reg, enum reg_class cl, enum reg_class *new_class)
@@ -447,7 +447,7 @@ static inline bool
@@ -510,7 +510,7 @@ operands_match_p (rtx x, rtx y, int y_hard_regno)
@@ -538,7 +538,7 @@ operands_match_p (rtx x, rtx y, int y_hard_regno)
@@ -711,13 +711,13 @@ match_reload (signed char out, signed char *ins, enum reg_class goal_class,
@@ -773,12 +773,12 @@ reg_class_from_constraints (const char *p)
@@ -791,7 +791,7 @@ reg_class_from_constraints (const char *p)
@@ -1127,7 +1127,7 @@ simplify_operand_subreg (int nop, enum machine_mode reg_mode)
@@ -1219,7 +1219,7 @@ uses_hard_regs_p (rtx x, HARD_REG_SET set)
@@ -1329,7 +1329,7 @@ process_alt_operands (int only_alternative)
@@ -1402,16 +1402,16 @@ process_alt_operands (int only_alternative)
@@ -1424,11 +1424,11 @@ process_alt_operands (int only_alternative)
@@ -1446,36 +1446,36 @@ process_alt_operands (int only_alternative)
@@ -1511,7 +1511,7 @@ process_alt_operands (int only_alternative)
@@ -1533,7 +1533,7 @@ process_alt_operands (int only_alternative)
@@ -1556,7 +1556,7 @@ process_alt_operands (int only_alternative)
@@ -1565,7 +1565,7 @@ process_alt_operands (int only_alternative)
@@ -1582,7 +1582,7 @@ process_alt_operands (int only_alternative)
@@ -1590,28 +1590,28 @@ process_alt_operands (int only_alternative)
@@ -1623,7 +1623,7 @@ process_alt_operands (int only_alternative)
@@ -1631,14 +1631,14 @@ process_alt_operands (int only_alternative)
@@ -1647,13 +1647,13 @@ process_alt_operands (int only_alternative)
@@ -1666,20 +1666,20 @@ process_alt_operands (int only_alternative)
@@ -1694,7 +1694,7 @@ process_alt_operands (int only_alternative)
@@ -1705,7 +1705,7 @@ process_alt_operands (int only_alternative)
@@ -1720,7 +1720,7 @@ process_alt_operands (int only_alternative)
@@ -1740,13 +1740,13 @@ process_alt_operands (int only_alternative)
@@ -1778,7 +1778,7 @@ process_alt_operands (int only_alternative)
@@ -1827,7 +1827,7 @@ process_alt_operands (int only_alternative)
@@ -1880,7 +1880,7 @@ process_alt_operands (int only_alternative)
@@ -1890,7 +1890,7 @@ process_alt_operands (int only_alternative)
@@ -1899,13 +1899,13 @@ process_alt_operands (int only_alternative)
@@ -1918,7 +1918,7 @@ process_alt_operands (int only_alternative)
@@ -1930,7 +1930,7 @@ process_alt_operands (int only_alternative)
@@ -1948,11 +1948,11 @@ process_alt_operands (int only_alternative)
@@ -2054,7 +2054,7 @@ process_alt_operands (int only_alternative)
@@ -2073,7 +2073,7 @@ valid_address_p (enum machine_mode mode ATTRIBUTE_UNUSED,
@@ -2660,7 +2660,7 @@ curr_insn_transform (void)
@@ -2798,7 +2798,7 @@ curr_insn_transform (void)
@@ -2814,10 +2814,10 @@ curr_insn_transform (void)
@@ -2829,7 +2829,7 @@ curr_insn_transform (void)
@@ -2848,29 +2848,29 @@ curr_insn_transform (void)
@@ -2892,11 +2892,11 @@ curr_insn_transform (void)
@@ -2915,7 +2915,7 @@ curr_insn_transform (void)
@@ -3172,10 +3172,10 @@ multi_block_pseudo_p (int regno)
@@ -3273,7 +3273,7 @@ lra_constraints (bool first_p)
@@ -3326,7 +3326,7 @@ lra_constraints (bool first_p)
@@ -3366,7 +3366,7 @@ lra_constraints (bool first_p)
@@ -3420,7 +3420,7 @@ lra_constraints (bool first_p)
@@ -3504,7 +3504,7 @@ static void
@@ -3521,7 +3521,7 @@ add_next_usage_insn (int regno, rtx insn, int reloads_num)
@@ -3637,8 +3637,8 @@ static bitmap_head check_only_regs;
@@ -3677,7 +3677,7 @@ inherit_reload_reg (bool def_p, int original_regno,
@@ -3857,7 +3857,7 @@ choose_split_class (enum reg_class allocno_class,
@@ -3928,7 +3928,7 @@ split_reg (bool before_p, int original_regno, rtx insn, rtx next_usage_insns)
@@ -4088,7 +4088,7 @@ update_ebb_live_info (rtx head, rtx tail)
@@ -4236,7 +4236,7 @@ get_live_on_other_edges (basic_block from, basic_block to, bitmap res)
@@ -4287,7 +4287,7 @@ inherit_in_ebb (rtx head, rtx tail)
@@ -4388,7 +4388,7 @@ inherit_in_ebb (rtx head, rtx tail)
@@ -4407,7 +4407,7 @@ inherit_in_ebb (rtx head, rtx tail)
@@ -4554,7 +4554,7 @@ inherit_in_ebb (rtx head, rtx tail)
@@ -4591,7 +4591,7 @@ inherit_in_ebb (rtx head, rtx tail)
@@ -4734,7 +4734,7 @@ remove_inheritance_pseudos (bitmap remove_pseudos)
@@ -81,25 +81,25 @@ along with GCC; see the file COPYING3. If not see
@@ -335,7 +335,7 @@ lra_eliminate_regs_1 (rtx x, enum machine_mode mem_mode,
@@ -354,10 +354,10 @@ lra_eliminate_regs_1 (rtx x, enum machine_mode mem_mode,
@@ -405,7 +405,7 @@ lra_eliminate_regs_1 (rtx x, enum machine_mode mem_mode,
@@ -420,7 +420,7 @@ lra_eliminate_regs_1 (rtx x, enum machine_mode mem_mode,
@@ -777,7 +777,7 @@ eliminate_regs_in_insn (rtx insn, bool replace_p)
@@ -798,11 +798,11 @@ eliminate_regs_in_insn (rtx insn, bool replace_p)
@@ -818,14 +818,14 @@ eliminate_regs_in_insn (rtx insn, bool replace_p)
@@ -841,14 +841,14 @@ eliminate_regs_in_insn (rtx insn, bool replace_p)
@@ -892,13 +892,13 @@ eliminate_regs_in_insn (rtx insn, bool replace_p)
@@ -909,7 +909,7 @@ eliminate_regs_in_insn (rtx insn, bool replace_p)
@@ -923,7 +923,7 @@ eliminate_regs_in_insn (rtx insn, bool replace_p)
@@ -1153,7 +1153,7 @@ init_elim_table (void)
@@ -390,7 +390,7 @@ lra_update_operator_dups (lra_insn_recog_data_t id)
@@ -34,7 +34,7 @@ along with GCC; see the file COPYING3. If not see
@@ -143,9 +143,9 @@ assign_mem_slot (int i)
@@ -181,14 +181,14 @@ assign_mem_slot (int i)
@@ -265,7 +265,7 @@ assign_spill_hard_regs (int *pseudo_regnos, int n)
@@ -604,7 +604,7 @@ alter_subregs (rtx *loc, bool final_p)
gcc/lra.c (34 lines changed)
@@ -44,12 +44,12 @@ along with GCC; see the file COPYING3. If not see
@@ -63,7 +63,7 @@ along with GCC; see the file COPYING3. If not see
@@ -958,7 +958,7 @@ collect_non_operand_hard_regs (rtx *x, lra_insn_recog_data_t data,
@@ -1055,7 +1055,7 @@ lra_set_insn_recog_data (rtx insn)
@@ -1241,7 +1241,7 @@ lra_update_insn_recog_data (rtx insn)
@@ -1310,7 +1310,7 @@ lra_update_insn_recog_data (rtx insn)
@@ -1578,7 +1578,7 @@ add_regs_to_insn_regno_info (lra_insn_recog_data_t data, rtx x, int uid,
@@ -2026,7 +2026,7 @@ check_rtl (bool final_p)
@@ -2055,7 +2055,7 @@ has_nonexceptional_receiver (void)
@@ -2065,7 +2065,7 @@ has_nonexceptional_receiver (void)
@@ -2155,17 +2155,17 @@ update_inc_notes (void)
@@ -2307,7 +2307,7 @@ lra (FILE *f)