Mirror of git://gcc.gnu.org/git/gcc.git (synced 2025-04-05 22:01:27 +08:00)
rtl.h (true_dependence, [...]): Remove varies parameter.
gcc/
	* rtl.h (true_dependence, canon_true_dependence): Remove varies
	parameter.
	* alias.c (fixed_scalar_and_varying_struct_p): Delete.
	(true_dependence_1, write_dependence_p, may_alias_p): Don't call it.
	(true_dependence_1, true_dependence, canon_true_dependence): Remove
	varies parameter.
	* cselib.c (cselib_rtx_varies_p): Delete.
	(cselib_invalidate_mem): Update call to canon_true_dependence.
	* dse.c (record_store, check_mem_read_rtx): Likewise.
	(scan_reads_nospill): Likewise.
	* cse.c (check_dependence): Likewise.
	(cse_rtx_varies_p): Delete.
	* expr.c (safe_from_p): Update call to true_dependence.
	* ira.c (validate_equiv_mem_from_store): Likewise.
	(memref_referenced_p): Likewise.
	* postreload-gcse.c (find_mem_conflicts): Likewise.
	* sched-deps.c (sched_analyze_2): Likewise.
	* store-motion.c (load_kills_store): Likewise.
	* config/frv/frv.c (frv_registers_conflict_p_1): Likewise.
	* gcse.c (mems_conflict_for_gcse_p): Likewise.
	(compute_transp): Update call to canon_true_dependence.

From-SVN: r183485
This commit is contained in:
parent f8a27aa631
commit 53d9622bda
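In short, the varies callback disappears from the two dependence-check entry points, and every caller simply drops that argument. A minimal before/after sketch of the interface change, assembled from the rtl.h and ira.c hunks further down (not part of the commit message itself):

/* Before this commit (gcc/rtl.h): callers supplied an address-varies
   callback such as rtx_varies_p or rtx_addr_varies_p.  */
extern int true_dependence (const_rtx, enum machine_mode, const_rtx,
                            bool (*) (const_rtx, bool));
extern int canon_true_dependence (const_rtx, enum machine_mode, rtx, const_rtx,
                                  rtx, bool (*) (const_rtx, bool));

/* After this commit: the parameter is gone, so a caller such as
   validate_equiv_mem_from_store in ira.c changes from
     true_dependence (dest, VOIDmode, equiv_mem, rtx_varies_p)
   to
     true_dependence (dest, VOIDmode, equiv_mem).  */
extern int true_dependence (const_rtx, enum machine_mode, const_rtx);
extern int canon_true_dependence (const_rtx, enum machine_mode, rtx,
                                  const_rtx, rtx);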
gcc/ChangeLog
@@ -1,3 +1,27 @@
+2012-01-24  Richard Sandiford  <rdsandiford@googlemail.com>
+
+	* rtl.h (true_dependence, canon_true_dependence): Remove varies
+	parameter.
+	* alias.c (fixed_scalar_and_varying_struct_p): Delete.
+	(true_dependence_1, write_dependence_p, may_alias_p): Don't call it.
+	(true_dependence_1, true_dependence, canon_true_dependence): Remove
+	varies parameter.
+	* cselib.c (cselib_rtx_varies_p): Delete.
+	(cselib_invalidate_mem): Update call to canon_true_dependence.
+	* dse.c (record_store, check_mem_read_rtx): Likewise.
+	(scan_reads_nospill): Likewise.
+	* cse.c (check_dependence): Likewise.
+	(cse_rtx_varies_p): Delete.
+	* expr.c (safe_from_p): Update call to true_dependence.
+	* ira.c (validate_equiv_mem_from_store): Likewise.
+	(memref_referenced_p): Likewise.
+	* postreload-gcse.c (find_mem_conflicts): Likewise.
+	* sched-deps.c (sched_analyze_2): Likewise.
+	* store-motion.c (load_kills_store): Likewise.
+	* config/frv/frv.c (frv_registers_conflict_p_1): Likewise.
+	* gcse.c (mems_conflict_for_gcse_p): Likewise.
+	(compute_transp): Update call to canon_true_dependence.
+
 2012-01-25  Richard Henderson  <rth@redhat.com>
 
 	* optabs.c (CODE_FOR_atomic_test_and_set): Provide default.
75  gcc/alias.c
@@ -157,8 +157,6 @@ static rtx find_base_value (rtx);
 static int mems_in_disjoint_alias_sets_p (const_rtx, const_rtx);
 static int insert_subset_children (splay_tree_node, void*);
 static alias_set_entry get_alias_set_entry (alias_set_type);
-static const_rtx fixed_scalar_and_varying_struct_p (const_rtx, const_rtx, rtx, rtx,
-                                                    bool (*) (const_rtx, bool));
 static int aliases_everything_p (const_rtx);
 static bool nonoverlapping_component_refs_p (const_tree, const_tree);
 static tree decl_for_component_ref (tree);
@@ -2078,11 +2076,9 @@ memrefs_conflict_p (int xsize, rtx x, int ysize, rtx y, HOST_WIDE_INT c)
    changed. A volatile and non-volatile reference can be interchanged
    though.
 
-   A MEM_IN_STRUCT reference at a non-AND varying address can never
-   conflict with a non-MEM_IN_STRUCT reference at a fixed address. We
-   also must allow AND addresses, because they may generate accesses
-   outside the object being referenced. This is used to generate
-   aligned addresses from unaligned addresses, for instance, the alpha
+   We also must allow AND addresses, because they may generate accesses
+   outside the object being referenced. This is used to generate aligned
+   addresses from unaligned addresses, for instance, the alpha
    storeqi_unaligned pattern. */
 
 /* Read dependence: X is read after read in MEM takes place. There can
@@ -2094,39 +2090,6 @@ read_dependence (const_rtx mem, const_rtx x)
   return MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem);
 }
 
-/* Returns MEM1 if and only if MEM1 is a scalar at a fixed address and
-   MEM2 is a reference to a structure at a varying address, or returns
-   MEM2 if vice versa. Otherwise, returns NULL_RTX. If a non-NULL
-   value is returned MEM1 and MEM2 can never alias. VARIES_P is used
-   to decide whether or not an address may vary; it should return
-   nonzero whenever variation is possible.
-   MEM1_ADDR and MEM2_ADDR are the addresses of MEM1 and MEM2. */
-
-static const_rtx
-fixed_scalar_and_varying_struct_p (const_rtx mem1, const_rtx mem2, rtx mem1_addr,
-                                   rtx mem2_addr,
-                                   bool (*varies_p) (const_rtx, bool))
-{
-  if (! flag_strict_aliasing)
-    return NULL_RTX;
-
-  if (MEM_ALIAS_SET (mem2)
-      && MEM_SCALAR_P (mem1) && MEM_IN_STRUCT_P (mem2)
-      && !varies_p (mem1_addr, 1) && varies_p (mem2_addr, 1))
-    /* MEM1 is a scalar at a fixed address; MEM2 is a struct at a
-       varying address. */
-    return mem1;
-
-  if (MEM_ALIAS_SET (mem1)
-      && MEM_IN_STRUCT_P (mem1) && MEM_SCALAR_P (mem2)
-      && varies_p (mem1_addr, 1) && !varies_p (mem2_addr, 1))
-    /* MEM2 is a scalar at a fixed address; MEM1 is a struct at a
-       varying address. */
-    return mem2;
-
-  return NULL_RTX;
-}
-
 /* Returns nonzero if something about the mode or address format MEM1
    indicates that it might well alias *anything*. */
 
@@ -2391,8 +2354,6 @@ nonoverlapping_memrefs_p (const_rtx x, const_rtx y, bool loop_invariant)
 /* Helper for true_dependence and canon_true_dependence.
    Checks for true dependence: X is read after store in MEM takes place.
 
-   VARIES is the function that should be used as rtx_varies function.
-
    If MEM_CANONICALIZED is FALSE, then X_ADDR and MEM_ADDR should be
    NULL_RTX, and the canonical addresses of MEM and X are both computed
    here. If MEM_CANONICALIZED, then MEM must be already canonicalized.
@@ -2403,8 +2364,7 @@ nonoverlapping_memrefs_p (const_rtx x, const_rtx y, bool loop_invariant)
 
 static int
 true_dependence_1 (const_rtx mem, enum machine_mode mem_mode, rtx mem_addr,
-                   const_rtx x, rtx x_addr, bool (*varies) (const_rtx, bool),
-                   bool mem_canonicalized)
+                   const_rtx x, rtx x_addr, bool mem_canonicalized)
 {
   rtx base;
   int ret;
@@ -2496,21 +2456,16 @@ true_dependence_1 (const_rtx mem, enum machine_mode mem_mode, rtx mem_addr,
   if (mem_mode == BLKmode || GET_MODE (x) == BLKmode)
     return 1;
 
-  if (fixed_scalar_and_varying_struct_p (mem, x, mem_addr, x_addr, varies))
-    return 0;
-
   return rtx_refs_may_alias_p (x, mem, true);
 }
 
 /* True dependence: X is read after store in MEM takes place. */
 
 int
-true_dependence (const_rtx mem, enum machine_mode mem_mode, const_rtx x,
-                 bool (*varies) (const_rtx, bool))
+true_dependence (const_rtx mem, enum machine_mode mem_mode, const_rtx x)
 {
   return true_dependence_1 (mem, mem_mode, NULL_RTX,
-                            x, NULL_RTX, varies,
-                            /*mem_canonicalized=*/false);
+                            x, NULL_RTX, /*mem_canonicalized=*/false);
 }
 
 /* Canonical true dependence: X is read after store in MEM takes place.
@@ -2521,11 +2476,10 @@ true_dependence (const_rtx mem, enum machine_mode mem_mode, const_rtx x,
 
 int
 canon_true_dependence (const_rtx mem, enum machine_mode mem_mode, rtx mem_addr,
-                       const_rtx x, rtx x_addr, bool (*varies) (const_rtx, bool))
+                       const_rtx x, rtx x_addr)
 {
   return true_dependence_1 (mem, mem_mode, mem_addr,
-                            x, x_addr, varies,
-                            /*mem_canonicalized=*/true);
+                            x, x_addr, /*mem_canonicalized=*/true);
 }
 
 /* Returns nonzero if a write to X might alias a previous read from
@@ -2535,7 +2489,6 @@ static int
 write_dependence_p (const_rtx mem, const_rtx x, int writep)
 {
   rtx x_addr, mem_addr;
-  const_rtx fixed_scalar;
   rtx base;
   int ret;
 
@@ -2598,14 +2551,6 @@ write_dependence_p (const_rtx mem, const_rtx x, int writep)
   if (nonoverlapping_memrefs_p (x, mem, false))
     return 0;
 
-  fixed_scalar
-    = fixed_scalar_and_varying_struct_p (mem, x, mem_addr, x_addr,
-                                         rtx_addr_varies_p);
-
-  if ((fixed_scalar == mem && !aliases_everything_p (x))
-      || (fixed_scalar == x && !aliases_everything_p (mem)))
-    return 0;
-
   return rtx_refs_may_alias_p (x, mem, false);
 }
 
@@ -2687,10 +2632,6 @@ may_alias_p (const_rtx mem, const_rtx x)
   if (GET_CODE (mem_addr) == AND)
     return 1;
 
-  if (fixed_scalar_and_varying_struct_p (mem, x, mem_addr, x_addr,
-                                         rtx_addr_varies_p))
-    return 0;
-
   /* TBAA not valid for loop_invarint */
   return rtx_refs_may_alias_p (x, mem, false);
 }
gcc/config/frv/frv.c
@@ -7229,8 +7229,7 @@ frv_registers_conflict_p_1 (rtx *x, void *data)
     for (i = 0; i < frv_packet.num_mems; i++)
       if (frv_regstate_conflict_p (frv_packet.mems[i].cond, cond))
         {
-          if (true_dependence (frv_packet.mems[i].mem, VOIDmode,
-                               *x, rtx_varies_p))
+          if (true_dependence (frv_packet.mems[i].mem, VOIDmode, *x))
             return 1;
 
           if (output_dependence (frv_packet.mems[i].mem, *x))
65  gcc/cse.c
@@ -573,7 +573,6 @@ static struct table_elt *insert (rtx, struct table_elt *, unsigned,
                                 enum machine_mode);
 static void merge_equiv_classes (struct table_elt *, struct table_elt *);
 static void invalidate (rtx, enum machine_mode);
-static bool cse_rtx_varies_p (const_rtx, bool);
 static void remove_invalid_refs (unsigned int);
 static void remove_invalid_subreg_refs (unsigned int, unsigned int,
                                         enum machine_mode);
@@ -1846,8 +1845,7 @@ check_dependence (rtx *x, void *data)
 {
   struct check_dependence_data *d = (struct check_dependence_data *) data;
   if (*x && MEM_P (*x))
-    return canon_true_dependence (d->exp, d->mode, d->addr, *x, NULL_RTX,
-                                  cse_rtx_varies_p);
+    return canon_true_dependence (d->exp, d->mode, d->addr, *x, NULL_RTX);
   else
     return 0;
 }
@@ -2794,67 +2792,6 @@ exp_equiv_p (const_rtx x, const_rtx y, int validate, bool for_gcse)
   return 1;
 }
 
-/* Return 1 if X has a value that can vary even between two
-   executions of the program. 0 means X can be compared reliably
-   against certain constants or near-constants. */
-
-static bool
-cse_rtx_varies_p (const_rtx x, bool from_alias)
-{
-  /* We need not check for X and the equivalence class being of the same
-     mode because if X is equivalent to a constant in some mode, it
-     doesn't vary in any mode. */
-
-  if (REG_P (x)
-      && REGNO_QTY_VALID_P (REGNO (x)))
-    {
-      int x_q = REG_QTY (REGNO (x));
-      struct qty_table_elem *x_ent = &qty_table[x_q];
-
-      if (GET_MODE (x) == x_ent->mode
-          && x_ent->const_rtx != NULL_RTX)
-        return 0;
-    }
-
-  if (GET_CODE (x) == PLUS
-      && CONST_INT_P (XEXP (x, 1))
-      && REG_P (XEXP (x, 0))
-      && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
-    {
-      int x0_q = REG_QTY (REGNO (XEXP (x, 0)));
-      struct qty_table_elem *x0_ent = &qty_table[x0_q];
-
-      if ((GET_MODE (XEXP (x, 0)) == x0_ent->mode)
-          && x0_ent->const_rtx != NULL_RTX)
-        return 0;
-    }
-
-  /* This can happen as the result of virtual register instantiation, if
-     the initial constant is too large to be a valid address. This gives
-     us a three instruction sequence, load large offset into a register,
-     load fp minus a constant into a register, then a MEM which is the
-     sum of the two `constant' registers. */
-  if (GET_CODE (x) == PLUS
-      && REG_P (XEXP (x, 0))
-      && REG_P (XEXP (x, 1))
-      && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0)))
-      && REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
-    {
-      int x0_q = REG_QTY (REGNO (XEXP (x, 0)));
-      int x1_q = REG_QTY (REGNO (XEXP (x, 1)));
-      struct qty_table_elem *x0_ent = &qty_table[x0_q];
-      struct qty_table_elem *x1_ent = &qty_table[x1_q];
-
-      if ((GET_MODE (XEXP (x, 0)) == x0_ent->mode)
-          && x0_ent->const_rtx != NULL_RTX
-          && (GET_MODE (XEXP (x, 1)) == x1_ent->mode)
-          && x1_ent->const_rtx != NULL_RTX)
-        return 0;
-    }
-
-  return rtx_varies_p (x, from_alias);
-}
-
 /* Subroutine of canon_reg. Pass *XLOC through canon_reg, and validate
    the result if necessary. INSN is as for canon_reg. */
 
18  gcc/cselib.c
@@ -2172,20 +2172,6 @@ cselib_invalidate_regno (unsigned int regno, enum machine_mode mode)
     }
 }
 
-/* Return 1 if X has a value that can vary even between two
-   executions of the program. 0 means X can be compared reliably
-   against certain constants or near-constants. */
-
-static bool
-cselib_rtx_varies_p (const_rtx x ATTRIBUTE_UNUSED, bool from_alias ATTRIBUTE_UNUSED)
-{
-  /* We actually don't need to verify very hard. This is because
-     if X has actually changed, we invalidate the memory anyway,
-     so assume that all common memory addresses are
-     invariant. */
-  return 0;
-}
-
 /* Invalidate any locations in the table which are changed because of a
    store to MEM_RTX. If this is called because of a non-const call
    instruction, MEM_RTX is (mem:BLK const0_rtx). */
@@ -2222,8 +2208,8 @@ cselib_invalidate_mem (rtx mem_rtx)
              continue;
            }
          if (num_mems < PARAM_VALUE (PARAM_MAX_CSELIB_MEMORY_LOCATIONS)
-             && ! canon_true_dependence (mem_rtx, GET_MODE (mem_rtx), mem_addr,
-                                         x, NULL_RTX, cselib_rtx_varies_p))
+             && ! canon_true_dependence (mem_rtx, GET_MODE (mem_rtx),
+                                         mem_addr, x, NULL_RTX))
            {
              has_mem = true;
              num_mems++;
11  gcc/dse.c
@@ -1682,7 +1682,7 @@ record_store (rtx body, bb_info_t bb_info)
          if (canon_true_dependence (s_info->mem,
                                     GET_MODE (s_info->mem),
                                     s_info->mem_addr,
-                                    mem, mem_addr, rtx_varies_p))
+                                    mem, mem_addr))
            {
              s_info->rhs = NULL;
              s_info->const_rhs = NULL;
@@ -2279,7 +2279,7 @@ check_mem_read_rtx (rtx *loc, void *data)
            = canon_true_dependence (store_info->mem,
                                     GET_MODE (store_info->mem),
                                     store_info->mem_addr,
-                                    mem, mem_addr, rtx_varies_p);
+                                    mem, mem_addr);
 
          else if (group_id == store_info->group_id)
            {
@@ -2290,7 +2290,7 @@ check_mem_read_rtx (rtx *loc, void *data)
                = canon_true_dependence (store_info->mem,
                                         GET_MODE (store_info->mem),
                                         store_info->mem_addr,
-                                        mem, mem_addr, rtx_varies_p);
+                                        mem, mem_addr);
 
              /* If this read is just reading back something that we just
                 stored, rewrite the read. */
@@ -2377,7 +2377,7 @@ check_mem_read_rtx (rtx *loc, void *data)
            remove = canon_true_dependence (store_info->mem,
                                            GET_MODE (store_info->mem),
                                            store_info->mem_addr,
-                                           mem, mem_addr, rtx_varies_p);
+                                           mem, mem_addr);
 
          if (remove)
            {
@@ -3276,8 +3276,7 @@ scan_reads_nospill (insn_info_t insn_info, bitmap gen, bitmap kill)
              && canon_true_dependence (group->base_mem,
                                        GET_MODE (group->base_mem),
                                        group->canon_base_addr,
-                                       read_info->mem, NULL_RTX,
-                                       rtx_varies_p))
+                                       read_info->mem, NULL_RTX))
            {
              if (kill)
                bitmap_ior_into (kill, group->group_kill);
gcc/expr.c
@@ -7200,8 +7200,7 @@ safe_from_p (const_rtx x, tree exp, int top_p)
         are memory and they conflict. */
      return ! (rtx_equal_p (x, exp_rtl)
                || (MEM_P (x) && MEM_P (exp_rtl)
-                   && true_dependence (exp_rtl, VOIDmode, x,
-                                       rtx_addr_varies_p)));
+                   && true_dependence (exp_rtl, VOIDmode, x)));
    }
 
  /* If we reach here, it is safe. */
gcc/gcse.c
@@ -968,7 +968,7 @@ mems_conflict_for_gcse_p (rtx dest, const_rtx setter ATTRIBUTE_UNUSED,
      return;
    }
 
-  if (true_dependence (dest, GET_MODE (dest), mci->mem, rtx_addr_varies_p))
+  if (true_dependence (dest, GET_MODE (dest), mci->mem))
    mci->conflict = true;
 }
 
@@ -1682,8 +1682,8 @@ compute_transp (const_rtx x, int indx, sbitmap *bmap)
          rtx dest = pair->dest;
          rtx dest_addr = pair->dest_addr;
 
-         if (canon_true_dependence (dest, GET_MODE (dest), dest_addr,
-                                    x, NULL_RTX, rtx_addr_varies_p))
+         if (canon_true_dependence (dest, GET_MODE (dest),
+                                    dest_addr, x, NULL_RTX))
            RESET_BIT (bmap[bb_index], indx);
        }
    }
gcc/ira.c
@@ -2335,7 +2335,7 @@ validate_equiv_mem_from_store (rtx dest, const_rtx set ATTRIBUTE_UNUSED,
  if ((REG_P (dest)
       && reg_overlap_mentioned_p (dest, equiv_mem))
      || (MEM_P (dest)
-         && true_dependence (dest, VOIDmode, equiv_mem, rtx_varies_p)))
+         && true_dependence (dest, VOIDmode, equiv_mem)))
    equiv_mem_modified = 1;
 }
 
@@ -2589,7 +2589,7 @@ memref_referenced_p (rtx memref, rtx x)
                                    reg_equiv[REGNO (x)].replacement));
 
    case MEM:
-     if (true_dependence (memref, VOIDmode, x, rtx_varies_p))
+     if (true_dependence (memref, VOIDmode, x))
        return 1;
      break;
 
gcc/postreload-gcse.c
@@ -589,8 +589,7 @@ find_mem_conflicts (rtx dest, const_rtx setter ATTRIBUTE_UNUSED,
  if (! MEM_P (dest))
    return;
 
-  if (true_dependence (dest, GET_MODE (dest), mem_op,
-                       rtx_addr_varies_p))
+  if (true_dependence (dest, GET_MODE (dest), mem_op))
    mems_conflict_p = 1;
 }
 
gcc/rtl.h
@@ -2602,10 +2602,10 @@ extern bool read_rtx (const char *, rtx *);
 
 /* In alias.c */
 extern rtx canon_rtx (rtx);
-extern int true_dependence (const_rtx, enum machine_mode, const_rtx, bool (*)(const_rtx, bool));
+extern int true_dependence (const_rtx, enum machine_mode, const_rtx);
 extern rtx get_addr (rtx);
-extern int canon_true_dependence (const_rtx, enum machine_mode, rtx, const_rtx,
-                                  rtx, bool (*)(const_rtx, bool));
+extern int canon_true_dependence (const_rtx, enum machine_mode, rtx,
+                                  const_rtx, rtx);
 extern int read_dependence (const_rtx, const_rtx);
 extern int anti_dependence (const_rtx, const_rtx);
 extern int output_dependence (const_rtx, const_rtx);
gcc/sched-deps.c
@@ -2636,8 +2636,7 @@ sched_analyze_2 (struct deps_desc *deps, rtx x, rtx insn)
        pending_mem = deps->pending_write_mems;
        while (pending)
          {
-           if (true_dependence (XEXP (pending_mem, 0), VOIDmode,
-                                t, rtx_varies_p)
+           if (true_dependence (XEXP (pending_mem, 0), VOIDmode, t)
                && ! sched_insns_conditions_mutex_p (insn,
                                                     XEXP (pending, 0)))
              note_mem_dep (t, XEXP (pending_mem, 0), XEXP (pending, 0),
gcc/store-motion.c
@@ -309,8 +309,7 @@ load_kills_store (const_rtx x, const_rtx store_pattern, int after)
  if (after)
    return anti_dependence (x, store_pattern);
  else
-   return true_dependence (store_pattern, GET_MODE (store_pattern), x,
-                           rtx_addr_varies_p);
+   return true_dependence (store_pattern, GET_MODE (store_pattern), x);
 }
 
 /* Go through the entire rtx X, looking for any loads which might alias