Mirror of git://gcc.gnu.org/git/gcc.git (synced 2025-04-05 08:00:26 +08:00)
tree-ssa-ccp.c (prop_value_d): Rename to ...
* tree-ssa-ccp.c (prop_value_d): Rename to ...
(ccp_prop_value_t): ... this one to avoid ODR violation; update uses.
* ipa-prop.c (struct type_change_info): Rename to ...
(prop_type_change_info): ... this; update uses.
* ggc-page.c (globals): Rename to ...
(static struct ggc_globals): ... this; update uses.
* tree-ssa-loop-im.c (mem_ref): Rename to ...
(im_mem_ref): ... this; update uses.
* ggc-common.c (loc_descriptor): Rename to ...
(ggc_loc_descriptor): ... this; update uses.
* lra-eliminations.c (elim_table): Rename to ...
(lra_elim_table): ... this; update uses.
* bitmap.c (output_info): Rename to ...
(bitmap_output_info): ... this; update uses.
* gcse.c (expr): Rename to ...
(gcse_expr): ... this; update uses.
(occr): Rename to ...
(gcse_occr): ... this; update uses.
* tree-ssa-copy.c (prop_value_d): Rename to ...
(prop_value_t): ... this.
* predict.c (block_info_def): Rename to ...
(block_info): ... this; update uses.
(edge_info_def): Rename to ...
(edge_info): ... this; update uses.
* profile.c (bb_info): Rename to ...
(bb_profile_info): ... this; update uses.
* alloc-pool.c (output_info): Rename to ...
(pool_output_info): ... this; update uses.
* ipa-cp.c (topo_info): Rename to ...
(ipa_topo_info): ... this; update uses.
* tree-nrv.c (nrv_data): Rename to ...
(nrv_data_t): ... this; update uses.
* ipa-split.c (bb_info): Rename to ...
(split_bb_info): ... this one.
* profile.h (edge_info): Rename to ...
(edge_profile_info): ... this one; update uses.
* dse.c (bb_info): Rename to ...
(dse_bb_info): ... this one; update uses.
* cprop.c (occr): Rename to ...
(cprop_occr): ... this one; update uses.
(expr): Rename to ...
(cprop_expr): ... this one; update uses.

From-SVN: r215480
parent 6863c41a15
commit 114783066e
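Context for the renames: GCC is now built as C++, so two unrelated passes that each define their own struct with the same name (expr, occr, bb_info, ...) violate the one-definition rule once the compiler is linked, and LTO's ODR checker can flag the mismatch. Below is a minimal sketch of the kind of clash these renames avoid; the file names, function names and layouts are hypothetical and not GCC code.

/* a.cc -- one translation unit with a file-local helper type.  */
struct expr
{
  int bitmap_index;		/* layout used by this pass only */
};
static int
size_in_a (void)
{
  return sizeof (struct expr);
}

/* b.cc -- a second translation unit reuses the same name with a
   different layout.  Each file compiles on its own, but linking both
   into one program (e.g. "g++ -flto -Wodr a.cc b.cc") leaves two
   conflicting definitions of ::expr -- an ODR violation that the LTO
   ODR checker is designed to catch.  Renaming the types per pass, as
   this commit does (gcse_expr, cprop_expr, ...), removes the clash.  */
struct expr
{
  double dest;
  double src;
};
static int
size_in_b (void)
{
  return sizeof (struct expr);
}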
gcc/ChangeLog
@@ -1,3 +1,48 @@
2014-09-22 Jan Hubicka <hubicka@ucw.cz>

* tree-ssa-ccp.c (prop_value_d): Rename to ...
(ccp_prop_value_t): ... this one to avoid ODR violation; update uses.
* ipa-prop.c (struct type_change_info): Rename to ...
(prop_type_change_info): ... this; update uses.
* ggc-page.c (globals): Rename to ...
(static struct ggc_globals): ... this; update uses.
* tree-ssa-loop-im.c (mem_ref): Rename to ...
(im_mem_ref): ... this; update uses.
* ggc-common.c (loc_descriptor): Rename to ...
(ggc_loc_descriptor): ... this; update uses.
* lra-eliminations.c (elim_table): Rename to ...
(lra_elim_table): ... this; update uses.
* bitmap.c (output_info): Rename to ...
(bitmap_output_info): ... this; update uses.
* gcse.c (expr): Rename to ...
(gcse_expr) ... this; update uses.
(occr): Rename to ...
(gcse_occr): .. this; update uses.
* tree-ssa-copy.c (prop_value_d): Rename to ...
(prop_value_t): ... this.
* predict.c (block_info_def): Rename to ...
(block_info): ... this; update uses.
(edge_info_def): Rename to ...
(edge_info): ... this; update uses.
* profile.c (bb_info): Rename to ...
(bb_profile_info): ... this; update uses.
* alloc-pool.c (output_info): Rename to ...
(pool_output_info): ... this; update uses.
* ipa-cp.c (topo_info): Rename to ..
(ipa_topo_info): ... this; update uses.
* tree-nrv.c (nrv_data): Rename to ...
(nrv_data_t): ... this; update uses.
* ipa-split.c (bb_info): Rename to ...
(split_bb_info): ... this one.
* profile.h (edge_info): Rename to ...
(edge_profile_info): ... this one; update uses.
* dse.c (bb_info): Rename to ...
(dse_bb_info): ... this one; update uses.
* cprop.c (occr): Rename to ...
(cprop_occr): ... this one; update uses.
(expr): Rename to ...
(cprop_expr): ... this one; update uses.

2014-09-22 Jason Merrill <jason@redhat.com>

* Makefile.in (check-parallel-%): Add @.
gcc/alloc-pool.c
@@ -339,7 +339,7 @@ pool_free (alloc_pool pool, void *ptr)
/* Output per-alloc_pool statistics. */

/* Used to accumulate statistics about alloc_pool sizes. */
struct output_info
struct pool_output_info
{
unsigned long total_created;
unsigned long total_allocated;
@@ -350,7 +350,7 @@ struct output_info
bool
print_alloc_pool_statistics (const char *const &name,
const alloc_pool_descriptor &d,
struct output_info *i)
struct pool_output_info *i)
{
if (d.allocated)
{
@@ -369,7 +369,7 @@ print_alloc_pool_statistics (const char *const &name,
void
dump_alloc_pool_statistics (void)
{
struct output_info info;
struct pool_output_info info;

if (! GATHER_STATISTICS)
return;
@@ -381,7 +381,7 @@ dump_alloc_pool_statistics (void)
fprintf (stderr, "--------------------------------------------------------------------------------------------------------------\n");
info.total_created = 0;
info.total_allocated = 0;
alloc_pool_hash->traverse <struct output_info *,
alloc_pool_hash->traverse <struct pool_output_info *,
print_alloc_pool_statistics> (&info);
fprintf (stderr, "--------------------------------------------------------------------------------------------------------------\n");
fprintf (stderr, "%-22s %7lu %10lu\n",
gcc/bitmap.c
@@ -2143,7 +2143,7 @@ bitmap_print (FILE *file, const_bitmap head, const char *prefix,


/* Used to accumulate statistics about bitmap sizes. */
struct output_info
struct bitmap_output_info
{
uint64_t size;
uint64_t count;
@@ -2152,7 +2152,7 @@ struct output_info
/* Called via hash_table::traverse. Output bitmap descriptor pointed out by
SLOT and update statistics. */
int
print_statistics (bitmap_descriptor_d **slot, output_info *i)
print_statistics (bitmap_descriptor_d **slot, bitmap_output_info *i)
{
bitmap_descriptor d = *slot;
char s[4096];
@@ -2181,7 +2181,7 @@ print_statistics (bitmap_descriptor_d **slot, output_info *i)
void
dump_bitmap_statistics (void)
{
struct output_info info;
struct bitmap_output_info info;

if (! GATHER_STATISTICS)
return;
@@ -2197,7 +2197,7 @@ dump_bitmap_statistics (void)
fprintf (stderr, "---------------------------------------------------------------------------------\n");
info.count = 0;
info.size = 0;
bitmap_desc_hash->traverse <output_info *, print_statistics> (&info);
bitmap_desc_hash->traverse <bitmap_output_info *, print_statistics> (&info);
fprintf (stderr, "---------------------------------------------------------------------------------\n");
fprintf (stderr,
"%-41s %9"PRId64" %15"PRId64"\n",
gcc/cprop.c (66 changed lines)
@ -55,19 +55,19 @@ static struct obstack cprop_obstack;
|
||||
There is one per basic block. If a pattern appears more than once the
|
||||
last appearance is used. */
|
||||
|
||||
struct occr
|
||||
struct cprop_occr
|
||||
{
|
||||
/* Next occurrence of this expression. */
|
||||
struct occr *next;
|
||||
struct cprop_occr *next;
|
||||
/* The insn that computes the expression. */
|
||||
rtx_insn *insn;
|
||||
};
|
||||
|
||||
typedef struct occr *occr_t;
|
||||
typedef struct cprop_occr *occr_t;
|
||||
|
||||
/* Hash table entry for assignment expressions. */
|
||||
|
||||
struct expr
|
||||
struct cprop_expr
|
||||
{
|
||||
/* The expression (DEST := SRC). */
|
||||
rtx dest;
|
||||
@ -76,12 +76,12 @@ struct expr
|
||||
/* Index in the available expression bitmaps. */
|
||||
int bitmap_index;
|
||||
/* Next entry with the same hash. */
|
||||
struct expr *next_same_hash;
|
||||
struct cprop_expr *next_same_hash;
|
||||
/* List of available occurrence in basic blocks in the function.
|
||||
An "available occurrence" is one that is the last occurrence in the
|
||||
basic block and whose operands are not modified by following statements
|
||||
in the basic block [including this insn]. */
|
||||
struct occr *avail_occr;
|
||||
struct cprop_occr *avail_occr;
|
||||
};
|
||||
|
||||
/* Hash table for copy propagation expressions.
|
||||
@ -97,7 +97,7 @@ struct hash_table_d
|
||||
{
|
||||
/* The table itself.
|
||||
This is an array of `set_hash_table_size' elements. */
|
||||
struct expr **table;
|
||||
struct cprop_expr **table;
|
||||
|
||||
/* Size of the hash table, in elements. */
|
||||
unsigned int size;
|
||||
@ -184,8 +184,8 @@ insert_set_in_table (rtx dest, rtx src, rtx_insn *insn,
|
||||
{
|
||||
bool found = false;
|
||||
unsigned int hash;
|
||||
struct expr *cur_expr, *last_expr = NULL;
|
||||
struct occr *cur_occr;
|
||||
struct cprop_expr *cur_expr, *last_expr = NULL;
|
||||
struct cprop_occr *cur_occr;
|
||||
|
||||
hash = hash_mod (REGNO (dest), table->size);
|
||||
|
||||
@ -203,8 +203,8 @@ insert_set_in_table (rtx dest, rtx src, rtx_insn *insn,
|
||||
|
||||
if (! found)
|
||||
{
|
||||
cur_expr = GOBNEW (struct expr);
|
||||
bytes_used += sizeof (struct expr);
|
||||
cur_expr = GOBNEW (struct cprop_expr);
|
||||
bytes_used += sizeof (struct cprop_expr);
|
||||
if (table->table[hash] == NULL)
|
||||
/* This is the first pattern that hashed to this index. */
|
||||
table->table[hash] = cur_expr;
|
||||
@ -237,8 +237,8 @@ insert_set_in_table (rtx dest, rtx src, rtx_insn *insn,
|
||||
else
|
||||
{
|
||||
/* First occurrence of this expression in this basic block. */
|
||||
cur_occr = GOBNEW (struct occr);
|
||||
bytes_used += sizeof (struct occr);
|
||||
cur_occr = GOBNEW (struct cprop_occr);
|
||||
bytes_used += sizeof (struct cprop_occr);
|
||||
cur_occr->insn = insn;
|
||||
cur_occr->next = cur_expr->avail_occr;
|
||||
cur_expr->avail_occr = cur_occr;
|
||||
@ -335,11 +335,11 @@ dump_hash_table (FILE *file, const char *name, struct hash_table_d *table)
|
||||
{
|
||||
int i;
|
||||
/* Flattened out table, so it's printed in proper order. */
|
||||
struct expr **flat_table;
|
||||
struct cprop_expr **flat_table;
|
||||
unsigned int *hash_val;
|
||||
struct expr *expr;
|
||||
struct cprop_expr *expr;
|
||||
|
||||
flat_table = XCNEWVEC (struct expr *, table->n_elems);
|
||||
flat_table = XCNEWVEC (struct cprop_expr *, table->n_elems);
|
||||
hash_val = XNEWVEC (unsigned int, table->n_elems);
|
||||
|
||||
for (i = 0; i < (int) table->size; i++)
|
||||
@ -451,8 +451,8 @@ alloc_hash_table (struct hash_table_d *table)
|
||||
Making it an odd number is simplest for now.
|
||||
??? Later take some measurements. */
|
||||
table->size |= 1;
|
||||
n = table->size * sizeof (struct expr *);
|
||||
table->table = XNEWVAR (struct expr *, n);
|
||||
n = table->size * sizeof (struct cprop_expr *);
|
||||
table->table = XNEWVAR (struct cprop_expr *, n);
|
||||
}
|
||||
|
||||
/* Free things allocated by alloc_hash_table. */
|
||||
@ -471,7 +471,7 @@ compute_hash_table (struct hash_table_d *table)
|
||||
{
|
||||
/* Initialize count of number of entries in hash table. */
|
||||
table->n_elems = 0;
|
||||
memset (table->table, 0, table->size * sizeof (struct expr *));
|
||||
memset (table->table, 0, table->size * sizeof (struct cprop_expr *));
|
||||
|
||||
compute_hash_table_work (table);
|
||||
}
|
||||
@ -481,11 +481,11 @@ compute_hash_table (struct hash_table_d *table)
|
||||
/* Lookup REGNO in the set TABLE. The result is a pointer to the
|
||||
table entry, or NULL if not found. */
|
||||
|
||||
static struct expr *
|
||||
static struct cprop_expr *
|
||||
lookup_set (unsigned int regno, struct hash_table_d *table)
|
||||
{
|
||||
unsigned int hash = hash_mod (regno, table->size);
|
||||
struct expr *expr;
|
||||
struct cprop_expr *expr;
|
||||
|
||||
expr = table->table[hash];
|
||||
|
||||
@ -497,8 +497,8 @@ lookup_set (unsigned int regno, struct hash_table_d *table)
|
||||
|
||||
/* Return the next entry for REGNO in list EXPR. */
|
||||
|
||||
static struct expr *
|
||||
next_set (unsigned int regno, struct expr *expr)
|
||||
static struct cprop_expr *
|
||||
next_set (unsigned int regno, struct cprop_expr *expr)
|
||||
{
|
||||
do
|
||||
expr = expr->next_same_hash;
|
||||
@ -599,13 +599,13 @@ compute_local_properties (sbitmap *kill, sbitmap *comp,
|
||||
|
||||
for (i = 0; i < table->size; i++)
|
||||
{
|
||||
struct expr *expr;
|
||||
struct cprop_expr *expr;
|
||||
|
||||
for (expr = table->table[i]; expr != NULL; expr = expr->next_same_hash)
|
||||
{
|
||||
int indx = expr->bitmap_index;
|
||||
df_ref def;
|
||||
struct occr *occr;
|
||||
struct cprop_occr *occr;
|
||||
|
||||
/* For each definition of the destination pseudo-reg, the expression
|
||||
is killed in the block where the definition is. */
|
||||
@ -799,12 +799,12 @@ try_replace_reg (rtx from, rtx to, rtx_insn *insn)
|
||||
/* Find a set of REGNOs that are available on entry to INSN's block. Return
|
||||
NULL no such set is found. */
|
||||
|
||||
static struct expr *
|
||||
static struct cprop_expr *
|
||||
find_avail_set (int regno, rtx_insn *insn)
|
||||
{
|
||||
/* SET1 contains the last set found that can be returned to the caller for
|
||||
use in a substitution. */
|
||||
struct expr *set1 = 0;
|
||||
struct cprop_expr *set1 = 0;
|
||||
|
||||
/* Loops are not possible here. To get a loop we would need two sets
|
||||
available at the start of the block containing INSN. i.e. we would
|
||||
@ -818,7 +818,7 @@ find_avail_set (int regno, rtx_insn *insn)
|
||||
while (1)
|
||||
{
|
||||
rtx src;
|
||||
struct expr *set = lookup_set (regno, &set_hash_table);
|
||||
struct cprop_expr *set = lookup_set (regno, &set_hash_table);
|
||||
|
||||
/* Find a set that is available at the start of the block
|
||||
which contains INSN. */
|
||||
@ -1040,7 +1040,7 @@ retry:
|
||||
rtx reg_used = reg_use_table[i];
|
||||
unsigned int regno = REGNO (reg_used);
|
||||
rtx src;
|
||||
struct expr *set;
|
||||
struct cprop_expr *set;
|
||||
|
||||
/* If the register has already been set in this block, there's
|
||||
nothing we can do. */
|
||||
@ -1429,15 +1429,15 @@ static int bypass_last_basic_block;
|
||||
block BB. Return NULL if no such set is found. Based heavily upon
|
||||
find_avail_set. */
|
||||
|
||||
static struct expr *
|
||||
static struct cprop_expr *
|
||||
find_bypass_set (int regno, int bb)
|
||||
{
|
||||
struct expr *result = 0;
|
||||
struct cprop_expr *result = 0;
|
||||
|
||||
for (;;)
|
||||
{
|
||||
rtx src;
|
||||
struct expr *set = lookup_set (regno, &set_hash_table);
|
||||
struct cprop_expr *set = lookup_set (regno, &set_hash_table);
|
||||
|
||||
while (set)
|
||||
{
|
||||
@ -1561,7 +1561,7 @@ bypass_block (basic_block bb, rtx_insn *setcc, rtx_insn *jump)
|
||||
rtx reg_used = reg_use_table[i];
|
||||
unsigned int regno = REGNO (reg_used);
|
||||
basic_block dest, old_dest;
|
||||
struct expr *set;
|
||||
struct cprop_expr *set;
|
||||
rtx src, new_rtx;
|
||||
|
||||
set = find_bypass_set (regno, e->src->index);
|
||||
|
gcc/dse.c
@@ -421,7 +421,7 @@ static alloc_pool insn_info_pool;
static insn_info_t active_local_stores;
static int active_local_stores_len;

struct bb_info
struct dse_bb_info
{

/* Pointer to the insn info for the last insn in the block. These
@@ -479,7 +479,7 @@ struct bb_info
bitmap regs_live;
};

typedef struct bb_info *bb_info_t;
typedef struct dse_bb_info *bb_info_t;
static alloc_pool bb_info_pool;

/* Table to hold all bb_infos. */
@@ -757,7 +757,7 @@ dse_step0 (void)
sizeof (struct insn_info), 100);
bb_info_pool
= create_alloc_pool ("bb_info_pool",
sizeof (struct bb_info), 100);
sizeof (struct dse_bb_info), 100);
rtx_group_info_pool
= create_alloc_pool ("rtx_group_info_pool",
sizeof (struct group_info), 100);
@@ -2700,7 +2700,7 @@ dse_step1 (void)
insn_info_t ptr;
bb_info_t bb_info = (bb_info_t) pool_alloc (bb_info_pool);

memset (bb_info, 0, sizeof (struct bb_info));
memset (bb_info, 0, sizeof (struct dse_bb_info));
bitmap_set_bit (all_blocks, bb->index);
bb_info->regs_live = regs_live;
gcc/gcse.c (168 changed lines)
@ -256,25 +256,25 @@ static struct obstack gcse_obstack;
|
||||
|
||||
/* Hash table of expressions. */
|
||||
|
||||
struct expr
|
||||
struct gcse_expr
|
||||
{
|
||||
/* The expression. */
|
||||
rtx expr;
|
||||
/* Index in the available expression bitmaps. */
|
||||
int bitmap_index;
|
||||
/* Next entry with the same hash. */
|
||||
struct expr *next_same_hash;
|
||||
struct gcse_expr *next_same_hash;
|
||||
/* List of anticipatable occurrences in basic blocks in the function.
|
||||
An "anticipatable occurrence" is one that is the first occurrence in the
|
||||
basic block, the operands are not modified in the basic block prior
|
||||
to the occurrence and the output is not used between the start of
|
||||
the block and the occurrence. */
|
||||
struct occr *antic_occr;
|
||||
struct gcse_occr *antic_occr;
|
||||
/* List of available occurrence in basic blocks in the function.
|
||||
An "available occurrence" is one that is the last occurrence in the
|
||||
basic block and the operands are not modified by following statements in
|
||||
the basic block [including this insn]. */
|
||||
struct occr *avail_occr;
|
||||
struct gcse_occr *avail_occr;
|
||||
/* Non-null if the computation is PRE redundant.
|
||||
The value is the newly created pseudo-reg to record a copy of the
|
||||
expression in all the places that reach the redundant copy. */
|
||||
@ -291,10 +291,10 @@ struct expr
|
||||
There is one per basic block. If a pattern appears more than once the
|
||||
last appearance is used [or first for anticipatable expressions]. */
|
||||
|
||||
struct occr
|
||||
struct gcse_occr
|
||||
{
|
||||
/* Next occurrence of this expression. */
|
||||
struct occr *next;
|
||||
struct gcse_occr *next;
|
||||
/* The insn that computes the expression. */
|
||||
rtx_insn *insn;
|
||||
/* Nonzero if this [anticipatable] occurrence has been deleted. */
|
||||
@ -306,7 +306,7 @@ struct occr
|
||||
char copied_p;
|
||||
};
|
||||
|
||||
typedef struct occr *occr_t;
|
||||
typedef struct gcse_occr *occr_t;
|
||||
|
||||
/* Expression hash tables.
|
||||
Each hash table is an array of buckets.
|
||||
@ -317,11 +317,11 @@ typedef struct occr *occr_t;
|
||||
[one could build a mapping table without holes afterwards though].
|
||||
Someday I'll perform the computation and figure it out. */
|
||||
|
||||
struct hash_table_d
|
||||
struct gcse_hash_table_d
|
||||
{
|
||||
/* The table itself.
|
||||
This is an array of `expr_hash_table_size' elements. */
|
||||
struct expr **table;
|
||||
struct gcse_expr **table;
|
||||
|
||||
/* Size of the hash table, in elements. */
|
||||
unsigned int size;
|
||||
@ -331,7 +331,7 @@ struct hash_table_d
|
||||
};
|
||||
|
||||
/* Expression hash table. */
|
||||
static struct hash_table_d expr_hash_table;
|
||||
static struct gcse_hash_table_d expr_hash_table;
|
||||
|
||||
/* This is a list of expressions which are MEMs and will be used by load
|
||||
or store motion.
|
||||
@ -344,7 +344,7 @@ static struct hash_table_d expr_hash_table;
|
||||
|
||||
struct ls_expr
|
||||
{
|
||||
struct expr * expr; /* Gcse expression reference for LM. */
|
||||
struct gcse_expr * expr; /* Gcse expression reference for LM. */
|
||||
rtx pattern; /* Pattern of this mem. */
|
||||
rtx pattern_regs; /* List of registers mentioned by the mem. */
|
||||
rtx_insn_list *loads; /* INSN list of loads seen. */
|
||||
@ -462,38 +462,38 @@ static void *gcalloc (size_t, size_t) ATTRIBUTE_MALLOC;
|
||||
static void *gcse_alloc (unsigned long);
|
||||
static void alloc_gcse_mem (void);
|
||||
static void free_gcse_mem (void);
|
||||
static void hash_scan_insn (rtx_insn *, struct hash_table_d *);
|
||||
static void hash_scan_set (rtx, rtx_insn *, struct hash_table_d *);
|
||||
static void hash_scan_clobber (rtx, rtx_insn *, struct hash_table_d *);
|
||||
static void hash_scan_call (rtx, rtx_insn *, struct hash_table_d *);
|
||||
static void hash_scan_insn (rtx_insn *, struct gcse_hash_table_d *);
|
||||
static void hash_scan_set (rtx, rtx_insn *, struct gcse_hash_table_d *);
|
||||
static void hash_scan_clobber (rtx, rtx_insn *, struct gcse_hash_table_d *);
|
||||
static void hash_scan_call (rtx, rtx_insn *, struct gcse_hash_table_d *);
|
||||
static int want_to_gcse_p (rtx, int *);
|
||||
static int oprs_unchanged_p (const_rtx, const rtx_insn *, int);
|
||||
static int oprs_anticipatable_p (const_rtx, const rtx_insn *);
|
||||
static int oprs_available_p (const_rtx, const rtx_insn *);
|
||||
static void insert_expr_in_table (rtx, enum machine_mode, rtx_insn *, int, int,
|
||||
int, struct hash_table_d *);
|
||||
int, struct gcse_hash_table_d *);
|
||||
static unsigned int hash_expr (const_rtx, enum machine_mode, int *, int);
|
||||
static void record_last_reg_set_info (rtx, int);
|
||||
static void record_last_mem_set_info (rtx_insn *);
|
||||
static void record_last_set_info (rtx, const_rtx, void *);
|
||||
static void compute_hash_table (struct hash_table_d *);
|
||||
static void alloc_hash_table (struct hash_table_d *);
|
||||
static void free_hash_table (struct hash_table_d *);
|
||||
static void compute_hash_table_work (struct hash_table_d *);
|
||||
static void dump_hash_table (FILE *, const char *, struct hash_table_d *);
|
||||
static void compute_hash_table (struct gcse_hash_table_d *);
|
||||
static void alloc_hash_table (struct gcse_hash_table_d *);
|
||||
static void free_hash_table (struct gcse_hash_table_d *);
|
||||
static void compute_hash_table_work (struct gcse_hash_table_d *);
|
||||
static void dump_hash_table (FILE *, const char *, struct gcse_hash_table_d *);
|
||||
static void compute_transp (const_rtx, int, sbitmap *);
|
||||
static void compute_local_properties (sbitmap *, sbitmap *, sbitmap *,
|
||||
struct hash_table_d *);
|
||||
struct gcse_hash_table_d *);
|
||||
static void mems_conflict_for_gcse_p (rtx, const_rtx, void *);
|
||||
static int load_killed_in_block_p (const_basic_block, int, const_rtx, int);
|
||||
static void canon_list_insert (rtx, const_rtx, void *);
|
||||
static void alloc_pre_mem (int, int);
|
||||
static void free_pre_mem (void);
|
||||
static struct edge_list *compute_pre_data (void);
|
||||
static int pre_expr_reaches_here_p (basic_block, struct expr *,
|
||||
static int pre_expr_reaches_here_p (basic_block, struct gcse_expr *,
|
||||
basic_block);
|
||||
static void insert_insn_end_basic_block (struct expr *, basic_block);
|
||||
static void pre_insert_copy_insn (struct expr *, rtx_insn *);
|
||||
static void insert_insn_end_basic_block (struct gcse_expr *, basic_block);
|
||||
static void pre_insert_copy_insn (struct gcse_expr *, rtx_insn *);
|
||||
static void pre_insert_copies (void);
|
||||
static int pre_delete (void);
|
||||
static int pre_gcse (struct edge_list *);
|
||||
@ -503,16 +503,16 @@ static void alloc_code_hoist_mem (int, int);
|
||||
static void free_code_hoist_mem (void);
|
||||
static void compute_code_hoist_vbeinout (void);
|
||||
static void compute_code_hoist_data (void);
|
||||
static int should_hoist_expr_to_dom (basic_block, struct expr *, basic_block,
|
||||
static int should_hoist_expr_to_dom (basic_block, struct gcse_expr *, basic_block,
|
||||
sbitmap, int, int *, enum reg_class,
|
||||
int *, bitmap, rtx_insn *);
|
||||
static int hoist_code (void);
|
||||
static enum reg_class get_regno_pressure_class (int regno, int *nregs);
|
||||
static enum reg_class get_pressure_class_and_nregs (rtx_insn *insn, int *nregs);
|
||||
static int one_code_hoisting_pass (void);
|
||||
static rtx_insn *process_insert_insn (struct expr *);
|
||||
static int pre_edge_insert (struct edge_list *, struct expr **);
|
||||
static int pre_expr_reaches_here_p_work (basic_block, struct expr *,
|
||||
static rtx_insn *process_insert_insn (struct gcse_expr *);
|
||||
static int pre_edge_insert (struct edge_list *, struct gcse_expr **);
|
||||
static int pre_expr_reaches_here_p_work (basic_block, struct gcse_expr *,
|
||||
basic_block, char *);
|
||||
static struct ls_expr * ldst_entry (rtx);
|
||||
static void free_ldst_entry (struct ls_expr *);
|
||||
@ -523,7 +523,7 @@ static int simple_mem (const_rtx);
|
||||
static void invalidate_any_buried_refs (rtx);
|
||||
static void compute_ld_motion_mems (void);
|
||||
static void trim_ld_motion_mems (void);
|
||||
static void update_ld_motion_stores (struct expr *);
|
||||
static void update_ld_motion_stores (struct gcse_expr *);
|
||||
static void clear_modify_mem_tables (void);
|
||||
static void free_modify_mem_tables (void);
|
||||
static rtx gcse_emit_move_after (rtx, rtx, rtx_insn *);
|
||||
@ -679,7 +679,7 @@ free_gcse_mem (void)
|
||||
|
||||
static void
|
||||
compute_local_properties (sbitmap *transp, sbitmap *comp, sbitmap *antloc,
|
||||
struct hash_table_d *table)
|
||||
struct gcse_hash_table_d *table)
|
||||
{
|
||||
unsigned int i;
|
||||
|
||||
@ -696,12 +696,12 @@ compute_local_properties (sbitmap *transp, sbitmap *comp, sbitmap *antloc,
|
||||
|
||||
for (i = 0; i < table->size; i++)
|
||||
{
|
||||
struct expr *expr;
|
||||
struct gcse_expr *expr;
|
||||
|
||||
for (expr = table->table[i]; expr != NULL; expr = expr->next_same_hash)
|
||||
{
|
||||
int indx = expr->bitmap_index;
|
||||
struct occr *occr;
|
||||
struct gcse_occr *occr;
|
||||
|
||||
/* The expression is transparent in this block if it is not killed.
|
||||
We start by assuming all are transparent [none are killed], and
|
||||
@ -1128,12 +1128,12 @@ expr_equiv_p (const_rtx x, const_rtx y)
|
||||
static void
|
||||
insert_expr_in_table (rtx x, enum machine_mode mode, rtx_insn *insn,
|
||||
int antic_p,
|
||||
int avail_p, int max_distance, struct hash_table_d *table)
|
||||
int avail_p, int max_distance, struct gcse_hash_table_d *table)
|
||||
{
|
||||
int found, do_not_record_p;
|
||||
unsigned int hash;
|
||||
struct expr *cur_expr, *last_expr = NULL;
|
||||
struct occr *antic_occr, *avail_occr;
|
||||
struct gcse_expr *cur_expr, *last_expr = NULL;
|
||||
struct gcse_occr *antic_occr, *avail_occr;
|
||||
|
||||
hash = hash_expr (x, mode, &do_not_record_p, table->size);
|
||||
|
||||
@ -1156,8 +1156,8 @@ insert_expr_in_table (rtx x, enum machine_mode mode, rtx_insn *insn,
|
||||
|
||||
if (! found)
|
||||
{
|
||||
cur_expr = GOBNEW (struct expr);
|
||||
bytes_used += sizeof (struct expr);
|
||||
cur_expr = GOBNEW (struct gcse_expr);
|
||||
bytes_used += sizeof (struct gcse_expr);
|
||||
if (table->table[hash] == NULL)
|
||||
/* This is the first pattern that hashed to this index. */
|
||||
table->table[hash] = cur_expr;
|
||||
@ -1194,8 +1194,8 @@ insert_expr_in_table (rtx x, enum machine_mode mode, rtx_insn *insn,
|
||||
else
|
||||
{
|
||||
/* First occurrence of this expression in this basic block. */
|
||||
antic_occr = GOBNEW (struct occr);
|
||||
bytes_used += sizeof (struct occr);
|
||||
antic_occr = GOBNEW (struct gcse_occr);
|
||||
bytes_used += sizeof (struct gcse_occr);
|
||||
antic_occr->insn = insn;
|
||||
antic_occr->next = cur_expr->antic_occr;
|
||||
antic_occr->deleted_p = 0;
|
||||
@ -1219,8 +1219,8 @@ insert_expr_in_table (rtx x, enum machine_mode mode, rtx_insn *insn,
|
||||
else
|
||||
{
|
||||
/* First occurrence of this expression in this basic block. */
|
||||
avail_occr = GOBNEW (struct occr);
|
||||
bytes_used += sizeof (struct occr);
|
||||
avail_occr = GOBNEW (struct gcse_occr);
|
||||
bytes_used += sizeof (struct gcse_occr);
|
||||
avail_occr->insn = insn;
|
||||
avail_occr->next = cur_expr->avail_occr;
|
||||
avail_occr->deleted_p = 0;
|
||||
@ -1232,7 +1232,7 @@ insert_expr_in_table (rtx x, enum machine_mode mode, rtx_insn *insn,
|
||||
/* Scan SET present in INSN and add an entry to the hash TABLE. */
|
||||
|
||||
static void
|
||||
hash_scan_set (rtx set, rtx_insn *insn, struct hash_table_d *table)
|
||||
hash_scan_set (rtx set, rtx_insn *insn, struct gcse_hash_table_d *table)
|
||||
{
|
||||
rtx src = SET_SRC (set);
|
||||
rtx dest = SET_DEST (set);
|
||||
@ -1352,14 +1352,14 @@ hash_scan_set (rtx set, rtx_insn *insn, struct hash_table_d *table)
|
||||
|
||||
static void
|
||||
hash_scan_clobber (rtx x ATTRIBUTE_UNUSED, rtx_insn *insn ATTRIBUTE_UNUSED,
|
||||
struct hash_table_d *table ATTRIBUTE_UNUSED)
|
||||
struct gcse_hash_table_d *table ATTRIBUTE_UNUSED)
|
||||
{
|
||||
/* Currently nothing to do. */
|
||||
}
|
||||
|
||||
static void
|
||||
hash_scan_call (rtx x ATTRIBUTE_UNUSED, rtx_insn *insn ATTRIBUTE_UNUSED,
|
||||
struct hash_table_d *table ATTRIBUTE_UNUSED)
|
||||
struct gcse_hash_table_d *table ATTRIBUTE_UNUSED)
|
||||
{
|
||||
/* Currently nothing to do. */
|
||||
}
|
||||
@ -1367,7 +1367,7 @@ hash_scan_call (rtx x ATTRIBUTE_UNUSED, rtx_insn *insn ATTRIBUTE_UNUSED,
|
||||
/* Process INSN and add hash table entries as appropriate. */
|
||||
|
||||
static void
|
||||
hash_scan_insn (rtx_insn *insn, struct hash_table_d *table)
|
||||
hash_scan_insn (rtx_insn *insn, struct gcse_hash_table_d *table)
|
||||
{
|
||||
rtx pat = PATTERN (insn);
|
||||
int i;
|
||||
@ -1401,15 +1401,15 @@ hash_scan_insn (rtx_insn *insn, struct hash_table_d *table)
|
||||
/* Dump the hash table TABLE to file FILE under the name NAME. */
|
||||
|
||||
static void
|
||||
dump_hash_table (FILE *file, const char *name, struct hash_table_d *table)
|
||||
dump_hash_table (FILE *file, const char *name, struct gcse_hash_table_d *table)
|
||||
{
|
||||
int i;
|
||||
/* Flattened out table, so it's printed in proper order. */
|
||||
struct expr **flat_table;
|
||||
struct gcse_expr **flat_table;
|
||||
unsigned int *hash_val;
|
||||
struct expr *expr;
|
||||
struct gcse_expr *expr;
|
||||
|
||||
flat_table = XCNEWVEC (struct expr *, table->n_elems);
|
||||
flat_table = XCNEWVEC (struct gcse_expr *, table->n_elems);
|
||||
hash_val = XNEWVEC (unsigned int, table->n_elems);
|
||||
|
||||
for (i = 0; i < (int) table->size; i++)
|
||||
@ -1553,7 +1553,7 @@ record_last_set_info (rtx dest, const_rtx setter ATTRIBUTE_UNUSED, void *data)
|
||||
TABLE is the table computed. */
|
||||
|
||||
static void
|
||||
compute_hash_table_work (struct hash_table_d *table)
|
||||
compute_hash_table_work (struct gcse_hash_table_d *table)
|
||||
{
|
||||
int i;
|
||||
|
||||
@ -1605,7 +1605,7 @@ compute_hash_table_work (struct hash_table_d *table)
|
||||
It is used to determine the number of buckets to use. */
|
||||
|
||||
static void
|
||||
alloc_hash_table (struct hash_table_d *table)
|
||||
alloc_hash_table (struct gcse_hash_table_d *table)
|
||||
{
|
||||
int n;
|
||||
|
||||
@ -1619,14 +1619,14 @@ alloc_hash_table (struct hash_table_d *table)
|
||||
Making it an odd number is simplest for now.
|
||||
??? Later take some measurements. */
|
||||
table->size |= 1;
|
||||
n = table->size * sizeof (struct expr *);
|
||||
table->table = GNEWVAR (struct expr *, n);
|
||||
n = table->size * sizeof (struct gcse_expr *);
|
||||
table->table = GNEWVAR (struct gcse_expr *, n);
|
||||
}
|
||||
|
||||
/* Free things allocated by alloc_hash_table. */
|
||||
|
||||
static void
|
||||
free_hash_table (struct hash_table_d *table)
|
||||
free_hash_table (struct gcse_hash_table_d *table)
|
||||
{
|
||||
free (table->table);
|
||||
}
|
||||
@ -1634,11 +1634,11 @@ free_hash_table (struct hash_table_d *table)
|
||||
/* Compute the expression hash table TABLE. */
|
||||
|
||||
static void
|
||||
compute_hash_table (struct hash_table_d *table)
|
||||
compute_hash_table (struct gcse_hash_table_d *table)
|
||||
{
|
||||
/* Initialize count of number of entries in hash table. */
|
||||
table->n_elems = 0;
|
||||
memset (table->table, 0, table->size * sizeof (struct expr *));
|
||||
memset (table->table, 0, table->size * sizeof (struct gcse_expr *));
|
||||
|
||||
compute_hash_table_work (table);
|
||||
}
|
||||
@ -1864,7 +1864,7 @@ static void
|
||||
prune_expressions (bool pre_p)
|
||||
{
|
||||
sbitmap prune_exprs;
|
||||
struct expr *expr;
|
||||
struct gcse_expr *expr;
|
||||
unsigned int ui;
|
||||
basic_block bb;
|
||||
|
||||
@ -2063,7 +2063,7 @@ compute_pre_data (void)
|
||||
the closest such expression. */
|
||||
|
||||
static int
|
||||
pre_expr_reaches_here_p_work (basic_block occr_bb, struct expr *expr,
|
||||
pre_expr_reaches_here_p_work (basic_block occr_bb, struct gcse_expr *expr,
|
||||
basic_block bb, char *visited)
|
||||
{
|
||||
edge pred;
|
||||
@ -2110,7 +2110,7 @@ pre_expr_reaches_here_p_work (basic_block occr_bb, struct expr *expr,
|
||||
memory allocated for that function is returned. */
|
||||
|
||||
static int
|
||||
pre_expr_reaches_here_p (basic_block occr_bb, struct expr *expr, basic_block bb)
|
||||
pre_expr_reaches_here_p (basic_block occr_bb, struct gcse_expr *expr, basic_block bb)
|
||||
{
|
||||
int rval;
|
||||
char *visited = XCNEWVEC (char, last_basic_block_for_fn (cfun));
|
||||
@ -2124,7 +2124,7 @@ pre_expr_reaches_here_p (basic_block occr_bb, struct expr *expr, basic_block bb)
|
||||
/* Generate RTL to copy an EXPR to its `reaching_reg' and return it. */
|
||||
|
||||
static rtx_insn *
|
||||
process_insert_insn (struct expr *expr)
|
||||
process_insert_insn (struct gcse_expr *expr)
|
||||
{
|
||||
rtx reg = expr->reaching_reg;
|
||||
/* Copy the expression to make sure we don't have any sharing issues. */
|
||||
@ -2159,7 +2159,7 @@ process_insert_insn (struct expr *expr)
|
||||
This is used by both the PRE and code hoisting. */
|
||||
|
||||
static void
|
||||
insert_insn_end_basic_block (struct expr *expr, basic_block bb)
|
||||
insert_insn_end_basic_block (struct gcse_expr *expr, basic_block bb)
|
||||
{
|
||||
rtx_insn *insn = BB_END (bb);
|
||||
rtx_insn *new_insn;
|
||||
@ -2259,7 +2259,7 @@ insert_insn_end_basic_block (struct expr *expr, basic_block bb)
|
||||
the expressions fully redundant. */
|
||||
|
||||
static int
|
||||
pre_edge_insert (struct edge_list *edge_list, struct expr **index_map)
|
||||
pre_edge_insert (struct edge_list *edge_list, struct gcse_expr **index_map)
|
||||
{
|
||||
int e, i, j, num_edges, set_size, did_insert = 0;
|
||||
sbitmap *inserted;
|
||||
@ -2286,8 +2286,8 @@ pre_edge_insert (struct edge_list *edge_list, struct expr **index_map)
|
||||
j++, insert >>= 1)
|
||||
if ((insert & 1) != 0 && index_map[j]->reaching_reg != NULL_RTX)
|
||||
{
|
||||
struct expr *expr = index_map[j];
|
||||
struct occr *occr;
|
||||
struct gcse_expr *expr = index_map[j];
|
||||
struct gcse_occr *occr;
|
||||
|
||||
/* Now look at each deleted occurrence of this expression. */
|
||||
for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
|
||||
@ -2356,7 +2356,7 @@ pre_edge_insert (struct edge_list *edge_list, struct expr **index_map)
|
||||
MEM <- reaching_reg. */
|
||||
|
||||
static void
|
||||
pre_insert_copy_insn (struct expr *expr, rtx_insn *insn)
|
||||
pre_insert_copy_insn (struct gcse_expr *expr, rtx_insn *insn)
|
||||
{
|
||||
rtx reg = expr->reaching_reg;
|
||||
int regno = REGNO (reg);
|
||||
@ -2448,9 +2448,9 @@ static void
|
||||
pre_insert_copies (void)
|
||||
{
|
||||
unsigned int i, added_copy;
|
||||
struct expr *expr;
|
||||
struct occr *occr;
|
||||
struct occr *avail;
|
||||
struct gcse_expr *expr;
|
||||
struct gcse_occr *occr;
|
||||
struct gcse_occr *avail;
|
||||
|
||||
/* For each available expression in the table, copy the result to
|
||||
`reaching_reg' if the expression reaches a deleted one.
|
||||
@ -2614,8 +2614,8 @@ pre_delete (void)
|
||||
{
|
||||
unsigned int i;
|
||||
int changed;
|
||||
struct expr *expr;
|
||||
struct occr *occr;
|
||||
struct gcse_expr *expr;
|
||||
struct gcse_occr *occr;
|
||||
|
||||
changed = 0;
|
||||
for (i = 0; i < expr_hash_table.size; i++)
|
||||
@ -2687,13 +2687,13 @@ pre_gcse (struct edge_list *edge_list)
|
||||
{
|
||||
unsigned int i;
|
||||
int did_insert, changed;
|
||||
struct expr **index_map;
|
||||
struct expr *expr;
|
||||
struct gcse_expr **index_map;
|
||||
struct gcse_expr *expr;
|
||||
|
||||
/* Compute a mapping from expression number (`bitmap_index') to
|
||||
hash table entry. */
|
||||
|
||||
index_map = XCNEWVEC (struct expr *, expr_hash_table.n_elems);
|
||||
index_map = XCNEWVEC (struct gcse_expr *, expr_hash_table.n_elems);
|
||||
for (i = 0; i < expr_hash_table.size; i++)
|
||||
for (expr = expr_hash_table.table[i]; expr; expr = expr->next_same_hash)
|
||||
index_map[expr->bitmap_index] = expr;
|
||||
@ -3042,7 +3042,7 @@ update_bb_reg_pressure (basic_block bb, rtx_insn *from)
|
||||
paths. */
|
||||
|
||||
static int
|
||||
should_hoist_expr_to_dom (basic_block expr_bb, struct expr *expr,
|
||||
should_hoist_expr_to_dom (basic_block expr_bb, struct gcse_expr *expr,
|
||||
basic_block bb, sbitmap visited, int distance,
|
||||
int *bb_size, enum reg_class pressure_class,
|
||||
int *nregs, bitmap hoisted_bbs, rtx_insn *from)
|
||||
@ -3150,8 +3150,8 @@ should_hoist_expr_to_dom (basic_block expr_bb, struct expr *expr,
|
||||
|
||||
/* Find occurrence in BB. */
|
||||
|
||||
static struct occr *
|
||||
find_occr_in_bb (struct occr *occr, basic_block bb)
|
||||
static struct gcse_occr *
|
||||
find_occr_in_bb (struct gcse_occr *occr, basic_block bb)
|
||||
{
|
||||
/* Find the right occurrence of this expression. */
|
||||
while (occr && BLOCK_FOR_INSN (occr->insn) != bb)
|
||||
@ -3212,8 +3212,8 @@ hoist_code (void)
|
||||
unsigned int dom_tree_walk_index;
|
||||
vec<basic_block> domby;
|
||||
unsigned int i, j, k;
|
||||
struct expr **index_map;
|
||||
struct expr *expr;
|
||||
struct gcse_expr **index_map;
|
||||
struct gcse_expr *expr;
|
||||
int *to_bb_head;
|
||||
int *bb_size;
|
||||
int changed = 0;
|
||||
@ -3227,7 +3227,7 @@ hoist_code (void)
|
||||
/* Compute a mapping from expression number (`bitmap_index') to
|
||||
hash table entry. */
|
||||
|
||||
index_map = XCNEWVEC (struct expr *, expr_hash_table.n_elems);
|
||||
index_map = XCNEWVEC (struct gcse_expr *, expr_hash_table.n_elems);
|
||||
for (i = 0; i < expr_hash_table.size; i++)
|
||||
for (expr = expr_hash_table.table[i]; expr; expr = expr->next_same_hash)
|
||||
index_map[expr->bitmap_index] = expr;
|
||||
@ -3285,7 +3285,7 @@ hoist_code (void)
|
||||
int nregs = 0;
|
||||
enum reg_class pressure_class = NO_REGS;
|
||||
/* Current expression. */
|
||||
struct expr *expr = index_map[i];
|
||||
struct gcse_expr *expr = index_map[i];
|
||||
/* Number of occurrences of EXPR that can be hoisted to BB. */
|
||||
int hoistable = 0;
|
||||
/* Occurrences reachable from BB. */
|
||||
@ -4028,7 +4028,7 @@ trim_ld_motion_mems (void)
|
||||
|
||||
while (ptr != NULL)
|
||||
{
|
||||
struct expr * expr;
|
||||
struct gcse_expr * expr;
|
||||
|
||||
/* Delete if entry has been made invalid. */
|
||||
if (! ptr->invalid)
|
||||
@ -4043,7 +4043,7 @@ trim_ld_motion_mems (void)
|
||||
break;
|
||||
}
|
||||
else
|
||||
expr = (struct expr *) 0;
|
||||
expr = (struct gcse_expr *) 0;
|
||||
|
||||
if (expr)
|
||||
{
|
||||
@ -4074,7 +4074,7 @@ trim_ld_motion_mems (void)
|
||||
correct value in the reaching register for the loads. */
|
||||
|
||||
static void
|
||||
update_ld_motion_stores (struct expr * expr)
|
||||
update_ld_motion_stores (struct gcse_expr * expr)
|
||||
{
|
||||
struct ls_expr * mem_ptr;
|
||||
|
||||
|
@ -904,7 +904,7 @@ init_ggc_heuristics (void)
|
||||
}
|
||||
|
||||
/* Datastructure used to store per-call-site statistics. */
|
||||
struct loc_descriptor
|
||||
struct ggc_loc_descriptor
|
||||
{
|
||||
const char *file;
|
||||
int line;
|
||||
@ -918,42 +918,42 @@ struct loc_descriptor
|
||||
|
||||
/* Hash table helper. */
|
||||
|
||||
struct loc_desc_hasher : typed_noop_remove <loc_descriptor>
|
||||
struct ggc_loc_desc_hasher : typed_noop_remove <ggc_loc_descriptor>
|
||||
{
|
||||
typedef loc_descriptor value_type;
|
||||
typedef loc_descriptor compare_type;
|
||||
typedef ggc_loc_descriptor value_type;
|
||||
typedef ggc_loc_descriptor compare_type;
|
||||
static inline hashval_t hash (const value_type *);
|
||||
static inline bool equal (const value_type *, const compare_type *);
|
||||
};
|
||||
|
||||
inline hashval_t
|
||||
loc_desc_hasher::hash (const value_type *d)
|
||||
ggc_loc_desc_hasher::hash (const value_type *d)
|
||||
{
|
||||
return htab_hash_pointer (d->function) | d->line;
|
||||
}
|
||||
|
||||
inline bool
|
||||
loc_desc_hasher::equal (const value_type *d, const compare_type *d2)
|
||||
ggc_loc_desc_hasher::equal (const value_type *d, const compare_type *d2)
|
||||
{
|
||||
return (d->file == d2->file && d->line == d2->line
|
||||
&& d->function == d2->function);
|
||||
}
|
||||
|
||||
/* Hashtable used for statistics. */
|
||||
static hash_table<loc_desc_hasher> *loc_hash;
|
||||
static hash_table<ggc_loc_desc_hasher> *loc_hash;
|
||||
|
||||
struct ptr_hash_entry
|
||||
struct ggc_ptr_hash_entry
|
||||
{
|
||||
void *ptr;
|
||||
struct loc_descriptor *loc;
|
||||
struct ggc_loc_descriptor *loc;
|
||||
size_t size;
|
||||
};
|
||||
|
||||
/* Helper for ptr_hash table. */
|
||||
|
||||
struct ptr_hash_hasher : typed_noop_remove <ptr_hash_entry>
|
||||
struct ptr_hash_hasher : typed_noop_remove <ggc_ptr_hash_entry>
|
||||
{
|
||||
typedef ptr_hash_entry value_type;
|
||||
typedef ggc_ptr_hash_entry value_type;
|
||||
typedef void compare_type;
|
||||
static inline hashval_t hash (const value_type *);
|
||||
static inline bool equal (const value_type *, const compare_type *);
|
||||
@ -975,22 +975,22 @@ ptr_hash_hasher::equal (const value_type *p, const compare_type *p2)
|
||||
static hash_table<ptr_hash_hasher> *ptr_hash;
|
||||
|
||||
/* Return descriptor for given call site, create new one if needed. */
|
||||
static struct loc_descriptor *
|
||||
static struct ggc_loc_descriptor *
|
||||
make_loc_descriptor (const char *name, int line, const char *function)
|
||||
{
|
||||
struct loc_descriptor loc;
|
||||
struct loc_descriptor **slot;
|
||||
struct ggc_loc_descriptor loc;
|
||||
struct ggc_loc_descriptor **slot;
|
||||
|
||||
loc.file = name;
|
||||
loc.line = line;
|
||||
loc.function = function;
|
||||
if (!loc_hash)
|
||||
loc_hash = new hash_table<loc_desc_hasher> (10);
|
||||
loc_hash = new hash_table<ggc_loc_desc_hasher> (10);
|
||||
|
||||
slot = loc_hash->find_slot (&loc, INSERT);
|
||||
if (*slot)
|
||||
return *slot;
|
||||
*slot = XCNEW (struct loc_descriptor);
|
||||
*slot = XCNEW (struct ggc_loc_descriptor);
|
||||
(*slot)->file = name;
|
||||
(*slot)->line = line;
|
||||
(*slot)->function = function;
|
||||
@ -1002,9 +1002,9 @@ void
|
||||
ggc_record_overhead (size_t allocated, size_t overhead, void *ptr,
|
||||
const char *name, int line, const char *function)
|
||||
{
|
||||
struct loc_descriptor *loc = make_loc_descriptor (name, line, function);
|
||||
struct ptr_hash_entry *p = XNEW (struct ptr_hash_entry);
|
||||
ptr_hash_entry **slot;
|
||||
struct ggc_loc_descriptor *loc = make_loc_descriptor (name, line, function);
|
||||
struct ggc_ptr_hash_entry *p = XNEW (struct ggc_ptr_hash_entry);
|
||||
ggc_ptr_hash_entry **slot;
|
||||
|
||||
p->ptr = ptr;
|
||||
p->loc = loc;
|
||||
@ -1023,9 +1023,9 @@ ggc_record_overhead (size_t allocated, size_t overhead, void *ptr,
|
||||
/* Helper function for prune_overhead_list. See if SLOT is still marked and
|
||||
remove it from hashtable if it is not. */
|
||||
int
|
||||
ggc_prune_ptr (ptr_hash_entry **slot, void *b ATTRIBUTE_UNUSED)
|
||||
ggc_prune_ptr (ggc_ptr_hash_entry **slot, void *b ATTRIBUTE_UNUSED)
|
||||
{
|
||||
struct ptr_hash_entry *p = *slot;
|
||||
struct ggc_ptr_hash_entry *p = *slot;
|
||||
if (!ggc_marked_p (p->ptr))
|
||||
{
|
||||
p->loc->collected += p->size;
|
||||
@ -1047,15 +1047,15 @@ ggc_prune_overhead_list (void)
|
||||
void
|
||||
ggc_free_overhead (void *ptr)
|
||||
{
|
||||
ptr_hash_entry **slot
|
||||
ggc_ptr_hash_entry **slot
|
||||
= ptr_hash->find_slot_with_hash (ptr, htab_hash_pointer (ptr), NO_INSERT);
|
||||
struct ptr_hash_entry *p;
|
||||
struct ggc_ptr_hash_entry *p;
|
||||
/* The pointer might be not found if a PCH read happened between allocation
|
||||
and ggc_free () call. FIXME: account memory properly in the presence of
|
||||
PCH. */
|
||||
if (!slot)
|
||||
return;
|
||||
p = (struct ptr_hash_entry *) *slot;
|
||||
p = (struct ggc_ptr_hash_entry *) *slot;
|
||||
p->loc->freed += p->size;
|
||||
ptr_hash->clear_slot (slot);
|
||||
free (p);
|
||||
@ -1065,10 +1065,10 @@ ggc_free_overhead (void *ptr)
|
||||
static int
|
||||
final_cmp_statistic (const void *loc1, const void *loc2)
|
||||
{
|
||||
const struct loc_descriptor *const l1 =
|
||||
*(const struct loc_descriptor *const *) loc1;
|
||||
const struct loc_descriptor *const l2 =
|
||||
*(const struct loc_descriptor *const *) loc2;
|
||||
const struct ggc_loc_descriptor *const l1 =
|
||||
*(const struct ggc_loc_descriptor *const *) loc1;
|
||||
const struct ggc_loc_descriptor *const l2 =
|
||||
*(const struct ggc_loc_descriptor *const *) loc2;
|
||||
long diff;
|
||||
diff = ((long)(l1->allocated + l1->overhead - l1->freed) -
|
||||
(l2->allocated + l2->overhead - l2->freed));
|
||||
@ -1079,10 +1079,10 @@ final_cmp_statistic (const void *loc1, const void *loc2)
|
||||
static int
|
||||
cmp_statistic (const void *loc1, const void *loc2)
|
||||
{
|
||||
const struct loc_descriptor *const l1 =
|
||||
*(const struct loc_descriptor *const *) loc1;
|
||||
const struct loc_descriptor *const l2 =
|
||||
*(const struct loc_descriptor *const *) loc2;
|
||||
const struct ggc_loc_descriptor *const l1 =
|
||||
*(const struct ggc_loc_descriptor *const *) loc1;
|
||||
const struct ggc_loc_descriptor *const l2 =
|
||||
*(const struct ggc_loc_descriptor *const *) loc2;
|
||||
long diff;
|
||||
|
||||
diff = ((long)(l1->allocated + l1->overhead - l1->freed - l1->collected) -
|
||||
@ -1095,9 +1095,9 @@ cmp_statistic (const void *loc1, const void *loc2)
|
||||
}
|
||||
|
||||
/* Collect array of the descriptors from hashtable. */
|
||||
static struct loc_descriptor **loc_array;
|
||||
static struct ggc_loc_descriptor **loc_array;
|
||||
int
|
||||
ggc_add_statistics (loc_descriptor **slot, int *n)
|
||||
ggc_add_statistics (ggc_loc_descriptor **slot, int *n)
|
||||
{
|
||||
loc_array[*n] = *slot;
|
||||
(*n)++;
|
||||
@ -1120,7 +1120,7 @@ dump_ggc_loc_statistics (bool final)
|
||||
ggc_force_collect = true;
|
||||
ggc_collect ();
|
||||
|
||||
loc_array = XCNEWVEC (struct loc_descriptor *,
|
||||
loc_array = XCNEWVEC (struct ggc_loc_descriptor *,
|
||||
loc_hash->elements_with_deleted ());
|
||||
fprintf (stderr, "-------------------------------------------------------\n");
|
||||
fprintf (stderr, "\n%-48s %10s %10s %10s %10s %10s\n",
|
||||
@ -1131,7 +1131,7 @@ dump_ggc_loc_statistics (bool final)
|
||||
final ? final_cmp_statistic : cmp_statistic);
|
||||
for (i = 0; i < nentries; i++)
|
||||
{
|
||||
struct loc_descriptor *d = loc_array[i];
|
||||
struct ggc_loc_descriptor *d = loc_array[i];
|
||||
allocated += d->allocated;
|
||||
times += d->times;
|
||||
freed += d->freed;
|
||||
@ -1140,7 +1140,7 @@ dump_ggc_loc_statistics (bool final)
|
||||
}
|
||||
for (i = 0; i < nentries; i++)
|
||||
{
|
||||
struct loc_descriptor *d = loc_array[i];
|
||||
struct ggc_loc_descriptor *d = loc_array[i];
|
||||
if (d->allocated)
|
||||
{
|
||||
const char *s1 = d->file;
|
||||
|
gcc/ggc-page.c
@@ -378,7 +378,7 @@ struct free_object
#endif

/* The rest of the global variables. */
static struct globals
static struct ggc_globals
{
/* The Nth element in this array is a page with objects of size 2^N.
If there are any pages with free objects, they will be at the
gcc/ipa-cp.c (18 changed lines)
@@ -573,7 +573,7 @@ ipcp_cloning_candidate_p (struct cgraph_node *node)
/* Arrays representing a topological ordering of call graph nodes and a stack
of noes used during constant propagation. */

struct topo_info
struct ipa_topo_info
{
struct cgraph_node **order;
struct cgraph_node **stack;
@@ -583,7 +583,7 @@ struct topo_info
/* Allocate the arrays in TOPO and topologically sort the nodes into order. */

static void
build_toporder_info (struct topo_info *topo)
build_toporder_info (struct ipa_topo_info *topo)
{
topo->order = XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
topo->stack = XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
@@ -596,7 +596,7 @@ build_toporder_info (struct topo_info *topo)
TOPO. */

static void
free_toporder_info (struct topo_info *topo)
free_toporder_info (struct ipa_topo_info *topo)
{
ipa_free_postorder_info ();
free (topo->order);
@@ -606,7 +606,7 @@ free_toporder_info (struct topo_info *topo)
/* Add NODE to the stack in TOPO, unless it is already there. */

static inline void
push_node_to_stack (struct topo_info *topo, struct cgraph_node *node)
push_node_to_stack (struct ipa_topo_info *topo, struct cgraph_node *node)
{
struct ipa_node_params *info = IPA_NODE_REF (node);
if (info->node_enqueued)
@@ -619,7 +619,7 @@ push_node_to_stack (struct topo_info *topo, struct cgraph_node *node)
is empty. */

static struct cgraph_node *
pop_node_from_stack (struct topo_info *topo)
pop_node_from_stack (struct ipa_topo_info *topo)
{
if (topo->stack_top)
{
@@ -2198,7 +2198,7 @@ add_all_node_vals_to_toposort (struct cgraph_node *node)
connected components. */

static void
propagate_constants_topo (struct topo_info *topo)
propagate_constants_topo (struct ipa_topo_info *topo)
{
int i;

@@ -2297,7 +2297,7 @@ propagate_effects (void)
interprocedurally. */

static void
ipcp_propagate_stage (struct topo_info *topo)
ipcp_propagate_stage (struct ipa_topo_info *topo)
{
struct cgraph_node *node;

@@ -3658,7 +3658,7 @@ identify_dead_nodes (struct cgraph_node *node)
TOPO and make specialized clones if deemed beneficial. */

static void
ipcp_decision_stage (struct topo_info *topo)
ipcp_decision_stage (struct ipa_topo_info *topo)
{
int i;

@@ -3693,7 +3693,7 @@ ipcp_driver (void)
{
struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
struct cgraph_edge_hook_list *edge_removal_hook_holder;
struct topo_info topo;
struct ipa_topo_info topo;

ipa_check_create_node_params ();
ipa_check_create_edge_args ();
gcc/ipa-prop.c
@@ -592,7 +592,7 @@ ipa_get_bb_info (struct func_body_info *fbi, basic_block bb)
/* Structure to be passed in between detect_type_change and
check_stmt_for_type_change. */

struct type_change_info
struct prop_type_change_info
{
/* Offset into the object where there is the virtual method pointer we are
looking for. */
@@ -680,7 +680,7 @@ stmt_may_be_vtbl_ptr_store (gimple stmt)
identified, return the type. Otherwise return NULL_TREE. */

static tree
extr_type_from_vtbl_ptr_store (gimple stmt, struct type_change_info *tci)
extr_type_from_vtbl_ptr_store (gimple stmt, struct prop_type_change_info *tci)
{
HOST_WIDE_INT offset, size, max_size;
tree lhs, rhs, base, binfo;
@@ -726,13 +726,13 @@ extr_type_from_vtbl_ptr_store (gimple stmt, struct type_change_info *tci)
detect_type_change to check whether a particular statement may modify
the virtual table pointer, and if possible also determine the new type of
the (sub-)object. It stores its result into DATA, which points to a
type_change_info structure. */
prop_type_change_info structure. */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
gimple stmt = SSA_NAME_DEF_STMT (vdef);
struct type_change_info *tci = (struct type_change_info *) data;
struct prop_type_change_info *tci = (struct prop_type_change_info *) data;

if (stmt_may_be_vtbl_ptr_store (stmt))
{
@@ -830,7 +830,7 @@ detect_type_change_from_memory_writes (tree arg, tree base, tree comp_type,
gimple call, struct ipa_jump_func *jfunc,
HOST_WIDE_INT offset)
{
struct type_change_info tci;
struct prop_type_change_info tci;
ao_ref ao;
bool entry_reached = false;
gcc/ipa-split.c
@@ -117,9 +117,9 @@ typedef struct
{
unsigned int size;
unsigned int time;
} bb_info;
} split_bb_info;

static vec<bb_info> bb_info_vec;
static vec<split_bb_info> bb_info_vec;

/* Description of split point. */
@ -77,7 +77,7 @@ along with GCC; see the file COPYING3. If not see
|
||||
|
||||
/* This structure is used to record information about hard register
|
||||
eliminations. */
|
||||
struct elim_table
|
||||
struct lra_elim_table
|
||||
{
|
||||
/* Hard register number to be eliminated. */
|
||||
int from;
|
||||
@@ -105,7 +105,7 @@ struct elim_table
of eliminating a register in favor of another.  If there is more
than one way of eliminating a particular register, the most
preferred should be specified first.  */
-static struct elim_table *reg_eliminate = 0;
+static struct lra_elim_table *reg_eliminate = 0;

/* This is an intermediate structure to initialize the table.  It has
exactly the members provided by ELIMINABLE_REGS.  */
@@ -131,7 +131,7 @@ static const struct elim_table_1
static void
print_elim_table (FILE *f)
{
-struct elim_table *ep;
+struct lra_elim_table *ep;

for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
fprintf (f, "%s eliminate %d to %d (offset=" HOST_WIDE_INT_PRINT_DEC
@@ -151,7 +151,7 @@ lra_debug_elim_table (void)
VALUE.  Setup FRAME_POINTER_NEEDED if elimination from frame
pointer to stack pointer is not possible anymore.  */
static void
-setup_can_eliminate (struct elim_table *ep, bool value)
+setup_can_eliminate (struct lra_elim_table *ep, bool value)
{
ep->can_eliminate = ep->prev_can_eliminate = value;
if (! value
@@ -163,12 +163,12 @@ setup_can_eliminate (struct elim_table *ep, bool value)
or NULL if none.  The elimination table may contain more than
one elimination for the same hard register, but this map specifies
the one that we are currently using.  */
-static struct elim_table *elimination_map[FIRST_PSEUDO_REGISTER];
+static struct lra_elim_table *elimination_map[FIRST_PSEUDO_REGISTER];

/* When an eliminable hard register becomes not eliminable, we use the
following special structure to restore original offsets for the
register.  */
-static struct elim_table self_elim_table;
+static struct lra_elim_table self_elim_table;

/* Offsets should be used to restore original offsets for eliminable
hard register which just became not eliminable.  Zero,
@@ -184,7 +184,7 @@ static void
setup_elimination_map (void)
{
int i;
-struct elim_table *ep;
+struct lra_elim_table *ep;

for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
elimination_map[i] = NULL;
@@ -249,7 +249,7 @@ form_sum (rtx x, rtx y)
int
lra_get_elimination_hard_regno (int hard_regno)
{
-struct elim_table *ep;
+struct lra_elim_table *ep;

if (hard_regno < 0 || hard_regno >= FIRST_PSEUDO_REGISTER)
return hard_regno;
@@ -260,11 +260,11 @@ lra_get_elimination_hard_regno (int hard_regno)

/* Return elimination which will be used for hard reg REG, NULL
otherwise.  */
-static struct elim_table *
+static struct lra_elim_table *
get_elimination (rtx reg)
{
int hard_regno;
-struct elim_table *ep;
+struct lra_elim_table *ep;
HOST_WIDE_INT offset;

lra_assert (REG_P (reg));
@@ -306,7 +306,7 @@ lra_eliminate_regs_1 (rtx_insn *insn, rtx x, enum machine_mode mem_mode,
bool subst_p, bool update_p, bool full_p)
{
enum rtx_code code = GET_CODE (x);
-struct elim_table *ep;
+struct lra_elim_table *ep;
rtx new_rtx;
int i, j;
const char *fmt;
@@ -674,7 +674,7 @@ static void
mark_not_eliminable (rtx x, enum machine_mode mem_mode)
{
enum rtx_code code = GET_CODE (x);
-struct elim_table *ep;
+struct lra_elim_table *ep;
int i, j;
const char *fmt;

@@ -856,7 +856,7 @@ eliminate_regs_in_insn (rtx_insn *insn, bool replace_p, bool first_p)
int i;
rtx substed_operand[MAX_RECOG_OPERANDS];
rtx orig_operand[MAX_RECOG_OPERANDS];
-struct elim_table *ep;
+struct lra_elim_table *ep;
rtx plus_src, plus_cst_src;
lra_insn_recog_data_t id;
struct lra_static_insn_data *static_id;
@@ -1130,7 +1130,7 @@ static bool
update_reg_eliminate (bitmap insns_with_changed_offsets)
{
bool prev, result;
-struct elim_table *ep, *ep1;
+struct lra_elim_table *ep, *ep1;
HARD_REG_SET temp_hard_reg_set;

/* Clear self elimination offsets.  */
@@ -1235,14 +1235,14 @@ update_reg_eliminate (bitmap insns_with_changed_offsets)
static void
init_elim_table (void)
{
-struct elim_table *ep;
+struct lra_elim_table *ep;
#ifdef ELIMINABLE_REGS
bool value_p;
const struct elim_table_1 *ep1;
#endif

if (!reg_eliminate)
-reg_eliminate = XCNEWVEC (struct elim_table, NUM_ELIMINABLE_REGS);
+reg_eliminate = XCNEWVEC (struct lra_elim_table, NUM_ELIMINABLE_REGS);

memset (self_elim_offsets, 0, sizeof (self_elim_offsets));
/* Initiate member values which will be never changed.  */
@@ -1291,7 +1291,7 @@ init_elimination (void)
bool stop_to_sp_elimination_p;
basic_block bb;
rtx_insn *insn;
-struct elim_table *ep;
+struct lra_elim_table *ep;

init_elim_table ();
FOR_EACH_BB_FN (bb, cfun)
@@ -1325,7 +1325,7 @@ void
lra_eliminate_reg_if_possible (rtx *loc)
{
int regno;
-struct elim_table *ep;
+struct lra_elim_table *ep;

lra_assert (REG_P (*loc));
if ((regno = REGNO (*loc)) >= FIRST_PSEUDO_REGISTER
@@ -1369,7 +1369,7 @@ lra_eliminate (bool final_p, bool first_p)
unsigned int uid;
bitmap_head insns_with_changed_offsets;
bitmap_iterator bi;
-struct elim_table *ep;
+struct lra_elim_table *ep;

gcc_assert (! final_p || ! first_p);
gcc/predict.c
@@ -2496,7 +2496,7 @@ predict_paths_leading_to_edge (edge e, enum br_predictor pred,
/* This is used to carry information about basic blocks.  It is
attached to the AUX field of the standard CFG block.  */

-typedef struct block_info_def
+struct block_info
{
/* Estimated frequency of execution of basic_block.  */
sreal frequency;
@@ -2506,10 +2506,10 @@ typedef struct block_info_def

/* Number of predecessors we need to visit first.  */
int npredecessors;
-} *block_info;
+};

/* Similar information for edges.  */
-typedef struct edge_info_def
+struct edge_prob_info
{
/* In case edge is a loopback edge, the probability edge will be reached
in case header is.  Estimated number of iterations of the loop can be
@@ -2517,10 +2517,10 @@ typedef struct edge_info_def
sreal back_edge_prob;
/* True if the edge is a loopback edge in the natural loop.  */
unsigned int back_edge:1;
-} *edge_info;
+};

-#define BLOCK_INFO(B) ((block_info) (B)->aux)
-#define EDGE_INFO(E) ((edge_info) (E)->aux)
+#define BLOCK_INFO(B) ((block_info *) (B)->aux)
+#define EDGE_INFO(E) ((edge_prob_info *) (E)->aux)

/* Helper function for estimate_bb_frequencies.
Propagate the frequencies in blocks marked in
@@ -2935,8 +2935,8 @@ estimate_bb_frequencies (bool force)
REG_BR_PROB_BASE;

/* Set up block info for each basic block.  */
-alloc_aux_for_blocks (sizeof (struct block_info_def));
-alloc_aux_for_edges (sizeof (struct edge_info_def));
+alloc_aux_for_blocks (sizeof (block_info));
+alloc_aux_for_edges (sizeof (edge_prob_info));
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
{
edge e;
gcc/profile.h
@@ -23,7 +23,7 @@ along with GCC; see the file COPYING3.  If not see
#define PROFILE_H

/* Additional information about edges.  */
-struct edge_info
+struct edge_profile_info
{
unsigned int count_valid:1;

@@ -35,7 +35,7 @@ struct edge_info
unsigned int ignore:1;
};

-#define EDGE_INFO(e) ((struct edge_info *) (e)->aux)
+#define EDGE_INFO(e) ((struct edge_profile_info *) (e)->aux)

/* Smoothes the initial assigned basic block and edge counts using
a minimum cost flow algorithm.  */
gcc/tree-nrv.c
@@ -55,7 +55,7 @@ along with GCC; see the file COPYING3.  If not see
This is basically a generic equivalent to the C++ front-end's
Named Return Value optimization.  */

-struct nrv_data
+struct nrv_data_t
{
/* This is the temporary (a VAR_DECL) which appears in all of
this function's RETURN_EXPR statements.  */
@@ -84,7 +84,7 @@ static tree
finalize_nrv_r (tree *tp, int *walk_subtrees, void *data)
{
struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
-struct nrv_data *dp = (struct nrv_data *) wi->info;
+struct nrv_data_t *dp = (struct nrv_data_t *) wi->info;

/* No need to walk into types.  */
if (TYPE_P (*tp))
@@ -150,7 +150,7 @@ pass_nrv::execute (function *fun)
tree found = NULL;
basic_block bb;
gimple_stmt_iterator gsi;
-struct nrv_data data;
+struct nrv_data_t data;

/* If this function does not return an aggregate type in memory, then
there is nothing to do.  */
gcc/tree-ssa-ccp.c
@@ -166,7 +166,7 @@ typedef enum
VARYING
} ccp_lattice_t;

-struct prop_value_d {
+struct ccp_prop_value_t {
/* Lattice value.  */
ccp_lattice_t lattice_val;

@@ -180,24 +180,22 @@ struct prop_value_d {
widest_int mask;
};

-typedef struct prop_value_d prop_value_t;
-
/* Array of propagated constant values.  After propagation,
CONST_VAL[I].VALUE holds the constant value for SSA_NAME(I).  If
the constant is held in an SSA name representing a memory store
(i.e., a VDEF), CONST_VAL[I].MEM_REF will contain the actual
memory reference used to store (i.e., the LHS of the assignment
doing the store).  */
-static prop_value_t *const_val;
+static ccp_prop_value_t *const_val;
static unsigned n_const_val;

-static void canonicalize_value (prop_value_t *);
+static void canonicalize_value (ccp_prop_value_t *);
static bool ccp_fold_stmt (gimple_stmt_iterator *);

/* Dump constant propagation value VAL to file OUTF prefixed by PREFIX.  */

static void
-dump_lattice_value (FILE *outf, const char *prefix, prop_value_t val)
+dump_lattice_value (FILE *outf, const char *prefix, ccp_prop_value_t val)
{
switch (val.lattice_val)
{
@@ -236,10 +234,10 @@ dump_lattice_value (FILE *outf, const char *prefix, prop_value_t val)

/* Print lattice value VAL to stderr.  */

-void debug_lattice_value (prop_value_t val);
+void debug_lattice_value (ccp_prop_value_t val);

DEBUG_FUNCTION void
-debug_lattice_value (prop_value_t val)
+debug_lattice_value (ccp_prop_value_t val)
{
dump_lattice_value (stderr, "", val);
fprintf (stderr, "\n");
@@ -272,10 +270,10 @@ extend_mask (const wide_int &nonzero_bits)
4- Initial values of variables that are not GIMPLE registers are
considered VARYING.  */

-static prop_value_t
+static ccp_prop_value_t
get_default_value (tree var)
{
-prop_value_t val = { UNINITIALIZED, NULL_TREE, 0 };
+ccp_prop_value_t val = { UNINITIALIZED, NULL_TREE, 0 };
gimple stmt;

stmt = SSA_NAME_DEF_STMT (var);
@@ -343,10 +341,10 @@ get_default_value (tree var)

/* Get the constant value associated with variable VAR.  */

-static inline prop_value_t *
+static inline ccp_prop_value_t *
get_value (tree var)
{
-prop_value_t *val;
+ccp_prop_value_t *val;

if (const_val == NULL
|| SSA_NAME_VERSION (var) >= n_const_val)
@@ -366,7 +364,7 @@ get_value (tree var)
static inline tree
get_constant_value (tree var)
{
-prop_value_t *val;
+ccp_prop_value_t *val;
if (TREE_CODE (var) != SSA_NAME)
{
if (is_gimple_min_invariant (var))
@@ -387,7 +385,7 @@ get_constant_value (tree var)
static inline void
set_value_varying (tree var)
{
-prop_value_t *val = &const_val[SSA_NAME_VERSION (var)];
+ccp_prop_value_t *val = &const_val[SSA_NAME_VERSION (var)];

val->lattice_val = VARYING;
val->value = NULL_TREE;
@@ -413,7 +411,7 @@ set_value_varying (tree var)
For other constants, make sure to drop TREE_OVERFLOW.  */

static void
-canonicalize_value (prop_value_t *val)
+canonicalize_value (ccp_prop_value_t *val)
{
enum machine_mode mode;
tree type;
@@ -451,7 +449,7 @@ canonicalize_value (prop_value_t *val)
/* Return whether the lattice transition is valid.  */

static bool
-valid_lattice_transition (prop_value_t old_val, prop_value_t new_val)
+valid_lattice_transition (ccp_prop_value_t old_val, ccp_prop_value_t new_val)
{
/* Lattice transitions must always be monotonically increasing in
value.  */
@@ -486,10 +484,10 @@ valid_lattice_transition (prop_value_t old_val, prop_value_t new_val)
value is different from VAR's previous value.  */

static bool
-set_lattice_value (tree var, prop_value_t new_val)
+set_lattice_value (tree var, ccp_prop_value_t new_val)
{
/* We can deal with old UNINITIALIZED values just fine here.  */
-prop_value_t *old_val = &const_val[SSA_NAME_VERSION (var)];
+ccp_prop_value_t *old_val = &const_val[SSA_NAME_VERSION (var)];

canonicalize_value (&new_val);

@@ -534,8 +532,8 @@ set_lattice_value (tree var, prop_value_t new_val)
return false;
}

-static prop_value_t get_value_for_expr (tree, bool);
-static prop_value_t bit_value_binop (enum tree_code, tree, tree, tree);
+static ccp_prop_value_t get_value_for_expr (tree, bool);
+static ccp_prop_value_t bit_value_binop (enum tree_code, tree, tree, tree);
static void bit_value_binop_1 (enum tree_code, tree, widest_int *, widest_int *,
tree, const widest_int &, const widest_int &,
tree, const widest_int &, const widest_int &);
@@ -544,7 +542,7 @@ static void bit_value_binop_1 (enum tree_code, tree, widest_int *, widest_int *,
from VAL.  */

static widest_int
-value_to_wide_int (prop_value_t val)
+value_to_wide_int (ccp_prop_value_t val)
{
if (val.value
&& TREE_CODE (val.value) == INTEGER_CST)
@@ -556,11 +554,11 @@ value_to_wide_int (prop_value_t val)
/* Return the value for the address expression EXPR based on alignment
information.  */

-static prop_value_t
+static ccp_prop_value_t
get_value_from_alignment (tree expr)
{
tree type = TREE_TYPE (expr);
-prop_value_t val;
+ccp_prop_value_t val;
unsigned HOST_WIDE_INT bitpos;
unsigned int align;

@@ -583,10 +581,10 @@ get_value_from_alignment (tree expr)
return constant bits extracted from alignment information for
invariant addresses.  */

-static prop_value_t
+static ccp_prop_value_t
get_value_for_expr (tree expr, bool for_bits_p)
{
-prop_value_t val;
+ccp_prop_value_t val;

if (TREE_CODE (expr) == SSA_NAME)
{
@@ -654,7 +652,7 @@ likely_value (gimple stmt)
all_undefined_operands = true;
FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
{
-prop_value_t *val = get_value (use);
+ccp_prop_value_t *val = get_value (use);

if (val->lattice_val == UNDEFINED)
has_undefined_operand = true;
@@ -792,7 +790,7 @@ ccp_initialize (void)
basic_block bb;

n_const_val = num_ssa_names;
-const_val = XCNEWVEC (prop_value_t, n_const_val);
+const_val = XCNEWVEC (ccp_prop_value_t, n_const_val);

/* Initialize simulation flags for PHI nodes and statements.  */
FOR_EACH_BB_FN (bb, cfun)
@@ -884,7 +882,7 @@ ccp_finalize (void)
for (i = 1; i < num_ssa_names; ++i)
{
tree name = ssa_name (i);
-prop_value_t *val;
+ccp_prop_value_t *val;
unsigned int tem, align;

if (!name
@@ -941,7 +939,7 @@ ccp_finalize (void)
*/

static void
-ccp_lattice_meet (prop_value_t *val1, prop_value_t *val2)
+ccp_lattice_meet (ccp_prop_value_t *val1, ccp_prop_value_t *val2)
{
if (val1->lattice_val == UNDEFINED)
{
@@ -997,7 +995,7 @@ ccp_lattice_meet (prop_value_t *val1, prop_value_t *val2)
{
/* When not equal addresses are involved try meeting for
alignment.  */
-prop_value_t tem = *val2;
+ccp_prop_value_t tem = *val2;
if (TREE_CODE (val1->value) == ADDR_EXPR)
*val1 = get_value_for_expr (val1->value, true);
if (TREE_CODE (val2->value) == ADDR_EXPR)
@@ -1023,7 +1021,7 @@ static enum ssa_prop_result
ccp_visit_phi_node (gimple phi)
{
unsigned i;
-prop_value_t *old_val, new_val;
+ccp_prop_value_t *old_val, new_val;

if (dump_file && (dump_flags & TDF_DETAILS))
{
@@ -1069,7 +1067,7 @@ ccp_visit_phi_node (gimple phi)
if (e->flags & EDGE_EXECUTABLE)
{
tree arg = gimple_phi_arg (phi, i)->def;
-prop_value_t arg_val = get_value_for_expr (arg, false);
+ccp_prop_value_t arg_val = get_value_for_expr (arg, false);

ccp_lattice_meet (&new_val, &arg_val);

@@ -1449,12 +1447,12 @@ bit_value_binop_1 (enum tree_code code, tree type,
/* Return the propagation value when applying the operation CODE to
the value RHS yielding type TYPE.  */

-static prop_value_t
+static ccp_prop_value_t
bit_value_unop (enum tree_code code, tree type, tree rhs)
{
-prop_value_t rval = get_value_for_expr (rhs, true);
+ccp_prop_value_t rval = get_value_for_expr (rhs, true);
widest_int value, mask;
-prop_value_t val;
+ccp_prop_value_t val;

if (rval.lattice_val == UNDEFINED)
return rval;
@@ -1483,13 +1481,13 @@ bit_value_unop (enum tree_code code, tree type, tree rhs)
/* Return the propagation value when applying the operation CODE to
the values RHS1 and RHS2 yielding type TYPE.  */

-static prop_value_t
+static ccp_prop_value_t
bit_value_binop (enum tree_code code, tree type, tree rhs1, tree rhs2)
{
-prop_value_t r1val = get_value_for_expr (rhs1, true);
-prop_value_t r2val = get_value_for_expr (rhs2, true);
+ccp_prop_value_t r1val = get_value_for_expr (rhs1, true);
+ccp_prop_value_t r2val = get_value_for_expr (rhs2, true);
widest_int value, mask;
-prop_value_t val;
+ccp_prop_value_t val;

if (r1val.lattice_val == UNDEFINED
|| r2val.lattice_val == UNDEFINED)
@@ -1532,15 +1530,15 @@ bit_value_binop (enum tree_code code, tree type, tree rhs1, tree rhs2)
is false, for alloc_aligned attribute ATTR is non-NULL and
ALLOC_ALIGNED is true.  */

-static prop_value_t
-bit_value_assume_aligned (gimple stmt, tree attr, prop_value_t ptrval,
+static ccp_prop_value_t
+bit_value_assume_aligned (gimple stmt, tree attr, ccp_prop_value_t ptrval,
bool alloc_aligned)
{
tree align, misalign = NULL_TREE, type;
unsigned HOST_WIDE_INT aligni, misaligni = 0;
-prop_value_t alignval;
+ccp_prop_value_t alignval;
widest_int value, mask;
-prop_value_t val;
+ccp_prop_value_t val;

if (attr == NULL_TREE)
{
@@ -1632,10 +1630,10 @@ bit_value_assume_aligned (gimple stmt, tree attr, prop_value_t ptrval,
/* Evaluate statement STMT.
Valid only for assignments, calls, conditionals, and switches.  */

-static prop_value_t
+static ccp_prop_value_t
evaluate_stmt (gimple stmt)
{
-prop_value_t val;
+ccp_prop_value_t val;
tree simplified = NULL_TREE;
ccp_lattice_t likelyvalue = likely_value (stmt);
bool is_constant = false;
@@ -2062,7 +2060,7 @@ ccp_fold_stmt (gimple_stmt_iterator *gsi)
{
case GIMPLE_COND:
{
-prop_value_t val;
+ccp_prop_value_t val;
/* Statement evaluation will handle type mismatches in constants
more gracefully than the final propagation.  This allows us to
fold more conditionals here.  */
@@ -2197,7 +2195,7 @@ ccp_fold_stmt (gimple_stmt_iterator *gsi)
static enum ssa_prop_result
visit_assignment (gimple stmt, tree *output_p)
{
-prop_value_t val;
+ccp_prop_value_t val;
enum ssa_prop_result retval;

tree lhs = gimple_get_lhs (stmt);
@@ -2242,7 +2240,7 @@ visit_assignment (gimple stmt, tree *output_p)
static enum ssa_prop_result
visit_cond_stmt (gimple stmt, edge *taken_edge_p)
{
-prop_value_t val;
+ccp_prop_value_t val;
basic_block block;

block = gimple_bb (stmt);
@@ -2320,7 +2318,7 @@ ccp_visit_stmt (gimple stmt, edge *taken_edge_p, tree *output_p)
Mark them VARYING.  */
FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
{
-prop_value_t v = { VARYING, NULL_TREE, -1 };
+ccp_prop_value_t v = { VARYING, NULL_TREE, -1 };
set_lattice_value (def, v);
}
gcc/tree-ssa-copy.c
@@ -76,11 +76,10 @@ along with GCC; see the file COPYING3.  If not see
When visiting a statement or PHI node the lattice value for an
SSA name can transition from UNDEFINED to COPY to VARYING.  */

-struct prop_value_d {
+struct prop_value_t {
/* Copy-of value.  */
tree value;
};
-typedef struct prop_value_d prop_value_t;

static prop_value_t *copy_of;
static unsigned n_copy_of;