mirror of
git://gcc.gnu.org/git/gcc.git
Remove a layer of indirection from hash_table
gcc/
	* hash-table.h: Remove a layer of indirection from hash_table so that
	it contains the hash table's data instead of a pointer to the data.
	* alloc-pool.c, asan.c, attribs.c, bitmap.c, cfg.c,
	config/arm/arm.c, config/i386/winnt.c, config/ia64/ia64.c,
	config/mips/mips.c, config/sol2.c, coverage.c, cselib.c,
	data-streamer-out.c, dse.c, dwarf2cfi.c, dwarf2out.c, except.c,
	fold-const.c, gcse.c, ggc-common.c, gimple-ssa-strength-reduction.c,
	gimplify.c, graphite-clast-to-gimple.c, graphite-dependences.c,
	graphite-htab.h, graphite.c, haifa-sched.c, ipa-devirt.c,
	ipa-profile.c, ira-color.c, ira-costs.c, loop-invariant.c,
	loop-iv.c, loop-unroll.c, lto-streamer-in.c, lto-streamer-out.c,
	lto-streamer.c, lto-streamer.h, passes.c, plugin.c,
	postreload-gcse.c, sese.c, statistics.c, store-motion.c,
	trans-mem.c, tree-browser.c, tree-cfg.c, tree-complex.c,
	tree-eh.c, tree-into-ssa.c, tree-parloops.c, tree-sra.c,
	tree-ssa-ccp.c, tree-ssa-coalesce.c, tree-ssa-dom.c,
	tree-ssa-live.c, tree-ssa-loop-im.c, tree-ssa-loop-ivopts.c,
	tree-ssa-phiopt.c, tree-ssa-pre.c, tree-ssa-reassoc.c,
	tree-ssa-sccvn.c, tree-ssa-strlen.c, tree-ssa-structalias.c,
	tree-ssa-tail-merge.c, tree-ssa-threadupdate.c, tree-ssa-uncprop.c,
	tree-vect-data-refs.c, tree-vect-loop.c, tree-vectorizer.c,
	tree-vectorizer.h, valtrack.c, valtrack.h, var-tracking.c,
	vtable-verify.c, vtable-verify.h: Adjust.

gcc/c/
	* c-decl.c: Adjust.

gcc/cp/
	* class.c, semantics.c, tree.c, vtable-class-hierarchy.c: Adjust.

gcc/java/
	* jcf-io.c: Adjust.

gcc/lto/
	* lto.c: Adjust.

gcc/objc/
	* objc-act.c: Adjust.

From-SVN: r211936
parent fbc2a724d4
commit c203e8a73b
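Every hunk below applies the same mechanical rewrite: a hash_table held by value, lazily set up with is_created ()/create () and torn down with dispose (), becomes a pointer to a hash_table allocated with new and released with delete, and member calls switch from '.' to '->'. A minimal before/after sketch of the pattern follows; the 'seen' table and record_node () are illustrative names rather than code from this commit, while the types and calls come from GCC's hash-table.h exactly as used in the hunks below.

/* Sketch only: in-tree users include config.h, system.h, coretypes.h
   and tree.h before hash-table.h.  */
#include "hash-table.h"

namespace before_patch
{
  /* Old interface: the table object only points at its data, so an
     unconstructed global is legal and gets built on first use.  */
  static hash_table <pointer_hash <tree_node> > seen;

  static bool
  record_node (tree t)
  {
    if (!seen.is_created ())
      seen.create (37);
    tree_node **slot = seen.find_slot (t, INSERT);
    if (*slot)
      return false;   /* Already recorded.  */
    *slot = t;
    return true;
  }
}

namespace after_patch
{
  /* New interface: the table owns its data directly, so a lazily built
     global becomes a pointer managed with new/delete, and '.' becomes '->'.  */
  static hash_table<pointer_hash <tree_node> > *seen;

  static bool
  record_node (tree t)
  {
    if (!seen)
      seen = new hash_table<pointer_hash <tree_node> > (37);
    tree_node **slot = seen->find_slot (t, INSERT);
    if (*slot)
      return false;   /* Already recorded.  */
    *slot = t;
    return true;
  }
}

Once the object holds its data inline, a by-value global would need construction at startup, which is presumably why the lazily created global tables in the hunks below turn into heap-allocated pointers, while function-local tables (for example the one in detect_field_duplicates) simply become automatic objects constructed with an initial size.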
@@ -1,3 +1,31 @@
+2014-06-24  Trevor Saunders  <tsaunders@mozilla.com>
+
+	* hash-table.h: Remove a layer of indirection from hash_table so that
+	it contains the hash table's data instead of a pointer to the data.
+	* alloc-pool.c, asan.c, attribs.c, bitmap.c, cfg.c,
+	config/arm/arm.c, config/i386/winnt.c, config/ia64/ia64.c,
+	config/mips/mips.c, config/sol2.c, coverage.c, cselib.c,
+	data-streamer-out.c, dse.c, dwarf2cfi.c, dwarf2out.c, except.c,
+	fold-const.c, gcse.c, ggc-common.c,
+	gimple-ssa-strength-reduction.c, gimplify.c,
+	graphite-clast-to-gimple.c, graphite-dependences.c,
+	graphite-htab.h, graphite.c, haifa-sched.c, ipa-devirt.c,
+	ipa-profile.c, ira-color.c, ira-costs.c, loop-invariant.c,
+	loop-iv.c, loop-unroll.c, lto-streamer-in.c, lto-streamer-out.c,
+	lto-streamer.c, lto-streamer.h, passes.c, plugin.c,
+	postreload-gcse.c, sese.c, statistics.c, store-motion.c,
+	trans-mem.c, tree-browser.c, tree-cfg.c, tree-complex.c,
+	tree-eh.c, tree-into-ssa.c, tree-parloops.c, tree-sra.c,
+	tree-ssa-ccp.c, tree-ssa-coalesce.c, tree-ssa-dom.c,
+	tree-ssa-live.c, tree-ssa-loop-im.c,
+	tree-ssa-loop-ivopts.c, tree-ssa-phiopt.c, tree-ssa-pre.c,
+	tree-ssa-reassoc.c, tree-ssa-sccvn.c, tree-ssa-strlen.c,
+	tree-ssa-structalias.c, tree-ssa-tail-merge.c,
+	tree-ssa-threadupdate.c, tree-ssa-uncprop.c,
+	tree-vect-data-refs.c, tree-vect-loop.c, tree-vectorizer.c,
+	tree-vectorizer.h, valtrack.c, valtrack.h, var-tracking.c,
+	vtable-verify.c, vtable-verify.h: Adjust.
+
 2014-06-24  Richard Biener  <rguenther@suse.de>
 
 	PR tree-optimization/61572
@@ -105,7 +105,7 @@ alloc_pool_hasher::equal (const value_type *d,
 }
 
 /* Hashtable mapping alloc_pool names to descriptors.  */
-static hash_table <alloc_pool_hasher> alloc_pool_hash;
+static hash_table<alloc_pool_hasher> *alloc_pool_hash;
 
 /* For given name, return descriptor, create new if needed.  */
 static struct alloc_pool_descriptor *
@@ -113,11 +113,12 @@ allocate_pool_descriptor (const char *name)
 {
   struct alloc_pool_descriptor **slot;
 
-  if (!alloc_pool_hash.is_created ())
-    alloc_pool_hash.create (10);
+  if (!alloc_pool_hash)
+    alloc_pool_hash = new hash_table<alloc_pool_hasher> (10);
 
-  slot = alloc_pool_hash.find_slot_with_hash (name,
-                                              htab_hash_pointer (name), INSERT);
+  slot = alloc_pool_hash->find_slot_with_hash (name,
+                                               htab_hash_pointer (name),
+                                               INSERT);
   if (*slot)
     return *slot;
   *slot = XCNEW (struct alloc_pool_descriptor);
@@ -404,15 +405,15 @@ dump_alloc_pool_statistics (void)
   if (! GATHER_STATISTICS)
     return;
 
-  if (!alloc_pool_hash.is_created ())
+  if (!alloc_pool_hash)
     return;
 
   fprintf (stderr, "\nAlloc-pool Kind         Elt size  Pools  Allocated (elts)  Peak (elts)  Leak (elts)\n");
   fprintf (stderr, "--------------------------------------------------------------------------------------------------------------\n");
   info.total_created = 0;
   info.total_allocated = 0;
-  alloc_pool_hash.traverse <struct output_info *,
-                            print_alloc_pool_statistics> (&info);
+  alloc_pool_hash->traverse <struct output_info *,
+                             print_alloc_pool_statistics> (&info);
   fprintf (stderr, "--------------------------------------------------------------------------------------------------------------\n");
   fprintf (stderr, "%-22s %7lu %10lu\n",
            "Total", info.total_created, info.total_allocated);
gcc/asan.c | 22
@@ -363,18 +363,18 @@ asan_mem_ref_hasher::equal (const asan_mem_ref *m1,
           && operand_equal_p (m1->start, m2->start, 0));
 }
 
-static hash_table <asan_mem_ref_hasher> asan_mem_ref_ht;
+static hash_table<asan_mem_ref_hasher> *asan_mem_ref_ht;
 
 /* Returns a reference to the hash table containing memory references.
    This function ensures that the hash table is created.  Note that
    this hash table is updated by the function
    update_mem_ref_hash_table.  */
 
-static hash_table <asan_mem_ref_hasher> &
+static hash_table<asan_mem_ref_hasher> *
 get_mem_ref_hash_table ()
 {
-  if (!asan_mem_ref_ht.is_created ())
-    asan_mem_ref_ht.create (10);
+  if (!asan_mem_ref_ht)
+    asan_mem_ref_ht = new hash_table<asan_mem_ref_hasher> (10);
 
   return asan_mem_ref_ht;
 }
@@ -384,8 +384,8 @@ get_mem_ref_hash_table ()
 static void
 empty_mem_ref_hash_table ()
 {
-  if (asan_mem_ref_ht.is_created ())
-    asan_mem_ref_ht.empty ();
+  if (asan_mem_ref_ht)
+    asan_mem_ref_ht->empty ();
 }
 
 /* Free the memory references hash table.  */
@@ -393,8 +393,8 @@ empty_mem_ref_hash_table ()
 static void
 free_mem_ref_resources ()
 {
-  if (asan_mem_ref_ht.is_created ())
-    asan_mem_ref_ht.dispose ();
+  delete asan_mem_ref_ht;
+  asan_mem_ref_ht = NULL;
 
   if (asan_mem_ref_alloc_pool)
     {
@@ -411,7 +411,7 @@ has_mem_ref_been_instrumented (tree ref, HOST_WIDE_INT access_size)
   asan_mem_ref r;
   asan_mem_ref_init (&r, ref, access_size);
 
-  return (get_mem_ref_hash_table ().find (&r) != NULL);
+  return (get_mem_ref_hash_table ()->find (&r) != NULL);
 }
 
 /* Return true iff the memory reference REF has been instrumented.  */
@@ -858,12 +858,12 @@ has_stmt_been_instrumented_p (gimple stmt)
 static void
 update_mem_ref_hash_table (tree ref, HOST_WIDE_INT access_size)
 {
-  hash_table <asan_mem_ref_hasher> ht = get_mem_ref_hash_table ();
+  hash_table<asan_mem_ref_hasher> *ht = get_mem_ref_hash_table ();
 
   asan_mem_ref r;
   asan_mem_ref_init (&r, ref, access_size);
 
-  asan_mem_ref **slot = ht.find_slot (&r, INSERT);
+  asan_mem_ref **slot = ht->find_slot (&r, INSERT);
   if (*slot == NULL)
     *slot = asan_mem_ref_new (ref, access_size);
 }
@@ -85,7 +85,7 @@ struct scoped_attributes
 {
   const char *ns;
   vec<attribute_spec> attributes;
-  hash_table <attribute_hasher> attribute_hash;
+  hash_table<attribute_hasher> *attribute_hash;
 };
 
 /* The table of scope attributes.  */
@@ -144,7 +144,7 @@ register_scoped_attributes (const struct attribute_spec * attributes,
       sa.ns = ns;
       sa.attributes.create (64);
       result = attributes_table.safe_push (sa);
-      result->attribute_hash.create (200);
+      result->attribute_hash = new hash_table<attribute_hasher> (200);
     }
 
   /* Really add the attributes to their namespace now.  */
@@ -281,7 +281,7 @@ register_scoped_attribute (const struct attribute_spec *attr,
 
   gcc_assert (attr != NULL && name_space != NULL);
 
-  gcc_assert (name_space->attribute_hash.is_created ());
+  gcc_assert (name_space->attribute_hash);
 
   str.str = attr->name;
   str.length = strlen (str.str);
@@ -291,8 +291,8 @@ register_scoped_attribute (const struct attribute_spec *attr,
   gcc_assert (str.length > 0 && str.str[0] != '_');
 
   slot = name_space->attribute_hash
-         .find_slot_with_hash (&str, substring_hash (str.str, str.length),
-                               INSERT);
+         ->find_slot_with_hash (&str, substring_hash (str.str, str.length),
+                                INSERT);
   gcc_assert (!*slot || attr->name[0] == '*');
   *slot = CONST_CAST (struct attribute_spec *, attr);
 }
@@ -316,8 +316,9 @@ lookup_scoped_attribute_spec (const_tree ns, const_tree name)
   attr.str = IDENTIFIER_POINTER (name);
   attr.length = IDENTIFIER_LENGTH (name);
   extract_attribute_substring (&attr);
-  return attrs->attribute_hash.find_with_hash (&attr,
-                                               substring_hash (attr.str, attr.length));
+  return attrs->attribute_hash->find_with_hash (&attr,
+                                                substring_hash (attr.str,
+                                                                attr.length));
 }
 
 /* Return the spec for the attribute named NAME.  If NAME is a TREE_LIST,
gcc/bitmap.c | 17
@@ -80,7 +80,7 @@ bitmap_desc_hasher::equal (const value_type *d, const compare_type *l)
 }
 
 /* Hashtable mapping bitmap names to descriptors.  */
-static hash_table <bitmap_desc_hasher> bitmap_desc_hash;
+static hash_table<bitmap_desc_hasher> *bitmap_desc_hash;
 
 /* For given file and line, return descriptor, create new if needed.  */
 static bitmap_descriptor
@@ -93,12 +93,13 @@ get_bitmap_descriptor (const char *file, int line, const char *function)
   loc.function = function;
   loc.line = line;
 
-  if (!bitmap_desc_hash.is_created ())
-    bitmap_desc_hash.create (10);
+  if (!bitmap_desc_hash)
+    bitmap_desc_hash = new hash_table<bitmap_desc_hasher> (10);
 
-  slot = bitmap_desc_hash.find_slot_with_hash (&loc,
-                                               htab_hash_pointer (file) + line,
-                                               INSERT);
+  slot
+    = bitmap_desc_hash->find_slot_with_hash (&loc,
+                                             htab_hash_pointer (file) + line,
+                                             INSERT);
   if (*slot)
     return *slot;
 
@@ -2185,7 +2186,7 @@ dump_bitmap_statistics (void)
   if (! GATHER_STATISTICS)
     return;
 
-  if (!bitmap_desc_hash.is_created ())
+  if (!bitmap_desc_hash)
     return;
 
   fprintf (stderr,
@@ -2196,7 +2197,7 @@ dump_bitmap_statistics (void)
   fprintf (stderr, "---------------------------------------------------------------------------------\n");
   info.count = 0;
   info.size = 0;
-  bitmap_desc_hash.traverse <output_info *, print_statistics> (&info);
+  bitmap_desc_hash->traverse <output_info *, print_statistics> (&info);
   fprintf (stderr, "---------------------------------------------------------------------------------\n");
   fprintf (stderr,
            "%-41s %9"PRId64" %15"PRId64"\n",
@@ -1,3 +1,7 @@
+2014-06-24  Trevor Saunders  <tsaunders@mozilla.com>
+
+	* c-decl.c: Adjust.
+
 2014-06-24  Jakub Jelinek  <jakub@redhat.com>
 
 	* c-parser.c (c_parser_omp_for_loop): For
@@ -7076,7 +7076,7 @@ is_duplicate_field (tree x, tree y)
 
 static void
 detect_field_duplicates_hash (tree fieldlist,
-                              hash_table <pointer_hash <tree_node> > htab)
+                              hash_table<pointer_hash <tree_node> > *htab)
 {
   tree x, y;
   tree_node **slot;
@@ -7084,7 +7084,7 @@ detect_field_duplicates_hash (tree fieldlist,
   for (x = fieldlist; x ; x = DECL_CHAIN (x))
     if ((y = DECL_NAME (x)) != 0)
       {
-        slot = htab.find_slot (y, INSERT);
+        slot = htab->find_slot (y, INSERT);
        if (*slot)
          {
            error ("duplicate member %q+D", x);
@@ -7104,7 +7104,7 @@ detect_field_duplicates_hash (tree fieldlist,
        && TREE_CODE (TYPE_NAME (TREE_TYPE (x))) == TYPE_DECL)
       {
          tree xn = DECL_NAME (TYPE_NAME (TREE_TYPE (x)));
-         slot = htab.find_slot (xn, INSERT);
+         slot = htab->find_slot (xn, INSERT);
          if (*slot)
            error ("duplicate member %q+D", TYPE_NAME (TREE_TYPE (x)));
          *slot = xn;
@@ -7176,11 +7176,8 @@ detect_field_duplicates (tree fieldlist)
        }
       else
        {
-         hash_table <pointer_hash <tree_node> > htab;
-         htab.create (37);
-
-         detect_field_duplicates_hash (fieldlist, htab);
-         htab.dispose ();
+         hash_table<pointer_hash <tree_node> > htab (37);
+         detect_field_duplicates_hash (fieldlist, &htab);
        }
     }
 }
gcc/cfg.c | 37
@@ -1038,11 +1038,11 @@ bb_copy_hasher::equal (const value_type *data, const compare_type *data2)
 
 /* Data structures used to maintain mapping between basic blocks and
    copies.  */
-static hash_table <bb_copy_hasher> bb_original;
-static hash_table <bb_copy_hasher> bb_copy;
+static hash_table<bb_copy_hasher> *bb_original;
+static hash_table<bb_copy_hasher> *bb_copy;
 
 /* And between loops and copies.  */
-static hash_table <bb_copy_hasher> loop_copy;
+static hash_table<bb_copy_hasher> *loop_copy;
 static alloc_pool original_copy_bb_pool;
 
 
@@ -1055,9 +1055,9 @@ initialize_original_copy_tables (void)
   original_copy_bb_pool
     = create_alloc_pool ("original_copy",
                         sizeof (struct htab_bb_copy_original_entry), 10);
-  bb_original.create (10);
-  bb_copy.create (10);
-  loop_copy.create (10);
+  bb_original = new hash_table<bb_copy_hasher> (10);
+  bb_copy = new hash_table<bb_copy_hasher> (10);
+  loop_copy = new hash_table<bb_copy_hasher> (10);
 }
 
 /* Free the data structures to maintain mapping between blocks and
@@ -1066,9 +1066,12 @@ void
 free_original_copy_tables (void)
 {
   gcc_assert (original_copy_bb_pool);
-  bb_copy.dispose ();
-  bb_original.dispose ();
-  loop_copy.dispose ();
+  delete bb_copy;
+  bb_copy = NULL;
+  delete bb_original;
+  bb_copy = NULL;
+  delete loop_copy;
+  loop_copy = NULL;
   free_alloc_pool (original_copy_bb_pool);
   original_copy_bb_pool = NULL;
 }
@@ -1076,7 +1079,7 @@ free_original_copy_tables (void)
 /* Removes the value associated with OBJ from table TAB.  */
 
 static void
-copy_original_table_clear (hash_table <bb_copy_hasher> tab, unsigned obj)
+copy_original_table_clear (hash_table<bb_copy_hasher> *tab, unsigned obj)
 {
   htab_bb_copy_original_entry **slot;
   struct htab_bb_copy_original_entry key, *elt;
@@ -1085,12 +1088,12 @@ copy_original_table_clear (hash_table <bb_copy_hasher> tab, unsigned obj)
     return;
 
   key.index1 = obj;
-  slot = tab.find_slot (&key, NO_INSERT);
+  slot = tab->find_slot (&key, NO_INSERT);
   if (!slot)
     return;
 
   elt = *slot;
-  tab.clear_slot (slot);
+  tab->clear_slot (slot);
   pool_free (original_copy_bb_pool, elt);
 }
 
@@ -1098,7 +1101,7 @@ copy_original_table_clear (hash_table <bb_copy_hasher> tab, unsigned obj)
    Do nothing when data structures are not initialized.  */
 
 static void
-copy_original_table_set (hash_table <bb_copy_hasher> tab,
+copy_original_table_set (hash_table<bb_copy_hasher> *tab,
                          unsigned obj, unsigned val)
 {
   struct htab_bb_copy_original_entry **slot;
@@ -1108,7 +1111,7 @@ copy_original_table_set (hash_table <bb_copy_hasher> tab,
     return;
 
   key.index1 = obj;
-  slot = tab.find_slot (&key, INSERT);
+  slot = tab->find_slot (&key, INSERT);
   if (!*slot)
     {
       *slot = (struct htab_bb_copy_original_entry *)
@@ -1136,7 +1139,7 @@ get_bb_original (basic_block bb)
   gcc_assert (original_copy_bb_pool);
 
   key.index1 = bb->index;
-  entry = bb_original.find (&key);
+  entry = bb_original->find (&key);
   if (entry)
     return BASIC_BLOCK_FOR_FN (cfun, entry->index2);
   else
@@ -1161,7 +1164,7 @@ get_bb_copy (basic_block bb)
   gcc_assert (original_copy_bb_pool);
 
   key.index1 = bb->index;
-  entry = bb_copy.find (&key);
+  entry = bb_copy->find (&key);
   if (entry)
     return BASIC_BLOCK_FOR_FN (cfun, entry->index2);
   else
@@ -1191,7 +1194,7 @@ get_loop_copy (struct loop *loop)
   gcc_assert (original_copy_bb_pool);
 
   key.index1 = loop->num;
-  entry = loop_copy.find (&key);
+  entry = loop_copy->find (&key);
   if (entry)
     return get_loop (cfun, entry->index2);
   else
@ -4661,25 +4661,25 @@ libcall_hasher::hash (const value_type *p1)
|
||||
return hash_rtx (p1, VOIDmode, NULL, NULL, FALSE);
|
||||
}
|
||||
|
||||
typedef hash_table <libcall_hasher> libcall_table_type;
|
||||
typedef hash_table<libcall_hasher> libcall_table_type;
|
||||
|
||||
static void
|
||||
add_libcall (libcall_table_type htab, rtx libcall)
|
||||
add_libcall (libcall_table_type *htab, rtx libcall)
|
||||
{
|
||||
*htab.find_slot (libcall, INSERT) = libcall;
|
||||
*htab->find_slot (libcall, INSERT) = libcall;
|
||||
}
|
||||
|
||||
static bool
|
||||
arm_libcall_uses_aapcs_base (const_rtx libcall)
|
||||
{
|
||||
static bool init_done = false;
|
||||
static libcall_table_type libcall_htab;
|
||||
static libcall_table_type *libcall_htab = NULL;
|
||||
|
||||
if (!init_done)
|
||||
{
|
||||
init_done = true;
|
||||
|
||||
libcall_htab.create (31);
|
||||
libcall_htab = new libcall_table_type (31);
|
||||
add_libcall (libcall_htab,
|
||||
convert_optab_libfunc (sfloat_optab, SFmode, SImode));
|
||||
add_libcall (libcall_htab,
|
||||
@ -4738,7 +4738,7 @@ arm_libcall_uses_aapcs_base (const_rtx libcall)
|
||||
DFmode));
|
||||
}
|
||||
|
||||
return libcall && libcall_htab.find (libcall) != NULL;
|
||||
return libcall && libcall_htab->find (libcall) != NULL;
|
||||
}
|
||||
|
||||
static rtx
|
||||
|
@ -469,7 +469,7 @@ i386_pe_reloc_rw_mask (void)
|
||||
unsigned int
|
||||
i386_pe_section_type_flags (tree decl, const char *name, int reloc)
|
||||
{
|
||||
static hash_table <pointer_hash <unsigned int> > htab;
|
||||
static hash_table<pointer_hash<unsigned int> > *htab = NULL;
|
||||
unsigned int flags;
|
||||
unsigned int **slot;
|
||||
|
||||
@ -480,8 +480,8 @@ i386_pe_section_type_flags (tree decl, const char *name, int reloc)
|
||||
/* The names we put in the hashtable will always be the unique
|
||||
versions given to us by the stringtable, so we can just use
|
||||
their addresses as the keys. */
|
||||
if (!htab.is_created ())
|
||||
htab.create (31);
|
||||
if (!htab)
|
||||
htab = new hash_table<pointer_hash<unsigned int> > (31);
|
||||
|
||||
if (decl && TREE_CODE (decl) == FUNCTION_DECL)
|
||||
flags = SECTION_CODE;
|
||||
@ -500,7 +500,7 @@ i386_pe_section_type_flags (tree decl, const char *name, int reloc)
|
||||
flags |= SECTION_LINKONCE;
|
||||
|
||||
/* See if we already have an entry for this section. */
|
||||
slot = htab.find_slot ((const unsigned int *)name, INSERT);
|
||||
slot = htab->find_slot ((const unsigned int *)name, INSERT);
|
||||
if (!*slot)
|
||||
{
|
||||
*slot = (unsigned int *) xmalloc (sizeof (unsigned int));
|
||||
@ -771,7 +771,7 @@ static const char *
|
||||
i386_find_on_wrapper_list (const char *target)
|
||||
{
|
||||
static char first_time = 1;
|
||||
static hash_table <wrapped_symbol_hasher> wrappers;
|
||||
static hash_table<wrapped_symbol_hasher> *wrappers;
|
||||
|
||||
if (first_time)
|
||||
{
|
||||
@ -784,7 +784,7 @@ i386_find_on_wrapper_list (const char *target)
|
||||
char *bufptr;
|
||||
/* Breaks up the char array into separated strings
|
||||
strings and enter them into the hash table. */
|
||||
wrappers.create (8);
|
||||
wrappers = new hash_table_c<wrapped_symbol_hasher> (8);
|
||||
for (bufptr = wrapper_list_buffer; *bufptr; ++bufptr)
|
||||
{
|
||||
char *found = NULL;
|
||||
@ -797,12 +797,12 @@ i386_find_on_wrapper_list (const char *target)
|
||||
if (*bufptr)
|
||||
*bufptr = 0;
|
||||
if (found)
|
||||
*wrappers.find_slot (found, INSERT) = found;
|
||||
*wrappers->find_slot (found, INSERT) = found;
|
||||
}
|
||||
first_time = 0;
|
||||
}
|
||||
|
||||
return wrappers.find (target);
|
||||
return wrappers->find (target);
|
||||
}
|
||||
|
||||
#endif /* CXX_WRAP_SPEC_LIST */
|
||||
|
@ -8586,7 +8586,7 @@ bundle_state_hasher::equal (const value_type *state1,
|
||||
/* Hash table of the bundle states. The key is dfa_state and insn_num
|
||||
of the bundle states. */
|
||||
|
||||
static hash_table <bundle_state_hasher> bundle_state_table;
|
||||
static hash_table<bundle_state_hasher> *bundle_state_table;
|
||||
|
||||
/* The function inserts the BUNDLE_STATE into the hash table. The
|
||||
function returns nonzero if the bundle has been inserted into the
|
||||
@ -8597,7 +8597,7 @@ insert_bundle_state (struct bundle_state *bundle_state)
|
||||
{
|
||||
struct bundle_state **entry_ptr;
|
||||
|
||||
entry_ptr = bundle_state_table.find_slot (bundle_state, INSERT);
|
||||
entry_ptr = bundle_state_table->find_slot (bundle_state, INSERT);
|
||||
if (*entry_ptr == NULL)
|
||||
{
|
||||
bundle_state->next = index_to_bundle_states [bundle_state->insn_num];
|
||||
@ -8634,7 +8634,7 @@ insert_bundle_state (struct bundle_state *bundle_state)
|
||||
static void
|
||||
initiate_bundle_state_table (void)
|
||||
{
|
||||
bundle_state_table.create (50);
|
||||
bundle_state_table = new hash_table<bundle_state_hasher> (50);
|
||||
}
|
||||
|
||||
/* Finish work with the hash table. */
|
||||
@ -8642,7 +8642,8 @@ initiate_bundle_state_table (void)
|
||||
static void
|
||||
finish_bundle_state_table (void)
|
||||
{
|
||||
bundle_state_table.dispose ();
|
||||
delete bundle_state_table;
|
||||
bundle_state_table = NULL;
|
||||
}
|
||||
|
||||
|
||||
|
@ -16018,14 +16018,14 @@ mips_lo_sum_offset_hasher::equal (const value_type *entry,
|
||||
return rtx_equal_p (entry->base, value);
|
||||
}
|
||||
|
||||
typedef hash_table <mips_lo_sum_offset_hasher> mips_offset_table;
|
||||
typedef hash_table<mips_lo_sum_offset_hasher> mips_offset_table;
|
||||
|
||||
/* Look up symbolic constant X in HTAB, which is a hash table of
|
||||
mips_lo_sum_offsets. If OPTION is NO_INSERT, return true if X can be
|
||||
paired with a recorded LO_SUM, otherwise record X in the table. */
|
||||
|
||||
static bool
|
||||
mips_lo_sum_offset_lookup (mips_offset_table htab, rtx x,
|
||||
mips_lo_sum_offset_lookup (mips_offset_table *htab, rtx x,
|
||||
enum insert_option option)
|
||||
{
|
||||
rtx base, offset;
|
||||
@ -16038,7 +16038,7 @@ mips_lo_sum_offset_lookup (mips_offset_table htab, rtx x,
|
||||
base = UNSPEC_ADDRESS (base);
|
||||
|
||||
/* Look up the base in the hash table. */
|
||||
slot = htab.find_slot_with_hash (base, mips_hash_base (base), option);
|
||||
slot = htab->find_slot_with_hash (base, mips_hash_base (base), option);
|
||||
if (slot == NULL)
|
||||
return false;
|
||||
|
||||
@ -16068,7 +16068,7 @@ static int
|
||||
mips_record_lo_sum (rtx *loc, void *data)
|
||||
{
|
||||
if (GET_CODE (*loc) == LO_SUM)
|
||||
mips_lo_sum_offset_lookup (*(mips_offset_table*) data,
|
||||
mips_lo_sum_offset_lookup ((mips_offset_table*) data,
|
||||
XEXP (*loc, 1), INSERT);
|
||||
return 0;
|
||||
}
|
||||
@ -16078,7 +16078,7 @@ mips_record_lo_sum (rtx *loc, void *data)
|
||||
LO_SUMs in the current function. */
|
||||
|
||||
static bool
|
||||
mips_orphaned_high_part_p (mips_offset_table htab, rtx insn)
|
||||
mips_orphaned_high_part_p (mips_offset_table *htab, rtx insn)
|
||||
{
|
||||
enum mips_symbol_type type;
|
||||
rtx x, set;
|
||||
@ -16186,7 +16186,6 @@ mips_reorg_process_insns (void)
|
||||
{
|
||||
rtx insn, last_insn, subinsn, next_insn, lo_reg, delayed_reg;
|
||||
int hilo_delay;
|
||||
mips_offset_table htab;
|
||||
|
||||
/* Force all instructions to be split into their final form. */
|
||||
split_all_insns_noflow ();
|
||||
@ -16227,7 +16226,7 @@ mips_reorg_process_insns (void)
|
||||
if (TARGET_FIX_VR4130 && !ISA_HAS_MACCHI)
|
||||
cfun->machine->all_noreorder_p = false;
|
||||
|
||||
htab.create (37);
|
||||
mips_offset_table htab (37);
|
||||
|
||||
/* Make a first pass over the instructions, recording all the LO_SUMs. */
|
||||
for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
|
||||
@ -16272,7 +16271,7 @@ mips_reorg_process_insns (void)
|
||||
FOR_EACH_SUBINSN (subinsn, insn)
|
||||
if (INSN_P (subinsn))
|
||||
{
|
||||
if (mips_orphaned_high_part_p (htab, subinsn))
|
||||
if (mips_orphaned_high_part_p (&htab, subinsn))
|
||||
{
|
||||
PATTERN (subinsn) = gen_nop ();
|
||||
INSN_CODE (subinsn) = CODE_FOR_nop;
|
||||
@ -16286,7 +16285,7 @@ mips_reorg_process_insns (void)
|
||||
{
|
||||
/* INSN is a single instruction. Delete it if it's an
|
||||
orphaned high-part relocation. */
|
||||
if (mips_orphaned_high_part_p (htab, insn))
|
||||
if (mips_orphaned_high_part_p (&htab, insn))
|
||||
delete_insn (insn);
|
||||
/* Also delete cache barriers if the last instruction
|
||||
was an annulled branch. INSN will not be speculatively
|
||||
@ -16305,8 +16304,6 @@ mips_reorg_process_insns (void)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
htab.dispose ();
|
||||
}
|
||||
|
||||
/* Return true if the function has a long branch instruction. */
|
||||
|
@ -195,7 +195,7 @@ comdat_entry_hasher::equal (const value_type *entry1,
|
||||
|
||||
/* Hash table of group signature symbols. */
|
||||
|
||||
static hash_table <comdat_entry_hasher> solaris_comdat_htab;
|
||||
static hash_table<comdat_entry_hasher> *solaris_comdat_htab;
|
||||
|
||||
/* Output assembly to switch to COMDAT group section NAME with attributes
|
||||
FLAGS and group signature symbol DECL, using Sun as syntax. */
|
||||
@ -236,11 +236,11 @@ solaris_elf_asm_comdat_section (const char *name, unsigned int flags, tree decl)
|
||||
identify the missing ones without changing the affected frontents,
|
||||
remember the signature symbols and emit those not marked
|
||||
TREE_SYMBOL_REFERENCED in solaris_file_end. */
|
||||
if (!solaris_comdat_htab.is_created ())
|
||||
solaris_comdat_htab.create (37);
|
||||
if (!solaris_comdat_htab)
|
||||
solaris_comdat_htab = new hash_table<comdat_entry_hasher> (37);
|
||||
|
||||
entry.sig = signature;
|
||||
slot = solaris_comdat_htab.find_slot (&entry, INSERT);
|
||||
slot = solaris_comdat_htab->find_slot (&entry, INSERT);
|
||||
|
||||
if (*slot == NULL)
|
||||
{
|
||||
@ -284,10 +284,11 @@ solaris_define_comdat_signature (comdat_entry **slot,
|
||||
void
|
||||
solaris_file_end (void)
|
||||
{
|
||||
if (!solaris_comdat_htab.is_created ())
|
||||
if (!solaris_comdat_htab)
|
||||
return;
|
||||
|
||||
solaris_comdat_htab.traverse <void *, solaris_define_comdat_signature> (NULL);
|
||||
solaris_comdat_htab->traverse <void *, solaris_define_comdat_signature>
|
||||
(NULL);
|
||||
}
|
||||
|
||||
void
|
||||
|
@ -174,7 +174,7 @@ counts_entry::remove (value_type *entry)
|
||||
}
|
||||
|
||||
/* Hash table of count data. */
|
||||
static hash_table <counts_entry> counts_hash;
|
||||
static hash_table<counts_entry> *counts_hash;
|
||||
|
||||
/* Read in the counts file, if available. */
|
||||
|
||||
@ -215,7 +215,7 @@ read_counts_file (void)
|
||||
tag = gcov_read_unsigned ();
|
||||
bbg_file_stamp = crc32_unsigned (bbg_file_stamp, tag);
|
||||
|
||||
counts_hash.create (10);
|
||||
counts_hash = new hash_table<counts_entry> (10);
|
||||
while ((tag = gcov_read_unsigned ()))
|
||||
{
|
||||
gcov_unsigned_t length;
|
||||
@ -270,7 +270,7 @@ read_counts_file (void)
|
||||
elt.ident = fn_ident;
|
||||
elt.ctr = GCOV_COUNTER_FOR_TAG (tag);
|
||||
|
||||
slot = counts_hash.find_slot (&elt, INSERT);
|
||||
slot = counts_hash->find_slot (&elt, INSERT);
|
||||
entry = *slot;
|
||||
if (!entry)
|
||||
{
|
||||
@ -291,14 +291,16 @@ read_counts_file (void)
|
||||
error ("checksum is (%x,%x) instead of (%x,%x)",
|
||||
entry->lineno_checksum, entry->cfg_checksum,
|
||||
lineno_checksum, cfg_checksum);
|
||||
counts_hash.dispose ();
|
||||
delete counts_hash;
|
||||
counts_hash = NULL;
|
||||
break;
|
||||
}
|
||||
else if (entry->summary.num != n_counts)
|
||||
{
|
||||
error ("Profile data for function %u is corrupted", fn_ident);
|
||||
error ("number of counters is %d instead of %d", entry->summary.num, n_counts);
|
||||
counts_hash.dispose ();
|
||||
delete counts_hash;
|
||||
counts_hash = NULL;
|
||||
break;
|
||||
}
|
||||
else if (elt.ctr >= GCOV_COUNTERS_SUMMABLE)
|
||||
@ -324,7 +326,8 @@ read_counts_file (void)
|
||||
{
|
||||
error (is_error < 0 ? "%qs has overflowed" : "%qs is corrupted",
|
||||
da_file_name);
|
||||
counts_hash.dispose ();
|
||||
delete counts_hash;
|
||||
counts_hash = NULL;
|
||||
break;
|
||||
}
|
||||
}
|
||||
@ -342,7 +345,7 @@ get_coverage_counts (unsigned counter, unsigned expected,
|
||||
counts_entry_t *entry, elt;
|
||||
|
||||
/* No hash table, no counts. */
|
||||
if (!counts_hash.is_created ())
|
||||
if (!counts_hash)
|
||||
{
|
||||
static int warned = 0;
|
||||
|
||||
@ -358,7 +361,7 @@ get_coverage_counts (unsigned counter, unsigned expected,
|
||||
|
||||
elt.ident = current_function_funcdef_no + 1;
|
||||
elt.ctr = counter;
|
||||
entry = counts_hash.find (&elt);
|
||||
entry = counts_hash->find (&elt);
|
||||
if (!entry || !entry->summary.num)
|
||||
/* The function was not emitted, or is weak and not chosen in the
|
||||
final executable. Silently fail, because there's nothing we
|
||||
|
@ -1,3 +1,8 @@
|
||||
2014-06-24 Trevor Saunders <tsaunders@mozilla.com>
|
||||
|
||||
* class.c, semantics.c, tree.c, vtable-class-hierarchy.c:
|
||||
Adjust.
|
||||
|
||||
2014-06-24 Jakub Jelinek <jakub@redhat.com>
|
||||
|
||||
* parser.c (cp_parser_omp_for_loop): For
|
||||
|
@ -6709,7 +6709,7 @@ finish_struct (tree t, tree attributes)
|
||||
}
|
||||
|
||||
/* Hash table to avoid endless recursion when handling references. */
|
||||
static hash_table <pointer_hash <tree_node> > fixed_type_or_null_ref_ht;
|
||||
static hash_table<pointer_hash<tree_node> > *fixed_type_or_null_ref_ht;
|
||||
|
||||
/* Return the dynamic type of INSTANCE, if known.
|
||||
Used to determine whether the virtual function table is needed
|
||||
@ -6826,8 +6826,9 @@ fixed_type_or_null (tree instance, int *nonnull, int *cdtorp)
|
||||
else if (TREE_CODE (TREE_TYPE (instance)) == REFERENCE_TYPE)
|
||||
{
|
||||
/* We only need one hash table because it is always left empty. */
|
||||
if (!fixed_type_or_null_ref_ht.is_created ())
|
||||
fixed_type_or_null_ref_ht.create (37);
|
||||
if (!fixed_type_or_null_ref_ht)
|
||||
fixed_type_or_null_ref_ht
|
||||
= new hash_table<pointer_hash<tree_node> > (37);
|
||||
|
||||
/* Reference variables should be references to objects. */
|
||||
if (nonnull)
|
||||
@ -6839,15 +6840,15 @@ fixed_type_or_null (tree instance, int *nonnull, int *cdtorp)
|
||||
if (VAR_P (instance)
|
||||
&& DECL_INITIAL (instance)
|
||||
&& !type_dependent_expression_p_push (DECL_INITIAL (instance))
|
||||
&& !fixed_type_or_null_ref_ht.find (instance))
|
||||
&& !fixed_type_or_null_ref_ht->find (instance))
|
||||
{
|
||||
tree type;
|
||||
tree_node **slot;
|
||||
|
||||
slot = fixed_type_or_null_ref_ht.find_slot (instance, INSERT);
|
||||
slot = fixed_type_or_null_ref_ht->find_slot (instance, INSERT);
|
||||
*slot = instance;
|
||||
type = RECUR (DECL_INITIAL (instance));
|
||||
fixed_type_or_null_ref_ht.remove_elt (instance);
|
||||
fixed_type_or_null_ref_ht->remove_elt (instance);
|
||||
|
||||
return type;
|
||||
}
|
||||
|
@ -4049,9 +4049,11 @@ expand_or_defer_fn (tree fn)
|
||||
|
||||
struct nrv_data
|
||||
{
|
||||
nrv_data () : visited (37) {}
|
||||
|
||||
tree var;
|
||||
tree result;
|
||||
hash_table <pointer_hash <tree_node> > visited;
|
||||
hash_table<pointer_hash <tree_node> > visited;
|
||||
};
|
||||
|
||||
/* Helper function for walk_tree, used by finalize_nrv below. */
|
||||
@ -4131,9 +4133,7 @@ finalize_nrv (tree *tp, tree var, tree result)
|
||||
|
||||
data.var = var;
|
||||
data.result = result;
|
||||
data.visited.create (37);
|
||||
cp_walk_tree (tp, finalize_nrv_r, &data, 0);
|
||||
data.visited.dispose ();
|
||||
}
|
||||
|
||||
/* Create CP_OMP_CLAUSE_INFO for clause C. Returns true if it is invalid. */
|
||||
|
@ -2092,8 +2092,8 @@ static tree
|
||||
verify_stmt_tree_r (tree* tp, int * /*walk_subtrees*/, void* data)
|
||||
{
|
||||
tree t = *tp;
|
||||
hash_table <pointer_hash <tree_node> > *statements
|
||||
= static_cast <hash_table <pointer_hash <tree_node> > *> (data);
|
||||
hash_table<pointer_hash <tree_node> > *statements
|
||||
= static_cast <hash_table<pointer_hash <tree_node> > *> (data);
|
||||
tree_node **slot;
|
||||
|
||||
if (!STATEMENT_CODE_P (TREE_CODE (t)))
|
||||
@ -2116,10 +2116,8 @@ verify_stmt_tree_r (tree* tp, int * /*walk_subtrees*/, void* data)
|
||||
void
|
||||
verify_stmt_tree (tree t)
|
||||
{
|
||||
hash_table <pointer_hash <tree_node> > statements;
|
||||
statements.create (37);
|
||||
hash_table<pointer_hash <tree_node> > statements (37);
|
||||
cp_walk_tree (&t, verify_stmt_tree_r, &statements, NULL);
|
||||
statements.dispose ();
|
||||
}
|
||||
|
||||
/* Check if the type T depends on a type with no linkage and if so, return
|
||||
|
@ -1028,7 +1028,7 @@ register_all_pairs (tree body)
|
||||
|
||||
if (vtbl_ptr_array->length() > 0
|
||||
|| (current->is_used
|
||||
|| (current->registered.size() > 0)))
|
||||
|| (current->registered->size() > 0)))
|
||||
{
|
||||
insert_call_to_register_pair (vtbl_ptr_array,
|
||||
arg1, arg2, size_hint_arg, str1,
|
||||
@ -1114,7 +1114,7 @@ write_out_vtv_count_data (void)
|
||||
{
|
||||
struct vtbl_map_node *current = vtbl_map_nodes_vec[i];
|
||||
if (!current->is_used
|
||||
&& current->registered.size() == 0)
|
||||
&& current->registered->size() == 0)
|
||||
unused_vtbl_map_vars++;
|
||||
}
|
||||
|
||||
|
gcc/cselib.c | 41
@ -145,10 +145,10 @@ cselib_hasher::equal (const value_type *v, const compare_type *x_arg)
|
||||
}
|
||||
|
||||
/* A table that enables us to look up elts by their value. */
|
||||
static hash_table <cselib_hasher> cselib_hash_table;
|
||||
static hash_table<cselib_hasher> *cselib_hash_table;
|
||||
|
||||
/* A table to hold preserved values. */
|
||||
static hash_table <cselib_hasher> cselib_preserved_hash_table;
|
||||
static hash_table<cselib_hasher> *cselib_preserved_hash_table;
|
||||
|
||||
/* This is a global so we don't have to pass this through every function.
|
||||
It is used in new_elt_loc_list to set SETTING_INSN. */
|
||||
@ -499,13 +499,13 @@ preserve_constants_and_equivs (cselib_val **x, void *info ATTRIBUTE_UNUSED)
|
||||
GET_MODE (v->val_rtx), v->val_rtx, VOIDmode
|
||||
};
|
||||
cselib_val **slot
|
||||
= cselib_preserved_hash_table.find_slot_with_hash (&lookup,
|
||||
= cselib_preserved_hash_table->find_slot_with_hash (&lookup,
|
||||
v->hash, INSERT);
|
||||
gcc_assert (!*slot);
|
||||
*slot = v;
|
||||
}
|
||||
|
||||
cselib_hash_table.clear_slot (x);
|
||||
cselib_hash_table->clear_slot (x);
|
||||
|
||||
return 1;
|
||||
}
|
||||
@ -546,10 +546,11 @@ cselib_reset_table (unsigned int num)
|
||||
}
|
||||
|
||||
if (cselib_preserve_constants)
|
||||
cselib_hash_table.traverse <void *, preserve_constants_and_equivs> (NULL);
|
||||
cselib_hash_table->traverse <void *, preserve_constants_and_equivs>
|
||||
(NULL);
|
||||
else
|
||||
{
|
||||
cselib_hash_table.empty ();
|
||||
cselib_hash_table->empty ();
|
||||
gcc_checking_assert (!cselib_any_perm_equivs);
|
||||
}
|
||||
|
||||
@ -581,10 +582,10 @@ cselib_find_slot (enum machine_mode mode, rtx x, hashval_t hash,
|
||||
cselib_val **slot = NULL;
|
||||
cselib_hasher::compare_type lookup = { mode, x, memmode };
|
||||
if (cselib_preserve_constants)
|
||||
slot = cselib_preserved_hash_table.find_slot_with_hash (&lookup, hash,
|
||||
NO_INSERT);
|
||||
slot = cselib_preserved_hash_table->find_slot_with_hash (&lookup, hash,
|
||||
NO_INSERT);
|
||||
if (!slot)
|
||||
slot = cselib_hash_table.find_slot_with_hash (&lookup, hash, insert);
|
||||
slot = cselib_hash_table->find_slot_with_hash (&lookup, hash, insert);
|
||||
return slot;
|
||||
}
|
||||
|
||||
@ -662,7 +663,7 @@ discard_useless_values (cselib_val **x, void *info ATTRIBUTE_UNUSED)
|
||||
cselib_discard_hook (v);
|
||||
|
||||
CSELIB_VAL_PTR (v->val_rtx) = NULL;
|
||||
cselib_hash_table.clear_slot (x);
|
||||
cselib_hash_table->clear_slot (x);
|
||||
unchain_one_value (v);
|
||||
n_useless_values--;
|
||||
}
|
||||
@ -683,7 +684,7 @@ remove_useless_values (void)
|
||||
do
|
||||
{
|
||||
values_became_useless = 0;
|
||||
cselib_hash_table.traverse <void *, discard_useless_locs> (NULL);
|
||||
cselib_hash_table->traverse <void *, discard_useless_locs> (NULL);
|
||||
}
|
||||
while (values_became_useless);
|
||||
|
||||
@ -702,7 +703,7 @@ remove_useless_values (void)
|
||||
n_debug_values -= n_useless_debug_values;
|
||||
n_useless_debug_values = 0;
|
||||
|
||||
cselib_hash_table.traverse <void *, discard_useless_values> (NULL);
|
||||
cselib_hash_table->traverse <void *, discard_useless_values> (NULL);
|
||||
|
||||
gcc_assert (!n_useless_values);
|
||||
}
|
||||
@ -2703,7 +2704,7 @@ cselib_process_insn (rtx insn)
|
||||
quadratic behavior for very large hashtables with very few
|
||||
useless elements. */
|
||||
&& ((unsigned int)n_useless_values
|
||||
> (cselib_hash_table.elements () - n_debug_values) / 4))
|
||||
> (cselib_hash_table->elements () - n_debug_values) / 4))
|
||||
remove_useless_values ();
|
||||
}
|
||||
|
||||
@ -2744,9 +2745,9 @@ cselib_init (int record_what)
|
||||
}
|
||||
used_regs = XNEWVEC (unsigned int, cselib_nregs);
|
||||
n_used_regs = 0;
|
||||
cselib_hash_table.create (31);
|
||||
cselib_hash_table = new hash_table<cselib_hasher> (31);
|
||||
if (cselib_preserve_constants)
|
||||
cselib_preserved_hash_table.create (31);
|
||||
cselib_preserved_hash_table = new hash_table<cselib_hasher> (31);
|
||||
next_uid = 1;
|
||||
}
|
||||
|
||||
@ -2766,9 +2767,11 @@ cselib_finish (void)
|
||||
free_alloc_pool (cselib_val_pool);
|
||||
free_alloc_pool (value_pool);
|
||||
cselib_clear_table ();
|
||||
cselib_hash_table.dispose ();
|
||||
delete cselib_hash_table;
|
||||
cselib_hash_table = NULL;
|
||||
if (preserved)
|
||||
cselib_preserved_hash_table.dispose ();
|
||||
delete cselib_preserved_hash_table;
|
||||
cselib_preserved_hash_table = NULL;
|
||||
free (used_regs);
|
||||
used_regs = 0;
|
||||
n_useless_values = 0;
|
||||
@ -2857,9 +2860,9 @@ void
|
||||
dump_cselib_table (FILE *out)
|
||||
{
|
||||
fprintf (out, "cselib hash table:\n");
|
||||
cselib_hash_table.traverse <FILE *, dump_cselib_val> (out);
|
||||
cselib_hash_table->traverse <FILE *, dump_cselib_val> (out);
|
||||
fprintf (out, "cselib preserved hash table:\n");
|
||||
cselib_preserved_hash_table.traverse <FILE *, dump_cselib_val> (out);
|
||||
cselib_preserved_hash_table->traverse <FILE *, dump_cselib_val> (out);
|
||||
if (first_containing_mem != &dummy_val)
|
||||
{
|
||||
fputs ("first mem ", out);
|
||||
|
@ -49,7 +49,7 @@ streamer_string_index (struct output_block *ob, const char *s, unsigned int len,
|
||||
s_slot.len = len;
|
||||
s_slot.slot_num = 0;
|
||||
|
||||
slot = ob->string_hash_table.find_slot (&s_slot, INSERT);
|
||||
slot = ob->string_hash_table->find_slot (&s_slot, INSERT);
|
||||
if (*slot == NULL)
|
||||
{
|
||||
struct lto_output_stream *string_stream = ob->string_stream;
|
||||
|
gcc/dse.c | 11
@ -658,7 +658,7 @@ invariant_group_base_hasher::hash (const value_type *gi)
|
||||
}
|
||||
|
||||
/* Tables of group_info structures, hashed by base value. */
|
||||
static hash_table <invariant_group_base_hasher> rtx_group_table;
|
||||
static hash_table<invariant_group_base_hasher> *rtx_group_table;
|
||||
|
||||
|
||||
/* Get the GROUP for BASE. Add a new group if it is not there. */
|
||||
@ -675,7 +675,7 @@ get_group_info (rtx base)
|
||||
/* Find the store_base_info structure for BASE, creating a new one
|
||||
if necessary. */
|
||||
tmp_gi.rtx_base = base;
|
||||
slot = rtx_group_table.find_slot (&tmp_gi, INSERT);
|
||||
slot = rtx_group_table->find_slot (&tmp_gi, INSERT);
|
||||
gi = (group_info_t) *slot;
|
||||
}
|
||||
else
|
||||
@ -765,7 +765,7 @@ dse_step0 (void)
|
||||
= create_alloc_pool ("deferred_change_pool",
|
||||
sizeof (struct deferred_change), 10);
|
||||
|
||||
rtx_group_table.create (11);
|
||||
rtx_group_table = new hash_table<invariant_group_base_hasher> (11);
|
||||
|
||||
bb_table = XNEWVEC (bb_info_t, last_basic_block_for_fn (cfun));
|
||||
rtx_group_next_id = 0;
|
||||
@ -2829,7 +2829,7 @@ dse_step1 (void)
|
||||
|
||||
BITMAP_FREE (regs_live);
|
||||
cselib_finish ();
|
||||
rtx_group_table.empty ();
|
||||
rtx_group_table->empty ();
|
||||
}
|
||||
|
||||
|
||||
@ -3654,7 +3654,8 @@ dse_step7 (void)
|
||||
|
||||
end_alias_analysis ();
|
||||
free (bb_table);
|
||||
rtx_group_table.dispose ();
|
||||
delete rtx_group_table;
|
||||
rtx_group_table = NULL;
|
||||
rtx_group_vec.release ();
|
||||
BITMAP_FREE (all_blocks);
|
||||
BITMAP_FREE (scratch);
|
||||
|
@ -182,7 +182,7 @@ trace_info_hasher::equal (const value_type *a, const compare_type *b)
|
||||
/* The variables making up the pseudo-cfg, as described above. */
|
||||
static vec<dw_trace_info> trace_info;
|
||||
static vec<dw_trace_info_ref> trace_work_list;
|
||||
static hash_table <trace_info_hasher> trace_index;
|
||||
static hash_table<trace_info_hasher> *trace_index;
|
||||
|
||||
/* A vector of call frame insns for the CIE. */
|
||||
cfi_vec cie_cfi_vec;
|
||||
@ -307,7 +307,7 @@ get_trace_info (rtx insn)
|
||||
{
|
||||
dw_trace_info dummy;
|
||||
dummy.head = insn;
|
||||
return trace_index.find_with_hash (&dummy, INSN_UID (insn));
|
||||
return trace_index->find_with_hash (&dummy, INSN_UID (insn));
|
||||
}
|
||||
|
||||
static bool
|
||||
@ -2774,7 +2774,8 @@ create_pseudo_cfg (void)
|
||||
|
||||
/* Create the trace index after we've finished building trace_info,
|
||||
avoiding stale pointer problems due to reallocation. */
|
||||
trace_index.create (trace_info.length ());
|
||||
trace_index
|
||||
= new hash_table<trace_info_hasher> (trace_info.length ());
|
||||
dw_trace_info *tp;
|
||||
FOR_EACH_VEC_ELT (trace_info, i, tp)
|
||||
{
|
||||
@ -2785,7 +2786,7 @@ create_pseudo_cfg (void)
|
||||
rtx_name[(int) GET_CODE (tp->head)], INSN_UID (tp->head),
|
||||
tp->switch_sections ? " (section switch)" : "");
|
||||
|
||||
slot = trace_index.find_slot_with_hash (tp, INSN_UID (tp->head), INSERT);
|
||||
slot = trace_index->find_slot_with_hash (tp, INSN_UID (tp->head), INSERT);
|
||||
gcc_assert (*slot == NULL);
|
||||
*slot = tp;
|
||||
}
|
||||
@ -2936,7 +2937,8 @@ execute_dwarf2_frame (void)
|
||||
}
|
||||
trace_info.release ();
|
||||
|
||||
trace_index.dispose ();
|
||||
delete trace_index;
|
||||
trace_index = NULL;
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
gcc/dwarf2out.c | 124
@ -6745,21 +6745,21 @@ cu_hash_table_entry_hasher::remove (value_type *entry)
|
||||
}
|
||||
}
|
||||
|
||||
typedef hash_table <cu_hash_table_entry_hasher> cu_hash_type;
|
||||
typedef hash_table<cu_hash_table_entry_hasher> cu_hash_type;
|
||||
|
||||
/* Check whether we have already seen this CU and set up SYM_NUM
|
||||
accordingly. */
|
||||
static int
|
||||
check_duplicate_cu (dw_die_ref cu, cu_hash_type htable, unsigned int *sym_num)
|
||||
check_duplicate_cu (dw_die_ref cu, cu_hash_type *htable, unsigned int *sym_num)
|
||||
{
|
||||
struct cu_hash_table_entry dummy;
|
||||
struct cu_hash_table_entry **slot, *entry, *last = &dummy;
|
||||
|
||||
dummy.max_comdat_num = 0;
|
||||
|
||||
slot = htable.find_slot_with_hash (cu,
|
||||
htab_hash_string (cu->die_id.die_symbol),
|
||||
INSERT);
|
||||
slot = htable->find_slot_with_hash (cu,
|
||||
htab_hash_string (cu->die_id.die_symbol),
|
||||
INSERT);
|
||||
entry = *slot;
|
||||
|
||||
for (; entry; last = entry, entry = entry->next)
|
||||
@ -6785,14 +6785,14 @@ check_duplicate_cu (dw_die_ref cu, cu_hash_type htable, unsigned int *sym_num)
|
||||
|
||||
/* Record SYM_NUM to record of CU in HTABLE. */
|
||||
static void
|
||||
record_comdat_symbol_number (dw_die_ref cu, cu_hash_type htable,
|
||||
record_comdat_symbol_number (dw_die_ref cu, cu_hash_type *htable,
|
||||
unsigned int sym_num)
|
||||
{
|
||||
struct cu_hash_table_entry **slot, *entry;
|
||||
|
||||
slot = htable.find_slot_with_hash (cu,
|
||||
htab_hash_string (cu->die_id.die_symbol),
|
||||
NO_INSERT);
|
||||
slot = htable->find_slot_with_hash (cu,
|
||||
htab_hash_string (cu->die_id.die_symbol),
|
||||
NO_INSERT);
|
||||
entry = *slot;
|
||||
|
||||
entry->max_comdat_num = sym_num;
|
||||
@ -6808,7 +6808,6 @@ break_out_includes (dw_die_ref die)
|
||||
dw_die_ref c;
|
||||
dw_die_ref unit = NULL;
|
||||
limbo_die_node *node, **pnode;
|
||||
cu_hash_type cu_hash_table;
|
||||
|
||||
c = die->die_child;
|
||||
if (c) do {
|
||||
@ -6841,7 +6840,7 @@ break_out_includes (dw_die_ref die)
|
||||
#endif
|
||||
|
||||
assign_symbol_names (die);
|
||||
cu_hash_table.create (10);
|
||||
cu_hash_type cu_hash_table (10);
|
||||
for (node = limbo_die_list, pnode = &limbo_die_list;
|
||||
node;
|
||||
node = node->next)
|
||||
@ -6849,7 +6848,7 @@ break_out_includes (dw_die_ref die)
|
||||
int is_dupl;
|
||||
|
||||
compute_section_prefix (node->die);
|
||||
is_dupl = check_duplicate_cu (node->die, cu_hash_table,
|
||||
is_dupl = check_duplicate_cu (node->die, &cu_hash_table,
|
||||
&comdat_symbol_number);
|
||||
assign_symbol_names (node->die);
|
||||
if (is_dupl)
|
||||
@ -6857,11 +6856,10 @@ break_out_includes (dw_die_ref die)
|
||||
else
|
||||
{
|
||||
pnode = &node->next;
|
||||
record_comdat_symbol_number (node->die, cu_hash_table,
|
||||
record_comdat_symbol_number (node->die, &cu_hash_table,
|
||||
comdat_symbol_number);
|
||||
}
|
||||
}
|
||||
cu_hash_table.dispose ();
|
||||
}
|
||||
|
||||
/* Return non-zero if this DIE is a declaration. */
|
||||
@ -7070,7 +7068,7 @@ decl_table_entry_hasher::equal (const value_type *entry1,
|
||||
return entry1->orig == entry2;
|
||||
}
|
||||
|
||||
typedef hash_table <decl_table_entry_hasher> decl_hash_type;
|
||||
typedef hash_table<decl_table_entry_hasher> decl_hash_type;
|
||||
|
||||
/* Copy DIE and its ancestors, up to, but not including, the compile unit
|
||||
or type unit entry, to a new tree. Adds the new tree to UNIT and returns
|
||||
@ -7078,7 +7076,8 @@ typedef hash_table <decl_table_entry_hasher> decl_hash_type;
|
||||
to check if the ancestor has already been copied into UNIT. */
|
||||
|
||||
static dw_die_ref
|
||||
copy_ancestor_tree (dw_die_ref unit, dw_die_ref die, decl_hash_type decl_table)
|
||||
copy_ancestor_tree (dw_die_ref unit, dw_die_ref die,
|
||||
decl_hash_type *decl_table)
|
||||
{
|
||||
dw_die_ref parent = die->die_parent;
|
||||
dw_die_ref new_parent = unit;
|
||||
@ -7086,11 +7085,11 @@ copy_ancestor_tree (dw_die_ref unit, dw_die_ref die, decl_hash_type decl_table)
|
||||
decl_table_entry **slot = NULL;
|
||||
struct decl_table_entry *entry = NULL;
|
||||
|
||||
if (decl_table.is_created ())
|
||||
if (decl_table)
|
||||
{
|
||||
/* Check if the entry has already been copied to UNIT. */
|
||||
slot = decl_table.find_slot_with_hash (die, htab_hash_pointer (die),
|
||||
INSERT);
|
||||
slot = decl_table->find_slot_with_hash (die, htab_hash_pointer (die),
|
||||
INSERT);
|
||||
if (*slot != HTAB_EMPTY_ENTRY)
|
||||
{
|
||||
entry = *slot;
|
||||
@ -7116,7 +7115,7 @@ copy_ancestor_tree (dw_die_ref unit, dw_die_ref die, decl_hash_type decl_table)
|
||||
copy = clone_as_declaration (die);
|
||||
add_child_die (new_parent, copy);
|
||||
|
||||
if (decl_table.is_created ())
|
||||
if (decl_table)
|
||||
{
|
||||
/* Record the pointer to the copy. */
|
||||
entry->copy = copy;
|
||||
@ -7171,7 +7170,7 @@ copy_declaration_context (dw_die_ref unit, dw_die_ref die)
|
||||
if (decl->die_parent != NULL
|
||||
&& !is_unit_die (decl->die_parent))
|
||||
{
|
||||
new_decl = copy_ancestor_tree (unit, decl, decl_hash_type ());
|
||||
new_decl = copy_ancestor_tree (unit, decl, NULL);
|
||||
if (new_decl != NULL)
|
||||
{
|
||||
remove_AT (new_decl, DW_AT_signature);
|
||||
@ -7396,7 +7395,7 @@ break_out_comdat_types (dw_die_ref die)
|
||||
Enter all the cloned children into the hash table decl_table. */
|
||||
|
||||
static dw_die_ref
|
||||
clone_tree_partial (dw_die_ref die, decl_hash_type decl_table)
|
||||
clone_tree_partial (dw_die_ref die, decl_hash_type *decl_table)
|
||||
{
|
||||
dw_die_ref c;
|
||||
dw_die_ref clone;
|
||||
@ -7408,8 +7407,8 @@ clone_tree_partial (dw_die_ref die, decl_hash_type decl_table)
|
||||
else
|
||||
clone = clone_die (die);
|
||||
|
||||
slot = decl_table.find_slot_with_hash (die,
|
||||
htab_hash_pointer (die), INSERT);
|
||||
slot = decl_table->find_slot_with_hash (die,
|
||||
htab_hash_pointer (die), INSERT);
|
||||
|
||||
/* Assert that DIE isn't in the hash table yet. If it would be there
|
||||
before, the ancestors would be necessarily there as well, therefore
|
||||
@ -7433,7 +7432,7 @@ clone_tree_partial (dw_die_ref die, decl_hash_type decl_table)
|
||||
type_unit). */
|
||||
|
||||
static void
|
||||
copy_decls_walk (dw_die_ref unit, dw_die_ref die, decl_hash_type decl_table)
|
||||
copy_decls_walk (dw_die_ref unit, dw_die_ref die, decl_hash_type *decl_table)
|
||||
{
|
||||
dw_die_ref c;
|
||||
dw_attr_ref a;
|
||||
@ -7450,8 +7449,9 @@ copy_decls_walk (dw_die_ref unit, dw_die_ref die, decl_hash_type decl_table)
|
||||
if (targ->die_mark != 0 || targ->comdat_type_p)
|
||||
continue;
|
||||
|
||||
slot = decl_table.find_slot_with_hash (targ, htab_hash_pointer (targ),
|
||||
INSERT);
|
||||
slot = decl_table->find_slot_with_hash (targ,
|
||||
htab_hash_pointer (targ),
|
||||
INSERT);
|
||||
|
||||
if (*slot != HTAB_EMPTY_ENTRY)
|
||||
{
|
||||
@ -7530,12 +7530,9 @@ copy_decls_walk (dw_die_ref unit, dw_die_ref die, decl_hash_type decl_table)
|
||||
static void
|
||||
copy_decls_for_unworthy_types (dw_die_ref unit)
|
||||
{
|
||||
decl_hash_type decl_table;
|
||||
|
||||
mark_dies (unit);
|
||||
decl_table.create (10);
|
||||
copy_decls_walk (unit, unit, decl_table);
|
||||
decl_table.dispose ();
|
||||
decl_hash_type decl_table (10);
|
||||
copy_decls_walk (unit, unit, &decl_table);
|
||||
unmark_dies (unit);
|
||||
}
|
||||
|
||||
@ -7627,18 +7624,18 @@ external_ref_hasher::equal (const value_type *r1, const compare_type *r2)
|
||||
return r1->type == r2->type;
|
||||
}
|
||||
|
||||
typedef hash_table <external_ref_hasher> external_ref_hash_type;
|
||||
typedef hash_table<external_ref_hasher> external_ref_hash_type;
|
||||
|
||||
/* Return a pointer to the external_ref for references to DIE. */
|
||||
|
||||
static struct external_ref *
|
||||
lookup_external_ref (external_ref_hash_type map, dw_die_ref die)
|
||||
lookup_external_ref (external_ref_hash_type *map, dw_die_ref die)
|
||||
{
|
||||
struct external_ref ref, *ref_p;
|
||||
external_ref **slot;
|
||||
|
||||
ref.type = die;
|
||||
slot = map.find_slot (&ref, INSERT);
|
||||
slot = map->find_slot (&ref, INSERT);
|
||||
if (*slot != HTAB_EMPTY_ENTRY)
|
||||
return *slot;
|
||||
|
||||
@ -7654,7 +7651,7 @@ lookup_external_ref (external_ref_hash_type map, dw_die_ref die)
|
||||
references, remember how many we've seen. */
|
||||
|
||||
static void
|
||||
optimize_external_refs_1 (dw_die_ref die, external_ref_hash_type map)
|
||||
optimize_external_refs_1 (dw_die_ref die, external_ref_hash_type *map)
|
||||
{
|
||||
dw_die_ref c;
|
||||
dw_attr_ref a;
|
||||
@ -7725,13 +7722,12 @@ dwarf2_build_local_stub (external_ref **slot, dw_die_ref data)
|
||||
them which will be applied in build_abbrev_table. This is useful because
|
||||
references to local DIEs are smaller. */
|
||||
|
||||
static external_ref_hash_type
|
||||
static external_ref_hash_type *
|
||||
optimize_external_refs (dw_die_ref die)
|
||||
{
|
||||
external_ref_hash_type map;
|
||||
map.create (10);
|
||||
external_ref_hash_type *map = new external_ref_hash_type (10);
|
||||
optimize_external_refs_1 (die, map);
|
||||
map.traverse <dw_die_ref, dwarf2_build_local_stub> (die);
|
||||
map->traverse <dw_die_ref, dwarf2_build_local_stub> (die);
|
||||
return map;
|
||||
}
|
||||
|
||||
@ -7741,7 +7737,7 @@ optimize_external_refs (dw_die_ref die)
|
||||
die are visited recursively. */
|
||||
|
||||
 static void
-build_abbrev_table (dw_die_ref die, external_ref_hash_type extern_map)
+build_abbrev_table (dw_die_ref die, external_ref_hash_type *extern_map)
 {
 unsigned long abbrev_id;
 unsigned int n_alloc;
@@ -8950,7 +8946,6 @@ output_comp_unit (dw_die_ref die, int output_if_empty)
 {
 const char *secname, *oldsym;
 char *tmp;
-external_ref_hash_type extern_map;

 /* Unless we are outputting main CU, we may throw away empty ones. */
 if (!output_if_empty && die->die_child == NULL)
@@ -8963,11 +8958,11 @@ output_comp_unit (dw_die_ref die, int output_if_empty)
 this CU so we know which get local refs. */
 mark_dies (die);

-extern_map = optimize_external_refs (die);
+external_ref_hash_type *extern_map = optimize_external_refs (die);

 build_abbrev_table (die, extern_map);

-extern_map.dispose ();
+delete extern_map;

 /* Initialize the beginning DIE offset - and calculate sizes/offsets. */
 next_die_offset = DWARF_COMPILE_UNIT_HEADER_SIZE;
@@ -9142,16 +9137,16 @@ output_comdat_type_unit (comdat_type_node *node)
 #if defined (OBJECT_FORMAT_ELF)
 tree comdat_key;
 #endif
-external_ref_hash_type extern_map;

 /* First mark all the DIEs in this CU so we know which get local refs. */
 mark_dies (node->root_die);

-extern_map = optimize_external_refs (node->root_die);
+external_ref_hash_type *extern_map = optimize_external_refs (node->root_die);

 build_abbrev_table (node->root_die, extern_map);

-extern_map.dispose ();
+delete extern_map;
+extern_map = NULL;

 /* Initialize the beginning DIE offset - and calculate sizes/offsets. */
 next_die_offset = DWARF_COMDAT_TYPE_UNIT_HEADER_SIZE;
@@ -21892,7 +21887,7 @@ macinfo_entry_hasher::equal (const value_type *entry1,
 return !strcmp (entry1->info, entry2->info);
 }

-typedef hash_table <macinfo_entry_hasher> macinfo_hash_type;
+typedef hash_table<macinfo_entry_hasher> macinfo_hash_type;

 /* Output a single .debug_macinfo entry. */

@@ -21982,7 +21977,7 @@ output_macinfo_op (macinfo_entry *ref)

 static unsigned
 optimize_macinfo_range (unsigned int idx, vec<macinfo_entry, va_gc> *files,
-macinfo_hash_type *macinfo_htab)
+macinfo_hash_type **macinfo_htab)
 {
 macinfo_entry *first, *second, *cur, *inc;
 char linebuf[sizeof (HOST_WIDE_INT) * 3 + 1];
@@ -22069,10 +22064,10 @@ optimize_macinfo_range (unsigned int idx, vec<macinfo_entry, va_gc> *files,
 inc->code = DW_MACRO_GNU_transparent_include;
 inc->lineno = 0;
 inc->info = ggc_strdup (grp_name);
-if (!macinfo_htab->is_created ())
-macinfo_htab->create (10);
+if (!*macinfo_htab)
+*macinfo_htab = new macinfo_hash_type (10);
 /* Avoid emitting duplicates. */
-slot = macinfo_htab->find_slot (inc, INSERT);
+slot = (*macinfo_htab)->find_slot (inc, INSERT);
 if (*slot != NULL)
 {
 inc->code = 0;
@@ -22092,7 +22087,7 @@ optimize_macinfo_range (unsigned int idx, vec<macinfo_entry, va_gc> *files,
 else
 {
 *slot = inc;
-inc->lineno = macinfo_htab->elements ();
+inc->lineno = (*macinfo_htab)->elements ();
 output_macinfo_op (inc);
 }
 return count;
@@ -22143,7 +22138,7 @@ output_macinfo (void)
 unsigned long length = vec_safe_length (macinfo_table);
 macinfo_entry *ref;
 vec<macinfo_entry, va_gc> *files = NULL;
-macinfo_hash_type macinfo_htab;
+macinfo_hash_type *macinfo_htab = NULL;

 if (! length)
 return;
@@ -22216,10 +22211,11 @@ output_macinfo (void)
 ref->code = 0;
 }

-if (!macinfo_htab.is_created ())
+if (!macinfo_htab)
 return;

-macinfo_htab.dispose ();
+delete macinfo_htab;
+macinfo_htab = NULL;

 /* If any DW_MACRO_GNU_transparent_include were used, on those
 DW_MACRO_GNU_transparent_include entries terminate the
@@ -24081,14 +24077,14 @@ loc_list_hasher::equal (const value_type *a, const compare_type *b)
 return a == NULL && b == NULL;
 }

-typedef hash_table <loc_list_hasher> loc_list_hash_type;
+typedef hash_table<loc_list_hasher> loc_list_hash_type;


 /* Recursively optimize location lists referenced from DIE
 children and share them whenever possible. */

 static void
-optimize_location_lists_1 (dw_die_ref die, loc_list_hash_type htab)
+optimize_location_lists_1 (dw_die_ref die, loc_list_hash_type *htab)
 {
 dw_die_ref c;
 dw_attr_ref a;
@@ -24102,7 +24098,7 @@ optimize_location_lists_1 (dw_die_ref die, loc_list_hash_type htab)
 /* TODO: perform some optimizations here, before hashing
 it and storing into the hash table. */
 hash_loc_list (list);
-slot = htab.find_slot_with_hash (list, list->hash, INSERT);
+slot = htab->find_slot_with_hash (list, list->hash, INSERT);
 if (*slot == NULL)
 *slot = list;
 else
@@ -24151,10 +24147,8 @@ index_location_lists (dw_die_ref die)
 static void
 optimize_location_lists (dw_die_ref die)
 {
-loc_list_hash_type htab;
-htab.create (500);
-optimize_location_lists_1 (die, htab);
-htab.dispose ();
+loc_list_hash_type htab (500);
+optimize_location_lists_1 (die, &htab);
 }

 /* Output stuff that dwarf requires at the end of every file,
@@ -24165,7 +24159,6 @@ dwarf2out_finish (const char *filename)
 {
 limbo_die_node *node, *next_node;
 comdat_type_node *ctnode;
-hash_table <comdat_type_hasher> comdat_type_table;
 unsigned int i;
 dw_die_ref main_comp_unit_die;

@@ -24434,7 +24427,7 @@ dwarf2out_finish (const char *filename)
 for (node = limbo_die_list; node; node = node->next)
 output_comp_unit (node->die, 0);

-comdat_type_table.create (100);
+hash_table<comdat_type_hasher> comdat_type_table (100);
 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
 {
 comdat_type_node **slot = comdat_type_table.find_slot (ctnode, INSERT);
@@ -24455,7 +24448,6 @@ dwarf2out_finish (const char *filename)
 output_comdat_type_unit (ctnode);
 *slot = ctnode;
 }
-comdat_type_table.dispose ();

 /* The AT_pubnames attribute needs to go in all skeleton dies, including
 both the main_cu and all skeleton TUs. Making this call unconditional
gcc/except.c (52)
@ -209,7 +209,7 @@ action_record_hasher::equal (const value_type *entry, const compare_type *data)
|
||||
return entry->filter == data->filter && entry->next == data->next;
|
||||
}
|
||||
|
||||
typedef hash_table <action_record_hasher> action_hash_type;
|
||||
typedef hash_table<action_record_hasher> action_hash_type;
|
||||
|
||||
static bool get_eh_region_and_lp_from_rtx (const_rtx, eh_region *,
|
||||
eh_landing_pad *);
|
||||
@ -219,7 +219,7 @@ static hashval_t t2r_hash (const void *);
|
||||
|
||||
static void dw2_build_landing_pads (void);
|
||||
|
||||
static int collect_one_action_chain (action_hash_type, eh_region);
|
||||
static int collect_one_action_chain (action_hash_type *, eh_region);
|
||||
static int add_call_site (rtx, int, int);
|
||||
|
||||
static void push_uleb128 (vec<uchar, va_gc> **, unsigned int);
|
||||
@ -760,7 +760,7 @@ ttypes_filter_hasher::hash (const value_type *entry)
|
||||
return TREE_HASH (entry->t);
|
||||
}
|
||||
|
||||
typedef hash_table <ttypes_filter_hasher> ttypes_hash_type;
|
||||
typedef hash_table<ttypes_filter_hasher> ttypes_hash_type;
|
||||
|
||||
|
||||
/* Helper for ehspec hashing. */
|
||||
@ -797,18 +797,18 @@ ehspec_hasher::hash (const value_type *entry)
|
||||
return h;
|
||||
}
|
||||
|
||||
typedef hash_table <ehspec_hasher> ehspec_hash_type;
|
||||
typedef hash_table<ehspec_hasher> ehspec_hash_type;
|
||||
|
||||
|
||||
/* Add TYPE (which may be NULL) to cfun->eh->ttype_data, using TYPES_HASH
|
||||
to speed up the search. Return the filter value to be used. */
|
||||
|
||||
static int
|
||||
add_ttypes_entry (ttypes_hash_type ttypes_hash, tree type)
|
||||
add_ttypes_entry (ttypes_hash_type *ttypes_hash, tree type)
|
||||
{
|
||||
struct ttypes_filter **slot, *n;
|
||||
|
||||
slot = ttypes_hash.find_slot_with_hash (type, (hashval_t) TREE_HASH (type),
|
||||
slot = ttypes_hash->find_slot_with_hash (type, (hashval_t) TREE_HASH (type),
|
||||
INSERT);
|
||||
|
||||
if ((n = *slot) == NULL)
|
||||
@ -830,14 +830,14 @@ add_ttypes_entry (ttypes_hash_type ttypes_hash, tree type)
|
||||
to speed up the search. Return the filter value to be used. */
|
||||
|
||||
static int
|
||||
add_ehspec_entry (ehspec_hash_type ehspec_hash, ttypes_hash_type ttypes_hash,
|
||||
add_ehspec_entry (ehspec_hash_type *ehspec_hash, ttypes_hash_type *ttypes_hash,
|
||||
tree list)
|
||||
{
|
||||
struct ttypes_filter **slot, *n;
|
||||
struct ttypes_filter dummy;
|
||||
|
||||
dummy.t = list;
|
||||
slot = ehspec_hash.find_slot (&dummy, INSERT);
|
||||
slot = ehspec_hash->find_slot (&dummy, INSERT);
|
||||
|
||||
if ((n = *slot) == NULL)
|
||||
{
|
||||
@ -886,8 +886,6 @@ void
|
||||
assign_filter_values (void)
|
||||
{
|
||||
int i;
|
||||
ttypes_hash_type ttypes;
|
||||
ehspec_hash_type ehspec;
|
||||
eh_region r;
|
||||
eh_catch c;
|
||||
|
||||
@ -897,8 +895,8 @@ assign_filter_values (void)
|
||||
else
|
||||
vec_alloc (cfun->eh->ehspec_data.other, 64);
|
||||
|
||||
ttypes.create (31);
|
||||
ehspec.create (31);
|
||||
ehspec_hash_type ehspec (31);
|
||||
ttypes_hash_type ttypes (31);
|
||||
|
||||
for (i = 1; vec_safe_iterate (cfun->eh->region_array, i, &r); ++i)
|
||||
{
|
||||
@ -922,7 +920,8 @@ assign_filter_values (void)
|
||||
|
||||
for ( ; tp_node; tp_node = TREE_CHAIN (tp_node))
|
||||
{
|
||||
int flt = add_ttypes_entry (ttypes, TREE_VALUE (tp_node));
|
||||
int flt
|
||||
= add_ttypes_entry (&ttypes, TREE_VALUE (tp_node));
|
||||
tree flt_node = build_int_cst (integer_type_node, flt);
|
||||
|
||||
c->filter_list
|
||||
@ -933,7 +932,7 @@ assign_filter_values (void)
|
||||
{
|
||||
/* Get a filter value for the NULL list also since it
|
||||
will need an action record anyway. */
|
||||
int flt = add_ttypes_entry (ttypes, NULL);
|
||||
int flt = add_ttypes_entry (&ttypes, NULL);
|
||||
tree flt_node = build_int_cst (integer_type_node, flt);
|
||||
|
||||
c->filter_list
|
||||
@ -944,16 +943,13 @@ assign_filter_values (void)
|
||||
|
||||
case ERT_ALLOWED_EXCEPTIONS:
|
||||
r->u.allowed.filter
|
||||
= add_ehspec_entry (ehspec, ttypes, r->u.allowed.type_list);
|
||||
= add_ehspec_entry (&ehspec, &ttypes, r->u.allowed.type_list);
|
||||
break;
|
||||
|
||||
default:
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
ttypes.dispose ();
|
||||
ehspec.dispose ();
|
||||
}
|
||||
|
||||
/* Emit SEQ into basic block just before INSN (that is assumed to be
|
||||
@ -1074,12 +1070,11 @@ static vec<int> sjlj_lp_call_site_index;
|
||||
static int
|
||||
sjlj_assign_call_site_values (void)
|
||||
{
|
||||
action_hash_type ar_hash;
|
||||
action_hash_type ar_hash (31);
|
||||
int i, disp_index;
|
||||
eh_landing_pad lp;
|
||||
|
||||
vec_alloc (crtl->eh.action_record_data, 64);
|
||||
ar_hash.create (31);
|
||||
|
||||
disp_index = 0;
|
||||
call_site_base = 1;
|
||||
@ -1089,7 +1084,7 @@ sjlj_assign_call_site_values (void)
|
||||
int action, call_site;
|
||||
|
||||
/* First: build the action table. */
|
||||
action = collect_one_action_chain (ar_hash, lp->region);
|
||||
action = collect_one_action_chain (&ar_hash, lp->region);
|
||||
|
||||
/* Next: assign call-site values. If dwarf2 terms, this would be
|
||||
the region number assigned by convert_to_eh_region_ranges, but
|
||||
@ -1108,8 +1103,6 @@ sjlj_assign_call_site_values (void)
|
||||
disp_index++;
|
||||
}
|
||||
|
||||
ar_hash.dispose ();
|
||||
|
||||
return disp_index;
|
||||
}
|
||||
|
||||
@ -2321,13 +2314,13 @@ expand_builtin_extend_pointer (tree addr_tree)
|
||||
}
|
||||
|
||||
static int
|
||||
add_action_record (action_hash_type ar_hash, int filter, int next)
|
||||
add_action_record (action_hash_type *ar_hash, int filter, int next)
|
||||
{
|
||||
struct action_record **slot, *new_ar, tmp;
|
||||
|
||||
tmp.filter = filter;
|
||||
tmp.next = next;
|
||||
slot = ar_hash.find_slot (&tmp, INSERT);
|
||||
slot = ar_hash->find_slot (&tmp, INSERT);
|
||||
|
||||
if ((new_ar = *slot) == NULL)
|
||||
{
|
||||
@ -2352,7 +2345,7 @@ add_action_record (action_hash_type ar_hash, int filter, int next)
|
||||
}
|
||||
|
||||
static int
|
||||
collect_one_action_chain (action_hash_type ar_hash, eh_region region)
|
||||
collect_one_action_chain (action_hash_type *ar_hash, eh_region region)
|
||||
{
|
||||
int next;
|
||||
|
||||
@ -2481,7 +2474,7 @@ static unsigned int
|
||||
convert_to_eh_region_ranges (void)
|
||||
{
|
||||
rtx insn, iter, note;
|
||||
action_hash_type ar_hash;
|
||||
action_hash_type ar_hash (31);
|
||||
int last_action = -3;
|
||||
rtx last_action_insn = NULL_RTX;
|
||||
rtx last_landing_pad = NULL_RTX;
|
||||
@ -2495,8 +2488,6 @@ convert_to_eh_region_ranges (void)
|
||||
|
||||
vec_alloc (crtl->eh.action_record_data, 64);
|
||||
|
||||
ar_hash.create (31);
|
||||
|
||||
for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
|
||||
if (INSN_P (iter))
|
||||
{
|
||||
@ -2515,7 +2506,7 @@ convert_to_eh_region_ranges (void)
|
||||
if (nothrow)
|
||||
continue;
|
||||
if (region)
|
||||
this_action = collect_one_action_chain (ar_hash, region);
|
||||
this_action = collect_one_action_chain (&ar_hash, region);
|
||||
else
|
||||
this_action = -1;
|
||||
|
||||
@ -2632,7 +2623,6 @@ convert_to_eh_region_ranges (void)
|
||||
|
||||
call_site_base = saved_call_site_base;
|
||||
|
||||
ar_hash.dispose ();
|
||||
return 0;
|
||||
}
|
||||
|
||||
|
@ -14686,7 +14686,7 @@ fold (tree expr)
|
||||
#undef fold
|
||||
|
||||
static void fold_checksum_tree (const_tree, struct md5_ctx *,
|
||||
hash_table <pointer_hash <tree_node> >);
|
||||
hash_table<pointer_hash<tree_node> > *);
|
||||
static void fold_check_failed (const_tree, const_tree);
|
||||
void print_fold_checksum (const_tree);
|
||||
|
||||
@ -14700,20 +14700,18 @@ fold (tree expr)
|
||||
tree ret;
|
||||
struct md5_ctx ctx;
|
||||
unsigned char checksum_before[16], checksum_after[16];
|
||||
hash_table <pointer_hash <tree_node> > ht;
|
||||
hash_table<pointer_hash<tree_node> > ht (32);
|
||||
|
||||
ht.create (32);
|
||||
md5_init_ctx (&ctx);
|
||||
fold_checksum_tree (expr, &ctx, ht);
|
||||
fold_checksum_tree (expr, &ctx, &ht);
|
||||
md5_finish_ctx (&ctx, checksum_before);
|
||||
ht.empty ();
|
||||
|
||||
ret = fold_1 (expr);
|
||||
|
||||
md5_init_ctx (&ctx);
|
||||
fold_checksum_tree (expr, &ctx, ht);
|
||||
fold_checksum_tree (expr, &ctx, &ht);
|
||||
md5_finish_ctx (&ctx, checksum_after);
|
||||
ht.dispose ();
|
||||
|
||||
if (memcmp (checksum_before, checksum_after, 16))
|
||||
fold_check_failed (expr, ret);
|
||||
@ -14726,13 +14724,11 @@ print_fold_checksum (const_tree expr)
|
||||
{
|
||||
struct md5_ctx ctx;
|
||||
unsigned char checksum[16], cnt;
|
||||
hash_table <pointer_hash <tree_node> > ht;
|
||||
hash_table<pointer_hash<tree_node> > ht (32);
|
||||
|
||||
ht.create (32);
|
||||
md5_init_ctx (&ctx);
|
||||
fold_checksum_tree (expr, &ctx, ht);
|
||||
fold_checksum_tree (expr, &ctx, &ht);
|
||||
md5_finish_ctx (&ctx, checksum);
|
||||
ht.dispose ();
|
||||
for (cnt = 0; cnt < 16; ++cnt)
|
||||
fprintf (stderr, "%02x", checksum[cnt]);
|
||||
putc ('\n', stderr);
|
||||
@ -14746,7 +14742,7 @@ fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UN
|
||||
|
||||
static void
|
||||
fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
|
||||
hash_table <pointer_hash <tree_node> > ht)
|
||||
hash_table<pointer_hash <tree_node> > *ht)
|
||||
{
|
||||
tree_node **slot;
|
||||
enum tree_code code;
|
||||
@ -14756,7 +14752,7 @@ fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
|
||||
recursive_label:
|
||||
if (expr == NULL)
|
||||
return;
|
||||
slot = ht.find_slot (expr, INSERT);
|
||||
slot = ht->find_slot (expr, INSERT);
|
||||
if (*slot != NULL)
|
||||
return;
|
||||
*slot = CONST_CAST_TREE (expr);
|
||||
@ -14903,11 +14899,10 @@ debug_fold_checksum (const_tree t)
|
||||
int i;
|
||||
unsigned char checksum[16];
|
||||
struct md5_ctx ctx;
|
||||
hash_table <pointer_hash <tree_node> > ht;
|
||||
ht.create (32);
|
||||
hash_table<pointer_hash<tree_node> > ht (32);
|
||||
|
||||
md5_init_ctx (&ctx);
|
||||
fold_checksum_tree (t, &ctx, ht);
|
||||
fold_checksum_tree (t, &ctx, &ht);
|
||||
md5_finish_ctx (&ctx, checksum);
|
||||
ht.empty ();
|
||||
|
||||
@ -14932,11 +14927,10 @@ fold_build1_stat_loc (location_t loc,
|
||||
#ifdef ENABLE_FOLD_CHECKING
|
||||
unsigned char checksum_before[16], checksum_after[16];
|
||||
struct md5_ctx ctx;
|
||||
hash_table <pointer_hash <tree_node> > ht;
|
||||
hash_table<pointer_hash<tree_node> > ht (32);
|
||||
|
||||
ht.create (32);
|
||||
md5_init_ctx (&ctx);
|
||||
fold_checksum_tree (op0, &ctx, ht);
|
||||
fold_checksum_tree (op0, &ctx, &ht);
|
||||
md5_finish_ctx (&ctx, checksum_before);
|
||||
ht.empty ();
|
||||
#endif
|
||||
@ -14947,9 +14941,8 @@ fold_build1_stat_loc (location_t loc,
|
||||
|
||||
#ifdef ENABLE_FOLD_CHECKING
|
||||
md5_init_ctx (&ctx);
|
||||
fold_checksum_tree (op0, &ctx, ht);
|
||||
fold_checksum_tree (op0, &ctx, &ht);
|
||||
md5_finish_ctx (&ctx, checksum_after);
|
||||
ht.dispose ();
|
||||
|
||||
if (memcmp (checksum_before, checksum_after, 16))
|
||||
fold_check_failed (op0, tem);
|
||||
@ -14975,16 +14968,15 @@ fold_build2_stat_loc (location_t loc,
|
||||
checksum_after_op0[16],
|
||||
checksum_after_op1[16];
|
||||
struct md5_ctx ctx;
|
||||
hash_table <pointer_hash <tree_node> > ht;
|
||||
hash_table<pointer_hash<tree_node> > ht (32);
|
||||
|
||||
ht.create (32);
|
||||
md5_init_ctx (&ctx);
|
||||
fold_checksum_tree (op0, &ctx, ht);
|
||||
fold_checksum_tree (op0, &ctx, &ht);
|
||||
md5_finish_ctx (&ctx, checksum_before_op0);
|
||||
ht.empty ();
|
||||
|
||||
md5_init_ctx (&ctx);
|
||||
fold_checksum_tree (op1, &ctx, ht);
|
||||
fold_checksum_tree (op1, &ctx, &ht);
|
||||
md5_finish_ctx (&ctx, checksum_before_op1);
|
||||
ht.empty ();
|
||||
#endif
|
||||
@ -14995,7 +14987,7 @@ fold_build2_stat_loc (location_t loc,
|
||||
|
||||
#ifdef ENABLE_FOLD_CHECKING
|
||||
md5_init_ctx (&ctx);
|
||||
fold_checksum_tree (op0, &ctx, ht);
|
||||
fold_checksum_tree (op0, &ctx, &ht);
|
||||
md5_finish_ctx (&ctx, checksum_after_op0);
|
||||
ht.empty ();
|
||||
|
||||
@ -15003,9 +14995,8 @@ fold_build2_stat_loc (location_t loc,
|
||||
fold_check_failed (op0, tem);
|
||||
|
||||
md5_init_ctx (&ctx);
|
||||
fold_checksum_tree (op1, &ctx, ht);
|
||||
fold_checksum_tree (op1, &ctx, &ht);
|
||||
md5_finish_ctx (&ctx, checksum_after_op1);
|
||||
ht.dispose ();
|
||||
|
||||
if (memcmp (checksum_before_op1, checksum_after_op1, 16))
|
||||
fold_check_failed (op1, tem);
|
||||
@ -15031,21 +15022,20 @@ fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
|
||||
checksum_after_op1[16],
|
||||
checksum_after_op2[16];
|
||||
struct md5_ctx ctx;
|
||||
hash_table <pointer_hash <tree_node> > ht;
|
||||
hash_table<pointer_hash<tree_node> > ht (32);
|
||||
|
||||
ht.create (32);
|
||||
md5_init_ctx (&ctx);
|
||||
fold_checksum_tree (op0, &ctx, ht);
|
||||
fold_checksum_tree (op0, &ctx, &ht);
|
||||
md5_finish_ctx (&ctx, checksum_before_op0);
|
||||
ht.empty ();
|
||||
|
||||
md5_init_ctx (&ctx);
|
||||
fold_checksum_tree (op1, &ctx, ht);
|
||||
fold_checksum_tree (op1, &ctx, &ht);
|
||||
md5_finish_ctx (&ctx, checksum_before_op1);
|
||||
ht.empty ();
|
||||
|
||||
md5_init_ctx (&ctx);
|
||||
fold_checksum_tree (op2, &ctx, ht);
|
||||
fold_checksum_tree (op2, &ctx, &ht);
|
||||
md5_finish_ctx (&ctx, checksum_before_op2);
|
||||
ht.empty ();
|
||||
#endif
|
||||
@ -15057,7 +15047,7 @@ fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
|
||||
|
||||
#ifdef ENABLE_FOLD_CHECKING
|
||||
md5_init_ctx (&ctx);
|
||||
fold_checksum_tree (op0, &ctx, ht);
|
||||
fold_checksum_tree (op0, &ctx, &ht);
|
||||
md5_finish_ctx (&ctx, checksum_after_op0);
|
||||
ht.empty ();
|
||||
|
||||
@ -15065,7 +15055,7 @@ fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
|
||||
fold_check_failed (op0, tem);
|
||||
|
||||
md5_init_ctx (&ctx);
|
||||
fold_checksum_tree (op1, &ctx, ht);
|
||||
fold_checksum_tree (op1, &ctx, &ht);
|
||||
md5_finish_ctx (&ctx, checksum_after_op1);
|
||||
ht.empty ();
|
||||
|
||||
@ -15073,9 +15063,8 @@ fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
|
||||
fold_check_failed (op1, tem);
|
||||
|
||||
md5_init_ctx (&ctx);
|
||||
fold_checksum_tree (op2, &ctx, ht);
|
||||
fold_checksum_tree (op2, &ctx, &ht);
|
||||
md5_finish_ctx (&ctx, checksum_after_op2);
|
||||
ht.dispose ();
|
||||
|
||||
if (memcmp (checksum_before_op2, checksum_after_op2, 16))
|
||||
fold_check_failed (op2, tem);
|
||||
@ -15099,18 +15088,17 @@ fold_build_call_array_loc (location_t loc, tree type, tree fn,
|
||||
checksum_after_fn[16],
|
||||
checksum_after_arglist[16];
|
||||
struct md5_ctx ctx;
|
||||
hash_table <pointer_hash <tree_node> > ht;
|
||||
hash_table<pointer_hash<tree_node> > ht (32);
|
||||
int i;
|
||||
|
||||
ht.create (32);
|
||||
md5_init_ctx (&ctx);
|
||||
fold_checksum_tree (fn, &ctx, ht);
|
||||
fold_checksum_tree (fn, &ctx, &ht);
|
||||
md5_finish_ctx (&ctx, checksum_before_fn);
|
||||
ht.empty ();
|
||||
|
||||
md5_init_ctx (&ctx);
|
||||
for (i = 0; i < nargs; i++)
|
||||
fold_checksum_tree (argarray[i], &ctx, ht);
|
||||
fold_checksum_tree (argarray[i], &ctx, &ht);
|
||||
md5_finish_ctx (&ctx, checksum_before_arglist);
|
||||
ht.empty ();
|
||||
#endif
|
||||
@ -15119,7 +15107,7 @@ fold_build_call_array_loc (location_t loc, tree type, tree fn,
|
||||
|
||||
#ifdef ENABLE_FOLD_CHECKING
|
||||
md5_init_ctx (&ctx);
|
||||
fold_checksum_tree (fn, &ctx, ht);
|
||||
fold_checksum_tree (fn, &ctx, &ht);
|
||||
md5_finish_ctx (&ctx, checksum_after_fn);
|
||||
ht.empty ();
|
||||
|
||||
@ -15128,9 +15116,8 @@ fold_build_call_array_loc (location_t loc, tree type, tree fn,
|
||||
|
||||
md5_init_ctx (&ctx);
|
||||
for (i = 0; i < nargs; i++)
|
||||
fold_checksum_tree (argarray[i], &ctx, ht);
|
||||
fold_checksum_tree (argarray[i], &ctx, &ht);
|
||||
md5_finish_ctx (&ctx, checksum_after_arglist);
|
||||
ht.dispose ();
|
||||
|
||||
if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
|
||||
fold_check_failed (NULL_TREE, tem);
|
||||
|
gcc/gcse.c (16)
@@ -386,7 +386,7 @@ pre_ldst_expr_hasher::equal (const value_type *ptr1,
 }

 /* Hashtable for the load/store memory refs. */
-static hash_table <pre_ldst_expr_hasher> pre_ldst_table;
+static hash_table<pre_ldst_expr_hasher> *pre_ldst_table;

 /* Bitmap containing one bit for each register in the program.
 Used when performing GCSE to track which registers have been set since
@@ -3762,7 +3762,7 @@ ldst_entry (rtx x)
 NULL, /*have_reg_qty=*/false);

 e.pattern = x;
-slot = pre_ldst_table.find_slot_with_hash (&e, hash, INSERT);
+slot = pre_ldst_table->find_slot_with_hash (&e, hash, INSERT);
 if (*slot)
 return *slot;

@@ -3800,8 +3800,8 @@ free_ldst_entry (struct ls_expr * ptr)
 static void
 free_ld_motion_mems (void)
 {
-if (pre_ldst_table.is_created ())
-pre_ldst_table.dispose ();
+delete pre_ldst_table;
+pre_ldst_table = NULL;

 while (pre_ldst_mems)
 {
@@ -3857,10 +3857,10 @@ find_rtx_in_ldst (rtx x)
 {
 struct ls_expr e;
 ls_expr **slot;
-if (!pre_ldst_table.is_created ())
+if (!pre_ldst_table)
 return NULL;
 e.pattern = x;
-slot = pre_ldst_table.find_slot (&e, NO_INSERT);
+slot = pre_ldst_table->find_slot (&e, NO_INSERT);
 if (!slot || (*slot)->invalid)
 return NULL;
 return *slot;
@@ -3951,7 +3951,7 @@ compute_ld_motion_mems (void)
 rtx insn;

 pre_ldst_mems = NULL;
-pre_ldst_table.create (13);
+pre_ldst_table = new hash_table<pre_ldst_expr_hasher> (13);

 FOR_EACH_BB_FN (bb, cfun)
 {
@@ -4053,7 +4053,7 @@ trim_ld_motion_mems (void)
 else
 {
 *last = ptr->next;
-pre_ldst_table.remove_elt_with_hash (ptr, ptr->hash_index);
+pre_ldst_table->remove_elt_with_hash (ptr, ptr->hash_index);
 free_ldst_entry (ptr);
 ptr = * last;
 }
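The gcse.c hunks above show the conversion pattern this commit applies to most global tables. A minimal sketch of the resulting idiom follows, assuming gcc/hash-table.h and the pre_ldst_expr_hasher descriptor defined earlier in gcse.c; init_table, lookup and free_table are hypothetical helper names, not functions from the patch:

    static hash_table<pre_ldst_expr_hasher> *pre_ldst_table;

    static void
    init_table (void)
    {
      /* Allocate the table on the heap; the argument is only an initial size hint.  */
      pre_ldst_table = new hash_table<pre_ldst_expr_hasher> (13);
    }

    static ls_expr *
    lookup (struct ls_expr *e, hashval_t hash)
    {
      /* A null pointer now plays the role of the old is_created () test.  */
      if (!pre_ldst_table)
        return NULL;
      ls_expr **slot = pre_ldst_table->find_slot_with_hash (e, hash, NO_INSERT);
      return slot ? *slot : NULL;
    }

    static void
    free_table (void)
    {
      delete pre_ldst_table;   /* replaces dispose (); deleting NULL is a no-op */
      pre_ldst_table = NULL;
    }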
@ -320,7 +320,7 @@ saving_hasher::equal (const value_type *p1, const compare_type *p2)
|
||||
return p1->obj == p2;
|
||||
}
|
||||
|
||||
static hash_table <saving_hasher> saving_htab;
|
||||
static hash_table<saving_hasher> *saving_htab;
|
||||
|
||||
/* Register an object in the hash table. */
|
||||
|
||||
@ -334,7 +334,7 @@ gt_pch_note_object (void *obj, void *note_ptr_cookie,
|
||||
return 0;
|
||||
|
||||
slot = (struct ptr_data **)
|
||||
saving_htab.find_slot_with_hash (obj, POINTER_HASH (obj), INSERT);
|
||||
saving_htab->find_slot_with_hash (obj, POINTER_HASH (obj), INSERT);
|
||||
if (*slot != NULL)
|
||||
{
|
||||
gcc_assert ((*slot)->note_ptr_fn == note_ptr_fn
|
||||
@ -365,7 +365,7 @@ gt_pch_note_reorder (void *obj, void *note_ptr_cookie,
|
||||
return;
|
||||
|
||||
data = (struct ptr_data *)
|
||||
saving_htab.find_with_hash (obj, POINTER_HASH (obj));
|
||||
saving_htab->find_with_hash (obj, POINTER_HASH (obj));
|
||||
gcc_assert (data && data->note_ptr_cookie == note_ptr_cookie);
|
||||
|
||||
data->reorder_fn = reorder_fn;
|
||||
@ -431,7 +431,7 @@ relocate_ptrs (void *ptr_p, void *state_p)
|
||||
return;
|
||||
|
||||
result = (struct ptr_data *)
|
||||
saving_htab.find_with_hash (*ptr, POINTER_HASH (*ptr));
|
||||
saving_htab->find_with_hash (*ptr, POINTER_HASH (*ptr));
|
||||
gcc_assert (result);
|
||||
*ptr = result->new_addr;
|
||||
}
|
||||
@ -460,7 +460,7 @@ write_pch_globals (const struct ggc_root_tab * const *tab,
|
||||
else
|
||||
{
|
||||
new_ptr = (struct ptr_data *)
|
||||
saving_htab.find_with_hash (ptr, POINTER_HASH (ptr));
|
||||
saving_htab->find_with_hash (ptr, POINTER_HASH (ptr));
|
||||
if (fwrite (&new_ptr->new_addr, sizeof (void *), 1, state->f)
|
||||
!= 1)
|
||||
fatal_error ("can%'t write PCH file: %m");
|
||||
@ -494,7 +494,7 @@ gt_pch_save (FILE *f)
|
||||
gt_pch_save_stringpool ();
|
||||
|
||||
timevar_push (TV_PCH_PTR_REALLOC);
|
||||
saving_htab.create (50000);
|
||||
saving_htab = new hash_table<saving_hasher> (50000);
|
||||
|
||||
for (rt = gt_ggc_rtab; *rt; rt++)
|
||||
for (rti = *rt; rti->base != NULL; rti++)
|
||||
@ -510,7 +510,7 @@ gt_pch_save (FILE *f)
|
||||
state.f = f;
|
||||
state.d = init_ggc_pch ();
|
||||
state.count = 0;
|
||||
saving_htab.traverse <traversal_state *, ggc_call_count> (&state);
|
||||
saving_htab->traverse <traversal_state *, ggc_call_count> (&state);
|
||||
|
||||
mmi.size = ggc_pch_total_size (state.d);
|
||||
|
||||
@ -526,7 +526,7 @@ gt_pch_save (FILE *f)
|
||||
state.ptrs = XNEWVEC (struct ptr_data *, state.count);
|
||||
state.ptrs_i = 0;
|
||||
|
||||
saving_htab.traverse <traversal_state *, ggc_call_alloc> (&state);
|
||||
saving_htab->traverse <traversal_state *, ggc_call_alloc> (&state);
|
||||
timevar_pop (TV_PCH_PTR_REALLOC);
|
||||
|
||||
timevar_push (TV_PCH_PTR_SORT);
|
||||
@ -655,7 +655,8 @@ gt_pch_save (FILE *f)
|
||||
|
||||
XDELETE (state.ptrs);
|
||||
XDELETE (this_object);
|
||||
saving_htab.dispose ();
|
||||
delete saving_htab;
|
||||
saving_htab = NULL;
|
||||
}
|
||||
|
||||
/* Read the state of the compiler back in from F. */
|
||||
@ -939,7 +940,7 @@ loc_desc_hasher::equal (const value_type *d, const compare_type *d2)
|
||||
}
|
||||
|
||||
/* Hashtable used for statistics. */
|
||||
static hash_table <loc_desc_hasher> loc_hash;
|
||||
static hash_table<loc_desc_hasher> *loc_hash;
|
||||
|
||||
struct ptr_hash_entry
|
||||
{
|
||||
@ -971,7 +972,7 @@ ptr_hash_hasher::equal (const value_type *p, const compare_type *p2)
|
||||
}
|
||||
|
||||
/* Hashtable converting address of allocated field to loc descriptor. */
|
||||
static hash_table <ptr_hash_hasher> ptr_hash;
|
||||
static hash_table<ptr_hash_hasher> *ptr_hash;
|
||||
|
||||
/* Return descriptor for given call site, create new one if needed. */
|
||||
static struct loc_descriptor *
|
||||
@ -983,10 +984,10 @@ make_loc_descriptor (const char *name, int line, const char *function)
|
||||
loc.file = name;
|
||||
loc.line = line;
|
||||
loc.function = function;
|
||||
if (!loc_hash.is_created ())
|
||||
loc_hash.create (10);
|
||||
if (!loc_hash)
|
||||
loc_hash = new hash_table<loc_desc_hasher> (10);
|
||||
|
||||
slot = loc_hash.find_slot (&loc, INSERT);
|
||||
slot = loc_hash->find_slot (&loc, INSERT);
|
||||
if (*slot)
|
||||
return *slot;
|
||||
*slot = XCNEW (struct loc_descriptor);
|
||||
@ -1008,9 +1009,9 @@ ggc_record_overhead (size_t allocated, size_t overhead, void *ptr,
|
||||
p->ptr = ptr;
|
||||
p->loc = loc;
|
||||
p->size = allocated + overhead;
|
||||
if (!ptr_hash.is_created ())
|
||||
ptr_hash.create (10);
|
||||
slot = ptr_hash.find_slot_with_hash (ptr, htab_hash_pointer (ptr), INSERT);
|
||||
if (!ptr_hash)
|
||||
ptr_hash = new hash_table<ptr_hash_hasher> (10);
|
||||
slot = ptr_hash->find_slot_with_hash (ptr, htab_hash_pointer (ptr), INSERT);
|
||||
gcc_assert (!*slot);
|
||||
*slot = p;
|
||||
|
||||
@ -1028,7 +1029,7 @@ ggc_prune_ptr (ptr_hash_entry **slot, void *b ATTRIBUTE_UNUSED)
|
||||
if (!ggc_marked_p (p->ptr))
|
||||
{
|
||||
p->loc->collected += p->size;
|
||||
ptr_hash.clear_slot (slot);
|
||||
ptr_hash->clear_slot (slot);
|
||||
free (p);
|
||||
}
|
||||
return 1;
|
||||
@ -1039,15 +1040,15 @@ ggc_prune_ptr (ptr_hash_entry **slot, void *b ATTRIBUTE_UNUSED)
|
||||
void
|
||||
ggc_prune_overhead_list (void)
|
||||
{
|
||||
ptr_hash.traverse <void *, ggc_prune_ptr> (NULL);
|
||||
ptr_hash->traverse <void *, ggc_prune_ptr> (NULL);
|
||||
}
|
||||
|
||||
/* Notice that the pointer has been freed. */
|
||||
void
|
||||
ggc_free_overhead (void *ptr)
|
||||
{
|
||||
ptr_hash_entry **slot;
|
||||
slot = ptr_hash.find_slot_with_hash (ptr, htab_hash_pointer (ptr), NO_INSERT);
|
||||
ptr_hash_entry **slot
|
||||
= ptr_hash->find_slot_with_hash (ptr, htab_hash_pointer (ptr), NO_INSERT);
|
||||
struct ptr_hash_entry *p;
|
||||
/* The pointer might be not found if a PCH read happened between allocation
|
||||
and ggc_free () call. FIXME: account memory properly in the presence of
|
||||
@ -1056,7 +1057,7 @@ ggc_free_overhead (void *ptr)
|
||||
return;
|
||||
p = (struct ptr_hash_entry *) *slot;
|
||||
p->loc->freed += p->size;
|
||||
ptr_hash.clear_slot (slot);
|
||||
ptr_hash->clear_slot (slot);
|
||||
free (p);
|
||||
}
|
||||
|
||||
@ -1120,12 +1121,12 @@ dump_ggc_loc_statistics (bool final)
|
||||
ggc_collect ();
|
||||
|
||||
loc_array = XCNEWVEC (struct loc_descriptor *,
|
||||
loc_hash.elements_with_deleted ());
|
||||
loc_hash->elements_with_deleted ());
|
||||
fprintf (stderr, "-------------------------------------------------------\n");
|
||||
fprintf (stderr, "\n%-48s %10s %10s %10s %10s %10s\n",
|
||||
"source location", "Garbage", "Freed", "Leak", "Overhead", "Times");
|
||||
fprintf (stderr, "-------------------------------------------------------\n");
|
||||
loc_hash.traverse <int *, ggc_add_statistics> (&nentries);
|
||||
loc_hash->traverse <int *, ggc_add_statistics> (&nentries);
|
||||
qsort (loc_array, nentries, sizeof (*loc_array),
|
||||
final ? final_cmp_statistic : cmp_statistic);
|
||||
for (i = 0; i < nentries; i++)
|
||||
|
@ -430,7 +430,7 @@ cand_chain_hasher::equal (const value_type *chain1, const compare_type *chain2)
|
||||
}
|
||||
|
||||
/* Hash table embodying a mapping from base exprs to chains of candidates. */
|
||||
static hash_table <cand_chain_hasher> base_cand_map;
|
||||
static hash_table<cand_chain_hasher> *base_cand_map;
|
||||
|
||||
/* Pointer map used by tree_to_aff_combination_expand. */
|
||||
static struct pointer_map_t *name_expansions;
|
||||
@ -507,7 +507,7 @@ find_basis_for_base_expr (slsr_cand_t c, tree base_expr)
|
||||
int max_iters = PARAM_VALUE (PARAM_MAX_SLSR_CANDIDATE_SCAN);
|
||||
|
||||
mapping_key.base_expr = base_expr;
|
||||
chain = base_cand_map.find (&mapping_key);
|
||||
chain = base_cand_map->find (&mapping_key);
|
||||
|
||||
for (; chain && iters < max_iters; chain = chain->next, ++iters)
|
||||
{
|
||||
@ -604,7 +604,7 @@ record_potential_basis (slsr_cand_t c, tree base)
|
||||
node->base_expr = base;
|
||||
node->cand = c;
|
||||
node->next = NULL;
|
||||
slot = base_cand_map.find_slot (node, INSERT);
|
||||
slot = base_cand_map->find_slot (node, INSERT);
|
||||
|
||||
if (*slot)
|
||||
{
|
||||
@ -1848,7 +1848,8 @@ static void
|
||||
dump_cand_chains (void)
|
||||
{
|
||||
fprintf (dump_file, "\nStrength reduction candidate chains:\n\n");
|
||||
base_cand_map.traverse_noresize <void *, ssa_base_cand_dump_callback> (NULL);
|
||||
base_cand_map->traverse_noresize <void *, ssa_base_cand_dump_callback>
|
||||
(NULL);
|
||||
fputs ("\n", dump_file);
|
||||
}
|
||||
|
||||
@ -3634,7 +3635,7 @@ pass_strength_reduction::execute (function *fun)
|
||||
gcc_obstack_init (&chain_obstack);
|
||||
|
||||
/* Allocate the mapping from base expressions to candidate chains. */
|
||||
base_cand_map.create (500);
|
||||
base_cand_map = new hash_table<cand_chain_hasher> (500);
|
||||
|
||||
/* Allocate the mapping from bases to alternative bases. */
|
||||
alt_base_map = pointer_map_create ();
|
||||
@ -3661,7 +3662,8 @@ pass_strength_reduction::execute (function *fun)
|
||||
analyze_candidates_and_replace ();
|
||||
|
||||
loop_optimizer_finalize ();
|
||||
base_cand_map.dispose ();
|
||||
delete base_cand_map;
|
||||
base_cand_map = NULL;
|
||||
obstack_free (&chain_obstack, NULL);
|
||||
pointer_map_destroy (stmt_cand_map);
|
||||
cand_vec.release ();
|
||||
|
@@ -121,7 +121,7 @@ struct gimplify_ctx

 vec<tree> case_labels;
 /* The formal temporary table. Should this be persistent? */
-hash_table <gimplify_hasher> temp_htab;
+hash_table<gimplify_hasher> *temp_htab;

 int conditions;
 bool save_stack;
@@ -256,8 +256,8 @@ pop_gimplify_context (gimple body)
 else
 record_vars (c->temps);

-if (c->temp_htab.is_created ())
-c->temp_htab.dispose ();
+delete c->temp_htab;
+c->temp_htab = NULL;
 ctx_free (c);
 }

@@ -484,9 +484,9 @@ lookup_tmp_var (tree val, bool is_formal)
 elt_t **slot;

 elt.val = val;
-if (!gimplify_ctxp->temp_htab.is_created ())
-gimplify_ctxp->temp_htab.create (1000);
-slot = gimplify_ctxp->temp_htab.find_slot (&elt, INSERT);
+if (!gimplify_ctxp->temp_htab)
+gimplify_ctxp->temp_htab = new hash_table<gimplify_hasher> (1000);
+slot = gimplify_ctxp->temp_htab->find_slot (&elt, INSERT);
 if (*slot == NULL)
 {
 elt_p = XNEW (elt_t);
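The lookup_tmp_var hunk above creates the per-context temporary table only when the first temporary is requested. The same lazy-allocation idiom, sketched under the assumption of gcc/hash-table.h, the gimplify_hasher descriptor and the elt_t type from gimplify.c; get_temp_slot and release_temp_table are illustrative names only:

    static elt_t **
    get_temp_slot (struct gimplify_ctx *ctx, elt_t *elt)
    {
      /* Allocate the table lazily; a NULL pointer means "not created yet".  */
      if (!ctx->temp_htab)
        ctx->temp_htab = new hash_table<gimplify_hasher> (1000);
      return ctx->temp_htab->find_slot (elt, INSERT);
    }

    static void
    release_temp_table (struct gimplify_ctx *ctx)
    {
      /* Teardown mirrors the allocation; delete on NULL is harmless.  */
      delete ctx->temp_htab;
      ctx->temp_htab = NULL;
    }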
@ -183,7 +183,7 @@ clast_index_hasher::remove (value_type *c)
|
||||
free (c);
|
||||
}
|
||||
|
||||
typedef hash_table <clast_index_hasher> clast_index_htab_type;
|
||||
typedef hash_table<clast_index_hasher> clast_index_htab_type;
|
||||
|
||||
/* Returns a pointer to a new element of type clast_name_index_p built
|
||||
from NAME, INDEX, LEVEL, BOUND_ONE, and BOUND_TWO. */
|
||||
@ -213,7 +213,7 @@ new_clast_name_index (const char *name, int index, int level,
|
||||
vector of parameters. */
|
||||
|
||||
static inline int
|
||||
clast_name_to_level (clast_name_p name, clast_index_htab_type index_table)
|
||||
clast_name_to_level (clast_name_p name, clast_index_htab_type *index_table)
|
||||
{
|
||||
struct clast_name_index tmp;
|
||||
clast_name_index **slot;
|
||||
@ -222,7 +222,7 @@ clast_name_to_level (clast_name_p name, clast_index_htab_type index_table)
|
||||
tmp.name = ((const struct clast_name *) name)->name;
|
||||
tmp.free_name = NULL;
|
||||
|
||||
slot = index_table.find_slot (&tmp, NO_INSERT);
|
||||
slot = index_table->find_slot (&tmp, NO_INSERT);
|
||||
|
||||
if (slot && *slot)
|
||||
return ((struct clast_name_index *) *slot)->level;
|
||||
@ -235,7 +235,7 @@ clast_name_to_level (clast_name_p name, clast_index_htab_type index_table)
|
||||
SCATTERING_DIMENSIONS vector. */
|
||||
|
||||
static inline int
|
||||
clast_name_to_index (struct clast_name *name, clast_index_htab_type index_table)
|
||||
clast_name_to_index (struct clast_name *name, clast_index_htab_type *index_table)
|
||||
{
|
||||
struct clast_name_index tmp;
|
||||
clast_name_index **slot;
|
||||
@ -243,7 +243,7 @@ clast_name_to_index (struct clast_name *name, clast_index_htab_type index_table)
|
||||
tmp.name = ((const struct clast_name *) name)->name;
|
||||
tmp.free_name = NULL;
|
||||
|
||||
slot = index_table.find_slot (&tmp, NO_INSERT);
|
||||
slot = index_table->find_slot (&tmp, NO_INSERT);
|
||||
|
||||
if (slot && *slot)
|
||||
return (*slot)->index;
|
||||
@ -256,8 +256,9 @@ clast_name_to_index (struct clast_name *name, clast_index_htab_type index_table)
|
||||
found in the INDEX_TABLE, false otherwise. */
|
||||
|
||||
static inline bool
|
||||
clast_name_to_lb_ub (struct clast_name *name, clast_index_htab_type index_table,
|
||||
mpz_t bound_one, mpz_t bound_two)
|
||||
clast_name_to_lb_ub (struct clast_name *name,
|
||||
clast_index_htab_type *index_table, mpz_t bound_one,
|
||||
mpz_t bound_two)
|
||||
{
|
||||
struct clast_name_index tmp;
|
||||
clast_name_index **slot;
|
||||
@ -265,7 +266,7 @@ clast_name_to_lb_ub (struct clast_name *name, clast_index_htab_type index_table,
|
||||
tmp.name = name->name;
|
||||
tmp.free_name = NULL;
|
||||
|
||||
slot = index_table.find_slot (&tmp, NO_INSERT);
|
||||
slot = index_table->find_slot (&tmp, NO_INSERT);
|
||||
|
||||
if (slot && *slot)
|
||||
{
|
||||
@ -280,7 +281,7 @@ clast_name_to_lb_ub (struct clast_name *name, clast_index_htab_type index_table,
|
||||
/* Records in INDEX_TABLE the INDEX and LEVEL for NAME. */
|
||||
|
||||
static inline void
|
||||
save_clast_name_index (clast_index_htab_type index_table, const char *name,
|
||||
save_clast_name_index (clast_index_htab_type *index_table, const char *name,
|
||||
int index, int level, mpz_t bound_one, mpz_t bound_two)
|
||||
{
|
||||
struct clast_name_index tmp;
|
||||
@ -288,7 +289,7 @@ save_clast_name_index (clast_index_htab_type index_table, const char *name,
|
||||
|
||||
tmp.name = name;
|
||||
tmp.free_name = NULL;
|
||||
slot = index_table.find_slot (&tmp, INSERT);
|
||||
slot = index_table->find_slot (&tmp, INSERT);
|
||||
|
||||
if (slot)
|
||||
{
|
||||
@ -307,7 +308,7 @@ save_clast_name_index (clast_index_htab_type index_table, const char *name,
|
||||
|
||||
typedef struct ivs_params {
|
||||
vec<tree> params, *newivs;
|
||||
clast_index_htab_type newivs_index, params_index;
|
||||
clast_index_htab_type *newivs_index, *params_index;
|
||||
sese region;
|
||||
} *ivs_params_p;
|
||||
|
||||
@ -319,7 +320,7 @@ clast_name_to_gcc (struct clast_name *name, ivs_params_p ip)
|
||||
{
|
||||
int index;
|
||||
|
||||
if (ip->params.exists () && ip->params_index.is_created ())
|
||||
if (ip->params.exists () && ip->params_index)
|
||||
{
|
||||
index = clast_name_to_index (name, ip->params_index);
|
||||
|
||||
@ -327,7 +328,7 @@ clast_name_to_gcc (struct clast_name *name, ivs_params_p ip)
|
||||
return ip->params[index];
|
||||
}
|
||||
|
||||
gcc_assert (ip->newivs && ip->newivs_index.is_created ());
|
||||
gcc_assert (ip->newivs && ip->newivs_index);
|
||||
index = clast_name_to_index (name, ip->newivs_index);
|
||||
gcc_assert (index >= 0);
|
||||
|
||||
@ -718,12 +719,12 @@ type_for_clast_name (struct clast_name *name, ivs_params_p ip, mpz_t bound_one,
|
||||
{
|
||||
bool found = false;
|
||||
|
||||
if (ip->params.exists () && ip->params_index.is_created ())
|
||||
if (ip->params.exists () && ip->params_index)
|
||||
found = clast_name_to_lb_ub (name, ip->params_index, bound_one, bound_two);
|
||||
|
||||
if (!found)
|
||||
{
|
||||
gcc_assert (ip->newivs && ip->newivs_index.is_created ());
|
||||
gcc_assert (ip->newivs && ip->newivs_index);
|
||||
found = clast_name_to_lb_ub (name, ip->newivs_index, bound_one,
|
||||
bound_two);
|
||||
gcc_assert (found);
|
||||
@ -1029,13 +1030,13 @@ new_bb_pbb_def (basic_block bb, poly_bb_p pbb)
|
||||
|
||||
static void
|
||||
mark_bb_with_pbb (poly_bb_p pbb, basic_block bb,
|
||||
bb_pbb_htab_type bb_pbb_mapping)
|
||||
bb_pbb_htab_type *bb_pbb_mapping)
|
||||
{
|
||||
bb_pbb_def tmp;
|
||||
bb_pbb_def **x;
|
||||
|
||||
tmp.bb = bb;
|
||||
x = bb_pbb_mapping.find_slot (&tmp, INSERT);
|
||||
x = bb_pbb_mapping->find_slot (&tmp, INSERT);
|
||||
|
||||
if (x && !*x)
|
||||
*x = new_bb_pbb_def (bb, pbb);
|
||||
@ -1044,13 +1045,13 @@ mark_bb_with_pbb (poly_bb_p pbb, basic_block bb,
|
||||
/* Find BB's related poly_bb_p in hash table BB_PBB_MAPPING. */
|
||||
|
||||
poly_bb_p
|
||||
find_pbb_via_hash (bb_pbb_htab_type bb_pbb_mapping, basic_block bb)
|
||||
find_pbb_via_hash (bb_pbb_htab_type *bb_pbb_mapping, basic_block bb)
|
||||
{
|
||||
bb_pbb_def tmp;
|
||||
bb_pbb_def **slot;
|
||||
|
||||
tmp.bb = bb;
|
||||
slot = bb_pbb_mapping.find_slot (&tmp, NO_INSERT);
|
||||
slot = bb_pbb_mapping->find_slot (&tmp, NO_INSERT);
|
||||
|
||||
if (slot && *slot)
|
||||
return ((bb_pbb_def *) *slot)->pbb;
|
||||
@ -1064,7 +1065,7 @@ find_pbb_via_hash (bb_pbb_htab_type bb_pbb_mapping, basic_block bb)
|
||||
related poly_bb_p. */
|
||||
|
||||
scop_p
|
||||
get_loop_body_pbbs (loop_p loop, bb_pbb_htab_type bb_pbb_mapping,
|
||||
get_loop_body_pbbs (loop_p loop, bb_pbb_htab_type *bb_pbb_mapping,
|
||||
vec<poly_bb_p> *pbbs)
|
||||
{
|
||||
unsigned i;
|
||||
@ -1094,7 +1095,7 @@ get_loop_body_pbbs (loop_p loop, bb_pbb_htab_type bb_pbb_mapping,
|
||||
|
||||
static edge
|
||||
translate_clast_user (struct clast_user_stmt *stmt, edge next_e,
|
||||
bb_pbb_htab_type bb_pbb_mapping, ivs_params_p ip)
|
||||
bb_pbb_htab_type *bb_pbb_mapping, ivs_params_p ip)
|
||||
{
|
||||
int i, nb_loops;
|
||||
basic_block new_bb;
|
||||
@ -1163,7 +1164,7 @@ graphite_create_new_loop_guard (edge entry_edge, struct clast_for *stmt,
|
||||
}
|
||||
|
||||
static edge
|
||||
translate_clast (loop_p, struct clast_stmt *, edge, bb_pbb_htab_type,
|
||||
translate_clast (loop_p, struct clast_stmt *, edge, bb_pbb_htab_type *,
|
||||
int, ivs_params_p);
|
||||
|
||||
/* Create the loop for a clast for statement.
|
||||
@ -1173,7 +1174,7 @@ translate_clast (loop_p, struct clast_stmt *, edge, bb_pbb_htab_type,
|
||||
|
||||
static edge
|
||||
translate_clast_for_loop (loop_p context_loop, struct clast_for *stmt,
|
||||
edge next_e, bb_pbb_htab_type bb_pbb_mapping,
|
||||
edge next_e, bb_pbb_htab_type *bb_pbb_mapping,
|
||||
int level, tree type, tree lb, tree ub,
|
||||
ivs_params_p ip)
|
||||
{
|
||||
@ -1211,7 +1212,7 @@ translate_clast_for_loop (loop_p context_loop, struct clast_for *stmt,
|
||||
|
||||
static edge
|
||||
translate_clast_for (loop_p context_loop, struct clast_for *stmt, edge next_e,
|
||||
bb_pbb_htab_type bb_pbb_mapping, int level,
|
||||
bb_pbb_htab_type *bb_pbb_mapping, int level,
|
||||
ivs_params_p ip)
|
||||
{
|
||||
tree type, lb, ub;
|
||||
@ -1270,7 +1271,7 @@ translate_clast_assignment (struct clast_assignment *stmt, edge next_e,
|
||||
|
||||
static edge
|
||||
translate_clast_guard (loop_p context_loop, struct clast_guard *stmt,
|
||||
edge next_e, bb_pbb_htab_type bb_pbb_mapping, int level,
|
||||
edge next_e, bb_pbb_htab_type *bb_pbb_mapping, int level,
|
||||
ivs_params_p ip)
|
||||
{
|
||||
edge last_e = graphite_create_new_guard (next_e, stmt, ip);
|
||||
@ -1289,7 +1290,7 @@ translate_clast_guard (loop_p context_loop, struct clast_guard *stmt,
|
||||
|
||||
static edge
|
||||
translate_clast (loop_p context_loop, struct clast_stmt *stmt, edge next_e,
|
||||
bb_pbb_htab_type bb_pbb_mapping, int level, ivs_params_p ip)
|
||||
bb_pbb_htab_type *bb_pbb_mapping, int level, ivs_params_p ip)
|
||||
{
|
||||
if (!stmt)
|
||||
return next_e;
|
||||
@ -1331,7 +1332,7 @@ translate_clast (loop_p context_loop, struct clast_stmt *stmt, edge next_e,
|
||||
static CloogUnionDomain *
|
||||
add_names_to_union_domain (scop_p scop, CloogUnionDomain *union_domain,
|
||||
int nb_scattering_dims,
|
||||
clast_index_htab_type params_index)
|
||||
clast_index_htab_type *params_index)
|
||||
{
|
||||
sese region = SCOP_REGION (scop);
|
||||
int i;
|
||||
@ -1582,7 +1583,7 @@ int get_max_scattering_dimensions (scop_p scop)
|
||||
}
|
||||
|
||||
static CloogInput *
|
||||
generate_cloog_input (scop_p scop, clast_index_htab_type params_index)
|
||||
generate_cloog_input (scop_p scop, clast_index_htab_type *params_index)
|
||||
{
|
||||
CloogUnionDomain *union_domain;
|
||||
CloogInput *cloog_input;
|
||||
@ -1605,7 +1606,7 @@ generate_cloog_input (scop_p scop, clast_index_htab_type params_index)
|
||||
without a program. */
|
||||
|
||||
static struct clast_stmt *
|
||||
scop_to_clast (scop_p scop, clast_index_htab_type params_index)
|
||||
scop_to_clast (scop_p scop, clast_index_htab_type *params_index)
|
||||
{
|
||||
CloogInput *cloog_input;
|
||||
struct clast_stmt *clast;
|
||||
@ -1634,11 +1635,9 @@ void
|
||||
print_generated_program (FILE *file, scop_p scop)
|
||||
{
|
||||
CloogOptions *options = set_cloog_options ();
|
||||
clast_index_htab_type params_index;
|
||||
clast_index_htab_type *params_index = new clast_index_htab_type (10);
|
||||
struct clast_stmt *clast;
|
||||
|
||||
params_index.create (10);
|
||||
|
||||
clast = scop_to_clast (scop, params_index);
|
||||
|
||||
fprintf (file, " (clast: \n");
|
||||
@ -1663,20 +1662,20 @@ debug_generated_program (scop_p scop)
|
||||
*/
|
||||
|
||||
bool
|
||||
gloog (scop_p scop, bb_pbb_htab_type bb_pbb_mapping)
|
||||
gloog (scop_p scop, bb_pbb_htab_type *bb_pbb_mapping)
|
||||
{
|
||||
auto_vec<tree, 10> newivs;
|
||||
loop_p context_loop;
|
||||
sese region = SCOP_REGION (scop);
|
||||
ifsese if_region = NULL;
|
||||
clast_index_htab_type newivs_index, params_index;
|
||||
clast_index_htab_type *newivs_index, *params_index;
|
||||
struct clast_stmt *clast;
|
||||
struct ivs_params ip;
|
||||
|
||||
timevar_push (TV_GRAPHITE_CODE_GEN);
|
||||
gloog_error = false;
|
||||
|
||||
params_index.create (10);
|
||||
params_index = new clast_index_htab_type (10);
|
||||
|
||||
clast = scop_to_clast (scop, params_index);
|
||||
|
||||
@ -1699,7 +1698,7 @@ gloog (scop_p scop, bb_pbb_htab_type bb_pbb_mapping)
|
||||
graphite_verify ();
|
||||
|
||||
context_loop = SESE_ENTRY (region)->src->loop_father;
|
||||
newivs_index.create (10);
|
||||
newivs_index= new clast_index_htab_type (10);
|
||||
|
||||
ip.newivs = &newivs;
|
||||
ip.newivs_index = newivs_index;
|
||||
@ -1721,8 +1720,10 @@ gloog (scop_p scop, bb_pbb_htab_type bb_pbb_mapping)
|
||||
free (if_region->region);
|
||||
free (if_region);
|
||||
|
||||
newivs_index.dispose ();
|
||||
params_index.dispose ();
|
||||
delete newivs_index;
|
||||
newivs_index = NULL;
|
||||
delete params_index;
|
||||
params_index = NULL;
|
||||
cloog_clast_free (clast);
|
||||
timevar_pop (TV_GRAPHITE_CODE_GEN);
|
||||
|
||||
|
@@ -587,7 +587,7 @@ loop_level_carries_dependences (scop_p scop, vec<poly_bb_p> body,
 poly_bb_p. */

 bool
-loop_is_parallel_p (loop_p loop, bb_pbb_htab_type bb_pbb_mapping, int depth)
+loop_is_parallel_p (loop_p loop, bb_pbb_htab_type *bb_pbb_mapping, int depth)
 {
 bool dependences;
 scop_p scop;
@@ -50,11 +50,11 @@ bb_pbb_hasher::equal (const value_type *bp1, const compare_type *bp2)
 return (bp1->bb->index == bp2->bb->index);
 }

-typedef hash_table <bb_pbb_hasher> bb_pbb_htab_type;
+typedef hash_table<bb_pbb_hasher> bb_pbb_htab_type;

-extern bool gloog (scop_p, bb_pbb_htab_type);
-poly_bb_p find_pbb_via_hash (bb_pbb_htab_type, basic_block);
-bool loop_is_parallel_p (loop_p, bb_pbb_htab_type, int);
-scop_p get_loop_body_pbbs (loop_p, bb_pbb_htab_type, vec<poly_bb_p> *);
+extern bool gloog (scop_p, bb_pbb_htab_type *);
+poly_bb_p find_pbb_via_hash (bb_pbb_htab_type *, basic_block);
+bool loop_is_parallel_p (loop_p, bb_pbb_htab_type *, int);
+scop_p get_loop_body_pbbs (loop_p, bb_pbb_htab_type *, vec<poly_bb_p> *);

 #endif
@@ -269,7 +269,6 @@ graphite_transform_loops (void)
 scop_p scop;
 bool need_cfg_cleanup_p = false;
 vec<scop_p> scops = vNULL;
-bb_pbb_htab_type bb_pbb_mapping;
 isl_ctx *ctx;

 /* If a function is parallel it was most probably already run through graphite
@@ -291,8 +290,7 @@ graphite_transform_loops (void)
 print_global_statistics (dump_file);
 }

-bb_pbb_mapping.create (10);
-
+bb_pbb_htab_type bb_pbb_mapping (10);
 FOR_EACH_VEC_ELT (scops, i, scop)
 if (dbg_cnt (graphite_scop))
 {
@@ -301,11 +299,10 @@ graphite_transform_loops (void)

 if (POLY_SCOP_P (scop)
 && apply_poly_transforms (scop)
-&& gloog (scop, bb_pbb_mapping))
+&& gloog (scop, &bb_pbb_mapping))
 need_cfg_cleanup_p = true;
 }

-bb_pbb_mapping.dispose ();
 free_scops (scops);
 graphite_finalize (need_cfg_cleanup_p);
 the_isl_ctx = NULL;
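graphite.c uses the other idiom made possible by this change: an automatic hash_table constructed with its initial size and handed to helpers by address. A small sketch under the same assumptions; transform_one_scop is a hypothetical wrapper and bb_pbb_htab_type is the typedef from graphite-htab.h above:

    static bool
    transform_one_scop (scop_p scop)
    {
      /* Constructed directly with an initial size; no create () call.  */
      bb_pbb_htab_type bb_pbb_mapping (10);

      /* Callees now take the table by pointer rather than by value,
         so they all share this one object.  */
      bool changed = gloog (scop, &bb_pbb_mapping);

      /* No dispose (): the destructor releases the storage when the
         object goes out of scope.  */
      return changed;
    }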
@ -651,8 +651,8 @@ delay_i2_hasher::equal (const value_type *x, const compare_type *y)
|
||||
|
||||
/* Two hash tables to record delay_pairs, one indexed by I1 and the other
|
||||
indexed by I2. */
|
||||
static hash_table <delay_i1_hasher> delay_htab;
|
||||
static hash_table <delay_i2_hasher> delay_htab_i2;
|
||||
static hash_table<delay_i1_hasher> *delay_htab;
|
||||
static hash_table<delay_i2_hasher> *delay_htab_i2;
|
||||
|
||||
/* Called through htab_traverse. Walk the hashtable using I2 as
|
||||
index, and delete all elements involving an UID higher than
|
||||
@ -664,7 +664,7 @@ haifa_htab_i2_traverse (delay_pair **slot, int *data)
|
||||
struct delay_pair *p = *slot;
|
||||
if (INSN_UID (p->i2) >= maxuid || INSN_UID (p->i1) >= maxuid)
|
||||
{
|
||||
delay_htab_i2.clear_slot (slot);
|
||||
delay_htab_i2->clear_slot (slot);
|
||||
}
|
||||
return 1;
|
||||
}
|
||||
@ -680,7 +680,7 @@ haifa_htab_i1_traverse (delay_pair **pslot, int *data)
|
||||
|
||||
if (INSN_UID ((*pslot)->i1) >= maxuid)
|
||||
{
|
||||
delay_htab.clear_slot (pslot);
|
||||
delay_htab->clear_slot (pslot);
|
||||
return 1;
|
||||
}
|
||||
pprev = &first;
|
||||
@ -694,7 +694,7 @@ haifa_htab_i1_traverse (delay_pair **pslot, int *data)
|
||||
}
|
||||
*pprev = NULL;
|
||||
if (first == NULL)
|
||||
delay_htab.clear_slot (pslot);
|
||||
delay_htab->clear_slot (pslot);
|
||||
else
|
||||
*pslot = first;
|
||||
return 1;
|
||||
@ -705,8 +705,8 @@ haifa_htab_i1_traverse (delay_pair **pslot, int *data)
|
||||
void
|
||||
discard_delay_pairs_above (int max_uid)
|
||||
{
|
||||
delay_htab.traverse <int *, haifa_htab_i1_traverse> (&max_uid);
|
||||
delay_htab_i2.traverse <int *, haifa_htab_i2_traverse> (&max_uid);
|
||||
delay_htab->traverse <int *, haifa_htab_i1_traverse> (&max_uid);
|
||||
delay_htab_i2->traverse <int *, haifa_htab_i2_traverse> (&max_uid);
|
||||
}
|
||||
|
||||
/* This function can be called by a port just before it starts the final
|
||||
@ -736,15 +736,15 @@ record_delay_slot_pair (rtx i1, rtx i2, int cycles, int stages)
|
||||
p->cycles = cycles;
|
||||
p->stages = stages;
|
||||
|
||||
if (!delay_htab.is_created ())
|
||||
if (!delay_htab)
|
||||
{
|
||||
delay_htab.create (10);
|
||||
delay_htab_i2.create (10);
|
||||
delay_htab = new hash_table<delay_i1_hasher> (10);
|
||||
delay_htab_i2 = new hash_table<delay_i2_hasher> (10);
|
||||
}
|
||||
slot = delay_htab.find_slot_with_hash (i1, htab_hash_pointer (i1), INSERT);
|
||||
slot = delay_htab->find_slot_with_hash (i1, htab_hash_pointer (i1), INSERT);
|
||||
p->next_same_i1 = *slot;
|
||||
*slot = p;
|
||||
slot = delay_htab_i2.find_slot_with_hash (i2, htab_hash_pointer (i2), INSERT);
|
||||
slot = delay_htab_i2->find_slot (p, INSERT);
|
||||
*slot = p;
|
||||
}
|
||||
|
||||
@ -755,10 +755,10 @@ real_insn_for_shadow (rtx insn)
|
||||
{
|
||||
struct delay_pair *pair;
|
||||
|
||||
if (!delay_htab.is_created ())
|
||||
if (!delay_htab)
|
||||
return NULL_RTX;
|
||||
|
||||
pair = delay_htab_i2.find_with_hash (insn, htab_hash_pointer (insn));
|
||||
pair = delay_htab_i2->find_with_hash (insn, htab_hash_pointer (insn));
|
||||
if (!pair || pair->stages > 0)
|
||||
return NULL_RTX;
|
||||
return pair->i1;
|
||||
@ -786,10 +786,10 @@ add_delay_dependencies (rtx insn)
|
||||
sd_iterator_def sd_it;
|
||||
dep_t dep;
|
||||
|
||||
if (!delay_htab.is_created ())
|
||||
if (!delay_htab)
|
||||
return;
|
||||
|
||||
pair = delay_htab_i2.find_with_hash (insn, htab_hash_pointer (insn));
|
||||
pair = delay_htab_i2->find_with_hash (insn, htab_hash_pointer (insn));
|
||||
if (!pair)
|
||||
return;
|
||||
add_dependence (insn, pair->i1, REG_DEP_ANTI);
|
||||
@ -800,7 +800,7 @@ add_delay_dependencies (rtx insn)
|
||||
{
|
||||
rtx pro = DEP_PRO (dep);
|
||||
struct delay_pair *other_pair
|
||||
= delay_htab_i2.find_with_hash (pro, htab_hash_pointer (pro));
|
||||
= delay_htab_i2->find_with_hash (pro, htab_hash_pointer (pro));
|
||||
if (!other_pair || other_pair->stages)
|
||||
continue;
|
||||
if (pair_delay (other_pair) >= pair_delay (pair))
|
||||
@ -1421,11 +1421,11 @@ dep_cost_1 (dep_t link, dw_t dw)
|
||||
if (DEP_COST (link) != UNKNOWN_DEP_COST)
|
||||
return DEP_COST (link);
|
||||
|
||||
if (delay_htab.is_created ())
|
||||
if (delay_htab)
|
||||
{
|
||||
struct delay_pair *delay_entry;
|
||||
delay_entry
|
||||
= delay_htab_i2.find_with_hash (used, htab_hash_pointer (used));
|
||||
= delay_htab_i2->find_with_hash (used, htab_hash_pointer (used));
|
||||
if (delay_entry)
|
||||
{
|
||||
if (delay_entry->i1 == insn)
|
||||
@ -5779,12 +5779,12 @@ prune_ready_list (state_t temp_state, bool first_cycle_insn_p,
|
||||
{
|
||||
int delay_cost = 0;
|
||||
|
||||
if (delay_htab.is_created ())
|
||||
if (delay_htab)
|
||||
{
|
||||
struct delay_pair *delay_entry;
|
||||
delay_entry
|
||||
= delay_htab.find_with_hash (insn,
|
||||
htab_hash_pointer (insn));
|
||||
= delay_htab->find_with_hash (insn,
|
||||
htab_hash_pointer (insn));
|
||||
while (delay_entry && delay_cost == 0)
|
||||
{
|
||||
delay_cost = estimate_shadow_tick (delay_entry);
|
||||
@ -6278,13 +6278,13 @@ schedule_block (basic_block *target_bb, state_t init_state)
|
||||
goto restart_choose_ready;
|
||||
}
|
||||
|
||||
if (delay_htab.is_created ())
|
||||
if (delay_htab)
|
||||
{
|
||||
/* If this insn is the first part of a delay-slot pair, record a
|
||||
backtrack point. */
|
||||
struct delay_pair *delay_entry;
|
||||
delay_entry
|
||||
= delay_htab.find_with_hash (insn, htab_hash_pointer (insn));
|
||||
= delay_htab->find_with_hash (insn, htab_hash_pointer (insn));
|
||||
if (delay_entry)
|
||||
{
|
||||
save_backtrack_point (delay_entry, ls);
|
||||
@ -6873,10 +6873,10 @@ sched_finish (void)
|
||||
void
|
||||
free_delay_pairs (void)
|
||||
{
|
||||
if (delay_htab.is_created ())
|
||||
if (delay_htab)
|
||||
{
|
||||
delay_htab.empty ();
|
||||
delay_htab_i2.empty ();
|
||||
delay_htab->empty ();
|
||||
delay_htab_i2->empty ();
|
||||
}
|
||||
}
|
||||
|
||||
|
gcc/hash-table.h (776)
@ -60,12 +60,6 @@ along with GCC; see the file COPYING3. If not see
|
||||
is allocated. This type is called the allocator type. It is
|
||||
parameterized on the value type. It provides four functions.
|
||||
|
||||
- A static member function named 'control_alloc'. This function
|
||||
allocates the control data blocks for the table.
|
||||
|
||||
- A static member function named 'control_free'. This function
|
||||
frees the control data blocks for the table.
|
||||
|
||||
- A static member function named 'data_alloc'. This function
|
||||
allocates the data elements in the table.
|
||||
|
||||
@ -211,23 +205,11 @@ along with GCC; see the file COPYING3. If not see
|
||||
template <typename Type>
|
||||
struct xcallocator
|
||||
{
|
||||
static Type *control_alloc (size_t count);
|
||||
static Type *data_alloc (size_t count);
|
||||
static void control_free (Type *memory);
|
||||
static void data_free (Type *memory);
|
||||
};
|
||||
|
||||
|
||||
/* Allocate memory for COUNT control blocks. */
|
||||
|
||||
template <typename Type>
|
||||
inline Type *
|
||||
xcallocator <Type>::control_alloc (size_t count)
|
||||
{
|
||||
return static_cast <Type *> (xcalloc (count, sizeof (Type)));
|
||||
}
|
||||
|
||||
|
||||
/* Allocate memory for COUNT data blocks. */
|
||||
|
||||
template <typename Type>
|
||||
@ -238,16 +220,6 @@ xcallocator <Type>::data_alloc (size_t count)
|
||||
}
|
||||
|
||||
|
||||
/* Free memory for control blocks. */
|
||||
|
||||
template <typename Type>
|
||||
inline void
|
||||
xcallocator <Type>::control_free (Type *memory)
|
||||
{
|
||||
return ::free (memory);
|
||||
}
|
||||
|
||||
|
||||
/* Free memory for data blocks. */
|
||||
|
||||
template <typename Type>
|
||||
@ -348,37 +320,6 @@ extern hashval_t hash_table_mod1 (hashval_t hash, unsigned int index);
|
||||
extern hashval_t hash_table_mod2 (hashval_t hash, unsigned int index);
|
||||
|
||||
|
||||
/* Internal implementation type. */
|
||||
|
||||
template <typename T>
|
||||
struct hash_table_control
|
||||
{
|
||||
/* Table itself. */
|
||||
T **entries;
|
||||
|
||||
/* Current size (in entries) of the hash table. */
|
||||
size_t size;
|
||||
|
||||
/* Current number of elements including also deleted elements. */
|
||||
size_t n_elements;
|
||||
|
||||
/* Current number of deleted elements in the table. */
|
||||
size_t n_deleted;
|
||||
|
||||
/* The following member is used for debugging. Its value is number
|
||||
of all calls of `htab_find_slot' for the hash table. */
|
||||
unsigned int searches;
|
||||
|
||||
/* The following member is used for debugging. Its value is number
|
||||
of collisions fixed for time of work with the hash table. */
|
||||
unsigned int collisions;
|
||||
|
||||
/* Current size (in entries) of the hash table, as an index into the
|
||||
table of primes. */
|
||||
unsigned int size_prime_index;
|
||||
};
|
||||
|
||||
|
||||
/* User-facing hash table type.
|
||||
|
||||
The table stores elements of type Descriptor::value_type.
|
||||
@ -400,213 +341,174 @@ struct hash_table_control
|
||||
The default is xcallocator.
|
||||
|
||||
*/
|
||||
|
||||
template <typename Descriptor,
	  template <typename Type> class Allocator = xcallocator>
	  template<typename Type> class Allocator= xcallocator>
class hash_table
{
public:
  typedef typename Descriptor::value_type value_type;
  typedef typename Descriptor::compare_type compare_type;

public:
  hash_table (size_t);
  ~hash_table ();

  /* Current size (in entries) of the hash table.  */
  size_t size () const { return m_size; }

  /* Return the current number of elements in this hash table. */
  size_t elements () const { return m_n_elements - m_n_deleted; }

  /* Return the current number of elements in this hash table. */
  size_t elements_with_deleted () const { return m_n_elements; }

  /* This function clears all entries in the given hash table.  */
  void empty ();

  /* This function clears a specified SLOT in a hash table.  It is
     useful when you've already done the lookup and don't want to do it
     again. */

  void clear_slot (value_type **);

  /* This function searches for a hash table entry equal to the given
     COMPARABLE element starting with the given HASH value.  It cannot
     be used to insert or delete an element. */
  value_type *find_with_hash (const compare_type *, hashval_t);

  /* Like find_slot_with_hash, but compute the hash value from the element.  */
  value_type *find (const value_type *value)
    {
      return find_with_hash (value, Descriptor::hash (value));
    }

  value_type **find_slot (const value_type *value, insert_option insert)
    {
      return find_slot_with_hash (value, Descriptor::hash (value), insert);
    }

  /* This function searches for a hash table slot containing an entry
     equal to the given COMPARABLE element and starting with the given
     HASH.  To delete an entry, call this with insert=NO_INSERT, then
     call clear_slot on the slot returned (possibly after doing some
     checks).  To insert an entry, call this with insert=INSERT, then
     write the value you want into the returned slot.  When inserting an
     entry, NULL may be returned if memory allocation fails. */
  value_type **find_slot_with_hash (const compare_type *comparable,
				    hashval_t hash, enum insert_option insert);

  /* This function deletes an element with the given COMPARABLE value
     from hash table starting with the given HASH.  If there is no
     matching element in the hash table, this function does nothing. */
  void remove_elt_with_hash (const compare_type *, hashval_t);

  /* Like remove_elt_with_hash, but compute the hash value from the element.  */
  void remove_elt (const value_type *value)
    {
      remove_elt_with_hash (value, Descriptor::hash (value));
    }

  /* This function scans over the entire hash table calling CALLBACK for
     each live entry.  If CALLBACK returns false, the iteration stops.
     ARGUMENT is passed as CALLBACK's second argument. */
  template <typename Argument,
	    int (*Callback) (value_type **slot, Argument argument)>
  void traverse_noresize (Argument argument);

  /* Like traverse_noresize, but does resize the table when it is too empty
     to improve effectivity of subsequent calls.  */
  template <typename Argument,
	    int (*Callback) (value_type **slot, Argument argument)>
  void traverse (Argument argument);

  class iterator
  {
  public:
    inline iterator ();
    inline iterator (value_type **, value_type **);
    inline value_type &operator * ();
    iterator () : m_slot (NULL), m_limit (NULL) {}

    iterator (value_type **slot, value_type **limit) :
      m_slot (slot), m_limit (limit) {}

    inline value_type &operator * () { return **m_slot; }
    void slide ();
    inline iterator &operator ++ ();
    inline bool operator != (const iterator &) const;
    bool operator != (const iterator &other) const
      {
	return m_slot != other.m_slot || m_limit != other.m_limit;
      }

  private:
    value_type **m_slot;
    value_type **m_limit;
  };

private:
  hash_table_control <value_type> *htab;
  iterator begin () const
    {
      iterator iter (m_entries, m_entries + m_size);
      iter.slide ();
      return iter;
    }

  value_type **find_empty_slot_for_expand (hashval_t hash);
  iterator end () const { return iterator (); }

  double collisions () const
    {
      return m_searches ? static_cast <double> (m_collisions) / m_searches : 0;
    }

private:

  value_type **find_empty_slot_for_expand (hashval_t);
  void expand ();

public:
  hash_table ();
  void create (size_t initial_slots);
  bool is_created ();
  void dispose ();
  value_type *find (const value_type *value);
  value_type *find_with_hash (const compare_type *comparable, hashval_t hash);
  value_type **find_slot (const value_type *value, enum insert_option insert);
  value_type **find_slot_with_hash (const compare_type *comparable,
				    hashval_t hash, enum insert_option insert);
  void empty ();
  void clear_slot (value_type **slot);
  void remove_elt (const value_type *value);
  void remove_elt_with_hash (const compare_type *comparable, hashval_t hash);
  size_t size ();
  size_t elements ();
  size_t elements_with_deleted ();
  double collisions ();
  /* Table itself. */
  typename Descriptor::value_type **m_entries;

  template <typename Argument,
	    int (*Callback) (value_type **slot, Argument argument)>
  void traverse_noresize (Argument argument);
  size_t m_size;

  template <typename Argument,
	    int (*Callback) (value_type **slot, Argument argument)>
  void traverse (Argument argument);
  /* Current number of elements including also deleted elements. */
  size_t m_n_elements;

  iterator begin ();
  iterator end ();
  /* Current number of deleted elements in the table. */
  size_t m_n_deleted;

  /* The following member is used for debugging. Its value is number
     of all calls of `htab_find_slot' for the hash table. */
  unsigned int m_searches;

  /* The following member is used for debugging. Its value is number
     of collisions fixed for time of work with the hash table. */
  unsigned int m_collisions;

  /* Current size (in entries) of the hash table, as an index into the
     table of primes. */
  unsigned int m_size_prime_index;
};
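A minimal usage sketch of the value-semantics interface declared above may help; the int_hasher descriptor and the values below are illustrative only and are not part of this patch.

/* Hypothetical descriptor; the table now lives by value and is set up by
   its constructor instead of create ().  */
struct int_hasher
{
  typedef int value_type;
  typedef int compare_type;
  static inline hashval_t hash (const value_type *v) { return (hashval_t) *v; }
  static inline bool equal (const value_type *a, const compare_type *b)
    { return *a == *b; }
  static inline void remove (value_type *) {}
};

static void
sketch (void)
{
  hash_table<int_hasher> example (13);	/* constructor replaces create ()  */
  static int seven = 7;

  /* Insert: ask for a slot with INSERT and write the value into it.  */
  int **slot = example.find_slot (&seven, INSERT);
  if (!*slot)
    *slot = &seven;

  /* Delete: look the slot up with NO_INSERT and clear it.  */
  if (int **s = example.find_slot (&seven, NO_INSERT))
    example.clear_slot (s);
}					/* destructor replaces dispose ()  */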
|
||||
|
||||
|
||||
/* Construct the hash table. The only useful operation next is create. */
|
||||
|
||||
template <typename Descriptor,
|
||||
template <typename Type> class Allocator>
|
||||
inline
|
||||
hash_table <Descriptor, Allocator>::hash_table ()
|
||||
: htab (NULL)
|
||||
{
|
||||
}
|
||||
|
||||
|
||||
/* See if the table has been created, as opposed to constructed. */
|
||||
|
||||
template <typename Descriptor,
|
||||
template <typename Type> class Allocator>
|
||||
inline bool
|
||||
hash_table <Descriptor, Allocator>::is_created ()
|
||||
{
|
||||
return htab != NULL;
|
||||
}
|
||||
|
||||
|
||||
/* Like find_with_hash, but compute the hash value from the element. */
|
||||
|
||||
template <typename Descriptor,
|
||||
template <typename Type> class Allocator>
|
||||
inline typename Descriptor::value_type *
|
||||
hash_table <Descriptor, Allocator>::find (const value_type *value)
|
||||
{
|
||||
return find_with_hash (value, Descriptor::hash (value));
|
||||
}
|
||||
|
||||
|
||||
/* Like find_slot_with_hash, but compute the hash value from the element. */
|
||||
|
||||
template <typename Descriptor,
|
||||
template <typename Type> class Allocator>
|
||||
inline typename Descriptor::value_type **
|
||||
hash_table <Descriptor, Allocator>
|
||||
::find_slot (const value_type *value, enum insert_option insert)
|
||||
{
|
||||
return find_slot_with_hash (value, Descriptor::hash (value), insert);
|
||||
}
|
||||
|
||||
|
||||
/* Like remove_elt_with_hash, but compute the hash value from the element. */
|
||||
|
||||
template <typename Descriptor,
|
||||
template <typename Type> class Allocator>
|
||||
inline void
|
||||
hash_table <Descriptor, Allocator>::remove_elt (const value_type *value)
|
||||
{
|
||||
remove_elt_with_hash (value, Descriptor::hash (value));
|
||||
}
|
||||
|
||||
|
||||
/* Return the current size of this hash table. */
|
||||
|
||||
template <typename Descriptor,
|
||||
template <typename Type> class Allocator>
|
||||
inline size_t
|
||||
hash_table <Descriptor, Allocator>::size ()
|
||||
{
|
||||
return htab->size;
|
||||
}
|
||||
|
||||
|
||||
/* Return the current number of elements in this hash table. */
|
||||
|
||||
template <typename Descriptor,
|
||||
template <typename Type> class Allocator>
|
||||
inline size_t
|
||||
hash_table <Descriptor, Allocator>::elements ()
|
||||
{
|
||||
return htab->n_elements - htab->n_deleted;
|
||||
}
|
||||
|
||||
|
||||
/* Return the current number of elements in this hash table. */
|
||||
|
||||
template <typename Descriptor,
|
||||
template <typename Type> class Allocator>
|
||||
inline size_t
|
||||
hash_table <Descriptor, Allocator>::elements_with_deleted ()
|
||||
{
|
||||
return htab->n_elements;
|
||||
}
|
||||
|
||||
|
||||
/* Return the fraction of fixed collisions during all work with given
|
||||
hash table. */
|
||||
|
||||
template <typename Descriptor,
|
||||
template <typename Type> class Allocator>
|
||||
inline double
|
||||
hash_table <Descriptor, Allocator>::collisions ()
|
||||
{
|
||||
if (htab->searches == 0)
|
||||
return 0.0;
|
||||
|
||||
return static_cast <double> (htab->collisions) / htab->searches;
|
||||
}
|
||||
|
||||
|
||||
/* Create a hash table with at least the given number of INITIAL_SLOTS. */
|
||||
|
||||
template <typename Descriptor,
|
||||
template <typename Type> class Allocator>
|
||||
void
|
||||
hash_table <Descriptor, Allocator>::create (size_t size)
|
||||
template<typename Descriptor, template<typename Type> class Allocator>
|
||||
hash_table<Descriptor, Allocator>::hash_table (size_t size) :
|
||||
m_n_elements (0), m_n_deleted (0), m_searches (0), m_collisions (0)
|
||||
{
|
||||
unsigned int size_prime_index;
|
||||
|
||||
size_prime_index = hash_table_higher_prime_index (size);
|
||||
size = prime_tab[size_prime_index].prime;
|
||||
|
||||
htab = Allocator <hash_table_control <value_type> > ::control_alloc (1);
|
||||
gcc_assert (htab != NULL);
|
||||
htab->entries = Allocator <value_type*> ::data_alloc (size);
|
||||
gcc_assert (htab->entries != NULL);
|
||||
htab->size = size;
|
||||
htab->size_prime_index = size_prime_index;
|
||||
m_entries = Allocator <value_type*> ::data_alloc (size);
|
||||
gcc_assert (m_entries != NULL);
|
||||
m_size = size;
|
||||
m_size_prime_index = size_prime_index;
|
||||
}
|
||||
|
||||
|
||||
/* Dispose of a hash table. Free all memory and return this hash table to
|
||||
the non-created state. Naturally the hash table must already exist. */
|
||||
|
||||
template <typename Descriptor,
|
||||
template <typename Type> class Allocator>
|
||||
void
|
||||
hash_table <Descriptor, Allocator>::dispose ()
|
||||
template<typename Descriptor, template<typename Type> class Allocator>
|
||||
hash_table<Descriptor, Allocator>::~hash_table ()
|
||||
{
|
||||
size_t size = htab->size;
|
||||
value_type **entries = htab->entries;
|
||||
for (size_t i = m_size - 1; i < m_size; i--)
|
||||
if (m_entries[i] != HTAB_EMPTY_ENTRY && m_entries[i] != HTAB_DELETED_ENTRY)
|
||||
Descriptor::remove (m_entries[i]);
|
||||
|
||||
for (int i = size - 1; i >= 0; i--)
|
||||
if (entries[i] != HTAB_EMPTY_ENTRY && entries[i] != HTAB_DELETED_ENTRY)
|
||||
Descriptor::remove (entries[i]);
|
||||
|
||||
Allocator <value_type *> ::data_free (entries);
|
||||
Allocator <hash_table_control <value_type> > ::control_free (htab);
|
||||
htab = NULL;
|
||||
Allocator <value_type *> ::data_free (m_entries);
|
||||
}
|
||||
|
||||
|
||||
/* Similar to find_slot, but without several unwanted side effects:
|
||||
- Does not call equal when it finds an existing entry.
|
||||
- Does not change the count of elements/searches/collisions in the
|
||||
@ -614,14 +516,13 @@ hash_table <Descriptor, Allocator>::dispose ()
|
||||
This function also assumes there are no deleted entries in the table.
|
||||
HASH is the hash value for the element to be inserted. */
|
||||
|
||||
template <typename Descriptor,
|
||||
template <typename Type> class Allocator>
|
||||
template<typename Descriptor, template<typename Type> class Allocator>
|
||||
typename Descriptor::value_type **
|
||||
hash_table <Descriptor, Allocator>::find_empty_slot_for_expand (hashval_t hash)
|
||||
hash_table<Descriptor, Allocator>::find_empty_slot_for_expand (hashval_t hash)
|
||||
{
|
||||
hashval_t index = hash_table_mod1 (hash, htab->size_prime_index);
|
||||
size_t size = htab->size;
|
||||
value_type **slot = htab->entries + index;
|
||||
hashval_t index = hash_table_mod1 (hash, m_size_prime_index);
|
||||
size_t size = m_size;
|
||||
value_type **slot = m_entries + index;
|
||||
hashval_t hash2;
|
||||
|
||||
if (*slot == HTAB_EMPTY_ENTRY)
|
||||
@ -629,14 +530,14 @@ hash_table <Descriptor, Allocator>::find_empty_slot_for_expand (hashval_t hash)
|
||||
else if (*slot == HTAB_DELETED_ENTRY)
|
||||
abort ();
|
||||
|
||||
hash2 = hash_table_mod2 (hash, htab->size_prime_index);
|
||||
hash2 = hash_table_mod2 (hash, m_size_prime_index);
|
||||
for (;;)
|
||||
{
|
||||
index += hash2;
|
||||
if (index >= size)
|
||||
index -= size;
|
||||
|
||||
slot = htab->entries + index;
|
||||
slot = m_entries + index;
|
||||
if (*slot == HTAB_EMPTY_ENTRY)
|
||||
return slot;
|
||||
else if (*slot == HTAB_DELETED_ENTRY)
|
||||
@ -644,7 +545,6 @@ hash_table <Descriptor, Allocator>::find_empty_slot_for_expand (hashval_t hash)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/* The following function changes size of memory allocated for the
|
||||
entries and repeatedly inserts the table elements. The occupancy
|
||||
of the table after the call will be about 50%. Naturally the hash
|
||||
@ -652,26 +552,20 @@ hash_table <Descriptor, Allocator>::find_empty_slot_for_expand (hashval_t hash)
|
||||
table entries is changed. If memory allocation fails, this function
|
||||
will abort. */
|
||||
|
||||
template <typename Descriptor,
|
||||
template <typename Type> class Allocator>
|
||||
template<typename Descriptor, template<typename Type> class Allocator>
|
||||
void
|
||||
hash_table <Descriptor, Allocator>::expand ()
|
||||
hash_table<Descriptor, Allocator>::expand ()
|
||||
{
|
||||
value_type **oentries;
|
||||
value_type **olimit;
|
||||
value_type **p;
|
||||
value_type **nentries;
|
||||
size_t nsize, osize, elts;
|
||||
unsigned int oindex, nindex;
|
||||
|
||||
oentries = htab->entries;
|
||||
oindex = htab->size_prime_index;
|
||||
osize = htab->size;
|
||||
olimit = oentries + osize;
|
||||
elts = elements ();
|
||||
value_type **oentries = m_entries;
|
||||
unsigned int oindex = m_size_prime_index;
|
||||
size_t osize = size ();
|
||||
value_type **olimit = oentries + osize;
|
||||
size_t elts = elements ();
|
||||
|
||||
/* Resize only when table after removal of unused elements is either
|
||||
too full or too empty. */
|
||||
unsigned int nindex;
|
||||
size_t nsize;
|
||||
if (elts * 2 > osize || (elts * 8 < osize && osize > 32))
|
||||
{
|
||||
nindex = hash_table_higher_prime_index (elts * 2);
|
||||
@ -683,15 +577,15 @@ hash_table <Descriptor, Allocator>::expand ()
|
||||
nsize = osize;
|
||||
}
|
||||
|
||||
nentries = Allocator <value_type *> ::data_alloc (nsize);
|
||||
value_type **nentries = Allocator <value_type *> ::data_alloc (nsize);
|
||||
gcc_assert (nentries != NULL);
|
||||
htab->entries = nentries;
|
||||
htab->size = nsize;
|
||||
htab->size_prime_index = nindex;
|
||||
htab->n_elements -= htab->n_deleted;
|
||||
htab->n_deleted = 0;
|
||||
m_entries = nentries;
|
||||
m_size = nsize;
|
||||
m_size_prime_index = nindex;
|
||||
m_n_elements -= m_n_deleted;
|
||||
m_n_deleted = 0;
|
||||
|
||||
p = oentries;
|
||||
value_type **p = oentries;
|
||||
do
|
||||
{
|
||||
value_type *x = *p;
|
||||
@ -710,132 +604,12 @@ hash_table <Descriptor, Allocator>::expand ()
|
||||
Allocator <value_type *> ::data_free (oentries);
|
||||
}
|
||||
|
||||
|
||||
/* This function searches for a hash table entry equal to the given
|
||||
COMPARABLE element starting with the given HASH value. It cannot
|
||||
be used to insert or delete an element. */
|
||||
|
||||
template <typename Descriptor,
|
||||
template <typename Type> class Allocator>
|
||||
typename Descriptor::value_type *
|
||||
hash_table <Descriptor, Allocator>
|
||||
::find_with_hash (const compare_type *comparable, hashval_t hash)
|
||||
{
|
||||
hashval_t index, hash2;
|
||||
size_t size;
|
||||
value_type *entry;
|
||||
|
||||
htab->searches++;
|
||||
size = htab->size;
|
||||
index = hash_table_mod1 (hash, htab->size_prime_index);
|
||||
|
||||
entry = htab->entries[index];
|
||||
if (entry == HTAB_EMPTY_ENTRY
|
||||
|| (entry != HTAB_DELETED_ENTRY && Descriptor::equal (entry, comparable)))
|
||||
return entry;
|
||||
|
||||
hash2 = hash_table_mod2 (hash, htab->size_prime_index);
|
||||
for (;;)
|
||||
{
|
||||
htab->collisions++;
|
||||
index += hash2;
|
||||
if (index >= size)
|
||||
index -= size;
|
||||
|
||||
entry = htab->entries[index];
|
||||
if (entry == HTAB_EMPTY_ENTRY
|
||||
|| (entry != HTAB_DELETED_ENTRY
|
||||
&& Descriptor::equal (entry, comparable)))
|
||||
return entry;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/* This function searches for a hash table slot containing an entry
|
||||
equal to the given COMPARABLE element and starting with the given
|
||||
HASH. To delete an entry, call this with insert=NO_INSERT, then
|
||||
call clear_slot on the slot returned (possibly after doing some
|
||||
checks). To insert an entry, call this with insert=INSERT, then
|
||||
write the value you want into the returned slot. When inserting an
|
||||
entry, NULL may be returned if memory allocation fails. */
|
||||
|
||||
template <typename Descriptor,
|
||||
template <typename Type> class Allocator>
|
||||
typename Descriptor::value_type **
|
||||
hash_table <Descriptor, Allocator>
|
||||
::find_slot_with_hash (const compare_type *comparable, hashval_t hash,
|
||||
enum insert_option insert)
|
||||
{
|
||||
value_type **first_deleted_slot;
|
||||
hashval_t index, hash2;
|
||||
size_t size;
|
||||
value_type *entry;
|
||||
|
||||
size = htab->size;
|
||||
if (insert == INSERT && size * 3 <= htab->n_elements * 4)
|
||||
{
|
||||
expand ();
|
||||
size = htab->size;
|
||||
}
|
||||
|
||||
index = hash_table_mod1 (hash, htab->size_prime_index);
|
||||
|
||||
htab->searches++;
|
||||
first_deleted_slot = NULL;
|
||||
|
||||
entry = htab->entries[index];
|
||||
if (entry == HTAB_EMPTY_ENTRY)
|
||||
goto empty_entry;
|
||||
else if (entry == HTAB_DELETED_ENTRY)
|
||||
first_deleted_slot = &htab->entries[index];
|
||||
else if (Descriptor::equal (entry, comparable))
|
||||
return &htab->entries[index];
|
||||
|
||||
hash2 = hash_table_mod2 (hash, htab->size_prime_index);
|
||||
for (;;)
|
||||
{
|
||||
htab->collisions++;
|
||||
index += hash2;
|
||||
if (index >= size)
|
||||
index -= size;
|
||||
|
||||
entry = htab->entries[index];
|
||||
if (entry == HTAB_EMPTY_ENTRY)
|
||||
goto empty_entry;
|
||||
else if (entry == HTAB_DELETED_ENTRY)
|
||||
{
|
||||
if (!first_deleted_slot)
|
||||
first_deleted_slot = &htab->entries[index];
|
||||
}
|
||||
else if (Descriptor::equal (entry, comparable))
|
||||
return &htab->entries[index];
|
||||
}
|
||||
|
||||
empty_entry:
|
||||
if (insert == NO_INSERT)
|
||||
return NULL;
|
||||
|
||||
if (first_deleted_slot)
|
||||
{
|
||||
htab->n_deleted--;
|
||||
*first_deleted_slot = static_cast <value_type *> (HTAB_EMPTY_ENTRY);
|
||||
return first_deleted_slot;
|
||||
}
|
||||
|
||||
htab->n_elements++;
|
||||
return &htab->entries[index];
|
||||
}
|
||||
|
||||
|
||||
/* This function clears all entries in the given hash table. */
|
||||
|
||||
template <typename Descriptor,
|
||||
template <typename Type> class Allocator>
|
||||
template<typename Descriptor, template<typename Type> class Allocator>
|
||||
void
|
||||
hash_table <Descriptor, Allocator>::empty ()
|
||||
hash_table<Descriptor, Allocator>::empty ()
|
||||
{
|
||||
size_t size = htab->size;
|
||||
value_type **entries = htab->entries;
|
||||
size_t size = m_size;
|
||||
value_type **entries = m_entries;
|
||||
int i;
|
||||
|
||||
for (i = size - 1; i >= 0; i--)
|
||||
@ -848,77 +622,166 @@ hash_table <Descriptor, Allocator>::empty ()
|
||||
int nindex = hash_table_higher_prime_index (1024 / sizeof (PTR));
|
||||
int nsize = prime_tab[nindex].prime;
|
||||
|
||||
Allocator <value_type *> ::data_free (htab->entries);
|
||||
htab->entries = Allocator <value_type *> ::data_alloc (nsize);
|
||||
htab->size = nsize;
|
||||
htab->size_prime_index = nindex;
|
||||
Allocator <value_type *> ::data_free (m_entries);
|
||||
m_entries = Allocator <value_type *> ::data_alloc (nsize);
|
||||
m_size = nsize;
|
||||
m_size_prime_index = nindex;
|
||||
}
|
||||
else
|
||||
memset (entries, 0, size * sizeof (value_type *));
|
||||
htab->n_deleted = 0;
|
||||
htab->n_elements = 0;
|
||||
m_n_deleted = 0;
|
||||
m_n_elements = 0;
|
||||
}
|
||||
|
||||
|
||||
/* This function clears a specified SLOT in a hash table. It is
|
||||
useful when you've already done the lookup and don't want to do it
|
||||
again. */
|
||||
|
||||
template <typename Descriptor,
|
||||
template <typename Type> class Allocator>
|
||||
template<typename Descriptor, template<typename Type> class Allocator>
|
||||
void
|
||||
hash_table <Descriptor, Allocator>::clear_slot (value_type **slot)
|
||||
hash_table<Descriptor, Allocator>::clear_slot (value_type **slot)
|
||||
{
|
||||
if (slot < htab->entries || slot >= htab->entries + htab->size
|
||||
if (slot < m_entries || slot >= m_entries + size ()
|
||||
|| *slot == HTAB_EMPTY_ENTRY || *slot == HTAB_DELETED_ENTRY)
|
||||
abort ();
|
||||
|
||||
Descriptor::remove (*slot);
|
||||
|
||||
*slot = static_cast <value_type *> (HTAB_DELETED_ENTRY);
|
||||
htab->n_deleted++;
|
||||
m_n_deleted++;
|
||||
}
|
||||
|
||||
/* This function searches for a hash table entry equal to the given
|
||||
COMPARABLE element starting with the given HASH value. It cannot
|
||||
be used to insert or delete an element. */
|
||||
|
||||
template<typename Descriptor, template<typename Type> class Allocator>
|
||||
typename Descriptor::value_type *
|
||||
hash_table<Descriptor, Allocator>
|
||||
::find_with_hash (const compare_type *comparable, hashval_t hash)
|
||||
{
|
||||
m_searches++;
|
||||
size_t size = m_size;
|
||||
hashval_t index = hash_table_mod1 (hash, m_size_prime_index);
|
||||
|
||||
value_type *entry = m_entries[index];
|
||||
if (entry == HTAB_EMPTY_ENTRY
|
||||
|| (entry != HTAB_DELETED_ENTRY && Descriptor::equal (entry, comparable)))
|
||||
return entry;
|
||||
|
||||
hashval_t hash2 = hash_table_mod2 (hash, m_size_prime_index);
|
||||
for (;;)
|
||||
{
|
||||
m_collisions++;
|
||||
index += hash2;
|
||||
if (index >= size)
|
||||
index -= size;
|
||||
|
||||
entry = m_entries[index];
|
||||
if (entry == HTAB_EMPTY_ENTRY
|
||||
|| (entry != HTAB_DELETED_ENTRY
|
||||
&& Descriptor::equal (entry, comparable)))
|
||||
return entry;
|
||||
}
|
||||
}
|
||||
|
||||
/* This function searches for a hash table slot containing an entry
|
||||
equal to the given COMPARABLE element and starting with the given
|
||||
HASH. To delete an entry, call this with insert=NO_INSERT, then
|
||||
call clear_slot on the slot returned (possibly after doing some
|
||||
checks). To insert an entry, call this with insert=INSERT, then
|
||||
write the value you want into the returned slot. When inserting an
|
||||
entry, NULL may be returned if memory allocation fails. */
|
||||
|
||||
template<typename Descriptor, template<typename Type> class Allocator>
|
||||
typename Descriptor::value_type **
|
||||
hash_table<Descriptor, Allocator>
|
||||
::find_slot_with_hash (const compare_type *comparable, hashval_t hash,
|
||||
enum insert_option insert)
|
||||
{
|
||||
if (insert == INSERT && m_size * 3 <= m_n_elements * 4)
|
||||
expand ();
|
||||
|
||||
m_searches++;
|
||||
|
||||
value_type **first_deleted_slot = NULL;
|
||||
hashval_t index = hash_table_mod1 (hash, m_size_prime_index);
|
||||
hashval_t hash2 = hash_table_mod2 (hash, m_size_prime_index);
|
||||
value_type *entry = m_entries[index];
|
||||
size_t size = m_size;
|
||||
if (entry == HTAB_EMPTY_ENTRY)
|
||||
goto empty_entry;
|
||||
else if (entry == HTAB_DELETED_ENTRY)
|
||||
first_deleted_slot = &m_entries[index];
|
||||
else if (Descriptor::equal (entry, comparable))
|
||||
return &m_entries[index];
|
||||
|
||||
for (;;)
|
||||
{
|
||||
m_collisions++;
|
||||
index += hash2;
|
||||
if (index >= size)
|
||||
index -= size;
|
||||
|
||||
entry = m_entries[index];
|
||||
if (entry == HTAB_EMPTY_ENTRY)
|
||||
goto empty_entry;
|
||||
else if (entry == HTAB_DELETED_ENTRY)
|
||||
{
|
||||
if (!first_deleted_slot)
|
||||
first_deleted_slot = &m_entries[index];
|
||||
}
|
||||
else if (Descriptor::equal (entry, comparable))
|
||||
return &m_entries[index];
|
||||
}
|
||||
|
||||
empty_entry:
|
||||
if (insert == NO_INSERT)
|
||||
return NULL;
|
||||
|
||||
if (first_deleted_slot)
|
||||
{
|
||||
m_n_deleted--;
|
||||
*first_deleted_slot = static_cast <value_type *> (HTAB_EMPTY_ENTRY);
|
||||
return first_deleted_slot;
|
||||
}
|
||||
|
||||
m_n_elements++;
|
||||
return &m_entries[index];
|
||||
}
|
||||
|
||||
/* This function deletes an element with the given COMPARABLE value
   from hash table starting with the given HASH.  If there is no
   matching element in the hash table, this function does nothing. */

template <typename Descriptor,
	  template <typename Type> class Allocator>
template<typename Descriptor, template<typename Type> class Allocator>
void
hash_table <Descriptor, Allocator>
hash_table<Descriptor, Allocator>
::remove_elt_with_hash (const compare_type *comparable, hashval_t hash)
{
  value_type **slot;

  slot = find_slot_with_hash (comparable, hash, NO_INSERT);
  value_type **slot = find_slot_with_hash (comparable, hash, NO_INSERT);
  if (*slot == HTAB_EMPTY_ENTRY)
    return;

  Descriptor::remove (*slot);

  *slot = static_cast <value_type *> (HTAB_DELETED_ENTRY);
  htab->n_deleted++;
  m_n_deleted++;
}
|
||||
|
||||
|
||||
/* This function scans over the entire hash table calling CALLBACK for
|
||||
each live entry. If CALLBACK returns false, the iteration stops.
|
||||
ARGUMENT is passed as CALLBACK's second argument. */
|
||||
|
||||
template <typename Descriptor,
|
||||
template <typename Type> class Allocator>
|
||||
template <typename Argument,
|
||||
template<typename Descriptor,
|
||||
template<typename Type> class Allocator>
|
||||
template<typename Argument,
|
||||
int (*Callback) (typename Descriptor::value_type **slot, Argument argument)>
|
||||
void
|
||||
hash_table <Descriptor, Allocator>::traverse_noresize (Argument argument)
|
||||
hash_table<Descriptor, Allocator>::traverse_noresize (Argument argument)
|
||||
{
|
||||
value_type **slot;
|
||||
value_type **limit;
|
||||
|
||||
slot = htab->entries;
|
||||
limit = slot + htab->size;
|
||||
value_type **slot = m_entries;
|
||||
value_type **limit = slot + size ();
|
||||
|
||||
do
|
||||
{
|
||||
@ -931,7 +794,6 @@ hash_table <Descriptor, Allocator>::traverse_noresize (Argument argument)
|
||||
while (++slot < limit);
|
||||
}
|
||||
|
||||
|
||||
/* Like traverse_noresize, but does resize the table when it is too empty
   to improve effectivity of subsequent calls.  */

@ -941,55 +803,20 @@ template <typename Argument,
	  int (*Callback) (typename Descriptor::value_type **slot,
			   Argument argument)>
void
hash_table <Descriptor, Allocator>::traverse (Argument argument)
hash_table<Descriptor, Allocator>::traverse (Argument argument)
{
  size_t size = htab->size;
  size_t size = m_size;
  if (elements () * 8 < size && size > 32)
    expand ();

  traverse_noresize <Argument, Callback> (argument);
}
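The traverse / traverse_noresize members above take the callback as a template argument; a sketch of the expected callback shape, reusing the hypothetical int_hasher table from the earlier sketch:

static int
count_live (int **slot, int *n)		/* Callback gets the slot and ARGUMENT.  */
{
  if (*slot)
    ++*n;
  return 1;				/* Returning 0 stops the walk.  */
}

/* Call site:  int n = 0;  example.traverse <int *, count_live> (&n);  */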
|
||||
|
||||
|
||||
/* Iterator definitions. */
|
||||
|
||||
/* The default constructor produces the end value. */
|
||||
|
||||
template <typename Descriptor,
|
||||
template <typename Type> class Allocator>
|
||||
inline
|
||||
hash_table <Descriptor, Allocator>::iterator::iterator ()
|
||||
: m_slot (NULL), m_limit (NULL)
|
||||
{
|
||||
}
|
||||
|
||||
/* The parameterized constructor produces the begin value. */
|
||||
|
||||
template <typename Descriptor,
|
||||
template <typename Type> class Allocator>
|
||||
inline
|
||||
hash_table <Descriptor, Allocator>::iterator::iterator
|
||||
(value_type **slot, value_type **limit)
|
||||
: m_slot (slot), m_limit (limit)
|
||||
{
|
||||
}
|
||||
|
||||
/* Obtain the element. */
|
||||
|
||||
template <typename Descriptor,
|
||||
template <typename Type> class Allocator>
|
||||
inline typename hash_table <Descriptor, Allocator>::value_type &
|
||||
hash_table <Descriptor, Allocator>::iterator::operator * ()
|
||||
{
|
||||
return **m_slot;
|
||||
}
|
||||
|
||||
/* Slide down the iterator slots until an active entry is found. */
|
||||
|
||||
template <typename Descriptor,
|
||||
template <typename Type> class Allocator>
|
||||
template<typename Descriptor, template<typename Type> class Allocator>
|
||||
void
|
||||
hash_table <Descriptor, Allocator>::iterator::slide ()
|
||||
hash_table<Descriptor, Allocator>::iterator::slide ()
|
||||
{
|
||||
for ( ; m_slot < m_limit; ++m_slot )
|
||||
{
|
||||
@ -1003,50 +830,15 @@ hash_table <Descriptor, Allocator>::iterator::slide ()
|
||||
|
||||
/* Bump the iterator. */
|
||||
|
||||
template <typename Descriptor,
|
||||
template <typename Type> class Allocator>
|
||||
inline typename hash_table <Descriptor, Allocator>::iterator &
|
||||
hash_table <Descriptor, Allocator>::iterator::operator ++ ()
|
||||
template<typename Descriptor, template<typename Type> class Allocator>
|
||||
inline typename hash_table<Descriptor, Allocator>::iterator &
|
||||
hash_table<Descriptor, Allocator>::iterator::operator ++ ()
|
||||
{
|
||||
++m_slot;
|
||||
slide ();
|
||||
return *this;
|
||||
}
|
||||
|
||||
/* Compare iterators. */
|
||||
|
||||
template <typename Descriptor,
|
||||
template <typename Type> class Allocator>
|
||||
inline bool
|
||||
hash_table <Descriptor, Allocator>::iterator::
|
||||
operator != (const iterator &other) const
|
||||
{
|
||||
return m_slot != other.m_slot || m_limit != other.m_limit;
|
||||
}
|
||||
|
||||
/* Hash table iterator producers. */
|
||||
|
||||
/* The beginning of a hash table iteration. */
|
||||
|
||||
template <typename Descriptor,
|
||||
template <typename Type> class Allocator>
|
||||
inline typename hash_table <Descriptor, Allocator>::iterator
|
||||
hash_table <Descriptor, Allocator>::begin ()
|
||||
{
|
||||
iterator hti (htab->entries, htab->entries + htab->size);
|
||||
hti.slide ();
|
||||
return hti;
|
||||
}
|
||||
|
||||
/* The end of a hash table iteration. */
|
||||
|
||||
template <typename Descriptor,
|
||||
template <typename Type> class Allocator>
|
||||
inline typename hash_table <Descriptor, Allocator>::iterator
|
||||
hash_table <Descriptor, Allocator>::end ()
|
||||
{
|
||||
return iterator ();
|
||||
}
|
||||
|
||||
/* Iterate through the elements of hash_table HTAB,
|
||||
using hash_table <....>::iterator ITER,
|
||||
|
@ -324,8 +324,8 @@ odr_hasher::remove (value_type *v)
|
||||
|
||||
/* ODR type hash used to lookup ODR type based on tree type node. */
|
||||
|
||||
typedef hash_table <odr_hasher> odr_hash_type;
|
||||
static odr_hash_type odr_hash;
|
||||
typedef hash_table<odr_hasher> odr_hash_type;
|
||||
static odr_hash_type *odr_hash;
|
||||
|
||||
/* ODR types are also stored into ODR_TYPE vector to allow consistent
|
||||
walking. Bases appear before derived types. Vector is garbage collected
|
||||
@ -473,7 +473,8 @@ get_odr_type (tree type, bool insert)
|
||||
type = TYPE_MAIN_VARIANT (type);
|
||||
gcc_checking_assert (TYPE_MAIN_VARIANT (type) == type);
|
||||
hash = hash_type_name (type);
|
||||
slot = odr_hash.find_slot_with_hash (type, hash, insert ? INSERT : NO_INSERT);
|
||||
slot
|
||||
= odr_hash->find_slot_with_hash (type, hash, insert ? INSERT : NO_INSERT);
|
||||
if (!slot)
|
||||
return NULL;
|
||||
|
||||
@ -611,11 +612,11 @@ build_type_inheritance_graph (void)
  FILE *inheritance_dump_file;
  int flags;

  if (odr_hash.is_created ())
  if (odr_hash)
    return;
  timevar_push (TV_IPA_INHERITANCE);
  inheritance_dump_file = dump_begin (TDI_inheritance, &flags);
  odr_hash.create (23);
  odr_hash = new odr_hash_type (23);

  /* We reconstruct the graph starting of types of all methods seen in the
     the unit. */
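The hunks above switch odr_hash (and, below, polymorphic_call_target_hash) from an embedded table to a heap-allocated one; the general calling convention, sketched with the hypothetical int_hasher from the earlier sketch (names here are illustrative, not from the patch):

static hash_table<int_hasher> *cache;

static void
ensure_cache (void)
{
  if (!cache)					/* replaces cache.is_created ()  */
    cache = new hash_table<int_hasher> (23);	/* replaces cache.create (23)  */
}

static void
release_cache (void)
{
  delete cache;					/* replaces cache.dispose ()  */
  cache = NULL;
}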
|
||||
@ -1011,9 +1012,9 @@ polymorphic_call_target_hasher::remove (value_type *v)
|
||||
|
||||
/* Polymorphic call target query cache. */
|
||||
|
||||
typedef hash_table <polymorphic_call_target_hasher>
|
||||
typedef hash_table<polymorphic_call_target_hasher>
|
||||
polymorphic_call_target_hash_type;
|
||||
static polymorphic_call_target_hash_type polymorphic_call_target_hash;
|
||||
static polymorphic_call_target_hash_type *polymorphic_call_target_hash;
|
||||
|
||||
/* Destroy polymorphic call target query cache. */
|
||||
|
||||
@ -1022,7 +1023,8 @@ free_polymorphic_call_targets_hash ()
|
||||
{
|
||||
if (cached_polymorphic_call_targets)
|
||||
{
|
||||
polymorphic_call_target_hash.dispose ();
|
||||
delete polymorphic_call_target_hash;
|
||||
polymorphic_call_target_hash = NULL;
|
||||
pointer_set_destroy (cached_polymorphic_call_targets);
|
||||
cached_polymorphic_call_targets = NULL;
|
||||
}
|
||||
@ -1599,7 +1601,7 @@ possible_polymorphic_call_targets (tree otr_type,
|
||||
bool skipped = false;
|
||||
|
||||
/* If ODR is not initialized, return empty incomplete list. */
|
||||
if (!odr_hash.is_created ())
|
||||
if (!odr_hash)
|
||||
{
|
||||
if (completep)
|
||||
*completep = false;
|
||||
@ -1656,7 +1658,8 @@ possible_polymorphic_call_targets (tree otr_type,
|
||||
if (!cached_polymorphic_call_targets)
|
||||
{
|
||||
cached_polymorphic_call_targets = pointer_set_create ();
|
||||
polymorphic_call_target_hash.create (23);
|
||||
polymorphic_call_target_hash
|
||||
= new polymorphic_call_target_hash_type (23);
|
||||
if (!node_removal_hook_holder)
|
||||
{
|
||||
node_removal_hook_holder =
|
||||
@ -1670,7 +1673,7 @@ possible_polymorphic_call_targets (tree otr_type,
|
||||
key.type = type;
|
||||
key.otr_token = otr_token;
|
||||
key.context = context;
|
||||
slot = polymorphic_call_target_hash.find_slot (&key, INSERT);
|
||||
slot = polymorphic_call_target_hash->find_slot (&key, INSERT);
|
||||
if (cache_token)
|
||||
*cache_token = (void *)*slot;
|
||||
if (*slot)
|
||||
@ -1865,7 +1868,7 @@ possible_polymorphic_call_target_p (tree otr_type,
|
||||
|| fcode == BUILT_IN_TRAP))
|
||||
return true;
|
||||
|
||||
if (!odr_hash.is_created ())
|
||||
if (!odr_hash)
|
||||
return true;
|
||||
targets = possible_polymorphic_call_targets (otr_type, otr_token, ctx, &final);
|
||||
for (i = 0; i < targets.length (); i++)
|
||||
@ -1888,7 +1891,7 @@ update_type_inheritance_graph (void)
|
||||
{
|
||||
struct cgraph_node *n;
|
||||
|
||||
if (!odr_hash.is_created ())
|
||||
if (!odr_hash)
|
||||
return;
|
||||
free_polymorphic_call_targets_hash ();
|
||||
timevar_push (TV_IPA_INHERITANCE);
|
||||
|
@ -112,12 +112,12 @@ histogram_hash::equal (const histogram_entry *val, const histogram_entry *val2)
|
||||
HASHTABLE is the on-side hash kept to avoid duplicates. */
|
||||
|
||||
static void
|
||||
account_time_size (hash_table <histogram_hash> hashtable,
|
||||
account_time_size (hash_table<histogram_hash> *hashtable,
|
||||
vec<histogram_entry *> &histogram,
|
||||
gcov_type count, int time, int size)
|
||||
{
|
||||
histogram_entry key = {count, 0, 0};
|
||||
histogram_entry **val = hashtable.find_slot (&key, INSERT);
|
||||
histogram_entry **val = hashtable->find_slot (&key, INSERT);
|
||||
|
||||
if (!*val)
|
||||
{
|
||||
@ -179,10 +179,9 @@ ipa_profile_generate_summary (void)
{
  struct cgraph_node *node;
  gimple_stmt_iterator gsi;
  hash_table <histogram_hash> hashtable;
  basic_block bb;

  hashtable.create (10);
  hash_table<histogram_hash> hashtable (10);
  histogram_pool = create_alloc_pool ("IPA histogram", sizeof (struct histogram_entry),
				      10);
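Here the table becomes a local object and helpers such as account_time_size now take a pointer to it; a hypothetical sketch of that convention, again using the illustrative int_hasher:

static void
record_key (hash_table<int_hasher> *tab, int *key)
{
  int **slot = tab->find_slot (key, INSERT);
  if (!*slot)
    *slot = key;
}

static void
collect (void)
{
  hash_table<int_hasher> tab (10);	/* replaces tab.create (10)  */
  static int key = 42;
  record_key (&tab, &key);
}					/* destructor replaces tab.dispose ()  */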
|
||||
|
||||
@ -230,9 +229,8 @@ ipa_profile_generate_summary (void)
|
||||
time += estimate_num_insns (stmt, &eni_time_weights);
|
||||
size += estimate_num_insns (stmt, &eni_size_weights);
|
||||
}
|
||||
account_time_size (hashtable, histogram, bb->count, time, size);
|
||||
account_time_size (&hashtable, histogram, bb->count, time, size);
|
||||
}
|
||||
hashtable.dispose ();
|
||||
histogram.qsort (cmp_counts);
|
||||
}
|
||||
|
||||
@ -263,10 +261,9 @@ ipa_profile_read_summary (void)
|
||||
struct lto_file_decl_data ** file_data_vec
|
||||
= lto_get_file_decl_data ();
|
||||
struct lto_file_decl_data * file_data;
|
||||
hash_table <histogram_hash> hashtable;
|
||||
int j = 0;
|
||||
|
||||
hashtable.create (10);
|
||||
hash_table<histogram_hash> hashtable (10);
|
||||
histogram_pool = create_alloc_pool ("IPA histogram", sizeof (struct histogram_entry),
|
||||
10);
|
||||
|
||||
@ -287,7 +284,7 @@ ipa_profile_read_summary (void)
|
||||
gcov_type count = streamer_read_gcov_count (ib);
|
||||
int time = streamer_read_uhwi (ib);
|
||||
int size = streamer_read_uhwi (ib);
|
||||
account_time_size (hashtable, histogram,
|
||||
account_time_size (&hashtable, histogram,
|
||||
count, time, size);
|
||||
}
|
||||
lto_destroy_simple_input_block (file_data,
|
||||
@ -295,7 +292,6 @@ ipa_profile_read_summary (void)
|
||||
ib, data, len);
|
||||
}
|
||||
}
|
||||
hashtable.dispose ();
|
||||
histogram.qsort (cmp_counts);
|
||||
}
|
||||
|
||||
|
@ -222,13 +222,13 @@ allocno_hard_regs_hasher::equal (const value_type *hv1, const compare_type *hv2)
|
||||
}
|
||||
|
||||
/* Hash table of unique allocno hard registers. */
|
||||
static hash_table <allocno_hard_regs_hasher> allocno_hard_regs_htab;
|
||||
static hash_table<allocno_hard_regs_hasher> *allocno_hard_regs_htab;
|
||||
|
||||
/* Return allocno hard registers in the hash table equal to HV. */
|
||||
static allocno_hard_regs_t
|
||||
find_hard_regs (allocno_hard_regs_t hv)
|
||||
{
|
||||
return allocno_hard_regs_htab.find (hv);
|
||||
return allocno_hard_regs_htab->find (hv);
|
||||
}
|
||||
|
||||
/* Insert allocno hard registers HV in the hash table (if it is not
|
||||
@ -236,7 +236,7 @@ find_hard_regs (allocno_hard_regs_t hv)
|
||||
static allocno_hard_regs_t
|
||||
insert_hard_regs (allocno_hard_regs_t hv)
|
||||
{
|
||||
allocno_hard_regs **slot = allocno_hard_regs_htab.find_slot (hv, INSERT);
|
||||
allocno_hard_regs **slot = allocno_hard_regs_htab->find_slot (hv, INSERT);
|
||||
|
||||
if (*slot == NULL)
|
||||
*slot = hv;
|
||||
@ -248,7 +248,8 @@ static void
|
||||
init_allocno_hard_regs (void)
|
||||
{
|
||||
allocno_hard_regs_vec.create (200);
|
||||
allocno_hard_regs_htab.create (200);
|
||||
allocno_hard_regs_htab
|
||||
= new hash_table<allocno_hard_regs_hasher> (200);
|
||||
}
|
||||
|
||||
/* Add (or update info about) allocno hard registers with SET and
|
||||
@ -286,7 +287,8 @@ finish_allocno_hard_regs (void)
|
||||
allocno_hard_regs_vec.iterate (i, &hv);
|
||||
i++)
|
||||
ira_free (hv);
|
||||
allocno_hard_regs_htab.dispose ();
|
||||
delete allocno_hard_regs_htab;
|
||||
allocno_hard_regs_htab = NULL;
|
||||
allocno_hard_regs_vec.release ();
|
||||
}
|
||||
|
||||
|
@ -167,7 +167,7 @@ cost_classes_hasher::remove (value_type *v)
|
||||
}
|
||||
|
||||
/* Hash table of unique cost classes. */
|
||||
static hash_table <cost_classes_hasher> cost_classes_htab;
|
||||
static hash_table<cost_classes_hasher> *cost_classes_htab;
|
||||
|
||||
/* Map allocno class -> cost classes for pseudo of given allocno
|
||||
class. */
|
||||
@ -188,7 +188,7 @@ initiate_regno_cost_classes (void)
|
||||
sizeof (cost_classes_t) * N_REG_CLASSES);
|
||||
memset (cost_classes_mode_cache, 0,
|
||||
sizeof (cost_classes_t) * MAX_MACHINE_MODE);
|
||||
cost_classes_htab.create (200);
|
||||
cost_classes_htab = new hash_table<cost_classes_hasher> (200);
|
||||
}
|
||||
|
||||
/* Create new cost classes from cost classes FROM and set up members
|
||||
@ -262,7 +262,7 @@ setup_regno_cost_classes_by_aclass (int regno, enum reg_class aclass)
|
||||
}
|
||||
classes.classes[classes.num++] = cl;
|
||||
}
|
||||
slot = cost_classes_htab.find_slot (&classes, INSERT);
|
||||
slot = cost_classes_htab->find_slot (&classes, INSERT);
|
||||
if (*slot == NULL)
|
||||
{
|
||||
classes_ptr = setup_cost_classes (&classes);
|
||||
@ -301,7 +301,7 @@ setup_regno_cost_classes_by_mode (int regno, enum machine_mode mode)
|
||||
continue;
|
||||
classes.classes[classes.num++] = cl;
|
||||
}
|
||||
slot = cost_classes_htab.find_slot (&classes, INSERT);
|
||||
slot = cost_classes_htab->find_slot (&classes, INSERT);
|
||||
if (*slot == NULL)
|
||||
{
|
||||
classes_ptr = setup_cost_classes (&classes);
|
||||
@ -319,7 +319,8 @@ static void
|
||||
finish_regno_cost_classes (void)
|
||||
{
|
||||
ira_free (regno_cost_classes);
|
||||
cost_classes_htab.dispose ();
|
||||
delete cost_classes_htab;
|
||||
cost_classes_htab = NULL;
|
||||
}
|
||||
|
||||
|
||||
|
@ -1,3 +1,7 @@
|
||||
2014-06-24 Trevor Saunders <tsaunders@mozilla.com>
|
||||
|
||||
* jcf-io.c: Adjust.
|
||||
|
||||
2014-06-11 Jan Hubicka <hubicka@ucw.cz>
|
||||
|
||||
* java/class.c (build_utf8_ref): Update handling for section names
|
||||
|
@ -299,7 +299,7 @@ charstar_hash::equal (const value_type *existing, const compare_type *candidate)
|
||||
during class lookup. (There is no need to cache the values
|
||||
associated with names that were found; they are saved in
|
||||
IDENTIFIER_CLASS_VALUE.) */
|
||||
static hash_table <charstar_hash> memoized_class_lookups;
|
||||
static hash_table<charstar_hash> *memoized_class_lookups;
|
||||
|
||||
/* Returns a freshly malloc'd string with the fully qualified pathname
|
||||
of the .class file for the class CLASSNAME. CLASSNAME must be
|
||||
@ -321,13 +321,13 @@ find_class (const char *classname, int classname_length, JCF *jcf)
|
||||
hashval_t hash;
|
||||
|
||||
/* Create the hash table, if it does not already exist. */
|
||||
if (!memoized_class_lookups.is_created ())
|
||||
memoized_class_lookups.create (37);
|
||||
if (!memoized_class_lookups)
|
||||
memoized_class_lookups = new hash_table<charstar_hash> (37);
|
||||
|
||||
/* Loop for this class in the hashtable. If it is present, we've
|
||||
already looked for this class and failed to find it. */
|
||||
hash = charstar_hash::hash (classname);
|
||||
if (memoized_class_lookups.find_with_hash (classname, hash))
|
||||
if (memoized_class_lookups->find_with_hash (classname, hash))
|
||||
return NULL;
|
||||
|
||||
/* Allocate and zero out the buffer, since we don't explicitly put a
|
||||
@ -402,7 +402,7 @@ find_class (const char *classname, int classname_length, JCF *jcf)
|
||||
|
||||
/* Remember that this class could not be found so that we do not
|
||||
have to look again. */
|
||||
*memoized_class_lookups.find_slot_with_hash (classname, hash, INSERT)
|
||||
*memoized_class_lookups->find_slot_with_hash (classname, hash, INSERT)
|
||||
= classname;
|
||||
|
||||
return NULL;
|
||||
|
@ -453,14 +453,14 @@ invariant_expr_hasher::equal (const value_type *entry1,
|
||||
entry2->inv->insn, entry2->expr);
|
||||
}
|
||||
|
||||
typedef hash_table <invariant_expr_hasher> invariant_htab_type;
|
||||
typedef hash_table<invariant_expr_hasher> invariant_htab_type;
|
||||
|
||||
/* Checks whether invariant with value EXPR in machine mode MODE is
|
||||
recorded in EQ. If this is the case, return the invariant. Otherwise
|
||||
insert INV to the table for this expression and return INV. */
|
||||
|
||||
static struct invariant *
|
||||
find_or_insert_inv (invariant_htab_type eq, rtx expr, enum machine_mode mode,
|
||||
find_or_insert_inv (invariant_htab_type *eq, rtx expr, enum machine_mode mode,
|
||||
struct invariant *inv)
|
||||
{
|
||||
hashval_t hash = hash_invariant_expr_1 (inv->insn, expr);
|
||||
@ -471,7 +471,7 @@ find_or_insert_inv (invariant_htab_type eq, rtx expr, enum machine_mode mode,
|
||||
pentry.expr = expr;
|
||||
pentry.inv = inv;
|
||||
pentry.mode = mode;
|
||||
slot = eq.find_slot_with_hash (&pentry, hash, INSERT);
|
||||
slot = eq->find_slot_with_hash (&pentry, hash, INSERT);
|
||||
entry = *slot;
|
||||
|
||||
if (entry)
|
||||
@ -491,7 +491,7 @@ find_or_insert_inv (invariant_htab_type eq, rtx expr, enum machine_mode mode,
|
||||
hash table of the invariants. */
|
||||
|
||||
static void
|
||||
find_identical_invariants (invariant_htab_type eq, struct invariant *inv)
|
||||
find_identical_invariants (invariant_htab_type *eq, struct invariant *inv)
|
||||
{
|
||||
unsigned depno;
|
||||
bitmap_iterator bi;
|
||||
@ -528,13 +528,10 @@ merge_identical_invariants (void)
|
||||
{
|
||||
unsigned i;
|
||||
struct invariant *inv;
|
||||
invariant_htab_type eq;
|
||||
eq.create (invariants.length ());
|
||||
invariant_htab_type eq (invariants.length ());
|
||||
|
||||
FOR_EACH_VEC_ELT (invariants, i, inv)
|
||||
find_identical_invariants (eq, inv);
|
||||
|
||||
eq.dispose ();
|
||||
find_identical_invariants (&eq, inv);
|
||||
}
|
||||
|
||||
/* Determines the basic blocks inside LOOP that are always executed and
|
||||
|
@ -134,7 +134,7 @@ biv_entry_hasher::equal (const value_type *b, const compare_type *r)
|
||||
|
||||
/* Bivs of the current loop. */
|
||||
|
||||
static hash_table <biv_entry_hasher> bivs;
|
||||
static hash_table<biv_entry_hasher> *bivs;
|
||||
|
||||
static bool iv_analyze_op (rtx, rtx, struct rtx_iv *);
|
||||
|
||||
@ -269,7 +269,7 @@ clear_iv_info (void)
|
||||
}
|
||||
}
|
||||
|
||||
bivs.empty ();
|
||||
bivs->empty ();
|
||||
}
|
||||
|
||||
|
||||
@ -284,7 +284,7 @@ iv_analysis_loop_init (struct loop *loop)
|
||||
if (clean_slate)
|
||||
{
|
||||
df_set_flags (DF_EQ_NOTES + DF_DEFER_INSN_RESCAN);
|
||||
bivs.create (10);
|
||||
bivs = new hash_table<biv_entry_hasher> (10);
|
||||
clean_slate = false;
|
||||
}
|
||||
else
|
||||
@ -844,7 +844,7 @@ record_iv (df_ref def, struct rtx_iv *iv)
|
||||
static bool
|
||||
analyzed_for_bivness_p (rtx def, struct rtx_iv *iv)
|
||||
{
|
||||
struct biv_entry *biv = bivs.find_with_hash (def, REGNO (def));
|
||||
struct biv_entry *biv = bivs->find_with_hash (def, REGNO (def));
|
||||
|
||||
if (!biv)
|
||||
return false;
|
||||
@ -857,7 +857,7 @@ static void
|
||||
record_biv (rtx def, struct rtx_iv *iv)
|
||||
{
|
||||
struct biv_entry *biv = XNEW (struct biv_entry);
|
||||
biv_entry **slot = bivs.find_slot_with_hash (def, REGNO (def), INSERT);
|
||||
biv_entry **slot = bivs->find_slot_with_hash (def, REGNO (def), INSERT);
|
||||
|
||||
biv->regno = REGNO (def);
|
||||
biv->iv = *iv;
|
||||
@ -1299,7 +1299,8 @@ iv_analysis_done (void)
|
||||
clear_iv_info ();
|
||||
clean_slate = true;
|
||||
df_finish_pass (true);
|
||||
bivs.dispose ();
|
||||
delete bivs;
|
||||
bivs = NULL;
|
||||
free (iv_ref_table);
|
||||
iv_ref_table = NULL;
|
||||
iv_ref_table_size = 0;
|
||||
|
@ -161,11 +161,11 @@ var_expand_hasher::equal (const value_type *i1, const compare_type *i2)
|
||||
|
||||
struct opt_info
|
||||
{
|
||||
hash_table <iv_split_hasher> insns_to_split; /* A hashtable of insns to
|
||||
hash_table<iv_split_hasher> *insns_to_split; /* A hashtable of insns to
|
||||
split. */
|
||||
struct iv_to_split *iv_to_split_head; /* The first iv to split. */
|
||||
struct iv_to_split **iv_to_split_tail; /* Pointer to the tail of the list. */
|
||||
hash_table <var_expand_hasher> insns_with_var_to_expand; /* A hashtable of
|
||||
hash_table<var_expand_hasher> *insns_with_var_to_expand; /* A hashtable of
|
||||
insns with accumulators to expand. */
|
||||
struct var_to_expand *var_to_expand_head; /* The first var to expand. */
|
||||
struct var_to_expand **var_to_expand_tail; /* Pointer to the tail of the list. */
|
||||
@ -1974,7 +1974,8 @@ analyze_insns_in_loop (struct loop *loop)
|
||||
|
||||
if (flag_split_ivs_in_unroller)
|
||||
{
|
||||
opt_info->insns_to_split.create (5 * loop->num_nodes);
|
||||
opt_info->insns_to_split
|
||||
= new hash_table<iv_split_hasher> (5 * loop->num_nodes);
|
||||
opt_info->iv_to_split_head = NULL;
|
||||
opt_info->iv_to_split_tail = &opt_info->iv_to_split_head;
|
||||
}
|
||||
@ -1995,7 +1996,8 @@ analyze_insns_in_loop (struct loop *loop)
|
||||
if (flag_variable_expansion_in_unroller
|
||||
&& can_apply)
|
||||
{
|
||||
opt_info->insns_with_var_to_expand.create (5 * loop->num_nodes);
|
||||
opt_info->insns_with_var_to_expand
|
||||
= new hash_table<var_expand_hasher> (5 * loop->num_nodes);
|
||||
opt_info->var_to_expand_head = NULL;
|
||||
opt_info->var_to_expand_tail = &opt_info->var_to_expand_head;
|
||||
}
|
||||
@ -2011,12 +2013,12 @@ analyze_insns_in_loop (struct loop *loop)
|
||||
if (!INSN_P (insn))
|
||||
continue;
|
||||
|
||||
if (opt_info->insns_to_split.is_created ())
|
||||
if (opt_info->insns_to_split)
|
||||
ivts = analyze_iv_to_split_insn (insn);
|
||||
|
||||
if (ivts)
|
||||
{
|
||||
slot1 = opt_info->insns_to_split.find_slot (ivts, INSERT);
|
||||
slot1 = opt_info->insns_to_split->find_slot (ivts, INSERT);
|
||||
gcc_assert (*slot1 == NULL);
|
||||
*slot1 = ivts;
|
||||
*opt_info->iv_to_split_tail = ivts;
|
||||
@ -2024,12 +2026,12 @@ analyze_insns_in_loop (struct loop *loop)
|
||||
continue;
|
||||
}
|
||||
|
||||
if (opt_info->insns_with_var_to_expand.is_created ())
|
||||
if (opt_info->insns_with_var_to_expand)
|
||||
ves = analyze_insn_to_expand_var (loop, insn);
|
||||
|
||||
if (ves)
|
||||
{
|
||||
slot2 = opt_info->insns_with_var_to_expand.find_slot (ves, INSERT);
|
||||
slot2 = opt_info->insns_with_var_to_expand->find_slot (ves, INSERT);
|
||||
gcc_assert (*slot2 == NULL);
|
||||
*slot2 = ves;
|
||||
*opt_info->var_to_expand_tail = ves;
|
||||
@ -2407,7 +2409,7 @@ apply_opt_in_copies (struct opt_info *opt_info,
|
||||
gcc_assert (!unrolling || rewrite_original_loop);
|
||||
|
||||
/* Allocate the basic variables (i0). */
|
||||
if (opt_info->insns_to_split.is_created ())
|
||||
if (opt_info->insns_to_split)
|
||||
for (ivts = opt_info->iv_to_split_head; ivts; ivts = ivts->next)
|
||||
allocate_basic_variable (ivts);
|
||||
|
||||
@ -2441,11 +2443,11 @@ apply_opt_in_copies (struct opt_info *opt_info,
|
||||
ve_templ.insn = orig_insn;
|
||||
|
||||
/* Apply splitting iv optimization. */
|
||||
if (opt_info->insns_to_split.is_created ())
|
||||
if (opt_info->insns_to_split)
|
||||
{
|
||||
maybe_strip_eq_note_for_split_iv (opt_info, insn);
|
||||
|
||||
ivts = opt_info->insns_to_split.find (&ivts_templ);
|
||||
ivts = opt_info->insns_to_split->find (&ivts_templ);
|
||||
|
||||
if (ivts)
|
||||
{
|
||||
@ -2458,10 +2460,10 @@ apply_opt_in_copies (struct opt_info *opt_info,
|
||||
}
|
||||
}
|
||||
/* Apply variable expansion optimization. */
|
||||
if (unrolling && opt_info->insns_with_var_to_expand.is_created ())
|
||||
if (unrolling && opt_info->insns_with_var_to_expand)
|
||||
{
|
||||
ves = (struct var_to_expand *)
|
||||
opt_info->insns_with_var_to_expand.find (&ve_templ);
|
||||
opt_info->insns_with_var_to_expand->find (&ve_templ);
|
||||
if (ves)
|
||||
{
|
||||
gcc_assert (GET_CODE (PATTERN (insn))
|
||||
@ -2478,7 +2480,7 @@ apply_opt_in_copies (struct opt_info *opt_info,
|
||||
|
||||
/* Initialize the variable expansions in the loop preheader
|
||||
and take care of combining them at the loop exit. */
|
||||
if (opt_info->insns_with_var_to_expand.is_created ())
|
||||
if (opt_info->insns_with_var_to_expand)
|
||||
{
|
||||
for (ves = opt_info->var_to_expand_head; ves; ves = ves->next)
|
||||
insert_var_expansion_initialization (ves, opt_info->loop_preheader);
|
||||
@ -2509,12 +2511,12 @@ apply_opt_in_copies (struct opt_info *opt_info,
|
||||
continue;
|
||||
|
||||
ivts_templ.insn = orig_insn;
|
||||
if (opt_info->insns_to_split.is_created ())
|
||||
if (opt_info->insns_to_split)
|
||||
{
|
||||
maybe_strip_eq_note_for_split_iv (opt_info, orig_insn);
|
||||
|
||||
ivts = (struct iv_to_split *)
|
||||
opt_info->insns_to_split.find (&ivts_templ);
|
||||
opt_info->insns_to_split->find (&ivts_templ);
|
||||
if (ivts)
|
||||
{
|
||||
if (!delta)
|
||||
@ -2533,15 +2535,16 @@ apply_opt_in_copies (struct opt_info *opt_info,
static void
free_opt_info (struct opt_info *opt_info)
{
  if (opt_info->insns_to_split.is_created ())
    opt_info->insns_to_split.dispose ();
  if (opt_info->insns_with_var_to_expand.is_created ())
  delete opt_info->insns_to_split;
  opt_info->insns_to_split = NULL;
  if (opt_info->insns_with_var_to_expand)
    {
      struct var_to_expand *ves;

      for (ves = opt_info->var_to_expand_head; ves; ves = ves->next)
	ves->var_expansions.release ();
      opt_info->insns_with_var_to_expand.dispose ();
      delete opt_info->insns_with_var_to_expand;
      opt_info->insns_with_var_to_expand = NULL;
    }
  free (opt_info);
}
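For tables that are only sometimes needed, such as the opt_info members above, the patch keeps them as pointers; a hypothetical sketch of that lifetime, with illustrative names and the int_hasher from the earlier sketch:

struct pass_info
{
  hash_table<int_hasher> *table;	/* NULL until the pass needs it  */
};

static void
maybe_create_table (struct pass_info *info, bool needed)
{
  if (needed)
    info->table = new hash_table<int_hasher> (64);
}

static void
free_pass_info (struct pass_info *info)
{
  if (info->table)			/* replaces info->table.is_created ()  */
    delete info->table;			/* replaces info->table.dispose ()  */
  info->table = NULL;
}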
|
||||
|
@ -72,7 +72,7 @@ freeing_string_slot_hasher::remove (value_type *v)
|
||||
}
|
||||
|
||||
/* The table to hold the file names. */
|
||||
static hash_table <freeing_string_slot_hasher> file_name_hash_table;
|
||||
static hash_table<freeing_string_slot_hasher> *file_name_hash_table;
|
||||
|
||||
|
||||
/* Check that tag ACTUAL has one of the given values. NUM_TAGS is the
|
||||
@ -123,7 +123,7 @@ canon_file_name (const char *string)
|
||||
s_slot.s = string;
|
||||
s_slot.len = len;
|
||||
|
||||
slot = file_name_hash_table.find_slot (&s_slot, INSERT);
|
||||
slot = file_name_hash_table->find_slot (&s_slot, INSERT);
|
||||
if (*slot == NULL)
|
||||
{
|
||||
char *saved_string;
|
||||
@ -1363,7 +1363,8 @@ void
|
||||
lto_reader_init (void)
|
||||
{
|
||||
lto_streamer_init ();
|
||||
file_name_hash_table.create (37);
|
||||
file_name_hash_table
|
||||
= new hash_table<freeing_string_slot_hasher> (37);
|
||||
}
|
||||
|
||||
|
||||
|
@ -87,7 +87,7 @@ create_output_block (enum lto_section_type section_type)
|
||||
|
||||
clear_line_info (ob);
|
||||
|
||||
ob->string_hash_table.create (37);
|
||||
ob->string_hash_table = new hash_table<string_slot_hasher> (37);
|
||||
gcc_obstack_init (&ob->obstack);
|
||||
|
||||
return ob;
|
||||
@ -101,7 +101,8 @@ destroy_output_block (struct output_block *ob)
|
||||
{
|
||||
enum lto_section_type section_type = ob->section_type;
|
||||
|
||||
ob->string_hash_table.dispose ();
|
||||
delete ob->string_hash_table;
|
||||
ob->string_hash_table = NULL;
|
||||
|
||||
free (ob->main_stream);
|
||||
free (ob->string_stream);
|
||||
|
@ -290,7 +290,7 @@ tree_entry_hasher::equal (const value_type *e1, const compare_type *e2)
|
||||
return (e1->key == e2->key);
|
||||
}
|
||||
|
||||
static hash_table <tree_hash_entry> tree_htab;
|
||||
static hash_table<tree_hash_entry> *tree_htab;
|
||||
#endif
|
||||
|
||||
/* Initialization common to the LTO reader and writer. */
|
||||
@ -305,7 +305,7 @@ lto_streamer_init (void)
|
||||
streamer_check_handled_ts_structures ();
|
||||
|
||||
#ifdef LTO_STREAMER_DEBUG
|
||||
tree_htab.create (31);
|
||||
tree_htab = new hash_table<tree_hash_entry> (31);
|
||||
#endif
|
||||
}
|
||||
|
||||
@ -340,7 +340,7 @@ lto_orig_address_map (tree t, intptr_t orig_t)
|
||||
|
||||
ent.key = t;
|
||||
ent.value = orig_t;
|
||||
slot = tree_htab.find_slot (&ent, INSERT);
|
||||
slot = tree_htab->find_slot (&ent, INSERT);
|
||||
gcc_assert (!*slot);
|
||||
*slot = XNEW (struct tree_hash_entry);
|
||||
**slot = ent;
|
||||
@ -357,7 +357,7 @@ lto_orig_address_get (tree t)
|
||||
struct tree_hash_entry **slot;
|
||||
|
||||
ent.key = t;
|
||||
slot = tree_htab.find_slot (&ent, NO_INSERT);
|
||||
slot = tree_htab->find_slot (&ent, NO_INSERT);
|
||||
return (slot ? (*slot)->value : 0);
|
||||
}
|
||||
|
||||
@ -371,10 +371,10 @@ lto_orig_address_remove (tree t)
|
||||
struct tree_hash_entry **slot;
|
||||
|
||||
ent.key = t;
|
||||
slot = tree_htab.find_slot (&ent, NO_INSERT);
|
||||
slot = tree_htab->find_slot (&ent, NO_INSERT);
|
||||
gcc_assert (slot);
|
||||
free (*slot);
|
||||
tree_htab.clear_slot (slot);
|
||||
tree_htab->clear_slot (slot);
|
||||
}
|
||||
#endif
|
||||
|
||||
|
@ -682,7 +682,7 @@ struct output_block
|
||||
|
||||
/* The hash table that contains the set of strings we have seen so
|
||||
far and the indexes assigned to them. */
|
||||
hash_table <string_slot_hasher> string_hash_table;
|
||||
hash_table<string_slot_hasher> *string_hash_table;
|
||||
|
||||
/* The current cgraph_node that we are currently serializing. Null
|
||||
if we are serializing something else. */
|
||||
|
@ -1,3 +1,7 @@
|
||||
2014-06-24 Trevor Saunders <tsaunders@mozilla.com>
|
||||
|
||||
* lto.c: Adjust.
|
||||
|
||||
2014-06-20 Jan Hubicka <hubicka@ucw.cz>
|
||||
|
||||
* lto-symtab.c (lto_varpool_replace_node): Report TLS model conflicts.
|
||||
|
gcc/lto/lto.c
@@ -1137,7 +1137,7 @@ tree_scc_hasher::equal (const value_type *scc1, const compare_type *scc2)
  return true;
  }
- static hash_table <tree_scc_hasher> tree_scc_hash;
+ static hash_table<tree_scc_hasher> *tree_scc_hash;
  static struct obstack tree_scc_hash_obstack;
  static unsigned long num_merged_types;

@@ -1734,7 +1734,7 @@ unify_scc (struct streamer_tree_cache_d *cache, unsigned from,
  /* Look for the list of candidate SCCs to compare against. */
  tree_scc **slot;
- slot = tree_scc_hash.find_slot_with_hash (scc, scc_hash, INSERT);
+ slot = tree_scc_hash->find_slot_with_hash (scc, scc_hash, INSERT);
  if (*slot)
  {
  /* Try unifying against each candidate. */

@@ -2921,7 +2921,7 @@ read_cgraph_and_symbols (unsigned nfiles, const char **fnames)
  gimple_canonical_types = htab_create_ggc (16381, gimple_canonical_type_hash,
  gimple_canonical_type_eq, 0);
  gcc_obstack_init (&tree_scc_hash_obstack);
- tree_scc_hash.create (4096);
+ tree_scc_hash = new hash_table<tree_scc_hasher> (4096);
  /* Register the common node types with the canonical type machinery so
  we properly share alias-sets across languages and TUs. Do not

@@ -2987,7 +2987,8 @@ read_cgraph_and_symbols (unsigned nfiles, const char **fnames)
  print_lto_report_1 ();
  /* Free gimple type merging datastructures. */
- tree_scc_hash.dispose ();
+ delete tree_scc_hash;
+ tree_scc_hash = NULL;
  obstack_free (&tree_scc_hash_obstack, NULL);
  htab_delete (gimple_canonical_types);
  gimple_canonical_types = NULL;

@@ -3159,17 +3160,17 @@ print_lto_report_1 (void)
  fprintf (stderr, "[%s] read %lu SCCs of average size %f\n",
  pfx, num_sccs_read, total_scc_size / (double)num_sccs_read);
  fprintf (stderr, "[%s] %lu tree bodies read in total\n", pfx, total_scc_size);
- if (flag_wpa && tree_scc_hash.is_created ())
+ if (flag_wpa && tree_scc_hash)
  {
  fprintf (stderr, "[%s] tree SCC table: size %ld, %ld elements, "
  "collision ratio: %f\n", pfx,
- (long) tree_scc_hash.size (),
- (long) tree_scc_hash.elements (),
- tree_scc_hash.collisions ());
+ (long) tree_scc_hash->size (),
+ (long) tree_scc_hash->elements (),
+ tree_scc_hash->collisions ());
  hash_table<tree_scc_hasher>::iterator hiter;
  tree_scc *scc, *max_scc = NULL;
  unsigned max_length = 0;
- FOR_EACH_HASH_TABLE_ELEMENT (tree_scc_hash, scc, x, hiter)
+ FOR_EACH_HASH_TABLE_ELEMENT (*tree_scc_hash, scc, x, hiter)
  {
  unsigned length = 0;
  tree_scc *s = scc;
gcc/objc/ChangeLog
@@ -1,3 +1,7 @@
+ 2014-06-24  Trevor Saunders  <tsaunders@mozilla.com>
+
+ 	* objc-act.c: Adjust.
+
  2014-05-17  Trevor Saunders  <tsaunders@mozilla.com>

  	* objc-act.c (objc_build_string_object): Adjust.
gcc/objc/objc-act.c
@@ -3940,8 +3940,7 @@ objc_detect_field_duplicates (bool check_superclasses_only)
  {
  /* First, build the hashtable by putting all the instance
  variables of superclasses in it. */
- hash_table <decl_name_hash> htab;
- htab.create (37);
+ hash_table<decl_name_hash> htab (37);
  tree interface;
  for (interface = lookup_interface (CLASS_SUPER_NAME
  (objc_interface_context));

@@ -4018,7 +4017,6 @@ objc_detect_field_duplicates (bool check_superclasses_only)
  }
  }
  }
- htab.dispose ();
  return true;
  }
  }
gcc/passes.c
@@ -719,7 +719,7 @@ pass_registry_hasher::equal (const value_type *s1, const compare_type *s2)
  return !strcmp (s1->unique_name, s2->unique_name);
  }
- static hash_table <pass_registry_hasher> name_to_pass_map;
+ static hash_table<pass_registry_hasher> *name_to_pass_map;
  /* Register PASS with NAME. */

@@ -729,11 +729,11 @@ register_pass_name (opt_pass *pass, const char *name)
  struct pass_registry **slot;
  struct pass_registry pr;
- if (!name_to_pass_map.is_created ())
- name_to_pass_map.create (256);
+ if (!name_to_pass_map)
+ name_to_pass_map = new hash_table<pass_registry_hasher> (256);
  pr.unique_name = name;
- slot = name_to_pass_map.find_slot (&pr, INSERT);
+ slot = name_to_pass_map->find_slot (&pr, INSERT);
  if (!*slot)
  {
  struct pass_registry *new_pr;

@@ -777,7 +777,7 @@ create_pass_tab (void)
  return;
  pass_tab.safe_grow_cleared (g->get_passes ()->passes_by_id_size + 1);
- name_to_pass_map.traverse <void *, passes_pass_traverse> (NULL);
+ name_to_pass_map->traverse <void *, passes_pass_traverse> (NULL);
  }
  static bool override_gate_status (opt_pass *, tree, bool);

@@ -867,7 +867,7 @@ get_pass_by_name (const char *name)
  struct pass_registry **slot, pr;
  pr.unique_name = name;
- slot = name_to_pass_map.find_slot (&pr, NO_INSERT);
+ slot = name_to_pass_map->find_slot (&pr, NO_INSERT);
  if (!slot || !*slot)
  return NULL;
gcc/plugin.c
@@ -80,7 +80,7 @@ event_hasher::equal (const value_type *s1, const compare_type *s2)
  /* A hash table to map event names to the position of the names in the
  plugin_event_name table. */
- static hash_table <event_hasher> event_tab;
+ static hash_table<event_hasher> *event_tab;
  /* Keep track of the limit of allocated events and space ready for
  allocating events. */

@@ -345,19 +345,19 @@ get_named_event_id (const char *name, enum insert_option insert)
  {
  const char ***slot;
- if (!event_tab.is_created ())
+ if (!event_tab)
  {
  int i;
- event_tab.create (150);
+ event_tab = new hash_table<event_hasher> (150);
  for (i = 0; i < event_last; i++)
  {
- slot = event_tab.find_slot (&plugin_event_name[i], INSERT);
+ slot = event_tab->find_slot (&plugin_event_name[i], INSERT);
  gcc_assert (*slot == HTAB_EMPTY_ENTRY);
  *slot = &plugin_event_name[i];
  }
  }
- slot = event_tab.find_slot (&name, insert);
+ slot = event_tab->find_slot (&name, insert);
  if (slot == NULL)
  return -1;
  if (*slot != HTAB_EMPTY_ENTRY)

@@ -383,7 +383,8 @@ get_named_event_id (const char *name, enum insert_option insert)
  plugin_callbacks, event_horizon);
  }
  /* All the pointers in the hash table will need to be updated. */
- event_tab.dispose ();
+ delete event_tab;
+ event_tab = NULL;
  }
  else
  *slot = &plugin_event_name[event_last];
gcc/postreload-gcse.c
@@ -148,7 +148,7 @@ expr_hasher::equal (const value_type *exp1, const compare_type *exp2)
  }
  /* The table itself. */
- static hash_table <expr_hasher> expr_table;
+ static hash_table<expr_hasher> *expr_table;
  static struct obstack expr_obstack;

@@ -279,7 +279,7 @@ alloc_mem (void)
  make the hash table too small, but unnecessarily making it too large
  also doesn't help. The i/4 is a gcse.c relic, and seems like a
  reasonable choice. */
- expr_table.create (MAX (i / 4, 13));
+ expr_table = new hash_table<expr_hasher> (MAX (i / 4, 13));
  /* We allocate everything on obstacks because we often can roll back
  the whole obstack to some point. Freeing obstacks is very fast. */

@@ -306,7 +306,8 @@ free_mem (void)
  {
  free (uid_cuid);
- expr_table.dispose ();
+ delete expr_table;
+ expr_table = NULL;
  obstack_free (&expr_obstack, NULL);
  obstack_free (&occr_obstack, NULL);

@@ -348,7 +349,7 @@ insert_expr_in_table (rtx x, rtx insn)
  cur_expr->hash = hash;
  cur_expr->avail_occr = NULL;
- slot = expr_table.find_slot_with_hash (cur_expr, hash, INSERT);
+ slot = expr_table->find_slot_with_hash (cur_expr, hash, INSERT);
  if (! (*slot))
  /* The expression isn't found, so insert it. */

@@ -416,7 +417,7 @@ lookup_expr_in_table (rtx pat)
  tmp_expr->hash = hash;
  tmp_expr->avail_occr = NULL;
- slot = expr_table.find_slot_with_hash (tmp_expr, hash, INSERT);
+ slot = expr_table->find_slot_with_hash (tmp_expr, hash, INSERT);
  obstack_free (&expr_obstack, tmp_expr);
  if (!slot)

@@ -457,13 +458,13 @@ dump_hash_table (FILE *file)
  {
  fprintf (file, "\n\nexpression hash table\n");
  fprintf (file, "size %ld, %ld elements, %f collision/search ratio\n",
- (long) expr_table.size (),
- (long) expr_table.elements (),
- expr_table.collisions ());
- if (expr_table.elements () > 0)
+ (long) expr_table->size (),
+ (long) expr_table->elements (),
+ expr_table->collisions ());
+ if (expr_table->elements () > 0)
  {
  fprintf (file, "\n\ntable entries:\n");
- expr_table.traverse <FILE *, dump_expr_hash_table_entry> (file);
+ expr_table->traverse <FILE *, dump_expr_hash_table_entry> (file);
  }
  fprintf (file, "\n");
  }

@@ -1253,7 +1254,7 @@ delete_redundant_insns_1 (expr **slot, void *data ATTRIBUTE_UNUSED)
  static void
  delete_redundant_insns (void)
  {
- expr_table.traverse <void *, delete_redundant_insns_1> (NULL);
+ expr_table->traverse <void *, delete_redundant_insns_1> (NULL);
  if (dump_file)
  fprintf (dump_file, "\n");
  }

@@ -1279,7 +1280,7 @@ gcse_after_reload_main (rtx f ATTRIBUTE_UNUSED)
  if (dump_file)
  dump_hash_table (dump_file);
- if (expr_table.elements () > 0)
+ if (expr_table->elements () > 0)
  {
  eliminate_partially_redundant_loads ();
  delete_redundant_insns ();
gcc/sese.c
@@ -102,15 +102,15 @@ rename_map_hasher::equal (const value_type *elt1, const compare_type *elt2)
  return (elt1->old_name == elt2->old_name);
  }
- typedef hash_table <rename_map_hasher> rename_map_type;
+ typedef hash_table<rename_map_hasher> rename_map_type;
  /* Print to stderr all the elements of RENAME_MAP. */
  DEBUG_FUNCTION void
- debug_rename_map (rename_map_type rename_map)
+ debug_rename_map (rename_map_type *rename_map)
  {
- rename_map.traverse <void *, debug_rename_map_1> (NULL);
+ rename_map->traverse <void *, debug_rename_map_1> (NULL);
  }
  /* Computes a hash function for database element ELT. */

@@ -416,14 +416,14 @@ get_false_edge_from_guard_bb (basic_block bb)
  /* Returns the expression associated to OLD_NAME in RENAME_MAP. */
  static tree
- get_rename (rename_map_type rename_map, tree old_name)
+ get_rename (rename_map_type *rename_map, tree old_name)
  {
  struct rename_map_elt_s tmp;
  rename_map_elt_s **slot;
  gcc_assert (TREE_CODE (old_name) == SSA_NAME);
  tmp.old_name = old_name;
- slot = rename_map.find_slot (&tmp, NO_INSERT);
+ slot = rename_map->find_slot (&tmp, NO_INSERT);
  if (slot && *slot)
  return (*slot)->expr;

@@ -434,7 +434,7 @@ get_rename (rename_map_type rename_map, tree old_name)
  /* Register in RENAME_MAP the rename tuple (OLD_NAME, EXPR). */
  static void
- set_rename (rename_map_type rename_map, tree old_name, tree expr)
+ set_rename (rename_map_type *rename_map, tree old_name, tree expr)
  {
  struct rename_map_elt_s tmp;
  rename_map_elt_s **slot;

@@ -443,7 +443,7 @@ set_rename (rename_map_type rename_map, tree old_name, tree expr)
  return;
  tmp.old_name = old_name;
- slot = rename_map.find_slot (&tmp, INSERT);
+ slot = rename_map->find_slot (&tmp, INSERT);
  if (!slot)
  return;

@@ -461,7 +461,7 @@ set_rename (rename_map_type rename_map, tree old_name, tree expr)
  is set when the code generation cannot continue. */
  static bool
- rename_uses (gimple copy, rename_map_type rename_map,
+ rename_uses (gimple copy, rename_map_type *rename_map,
  gimple_stmt_iterator *gsi_tgt,
  sese region, loop_p loop, vec<tree> iv_map,
  bool *gloog_error)

@@ -568,7 +568,7 @@ rename_uses (gimple copy, rename_map_type rename_map,
  static void
  graphite_copy_stmts_from_block (basic_block bb, basic_block new_bb,
- rename_map_type rename_map,
+ rename_map_type *rename_map,
  vec<tree> iv_map, sese region,
  bool *gloog_error)
  {

@@ -636,14 +636,12 @@ copy_bb_and_scalar_dependences (basic_block bb, sese region,
  bool *gloog_error)
  {
  basic_block new_bb = split_edge (next_e);
- rename_map_type rename_map;
- rename_map.create (10);
+ rename_map_type rename_map (10);
  next_e = single_succ_edge (new_bb);
- graphite_copy_stmts_from_block (bb, new_bb, rename_map, iv_map, region,
+ graphite_copy_stmts_from_block (bb, new_bb, &rename_map, iv_map, region,
  gloog_error);
  remove_phi_nodes (new_bb);
- rename_map.dispose ();
  return next_e;
  }
gcc/statistics.c
@@ -80,16 +80,16 @@ stats_counter_hasher::remove (value_type *v)
  free (v);
  }
- typedef hash_table <stats_counter_hasher> stats_counter_table_type;
+ typedef hash_table<stats_counter_hasher> stats_counter_table_type;
  /* Array of statistic hashes, indexed by pass id. */
- static stats_counter_table_type *statistics_hashes;
+ static stats_counter_table_type **statistics_hashes;
  static unsigned nr_statistics_hashes;
  /* Return the current hashtable to be used for recording or printing
  statistics. */
- static stats_counter_table_type
+ static stats_counter_table_type *
  curr_statistics_hash (void)
  {
  unsigned idx;

@@ -98,20 +98,20 @@ curr_statistics_hash (void)
  idx = current_pass->static_pass_number;
  if (idx < nr_statistics_hashes
- && statistics_hashes[idx].is_created ())
+ && statistics_hashes[idx])
  return statistics_hashes[idx];
  if (idx >= nr_statistics_hashes)
  {
- statistics_hashes = XRESIZEVEC (stats_counter_table_type,
+ statistics_hashes = XRESIZEVEC (stats_counter_table_type *,
  statistics_hashes, idx+1);
  memset (statistics_hashes + nr_statistics_hashes, 0,
  (idx + 1 - nr_statistics_hashes)
- * sizeof (stats_counter_table_type));
+ * sizeof (stats_counter_table_type *));
  nr_statistics_hashes = idx + 1;
  }
- statistics_hashes[idx].create (15);
+ statistics_hashes[idx] = new stats_counter_table_type (15);
  return statistics_hashes[idx];
  }

@@ -195,16 +195,16 @@ statistics_fini_pass (void)
  fprintf (dump_file, "Pass statistics:\n");
  fprintf (dump_file, "----------------\n");
  curr_statistics_hash ()
- .traverse_noresize <void *, statistics_fini_pass_1> (NULL);
+ ->traverse_noresize <void *, statistics_fini_pass_1> (NULL);
  fprintf (dump_file, "\n");
  }
  if (statistics_dump_file
  && !(statistics_dump_flags & TDF_STATS
  || statistics_dump_flags & TDF_DETAILS))
  curr_statistics_hash ()
- .traverse_noresize <void *, statistics_fini_pass_2> (NULL);
+ ->traverse_noresize <void *, statistics_fini_pass_2> (NULL);
  curr_statistics_hash ()
- .traverse_noresize <void *, statistics_fini_pass_3> (NULL);
+ ->traverse_noresize <void *, statistics_fini_pass_3> (NULL);
  }
  /* Helper for printing summary information. */

@@ -245,10 +245,10 @@ statistics_fini (void)
  {
  unsigned i;
  for (i = 0; i < nr_statistics_hashes; ++i)
- if (statistics_hashes[i].is_created ()
+ if (statistics_hashes[i]
  && passes->get_pass_for_id (i) != NULL)
  statistics_hashes[i]
- .traverse_noresize <opt_pass *, statistics_fini_1>
+ ->traverse_noresize <opt_pass *, statistics_fini_1>
  (passes->get_pass_for_id (i));
  }

@@ -280,14 +280,14 @@ statistics_init (void)
  and HISTOGRAM_P. */
  static statistics_counter_t *
- lookup_or_add_counter (stats_counter_table_type hash, const char *id, int val,
+ lookup_or_add_counter (stats_counter_table_type *hash, const char *id, int val,
  bool histogram_p)
  {
  statistics_counter_t **counter;
  statistics_counter_t c;
  c.id = id;
  c.val = val;
- counter = hash.find_slot (&c, INSERT);
+ counter = hash->find_slot (&c, INSERT);
  if (!*counter)
  {
  *counter = XNEW (struct statistics_counter_s);
gcc/store-motion.c
@@ -129,7 +129,7 @@ st_expr_hasher::equal (const value_type *ptr1, const compare_type *ptr2)
  }
  /* Hashtable for the load/store memory refs. */
- static hash_table <st_expr_hasher> store_motion_mems_table;
+ static hash_table<st_expr_hasher> *store_motion_mems_table;
  /* This will search the st_expr list for a matching expression. If it
  doesn't find one, we create one and initialize it. */

@@ -147,7 +147,7 @@ st_expr_entry (rtx x)
  NULL, /*have_reg_qty=*/false);
  e.pattern = x;
- slot = store_motion_mems_table.find_slot_with_hash (&e, hash, INSERT);
+ slot = store_motion_mems_table->find_slot_with_hash (&e, hash, INSERT);
  if (*slot)
  return *slot;

@@ -183,8 +183,8 @@ free_st_expr_entry (struct st_expr * ptr)
  static void
  free_store_motion_mems (void)
  {
- if (store_motion_mems_table.is_created ())
- store_motion_mems_table.dispose ();
+ delete store_motion_mems_table;
+ store_motion_mems_table = NULL;
  while (store_motion_mems)
  {

@@ -651,7 +651,7 @@ compute_store_table (void)
  unsigned int max_gcse_regno = max_reg_num ();
  store_motion_mems = NULL;
- store_motion_mems_table.create (13);
+ store_motion_mems_table = new hash_table<st_expr_hasher> (13);
  last_set_in = XCNEWVEC (int, max_gcse_regno);
  already_set = XNEWVEC (int, max_gcse_regno);

@@ -713,7 +713,7 @@ compute_store_table (void)
  if (! ptr->avail_stores)
  {
  *prev_next_ptr_ptr = ptr->next;
- store_motion_mems_table.remove_elt_with_hash (ptr, ptr->hash_index);
+ store_motion_mems_table->remove_elt_with_hash (ptr, ptr->hash_index);
  free_st_expr_entry (ptr);
  }
  else

@@ -1152,7 +1152,8 @@ one_store_motion_pass (void)
  num_stores = compute_store_table ();
  if (num_stores == 0)
  {
- store_motion_mems_table.dispose ();
+ delete store_motion_mems_table;
+ store_motion_mems_table = NULL;
  end_alias_analysis ();
  return 0;
  }
gcc/trans-mem.c
@@ -982,7 +982,7 @@ log_entry_hasher::remove (value_type *lp)
  /* The actual log. */
- static hash_table <log_entry_hasher> tm_log;
+ static hash_table<log_entry_hasher> *tm_log;
  /* Addresses to log with a save/restore sequence. These should be in
  dominator order. */

@@ -1027,14 +1027,14 @@ tm_mem_map_hasher::equal (const value_type *v, const compare_type *c)
  /* Map for an SSA_NAME originally pointing to a non aliased new piece
  of memory (malloc, alloc, etc). */
- static hash_table <tm_mem_map_hasher> tm_new_mem_hash;
+ static hash_table<tm_mem_map_hasher> *tm_new_mem_hash;
  /* Initialize logging data structures. */
  static void
  tm_log_init (void)
  {
- tm_log.create (10);
- tm_new_mem_hash.create (5);
+ tm_log = new hash_table<log_entry_hasher> (10);
+ tm_new_mem_hash = new hash_table<tm_mem_map_hasher> (5);
  tm_log_save_addresses.create (5);
  }

@@ -1042,8 +1042,10 @@ tm_log_init (void)
  static void
  tm_log_delete (void)
  {
- tm_log.dispose ();
- tm_new_mem_hash.dispose ();
+ delete tm_log;
+ tm_log = NULL;
+ delete tm_new_mem_hash;
+ tm_new_mem_hash = NULL;
  tm_log_save_addresses.release ();
  }

@@ -1088,7 +1090,7 @@ tm_log_add (basic_block entry_block, tree addr, gimple stmt)
  struct tm_log_entry l, *lp;
  l.addr = addr;
- slot = tm_log.find_slot (&l, INSERT);
+ slot = tm_log->find_slot (&l, INSERT);
  if (!*slot)
  {
  tree type = TREE_TYPE (addr);

@@ -1231,10 +1233,10 @@ tm_log_emit_stmt (tree addr, gimple stmt)
  static void
  tm_log_emit (void)
  {
- hash_table <log_entry_hasher>::iterator hi;
+ hash_table<log_entry_hasher>::iterator hi;
  struct tm_log_entry *lp;
- FOR_EACH_HASH_TABLE_ELEMENT (tm_log, lp, tm_log_entry_t, hi)
+ FOR_EACH_HASH_TABLE_ELEMENT (*tm_log, lp, tm_log_entry_t, hi)
  {
  size_t i;
  gimple stmt;

@@ -1276,7 +1278,7 @@ tm_log_emit_saves (basic_block entry_block, basic_block bb)
  for (i = 0; i < tm_log_save_addresses.length (); ++i)
  {
  l.addr = tm_log_save_addresses[i];
- lp = *(tm_log.find_slot (&l, NO_INSERT));
+ lp = *(tm_log->find_slot (&l, NO_INSERT));
  gcc_assert (lp->save_var != NULL);
  /* We only care about variables in the current transaction. */

@@ -1312,7 +1314,7 @@ tm_log_emit_restores (basic_block entry_block, basic_block bb)
  for (i = tm_log_save_addresses.length () - 1; i >= 0; i--)
  {
  l.addr = tm_log_save_addresses[i];
- lp = *(tm_log.find_slot (&l, NO_INSERT));
+ lp = *(tm_log->find_slot (&l, NO_INSERT));
  gcc_assert (lp->save_var != NULL);
  /* We only care about variables in the current transaction. */

@@ -1363,7 +1365,7 @@ thread_private_new_memory (basic_block entry_block, tree x)
  /* Look in cache first. */
  elt.val = x;
- slot = tm_new_mem_hash.find_slot (&elt, INSERT);
+ slot = tm_new_mem_hash->find_slot (&elt, INSERT);
  elt_p = *slot;
  if (elt_p)
  return elt_p->local_new_memory;

@@ -3340,7 +3342,7 @@ static bitmap_obstack tm_memopt_obstack;
  /* Unique counter for TM loads and stores. Loads and stores of the
  same address get the same ID. */
  static unsigned int tm_memopt_value_id;
- static hash_table <tm_memop_hasher> tm_memopt_value_numbers;
+ static hash_table<tm_memop_hasher> *tm_memopt_value_numbers;
  #define STORE_AVAIL_IN(BB) \
  ((struct tm_memopt_bitmaps *) ((BB)->aux))->store_avail_in

@@ -3374,7 +3376,7 @@ tm_memopt_value_number (gimple stmt, enum insert_option op)
  gcc_assert (is_tm_load (stmt) || is_tm_store (stmt));
  tmpmem.addr = gimple_call_arg (stmt, 0);
- slot = tm_memopt_value_numbers.find_slot (&tmpmem, op);
+ slot = tm_memopt_value_numbers->find_slot (&tmpmem, op);
  if (*slot)
  mem = *slot;
  else if (op == INSERT)

@@ -3434,11 +3436,11 @@ dump_tm_memopt_set (const char *set_name, bitmap bits)
  fprintf (dump_file, "TM memopt: %s: [", set_name);
  EXECUTE_IF_SET_IN_BITMAP (bits, 0, i, bi)
  {
- hash_table <tm_memop_hasher>::iterator hi;
+ hash_table<tm_memop_hasher>::iterator hi;
  struct tm_memop *mem = NULL;
  /* Yeah, yeah, yeah. Whatever. This is just for debugging. */
- FOR_EACH_HASH_TABLE_ELEMENT (tm_memopt_value_numbers, mem, tm_memop_t, hi)
+ FOR_EACH_HASH_TABLE_ELEMENT (*tm_memopt_value_numbers, mem, tm_memop_t, hi)
  if (mem->value_id == i)
  break;
  gcc_assert (mem->value_id == i);

@@ -3874,7 +3876,7 @@ execute_tm_memopt (void)
  vec<basic_block> bbs;
  tm_memopt_value_id = 0;
- tm_memopt_value_numbers.create (10);
+ tm_memopt_value_numbers = new hash_table<tm_memop_hasher> (10);
  for (region = all_tm_regions; region; region = region->next)
  {

@@ -3908,10 +3910,11 @@ execute_tm_memopt (void)
  tm_memopt_free_sets (bbs);
  bbs.release ();
  bitmap_obstack_release (&tm_memopt_obstack);
- tm_memopt_value_numbers.empty ();
+ tm_memopt_value_numbers->empty ();
  }
- tm_memopt_value_numbers.dispose ();
+ delete tm_memopt_value_numbers;
+ tm_memopt_value_numbers = NULL;
  return 0;
  }
gcc/tree-browser.c
@@ -134,7 +134,7 @@ tree_upper_hasher::equal (const value_type *parent, const compare_type *node)
  }
  /* Static variables. */
- static hash_table <tree_upper_hasher> TB_up_ht;
+ static hash_table<tree_upper_hasher> *TB_up_ht;
  static vec<tree, va_gc> *TB_history_stack;
  static int TB_verbose = 1;

@@ -167,7 +167,7 @@ browse_tree (tree begin)
  /* Store in a hashtable information about previous and upper statements. */
  {
- TB_up_ht.create (1023);
+ TB_up_ht = new hash_table<tree_upper_hasher> (1023);
  TB_update_up (head);
  }

@@ -645,7 +645,8 @@ browse_tree (tree begin)
  }
  ret:;
- TB_up_ht.dispose ();
+ delete TB_up_ht;
+ TB_up_ht = NULL;
  return;
  }

@@ -691,7 +692,7 @@ TB_up_expr (tree node)
  if (node == NULL_TREE)
  return NULL_TREE;
- res = TB_up_ht.find (node);
+ res = TB_up_ht->find (node);
  return res;
  }

@@ -769,7 +770,7 @@ store_child_info (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
  for (i = 0; i < n; i++)
  {
  tree op = TREE_OPERAND (node, i);
- slot = TB_up_ht.find_slot (op, INSERT);
+ slot = TB_up_ht->find_slot (op, INSERT);
  *slot = node;
  }
  }
gcc/tree-cfg.c
@@ -143,7 +143,7 @@ locus_discrim_hasher::equal (const value_type *a, const compare_type *b)
  return LOCATION_LINE (a->locus) == LOCATION_LINE (b->locus);
  }
- static hash_table <locus_discrim_hasher> discriminator_per_locus;
+ static hash_table<locus_discrim_hasher> *discriminator_per_locus;
  /* Basic blocks and flowgraphs. */
  static void make_blocks (gimple_seq);

@@ -244,11 +244,12 @@ build_gimple_cfg (gimple_seq seq)
  group_case_labels ();
  /* Create the edges of the flowgraph. */
- discriminator_per_locus.create (13);
+ discriminator_per_locus = new hash_table<locus_discrim_hasher> (13);
  make_edges ();
  assign_discriminators ();
  cleanup_dead_labels ();
- discriminator_per_locus.dispose ();
+ delete discriminator_per_locus;
+ discriminator_per_locus = NULL;
  }

@@ -938,7 +939,7 @@ next_discriminator_for_locus (location_t locus)
  item.locus = locus;
  item.discriminator = 0;
- slot = discriminator_per_locus.find_slot_with_hash (
+ slot = discriminator_per_locus->find_slot_with_hash (
  &item, LOCATION_LINE (locus), INSERT);
  gcc_assert (slot);
  if (*slot == HTAB_EMPTY_ENTRY)
gcc/tree-complex.c
@@ -73,7 +73,7 @@ static vec<complex_lattice_t> complex_lattice_values;
  /* For each complex variable, a pair of variables for the components exists in
  the hashtable. */
- static int_tree_htab_type complex_variable_components;
+ static int_tree_htab_type *complex_variable_components;
  /* For each complex SSA_NAME, a pair of ssa names for the components. */
  static vec<tree> complex_ssa_name_components;

@@ -85,7 +85,7 @@ cvc_lookup (unsigned int uid)
  {
  struct int_tree_map *h, in;
  in.uid = uid;
- h = complex_variable_components.find_with_hash (&in, uid);
+ h = complex_variable_components->find_with_hash (&in, uid);
  return h ? h->to : NULL;
  }

@@ -100,7 +100,7 @@ cvc_insert (unsigned int uid, tree to)
  h = XNEW (struct int_tree_map);
  h->uid = uid;
  h->to = to;
- loc = complex_variable_components.find_slot_with_hash (h, uid, INSERT);
+ loc = complex_variable_components->find_slot_with_hash (h, uid, INSERT);
  *loc = h;
  }

@@ -1629,7 +1629,7 @@ tree_lower_complex (void)
  init_parameter_lattice_values ();
  ssa_propagate (complex_visit_stmt, complex_visit_phi);
- complex_variable_components.create (10);
+ complex_variable_components = new int_tree_htab_type (10);
  complex_ssa_name_components.create (2 * num_ssa_names);
  complex_ssa_name_components.safe_grow_cleared (2 * num_ssa_names);

@@ -1650,7 +1650,8 @@ tree_lower_complex (void)
  gsi_commit_edge_inserts ();
- complex_variable_components.dispose ();
+ delete complex_variable_components;
+ complex_variable_components = NULL;
  complex_ssa_name_components.release ();
  complex_lattice_values.release ();
  return 0;
gcc/tree-eh.c
@@ -225,7 +225,7 @@ finally_tree_hasher::equal (const value_type *v, const compare_type *c)
  }
  /* Note that this table is *not* marked GTY. It is short-lived. */
- static hash_table <finally_tree_hasher> finally_tree;
+ static hash_table<finally_tree_hasher> *finally_tree;
  static void
  record_in_finally_tree (treemple child, gimple parent)

@@ -237,7 +237,7 @@ record_in_finally_tree (treemple child, gimple parent)
  n->child = child;
  n->parent = parent;
- slot = finally_tree.find_slot (n, INSERT);
+ slot = finally_tree->find_slot (n, INSERT);
  gcc_assert (!*slot);
  *slot = n;
  }

@@ -316,7 +316,7 @@ outside_finally_tree (treemple start, gimple target)
  do
  {
  n.child = start;
- p = finally_tree.find (&n);
+ p = finally_tree->find (&n);
  if (!p)
  return true;
  start.g = p->parent;

@@ -2161,7 +2161,7 @@ pass_lower_eh::execute (function *fun)
  if (bodyp == NULL)
  return 0;
- finally_tree.create (31);
+ finally_tree = new hash_table<finally_tree_hasher> (31);
  eh_region_may_contain_throw_map = BITMAP_ALLOC (NULL);
  memset (&null_state, 0, sizeof (null_state));

@@ -2179,7 +2179,8 @@ pass_lower_eh::execute (function *fun)
  didn't change its value, and we don't have to re-set the function. */
  gcc_assert (bodyp == gimple_body (current_function_decl));
- finally_tree.dispose ();
+ delete finally_tree;
+ finally_tree = NULL;
  BITMAP_FREE (eh_region_may_contain_throw_map);
  eh_seq = NULL;
gcc/tree-into-ssa.c
@@ -224,7 +224,7 @@ var_info_hasher::equal (const value_type *p1, const compare_type *p2)
  /* Each entry in VAR_INFOS contains an element of type STRUCT
  VAR_INFO_D. */
- static hash_table <var_info_hasher> var_infos;
+ static hash_table<var_info_hasher> *var_infos;
  /* Information stored for SSA names. */

@@ -383,7 +383,7 @@ get_var_info (tree decl)
  struct var_info_d vi;
  var_info_d **slot;
  vi.var = decl;
- slot = var_infos.find_slot_with_hash (&vi, DECL_UID (decl), INSERT);
+ slot = var_infos->find_slot_with_hash (&vi, DECL_UID (decl), INSERT);
  if (*slot == NULL)
  {
  var_info_p v = XCNEW (struct var_info_d);

@@ -1084,14 +1084,14 @@ insert_phi_nodes_compare_var_infos (const void *a, const void *b)
  static void
  insert_phi_nodes (bitmap_head *dfs)
  {
- hash_table <var_info_hasher>::iterator hi;
+ hash_table<var_info_hasher>::iterator hi;
  unsigned i;
  var_info_p info;
  timevar_push (TV_TREE_INSERT_PHI_NODES);
- auto_vec<var_info_p> vars (var_infos.elements ());
- FOR_EACH_HASH_TABLE_ELEMENT (var_infos, info, var_info_p, hi)
+ auto_vec<var_info_p> vars (var_infos->elements ());
+ FOR_EACH_HASH_TABLE_ELEMENT (*var_infos, info, var_info_p, hi)
  if (info->info.need_phi_state != NEED_PHI_STATE_NO)
  vars.quick_push (info);

@@ -1654,7 +1654,7 @@ debug_tree_ssa (void)
  /* Dump statistics for the hash table HTAB. */
  static void
- htab_statistics (FILE *file, hash_table <var_info_hasher> htab)
+ htab_statistics (FILE *file, const hash_table<var_info_hasher> &htab)
  {
  fprintf (file, "size %ld, %ld elements, %f collision/search ratio\n",
  (long) htab.size (),

@@ -1668,11 +1668,11 @@ htab_statistics (FILE *file, hash_table <var_info_hasher> htab)
  void
  dump_tree_ssa_stats (FILE *file)
  {
- if (var_infos.is_created ())
+ if (var_infos)
  {
  fprintf (file, "\nHash table statistics:\n");
  fprintf (file, " var_infos: ");
- htab_statistics (file, var_infos);
+ htab_statistics (file, *var_infos);
  fprintf (file, "\n");
  }
  }

@@ -1713,8 +1713,8 @@ void
  dump_var_infos (FILE *file)
  {
  fprintf (file, "\n\nDefinition and live-in blocks:\n\n");
- if (var_infos.is_created ())
- var_infos.traverse <FILE *, debug_var_infos_r> (file);
+ if (var_infos)
+ var_infos->traverse <FILE *, debug_var_infos_r> (file);
  }

@@ -2206,7 +2206,7 @@ rewrite_blocks (basic_block entry, enum rewrite_mode what)
  if (dump_file && (dump_flags & TDF_STATS))
  {
  dump_dfa_stats (dump_file);
- if (var_infos.is_created ())
+ if (var_infos)
  dump_tree_ssa_stats (dump_file);
  }

@@ -2261,8 +2261,9 @@ init_ssa_renamer (void)
  cfun->gimple_df->in_ssa_p = false;
  /* Allocate memory for the DEF_BLOCKS hash table. */
- gcc_assert (!var_infos.is_created ());
- var_infos.create (vec_safe_length (cfun->local_decls));
+ gcc_assert (!var_infos);
+ var_infos = new hash_table<var_info_hasher>
+ (vec_safe_length (cfun->local_decls));
  bitmap_obstack_initialize (&update_ssa_obstack);
  }

@@ -2273,8 +2274,8 @@ init_ssa_renamer (void)
  static void
  fini_ssa_renamer (void)
  {
- if (var_infos.is_created ())
- var_infos.dispose ();
+ delete var_infos;
+ var_infos = NULL;
  bitmap_obstack_release (&update_ssa_obstack);

@@ -3208,7 +3209,7 @@ update_ssa (unsigned update_flags)
  {
  /* If we rename bare symbols initialize the mapping to
  auxiliar info we need to keep track of. */
- var_infos.create (47);
+ var_infos = new hash_table<var_info_hasher> (47);
  /* If we have to rename some symbols from scratch, we need to
  start the process at the root of the CFG. FIXME, it should
gcc/tree-parloops.c
@@ -227,20 +227,20 @@ reduction_hasher::hash (const value_type *a)
  return a->reduc_version;
  }
- typedef hash_table <reduction_hasher> reduction_info_table_type;
+ typedef hash_table<reduction_hasher> reduction_info_table_type;
  static struct reduction_info *
- reduction_phi (reduction_info_table_type reduction_list, gimple phi)
+ reduction_phi (reduction_info_table_type *reduction_list, gimple phi)
  {
  struct reduction_info tmpred, *red;
- if (reduction_list.elements () == 0 || phi == NULL)
+ if (reduction_list->elements () == 0 || phi == NULL)
  return NULL;
  tmpred.reduc_phi = phi;
  tmpred.reduc_version = gimple_uid (phi);
- red = reduction_list.find (&tmpred);
+ red = reduction_list->find (&tmpred);
  return red;
  }

@@ -279,7 +279,7 @@ name_to_copy_hasher::hash (const value_type *a)
  return (hashval_t) a->version;
  }
- typedef hash_table <name_to_copy_hasher> name_to_copy_table_type;
+ typedef hash_table<name_to_copy_hasher> name_to_copy_table_type;
  /* A transformation matrix, which is a self-contained ROWSIZE x COLSIZE
  matrix. Rather than use floats, we simply keep a single DENOMINATOR that

@@ -486,7 +486,7 @@ loop_has_blocks_with_irreducible_flag (struct loop *loop)
  static tree
  take_address_of (tree obj, tree type, edge entry,
- int_tree_htab_type decl_address, gimple_stmt_iterator *gsi)
+ int_tree_htab_type *decl_address, gimple_stmt_iterator *gsi)
  {
  int uid;
  int_tree_map **dslot;

@@ -512,7 +512,7 @@ take_address_of (tree obj, tree type, edge entry,
  on it. */
  uid = DECL_UID (TREE_OPERAND (TREE_OPERAND (*var_p, 0), 0));
  ielt.uid = uid;
- dslot = decl_address.find_slot_with_hash (&ielt, uid, INSERT);
+ dslot = decl_address->find_slot_with_hash (&ielt, uid, INSERT);
  if (!*dslot)
  {
  if (gsi == NULL)

@@ -609,7 +609,7 @@ struct elv_data
  {
  struct walk_stmt_info info;
  edge entry;
- int_tree_htab_type decl_address;
+ int_tree_htab_type *decl_address;
  gimple_stmt_iterator *gsi;
  bool changed;
  bool reset;

@@ -699,7 +699,7 @@ eliminate_local_variables_1 (tree *tp, int *walk_subtrees, void *data)
  static void
  eliminate_local_variables_stmt (edge entry, gimple_stmt_iterator *gsi,
- int_tree_htab_type decl_address)
+ int_tree_htab_type *decl_address)
  {
  struct elv_data dta;
  gimple stmt = gsi_stmt (*gsi);

@@ -756,8 +756,7 @@ eliminate_local_variables (edge entry, edge exit)
  unsigned i;
  gimple_stmt_iterator gsi;
  bool has_debug_stmt = false;
- int_tree_htab_type decl_address;
- decl_address.create (10);
+ int_tree_htab_type decl_address (10);
  basic_block entry_bb = entry->src;
  basic_block exit_bb = exit->dest;

@@ -772,16 +771,14 @@ eliminate_local_variables (edge entry, edge exit)
  has_debug_stmt = true;
  }
  else
- eliminate_local_variables_stmt (entry, &gsi, decl_address);
+ eliminate_local_variables_stmt (entry, &gsi, &decl_address);
  if (has_debug_stmt)
  FOR_EACH_VEC_ELT (body, i, bb)
  if (bb != entry_bb && bb != exit_bb)
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
  if (gimple_debug_bind_p (gsi_stmt (gsi)))
- eliminate_local_variables_stmt (entry, &gsi, decl_address);
- decl_address.dispose ();
+ eliminate_local_variables_stmt (entry, &gsi, &decl_address);
  }
  /* Returns true if expression EXPR is not defined between ENTRY and

@@ -819,8 +816,9 @@ expr_invariant_in_region_p (edge entry, edge exit, tree expr)
  duplicated, storing the copies in DECL_COPIES. */
  static tree
- separate_decls_in_region_name (tree name, name_to_copy_table_type name_copies,
- int_tree_htab_type decl_copies, bool copy_name_p)
+ separate_decls_in_region_name (tree name, name_to_copy_table_type *name_copies,
+ int_tree_htab_type *decl_copies,
+ bool copy_name_p)
  {
  tree copy, var, var_copy;
  unsigned idx, uid, nuid;

@@ -834,8 +832,8 @@ separate_decls_in_region_name (tree name, name_to_copy_table_type name_copies,
  idx = SSA_NAME_VERSION (name);
  elt.version = idx;
- slot = name_copies.find_slot_with_hash (&elt, idx,
- copy_name_p ? INSERT : NO_INSERT);
+ slot = name_copies->find_slot_with_hash (&elt, idx,
+ copy_name_p ? INSERT : NO_INSERT);
  if (slot && *slot)
  return (*slot)->new_name;

@@ -860,7 +858,7 @@ separate_decls_in_region_name (tree name, name_to_copy_table_type name_copies,
  uid = DECL_UID (var);
  ielt.uid = uid;
- dslot = decl_copies.find_slot_with_hash (&ielt, uid, INSERT);
+ dslot = decl_copies->find_slot_with_hash (&ielt, uid, INSERT);
  if (!*dslot)
  {
  var_copy = create_tmp_var (TREE_TYPE (var), get_name (var));

@@ -874,7 +872,7 @@ separate_decls_in_region_name (tree name, name_to_copy_table_type name_copies,
  it again. */
  nuid = DECL_UID (var_copy);
  ielt.uid = nuid;
- dslot = decl_copies.find_slot_with_hash (&ielt, nuid, INSERT);
+ dslot = decl_copies->find_slot_with_hash (&ielt, nuid, INSERT);
  gcc_assert (!*dslot);
  nielt = XNEW (struct int_tree_map);
  nielt->uid = nuid;

@@ -897,8 +895,8 @@ separate_decls_in_region_name (tree name, name_to_copy_table_type name_copies,
  static void
  separate_decls_in_region_stmt (edge entry, edge exit, gimple stmt,
- name_to_copy_table_type name_copies,
- int_tree_htab_type decl_copies)
+ name_to_copy_table_type *name_copies,
+ int_tree_htab_type *decl_copies)
  {
  use_operand_p use;
  def_operand_p def;

@@ -937,8 +935,8 @@ separate_decls_in_region_stmt (edge entry, edge exit, gimple stmt,
  static bool
  separate_decls_in_region_debug (gimple stmt,
- name_to_copy_table_type name_copies,
- int_tree_htab_type decl_copies)
+ name_to_copy_table_type *name_copies,
+ int_tree_htab_type *decl_copies)
  {
  use_operand_p use;
  ssa_op_iter oi;

@@ -958,7 +956,7 @@ separate_decls_in_region_debug (gimple stmt,
  return true;
  gcc_assert (DECL_P (var) && SSA_VAR_P (var));
  ielt.uid = DECL_UID (var);
- dslot = decl_copies.find_slot_with_hash (&ielt, ielt.uid, NO_INSERT);
+ dslot = decl_copies->find_slot_with_hash (&ielt, ielt.uid, NO_INSERT);
  if (!dslot)
  return true;
  if (gimple_debug_bind_p (stmt))

@@ -973,7 +971,7 @@ separate_decls_in_region_debug (gimple stmt,
  continue;
  elt.version = SSA_NAME_VERSION (name);
- slot = name_copies.find_slot_with_hash (&elt, elt.version, NO_INSERT);
+ slot = name_copies->find_slot_with_hash (&elt, elt.version, NO_INSERT);
  if (!slot)
  {
  gimple_debug_bind_reset_value (stmt);

@@ -1133,14 +1131,14 @@ create_call_for_reduction_1 (reduction_info **slot, struct clsn_data *clsn_data)
  shared data is stored in and loaded from. */
  static void
  create_call_for_reduction (struct loop *loop,
- reduction_info_table_type reduction_list,
+ reduction_info_table_type *reduction_list,
  struct clsn_data *ld_st_data)
  {
- reduction_list.traverse <struct loop *, create_phi_for_local_result> (loop);
+ reduction_list->traverse <struct loop *, create_phi_for_local_result> (loop);
  /* Find the fallthru edge from GIMPLE_OMP_CONTINUE. */
  ld_st_data->load_bb = FALLTHRU_EDGE (loop->latch)->dest;
  reduction_list
- .traverse <struct clsn_data *, create_call_for_reduction_1> (ld_st_data);
+ ->traverse <struct clsn_data *, create_call_for_reduction_1> (ld_st_data);
  }
  /* Callback for htab_traverse. Loads the final reduction value at the

@@ -1182,7 +1180,7 @@ create_loads_for_reductions (reduction_info **slot, struct clsn_data *clsn_data)
  REDUCTION_LIST describes the list of reductions that the
  loads should be generated for. */
  static void
- create_final_loads_for_reduction (reduction_info_table_type reduction_list,
+ create_final_loads_for_reduction (reduction_info_table_type *reduction_list,
  struct clsn_data *ld_st_data)
  {
  gimple_stmt_iterator gsi;

@@ -1196,7 +1194,7 @@ create_final_loads_for_reduction (reduction_info_table_type reduction_list,
  gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
  reduction_list
- .traverse <struct clsn_data *, create_loads_for_reductions> (ld_st_data);
+ ->traverse <struct clsn_data *, create_loads_for_reductions> (ld_st_data);
  }

@@ -1288,17 +1286,15 @@ create_loads_and_stores_for_name (name_to_copy_elt **slot,
  static void
  separate_decls_in_region (edge entry, edge exit,
- reduction_info_table_type reduction_list,
+ reduction_info_table_type *reduction_list,
  tree *arg_struct, tree *new_arg_struct,
  struct clsn_data *ld_st_data)
  {
  basic_block bb1 = split_edge (entry);
  basic_block bb0 = single_pred (bb1);
- name_to_copy_table_type name_copies;
- name_copies.create (10);
- int_tree_htab_type decl_copies;
- decl_copies.create (10);
+ name_to_copy_table_type name_copies (10);
+ int_tree_htab_type decl_copies (10);
  unsigned i;
  tree type, type_name, nvar;
  gimple_stmt_iterator gsi;

@@ -1318,7 +1314,7 @@ separate_decls_in_region (edge entry, edge exit,
  {
  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
  separate_decls_in_region_stmt (entry, exit, gsi_stmt (gsi),
- name_copies, decl_copies);
+ &name_copies, &decl_copies);
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
  {

@@ -1328,7 +1324,7 @@ separate_decls_in_region (edge entry, edge exit,
  has_debug_stmt = true;
  else
  separate_decls_in_region_stmt (entry, exit, stmt,
- name_copies, decl_copies);
+ &name_copies, &decl_copies);
  }
  }
  }

@@ -1349,8 +1345,8 @@ separate_decls_in_region (edge entry, edge exit,
  if (is_gimple_debug (stmt))
  {
- if (separate_decls_in_region_debug (stmt, name_copies,
- decl_copies))
+ if (separate_decls_in_region_debug (stmt, &name_copies,
+ &decl_copies))
  {
  gsi_remove (&gsi, true);
  continue;

@@ -1361,7 +1357,7 @@ separate_decls_in_region (edge entry, edge exit,
  }
  }
- if (name_copies.elements () == 0 && reduction_list.elements () == 0)
+ if (name_copies.elements () == 0 && reduction_list->elements () == 0)
  {
  /* It may happen that there is nothing to copy (if there are only
  loop carried and external variables in the loop). */

@@ -1378,10 +1374,10 @@ separate_decls_in_region (edge entry, edge exit,
  TYPE_NAME (type) = type_name;
  name_copies.traverse <tree, add_field_for_name> (type);
- if (reduction_list.is_created () && reduction_list.elements () > 0)
+ if (reduction_list && reduction_list->elements () > 0)
  {
  /* Create the fields for reductions. */
- reduction_list.traverse <tree, add_field_for_reduction> (type);
+ reduction_list->traverse <tree, add_field_for_reduction> (type);
  }
  layout_type (type);

@@ -1401,20 +1397,17 @@ separate_decls_in_region (edge entry, edge exit,
  /* Load the calculation from memory (after the join of the threads). */
- if (reduction_list.is_created () && reduction_list.elements () > 0)
+ if (reduction_list && reduction_list->elements () > 0)
  {
  reduction_list
- .traverse <struct clsn_data *, create_stores_for_reduction>
- (ld_st_data);
+ ->traverse <struct clsn_data *, create_stores_for_reduction>
+ (ld_st_data);
  clsn_data.load = make_ssa_name (nvar, NULL);
  clsn_data.load_bb = exit->dest;
  clsn_data.store = ld_st_data->store;
  create_final_loads_for_reduction (reduction_list, &clsn_data);
  }
  }
- decl_copies.dispose ();
- name_copies.dispose ();
  }
  /* Bitmap containing uids of functions created by parallelization. We cannot

@@ -1504,7 +1497,7 @@ create_loop_fn (location_t loc)
  static void
  transform_to_exit_first_loop (struct loop *loop,
- reduction_info_table_type reduction_list,
+ reduction_info_table_type *reduction_list,
  tree nit)
  {
  basic_block *bbs, *nbbs, ex_bb, orig_header;

@@ -1574,7 +1567,7 @@ transform_to_exit_first_loop (struct loop *loop,
  PHI_RESULT of this phi is the resulting value of the reduction
  variable when exiting the loop. */
- if (reduction_list.elements () > 0)
+ if (reduction_list->elements () > 0)
  {
  struct reduction_info *red;

@@ -1749,7 +1742,8 @@ create_parallel_loop (struct loop *loop, tree loop_fn, tree data,
  REDUCTION_LIST describes the reductions existent in the LOOP. */
  static void
- gen_parallel_loop (struct loop *loop, reduction_info_table_type reduction_list,
+ gen_parallel_loop (struct loop *loop,
+ reduction_info_table_type *reduction_list,
  unsigned n_threads, struct tree_niter_desc *niter)
  {
  tree many_iterations_cond, type, nit;

@@ -1874,8 +1868,8 @@ gen_parallel_loop (struct loop *loop, reduction_info_table_type reduction_list,
  transform_to_exit_first_loop (loop, reduction_list, nit);
  /* Generate initializations for reductions. */
- if (reduction_list.elements () > 0)
- reduction_list.traverse <struct loop *, initialize_reductions> (loop);
+ if (reduction_list->elements () > 0)
+ reduction_list->traverse <struct loop *, initialize_reductions> (loop);
  /* Eliminate the references to local variables from the loop. */
  gcc_assert (single_exit (loop));

@@ -1895,7 +1889,7 @@ gen_parallel_loop (struct loop *loop, reduction_info_table_type reduction_list,
  loc = gimple_location (cond_stmt);
  parallel_head = create_parallel_loop (loop, create_loop_fn (loc), arg_struct,
  new_arg_struct, n_threads, loc);
- if (reduction_list.elements () > 0)
+ if (reduction_list->elements () > 0)
  create_call_for_reduction (loop, reduction_list, &clsn_data);
  scev_reset ();

@@ -1942,7 +1936,7 @@ loop_has_vector_phi_nodes (struct loop *loop ATTRIBUTE_UNUSED)
  and PHI, insert it to the REDUCTION_LIST. */
  static void
- build_new_reduction (reduction_info_table_type reduction_list,
+ build_new_reduction (reduction_info_table_type *reduction_list,
  gimple reduc_stmt, gimple phi)
  {
  reduction_info **slot;

@@ -1964,7 +1958,7 @@ build_new_reduction (reduction_info_table_type reduction_list,
  new_reduction->reduc_phi = phi;
  new_reduction->reduc_version = SSA_NAME_VERSION (gimple_phi_result (phi));
  new_reduction->reduction_code = gimple_assign_rhs_code (reduc_stmt);
- slot = reduction_list.find_slot (new_reduction, INSERT);
+ slot = reduction_list->find_slot (new_reduction, INSERT);
  *slot = new_reduction;
  }

@@ -1981,7 +1975,7 @@ set_reduc_phi_uids (reduction_info **slot, void *data ATTRIBUTE_UNUSED)
  /* Detect all reductions in the LOOP, insert them into REDUCTION_LIST. */
  static void
- gather_scalar_reductions (loop_p loop, reduction_info_table_type reduction_list)
+ gather_scalar_reductions (loop_p loop, reduction_info_table_type *reduction_list)
  {
  gimple_stmt_iterator gsi;
  loop_vec_info simple_loop_info;

@@ -2013,7 +2007,7 @@ gather_scalar_reductions (loop_p loop, reduction_info_table_type reduction_list)
  /* As gimple_uid is used by the vectorizer in between vect_analyze_loop_form
  and destroy_loop_vec_info, we can set gimple_uid of reduc_phi stmts
  only now. */
- reduction_list.traverse <void *, set_reduc_phi_uids> (NULL);
+ reduction_list->traverse <void *, set_reduc_phi_uids> (NULL);
  }
  /* Try to initialize NITER for code generation part. */

@@ -2043,7 +2037,7 @@ try_get_loop_niter (loop_p loop, struct tree_niter_desc *niter)
  static bool
  try_create_reduction_list (loop_p loop,
- reduction_info_table_type reduction_list)
+ reduction_info_table_type *reduction_list)
  {
  edge exit = single_dom_exit (loop);
  gimple_stmt_iterator gsi;

@@ -2074,7 +2068,7 @@ try_create_reduction_list (loop_p loop,
  fprintf (dump_file,
  " checking if it a part of reduction pattern: \n");
  }
- if (reduction_list.elements () == 0)
+ if (reduction_list->elements () == 0)
  {
  if (dump_file && (dump_flags & TDF_DETAILS))
  fprintf (dump_file,

@@ -2147,7 +2141,6 @@ parallelize_loops (void)
  bool changed = false;
  struct loop *loop;
  struct tree_niter_desc niter_desc;
- reduction_info_table_type reduction_list;
  struct obstack parloop_obstack;
  HOST_WIDE_INT estimated;
  source_location loop_loc;

@@ -2159,7 +2152,7 @@ parallelize_loops (void)
  return false;
  gcc_obstack_init (&parloop_obstack);
- reduction_list.create (10);
+ reduction_info_table_type reduction_list (10);
  init_stmt_vec_info_vec ();
  FOR_EACH_LOOP (loop, 0)

@@ -2215,7 +2208,7 @@ parallelize_loops (void)
  if (!try_get_loop_niter (loop, &niter_desc))
  continue;
- if (!try_create_reduction_list (loop, reduction_list))
+ if (!try_create_reduction_list (loop, &reduction_list))
  continue;
  if (!flag_loop_parallelize_all

@@ -2234,12 +2227,11 @@ parallelize_loops (void)
  fprintf (dump_file, "\nloop at %s:%d: ",
  LOCATION_FILE (loop_loc), LOCATION_LINE (loop_loc));
  }
- gen_parallel_loop (loop, reduction_list,
+ gen_parallel_loop (loop, &reduction_list,
  n_threads, &niter_desc);
  }
  free_stmt_vec_info_vec ();
- reduction_list.dispose ();
  obstack_free (&parloop_obstack, NULL);
  /* Parallelization will cause new function calls to be inserted through
gcc/tree-sra.c
@@ -320,7 +320,7 @@ uid_decl_hasher::equal (const value_type *a, const compare_type *b)
  /* Set of candidates. */
  static bitmap candidate_bitmap;
- static hash_table <uid_decl_hasher> candidates;
+ static hash_table<uid_decl_hasher> *candidates;
  /* For a candidate UID return the candidates decl. */

@@ -329,7 +329,7 @@ candidate (unsigned uid)
  {
  tree_node t;
  t.decl_minimal.uid = uid;
- return candidates.find_with_hash (&t, static_cast <hashval_t> (uid));
+ return candidates->find_with_hash (&t, static_cast <hashval_t> (uid));
  }
  /* Bitmap of candidates which we should try to entirely scalarize away and

@@ -660,7 +660,8 @@ static void
  sra_initialize (void)
  {
  candidate_bitmap = BITMAP_ALLOC (NULL);
- candidates.create (vec_safe_length (cfun->local_decls) / 2);
+ candidates = new hash_table<uid_decl_hasher>
+ (vec_safe_length (cfun->local_decls) / 2);
  should_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
  cannot_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
  gcc_obstack_init (&name_obstack);

@@ -690,7 +691,8 @@ static void
  sra_deinitialize (void)
  {
  BITMAP_FREE (candidate_bitmap);
- candidates.dispose ();
+ delete candidates;
+ candidates = NULL;
  BITMAP_FREE (should_scalarize_away_bitmap);
  BITMAP_FREE (cannot_scalarize_away_bitmap);
  free_alloc_pool (access_pool);

@@ -707,9 +709,7 @@ static void
  disqualify_candidate (tree decl, const char *reason)
  {
  if (bitmap_clear_bit (candidate_bitmap, DECL_UID (decl)))
- candidates.clear_slot (candidates.find_slot_with_hash (decl,
- DECL_UID (decl),
- NO_INSERT));
+ candidates->remove_elt_with_hash (decl, DECL_UID (decl));
  if (dump_file && (dump_flags & TDF_DETAILS))
  {

@@ -1833,7 +1833,7 @@ maybe_add_sra_candidate (tree var)
  }
  bitmap_set_bit (candidate_bitmap, DECL_UID (var));
- slot = candidates.find_slot_with_hash (var, DECL_UID (var), INSERT);
+ slot = candidates->find_slot_with_hash (var, DECL_UID (var), INSERT);
  *slot = var;
  if (dump_file && (dump_flags & TDF_DETAILS))

@@ -3792,7 +3792,7 @@ find_param_candidates (void)
  continue;
  bitmap_set_bit (candidate_bitmap, DECL_UID (parm));
- slot = candidates.find_slot_with_hash (parm, DECL_UID (parm), INSERT);
+ slot = candidates->find_slot_with_hash (parm, DECL_UID (parm), INSERT);
  *slot = parm;
  ret = true;
gcc/tree-ssa-ccp.c
@@ -1896,14 +1896,14 @@ evaluate_stmt (gimple stmt)
  return val;
  }
- typedef hash_table <pointer_hash <gimple_statement_base> > gimple_htab;
+ typedef hash_table<pointer_hash<gimple_statement_base> > gimple_htab;
  /* Given a BUILT_IN_STACK_SAVE value SAVED_VAL, insert a clobber of VAR before
  each matching BUILT_IN_STACK_RESTORE. Mark visited phis in VISITED. */
  static void
  insert_clobber_before_stack_restore (tree saved_val, tree var,
- gimple_htab *visited)
+ gimple_htab **visited)
  {
  gimple stmt, clobber_stmt;
  tree clobber;

@@ -1924,10 +1924,10 @@ insert_clobber_before_stack_restore (tree saved_val, tree var,
  }
  else if (gimple_code (stmt) == GIMPLE_PHI)
  {
- if (!visited->is_created ())
- visited->create (10);
+ if (!*visited)
+ *visited = new gimple_htab (10);
- slot = visited->find_slot (stmt, INSERT);
+ slot = (*visited)->find_slot (stmt, INSERT);
  if (*slot != NULL)
  continue;

@@ -1973,7 +1973,7 @@ insert_clobbers_for_var (gimple_stmt_iterator i, tree var)
  {
  gimple stmt;
  tree saved_val;
- gimple_htab visited;
+ gimple_htab *visited = NULL;
  for (; !gsi_end_p (i); gsi_prev_dom_bb_nondebug (&i))
  {

@@ -1990,8 +1990,7 @@ insert_clobbers_for_var (gimple_stmt_iterator i, tree var)
  break;
  }
- if (visited.is_created ())
- visited.dispose ();
+ delete visited;
  }
  /* Detects a __builtin_alloca_with_align with constant size argument. Declares
@ -92,7 +92,7 @@ coalesce_pair_hasher::equal (const value_type *p1, const compare_type *p2)
&& p1->second_element == p2->second_element);
}

typedef hash_table <coalesce_pair_hasher> coalesce_table_type;
typedef hash_table<coalesce_pair_hasher> coalesce_table_type;
typedef coalesce_table_type::iterator coalesce_iterator_type;


@ -107,7 +107,7 @@ typedef struct cost_one_pair_d

typedef struct coalesce_list_d
{
coalesce_table_type list; /* Hash table. */
coalesce_table_type *list; /* Hash table. */
coalesce_pair_p *sorted; /* List when sorted. */
int num_sorted; /* Number in the sorted list. */
cost_one_pair_p cost_one_list;/* Single use coalesces with cost 1. */
@ -244,7 +244,7 @@ create_coalesce_list (void)
size = 40;

list = (coalesce_list_p) xmalloc (sizeof (struct coalesce_list_d));
list->list.create (size);
list->list = new coalesce_table_type (size);
list->sorted = NULL;
list->num_sorted = 0;
list->cost_one_list = NULL;
@ -258,7 +258,8 @@ static inline void
delete_coalesce_list (coalesce_list_p cl)
{
gcc_assert (cl->cost_one_list == NULL);
cl->list.dispose ();
delete cl->list;
cl->list = NULL;
free (cl->sorted);
gcc_assert (cl->num_sorted == 0);
free (cl);
@ -289,7 +290,7 @@ find_coalesce_pair (coalesce_list_p cl, int p1, int p2, bool create)
}

hash = coalesce_pair_hasher::hash (&p);
slot = cl->list.find_slot_with_hash (&p, hash, create ? INSERT : NO_INSERT);
slot = cl->list->find_slot_with_hash (&p, hash, create ? INSERT : NO_INSERT);
if (!slot)
return NULL;

@ -372,14 +373,14 @@ compare_pairs (const void *p1, const void *p2)
static inline int
num_coalesce_pairs (coalesce_list_p cl)
{
return cl->list.elements ();
return cl->list->elements ();
}


/* Iterate over CL using ITER, returning values in PAIR. */

#define FOR_EACH_PARTITION_PAIR(PAIR, ITER, CL) \
FOR_EACH_HASH_TABLE_ELEMENT ((CL)->list, (PAIR), coalesce_pair_p, (ITER))
FOR_EACH_HASH_TABLE_ELEMENT (*(CL)->list, (PAIR), coalesce_pair_p, (ITER))


/* Prepare CL for removal of preferred pairs. When finished they are sorted
@ -1267,9 +1268,8 @@ coalesce_ssa_name (void)
from the same SSA_NAME_VAR so debug info remains undisturbed. */
if (!optimize)
{
hash_table <ssa_name_var_hash> ssa_name_hash;
hash_table<ssa_name_var_hash> ssa_name_hash (10);

ssa_name_hash.create (10);
for (i = 1; i < num_ssa_names; i++)
{
tree a = ssa_name (i);
@ -1303,7 +1303,6 @@ coalesce_ssa_name (void)
}
}
}
ssa_name_hash.dispose ();
}
if (dump_file && (dump_flags & TDF_DETAILS))
dump_var_map (dump_file, map);

@ -223,7 +223,7 @@ expr_elt_hasher::remove (value_type *element)
global redundancy elimination). Similarly as we pass through conditionals
we record the conditional itself as having either a true or false value
in this table. */
static hash_table <expr_elt_hasher> avail_exprs;
static hash_table<expr_elt_hasher> *avail_exprs;

/* Stack of dest,src pairs that need to be restored during finalization.

@ -254,7 +254,8 @@ static struct opt_stats_d opt_stats;
static void optimize_stmt (basic_block, gimple_stmt_iterator);
static tree lookup_avail_expr (gimple, bool);
static hashval_t avail_expr_hash (const void *);
static void htab_statistics (FILE *, hash_table <expr_elt_hasher>);
static void htab_statistics (FILE *,
const hash_table<expr_elt_hasher> &);
static void record_cond (cond_equivalence *);
static void record_const_or_copy (tree, tree);
static void record_equality (tree, tree);
@ -876,7 +877,7 @@ pass_dominator::execute (function *fun)
memset (&opt_stats, 0, sizeof (opt_stats));

/* Create our hash tables. */
avail_exprs.create (1024);
avail_exprs = new hash_table<expr_elt_hasher> (1024);
avail_exprs_stack.create (20);
const_and_copies_stack.create (20);
need_eh_cleanup = BITMAP_ALLOC (NULL);
@ -976,7 +977,8 @@ pass_dominator::execute (function *fun)
loop_optimizer_finalize ();

/* Delete our main hashtable. */
avail_exprs.dispose ();
delete avail_exprs;
avail_exprs = NULL;

/* Free asserted bitmaps and stacks. */
BITMAP_FREE (need_eh_cleanup);
@ -1070,9 +1072,9 @@ remove_local_expressions_from_table (void)
print_expr_hash_elt (dump_file, victim);
}

slot = avail_exprs.find_slot_with_hash (victim, victim->hash, NO_INSERT);
slot = avail_exprs->find_slot (victim, NO_INSERT);
gcc_assert (slot && *slot == victim);
avail_exprs.clear_slot (slot);
avail_exprs->clear_slot (slot);
}
}

@ -1351,7 +1353,7 @@ dump_dominator_optimization_stats (FILE *file)
fprintf (file, "\nHash table statistics:\n");

fprintf (file, " avail_exprs: ");
htab_statistics (file, avail_exprs);
htab_statistics (file, *avail_exprs);
}


@ -1367,7 +1369,7 @@ debug_dominator_optimization_stats (void)
/* Dump statistics for the hash table HTAB. */

static void
htab_statistics (FILE *file, hash_table <expr_elt_hasher> htab)
htab_statistics (FILE *file, const hash_table<expr_elt_hasher> &htab)
{
fprintf (file, "size %ld, %ld elements, %f collision/search ratio\n",
(long) htab.size (),
@ -1388,7 +1390,7 @@ record_cond (cond_equivalence *p)

initialize_hash_element_from_expr (&p->cond, p->value, element);

slot = avail_exprs.find_slot_with_hash (element, element->hash, INSERT);
slot = avail_exprs->find_slot_with_hash (element, element->hash, INSERT);
if (*slot == NULL)
{
*slot = element;
@ -2551,8 +2553,7 @@ lookup_avail_expr (gimple stmt, bool insert)
return NULL_TREE;

/* Finally try to find the expression in the main expression hash table. */
slot = avail_exprs.find_slot_with_hash (&element, element.hash,
(insert ? INSERT : NO_INSERT));
slot = avail_exprs->find_slot (&element, (insert ? INSERT : NO_INSERT));
if (slot == NULL)
{
free_expr_hash_elt_contents (&element);

@ -97,11 +97,10 @@ var_map_base_init (var_map map)
{
int x, num_part;
tree var;
hash_table <tree_int_map_hasher> tree_to_index;
struct tree_int_map *m, *mapstorage;

num_part = num_var_partitions (map);
tree_to_index.create (num_part);
hash_table<tree_int_map_hasher> tree_to_index (num_part);
/* We can have at most num_part entries in the hash tables, so it's
enough to allocate so many map elements once, saving some malloc
calls. */
@ -149,7 +148,6 @@ var_map_base_init (var_map map)
map->num_basevars = m - mapstorage;

free (mapstorage);
tree_to_index. dispose ();
}



@ -184,7 +184,7 @@ mem_ref_hasher::equal (const value_type *mem1, const compare_type *obj2)
static struct
{
/* The hash table of memory references accessed in loops. */
hash_table <mem_ref_hasher> refs;
hash_table<mem_ref_hasher> *refs;

/* The list of memory references. */
vec<mem_ref_p> refs_list;
@ -609,7 +609,7 @@ mem_ref_in_stmt (gimple stmt)
gcc_assert (!store);

hash = iterative_hash_expr (*mem, 0);
ref = memory_accesses.refs.find_with_hash (*mem, hash);
ref = memory_accesses.refs->find_with_hash (*mem, hash);

gcc_assert (ref != NULL);
return ref;
@ -1485,7 +1485,7 @@ gather_mem_refs_stmt (struct loop *loop, gimple stmt)
else
{
hash = iterative_hash_expr (*mem, 0);
slot = memory_accesses.refs.find_slot_with_hash (*mem, hash, INSERT);
slot = memory_accesses.refs->find_slot_with_hash (*mem, hash, INSERT);
if (*slot)
{
ref = (mem_ref_p) *slot;
@ -2436,7 +2436,7 @@ tree_ssa_lim_initialize (void)

alloc_aux_for_edges (0);

memory_accesses.refs.create (100);
memory_accesses.refs = new hash_table<mem_ref_hasher> (100);
memory_accesses.refs_list.create (100);
/* Allocate a special, unanalyzable mem-ref with ID zero. */
memory_accesses.refs_list.quick_push
@ -2486,7 +2486,8 @@ tree_ssa_lim_finalize (void)
bitmap_obstack_release (&lim_bitmap_obstack);
pointer_map_destroy (lim_aux_data_map);

memory_accesses.refs.dispose ();
delete memory_accesses.refs;
memory_accesses.refs = NULL;

FOR_EACH_VEC_ELT (memory_accesses.refs_list, i, ref)
memref_free (ref);

@ -306,7 +306,7 @@ struct ivopts_data

/* The hashtable of loop invariant expressions created
by ivopt. */
hash_table <iv_inv_expr_hasher> inv_expr_tab;
hash_table<iv_inv_expr_hasher> *inv_expr_tab;

/* Loop invariant expression id. */
int inv_expr_id;
@ -875,7 +875,7 @@ tree_ssa_iv_optimize_init (struct ivopts_data *data)
data->niters = NULL;
data->iv_uses.create (20);
data->iv_candidates.create (20);
data->inv_expr_tab.create (10);
data->inv_expr_tab = new hash_table<iv_inv_expr_hasher> (10);
data->inv_expr_id = 0;
decl_rtl_to_reset.create (20);
}
@ -3955,7 +3955,7 @@ get_expr_id (struct ivopts_data *data, tree expr)

ent.expr = expr;
ent.hash = iterative_hash_expr (expr, 0);
slot = data->inv_expr_tab.find_slot (&ent, INSERT);
slot = data->inv_expr_tab->find_slot (&ent, INSERT);
if (*slot)
return (*slot)->id;

@ -6785,7 +6785,7 @@ free_loop_data (struct ivopts_data *data)

decl_rtl_to_reset.truncate (0);

data->inv_expr_tab.empty ();
data->inv_expr_tab->empty ();
data->inv_expr_id = 0;
}

@ -6803,7 +6803,8 @@ tree_ssa_iv_optimize_finalize (struct ivopts_data *data)
decl_rtl_to_reset.release ();
data->iv_uses.release ();
data->iv_candidates.release ();
data->inv_expr_tab.dispose ();
delete data->inv_expr_tab;
data->inv_expr_tab = NULL;
}

/* Returns true if the loop body BODY includes any function calls. */

@ -1466,86 +1466,28 @@ ssa_names_hasher::equal (const value_type *n1, const compare_type *n2)
&& n1->size == n2->size;
}

/* The hash table for remembering what we've seen. */
static hash_table <ssa_names_hasher> seen_ssa_names;

/* We see the expression EXP in basic block BB. If it's an interesting
expression (an MEM_REF through an SSA_NAME) possibly insert the
expression into the set NONTRAP or the hash table of seen expressions.
STORE is true if this expression is on the LHS, otherwise it's on
the RHS. */
static void
add_or_mark_expr (basic_block bb, tree exp,
struct pointer_set_t *nontrap, bool store)
{
HOST_WIDE_INT size;

if (TREE_CODE (exp) == MEM_REF
&& TREE_CODE (TREE_OPERAND (exp, 0)) == SSA_NAME
&& tree_fits_shwi_p (TREE_OPERAND (exp, 1))
&& (size = int_size_in_bytes (TREE_TYPE (exp))) > 0)
{
tree name = TREE_OPERAND (exp, 0);
struct name_to_bb map;
name_to_bb **slot;
struct name_to_bb *n2bb;
basic_block found_bb = 0;

/* Try to find the last seen MEM_REF through the same
SSA_NAME, which can trap. */
map.ssa_name_ver = SSA_NAME_VERSION (name);
map.phase = 0;
map.bb = 0;
map.store = store;
map.offset = tree_to_shwi (TREE_OPERAND (exp, 1));
map.size = size;

slot = seen_ssa_names.find_slot (&map, INSERT);
n2bb = *slot;
if (n2bb && n2bb->phase >= nt_call_phase)
found_bb = n2bb->bb;

/* If we've found a trapping MEM_REF, _and_ it dominates EXP
(it's in a basic block on the path from us to the dominator root)
then we can't trap. */
if (found_bb && (((size_t)found_bb->aux) & 1) == 1)
{
pointer_set_insert (nontrap, exp);
}
else
{
/* EXP might trap, so insert it into the hash table. */
if (n2bb)
{
n2bb->phase = nt_call_phase;
n2bb->bb = bb;
}
else
{
n2bb = XNEW (struct name_to_bb);
n2bb->ssa_name_ver = SSA_NAME_VERSION (name);
n2bb->phase = nt_call_phase;
n2bb->bb = bb;
n2bb->store = store;
n2bb->offset = map.offset;
n2bb->size = size;
*slot = n2bb;
}
}
}
}

class nontrapping_dom_walker : public dom_walker
{
public:
nontrapping_dom_walker (cdi_direction direction, pointer_set_t *ps)
: dom_walker (direction), m_nontrapping (ps) {}
: dom_walker (direction), m_nontrapping (ps), m_seen_ssa_names (128) {}

virtual void before_dom_children (basic_block);
virtual void after_dom_children (basic_block);

private:

/* We see the expression EXP in basic block BB. If it's an interesting
expression (an MEM_REF through an SSA_NAME) possibly insert the
expression into the set NONTRAP or the hash table of seen expressions.
STORE is true if this expression is on the LHS, otherwise it's on
the RHS. */
void add_or_mark_expr (basic_block, tree, bool);

pointer_set_t *m_nontrapping;

/* The hash table for remembering what we've seen. */
hash_table<ssa_names_hasher> m_seen_ssa_names;
};

/* Called by walk_dominator_tree, when entering the block BB. */
@ -1576,8 +1518,8 @@ nontrapping_dom_walker::before_dom_children (basic_block bb)
nt_call_phase++;
else if (gimple_assign_single_p (stmt) && !gimple_has_volatile_ops (stmt))
{
add_or_mark_expr (bb, gimple_assign_lhs (stmt), m_nontrapping, true);
add_or_mark_expr (bb, gimple_assign_rhs1 (stmt), m_nontrapping, false);
add_or_mark_expr (bb, gimple_assign_lhs (stmt), true);
add_or_mark_expr (bb, gimple_assign_rhs1 (stmt), false);
}
}
}
@ -1590,6 +1532,71 @@ nontrapping_dom_walker::after_dom_children (basic_block bb)
bb->aux = (void*)2;
}

/* We see the expression EXP in basic block BB. If it's an interesting
expression (an MEM_REF through an SSA_NAME) possibly insert the
expression into the set NONTRAP or the hash table of seen expressions.
STORE is true if this expression is on the LHS, otherwise it's on
the RHS. */
void
nontrapping_dom_walker::add_or_mark_expr (basic_block bb, tree exp, bool store)
{
HOST_WIDE_INT size;

if (TREE_CODE (exp) == MEM_REF
&& TREE_CODE (TREE_OPERAND (exp, 0)) == SSA_NAME
&& tree_fits_shwi_p (TREE_OPERAND (exp, 1))
&& (size = int_size_in_bytes (TREE_TYPE (exp))) > 0)
{
tree name = TREE_OPERAND (exp, 0);
struct name_to_bb map;
name_to_bb **slot;
struct name_to_bb *n2bb;
basic_block found_bb = 0;

/* Try to find the last seen MEM_REF through the same
SSA_NAME, which can trap. */
map.ssa_name_ver = SSA_NAME_VERSION (name);
map.phase = 0;
map.bb = 0;
map.store = store;
map.offset = tree_to_shwi (TREE_OPERAND (exp, 1));
map.size = size;

slot = m_seen_ssa_names.find_slot (&map, INSERT);
n2bb = *slot;
if (n2bb && n2bb->phase >= nt_call_phase)
found_bb = n2bb->bb;

/* If we've found a trapping MEM_REF, _and_ it dominates EXP
(it's in a basic block on the path from us to the dominator root)
then we can't trap. */
if (found_bb && (((size_t)found_bb->aux) & 1) == 1)
{
pointer_set_insert (m_nontrapping, exp);
}
else
{
/* EXP might trap, so insert it into the hash table. */
if (n2bb)
{
n2bb->phase = nt_call_phase;
n2bb->bb = bb;
}
else
{
n2bb = XNEW (struct name_to_bb);
n2bb->ssa_name_ver = SSA_NAME_VERSION (name);
n2bb->phase = nt_call_phase;
n2bb->bb = bb;
n2bb->store = store;
n2bb->offset = map.offset;
n2bb->size = size;
*slot = n2bb;
}
}
}
}

/* This is the entry point of gathering non trapping memory accesses.
It will do a dominator walk over the whole function, and it will
make use of the bb->aux pointers. It returns a set of trees
@ -1599,7 +1606,6 @@ get_non_trapping (void)
{
nt_call_phase = 0;
pointer_set_t *nontrap = pointer_set_create ();
seen_ssa_names.create (128);
/* We're going to do a dominator walk, so ensure that we have
dominance information. */
calculate_dominance_info (CDI_DOMINATORS);
@ -1607,8 +1613,6 @@ get_non_trapping (void)
nontrapping_dom_walker (CDI_DOMINATORS, nontrap)
.walk (cfun->cfg->x_entry_block_ptr);

seen_ssa_names.dispose ();

clear_aux_for_blocks ();
return nontrap;
}

@ -253,7 +253,7 @@ static unsigned int next_expression_id;

/* Mapping from expression to id number we can use in bitmap sets. */
static vec<pre_expr> expressions;
static hash_table <pre_expr_d> expression_to_id;
static hash_table<pre_expr_d> *expression_to_id;
static vec<unsigned> name_to_id;

/* Allocate an expression id for EXPR. */
@ -280,7 +280,7 @@ alloc_expression_id (pre_expr expr)
}
else
{
slot = expression_to_id.find_slot (expr, INSERT);
slot = expression_to_id->find_slot (expr, INSERT);
gcc_assert (!*slot);
*slot = expr;
}
@ -309,7 +309,7 @@ lookup_expression_id (const pre_expr expr)
}
else
{
slot = expression_to_id.find_slot (expr, NO_INSERT);
slot = expression_to_id->find_slot (expr, NO_INSERT);
if (!slot)
return 0;
return ((pre_expr)*slot)->id;
@ -542,7 +542,7 @@ expr_pred_trans_d::equal (const value_type *ve1,

/* The phi_translate_table caches phi translations for a given
expression and predecessor. */
static hash_table <expr_pred_trans_d> phi_translate_table;
static hash_table<expr_pred_trans_d> *phi_translate_table;

/* Add the tuple mapping from {expression E, basic block PRED} to
the phi translation table and return whether it pre-existed. */
@ -557,7 +557,7 @@ phi_trans_add (expr_pred_trans_t *entry, pre_expr e, basic_block pred)
tem.e = e;
tem.pred = pred;
tem.hashcode = hash;
slot = phi_translate_table.find_slot_with_hash (&tem, hash, INSERT);
slot = phi_translate_table->find_slot_with_hash (&tem, hash, INSERT);
if (*slot)
{
*entry = *slot;
@ -1783,7 +1783,7 @@ phi_translate (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
else
/* Remove failed translations again, they cause insert
iteration to not pick up new opportunities reliably. */
phi_translate_table.remove_elt_with_hash (slot, slot->hashcode);
phi_translate_table->remove_elt_with_hash (slot, slot->hashcode);
}

return phitrans;
@ -4711,8 +4711,8 @@ init_pre (void)
calculate_dominance_info (CDI_DOMINATORS);

bitmap_obstack_initialize (&grand_bitmap_obstack);
phi_translate_table.create (5110);
expression_to_id.create (num_ssa_names * 3);
phi_translate_table = new hash_table<expr_pred_trans_d> (5110);
expression_to_id = new hash_table<pre_expr_d> (num_ssa_names * 3);
bitmap_set_pool = create_alloc_pool ("Bitmap sets",
sizeof (struct bitmap_set), 30);
pre_expr_pool = create_alloc_pool ("pre_expr nodes",
@ -4738,8 +4738,10 @@ fini_pre ()
bitmap_obstack_release (&grand_bitmap_obstack);
free_alloc_pool (bitmap_set_pool);
free_alloc_pool (pre_expr_pool);
phi_translate_table.dispose ();
expression_to_id.dispose ();
delete phi_translate_table;
phi_translate_table = NULL;
delete expression_to_id;
expression_to_id = NULL;
name_to_id.release ();

free_aux_for_blocks ();

@ -1405,7 +1405,6 @@ undistribute_ops_list (enum tree_code opcode,
unsigned nr_candidates, nr_candidates2;
sbitmap_iterator sbi0;
vec<operand_entry_t> *subops;
hash_table <oecount_hasher> ctable;
bool changed = false;
int next_oecount_id = 0;

@ -1453,7 +1452,9 @@ undistribute_ops_list (enum tree_code opcode,

/* Build linearized sub-operand lists and the counting table. */
cvec.create (0);
ctable.create (15);

hash_table<oecount_hasher> ctable (15);

/* ??? Macro arguments cannot have multi-argument template types in
them. This typedef is needed to workaround that limitation. */
typedef vec<operand_entry_t> vec_operand_entry_t_heap;
@ -1492,7 +1493,6 @@ undistribute_ops_list (enum tree_code opcode,
}
}
}
ctable.dispose ();

/* Sort the counting table. */
cvec.qsort (oecount_cmp);

@ -139,7 +139,7 @@ vn_nary_op_hasher::equal (const value_type *vno1, const compare_type *vno2)
return vn_nary_op_eq (vno1, vno2);
}

typedef hash_table <vn_nary_op_hasher> vn_nary_op_table_type;
typedef hash_table<vn_nary_op_hasher> vn_nary_op_table_type;
typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;


@ -181,7 +181,7 @@ vn_phi_hasher::remove (value_type *phi)
phi->phiargs.release ();
}

typedef hash_table <vn_phi_hasher> vn_phi_table_type;
typedef hash_table<vn_phi_hasher> vn_phi_table_type;
typedef vn_phi_table_type::iterator vn_phi_iterator_type;


@ -245,7 +245,7 @@ vn_reference_hasher::remove (value_type *v)
free_reference (v);
}

typedef hash_table <vn_reference_hasher> vn_reference_table_type;
typedef hash_table<vn_reference_hasher> vn_reference_table_type;
typedef vn_reference_table_type::iterator vn_reference_iterator_type;


@ -253,9 +253,9 @@ typedef vn_reference_table_type::iterator vn_reference_iterator_type;

typedef struct vn_tables_s
{
vn_nary_op_table_type nary;
vn_phi_table_type phis;
vn_reference_table_type references;
vn_nary_op_table_type *nary;
vn_phi_table_type *phis;
vn_reference_table_type *references;
struct obstack nary_obstack;
alloc_pool phis_pool;
alloc_pool references_pool;
@ -291,7 +291,7 @@ vn_constant_hasher::equal (const value_type *vc1, const compare_type *vc2)
return vn_constant_eq_with_type (vc1->constant, vc2->constant);
}

static hash_table <vn_constant_hasher> constant_to_value_id;
static hash_table<vn_constant_hasher> *constant_to_value_id;
static bitmap constant_value_ids;


@ -552,7 +552,7 @@ get_constant_value_id (tree constant)

vc.hashcode = vn_hash_constant_with_type (constant);
vc.constant = constant;
slot = constant_to_value_id.find_slot_with_hash (&vc, vc.hashcode, NO_INSERT);
slot = constant_to_value_id->find_slot (&vc, NO_INSERT);
if (slot)
return (*slot)->value_id;
return 0;
@ -570,7 +570,7 @@ get_or_alloc_constant_value_id (tree constant)

vc.hashcode = vn_hash_constant_with_type (constant);
vc.constant = constant;
slot = constant_to_value_id.find_slot_with_hash (&vc, vc.hashcode, INSERT);
slot = constant_to_value_id->find_slot (&vc, INSERT);
if (*slot)
return (*slot)->value_id;

@ -1474,9 +1474,9 @@ vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
hashval_t hash;

hash = vr->hashcode;
slot = current_info->references.find_slot_with_hash (vr, hash, NO_INSERT);
slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
if (!slot && current_info == optimistic_info)
slot = valid_info->references.find_slot_with_hash (vr, hash, NO_INSERT);
slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
if (slot)
{
if (vnresult)
@ -1519,9 +1519,9 @@ vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse,
vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);

hash = vr->hashcode;
slot = current_info->references.find_slot_with_hash (vr, hash, NO_INSERT);
slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
if (!slot && current_info == optimistic_info)
slot = valid_info->references.find_slot_with_hash (vr, hash, NO_INSERT);
slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
if (slot)
return *slot;

@ -2173,8 +2173,8 @@ vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
vr1->result_vdef = vdef;

slot = current_info->references.find_slot_with_hash (vr1, vr1->hashcode,
INSERT);
slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
INSERT);

/* Because we lookup stores using vuses, and value number failures
using the vdefs (see visit_reference_op_store for how and why),
@ -2216,8 +2216,8 @@ vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
result = SSA_VAL (result);
vr1->result = result;

slot = current_info->references.find_slot_with_hash (vr1, vr1->hashcode,
INSERT);
slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
INSERT);

/* At this point we should have all the things inserted that we have
seen before, and we should never try inserting something that
@ -2386,9 +2386,11 @@ vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
*vnresult = NULL;

vno->hashcode = vn_nary_op_compute_hash (vno);
slot = current_info->nary.find_slot_with_hash (vno, vno->hashcode, NO_INSERT);
slot = current_info->nary->find_slot_with_hash (vno, vno->hashcode,
NO_INSERT);
if (!slot && current_info == optimistic_info)
slot = valid_info->nary.find_slot_with_hash (vno, vno->hashcode, NO_INSERT);
slot = valid_info->nary->find_slot_with_hash (vno, vno->hashcode,
NO_INSERT);
if (!slot)
return NULL_TREE;
if (vnresult)
@ -2471,7 +2473,7 @@ alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
VNO->HASHCODE first. */

static vn_nary_op_t
vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type table,
vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type *table,
bool compute_hash)
{
vn_nary_op_s **slot;
@ -2479,7 +2481,7 @@ vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type table,
if (compute_hash)
vno->hashcode = vn_nary_op_compute_hash (vno);

slot = table.find_slot_with_hash (vno, vno->hashcode, INSERT);
slot = table->find_slot_with_hash (vno, vno->hashcode, INSERT);
gcc_assert (!*slot);

*slot = vno;
@ -2614,9 +2616,11 @@ vn_phi_lookup (gimple phi)
vp1.phiargs = shared_lookup_phiargs;
vp1.block = gimple_bb (phi);
vp1.hashcode = vn_phi_compute_hash (&vp1);
slot = current_info->phis.find_slot_with_hash (&vp1, vp1.hashcode, NO_INSERT);
slot = current_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
NO_INSERT);
if (!slot && current_info == optimistic_info)
slot = valid_info->phis.find_slot_with_hash (&vp1, vp1.hashcode, NO_INSERT);
slot = valid_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
NO_INSERT);
if (!slot)
return NULL_TREE;
return (*slot)->result;
@ -2647,7 +2651,7 @@ vn_phi_insert (gimple phi, tree result)
vp1->result = result;
vp1->hashcode = vn_phi_compute_hash (vp1);

slot = current_info->phis.find_slot_with_hash (vp1, vp1->hashcode, INSERT);
slot = current_info->phis->find_slot_with_hash (vp1, vp1->hashcode, INSERT);

/* Because we iterate over phi operations more than once, it's
possible the slot might already exist here, hence no assert.*/
@ -2886,8 +2890,8 @@ visit_reference_op_call (tree lhs, gimple stmt)
vr2->hashcode = vr1.hashcode;
vr2->result = lhs;
vr2->result_vdef = vdef;
slot = current_info->references.find_slot_with_hash (vr2, vr2->hashcode,
INSERT);
slot = current_info->references->find_slot_with_hash (vr2, vr2->hashcode,
INSERT);
if (*slot)
free_reference (*slot);
*slot = vr2;
@ -3748,7 +3752,7 @@ copy_phi (vn_phi_t ophi, vn_tables_t info)
vn_phi_s **slot;
memcpy (phi, ophi, sizeof (*phi));
ophi->phiargs.create (0);
slot = info->phis.find_slot_with_hash (phi, phi->hashcode, INSERT);
slot = info->phis->find_slot_with_hash (phi, phi->hashcode, INSERT);
gcc_assert (!*slot);
*slot = phi;
}
@ -3763,7 +3767,7 @@ copy_reference (vn_reference_t oref, vn_tables_t info)
ref = (vn_reference_t) pool_alloc (info->references_pool);
memcpy (ref, oref, sizeof (*ref));
oref->operands.create (0);
slot = info->references.find_slot_with_hash (ref, ref->hashcode, INSERT);
slot = info->references->find_slot_with_hash (ref, ref->hashcode, INSERT);
if (*slot)
free_reference (*slot);
*slot = ref;
@ -3820,9 +3824,9 @@ process_scc (vec<tree> scc)
/* As we are value-numbering optimistically we have to
clear the expression tables and the simplified expressions
in each iteration until we converge. */
optimistic_info->nary.empty ();
optimistic_info->phis.empty ();
optimistic_info->references.empty ();
optimistic_info->nary->empty ();
optimistic_info->phis->empty ();
optimistic_info->references->empty ();
obstack_free (&optimistic_info->nary_obstack, NULL);
gcc_obstack_init (&optimistic_info->nary_obstack);
empty_alloc_pool (optimistic_info->phis_pool);
@ -3839,11 +3843,11 @@ process_scc (vec<tree> scc)

/* Finally, copy the contents of the no longer used optimistic
table to the valid table. */
FOR_EACH_HASH_TABLE_ELEMENT (optimistic_info->nary, nary, vn_nary_op_t, hin)
FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->nary, nary, vn_nary_op_t, hin)
copy_nary (nary, valid_info);
FOR_EACH_HASH_TABLE_ELEMENT (optimistic_info->phis, phi, vn_phi_t, hip)
FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->phis, phi, vn_phi_t, hip)
copy_phi (phi, valid_info);
FOR_EACH_HASH_TABLE_ELEMENT (optimistic_info->references,
FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->references,
ref, vn_reference_t, hir)
copy_reference (ref, valid_info);

@ -3998,9 +4002,9 @@ continue_walking:
static void
allocate_vn_table (vn_tables_t table)
{
table->phis.create (23);
table->nary.create (23);
table->references.create (23);
table->phis = new vn_phi_table_type (23);
table->nary = new vn_nary_op_table_type (23);
table->references = new vn_reference_table_type (23);

gcc_obstack_init (&table->nary_obstack);
table->phis_pool = create_alloc_pool ("VN phis",
@ -4016,9 +4020,12 @@ allocate_vn_table (vn_tables_t table)
static void
free_vn_table (vn_tables_t table)
{
table->phis.dispose ();
table->nary.dispose ();
table->references.dispose ();
delete table->phis;
table->phis = NULL;
delete table->nary;
table->nary = NULL;
delete table->references;
table->references = NULL;
obstack_free (&table->nary_obstack, NULL);
free_alloc_pool (table->phis_pool);
free_alloc_pool (table->references_pool);
@ -4033,7 +4040,7 @@ init_scc_vn (void)

calculate_dominance_info (CDI_DOMINATORS);
sccstack.create (0);
constant_to_value_id.create (23);
constant_to_value_id = new hash_table<vn_constant_hasher> (23);

constant_value_ids = BITMAP_ALLOC (NULL);

@ -4090,7 +4097,8 @@ free_scc_vn (void)
{
size_t i;

constant_to_value_id.dispose ();
delete constant_to_value_id;
constant_to_value_id = NULL;
BITMAP_FREE (constant_value_ids);
shared_lookup_phiargs.release ();
shared_lookup_references.release ();
@ -4141,13 +4149,14 @@ set_hashtable_value_ids (void)
/* Now set the value ids of the things we had put in the hash
table. */

FOR_EACH_HASH_TABLE_ELEMENT (valid_info->nary, vno, vn_nary_op_t, hin)
FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->nary, vno, vn_nary_op_t, hin)
set_value_id_for_result (vno->result, &vno->value_id);

FOR_EACH_HASH_TABLE_ELEMENT (valid_info->phis, vp, vn_phi_t, hip)
FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->phis, vp, vn_phi_t, hip)
set_value_id_for_result (vp->result, &vp->value_id);

FOR_EACH_HASH_TABLE_ELEMENT (valid_info->references, vr, vn_reference_t, hir)
FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->references, vr, vn_reference_t,
hir)
set_value_id_for_result (vr->result, &vr->value_id);
}

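Where a table is iterated or handed to the traversal templates, the hunks keep the macro and member signatures and simply dereference the pointer. A hedged sketch of that usage, again with a hypothetical hasher H, element type T, and a placeholder callback print_entry:

  hash_table<H> *tab = new hash_table<H> (23);
  T *elt;
  hash_table<H>::iterator it;

  /* Iteration macro now takes the dereferenced table.  */
  FOR_EACH_HASH_TABLE_ELEMENT (*tab, elt, T *, it)   /* was: (tab, elt, ...)  */
    process_elt (elt);                               /* process_elt is a placeholder  */

  /* traverse<> is called through the pointer.  */
  tab->traverse <FILE *, print_entry> (stderr);      /* was: tab.traverse <...>  */
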
@ -158,7 +158,7 @@ stridxlist_hasher::equal (const value_type *v, const compare_type *c)

/* Hash table for mapping decls to a chained list of offset -> idx
mappings. */
static hash_table <stridxlist_hasher> decl_to_stridxlist_htab;
static hash_table<stridxlist_hasher> *decl_to_stridxlist_htab;

/* Obstack for struct stridxlist and struct decl_stridxlist_map. */
static struct obstack stridx_obstack;
@ -183,7 +183,7 @@ get_addr_stridx (tree exp)
struct stridxlist *list;
tree base;

if (!decl_to_stridxlist_htab.is_created ())
if (!decl_to_stridxlist_htab)
return 0;

base = get_addr_base_and_unit_offset (exp, &off);
@ -191,7 +191,7 @@ get_addr_stridx (tree exp)
return 0;

ent.base.from = base;
e = decl_to_stridxlist_htab.find_with_hash (&ent, DECL_UID (base));
e = decl_to_stridxlist_htab->find_with_hash (&ent, DECL_UID (base));
if (e == NULL)
return 0;

@ -279,14 +279,14 @@ addr_stridxptr (tree exp)
if (base == NULL_TREE || !DECL_P (base))
return NULL;

if (!decl_to_stridxlist_htab.is_created ())
if (!decl_to_stridxlist_htab)
{
decl_to_stridxlist_htab.create (64);
decl_to_stridxlist_htab = new hash_table<stridxlist_hasher> (64);
gcc_obstack_init (&stridx_obstack);
}
ent.base.from = base;
slot = decl_to_stridxlist_htab.find_slot_with_hash (&ent, DECL_UID (base),
INSERT);
slot = decl_to_stridxlist_htab->find_slot_with_hash (&ent, DECL_UID (base),
INSERT);
if (*slot)
{
int i;
@ -2106,10 +2106,11 @@ pass_strlen::execute (function *fun)

ssa_ver_to_stridx.release ();
free_alloc_pool (strinfo_pool);
if (decl_to_stridxlist_htab.is_created ())
if (decl_to_stridxlist_htab)
{
obstack_free (&stridx_obstack, NULL);
decl_to_stridxlist_htab.dispose ();
delete decl_to_stridxlist_htab;
decl_to_stridxlist_htab = NULL;
}
laststmt.stmt = NULL;
laststmt.len = NULL_TREE;

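For tables that were created lazily, the is_created () test becomes a plain null-pointer check, as in the tree-ssa-strlen.c hunks above. A minimal sketch of that variant, under the same assumptions (hypothetical hasher H, element type T):

  static hash_table<H> *tab;               /* starts out NULL instead of "not created"  */

  static void
  maybe_record (T *elt, hashval_t hash)
  {
    if (!tab)                              /* was: if (!tab.is_created ())  */
      tab = new hash_table<H> (64);        /* was: tab.create (64);  */
    *tab->find_slot_with_hash (elt, hash, INSERT) = elt;
  }

  static void
  cleanup (void)
  {
    if (tab)                               /* was: if (tab.is_created ())  */
      {
        delete tab;                        /* was: tab.dispose ();  */
        tab = NULL;
      }
  }
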
@ -1927,25 +1927,26 @@ equiv_class_hasher::equal (const value_type *eql1, const compare_type *eql2)

/* A hashtable for mapping a bitmap of labels->pointer equivalence
classes. */
static hash_table <equiv_class_hasher> pointer_equiv_class_table;
static hash_table<equiv_class_hasher> *pointer_equiv_class_table;

/* A hashtable for mapping a bitmap of labels->location equivalence
classes. */
static hash_table <equiv_class_hasher> location_equiv_class_table;
static hash_table<equiv_class_hasher> *location_equiv_class_table;

/* Lookup a equivalence class in TABLE by the bitmap of LABELS with
hash HAS it contains. Sets *REF_LABELS to the bitmap LABELS
is equivalent to. */

static equiv_class_label *
equiv_class_lookup_or_add (hash_table <equiv_class_hasher> table, bitmap labels)
equiv_class_lookup_or_add (hash_table<equiv_class_hasher> *table,
bitmap labels)
{
equiv_class_label **slot;
equiv_class_label ecl;

ecl.labels = labels;
ecl.hashcode = bitmap_hash (labels);
slot = table.find_slot_with_hash (&ecl, ecl.hashcode, INSERT);
slot = table->find_slot (&ecl, INSERT);
if (!*slot)
{
*slot = XNEW (struct equiv_class_label);
@ -2281,8 +2282,9 @@ perform_var_substitution (constraint_graph_t graph)
struct scc_info *si = init_scc_info (size);

bitmap_obstack_initialize (&iteration_obstack);
pointer_equiv_class_table.create (511);
location_equiv_class_table.create (511);
pointer_equiv_class_table = new hash_table<equiv_class_hasher> (511);
location_equiv_class_table
= new hash_table<equiv_class_hasher> (511);
pointer_equiv_class = 1;
location_equiv_class = 1;

@ -2415,8 +2417,10 @@ free_var_substitution_info (struct scc_info *si)
free (graph->points_to);
free (graph->eq_rep);
sbitmap_free (graph->direct_nodes);
pointer_equiv_class_table.dispose ();
location_equiv_class_table.dispose ();
delete pointer_equiv_class_table;
pointer_equiv_class_table = NULL;
delete location_equiv_class_table;
location_equiv_class_table = NULL;
bitmap_obstack_release (&iteration_obstack);
}

@ -5974,7 +5978,7 @@ shared_bitmap_hasher::equal (const value_type *sbi1, const compare_type *sbi2)

/* Shared_bitmap hashtable. */

static hash_table <shared_bitmap_hasher> shared_bitmap_table;
static hash_table<shared_bitmap_hasher> *shared_bitmap_table;

/* Lookup a bitmap in the shared bitmap hashtable, and return an already
existing instance if there is one, NULL otherwise. */
@ -5988,8 +5992,7 @@ shared_bitmap_lookup (bitmap pt_vars)
sbi.pt_vars = pt_vars;
sbi.hashcode = bitmap_hash (pt_vars);

slot = shared_bitmap_table.find_slot_with_hash (&sbi, sbi.hashcode,
NO_INSERT);
slot = shared_bitmap_table->find_slot (&sbi, NO_INSERT);
if (!slot)
return NULL;
else
@ -6008,7 +6011,7 @@ shared_bitmap_add (bitmap pt_vars)
sbi->pt_vars = pt_vars;
sbi->hashcode = bitmap_hash (pt_vars);

slot = shared_bitmap_table.find_slot_with_hash (sbi, sbi->hashcode, INSERT);
slot = shared_bitmap_table->find_slot (sbi, INSERT);
gcc_assert (!*slot);
*slot = sbi;
}
@ -6682,7 +6685,7 @@ init_alias_vars (void)
call_stmt_vars = pointer_map_create ();

memset (&stats, 0, sizeof (stats));
shared_bitmap_table.create (511);
shared_bitmap_table = new hash_table<shared_bitmap_hasher> (511);
init_base_vars ();

gcc_obstack_init (&fake_var_decl_obstack);
@ -6930,7 +6933,8 @@ delete_points_to_sets (void)
{
unsigned int i;

shared_bitmap_table.dispose ();
delete shared_bitmap_table;
shared_bitmap_table = NULL;
if (dump_file && (dump_flags & TDF_STATS))
fprintf (dump_file, "Points to sets created:%d\n",
stats.points_to_sets_created);

@ -643,7 +643,7 @@ same_succ_reset (same_succ same)
same->succ_flags.truncate (0);
}

static hash_table <same_succ_def> same_succ_htab;
static hash_table<same_succ_def> *same_succ_htab;

/* Array that is used to store the edge flags for a successor. */

@ -664,7 +664,7 @@ extern void debug_same_succ (void);
DEBUG_FUNCTION void
debug_same_succ ( void)
{
same_succ_htab.traverse <FILE *, ssa_same_succ_print_traverse> (stderr);
same_succ_htab->traverse <FILE *, ssa_same_succ_print_traverse> (stderr);
}


@ -731,7 +731,7 @@ find_same_succ_bb (basic_block bb, same_succ *same_p)

same->hashval = same_succ_hash (same);

slot = same_succ_htab.find_slot_with_hash (same, same->hashval, INSERT);
slot = same_succ_htab->find_slot_with_hash (same, same->hashval, INSERT);
if (*slot == NULL)
{
*slot = same;
@ -774,7 +774,7 @@ static void
init_worklist (void)
{
alloc_aux_for_blocks (sizeof (struct aux_bb_info));
same_succ_htab.create (n_basic_blocks_for_fn (cfun));
same_succ_htab = new hash_table<same_succ_def> (n_basic_blocks_for_fn (cfun));
same_succ_edge_flags = XCNEWVEC (int, last_basic_block_for_fn (cfun));
deleted_bbs = BITMAP_ALLOC (NULL);
deleted_bb_preds = BITMAP_ALLOC (NULL);
@ -794,7 +794,8 @@ static void
delete_worklist (void)
{
free_aux_for_blocks ();
same_succ_htab.dispose ();
delete same_succ_htab;
same_succ_htab = NULL;
XDELETEVEC (same_succ_edge_flags);
same_succ_edge_flags = NULL;
BITMAP_FREE (deleted_bbs);
@ -824,7 +825,7 @@ same_succ_flush_bb (basic_block bb)
same_succ same = BB_SAME_SUCC (bb);
BB_SAME_SUCC (bb) = NULL;
if (bitmap_single_bit_set_p (same->bbs))
same_succ_htab.remove_elt_with_hash (same, same->hashval);
same_succ_htab->remove_elt_with_hash (same, same->hashval);
else
bitmap_clear_bit (same->bbs, bb->index);
}
@ -1714,7 +1715,7 @@ tail_merge_optimize (unsigned int todo)

if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, "htab collision / search: %f\n",
same_succ_htab.collisions ());
same_succ_htab->collisions ());

if (nr_bbs_removed_total > 0)
{

@ -311,7 +311,7 @@ create_block_for_threading (basic_block bb,

/* Main data structure to hold information for duplicates of BB. */

static hash_table <redirection_data> redirection_data;
static hash_table<redirection_data> *redirection_data;

/* Given an outgoing edge E lookup and return its entry in our hash table.

@ -334,7 +334,7 @@ lookup_redirection_data (edge e, enum insert_option insert)
elt->dup_blocks[1] = NULL;
elt->incoming_edges = NULL;

slot = redirection_data.find_slot (elt, insert);
slot = redirection_data->find_slot (elt, insert);

/* This will only happen if INSERT is false and the entry is not
in the hash table. */
@ -850,7 +850,8 @@ thread_block_1 (basic_block bb, bool noloop_only, bool joiners)
use a hash table. For normal code there should be no noticeable
difference. However, if we have a block with a large number of
incoming and outgoing edges such linear searches can get expensive. */
redirection_data.create (EDGE_COUNT (bb->succs));
redirection_data
= new hash_table<struct redirection_data> (EDGE_COUNT (bb->succs));

/* If we thread the latch of the loop to its exit, the loop ceases to
exist. Make sure we do not restrict ourselves in order to preserve
@ -961,7 +962,7 @@ thread_block_1 (basic_block bb, bool noloop_only, bool joiners)
local_info.template_block = NULL;
local_info.bb = bb;
local_info.jumps_threaded = false;
redirection_data.traverse <ssa_local_info_t *, ssa_create_duplicates>
redirection_data->traverse <ssa_local_info_t *, ssa_create_duplicates>
(&local_info);

/* The template does not have an outgoing edge. Create that outgoing
@ -969,18 +970,19 @@ thread_block_1 (basic_block bb, bool noloop_only, bool joiners)

We do this after creating all the duplicates to avoid creating
unnecessary edges. */
redirection_data.traverse <ssa_local_info_t *, ssa_fixup_template_block>
redirection_data->traverse <ssa_local_info_t *, ssa_fixup_template_block>
(&local_info);

/* The hash table traversals above created the duplicate blocks (and the
statements within the duplicate blocks). This loop creates PHI nodes for
the duplicated blocks and redirects the incoming edges into BB to reach
the duplicates of BB. */
redirection_data.traverse <ssa_local_info_t *, ssa_redirect_edges>
redirection_data->traverse <ssa_local_info_t *, ssa_redirect_edges>
(&local_info);

/* Done with this block. Clear REDIRECTION_DATA. */
redirection_data.dispose ();
delete redirection_data;
redirection_data = NULL;

if (noloop_only
&& bb == bb->loop_father->header)

@ -321,7 +321,7 @@ val_ssa_equiv_hasher::remove (value_type *elt)
/* Global hash table implementing a mapping from invariant values
to a list of SSA_NAMEs which have the same value. We might be
able to reuse tree-vn for this code. */
static hash_table <val_ssa_equiv_hasher> val_ssa_equiv;
static hash_table<val_ssa_equiv_hasher> *val_ssa_equiv;

static void uncprop_into_successor_phis (basic_block);

@ -336,7 +336,7 @@ remove_equivalence (tree value)
an_equiv_elt.value = value;
an_equiv_elt.equivalences.create (0);

slot = val_ssa_equiv.find_slot (&an_equiv_elt, NO_INSERT);
slot = val_ssa_equiv->find_slot (&an_equiv_elt, NO_INSERT);

an_equiv_elt_p = *slot;
an_equiv_elt_p->equivalences.pop ();
@ -354,7 +354,7 @@ record_equiv (tree value, tree equivalence)
an_equiv_elt_p->value = value;
an_equiv_elt_p->equivalences.create (0);

slot = val_ssa_equiv.find_slot (an_equiv_elt_p, INSERT);
slot = val_ssa_equiv->find_slot (an_equiv_elt_p, INSERT);

if (*slot == NULL)
*slot = an_equiv_elt_p;
@ -446,7 +446,7 @@ uncprop_into_successor_phis (basic_block bb)
/* Lookup this argument's value in the hash table. */
an_equiv_elt.value = arg;
an_equiv_elt.equivalences.create (0);
slot = val_ssa_equiv.find_slot (&an_equiv_elt, NO_INSERT);
slot = val_ssa_equiv->find_slot (&an_equiv_elt, NO_INSERT);

if (slot)
{
@ -578,7 +578,7 @@ pass_uncprop::execute (function *fun)
associate_equivalences_with_edges ();

/* Create our global data structures. */
val_ssa_equiv.create (1024);
val_ssa_equiv = new hash_table<val_ssa_equiv_hasher> (1024);

/* We're going to do a dominator walk, so ensure that we have
dominance information. */
@ -590,7 +590,8 @@ pass_uncprop::execute (function *fun)

/* we just need to empty elements out of the hash table, and cleanup the
AUX field on the edges. */
val_ssa_equiv.dispose ();
delete val_ssa_equiv;
val_ssa_equiv = NULL;
FOR_EACH_BB_FN (bb, fun)
{
edge e;

@ -1068,7 +1068,7 @@ vect_peeling_hash_insert (loop_vec_info loop_vinfo, struct data_reference *dr,
bool supportable_dr_alignment = vect_supportable_dr_alignment (dr, true);

elem.npeel = npeel;
slot = LOOP_VINFO_PEELING_HTAB (loop_vinfo).find (&elem);
slot = LOOP_VINFO_PEELING_HTAB (loop_vinfo)->find (&elem);
if (slot)
slot->count++;
else
@ -1077,7 +1077,8 @@ vect_peeling_hash_insert (loop_vec_info loop_vinfo, struct data_reference *dr,
slot->npeel = npeel;
slot->dr = dr;
slot->count = 1;
new_slot = LOOP_VINFO_PEELING_HTAB (loop_vinfo).find_slot (slot, INSERT);
new_slot
= LOOP_VINFO_PEELING_HTAB (loop_vinfo)->find_slot (slot, INSERT);
*new_slot = slot;
}

@ -1197,15 +1198,15 @@ vect_peeling_hash_choose_best_peeling (loop_vec_info loop_vinfo,
res.inside_cost = INT_MAX;
res.outside_cost = INT_MAX;
LOOP_VINFO_PEELING_HTAB (loop_vinfo)
.traverse <_vect_peel_extended_info *,
vect_peeling_hash_get_lowest_cost> (&res);
->traverse <_vect_peel_extended_info *,
vect_peeling_hash_get_lowest_cost> (&res);
}
else
{
res.peel_info.count = 0;
LOOP_VINFO_PEELING_HTAB (loop_vinfo)
.traverse <_vect_peel_extended_info *,
vect_peeling_hash_get_most_frequent> (&res);
->traverse <_vect_peel_extended_info *,
vect_peeling_hash_get_most_frequent> (&res);
}

*npeel = res.peel_info.npeel;
@ -1397,8 +1398,9 @@ vect_enhance_data_refs_alignment (loop_vec_info loop_vinfo)
size_zero_node) < 0;

/* Save info about DR in the hash table. */
if (!LOOP_VINFO_PEELING_HTAB (loop_vinfo).is_created ())
LOOP_VINFO_PEELING_HTAB (loop_vinfo).create (1);
if (!LOOP_VINFO_PEELING_HTAB (loop_vinfo))
LOOP_VINFO_PEELING_HTAB (loop_vinfo)
= new hash_table<peel_info_hasher> (1);

vectype = STMT_VINFO_VECTYPE (stmt_info);
nelements = TYPE_VECTOR_SUBPARTS (vectype);

@ -1031,8 +1031,8 @@ destroy_loop_vec_info (loop_vec_info loop_vinfo, bool clean_stmts)
LOOP_VINFO_REDUCTIONS (loop_vinfo).release ();
LOOP_VINFO_REDUCTION_CHAINS (loop_vinfo).release ();

if (LOOP_VINFO_PEELING_HTAB (loop_vinfo).is_created ())
LOOP_VINFO_PEELING_HTAB (loop_vinfo).dispose ();
delete LOOP_VINFO_PEELING_HTAB (loop_vinfo);
LOOP_VINFO_PEELING_HTAB (loop_vinfo) = NULL;

destroy_cost_data (LOOP_VINFO_TARGET_COST_DATA (loop_vinfo));

|
@ -157,7 +157,7 @@ simd_array_to_simduid::equal (const value_type *p1, const value_type *p2)
|
||||
into their corresponding constants. */
|
||||
|
||||
static void
|
||||
adjust_simduid_builtins (hash_table <simduid_to_vf> &htab)
|
||||
adjust_simduid_builtins (hash_table<simduid_to_vf> **htab)
|
||||
{
|
||||
basic_block bb;
|
||||
|
||||
@ -189,8 +189,8 @@ adjust_simduid_builtins (hash_table <simduid_to_vf> &htab)
|
||||
gcc_assert (TREE_CODE (arg) == SSA_NAME);
|
||||
simduid_to_vf *p = NULL, data;
|
||||
data.simduid = DECL_UID (SSA_NAME_VAR (arg));
|
||||
if (htab.is_created ())
|
||||
p = htab.find (&data);
|
||||
if (*htab)
|
||||
p = (*htab)->find (&data);
|
||||
if (p)
|
||||
vf = p->vf;
|
||||
switch (ifn)
|
||||
@ -216,7 +216,7 @@ adjust_simduid_builtins (hash_table <simduid_to_vf> &htab)
|
||||
|
||||
struct note_simd_array_uses_struct
|
||||
{
|
||||
hash_table <simd_array_to_simduid> *htab;
|
||||
hash_table<simd_array_to_simduid> **htab;
|
||||
unsigned int simduid;
|
||||
};
|
||||
|
||||
@ -236,11 +236,11 @@ note_simd_array_uses_cb (tree *tp, int *walk_subtrees, void *data)
|
||||
&& DECL_CONTEXT (*tp) == current_function_decl)
|
||||
{
|
||||
simd_array_to_simduid data;
|
||||
if (!ns->htab->is_created ())
|
||||
ns->htab->create (15);
|
||||
if (!*ns->htab)
|
||||
*ns->htab = new hash_table<simd_array_to_simduid> (15);
|
||||
data.decl = *tp;
|
||||
data.simduid = ns->simduid;
|
||||
simd_array_to_simduid **slot = ns->htab->find_slot (&data, INSERT);
|
||||
simd_array_to_simduid **slot = (*ns->htab)->find_slot (&data, INSERT);
|
||||
if (*slot == NULL)
|
||||
{
|
||||
simd_array_to_simduid *p = XNEW (simd_array_to_simduid);
|
||||
@ -258,7 +258,7 @@ note_simd_array_uses_cb (tree *tp, int *walk_subtrees, void *data)
|
||||
simduid. */
|
||||
|
||||
static void
|
||||
note_simd_array_uses (hash_table <simd_array_to_simduid> *htab)
|
||||
note_simd_array_uses (hash_table<simd_array_to_simduid> **htab)
|
||||
{
|
||||
basic_block bb;
|
||||
gimple_stmt_iterator gsi;
|
||||
@ -389,8 +389,8 @@ vectorize_loops (void)
|
||||
unsigned int num_vectorized_loops = 0;
|
||||
unsigned int vect_loops_num;
|
||||
struct loop *loop;
|
||||
hash_table <simduid_to_vf> simduid_to_vf_htab;
|
||||
hash_table <simd_array_to_simduid> simd_array_to_simduid_htab;
|
||||
hash_table<simduid_to_vf> *simduid_to_vf_htab = NULL;
|
||||
hash_table<simd_array_to_simduid> *simd_array_to_simduid_htab = NULL;
|
||||
bool any_ifcvt_loops = false;
|
||||
unsigned ret = 0;
|
||||
|
||||
@ -400,7 +400,7 @@ vectorize_loops (void)
|
||||
if (vect_loops_num <= 1)
|
||||
{
|
||||
if (cfun->has_simduid_loops)
|
||||
adjust_simduid_builtins (simduid_to_vf_htab);
|
||||
adjust_simduid_builtins (&simduid_to_vf_htab);
|
||||
return 0;
|
||||
}
|
||||
|
||||
@ -484,11 +484,11 @@ vectorize_loops (void)
|
||||
if (loop->simduid)
|
||||
{
|
||||
simduid_to_vf *simduid_to_vf_data = XNEW (simduid_to_vf);
|
||||
if (!simduid_to_vf_htab.is_created ())
|
||||
simduid_to_vf_htab.create (15);
|
||||
if (!simduid_to_vf_htab)
|
||||
simduid_to_vf_htab = new hash_table<simduid_to_vf> (15);
|
||||
simduid_to_vf_data->simduid = DECL_UID (loop->simduid);
|
||||
simduid_to_vf_data->vf = loop_vinfo->vectorization_factor;
|
||||
*simduid_to_vf_htab.find_slot (simduid_to_vf_data, INSERT)
|
||||
*simduid_to_vf_htab->find_slot (simduid_to_vf_data, INSERT)
|
||||
= simduid_to_vf_data;
|
||||
}
|
||||
|
||||
@ -541,24 +541,24 @@ vectorize_loops (void)
|
||||
|
||||
/* Fold IFN_GOMP_SIMD_{VF,LANE,LAST_LANE} builtins. */
|
||||
if (cfun->has_simduid_loops)
|
||||
adjust_simduid_builtins (simduid_to_vf_htab);
|
||||
adjust_simduid_builtins (&simduid_to_vf_htab);
|
||||
|
||||
/* Shrink any "omp array simd" temporary arrays to the
|
||||
actual vectorization factors. */
|
||||
if (simd_array_to_simduid_htab.is_created ())
|
||||
if (simd_array_to_simduid_htab)
|
||||
{
|
||||
for (hash_table <simd_array_to_simduid>::iterator iter
|
||||
= simd_array_to_simduid_htab.begin ();
|
||||
iter != simd_array_to_simduid_htab.end (); ++iter)
|
||||
for (hash_table<simd_array_to_simduid>::iterator iter
|
||||
= simd_array_to_simduid_htab->begin ();
|
||||
iter != simd_array_to_simduid_htab->end (); ++iter)
|
||||
if ((*iter).simduid != -1U)
|
||||
{
|
||||
tree decl = (*iter).decl;
|
||||
int vf = 1;
|
||||
if (simduid_to_vf_htab.is_created ())
|
||||
if (simduid_to_vf_htab)
|
||||
{
|
||||
simduid_to_vf *p = NULL, data;
|
||||
data.simduid = (*iter).simduid;
|
||||
p = simduid_to_vf_htab.find (&data);
|
||||
p = simduid_to_vf_htab->find (&data);
|
||||
if (p)
|
||||
vf = p->vf;
|
||||
}
|
||||
@ -568,10 +568,10 @@ vectorize_loops (void)
|
||||
relayout_decl (decl);
|
||||
}
|
||||
|
||||
simd_array_to_simduid_htab.dispose ();
|
||||
delete simd_array_to_simduid_htab;
|
||||
}
|
||||
if (simduid_to_vf_htab.is_created ())
|
||||
simduid_to_vf_htab.dispose ();
|
||||
delete simduid_to_vf_htab;
|
||||
simduid_to_vf_htab = NULL;
|
||||
|
||||
if (num_vectorized_loops > 0)
|
||||
{
|
||||
|
@ -332,7 +332,7 @@ typedef struct _loop_vec_info {
vec<gimple> reduction_chains;

/* Hash table used to choose the best peeling option. */
hash_table <peel_info_hasher> peeling_htab;
hash_table<peel_info_hasher> *peeling_htab;

/* Cost data used by the target cost model. */
void *target_cost_data;
@ -214,6 +214,7 @@ void
dead_debug_global_init (struct dead_debug_global *debug, bitmap used)
{
debug->used = used;
debug->htab = NULL;
if (used)
bitmap_clear (used);
}
@ -250,7 +251,7 @@ dead_debug_global_find (struct dead_debug_global *global, rtx reg)
dead_debug_global_entry temp_entry;
temp_entry.reg = reg;

dead_debug_global_entry *entry = global->htab.find (&temp_entry);
dead_debug_global_entry *entry = global->htab->find (&temp_entry);
gcc_checking_assert (entry && entry->reg == temp_entry.reg);

return entry;
@ -265,10 +266,11 @@ dead_debug_global_insert (struct dead_debug_global *global, rtx reg, rtx dtemp)
temp_entry.reg = reg;
temp_entry.dtemp = dtemp;

if (!global->htab.is_created ())
global->htab.create (31);
if (!global->htab)
global->htab = new hash_table<dead_debug_hash_descr> (31);

dead_debug_global_entry **slot = global->htab.find_slot (&temp_entry, INSERT);
dead_debug_global_entry **slot = global->htab->find_slot (&temp_entry,
INSERT);
gcc_checking_assert (!*slot);
*slot = XNEW (dead_debug_global_entry);
**slot = temp_entry;
@ -493,8 +495,8 @@ dead_debug_global_finish (struct dead_debug_global *global, bitmap used)
if (global->used != used)
BITMAP_FREE (global->used);

if (global->htab.is_created ())
global->htab.dispose ();
delete global->htab;
global->htab = NULL;
}

/* Add USE to DEBUG, or substitute it right away if it's a pseudo in
@ -84,7 +84,7 @@ dead_debug_hash_descr::remove (value_type *p)
struct dead_debug_global
{
/* This hash table that maps pseudos to debug temps. */
hash_table <dead_debug_hash_descr> htab;
hash_table<dead_debug_hash_descr> *htab;
/* For each entry in htab, the bit corresponding to its REGNO will
be set. */
bitmap used;
@ -501,7 +501,7 @@ variable_hasher::remove (value_type *var)
variable_htab_free (var);
}

typedef hash_table <variable_hasher> variable_table_type;
typedef hash_table<variable_hasher> variable_table_type;
typedef variable_table_type::iterator variable_iterator_type;

/* Structure for passing some other parameters to function
@ -515,7 +515,7 @@ typedef struct emit_note_data_def
enum emit_note_where where;

/* The variables and values active at this point. */
variable_table_type vars;
variable_table_type *vars;
} emit_note_data;

/* Structure holding a refcounted hash table. If refcount > 1,
@ -526,7 +526,7 @@ typedef struct shared_hash_def
int refcount;

/* Actual hash table. */
variable_table_type htab;
variable_table_type *htab;
} *shared_hash;

/* Structure holding the IN or OUT set for a basic block. */
@ -589,7 +589,7 @@ static alloc_pool shared_hash_pool;
static alloc_pool loc_exp_dep_pool;

/* Changed variables, notes will be emitted for them. */
static variable_table_type changed_variables;
static variable_table_type *changed_variables;

/* Shall notes be emitted? */
static bool emit_notes;
@ -597,7 +597,7 @@ static bool emit_notes;
/* Values whose dynamic location lists have gone empty, but whose
cselib location lists are still usable. Use this to hold the
current location, the backlinks, etc, during emit_notes. */
static variable_table_type dropped_values;
static variable_table_type *dropped_values;

/* Empty shared hashtable. */
static shared_hash empty_shared_hash;
@ -635,7 +635,7 @@ static void attrs_list_union (attrs *, attrs);

static variable_def **unshare_variable (dataflow_set *set, variable_def **slot,
variable var, enum var_init_status);
static void vars_copy (variable_table_type, variable_table_type);
static void vars_copy (variable_table_type *, variable_table_type *);
static tree var_debug_decl (tree);
static void var_reg_set (dataflow_set *, rtx, enum var_init_status, rtx);
static void var_reg_delete_and_set (dataflow_set *, rtx, bool,
@ -652,7 +652,7 @@ static void dataflow_set_clear (dataflow_set *);
static void dataflow_set_copy (dataflow_set *, dataflow_set *);
static int variable_union_info_cmp_pos (const void *, const void *);
static void dataflow_set_union (dataflow_set *, dataflow_set *);
static location_chain find_loc_in_1pdv (rtx, variable, variable_table_type);
static location_chain find_loc_in_1pdv (rtx, variable, variable_table_type *);
static bool canon_value_cmp (rtx, rtx);
static int loc_cmp (rtx, rtx);
static bool variable_part_different_p (variable_part *, variable_part *);
@ -672,7 +672,7 @@ static bool vt_find_locations (void);

static void dump_attrs_list (attrs);
static void dump_var (variable);
static void dump_vars (variable_table_type);
static void dump_vars (variable_table_type *);
static void dump_dataflow_set (dataflow_set *);
static void dump_dataflow_sets (void);

@ -1582,7 +1582,7 @@ shared_hash_shared (shared_hash vars)

/* Return the hash table for VARS. */

static inline variable_table_type
static inline variable_table_type *
shared_hash_htab (shared_hash vars)
{
return vars->htab;
@ -1606,7 +1606,7 @@ shared_hash_unshare (shared_hash vars)
shared_hash new_vars = (shared_hash) pool_alloc (shared_hash_pool);
gcc_assert (vars->refcount > 1);
new_vars->refcount = 1;
new_vars->htab.create (vars->htab.elements () + 3);
new_vars->htab = new variable_table_type (vars->htab->elements () + 3);
vars_copy (new_vars->htab, vars->htab);
vars->refcount--;
return new_vars;
@ -1630,7 +1630,7 @@ shared_hash_destroy (shared_hash vars)
gcc_checking_assert (vars->refcount > 0);
if (--vars->refcount == 0)
{
vars->htab.dispose ();
delete vars->htab;
pool_free (shared_hash_pool, vars);
}
}
@ -1644,7 +1644,7 @@ shared_hash_find_slot_unshare_1 (shared_hash *pvars, decl_or_value dv,
{
if (shared_hash_shared (*pvars))
*pvars = shared_hash_unshare (*pvars);
return shared_hash_htab (*pvars).find_slot_with_hash (dv, dvhash, ins);
return shared_hash_htab (*pvars)->find_slot_with_hash (dv, dvhash, ins);
}

static inline variable_def **
@ -1661,9 +1661,9 @@ shared_hash_find_slot_unshare (shared_hash *pvars, decl_or_value dv,
static inline variable_def **
shared_hash_find_slot_1 (shared_hash vars, decl_or_value dv, hashval_t dvhash)
{
return shared_hash_htab (vars).find_slot_with_hash (dv, dvhash,
shared_hash_shared (vars)
? NO_INSERT : INSERT);
return shared_hash_htab (vars)->find_slot_with_hash (dv, dvhash,
shared_hash_shared (vars)
? NO_INSERT : INSERT);
}

static inline variable_def **
@ -1678,7 +1678,7 @@ static inline variable_def **
shared_hash_find_slot_noinsert_1 (shared_hash vars, decl_or_value dv,
hashval_t dvhash)
{
return shared_hash_htab (vars).find_slot_with_hash (dv, dvhash, NO_INSERT);
return shared_hash_htab (vars)->find_slot_with_hash (dv, dvhash, NO_INSERT);
}

static inline variable_def **
@ -1693,7 +1693,7 @@ shared_hash_find_slot_noinsert (shared_hash vars, decl_or_value dv)
static inline variable
shared_hash_find_1 (shared_hash vars, decl_or_value dv, hashval_t dvhash)
{
return shared_hash_htab (vars).find_with_hash (dv, dvhash);
return shared_hash_htab (vars)->find_with_hash (dv, dvhash);
}

static inline variable
@ -1790,8 +1790,9 @@ unshare_variable (dataflow_set *set, variable_def **slot, variable var,
if (var->in_changed_variables)
{
variable_def **cslot
= changed_variables.find_slot_with_hash (var->dv,
dv_htab_hash (var->dv), NO_INSERT);
= changed_variables->find_slot_with_hash (var->dv,
dv_htab_hash (var->dv),
NO_INSERT);
gcc_assert (*cslot == (void *) var);
var->in_changed_variables = false;
variable_htab_free (var);
@ -1804,16 +1805,17 @@ unshare_variable (dataflow_set *set, variable_def **slot, variable var,
/* Copy all variables from hash table SRC to hash table DST. */

static void
vars_copy (variable_table_type dst, variable_table_type src)
vars_copy (variable_table_type *dst, variable_table_type *src)
{
variable_iterator_type hi;
variable var;

FOR_EACH_HASH_TABLE_ELEMENT (src, var, variable, hi)
FOR_EACH_HASH_TABLE_ELEMENT (*src, var, variable, hi)
{
variable_def **dstp;
var->refcount++;
dstp = dst.find_slot_with_hash (var->dv, dv_htab_hash (var->dv), INSERT);
dstp = dst->find_slot_with_hash (var->dv, dv_htab_hash (var->dv),
INSERT);
*dstp = var;
}
}
@ -2324,7 +2326,7 @@ clobber_overlapping_mems (dataflow_set *set, rtx loc)

set->traversed_vars = set->vars;
shared_hash_htab (set->vars)
.traverse <overlapping_mems*, drop_overlapping_mem_locs> (&coms);
->traverse <overlapping_mems*, drop_overlapping_mem_locs> (&coms);
set->traversed_vars = NULL;
}

@ -3125,7 +3127,7 @@ dataflow_set_union (dataflow_set *dst, dataflow_set *src)
variable_iterator_type hi;
variable var;

FOR_EACH_HASH_TABLE_ELEMENT (shared_hash_htab (src->vars),
FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (src->vars),
var, variable, hi)
variable_union (var, dst);
}
@ -3189,7 +3191,7 @@ dv_changed_p (decl_or_value dv)
be in star-canonical form. */

static location_chain
find_loc_in_1pdv (rtx loc, variable var, variable_table_type vars)
find_loc_in_1pdv (rtx loc, variable var, variable_table_type *vars)
{
location_chain node;
enum rtx_code loc_code;
@ -3246,7 +3248,7 @@ find_loc_in_1pdv (rtx loc, variable var, variable_table_type vars)
gcc_checking_assert (!node->next);

dv = dv_from_value (node->loc);
rvar = vars.find_with_hash (dv, dv_htab_hash (dv));
rvar = vars->find_with_hash (dv, dv_htab_hash (dv));
return find_loc_in_1pdv (loc, rvar, vars);
}

@ -4226,14 +4228,14 @@ dataflow_set_merge (dataflow_set *dst, dataflow_set *src2)
variable_iterator_type hi;
variable var;

src1_elems = shared_hash_htab (src1->vars).elements ();
src2_elems = shared_hash_htab (src2->vars).elements ();
src1_elems = shared_hash_htab (src1->vars)->elements ();
src2_elems = shared_hash_htab (src2->vars)->elements ();
dataflow_set_init (dst);
dst->stack_adjust = cur.stack_adjust;
shared_hash_destroy (dst->vars);
dst->vars = (shared_hash) pool_alloc (shared_hash_pool);
dst->vars->refcount = 1;
dst->vars->htab.create (MAX (src1_elems, src2_elems));
dst->vars->htab = new variable_table_type (MAX (src1_elems, src2_elems));

for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
attrs_list_mpdv_union (&dst->regs[i], src1->regs[i], src2->regs[i]);
@ -4243,10 +4245,10 @@ dataflow_set_merge (dataflow_set *dst, dataflow_set *src2)
dsm.cur = src1;
dsm.src_onepart_cnt = 0;

FOR_EACH_HASH_TABLE_ELEMENT (shared_hash_htab (dsm.src->vars),
FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (dsm.src->vars),
var, variable, hi)
variable_merge_over_src (var, &dsm);
FOR_EACH_HASH_TABLE_ELEMENT (shared_hash_htab (dsm.cur->vars),
FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (dsm.cur->vars),
var, variable, hi)
variable_merge_over_cur (var, &dsm);

@ -4593,14 +4595,14 @@ dataflow_post_merge_adjust (dataflow_set *set, dataflow_set **permp)
dfpm.permp = permp;

shared_hash_htab (set->vars)
.traverse <dfset_post_merge*, variable_post_merge_new_vals> (&dfpm);
->traverse <dfset_post_merge*, variable_post_merge_new_vals> (&dfpm);
if (*permp)
shared_hash_htab ((*permp)->vars)
.traverse <dfset_post_merge*, variable_post_merge_perm_vals> (&dfpm);
->traverse <dfset_post_merge*, variable_post_merge_perm_vals> (&dfpm);
shared_hash_htab (set->vars)
.traverse <dataflow_set *, canonicalize_values_star> (set);
->traverse <dataflow_set *, canonicalize_values_star> (set);
shared_hash_htab (set->vars)
.traverse <dataflow_set *, canonicalize_vars_star> (set);
->traverse <dataflow_set *, canonicalize_vars_star> (set);
}

/* Return a node whose loc is a MEM that refers to EXPR in the
@ -4608,7 +4610,7 @@ dataflow_post_merge_adjust (dataflow_set *set, dataflow_set **permp)
any values recursively mentioned in the location lists. */

static location_chain
find_mem_expr_in_1pdv (tree expr, rtx val, variable_table_type vars)
find_mem_expr_in_1pdv (tree expr, rtx val, variable_table_type *vars)
{
location_chain node;
decl_or_value dv;
@ -4622,7 +4624,7 @@ find_mem_expr_in_1pdv (tree expr, rtx val, variable_table_type vars)
&& !VALUE_RECURSED_INTO (val));

dv = dv_from_value (val);
var = vars.find_with_hash (dv, dv_htab_hash (dv));
var = vars->find_with_hash (dv, dv_htab_hash (dv));

if (!var)
return NULL;
@ -4873,10 +4875,10 @@ dataflow_set_clear_at_call (dataflow_set *set)
{
set->traversed_vars = set->vars;
shared_hash_htab (set->vars)
.traverse <dataflow_set *, dataflow_set_preserve_mem_locs> (set);
->traverse <dataflow_set *, dataflow_set_preserve_mem_locs> (set);
set->traversed_vars = set->vars;
shared_hash_htab (set->vars)
.traverse <dataflow_set *, dataflow_set_remove_mem_locs> (set);
->traverse <dataflow_set *, dataflow_set_remove_mem_locs> (set);
set->traversed_vars = NULL;
}
}
@ -4981,15 +4983,15 @@ dataflow_set_different (dataflow_set *old_set, dataflow_set *new_set)
if (old_set->vars == new_set->vars)
return false;

if (shared_hash_htab (old_set->vars).elements ()
!= shared_hash_htab (new_set->vars).elements ())
if (shared_hash_htab (old_set->vars)->elements ()
!= shared_hash_htab (new_set->vars)->elements ())
return true;

FOR_EACH_HASH_TABLE_ELEMENT (shared_hash_htab (old_set->vars),
FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (old_set->vars),
var1, variable, hi)
{
variable_table_type htab = shared_hash_htab (new_set->vars);
variable var2 = htab.find_with_hash (var1->dv, dv_htab_hash (var1->dv));
variable_table_type *htab = shared_hash_htab (new_set->vars);
variable var2 = htab->find_with_hash (var1->dv, dv_htab_hash (var1->dv));
if (!var2)
{
if (dump_file && (dump_flags & TDF_DETAILS))
@ -6945,12 +6947,12 @@ compute_bb_dataflow (basic_block bb)

dataflow_set_equiv_regs (out);
shared_hash_htab (out->vars)
.traverse <dataflow_set *, canonicalize_values_mark> (out);
->traverse <dataflow_set *, canonicalize_values_mark> (out);
shared_hash_htab (out->vars)
.traverse <dataflow_set *, canonicalize_values_star> (out);
->traverse <dataflow_set *, canonicalize_values_star> (out);
#if ENABLE_CHECKING
shared_hash_htab (out->vars)
.traverse <dataflow_set *, canonicalize_loc_order_check> (out);
->traverse <dataflow_set *, canonicalize_loc_order_check> (out);
#endif
}
changed = dataflow_set_different (&old_out, out);
@ -7022,10 +7024,11 @@ vt_find_locations (void)
if (VTI (bb)->in.vars)
{
htabsz
-= shared_hash_htab (VTI (bb)->in.vars).size ()
+ shared_hash_htab (VTI (bb)->out.vars).size ();
oldinsz = shared_hash_htab (VTI (bb)->in.vars).elements ();
oldoutsz = shared_hash_htab (VTI (bb)->out.vars).elements ();
-= shared_hash_htab (VTI (bb)->in.vars)->size ()
+ shared_hash_htab (VTI (bb)->out.vars)->size ();
oldinsz = shared_hash_htab (VTI (bb)->in.vars)->elements ();
oldoutsz
= shared_hash_htab (VTI (bb)->out.vars)->elements ();
}
else
oldinsz = oldoutsz = 0;
@ -7064,8 +7067,8 @@ vt_find_locations (void)
/* Merge and merge_adjust should keep entries in
canonical order. */
shared_hash_htab (in->vars)
.traverse <dataflow_set *,
canonicalize_loc_order_check> (in);
->traverse <dataflow_set *,
canonicalize_loc_order_check> (in);
#endif
if (dst_can_be_shared)
{
@ -7085,8 +7088,8 @@ vt_find_locations (void)
}

changed = compute_bb_dataflow (bb);
htabsz += shared_hash_htab (VTI (bb)->in.vars).size ()
+ shared_hash_htab (VTI (bb)->out.vars).size ();
htabsz += shared_hash_htab (VTI (bb)->in.vars)->size ()
+ shared_hash_htab (VTI (bb)->out.vars)->size ();

if (htabmax && htabsz > htabmax)
{
@ -7133,9 +7136,9 @@ vt_find_locations (void)
fprintf (dump_file,
"BB %i: in %i (was %i), out %i (was %i), rem %i + %i, tsz %i\n",
bb->index,
(int)shared_hash_htab (VTI (bb)->in.vars).size (),
(int)shared_hash_htab (VTI (bb)->in.vars)->size (),
oldinsz,
(int)shared_hash_htab (VTI (bb)->out.vars).size (),
(int)shared_hash_htab (VTI (bb)->out.vars)->size (),
oldoutsz,
(int)worklist->nodes, (int)pending->nodes, htabsz);

@ -7242,12 +7245,12 @@ dump_var (variable var)
/* Print the information about variables from hash table VARS to dump file. */

static void
dump_vars (variable_table_type vars)
dump_vars (variable_table_type *vars)
{
if (vars.elements () > 0)
if (vars->elements () > 0)
{
fprintf (dump_file, "Variables:\n");
vars.traverse <void *, dump_var_tracking_slot> (NULL);
vars->traverse <void *, dump_var_tracking_slot> (NULL);
}
}

@ -7299,7 +7302,7 @@ variable_from_dropped (decl_or_value dv, enum insert_option insert)
variable empty_var;
onepart_enum_t onepart;

slot = dropped_values.find_slot_with_hash (dv, dv_htab_hash (dv), insert);
slot = dropped_values->find_slot_with_hash (dv, dv_htab_hash (dv), insert);

if (!slot)
return NULL;
@ -7370,7 +7373,7 @@ variable_was_changed (variable var, dataflow_set *set)
/* Remember this decl or VALUE has been added to changed_variables. */
set_dv_changed (var->dv, true);

slot = changed_variables.find_slot_with_hash (var->dv, hash, INSERT);
slot = changed_variables->find_slot_with_hash (var->dv, hash, INSERT);

if (*slot)
{
@ -7397,9 +7400,9 @@ variable_was_changed (variable var, dataflow_set *set)

if (onepart == ONEPART_VALUE || onepart == ONEPART_DEXPR)
{
dslot = dropped_values.find_slot_with_hash (var->dv,
dv_htab_hash (var->dv),
INSERT);
dslot = dropped_values->find_slot_with_hash (var->dv,
dv_htab_hash (var->dv),
INSERT);
empty_var = *dslot;

if (empty_var)
@ -7464,7 +7467,7 @@ variable_was_changed (variable var, dataflow_set *set)
if (shared_hash_shared (set->vars))
slot = shared_hash_find_slot_unshare (&set->vars, var->dv,
NO_INSERT);
shared_hash_htab (set->vars).clear_slot (slot);
shared_hash_htab (set->vars)->clear_slot (slot);
}
}
}
@ -7976,7 +7979,7 @@ delete_variable_part (dataflow_set *set, rtx loc, decl_or_value dv,
struct expand_loc_callback_data
{
/* The variables and values active at this point. */
variable_table_type vars;
variable_table_type *vars;

/* Stack of values and debug_exprs under expansion, and their
children. */
@ -8065,7 +8068,7 @@ loc_exp_dep_clear (variable var)
back-links in VARS. */

static void
loc_exp_insert_dep (variable var, rtx x, variable_table_type vars)
loc_exp_insert_dep (variable var, rtx x, variable_table_type *vars)
{
decl_or_value dv;
variable xvar;
@ -8075,7 +8078,7 @@ loc_exp_insert_dep (variable var, rtx x, variable_table_type vars)

/* ??? Build a vector of variables parallel to EXPANDING, to avoid
an additional look up? */
xvar = vars.find_with_hash (dv, dv_htab_hash (dv));
xvar = vars->find_with_hash (dv, dv_htab_hash (dv));

if (!xvar)
{
@ -8116,7 +8119,7 @@ loc_exp_insert_dep (variable var, rtx x, variable_table_type vars)

static bool
loc_exp_dep_set (variable var, rtx result, rtx *value, int count,
variable_table_type vars)
variable_table_type *vars)
{
bool pending_recursion = false;

@ -8145,7 +8148,7 @@ loc_exp_dep_set (variable var, rtx result, rtx *value, int count,
attempt to compute a current location. */

static void
notify_dependents_of_resolved_value (variable ivar, variable_table_type vars)
notify_dependents_of_resolved_value (variable ivar, variable_table_type *vars)
{
loc_exp_dep *led, *next;

@ -8183,7 +8186,7 @@ notify_dependents_of_resolved_value (variable ivar, variable_table_type vars)
continue;
}

var = vars.find_with_hash (dv, dv_htab_hash (dv));
var = vars->find_with_hash (dv, dv_htab_hash (dv));

if (!var)
var = variable_from_dropped (dv, NO_INSERT);
@ -8427,7 +8430,7 @@ vt_expand_loc_callback (rtx x, bitmap regs,
return NULL;
}

var = elcd->vars.find_with_hash (dv, dv_htab_hash (dv));
var = elcd->vars->find_with_hash (dv, dv_htab_hash (dv));

if (!var)
{
@ -8534,7 +8537,7 @@ resolve_expansions_pending_recursion (vec<rtx, va_heap> *pending)
equivalences in VARS, updating their CUR_LOCs in the process. */

static rtx
vt_expand_loc (rtx loc, variable_table_type vars)
vt_expand_loc (rtx loc, variable_table_type *vars)
{
struct expand_loc_callback_data data;
rtx result;
@ -8556,7 +8559,7 @@ vt_expand_loc (rtx loc, variable_table_type vars)
in VARS, updating their CUR_LOCs in the process. */

static rtx
vt_expand_1pvar (variable var, variable_table_type vars)
vt_expand_1pvar (variable var, variable_table_type *vars)
{
struct expand_loc_callback_data data;
rtx loc;
@ -8587,7 +8590,7 @@ emit_note_insn_var_location (variable_def **varp, emit_note_data *data)
variable var = *varp;
rtx insn = data->insn;
enum emit_note_where where = data->where;
variable_table_type vars = data->vars;
variable_table_type *vars = data->vars;
rtx note, note_vl;
int i, j, n_var_parts;
bool complete;
@ -8802,7 +8805,7 @@ emit_note_insn_var_location (variable_def **varp, emit_note_data *data)
set_dv_changed (var->dv, false);
gcc_assert (var->in_changed_variables);
var->in_changed_variables = false;
changed_variables.clear_slot (varp);
changed_variables->clear_slot (varp);

/* Continue traversing the hash table. */
return 1;
@ -8834,11 +8837,11 @@ remove_value_from_changed_variables (rtx val)
variable_def **slot;
variable var;

slot = changed_variables.find_slot_with_hash (dv, dv_htab_hash (dv),
slot = changed_variables->find_slot_with_hash (dv, dv_htab_hash (dv),
NO_INSERT);
var = *slot;
var->in_changed_variables = false;
changed_variables.clear_slot (slot);
changed_variables->clear_slot (slot);
}

/* If VAL (a value or debug_expr) has backlinks to variables actively
@ -8847,7 +8850,7 @@ remove_value_from_changed_variables (rtx val)
have dependencies of their own to notify. */

static void
notify_dependents_of_changed_value (rtx val, variable_table_type htab,
notify_dependents_of_changed_value (rtx val, variable_table_type *htab,
vec<rtx, va_heap> *changed_values_stack)
{
variable_def **slot;
@ -8855,13 +8858,13 @@ notify_dependents_of_changed_value (rtx val, variable_table_type htab,
loc_exp_dep *led;
decl_or_value dv = dv_from_rtx (val);

slot = changed_variables.find_slot_with_hash (dv, dv_htab_hash (dv),
slot = changed_variables->find_slot_with_hash (dv, dv_htab_hash (dv),
NO_INSERT);
if (!slot)
slot = htab.find_slot_with_hash (dv, dv_htab_hash (dv), NO_INSERT);
slot = htab->find_slot_with_hash (dv, dv_htab_hash (dv), NO_INSERT);
if (!slot)
slot = dropped_values.find_slot_with_hash (dv, dv_htab_hash (dv),
NO_INSERT);
slot = dropped_values->find_slot_with_hash (dv, dv_htab_hash (dv),
NO_INSERT);
var = *slot;

while ((led = VAR_LOC_DEP_LST (var)))
@ -8892,14 +8895,14 @@ notify_dependents_of_changed_value (rtx val, variable_table_type htab,
break;

case ONEPART_VDECL:
ivar = htab.find_with_hash (ldv, dv_htab_hash (ldv));
ivar = htab->find_with_hash (ldv, dv_htab_hash (ldv));
gcc_checking_assert (!VAR_LOC_DEP_LST (ivar));
variable_was_changed (ivar, NULL);
break;

case NOT_ONEPART:
pool_free (loc_exp_dep_pool, led);
ivar = htab.find_with_hash (ldv, dv_htab_hash (ldv));
ivar = htab->find_with_hash (ldv, dv_htab_hash (ldv));
if (ivar)
{
int i = ivar->n_var_parts;
@ -8929,7 +8932,7 @@ notify_dependents_of_changed_value (rtx val, variable_table_type htab,
CHANGED_VARIABLES. */

static void
process_changed_values (variable_table_type htab)
process_changed_values (variable_table_type *htab)
{
int i, n;
rtx val;
@ -8937,7 +8940,7 @@ process_changed_values (variable_table_type htab)

/* Move values from changed_variables to changed_values_stack. */
changed_variables
.traverse <vec<rtx, va_heap>*, var_track_values_to_stack>
->traverse <vec<rtx, va_heap>*, var_track_values_to_stack>
(&changed_values_stack);

/* Back-propagate change notifications in values while popping
@ -8969,9 +8972,9 @@ emit_notes_for_changes (rtx insn, enum emit_note_where where,
shared_hash vars)
{
emit_note_data data;
variable_table_type htab = shared_hash_htab (vars);
variable_table_type *htab = shared_hash_htab (vars);

if (!changed_variables.elements ())
if (!changed_variables->elements ())
return;

if (MAY_HAVE_DEBUG_INSNS)
@ -8982,19 +8985,19 @@ emit_notes_for_changes (rtx insn, enum emit_note_where where,
data.vars = htab;

changed_variables
.traverse <emit_note_data*, emit_note_insn_var_location> (&data);
->traverse <emit_note_data*, emit_note_insn_var_location> (&data);
}

/* Add variable *SLOT to the chain CHANGED_VARIABLES if it differs from the
same variable in hash table DATA or is not there at all. */

int
emit_notes_for_differences_1 (variable_def **slot, variable_table_type new_vars)
emit_notes_for_differences_1 (variable_def **slot, variable_table_type *new_vars)
{
variable old_var, new_var;

old_var = *slot;
new_var = new_vars.find_with_hash (old_var->dv, dv_htab_hash (old_var->dv));
new_var = new_vars->find_with_hash (old_var->dv, dv_htab_hash (old_var->dv));

if (!new_var)
{
@ -9061,12 +9064,12 @@ emit_notes_for_differences_1 (variable_def **slot, variable_table_type new_vars)
table DATA. */

int
emit_notes_for_differences_2 (variable_def **slot, variable_table_type old_vars)
emit_notes_for_differences_2 (variable_def **slot, variable_table_type *old_vars)
{
variable old_var, new_var;

new_var = *slot;
old_var = old_vars.find_with_hash (new_var->dv, dv_htab_hash (new_var->dv));
old_var = old_vars->find_with_hash (new_var->dv, dv_htab_hash (new_var->dv));
if (!old_var)
{
int i;
@ -9087,10 +9090,10 @@ emit_notes_for_differences (rtx insn, dataflow_set *old_set,
dataflow_set *new_set)
{
shared_hash_htab (old_set->vars)
.traverse <variable_table_type, emit_notes_for_differences_1>
->traverse <variable_table_type *, emit_notes_for_differences_1>
(shared_hash_htab (new_set->vars));
shared_hash_htab (new_set->vars)
.traverse <variable_table_type, emit_notes_for_differences_2>
->traverse <variable_table_type *, emit_notes_for_differences_2>
(shared_hash_htab (old_set->vars));
emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, new_set->vars);
}
@ -9446,7 +9449,7 @@ vt_emit_notes (void)
basic_block bb;
dataflow_set cur;

gcc_assert (!changed_variables.elements ());
gcc_assert (!changed_variables->elements ());

/* Free memory occupied by the out hash tables, as they aren't used
anymore. */
@ -9459,7 +9462,7 @@ vt_emit_notes (void)

if (MAY_HAVE_DEBUG_INSNS)
{
dropped_values.create (cselib_get_next_uid () * 2);
dropped_values = new variable_table_type (cselib_get_next_uid () * 2);
loc_exp_dep_pool = create_alloc_pool ("loc_exp_dep pool",
sizeof (loc_exp_dep), 64);
}
@ -9488,13 +9491,14 @@ vt_emit_notes (void)
}
#ifdef ENABLE_CHECKING
shared_hash_htab (cur.vars)
.traverse <variable_table_type, emit_notes_for_differences_1>
->traverse <variable_table_type *, emit_notes_for_differences_1>
(shared_hash_htab (empty_shared_hash));
#endif
dataflow_set_destroy (&cur);

if (MAY_HAVE_DEBUG_INSNS)
dropped_values.dispose ();
delete dropped_values;
dropped_values = NULL;

emit_notes = false;
}
@ -9891,8 +9895,8 @@ vt_initialize (void)
sizeof (struct shared_hash_def), 256);
empty_shared_hash = (shared_hash) pool_alloc (shared_hash_pool);
empty_shared_hash->refcount = 1;
empty_shared_hash->htab.create (1);
changed_variables.create (10);
empty_shared_hash->htab = new variable_table_type (1);
changed_variables = new variable_table_type (10);

/* Init the IN and OUT sets. */
FOR_ALL_BB_FN (bb, cfun)
@ -10246,8 +10250,10 @@ vt_finalize (void)
}
}
free_aux_for_blocks ();
empty_shared_hash->htab.dispose ();
changed_variables.dispose ();
delete empty_shared_hash->htab;
empty_shared_hash->htab = NULL;
delete changed_variables;
changed_variables = NULL;
free_alloc_pool (attrs_pool);
free_alloc_pool (var_pool);
free_alloc_pool (loc_chain_pool);
@ -182,11 +182,10 @@ vtbl_map_node_registration_find (struct vtbl_map_node *node,
struct vtable_registration key;
struct vtable_registration **slot;

gcc_assert (node && node->registered.is_created ());
gcc_assert (node && node->registered);

key.vtable_decl = vtable_decl;
slot = (struct vtable_registration **) node->registered.find_slot (&key,
NO_INSERT);
slot = node->registered->find_slot (&key, NO_INSERT);

if (slot && (*slot))
{
@ -212,12 +211,11 @@ vtbl_map_node_registration_insert (struct vtbl_map_node *node,
struct vtable_registration **slot;
bool inserted_something = false;

if (!node || !node->registered.is_created ())
if (!node || !node->registered)
return false;

key.vtable_decl = vtable_decl;
slot = (struct vtable_registration **) node->registered.find_slot (&key,
INSERT);
slot = node->registered->find_slot (&key, INSERT);

if (! *slot)
{
@ -307,11 +305,11 @@ vtbl_map_hasher::equal (const value_type *p1, const compare_type *p2)
to find the nodes for various tasks (see comments in vtable-verify.h
for more details. */

typedef hash_table <vtbl_map_hasher> vtbl_map_table_type;
typedef hash_table<vtbl_map_hasher> vtbl_map_table_type;
typedef vtbl_map_table_type::iterator vtbl_map_iterator_type;

/* Vtable map variable nodes stored in a hash table. */
static vtbl_map_table_type vtbl_map_hash;
static vtbl_map_table_type *vtbl_map_hash;

/* Vtable map variable nodes stored in a vector. */
vec<struct vtbl_map_node *> vtbl_map_nodes_vec;
@ -328,7 +326,7 @@ vtbl_map_get_node (tree class_type)
tree class_name;
unsigned int type_quals;

if (!vtbl_map_hash.is_created ())
if (!vtbl_map_hash)
return NULL;

gcc_assert (TREE_CODE (class_type) == RECORD_TYPE);
@ -346,8 +344,7 @@ vtbl_map_get_node (tree class_type)
class_name = DECL_ASSEMBLER_NAME (class_type_decl);

key.class_name = class_name;
slot = (struct vtbl_map_node **) vtbl_map_hash.find_slot (&key,
NO_INSERT);
slot = (struct vtbl_map_node **) vtbl_map_hash->find_slot (&key, NO_INSERT);
if (!slot)
return NULL;
return *slot;
@ -365,8 +362,8 @@ find_or_create_vtbl_map_node (tree base_class_type)
tree class_type_decl;
unsigned int type_quals;

if (!vtbl_map_hash.is_created ())
vtbl_map_hash.create (10);
if (!vtbl_map_hash)
vtbl_map_hash = new vtbl_map_table_type (10);

/* Find the TYPE_DECL for the class. */
class_type_decl = TYPE_NAME (base_class_type);
@ -377,8 +374,7 @@ find_or_create_vtbl_map_node (tree base_class_type)

gcc_assert (HAS_DECL_ASSEMBLER_NAME_P (class_type_decl));
key.class_name = DECL_ASSEMBLER_NAME (class_type_decl);
slot = (struct vtbl_map_node **) vtbl_map_hash.find_slot (&key,
INSERT);
slot = (struct vtbl_map_node **) vtbl_map_hash->find_slot (&key, INSERT);

if (*slot)
return *slot;
@ -396,7 +392,7 @@ find_or_create_vtbl_map_node (tree base_class_type)
(node->class_info->parents).create (4);
(node->class_info->children).create (4);

node->registered.create (16);
node->registered = new register_table_type (16);

node->is_used = false;
@ -64,7 +64,7 @@ struct registration_hasher : typed_noop_remove <struct vtable_registration>
static inline bool equal (const value_type *, const compare_type *);
};

typedef hash_table <registration_hasher> register_table_type;
typedef hash_table<registration_hasher> register_table_type;
typedef register_table_type::iterator registration_iterator_type;

/* This struct is used to represent the class hierarchy information
@ -116,7 +116,7 @@ struct vtbl_map_node {
variable. */
struct vtbl_map_node *next, *prev; /* Pointers for the linked list
structure. */
register_table_type registered; /* Hashtable of vtable pointers for which
register_table_type *registered; /* Hashtable of vtable pointers for which
we have generated a _VLTRegisterPair
call with this vtable map variable. */
bool is_used; /* Boolean indicating if we used this vtable map