tree-ssa-operands.h (ssa_call_clobbered_cache_valid): Remove.

2006-01-16  Daniel Berlin  <dberlin@dberlin.org>

	* tree-ssa-operands.h (ssa_call_clobbered_cache_valid): Remove.
	(ssa_ro_call_cache_valid): Ditto.
	* tree-ssa-alias.c (sort_tags_by_id): New function.
	(init_transitive_clobber_worklist): Ditto.
	(add_to_worklist): Ditto.
	(mark_aliases_call_clobbered): Ditto.
	(compute_tag_properties): Ditto.
	(set_initial_properties): Ditto.
	(compute_call_clobbered): Ditto.
	(compute_may_aliases):	Call compute_call_clobbered and grouping.
	(compute_flow_sensitive_aliasing): Remove clobbering related code.
	(compute_flow_insensitive_aliasing): Grouping now happens in our
	caller.
	(setup_pointers_and_addressables): Remove clobbering related code.
	(add_may_alias): Ditto.
	(replace_may_alias): Ditto.
	(get_nmt_for): Ditto.
	(create_global_var): Mark global_var call clobbered.
	(is_escape_site): Return an escape_type enumeration.
	* tree-flow-inline.h (is_call_clobbered):  Global var does not
	imply call clobbered.
	(mark_call_clobbered): Take a reason for marking this. Remove
	marking of globalness, and cache invalidation.
	(clear_call_clobbered): Remove cache invalidation code.
	* tree-dfa.c (dump_variable): If details is on, dump the reason
	for escaping.
	* tree-outof-ssa.c (create_temp): Copy escape mask from original
	variable. 
	* tree-flow.h (struct ptr_info_def): Add escape mask member.
	(struct var_ann_d): Ditto.
	(enum escape_type): New.
	(mark_call_clobbered): Adjust prototype.
	* tree-ssa-structalias.c (update_alias_info): Unmodifiable vars
	are never call clobbered. 
	Record reasons for escaping.
	* tree-ssa-structalias.h (is_escape_site): Update prototype.
	* tree-ssa-operands.c (ssa_call_clobbered_cache_valid): Remove.
	(ssa_ro_call_cache_valid): Ditto.
	(clobbered_v_may_defs): Ditto.
	(clobbered_vuses): Ditto.
	(ro_call_vuses): Ditto.
	(clobber_stats): New.
	(init_ssa_operands): Zero out clobber stats.
	(fini_ssa_operands): Print out clobber stats.
	(get_call_expr_operands): Pass callee fndecl to
	add_call_read_ops.
	(add_call_clobber_ops): Remove use of cache.
	Add use of PURE_CONST information.
	(add_call_read_ops): Remove use of cache.
	Add use of static not_read information.

From-SVN: r109938
Committed by Daniel Berlin, 2006-01-19 01:42:48 +0000
commit d16a5e3669 (parent c8db7d5c17)
12 changed files with 573 additions and 225 deletions
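The core idea running through this patch is that a variable is no longer just
"call clobbered or not": every marking now carries a reason, and the reasons
are single bits OR-ed into a per-variable escape mask, so independent reasons
accumulate losslessly.  A minimal standalone sketch of that pattern follows;
the ESCAPE_* values mirror the enum the patch adds to tree-flow.h, but the
toy_var struct and the functions are invented for illustration and are not
GCC code.

#include <stdio.h>

enum escape_type
{
  NO_ESCAPE = 0,
  ESCAPE_STORED_IN_GLOBAL = 1 << 1,
  ESCAPE_TO_CALL = 1 << 3,
  ESCAPE_TO_RETURN = 1 << 5,
  ESCAPE_TO_PURE_CONST = 1 << 6
};

struct toy_var
{
  const char *name;
  unsigned int escape_mask;   /* OR of escape_type bits.  */
};

static void
toy_mark_call_clobbered (struct toy_var *v, unsigned int escape_type)
{
  v->escape_mask |= escape_type;   /* Reasons accumulate, never overwrite.  */
}

int
main (void)
{
  struct toy_var v = { "t", NO_ESCAPE };

  toy_mark_call_clobbered (&v, ESCAPE_TO_PURE_CONST);
  toy_mark_call_clobbered (&v, ESCAPE_TO_RETURN);

  /* Consumers can test each recorded reason independently.  */
  if (v.escape_mask & ESCAPE_TO_RETURN)
    printf ("%s is returned from the function\n", v.name);
  if (!(v.escape_mask & ESCAPE_STORED_IN_GLOBAL))
    printf ("%s was never stored in a global\n", v.name);
  return 0;
}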

gcc/ChangeLog

@@ -1,3 +1,56 @@
2006-01-18  Alexandre Oliva  <aoliva@redhat.com>

	Introduce TLS descriptors for i386 and x86_64.

gcc/testsuite/ChangeLog

@@ -1,3 +1,7 @@
2006-01-18  Daniel Berlin  <dberlin@dberlin.org>

	* gcc.dg/tree-ssa/pr24287.c: New test.

2006-01-18  Eric Christopher  <echristo@apple.com>

	* g++.dg/eh/table.C: New.

gcc/testsuite/gcc.dg/tree-ssa/pr24287.c (new file)

@@ -0,0 +1,25 @@
+/* { dg-do compile } */
+/* { dg-options "-O2 -fdump-tree-optimized" } */
+int g1(int);
+int h(int *a, int *b)__attribute__((pure));
+void link_error();
+
+/* The calls to link_error should be eliminated, since nothing escapes to
+   non-pure functions.  */
+int g(void)
+{
+  int t = 0, t1 = 2;
+  int t2 = h(&t, &t1);
+  if (t != 0)
+    link_error ();
+  if (t1 != 2)
+    link_error ();
+  g1(t2);
+  if (t != 0)
+    link_error ();
+  if (t1 != 2)
+    link_error ();
+  return t2 == 2;
+}
+/* { dg-final { scan-tree-dump-times "link_error" 0 "optimized"} } */
+/* { dg-final { cleanup-tree-dump "optimized" } } */
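For contrast, a sketch of the case the test's pure attribute guards against;
this fragment is not part of the commit, and g1p is an invented declaration.
Because &t escapes to a call that is neither pure nor const, the new escape
mask records ESCAPE_TO_CALL rather than ESCAPE_TO_PURE_CONST, the variable
stays call clobbered, and the link_error call below could not be removed:

/* Counterpart sketch, not part of the testsuite.  */
int g1p (int *);
void link_error (void);

int g_escapes (void)
{
  int t = 0;
  g1p (&t);        /* t escapes to a non-pure call.  */
  if (t != 0)      /* Compiler must assume t may have changed.  */
    link_error (); /* Not removable.  */
  return t;
}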

gcc/tree-dfa.c

@@ -363,7 +363,35 @@ dump_variable (FILE *file, tree var)
     fprintf (file, ", is volatile");
 
   if (is_call_clobbered (var))
-    fprintf (file, ", call clobbered");
+    {
+      fprintf (file, ", call clobbered");
+      if (dump_flags & TDF_DETAILS)
+        {
+          var_ann_t va = var_ann (var);
+          unsigned int escape_mask = va->escape_mask;
+
+          fprintf (file, " (");
+          if (escape_mask & ESCAPE_STORED_IN_GLOBAL)
+            fprintf (file, ", stored in global");
+          if (escape_mask & ESCAPE_TO_ASM)
+            fprintf (file, ", goes through ASM");
+          if (escape_mask & ESCAPE_TO_CALL)
+            fprintf (file, ", passed to call");
+          if (escape_mask & ESCAPE_BAD_CAST)
+            fprintf (file, ", bad cast");
+          if (escape_mask & ESCAPE_TO_RETURN)
+            fprintf (file, ", returned from func");
+          if (escape_mask & ESCAPE_TO_PURE_CONST)
+            fprintf (file, ", passed to pure/const");
+          if (escape_mask & ESCAPE_IS_GLOBAL)
+            fprintf (file, ", is global var");
+          if (escape_mask & ESCAPE_IS_PARM)
+            fprintf (file, ", is incoming pointer");
+          if (escape_mask & ESCAPE_UNKNOWN)
+            fprintf (file, ", unknown escape");
+          fprintf (file, " )");
+        }
+    }
 
   if (default_def (var))
     {
@@ -719,15 +747,11 @@ add_referenced_var (tree var, struct walk_state *walk_state)
       *slot = (void *) var;
       referenced_var_insert (DECL_UID (var), var);
 
-      /* Global variables are always call-clobbered.  */
-      if (is_global_var (var))
-        mark_call_clobbered (var);
-
       /* Tag's don't have DECL_INITIAL.  */
       if (MTAG_P (var))
        return;
 
       /* Scan DECL_INITIAL for pointer variables as they may contain
         address arithmetic referencing the address of other
         variables.  */

gcc/tree-flow-inline.h

@@ -843,34 +843,26 @@ loop_containing_stmt (tree stmt)
 static inline bool
 is_call_clobbered (tree var)
 {
-  return is_global_var (var)
-         || bitmap_bit_p (call_clobbered_vars, DECL_UID (var));
+  return bitmap_bit_p (call_clobbered_vars, DECL_UID (var));
 }
 
 /* Mark variable VAR as being clobbered by function calls.  */
 static inline void
-mark_call_clobbered (tree var)
+mark_call_clobbered (tree var, unsigned int escape_type)
 {
-  /* If VAR is a memory tag, then we need to consider it a global
-     variable.  This is because the pointer that VAR represents has
-     been found to point to either an arbitrary location or to a known
-     location in global memory.  */
-  if (MTAG_P (var) && TREE_CODE (var) != STRUCT_FIELD_TAG)
-    MTAG_GLOBAL (var) = 1;
+  var_ann (var)->escape_mask |= escape_type;
   bitmap_set_bit (call_clobbered_vars, DECL_UID (var));
-  ssa_call_clobbered_cache_valid = false;
-  ssa_ro_call_cache_valid = false;
 }
 
 /* Clear the call-clobbered attribute from variable VAR.  */
 static inline void
 clear_call_clobbered (tree var)
 {
+  var_ann_t ann = var_ann (var);
+  ann->escape_mask = 0;
   if (MTAG_P (var) && TREE_CODE (var) != STRUCT_FIELD_TAG)
     MTAG_GLOBAL (var) = 0;
   bitmap_clear_bit (call_clobbered_vars, DECL_UID (var));
-  ssa_call_clobbered_cache_valid = false;
-  ssa_ro_call_cache_valid = false;
 }
 
 /* Mark variable VAR as being non-addressable.  */
@@ -879,8 +871,6 @@ mark_non_addressable (tree var)
 {
   bitmap_clear_bit (call_clobbered_vars, DECL_UID (var));
   TREE_ADDRESSABLE (var) = 0;
-  ssa_call_clobbered_cache_valid = false;
-  ssa_ro_call_cache_valid = false;
 }
 
 /* Return the common annotation for T.  Return NULL if the annotation

gcc/tree-flow.h

@@ -92,6 +92,9 @@ struct ptr_info_def GTY(())
      pointer will be represented by this memory tag, instead of the type
      tag computed by TBAA.  */
   tree name_mem_tag;
+
+  /* Mask of reasons this pointer's value escapes the function.  */
+  unsigned int escape_mask;
 };
 
@@ -213,6 +216,10 @@ struct var_ann_d GTY(())
   /* If this variable is a structure, this fields holds a list of
      symbols representing each of the fields of the structure.  */
   subvar_t subvars;
+
+  /* Mask of values saying the reasons why this variable has escaped
+     the function.  */
+  unsigned int escape_mask;
 };
 
 struct function_ann_d GTY(())
@@ -751,9 +758,27 @@ enum move_pos
 };
 extern enum move_pos movement_possibility (tree);
 
+/* The reasons a variable may escape a function.  */
+enum escape_type
+{
+  NO_ESCAPE = 0,                     /* Doesn't escape.  */
+  ESCAPE_STORED_IN_GLOBAL = 1 << 1,
+  ESCAPE_TO_ASM = 1 << 2,            /* Passed by address to an assembly
+                                        statement.  */
+  ESCAPE_TO_CALL = 1 << 3,           /* Escapes to a function call.  */
+  ESCAPE_BAD_CAST = 1 << 4,          /* Cast from pointer to integer.  */
+  ESCAPE_TO_RETURN = 1 << 5,         /* Returned from function.  */
+  ESCAPE_TO_PURE_CONST = 1 << 6,     /* Escapes to a pure or constant
+                                        function call.  */
+  ESCAPE_IS_GLOBAL = 1 << 7,         /* Is a global variable.  */
+  ESCAPE_IS_PARM = 1 << 8,           /* Is an incoming function parameter.  */
+  ESCAPE_UNKNOWN = 1 << 9            /* We believe it escapes for some reason
+                                        not enumerated above.  */
+};
+
 /* In tree-flow-inline.h  */
 static inline bool is_call_clobbered (tree);
-static inline void mark_call_clobbered (tree);
+static inline void mark_call_clobbered (tree, unsigned int);
 static inline void set_is_used (tree);
 static inline bool unmodifiable_var_p (tree);
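Giving each reason its own bit is what makes the later pure/const test in
add_call_clobber_ops a single mask operation: "does this variable escape for
any reason other than being passed to pure/const calls?"  A small
self-contained sketch of that predicate, reusing the patch's bit assignments;
escapes_only_to_pure_const is an invented helper name, not GCC code.

#include <stdio.h>

#define ESCAPE_TO_CALL        (1 << 3)
#define ESCAPE_TO_PURE_CONST  (1 << 6)

static int
escapes_only_to_pure_const (unsigned int mask)
{
  /* Clear the pure/const bit; anything left is a "real" escape.  */
  return (mask & ~ESCAPE_TO_PURE_CONST) == 0;
}

int
main (void)
{
  unsigned int a = ESCAPE_TO_PURE_CONST;                   /* only pure/const */
  unsigned int b = ESCAPE_TO_PURE_CONST | ESCAPE_TO_CALL;  /* also a real call */

  printf ("a: %d  b: %d\n",
          escapes_only_to_pure_const (a),   /* 1: clobber avoidable */
          escapes_only_to_pure_const (b));  /* 0: must assume clobber */
  return 0;
}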

gcc/tree-outof-ssa.c

@@ -177,7 +177,7 @@ create_temp (tree t)
      inherit from our original variable.  */
   var_ann (tmp)->type_mem_tag = var_ann (t)->type_mem_tag;
   if (is_call_clobbered (t))
-    mark_call_clobbered (tmp);
+    mark_call_clobbered (tmp, var_ann (t)->escape_mask);
 
   return tmp;
 }

gcc/tree-ssa-alias.c

@@ -135,6 +135,287 @@ bitmap addressable_vars;
    having to keep track of too many V_MAY_DEF expressions at call sites.  */
 tree global_var;
 
+DEF_VEC_I(int);
+DEF_VEC_ALLOC_I(int,heap);
+
+/* qsort comparison function to sort type/name tags by DECL_UID.  */
+
+static int
+sort_tags_by_id (const void *pa, const void *pb)
+{
+  tree a = *(tree *)pa;
+  tree b = *(tree *)pb;
+
+  return DECL_UID (a) - DECL_UID (b);
+}
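This comparator is handed to qsort in compute_tag_properties below; comparing
by subtraction is safe here because DECL_UIDs are small non-negative integers,
so the difference cannot overflow.  The same comparator shape on a toy struct,
everything invented for illustration:

#include <stdio.h>
#include <stdlib.h>

struct toy_tag { unsigned int uid; };

static int
sort_by_uid (const void *pa, const void *pb)
{
  /* qsort hands us pointers to the array elements, which are themselves
     pointers, hence the double indirection.  */
  const struct toy_tag *a = *(const struct toy_tag * const *) pa;
  const struct toy_tag *b = *(const struct toy_tag * const *) pb;
  return (int) a->uid - (int) b->uid;
}

int
main (void)
{
  struct toy_tag t1 = { 42 }, t2 = { 7 }, t3 = { 19 };
  struct toy_tag *tags[] = { &t1, &t2, &t3 };

  qsort (tags, 3, sizeof (tags[0]), sort_by_uid);

  for (int i = 0; i < 3; i++)
    printf ("%u\n", tags[i]->uid);   /* prints 7, 19, 42 */
  return 0;
}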
+/* Initialize WORKLIST to contain those memory tags that are marked call
+   clobbered.  Initialize WORKLIST2 to contain the reasons these
+   memory tags escaped.  */
+
+static void
+init_transitive_clobber_worklist (VEC (tree, heap) **worklist,
+                                  VEC (int, heap) **worklist2)
+{
+  referenced_var_iterator rvi;
+  tree curr;
+
+  FOR_EACH_REFERENCED_VAR (curr, rvi)
+    {
+      if (MTAG_P (curr) && is_call_clobbered (curr))
+        {
+          VEC_safe_push (tree, heap, *worklist, curr);
+          VEC_safe_push (int, heap, *worklist2, var_ann (curr)->escape_mask);
+        }
+    }
+}
+
+/* Add ALIAS to WORKLIST (and the reason for escaping REASON to WORKLIST2) if
+   ALIAS is not already marked call clobbered, and is a memory
+   tag.  */
+
+static void
+add_to_worklist (tree alias, VEC (tree, heap) **worklist,
+                 VEC (int, heap) **worklist2,
+                 int reason)
+{
+  if (MTAG_P (alias) && !is_call_clobbered (alias))
+    {
+      VEC_safe_push (tree, heap, *worklist, alias);
+      VEC_safe_push (int, heap, *worklist2, reason);
+    }
+}
+
+/* Mark aliases of TAG as call clobbered, and place any tags on the
+   alias list that were not already call clobbered on WORKLIST.  */
+
+static void
+mark_aliases_call_clobbered (tree tag, VEC (tree, heap) **worklist,
+                             VEC (int, heap) **worklist2)
+{
+  unsigned int i;
+  VEC (tree, gc) *ma;
+  tree entry;
+  var_ann_t ta = var_ann (tag);
+
+  if (!MTAG_P (tag))
+    return;
+
+  ma = may_aliases (tag);
+  if (!ma)
+    return;
+
+  for (i = 0; VEC_iterate (tree, ma, i, entry); i++)
+    {
+      if (!unmodifiable_var_p (entry))
+        {
+          add_to_worklist (entry, worklist, worklist2, ta->escape_mask);
+          mark_call_clobbered (entry, ta->escape_mask);
+        }
+    }
+}
+
+/* Tags containing global vars need to be marked as global.
+   Tags containing call clobbered vars need to be marked as call
+   clobbered.  */
+
+static void
+compute_tag_properties (void)
+{
+  referenced_var_iterator rvi;
+  tree tag;
+  bool changed = true;
+  VEC (tree, heap) *taglist = NULL;
+
+  FOR_EACH_REFERENCED_VAR (tag, rvi)
+    {
+      if (!MTAG_P (tag) || TREE_CODE (tag) == STRUCT_FIELD_TAG)
+        continue;
+      VEC_safe_push (tree, heap, taglist, tag);
+    }
+
+  /* We sort the taglist by DECL_UID, for two reasons.
+     1. To get a sequential ordering to make the bitmap accesses
+     faster.
+     2. Because of the way we compute aliases, it's more likely that
+     an earlier tag is included in a later tag, and this will reduce
+     the number of iterations.
+
+     If we had a real tag graph, we would just topo-order it and be
+     done with it.  */
+  qsort (VEC_address (tree, taglist),
+         VEC_length (tree, taglist),
+         sizeof (tree),
+         sort_tags_by_id);
+
+  /* Go through each tag not marked as global, and if it aliases
+     global vars, mark it global.
+
+     If the tag contains call clobbered vars, mark it call
+     clobbered.
+
+     This loop iterates because tags may appear in the may-aliases
+     list of other tags when we group.  */
+
+  while (changed)
+    {
+      unsigned int k;
+
+      changed = false;
+      for (k = 0; VEC_iterate (tree, taglist, k, tag); k++)
+        {
+          VEC (tree, gc) *ma;
+          unsigned int i;
+          tree entry;
+          bool tagcc = is_call_clobbered (tag);
+          bool tagglobal = MTAG_GLOBAL (tag);
+
+          if (tagcc && tagglobal)
+            continue;
+
+          ma = may_aliases (tag);
+          if (!ma)
+            continue;
+
+          for (i = 0; VEC_iterate (tree, ma, i, entry); i++)
+            {
+              /* Call clobbered entries cause the tag to be marked
+                 call clobbered.  */
+              if (!tagcc && is_call_clobbered (entry))
+                {
+                  mark_call_clobbered (tag, var_ann (entry)->escape_mask);
+                  tagcc = true;
+                  changed = true;
+                }
+
+              /* Global vars cause the tag to be marked global.  */
+              if (!tagglobal && is_global_var (entry))
+                {
+                  MTAG_GLOBAL (tag) = true;
+                  changed = true;
+                  tagglobal = true;
+                }
+
+              /* Early exit once both global and cc are set, since the
+                 loop can't do any more than that.  */
+              if (tagcc && tagglobal)
+                break;
+            }
+        }
+    }
+  VEC_free (tree, heap, taglist);
+}
+
+/* Set up the initial variable clobbers and globalness.
+   When this function completes, only tags whose aliases need to be
+   clobbered will be set clobbered.  Tags clobbered because they
+   contain call clobbered vars are handled in compute_tag_properties.  */
+
+static void
+set_initial_properties (struct alias_info *ai)
+{
+  unsigned int i;
+  referenced_var_iterator rvi;
+  tree var;
+
+  FOR_EACH_REFERENCED_VAR (var, rvi)
+    {
+      if (is_global_var (var)
+          && (!var_can_have_subvars (var)
+              || get_subvars_for_var (var) == NULL))
+        {
+          if (!unmodifiable_var_p (var))
+            mark_call_clobbered (var, ESCAPE_IS_GLOBAL);
+        }
+      else if (TREE_CODE (var) == PARM_DECL
+               && default_def (var)
+               && POINTER_TYPE_P (TREE_TYPE (var)))
+        {
+          tree def = default_def (var);
+          get_ptr_info (def)->value_escapes_p = 1;
+          get_ptr_info (def)->escape_mask |= ESCAPE_IS_PARM;
+        }
+    }
+
+  for (i = 0; i < VARRAY_ACTIVE_SIZE (ai->processed_ptrs); i++)
+    {
+      tree ptr = VARRAY_TREE (ai->processed_ptrs, i);
+      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
+      var_ann_t v_ann = var_ann (SSA_NAME_VAR (ptr));
+
+      if (pi->value_escapes_p)
+        {
+          /* If PTR escapes then its associated memory tags and
+             pointed-to variables are call-clobbered.  */
+          if (pi->name_mem_tag)
+            mark_call_clobbered (pi->name_mem_tag, pi->escape_mask);
+
+          if (v_ann->type_mem_tag)
+            mark_call_clobbered (v_ann->type_mem_tag, pi->escape_mask);
+
+          if (pi->pt_vars)
+            {
+              bitmap_iterator bi;
+              unsigned int j;
+              EXECUTE_IF_SET_IN_BITMAP (pi->pt_vars, 0, j, bi)
+                if (!unmodifiable_var_p (referenced_var (j)))
+                  mark_call_clobbered (referenced_var (j), pi->escape_mask);
+            }
+        }
+
+      /* If the name tag is call clobbered, so is the type tag
+         associated with the base VAR_DECL.  */
+      if (pi->name_mem_tag
+          && v_ann->type_mem_tag
+          && is_call_clobbered (pi->name_mem_tag))
+        mark_call_clobbered (v_ann->type_mem_tag, pi->escape_mask);
+
+      /* Name tags and type tags that we don't know where they point
+         to, might point to global memory, and thus, are clobbered.
+
+         FIXME: This is not quite right.  They should only be
+         clobbered if value_escapes_p is true, regardless of whether
+         they point to global memory or not.
+         So removing this code and fixing all the bugs would be nice.
+         It is the cause of a bunch of clobbering.  */
+      if ((pi->pt_global_mem || pi->pt_anything)
+          && pi->is_dereferenced && pi->name_mem_tag)
+        {
+          mark_call_clobbered (pi->name_mem_tag, ESCAPE_IS_GLOBAL);
+          MTAG_GLOBAL (pi->name_mem_tag) = true;
+        }
+
+      if ((pi->pt_global_mem || pi->pt_anything)
+          && pi->is_dereferenced && v_ann->type_mem_tag)
+        {
+          mark_call_clobbered (v_ann->type_mem_tag, ESCAPE_IS_GLOBAL);
+          MTAG_GLOBAL (v_ann->type_mem_tag) = true;
+        }
+    }
+}
+
+/* Compute which variables need to be marked call clobbered because
+   their tag is call clobbered, and which tags need to be marked
+   global because they contain global variables.  */
+
+static void
+compute_call_clobbered (struct alias_info *ai)
+{
+  VEC (tree, heap) *worklist = NULL;
+  VEC (int, heap) *worklist2 = NULL;
+
+  set_initial_properties (ai);
+  init_transitive_clobber_worklist (&worklist, &worklist2);
+  while (VEC_length (tree, worklist) != 0)
+    {
+      tree curr = VEC_pop (tree, worklist);
+      int reason = VEC_pop (int, worklist2);
+
+      mark_call_clobbered (curr, reason);
+      mark_aliases_call_clobbered (curr, &worklist, &worklist2);
+    }
+  VEC_free (tree, heap, worklist);
+  VEC_free (int, heap, worklist2);
+  compute_tag_properties ();
+}
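compute_call_clobbered is a standard transitive-closure worklist: seed it with
the initially clobbered tags, then repeatedly pop a tag, clobber its
may-aliases with the same reason, and queue any alias not yet marked, with the
reason riding alongside in a second stack exactly as WORKLIST2 rides alongside
WORKLIST above.  A self-contained model of the loop follows; nodes stand in
for memory tags, edges for may-alias relations, and every name is invented
for the sketch:

#include <stdio.h>

#define MAX_NODES 8
#define MAX_EDGES 8

static int n_aliases[MAX_NODES];
static int aliases[MAX_NODES][MAX_EDGES];  /* may-alias adjacency lists.  */
static unsigned int mask[MAX_NODES];       /* escape reasons, 0 = clean.  */

static int worklist[MAX_NODES * MAX_EDGES];
static unsigned int worklist2[MAX_NODES * MAX_EDGES];
static int top;

static void
push (int node, unsigned int reason)
{
  worklist[top] = node;
  worklist2[top] = reason;
  top++;
}

int
main (void)
{
  /* Tag 0 aliases tags 1 and 2; tag 2 aliases tag 3.  */
  aliases[0][0] = 1; aliases[0][1] = 2; n_aliases[0] = 2;
  aliases[2][0] = 3; n_aliases[2] = 1;

  /* Tag 0 starts out clobbered, e.g. because it escaped to a call.  */
  mask[0] = 1 << 3;   /* ESCAPE_TO_CALL in the patch's numbering.  */
  push (0, mask[0]);

  /* Transitive closure: pop a clobbered tag, clobber its aliases with
     the same reason, and queue any alias not already clobbered.  */
  while (top > 0)
    {
      int curr, i;
      unsigned int reason;

      --top;
      curr = worklist[top];
      reason = worklist2[top];

      mask[curr] |= reason;
      for (i = 0; i < n_aliases[curr]; i++)
        {
          int a = aliases[curr][i];
          if (mask[a] == 0)
            push (a, reason);
        }
    }

  for (int i = 0; i < 4; i++)
    printf ("tag %d: mask 0x%x\n", i, mask[i]);  /* all become 0x8 */
  return 0;
}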
 /* Compute may-alias information for every variable referenced in function
    FNDECL.
 
@@ -277,6 +558,13 @@ compute_may_aliases (void)
      memory tags.  */
   compute_flow_insensitive_aliasing (ai);
 
+  /* Determine if we need to enable alias grouping.  */
+  if (ai->total_alias_vops >= MAX_ALIASED_VOPS)
+    group_aliases (ai);
+
+  /* Compute call clobbering information.  */
+  compute_call_clobbered (ai);
+
   /* If the program has too many call-clobbered variables and/or function
      calls, create .GLOBAL_VAR and use it to model call-clobbering
      semantics at call sites.  This reduces the number of virtual operands
@@ -703,20 +991,6 @@ compute_flow_sensitive_aliasing (struct alias_info *ai)
       var_ann_t v_ann = var_ann (SSA_NAME_VAR (ptr));
       bitmap_iterator bi;
 
-      if (pi->value_escapes_p || pi->pt_anything)
-        {
-          /* If PTR escapes or may point to anything, then its associated
-             memory tags and pointed-to variables are call-clobbered.  */
-          if (pi->name_mem_tag)
-            mark_call_clobbered (pi->name_mem_tag);
-
-          if (v_ann->type_mem_tag)
-            mark_call_clobbered (v_ann->type_mem_tag);
-
-          if (pi->pt_vars)
-            EXECUTE_IF_SET_IN_BITMAP (pi->pt_vars, 0, j, bi)
-              mark_call_clobbered (referenced_var (j));
-        }
 
       /* Set up aliasing information for PTR's name memory tag (if it has
          one).  Note that only pointers that have been dereferenced will
@@ -727,13 +1001,6 @@ compute_flow_sensitive_aliasing (struct alias_info *ai)
             add_may_alias (pi->name_mem_tag, referenced_var (j));
             add_may_alias (v_ann->type_mem_tag, referenced_var (j));
           }
-
-      /* If the name tag is call clobbered, so is the type tag
-         associated with the base VAR_DECL.  */
-      if (pi->name_mem_tag
-          && v_ann->type_mem_tag
-          && is_call_clobbered (pi->name_mem_tag))
-        mark_call_clobbered (v_ann->type_mem_tag);
     }
 }
 
@@ -897,10 +1164,6 @@ compute_flow_insensitive_aliasing (struct alias_info *ai)
     fprintf (dump_file, "\n%s: Total number of aliased vops: %ld\n",
              get_name (current_function_decl),
              ai->total_alias_vops);
-
-  /* Determine if we need to enable alias grouping.  */
-  if (ai->total_alias_vops >= MAX_ALIASED_VOPS)
-    group_aliases (ai);
 }
@@ -1308,12 +1571,6 @@ setup_pointers_and_addressables (struct alias_info *ai)
              if (bitmap_bit_p (ai->dereferenced_ptrs_store, DECL_UID (var)))
                bitmap_set_bit (ai->written_vars, DECL_UID (tag));
 
-             /* If pointer VAR is a global variable or a PARM_DECL,
-                then its memory tag should be considered a global
-                variable.  */
-             if (TREE_CODE (var) == PARM_DECL || is_global_var (var))
-               mark_call_clobbered (tag);
-
              /* All the dereferences of pointer VAR count as
                 references of TAG.  Since TAG can be associated with
                 several pointers, add the dereferences of VAR to the
@@ -1598,16 +1855,6 @@ add_may_alias (tree var, tree alias)
     if (alias == al)
       return;
 
-  /* If VAR is a call-clobbered variable, so is its new ALIAS.
-     FIXME, call-clobbering should only depend on whether an address
-     escapes.  It should be independent of aliasing.  */
-  if (is_call_clobbered (var))
-    mark_call_clobbered (alias);
-
-  /* Likewise.  If ALIAS is call-clobbered, so is VAR.  */
-  else if (is_call_clobbered (alias))
-    mark_call_clobbered (var);
-
   VEC_safe_push (tree, gc, v_ann->may_aliases, alias);
   a_ann->is_alias_tag = 1;
 }
@@ -1620,16 +1867,6 @@ replace_may_alias (tree var, size_t i, tree new_alias)
 {
   var_ann_t v_ann = var_ann (var);
   VEC_replace (tree, v_ann->may_aliases, i, new_alias);
-
-  /* If VAR is a call-clobbered variable, so is NEW_ALIAS.
-     FIXME, call-clobbering should only depend on whether an address
-     escapes.  It should be independent of aliasing.  */
-  if (is_call_clobbered (var))
-    mark_call_clobbered (new_alias);
-
-  /* Likewise.  If NEW_ALIAS is call-clobbered, so is VAR.  */
-  else if (is_call_clobbered (new_alias))
-    mark_call_clobbered (var);
 }
@@ -1663,9 +1900,12 @@ set_pt_anything (tree ptr)
    3- STMT is an assignment to a non-local variable, or
    4- STMT is a return statement.
 
-   AI points to the alias information collected so far.  */
+   AI points to the alias information collected so far.
 
-bool
+   Return the type of escape site found, if we found one, or NO_ESCAPE
+   if none.  */
+
+enum escape_type
 is_escape_site (tree stmt, struct alias_info *ai)
 {
   tree call = get_call_expr_in (stmt);
@@ -1674,12 +1914,15 @@ is_escape_site (tree stmt, struct alias_info *ai)
       ai->num_calls_found++;
 
       if (!TREE_SIDE_EFFECTS (call))
-        ai->num_pure_const_calls_found++;
+        {
+          ai->num_pure_const_calls_found++;
+          return ESCAPE_TO_PURE_CONST;
+        }
 
-      return true;
+      return ESCAPE_TO_CALL;
     }
   else if (TREE_CODE (stmt) == ASM_EXPR)
-    return true;
+    return ESCAPE_TO_ASM;
   else if (TREE_CODE (stmt) == MODIFY_EXPR)
     {
       tree lhs = TREE_OPERAND (stmt, 0);
@@ -1691,7 +1934,7 @@ is_escape_site (tree stmt, struct alias_info *ai)
       /* If we couldn't recognize the LHS of the assignment, assume that it
          is a non-local store.  */
       if (lhs == NULL_TREE)
-        return true;
+        return ESCAPE_UNKNOWN;
 
       /* If the RHS is a conversion between a pointer and an integer, the
         pointer escapes since we can't track the integer.  */
@@ -1701,12 +1944,12 @@ is_escape_site (tree stmt, struct alias_info *ai)
          && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND
                                        (TREE_OPERAND (stmt, 1), 0)))
          && !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (stmt, 1))))
-        return true;
+        return ESCAPE_BAD_CAST;
 
       /* If the LHS is an SSA name, it can't possibly represent a non-local
         memory store.  */
       if (TREE_CODE (lhs) == SSA_NAME)
-        return false;
+        return NO_ESCAPE;
 
       /* FIXME: LHS is not an SSA_NAME.  Even if it's an assignment to a
        local variables we cannot be sure if it will escape, because we
@@ -1717,12 +1960,12 @@ is_escape_site (tree stmt, struct alias_info *ai)
         Midkiff, ``Escape analysis for java,'' in Proceedings of the
         Conference on Object-Oriented Programming Systems, Languages, and
         Applications (OOPSLA), pp. 1-19, 1999.  */
-      return true;
+      return ESCAPE_STORED_IN_GLOBAL;
     }
   else if (TREE_CODE (stmt) == RETURN_EXPR)
-    return true;
+    return ESCAPE_TO_RETURN;
 
-  return false;
+  return NO_ESCAPE;
 }
/* Create a new memory tag of type TYPE.
@@ -1793,13 +2036,6 @@ get_nmt_for (tree ptr)
   if (tag == NULL_TREE)
     tag = create_memory_tag (TREE_TYPE (TREE_TYPE (ptr)), false);
 
-  /* If PTR is a PARM_DECL, it points to a global variable or malloc,
-     then its name tag should be considered a global variable.  */
-  if (TREE_CODE (SSA_NAME_VAR (ptr)) == PARM_DECL
-      || pi->pt_global_mem)
-    mark_call_clobbered (tag);
-
   return tag;
 }
 
@@ -1896,6 +2132,8 @@ create_global_var (void)
   TREE_THIS_VOLATILE (global_var) = 0;
   TREE_ADDRESSABLE (global_var) = 0;
 
   create_var_ann (global_var);
+  mark_call_clobbered (global_var, ESCAPE_UNKNOWN);
+
   add_referenced_tmp_var (global_var);
   mark_sym_for_renaming (global_var);
 }

gcc/tree-ssa-operands.c

@@ -119,14 +119,8 @@ static VEC(tree,heap) *build_vuses;
 /* Array for building all the v_must_def operands.  */
 static VEC(tree,heap) *build_v_must_defs;
 
-/* True if the operands for call clobbered vars are cached and valid.  */
-bool ssa_call_clobbered_cache_valid;
-bool ssa_ro_call_cache_valid;
-
-/* These arrays are the cached operand vectors for call clobbered calls.  */
-static VEC(tree,heap) *clobbered_v_may_defs;
-static VEC(tree,heap) *clobbered_vuses;
-static VEC(tree,heap) *ro_call_vuses;
-
 static bool ops_active = false;
 
 static GTY (()) struct ssa_operand_memory_d *operand_memory = NULL;
@@ -142,7 +136,7 @@ static inline void append_use (tree *);
 static void append_v_may_def (tree);
 static void append_v_must_def (tree);
 static void add_call_clobber_ops (tree, tree);
-static void add_call_read_ops (tree);
+static void add_call_read_ops (tree, tree);
 static void add_stmt_operand (tree *, stmt_ann_t, int);
 static void build_ssa_operands (tree stmt);
@@ -220,7 +214,34 @@ ssa_operands_active (void)
   return ops_active;
 }
 
+/* Structure storing statistics on how many call clobbers we have, and
+   how many were avoided.  */
+
+static struct
+{
+  /* Number of call-clobbered ops we attempt to add to calls in
+     add_call_clobber_ops.  */
+  unsigned int clobbered_vars;
+
+  /* Number of write-clobbers (v_may_defs) avoided by using
+     not_written information.  */
+  unsigned int static_write_clobbers_avoided;
+
+  /* Number of reads (vuses) avoided by using not_read
+     information.  */
+  unsigned int static_read_clobbers_avoided;
+
+  /* Number of write-clobbers avoided because the variable can't escape to
+     this call.  */
+  unsigned int unescapable_clobbers_avoided;
+
+  /* Number of readonly uses we attempt to add to calls in
+     add_call_read_ops.  */
+  unsigned int readonly_clobbers;
+
+  /* Number of readonly uses we avoid using not_read information.  */
+  unsigned int static_readonly_clobbers_avoided;
+} clobber_stats;
 /* Initialize the operand cache routines.  */
 
 void
@@ -235,6 +256,8 @@ init_ssa_operands (void)
   gcc_assert (operand_memory == NULL);
   operand_memory_index = SSA_OPERAND_MEMORY_SIZE;
   ops_active = true;
+  memset (&clobber_stats, 0, sizeof (clobber_stats));
 }
@@ -260,10 +283,17 @@ fini_ssa_operands (void)
       ggc_free (ptr);
     }
 
-  VEC_free (tree, heap, clobbered_v_may_defs);
-  VEC_free (tree, heap, clobbered_vuses);
-  VEC_free (tree, heap, ro_call_vuses);
   ops_active = false;
+
+  if (dump_file && (dump_flags & TDF_STATS))
+    {
+      fprintf (dump_file, "Original clobbered vars:%d\n", clobber_stats.clobbered_vars);
+      fprintf (dump_file, "Static write clobbers avoided:%d\n", clobber_stats.static_write_clobbers_avoided);
+      fprintf (dump_file, "Static read clobbers avoided:%d\n", clobber_stats.static_read_clobbers_avoided);
+      fprintf (dump_file, "Unescapable clobbers avoided:%d\n", clobber_stats.unescapable_clobbers_avoided);
+      fprintf (dump_file, "Original readonly clobbers:%d\n", clobber_stats.readonly_clobbers);
+      fprintf (dump_file, "Static readonly clobbers avoided:%d\n", clobber_stats.static_readonly_clobbers_avoided);
+    }
 }
@@ -1528,7 +1558,7 @@ get_call_expr_operands (tree stmt, tree expr)
          && !(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
        add_call_clobber_ops (stmt, get_callee_fndecl (expr));
       else if (!(call_flags & ECF_CONST))
-       add_call_read_ops (stmt);
+       add_call_read_ops (stmt, get_callee_fndecl (expr));
     }
 
   /* Find uses in the called function.  */
@@ -1715,7 +1745,6 @@ add_to_addressable_set (tree ref, bitmap *addresses_taken)
     }
 }
 
-
 /* Add clobbering definitions for .GLOBAL_VAR or for each of the call
    clobbered variables in the function.  */
@@ -1723,12 +1752,10 @@ static void
 add_call_clobber_ops (tree stmt, tree callee)
 {
   unsigned u;
-  tree t;
   bitmap_iterator bi;
   stmt_ann_t s_ann = stmt_ann (stmt);
-  struct stmt_ann_d empty_ann;
   bitmap not_read_b, not_written_b;
 
   /* Functions that are not const, pure or never return may clobber
      call-clobbered variables.  */
   if (s_ann)
@@ -1742,100 +1769,67 @@ add_call_clobber_ops (tree stmt, tree callee)
       return;
     }
 
-  /* FIXME - if we have better information from the static vars
-     analysis, we need to make the cache call site specific.  This way
-     we can have the performance benefits even if we are doing good
-     optimization.  */
-
   /* Get info for local and module level statics.  There is a bit
      set for each static if the call being processed does not read
      or write that variable.  */
   not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;
   not_written_b = callee ? ipa_reference_get_not_written_global (callee) : NULL;
 
-  /* If cache is valid, copy the elements into the build vectors.  */
-  if (ssa_call_clobbered_cache_valid
-      && (!not_read_b || bitmap_empty_p (not_read_b))
-      && (!not_written_b || bitmap_empty_p (not_written_b)))
-    {
-      for (u = 0 ; u < VEC_length (tree, clobbered_vuses); u++)
-        {
-          t = VEC_index (tree, clobbered_vuses, u);
-          gcc_assert (TREE_CODE (t) != SSA_NAME);
-          var_ann (t)->in_vuse_list = 1;
-          VEC_safe_push (tree, heap, build_vuses, (tree)t);
-        }
-      for (u = 0; u < VEC_length (tree, clobbered_v_may_defs); u++)
-        {
-          t = VEC_index (tree, clobbered_v_may_defs, u);
-          gcc_assert (TREE_CODE (t) != SSA_NAME);
-          var_ann (t)->in_v_may_def_list = 1;
-          VEC_safe_push (tree, heap, build_v_may_defs, (tree)t);
-        }
-      return;
-    }
-
-  memset (&empty_ann, 0, sizeof (struct stmt_ann_d));
-
   /* Add a V_MAY_DEF operand for every call clobbered variable.  */
   EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, u, bi)
     {
-      tree var = referenced_var (u);
-      unsigned int uid = u;
-
-      if (unmodifiable_var_p (var))
-        add_stmt_operand (&var, &empty_ann, opf_none);
-      else
-        {
-          bool not_read;
-          bool not_written;
-
-          /* Not read and not written are computed on regular vars, not
-             subvars, so look at the parent var if this is an SFT.  */
-
-          if (TREE_CODE (var) == STRUCT_FIELD_TAG)
-            uid = DECL_UID (SFT_PARENT_VAR (var));
-
-          not_read =
-            not_read_b ? bitmap_bit_p (not_read_b, uid) : false;
-          not_written =
-            not_written_b ? bitmap_bit_p (not_written_b, uid) : false;
-
-          if (not_written)
-            {
-              if (!not_read)
-                add_stmt_operand (&var, &empty_ann, opf_none);
-            }
-          else
-            add_stmt_operand (&var, &empty_ann, opf_is_def);
-        }
+      tree var = referenced_var_lookup (u);
+      unsigned int escape_mask = var_ann (var)->escape_mask;
+      tree real_var = var;
+      bool not_read;
+      bool not_written;
+
+      /* Not read and not written are computed on regular vars, not
+         subvars, so look at the parent var if this is an SFT.  */
+      if (TREE_CODE (var) == STRUCT_FIELD_TAG)
+        real_var = SFT_PARENT_VAR (var);
+
+      not_read = not_read_b ? bitmap_bit_p (not_read_b,
+                                            DECL_UID (real_var)) : false;
+      not_written = not_written_b ? bitmap_bit_p (not_written_b,
+                                                  DECL_UID (real_var)) : false;
+      gcc_assert (!unmodifiable_var_p (var));
+
+      clobber_stats.clobbered_vars++;
+
+      /* See if this variable is really clobbered by this function.  */
+
+      /* Trivial case: Things escaping only to pure/const are not
+         clobbered by non-pure-const, and only read by pure/const.  */
+      if ((escape_mask & ~(ESCAPE_TO_PURE_CONST)) == 0)
+        {
+          tree call = get_call_expr_in (stmt);
+          if (call_expr_flags (call) & (ECF_CONST | ECF_PURE))
+            {
+              add_stmt_operand (&var, s_ann, opf_none);
+              clobber_stats.unescapable_clobbers_avoided++;
+              continue;
+            }
+          else
+            {
+              clobber_stats.unescapable_clobbers_avoided++;
+              continue;
+            }
+        }
+
+      if (not_written)
+        {
+          clobber_stats.static_write_clobbers_avoided++;
+          if (!not_read)
+            add_stmt_operand (&var, s_ann, opf_none);
+          else
+            clobber_stats.static_read_clobbers_avoided++;
+        }
+      else
+        add_stmt_operand (&var, s_ann, opf_is_def);
     }
-
-  if ((!not_read_b || bitmap_empty_p (not_read_b))
-      && (!not_written_b || bitmap_empty_p (not_written_b)))
-    {
-      /* Prepare empty cache vectors.  */
-      VEC_truncate (tree, clobbered_vuses, 0);
-      VEC_truncate (tree, clobbered_v_may_defs, 0);
-
-      /* Now fill the clobbered cache with the values that have been found.  */
-      for (u = 0; u < VEC_length (tree, build_vuses); u++)
-        VEC_safe_push (tree, heap, clobbered_vuses,
-                       VEC_index (tree, build_vuses, u));
-
-      gcc_assert (VEC_length (tree, build_vuses)
-                  == VEC_length (tree, clobbered_vuses));
-
-      for (u = 0; u < VEC_length (tree, build_v_may_defs); u++)
-        VEC_safe_push (tree, heap, clobbered_v_may_defs,
-                       VEC_index (tree, build_v_may_defs, u));
-
-      gcc_assert (VEC_length (tree, build_v_may_defs)
-                  == VEC_length (tree, clobbered_v_may_defs));
-
-      ssa_call_clobbered_cache_valid = true;
-    }
 }
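The rewritten loop above replaces the global operand cache with a per-call
decision driven by the variable's escape mask and the callee's IPA not_read /
not_written bits.  Stripped of GCC types, the not_read/not_written part
reduces to the decision table below; the enum, function, and names are
invented for the sketch and are not GCC code.

#include <stdbool.h>
#include <stdio.h>

enum op_kind { OP_NONE, OP_VUSE, OP_V_MAY_DEF };

static enum op_kind
clobber_op_for (bool not_read, bool not_written)
{
  if (not_written)
    {
      /* The callee never writes the variable: at worst a read remains.  */
      return not_read ? OP_NONE : OP_VUSE;
    }
  /* Otherwise we must assume a write: full clobber.  */
  return OP_V_MAY_DEF;
}

int
main (void)
{
  static const char *names[] = { "OP_NONE", "OP_VUSE", "OP_V_MAY_DEF" };
  for (int nr = 0; nr <= 1; nr++)
    for (int nw = 0; nw <= 1; nw++)
      printf ("not_read=%d not_written=%d -> %s\n",
              nr, nw, names[clobber_op_for (nr, nw)]);
  return 0;
}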
@@ -1843,13 +1837,12 @@ add_call_clobber_ops (tree stmt, tree callee)
    function.  */
 
 static void
-add_call_read_ops (tree stmt)
+add_call_read_ops (tree stmt, tree callee)
 {
   unsigned u;
-  tree t;
   bitmap_iterator bi;
   stmt_ann_t s_ann = stmt_ann (stmt);
-  struct stmt_ann_d empty_ann;
+  bitmap not_read_b;
 
   /* if the function is not pure, it may reference memory.  Add
      a VUSE for .GLOBAL_VAR if it has been created.  See add_referenced_var
@@ -1860,40 +1853,34 @@ add_call_read_ops (tree stmt)
       return;
     }
 
-  /* If cache is valid, copy the elements into the build vector.  */
-  if (ssa_ro_call_cache_valid)
-    {
-      for (u = 0; u < VEC_length (tree, ro_call_vuses); u++)
-        {
-          t = VEC_index (tree, ro_call_vuses, u);
-          gcc_assert (TREE_CODE (t) != SSA_NAME);
-          var_ann (t)->in_vuse_list = 1;
-          VEC_safe_push (tree, heap, build_vuses, (tree)t);
-        }
-      return;
-    }
-
-  memset (&empty_ann, 0, sizeof (struct stmt_ann_d));
+  not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;
 
   /* Add a VUSE for each call-clobbered variable.  */
   EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, u, bi)
     {
       tree var = referenced_var (u);
-      add_stmt_operand (&var, &empty_ann, opf_none | opf_non_specific);
+      tree real_var = var;
+      bool not_read;
+
+      clobber_stats.readonly_clobbers++;
+
+      /* Not read and not written are computed on regular vars, not
+         subvars, so look at the parent var if this is an SFT.  */
+      if (TREE_CODE (var) == STRUCT_FIELD_TAG)
+        real_var = SFT_PARENT_VAR (var);
+
+      not_read = not_read_b ? bitmap_bit_p (not_read_b,
+                                            DECL_UID (real_var)) : false;
+
+      if (not_read)
+        {
+          clobber_stats.static_readonly_clobbers_avoided++;
+          continue;
+        }
+
+      add_stmt_operand (&var, s_ann, opf_none | opf_non_specific);
     }
-
-  /* Prepare empty cache vectors.  */
-  VEC_truncate (tree, ro_call_vuses, 0);
-
-  /* Now fill the clobbered cache with the values that have been found.  */
-  for (u = 0; u < VEC_length (tree, build_vuses); u++)
-    VEC_safe_push (tree, heap, ro_call_vuses,
-                   VEC_index (tree, build_vuses, u));
-
-  gcc_assert (VEC_length (tree, build_vuses)
-              == VEC_length (tree, ro_call_vuses));
-
-  ssa_ro_call_cache_valid = true;
 }

gcc/tree-ssa-operands.h

@@ -165,9 +165,6 @@ extern void dump_immediate_uses_for (FILE *file, tree var);
 extern void debug_immediate_uses (void);
 extern void debug_immediate_uses_for (tree var);
 
-extern bool ssa_call_clobbered_cache_valid;
-extern bool ssa_ro_call_cache_valid;
-
 extern bool ssa_operands_active (void);
 extern void add_to_addressable_set (tree, bitmap *);

gcc/tree-ssa-structalias.c

@@ -2953,7 +2953,7 @@ update_alias_info (tree stmt, struct alias_info *ai)
   bitmap addr_taken;
   use_operand_p use_p;
   ssa_op_iter iter;
-  bool stmt_escapes_p = is_escape_site (stmt, ai);
+  enum escape_type stmt_escape_type = is_escape_site (stmt, ai);
   tree op;
 
   /* Mark all the variables whose address are taken by the statement.  */
@@ -2964,13 +2964,17 @@ update_alias_info (tree stmt, struct alias_info *ai)
 
       /* If STMT is an escape point, all the addresses taken by it are
          call-clobbered.  */
-      if (stmt_escapes_p)
+      if (stmt_escape_type != NO_ESCAPE)
        {
          bitmap_iterator bi;
          unsigned i;
 
          EXECUTE_IF_SET_IN_BITMAP (addr_taken, 0, i, bi)
-           mark_call_clobbered (referenced_var (i));
+           {
+             tree rvar = referenced_var (i);
+             if (!unmodifiable_var_p (rvar))
+               mark_call_clobbered (rvar, stmt_escape_type);
+           }
        }
     }
 
@@ -3094,13 +3098,14 @@ update_alias_info (tree stmt, struct alias_info *ai)
              bitmap_set_bit (ai->dereferenced_ptrs_load, DECL_UID (var));
            }
 
-         if (stmt_escapes_p && num_derefs < num_uses)
+         if (stmt_escape_type != NO_ESCAPE && num_derefs < num_uses)
            {
              /* If STMT is an escape point and STMT contains at
                 least one direct use of OP, then the value of OP
                 escapes and so the pointed-to variables need to
                 be marked call-clobbered.  */
              pi->value_escapes_p = 1;
+             pi->escape_mask |= stmt_escape_type;
 
              /* If the statement makes a function call, assume
                 that pointer OP will be dereferenced in a store
gcc/tree-ssa-structalias.h

@@ -80,7 +80,7 @@ struct alias_info
 #define NUM_REFERENCES_SET(ANN, VAL) (ANN)->common.aux = (void*) ((void *)(VAL))
 
 /* In tree-ssa-alias.c.  */
-bool is_escape_site (tree, struct alias_info *);
+enum escape_type is_escape_site (tree, struct alias_info *);
 
 /* In tree-ssa-structalias.c.  */
 extern void compute_points_to_sets (struct alias_info *);