mirror of
git://gcc.gnu.org/git/gcc.git
synced 2025-04-02 05:50:26 +08:00
tree-eh.c (tree_remove_unreachable_handlers): Handle shared labels.
* tree-eh.c (tree_remove_unreachable_handlers): Handle shared labels. (tree_empty_eh_handler_p): Allow non-EH predecessors; allow region to be reached by different label than left. (update_eh_edges): Update comment; remove edge_to_remove if possible and return true if succeeded. (cleanup_empty_eh): Accept sharing map; handle shared regions. (cleanup_eh): Compute sharing map. * except.c (remove_eh_handler_and_replace): Add argument if we should update regions. (remove_unreachable_regions): Update for label sharing. (label_to_region_map): Likewise. (get_next_region_sharing_label): New function. (remove_eh_handler_and_replace): Add update_catch_try parameter; update prev_try pointers. (remove_eh_handler): Update. (remove_eh_region_and_replace_by_outer_of): New function. * except.h (struct eh_region): Add next_region_sharing_label. (remove_eh_region_and_replace_by_outer_of, get_next_region_sharing_label): Declare. * tree-cfgcleanup.c (tree_forwarder_block_p): Simplify. * tree-cfg.c (split_critical_edges): Split also edges where we can't insert code even if they are not critical. * tree-cfg.c (gimple_can_merge_blocks_p): EH edges are unmergable. (gimple_can_remove_branch_p): EH edges won't remove branch by redirection. * tree-inline.c (update_ssa_across_abnormal_edges): Do handle updating of non-abnormal EH edges. * tree-cfg.c (gimple_can_merge_blocks_p): EH edges are unmergable. (gimple_can_remove_branch_p): EH edges are unremovable by redirection. (split_critical_edges): Split also edges where emitting code on them will lead to splitting later. From-SVN: r146763
This commit is contained in:
parent
bc2a4733e2
commit
496a4ef59d
@ -1,3 +1,43 @@
|
||||
2009-04-25 Jan Hubicka <jh@suse.cz>
|
||||
|
||||
* tree-eh.c (tree_remove_unreachable_handlers): Handle shared labels.
|
||||
(tree_empty_eh_handler_p): Allow non-EH predecessors; allow region
|
||||
to be reached by different label than left.
|
||||
(update_eh_edges): Update comment; remove edge_to_remove if possible
|
||||
and return true if succeeded.
|
||||
(cleanup_empty_eh): Accept sharing map; handle shared regions.
|
||||
(cleanup_eh): Compute sharing map.
|
||||
* except.c (remove_eh_handler_and_replace): Add argument if we should
|
||||
update regions.
|
||||
(remove_unreachable_regions): Update for label sharing.
|
||||
(label_to_region_map): Likewise.
|
||||
(get_next_region_sharing_label): New function.
|
||||
(remove_eh_handler_and_replace): Add update_catch_try parameter; update
|
||||
prev_try pointers.
|
||||
(remove_eh_handler): Update.
|
||||
(remove_eh_region_and_replace_by_outer_of): New function.
|
||||
* except.h (struct eh_region): Add next_region_sharing_label.
|
||||
(remove_eh_region_and_replace_by_outer_of,
|
||||
get_next_region_sharing_label): Declare.
|
||||
* tree-cfgcleanup.c (tree_forwarder_block_p): Simplify.
|
||||
|
||||
2009-04-25 Jan Hubicka <jh@suse.cz>
|
||||
|
||||
* tree-cfg.c (split_critical_edges): Split also edges where we can't
|
||||
insert code even if they are not critical.
|
||||
|
||||
2009-04-25 Jan Hubicka <jh@suse.cz>
|
||||
|
||||
* tree-cfg.c (gimple_can_merge_blocks_p): EH edges are unmergable.
|
||||
(gimple_can_remove_branch_p): EH edges won't remove branch by
|
||||
redirection.
|
||||
* tree-inline.c (update_ssa_across_abnormal_edges): Do handle
|
||||
updating of non-abnormal EH edges.
|
||||
* tree-cfg.c (gimple_can_merge_blocks_p): EH edges are unmergable.
|
||||
(gimple_can_remove_branch_p): EH edges are unremovable by redirection.
|
||||
(split_critical_edges): Split also edges where emitting code on them
|
||||
will lead to splitting later.
|
||||
|
||||
2009-04-25 Uros Bizjak <ubizjak@gmail.com>
|
||||
H.J. Lu <hongjiu.lu@intel.com>
|
||||
|
||||
|
83
gcc/except.c
83
gcc/except.c
@ -145,7 +145,7 @@ static void sjlj_build_landing_pads (void);
|
||||
|
||||
static void remove_eh_handler (struct eh_region *);
|
||||
static void remove_eh_handler_and_replace (struct eh_region *,
|
||||
struct eh_region *);
|
||||
struct eh_region *, bool);
|
||||
|
||||
/* The return value of reachable_next_level. */
|
||||
enum reachable_code
|
||||
@ -742,7 +742,7 @@ remove_unreachable_regions (sbitmap reachable, sbitmap contains_stmt)
|
||||
fprintf (dump_file, "Replacing MUST_NOT_THROW region %i by %i\n",
|
||||
r->region_number,
|
||||
first_must_not_throw->region_number);
|
||||
remove_eh_handler_and_replace (r, first_must_not_throw);
|
||||
remove_eh_handler_and_replace (r, first_must_not_throw, false);
|
||||
first_must_not_throw->may_contain_throw |= r->may_contain_throw;
|
||||
}
|
||||
else
|
||||
@ -757,11 +757,12 @@ remove_unreachable_regions (sbitmap reachable, sbitmap contains_stmt)
|
||||
/* Return array mapping LABEL_DECL_UID to region such that region's tree_label
|
||||
is identical to label. */
|
||||
|
||||
VEC(int,heap) *
|
||||
VEC (int, heap) *
|
||||
label_to_region_map (void)
|
||||
{
|
||||
VEC(int,heap) * label_to_region = NULL;
|
||||
VEC (int, heap) * label_to_region = NULL;
|
||||
int i;
|
||||
int idx;
|
||||
|
||||
VEC_safe_grow_cleared (int, heap, label_to_region,
|
||||
cfun->cfg->last_label_uid + 1);
|
||||
@ -769,8 +770,14 @@ label_to_region_map (void)
|
||||
{
|
||||
struct eh_region *r = VEC_index (eh_region, cfun->eh->region_array, i);
|
||||
if (r && r->region_number == i
|
||||
&& r->tree_label && LABEL_DECL_UID (r->tree_label) >= 0)
|
||||
&& r->tree_label && LABEL_DECL_UID (r->tree_label) >= 0)
|
||||
{
|
||||
if ((idx = VEC_index (int, label_to_region,
|
||||
LABEL_DECL_UID (r->tree_label))) != 0)
|
||||
r->next_region_sharing_label =
|
||||
VEC_index (eh_region, cfun->eh->region_array, idx);
|
||||
else
|
||||
r->next_region_sharing_label = NULL;
|
||||
VEC_replace (int, label_to_region, LABEL_DECL_UID (r->tree_label),
|
||||
i);
|
||||
}
|
||||
@ -785,6 +792,20 @@ num_eh_regions (void)
|
||||
return cfun->eh->last_region_number + 1;
|
||||
}
|
||||
|
||||
/* Return next region sharing same label as REGION. */
|
||||
|
||||
int
|
||||
get_next_region_sharing_label (int region)
|
||||
{
|
||||
struct eh_region *r;
|
||||
if (!region)
|
||||
return 0;
|
||||
r = VEC_index (eh_region, cfun->eh->region_array, region);
|
||||
if (!r || !r->next_region_sharing_label)
|
||||
return 0;
|
||||
return r->next_region_sharing_label->region_number;
|
||||
}
|
||||
|
||||
/* Set up EH labels for RTL. */
|
||||
|
||||
void
|
||||
@ -2161,16 +2182,46 @@ finish_eh_generation (void)
|
||||
|
||||
/* This section handles removing dead code for flow. */
|
||||
|
||||
/* Splice REGION from the region tree and replace it by REPLACE etc. */
|
||||
/* Splice REGION from the region tree and replace it by REPLACE etc.
|
||||
When UPDATE_CATCH_TRY is true mind updating links from catch to try
|
||||
region.*/
|
||||
|
||||
static void
|
||||
remove_eh_handler_and_replace (struct eh_region *region,
|
||||
struct eh_region *replace)
|
||||
struct eh_region *replace,
|
||||
bool update_catch_try)
|
||||
{
|
||||
struct eh_region **pp, **pp_start, *p, *outer, *inner;
|
||||
rtx lab;
|
||||
|
||||
outer = region->outer;
|
||||
|
||||
/* When we are moving the region in EH tree, update prev_try pointers. */
|
||||
if (outer != replace && region->inner)
|
||||
{
|
||||
struct eh_region *prev_try = find_prev_try (replace);
|
||||
p = region->inner;
|
||||
while (p != region)
|
||||
{
|
||||
if (p->type == ERT_CLEANUP)
|
||||
p->u.cleanup.prev_try = prev_try;
|
||||
if (p->type != ERT_TRY
|
||||
&& p->type != ERT_MUST_NOT_THROW
|
||||
&& (p->type != ERT_ALLOWED_EXCEPTIONS
|
||||
|| p->u.allowed.type_list)
|
||||
&& p->inner)
|
||||
p = p->inner;
|
||||
else if (p->next_peer)
|
||||
p = p->next_peer;
|
||||
else
|
||||
{
|
||||
while (p != region && !p->next_peer)
|
||||
p = p->outer;
|
||||
if (p != region)
|
||||
p = p->next_peer;
|
||||
}
|
||||
}
|
||||
}
|
||||
/* For the benefit of efficiently handling REG_EH_REGION notes,
|
||||
replace this region in the region array with its containing
|
||||
region. Note that previous region deletions may result in
|
||||
@ -2226,7 +2277,8 @@ remove_eh_handler_and_replace (struct eh_region *region,
|
||||
*pp_start = inner;
|
||||
}
|
||||
|
||||
if (region->type == ERT_CATCH)
|
||||
if (region->type == ERT_CATCH
|
||||
&& update_catch_try)
|
||||
{
|
||||
struct eh_region *eh_try, *next, *prev;
|
||||
|
||||
@ -2260,7 +2312,7 @@ remove_eh_handler_and_replace (struct eh_region *region,
|
||||
static void
|
||||
remove_eh_handler (struct eh_region *region)
|
||||
{
|
||||
remove_eh_handler_and_replace (region, region->outer);
|
||||
remove_eh_handler_and_replace (region, region->outer, true);
|
||||
}
|
||||
|
||||
/* Remove Eh region R that has turned out to have no code in its handler. */
|
||||
@ -2274,6 +2326,19 @@ remove_eh_region (int r)
|
||||
remove_eh_handler (region);
|
||||
}
|
||||
|
||||
/* Remove Eh region R that has turned out to have no code in its handler
|
||||
and replace in by R2. */
|
||||
|
||||
void
|
||||
remove_eh_region_and_replace_by_outer_of (int r, int r2)
|
||||
{
|
||||
struct eh_region *region, *region2;
|
||||
|
||||
region = VEC_index (eh_region, cfun->eh->region_array, r);
|
||||
region2 = VEC_index (eh_region, cfun->eh->region_array, r2);
|
||||
remove_eh_handler_and_replace (region, region2->outer, true);
|
||||
}
|
||||
|
||||
/* Invokes CALLBACK for every exception handler label. Only used by old
|
||||
loop hackery; should not be used by new code. */
|
||||
|
||||
|
@ -34,6 +34,9 @@ struct GTY(()) eh_region
|
||||
struct eh_region *inner;
|
||||
struct eh_region *next_peer;
|
||||
|
||||
/* List of regions sharing label. */
|
||||
struct eh_region *next_region_sharing_label;
|
||||
|
||||
/* An identifier for this region. */
|
||||
int region_number;
|
||||
|
||||
@ -155,7 +158,8 @@ extern void init_eh (void);
|
||||
extern void init_eh_for_function (void);
|
||||
|
||||
extern rtx reachable_handlers (rtx);
|
||||
void remove_eh_region (int);
|
||||
extern void remove_eh_region (int);
|
||||
extern void remove_eh_region_and_replace_by_outer_of (int, int);
|
||||
|
||||
extern void convert_from_eh_region_ranges (void);
|
||||
extern unsigned int convert_to_eh_region_ranges (void);
|
||||
@ -274,3 +278,4 @@ extern void set_eh_throw_stmt_table (struct function *, struct htab *);
|
||||
extern void remove_unreachable_regions (sbitmap, sbitmap);
|
||||
extern VEC(int,heap) * label_to_region_map (void);
|
||||
extern int num_eh_regions (void);
|
||||
extern int get_next_region_sharing_label (int);
|
||||
|
@ -1212,7 +1212,7 @@ gimple_can_merge_blocks_p (basic_block a, basic_block b)
|
||||
if (!single_succ_p (a))
|
||||
return false;
|
||||
|
||||
if (single_succ_edge (a)->flags & EDGE_ABNORMAL)
|
||||
if (single_succ_edge (a)->flags & (EDGE_ABNORMAL | EDGE_EH))
|
||||
return false;
|
||||
|
||||
if (single_succ (a) != b)
|
||||
@ -4892,7 +4892,7 @@ gimple_redirect_edge_and_branch (edge e, basic_block dest)
|
||||
static bool
|
||||
gimple_can_remove_branch_p (const_edge e)
|
||||
{
|
||||
if (e->flags & EDGE_ABNORMAL)
|
||||
if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
|
||||
return false;
|
||||
|
||||
return true;
|
||||
@ -6992,10 +6992,31 @@ split_critical_edges (void)
|
||||
FOR_ALL_BB (bb)
|
||||
{
|
||||
FOR_EACH_EDGE (e, ei, bb->succs)
|
||||
if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
|
||||
{
|
||||
{
|
||||
if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
|
||||
split_edge (e);
|
||||
}
|
||||
/* PRE inserts statements to edges and expects that
|
||||
since split_critical_edges was done beforehand, committing edge
|
||||
insertions will not split more edges. In addition to critical
|
||||
edges we must split edges that have multiple successors and
|
||||
end by control flow statements, such as RESX.
|
||||
Go ahead and split them too. This matches the logic in
|
||||
gimple_find_edge_insert_loc. */
|
||||
else if ((!single_pred_p (e->dest)
|
||||
|| phi_nodes (e->dest)
|
||||
|| e->dest == EXIT_BLOCK_PTR)
|
||||
&& e->src != ENTRY_BLOCK_PTR
|
||||
&& !(e->flags & EDGE_ABNORMAL))
|
||||
{
|
||||
gimple_stmt_iterator gsi;
|
||||
|
||||
gsi = gsi_last_bb (e->src);
|
||||
if (!gsi_end_p (gsi)
|
||||
&& stmt_ends_bb_p (gsi_stmt (gsi))
|
||||
&& gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN)
|
||||
split_edge (e);
|
||||
}
|
||||
}
|
||||
}
|
||||
end_recording_case_labels ();
|
||||
return 0;
|
||||
|
@ -221,9 +221,6 @@ static bool
|
||||
tree_forwarder_block_p (basic_block bb, bool phi_wanted)
|
||||
{
|
||||
gimple_stmt_iterator gsi;
|
||||
edge_iterator ei;
|
||||
edge e, succ;
|
||||
basic_block dest;
|
||||
|
||||
/* BB must have a single outgoing edge. */
|
||||
if (single_succ_p (bb) != 1
|
||||
@ -274,23 +271,6 @@ tree_forwarder_block_p (basic_block bb, bool phi_wanted)
|
||||
if (dest->loop_father->header == dest)
|
||||
return false;
|
||||
}
|
||||
|
||||
/* If we have an EH edge leaving this block, make sure that the
|
||||
destination of this block has only one predecessor. This ensures
|
||||
that we don't get into the situation where we try to remove two
|
||||
forwarders that go to the same basic block but are handlers for
|
||||
different EH regions. */
|
||||
succ = single_succ_edge (bb);
|
||||
dest = succ->dest;
|
||||
FOR_EACH_EDGE (e, ei, bb->preds)
|
||||
{
|
||||
if (e->flags & EDGE_EH)
|
||||
{
|
||||
if (!single_pred_p (dest))
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
|
124
gcc/tree-eh.c
124
gcc/tree-eh.c
@ -2695,8 +2695,11 @@ tree_remove_unreachable_handlers (void)
|
||||
if (gimple_code (stmt) == GIMPLE_LABEL && has_eh_preds)
|
||||
{
|
||||
int uid = LABEL_DECL_UID (gimple_label_label (stmt));
|
||||
int region = VEC_index (int, label_to_region, uid);
|
||||
SET_BIT (reachable, region);
|
||||
int region;
|
||||
|
||||
for (region = VEC_index (int, label_to_region, uid);
|
||||
region; region = get_next_region_sharing_label (region))
|
||||
SET_BIT (reachable, region);
|
||||
}
|
||||
if (gimple_code (stmt) == GIMPLE_RESX)
|
||||
SET_BIT (reachable, gimple_resx_region (stmt));
|
||||
@ -2743,8 +2746,11 @@ tree_empty_eh_handler_p (basic_block bb)
|
||||
{
|
||||
gimple_stmt_iterator gsi;
|
||||
int region;
|
||||
edge_iterator ei;
|
||||
edge e;
|
||||
use_operand_p imm_use;
|
||||
gimple use_stmt;
|
||||
bool found = false;
|
||||
|
||||
gsi = gsi_last_bb (bb);
|
||||
|
||||
@ -2815,15 +2821,17 @@ tree_empty_eh_handler_p (basic_block bb)
|
||||
if (gsi_end_p (gsi))
|
||||
return 0;
|
||||
}
|
||||
while (gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL)
|
||||
{
|
||||
if (gimple_label_label (gsi_stmt (gsi))
|
||||
== get_eh_region_no_tree_label (region))
|
||||
return region;
|
||||
gsi_prev (&gsi);
|
||||
if (gsi_end_p (gsi))
|
||||
return 0;
|
||||
}
|
||||
if (gimple_code (gsi_stmt (gsi)) != GIMPLE_LABEL)
|
||||
return 0;
|
||||
|
||||
/* Be sure that there is at least on EH region reaching the block directly.
|
||||
After EH edge redirection, it is possible that block is reached by one handler
|
||||
but resumed by different. */
|
||||
FOR_EACH_EDGE (e, ei, bb->preds)
|
||||
if ((e->flags & EDGE_EH))
|
||||
found = true;
|
||||
if (found)
|
||||
return region;
|
||||
return 0;
|
||||
}
|
||||
|
||||
@ -2955,9 +2963,12 @@ make_eh_edge_and_update_phi (struct eh_region *region, void *data)
|
||||
|
||||
/* Make EH edges corresponding to STMT while updating PHI nodes after removal
|
||||
empty cleanup BB_TO_REMOVE joined to BB containing STMT
|
||||
by EDGE_TO_REMOVE. */
|
||||
by EDGE_TO_REMOVE.
|
||||
|
||||
static void
|
||||
Return if EDGE_TO_REMOVE was really removed. It might stay reachable when
|
||||
not all EH regions are cleaned up. */
|
||||
|
||||
static bool
|
||||
update_eh_edges (gimple stmt, basic_block bb_to_remove, edge edge_to_remove)
|
||||
{
|
||||
int region_nr;
|
||||
@ -2967,6 +2978,7 @@ update_eh_edges (gimple stmt, basic_block bb_to_remove, edge edge_to_remove)
|
||||
edge_iterator ei;
|
||||
edge e;
|
||||
int probability_sum = 0;
|
||||
bool removed = false;
|
||||
|
||||
info.bb_to_remove = bb_to_remove;
|
||||
info.bb = gimple_bb (stmt);
|
||||
@ -2980,8 +2992,6 @@ update_eh_edges (gimple stmt, basic_block bb_to_remove, edge edge_to_remove)
|
||||
else
|
||||
{
|
||||
region_nr = lookup_stmt_eh_region (stmt);
|
||||
if (region_nr < 0)
|
||||
return;
|
||||
is_resx = false;
|
||||
inlinable = inlinable_call_p (stmt);
|
||||
}
|
||||
@ -2993,9 +3003,11 @@ update_eh_edges (gimple stmt, basic_block bb_to_remove, edge edge_to_remove)
|
||||
/* And remove edges we didn't marked. */
|
||||
for (ei = ei_start (info.bb->succs); (e = ei_safe_edge (ei)); )
|
||||
{
|
||||
if ((e->flags & EDGE_EH) && !e->aux && e != edge_to_remove)
|
||||
if ((e->flags & EDGE_EH) && !e->aux)
|
||||
{
|
||||
dominance_info_invalidated = true;
|
||||
if (e == edge_to_remove)
|
||||
removed = true;
|
||||
remove_edge (e);
|
||||
}
|
||||
else
|
||||
@ -3011,16 +3023,18 @@ update_eh_edges (gimple stmt, basic_block bb_to_remove, edge edge_to_remove)
|
||||
we get fewer consistency errors in the dumps. */
|
||||
if (is_resx && EDGE_COUNT (info.bb->succs) && !probability_sum)
|
||||
EDGE_SUCC (info.bb, 0)->probability = REG_BR_PROB_BASE;
|
||||
return removed;
|
||||
}
|
||||
|
||||
/* Look for basic blocks containing empty exception handler and remove them.
|
||||
This is similar to jump forwarding, just across EH edges. */
|
||||
|
||||
static bool
|
||||
cleanup_empty_eh (basic_block bb)
|
||||
cleanup_empty_eh (basic_block bb, VEC(int,heap) * label_to_region)
|
||||
{
|
||||
int region;
|
||||
gimple_stmt_iterator si;
|
||||
edge_iterator ei;
|
||||
|
||||
/* When handler of EH region winds up to be empty, we can safely
|
||||
remove it. This leads to inner EH regions to be redirected
|
||||
@ -3030,19 +3044,73 @@ cleanup_empty_eh (basic_block bb)
|
||||
&& all_phis_safe_to_merge (bb))
|
||||
{
|
||||
edge e;
|
||||
bool found = false, removed_some = false, has_non_eh_preds = false;
|
||||
gimple_stmt_iterator gsi;
|
||||
|
||||
remove_eh_region (region);
|
||||
/* Look for all EH regions sharing label of this block.
|
||||
If they are not same as REGION, remove them and replace them
|
||||
by outer region of REGION. Also note if REGION itself is one
|
||||
of them. */
|
||||
|
||||
while ((e = ei_safe_edge (ei_start (bb->preds))))
|
||||
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
|
||||
if (gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL)
|
||||
{
|
||||
int uid = LABEL_DECL_UID (gimple_label_label (gsi_stmt (gsi)));
|
||||
int r = VEC_index (int, label_to_region, uid);
|
||||
int next;
|
||||
|
||||
while (r)
|
||||
{
|
||||
next = get_next_region_sharing_label (r);
|
||||
if (r == region)
|
||||
found = true;
|
||||
else
|
||||
{
|
||||
removed_some = true;
|
||||
remove_eh_region_and_replace_by_outer_of (r, region);
|
||||
if (dump_file && (dump_flags & TDF_DETAILS))
|
||||
fprintf (dump_file, "Empty EH handler %i removed and "
|
||||
"replaced by %i\n", r, region);
|
||||
}
|
||||
r = next;
|
||||
}
|
||||
}
|
||||
else
|
||||
break;
|
||||
|
||||
gcc_assert (found || removed_some);
|
||||
FOR_EACH_EDGE (e, ei, bb->preds)
|
||||
if (!(e->flags & EDGE_EH))
|
||||
has_non_eh_preds = true;
|
||||
|
||||
/* When block is empty EH cleanup, but it is reachable via non-EH code too,
|
||||
we can not remove the region it is resumed via, because doing so will
|
||||
lead to redirection of its RESX edges.
|
||||
|
||||
This case will be handled later after edge forwarding if the EH cleanup
|
||||
is really dead. */
|
||||
|
||||
if (found && !has_non_eh_preds)
|
||||
remove_eh_region (region);
|
||||
else if (!removed_some)
|
||||
return false;
|
||||
|
||||
for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
|
||||
{
|
||||
basic_block src = e->src;
|
||||
gcc_assert (e->flags & EDGE_EH);
|
||||
if (!(e->flags & EDGE_EH))
|
||||
{
|
||||
ei_next (&ei);
|
||||
continue;
|
||||
}
|
||||
if (stmt_can_throw_internal (last_stmt (src)))
|
||||
update_eh_edges (last_stmt (src), bb, e);
|
||||
remove_edge (e);
|
||||
{
|
||||
if (!update_eh_edges (last_stmt (src), bb, e))
|
||||
ei_next (&ei);
|
||||
}
|
||||
else
|
||||
remove_edge (e);
|
||||
}
|
||||
if (dump_file)
|
||||
fprintf (dump_file, "Empty EH handler %i removed\n", region);
|
||||
|
||||
/* Verify that we eliminated all uses of PHI we are going to remove.
|
||||
If we didn't, rebuild SSA on affected variable (this is allowed only
|
||||
@ -3091,7 +3159,8 @@ cleanup_empty_eh (basic_block bb)
|
||||
}
|
||||
}
|
||||
}
|
||||
delete_basic_block (bb);
|
||||
if (!ei_safe_edge (ei_start (bb->preds)))
|
||||
delete_basic_block (bb);
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
@ -3111,6 +3180,7 @@ cleanup_eh (void)
|
||||
{
|
||||
bool changed = false;
|
||||
basic_block bb;
|
||||
VEC(int,heap) * label_to_region;
|
||||
int i;
|
||||
|
||||
if (!cfun->eh)
|
||||
@ -3123,14 +3193,16 @@ cleanup_eh (void)
|
||||
|
||||
if (optimize)
|
||||
{
|
||||
label_to_region = label_to_region_map ();
|
||||
dominance_info_invalidated = false;
|
||||
/* We cannot use FOR_EACH_BB, since the basic blocks may get removed. */
|
||||
for (i = NUM_FIXED_BLOCKS; i < last_basic_block; i++)
|
||||
{
|
||||
bb = BASIC_BLOCK (i);
|
||||
if (bb)
|
||||
changed |= cleanup_empty_eh (bb);
|
||||
changed |= cleanup_empty_eh (bb, label_to_region);
|
||||
}
|
||||
VEC_free (int, heap, label_to_region);
|
||||
if (dominance_info_invalidated)
|
||||
{
|
||||
free_dominance_info (CDI_DOMINATORS);
|
||||
|
@ -1608,8 +1608,6 @@ update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
|
||||
gimple phi;
|
||||
gimple_stmt_iterator si;
|
||||
|
||||
gcc_assert (e->flags & EDGE_ABNORMAL);
|
||||
|
||||
if (!nonlocal_goto)
|
||||
gcc_assert (e->flags & EDGE_EH);
|
||||
|
||||
@ -1625,7 +1623,8 @@ update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
|
||||
/* There shouldn't be any PHI nodes in the ENTRY_BLOCK. */
|
||||
gcc_assert (!e->dest->aux);
|
||||
|
||||
gcc_assert (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));
|
||||
gcc_assert ((e->flags & EDGE_EH)
|
||||
|| SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));
|
||||
|
||||
if (!is_gimple_reg (PHI_RESULT (phi)))
|
||||
{
|
||||
|
@ -449,10 +449,14 @@ simulate_block (basic_block block)
|
||||
simulate_stmt (stmt);
|
||||
}
|
||||
|
||||
/* We can not predict when abnormal edges will be executed, so
|
||||
/* We can not predict when abnormal and EH edges will be executed, so
|
||||
once a block is considered executable, we consider any
|
||||
outgoing abnormal edges as executable.
|
||||
|
||||
TODO: This is not exactly true. Simplifying statement might
|
||||
prove it non-throwing and also computed goto can be handled
|
||||
when destination is known.
|
||||
|
||||
At the same time, if this block has only one successor that is
|
||||
reached by non-abnormal edges, then add that successor to the
|
||||
worklist. */
|
||||
@ -460,7 +464,7 @@ simulate_block (basic_block block)
|
||||
normal_edge = NULL;
|
||||
FOR_EACH_EDGE (e, ei, block->succs)
|
||||
{
|
||||
if (e->flags & EDGE_ABNORMAL)
|
||||
if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
|
||||
add_control_edge (e);
|
||||
else
|
||||
{
|
||||
|
Loading…
x
Reference in New Issue
Block a user