cse.c (delete_trivially_dead_insns): Replace alloca with xmalloc/xcalloc.

* cse.c (delete_trivially_dead_insns): Replace alloca with
	xmalloc/xcalloc.
	* except.c (update_rethrow_references): Likewise.
	(init_eh_nesting_info): Likewise.
	* function.c (identify_blocks): Likewise.
	* gcse.c (dump_hash_table): Likewise.
	* graph.c (print_rtl_graph_with_bb): Likewise.
	* loop.c (combine_movables): Likewise.
	(move_movables): Likewise.
	(count_loop_regs_set): Likewise.
	(strength_reduce): Likewise.
	* profile.c (compute_branch_probabilities): New function, split
	out from ...
	(branch_prob): Here.  Replace alloca with xmalloc/xcalloc.
	* regclass.c (regclass): Likewise.
	* regmove.c (regmove_optimize): Likewise.
	* toplev.c (compile_file): Likewise.
	(main): Don't mess with the stack rlimit.

From-SVN: r30445
This commit is contained in:
Mark Mitchell 1999-11-08 04:56:18 +00:00 committed by Mark Mitchell
parent 8d17059017
commit 4da896b292
11 changed files with 389 additions and 322 deletions

View File

@@ -1,3 +1,24 @@
Sun Nov 7 20:55:14 1999 Mark Mitchell <mark@codesourcery.com>
* cse.c (delete_trivially_dead_insns): Replace alloca with
xmalloc/xcalloc.
* except.c (update_rethrow_references): Likewise.
(init_eh_nesting_info): Likewise.
* function.c (identify_blocks): Likewise.
* gcse.c (dump_hash_table): Likewise.
* graph.c (print_rtl_graph_with_bb): Likewise.
* loop.c (combine_movables): Likewise.
(move_movables): Likewise.
(count_loop_regs_set): Likewise.
(strength_reduce): Likewise.
* profile.c (compute_branch_probabilities): New function, split
out from ...
(branch_prob): Here. Replace alloca with xmalloc/xcalloc.
* regclass.c (regclass): Likewise.
* regmove.c (regmove_optimize): Likewise.
* toplev.c (compile_file): Likewise.
(main): Don't mess with the stack rlimit.
Sun Nov 7 19:41:17 1999 Catherine Moore <clm@cygnus.com>
* config/elfos.h (ASM_DECLARE_FUNCTION_NAME): Conditionally define.

View File

@@ -7364,7 +7364,7 @@ delete_trivially_dead_insns (insns, nreg)
rtx insns;
int nreg;
{
int *counts = (int *) alloca (nreg * sizeof (int));
int *counts;
rtx insn, prev;
#ifdef HAVE_cc0
rtx tem;
@@ -7373,7 +7373,7 @@ delete_trivially_dead_insns (insns, nreg)
int in_libcall = 0, dead_libcall = 0;
/* First count the number of times each register is used. */
bzero ((char *) counts, sizeof (int) * nreg);
counts = (int *) xcalloc (nreg, sizeof (int));
for (insn = next_real_insn (insns); insn; insn = next_real_insn (insn))
count_reg_usage (insn, counts, NULL_RTX, 1);
@@ -7508,4 +7508,7 @@ delete_trivially_dead_insns (insns, nreg)
dead_libcall = 0;
}
}
/* Clean up. */
free (counts);
}

View File

@ -2703,10 +2703,8 @@ update_rethrow_references ()
if (!flag_new_exceptions)
return;
saw_region = (int *) alloca (current_func_eh_entry * sizeof (int));
saw_rethrow = (int *) alloca (current_func_eh_entry * sizeof (int));
bzero ((char *) saw_region, (current_func_eh_entry * sizeof (int)));
bzero ((char *) saw_rethrow, (current_func_eh_entry * sizeof (int)));
saw_region = (int *) xcalloc (current_func_eh_entry, sizeof (int));
saw_rethrow = (int *) xcalloc (current_func_eh_entry, sizeof (int));
/* Determine what regions exist, and whether there are any rethrows
to those regions or not. */
@ -2735,6 +2733,10 @@ update_rethrow_references ()
for (x = 0; x < current_func_eh_entry; x++)
if (saw_region[x])
function_eh_regions[x].rethrow_ref = saw_rethrow[x];
/* Clean up. */
free (saw_region);
free (saw_rethrow);
}
/* Various hooks for the DWARF 2 __throw routine. */
@ -3155,9 +3157,7 @@ init_eh_nesting_info ()
info = (eh_nesting_info *) xmalloc (sizeof (eh_nesting_info));
info->region_index = (int *) xcalloc ((max_label_num () + 1), sizeof (int));
nested_eh_region = (int *) alloca ((max_label_num () + 1) * sizeof (int));
bzero ((char *) nested_eh_region, (max_label_num () + 1) * sizeof (int));
nested_eh_region = (int *) xcalloc (max_label_num () + 1, sizeof (int));
/* Create the nested_eh_region list. If indexed with a block number, it
returns the block number of the next outermost region, if any.
@ -3189,6 +3189,7 @@ init_eh_nesting_info ()
{
free (info->region_index);
free (info);
free (nested_eh_region);
return NULL;
}
@ -3205,6 +3206,10 @@ init_eh_nesting_info ()
process_nestinfo (x, info, nested_eh_region);
}
info->region_count = region_count;
/* Clean up. */
free (nested_eh_region);
return info;
}

View File

@ -5558,7 +5558,7 @@ identify_blocks (block, insns)
block_vector = (tree *) xmalloc (n_blocks * sizeof (tree));
all_blocks (block, block_vector);
block_stack = (tree *) alloca (n_blocks * sizeof (tree));
block_stack = (tree *) xmalloc (n_blocks * sizeof (tree));
for (insn = insns; insn; insn = NEXT_INSN (insn))
if (GET_CODE (insn) == NOTE)
@ -5594,6 +5594,7 @@ identify_blocks (block, insns)
abort ();
free (block_vector);
free (block_stack);
}
/* Given a revised instruction chain, rebuild the tree structure of

View File

@ -2067,10 +2067,13 @@ dump_hash_table (file, name, table, table_size, total_size)
{
int i;
/* Flattened out table, so it's printed in proper order. */
struct expr **flat_table = (struct expr **) alloca (total_size * sizeof (struct expr *));
unsigned int *hash_val = (unsigned int *) alloca (total_size * sizeof (unsigned int));
struct expr **flat_table;
unsigned int *hash_val;
flat_table
= (struct expr **) xcalloc (total_size, sizeof (struct expr *));
hash_val = (unsigned int *) xmalloc (total_size * sizeof (unsigned int));
bzero ((char *) flat_table, total_size * sizeof (struct expr *));
for (i = 0; i < table_size; i++)
{
struct expr *expr;
@ -2096,6 +2099,10 @@ dump_hash_table (file, name, table, table_size, total_size)
}
fprintf (file, "\n");
/* Clean up. */
free (flat_table);
free (hash_val);
}
/* Record register first/last/block set information for REGNO in INSN.

View File

@ -284,10 +284,10 @@ print_rtl_graph_with_bb (base, suffix, rtx_first)
int i;
enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
int max_uid = get_max_uid ();
int *start = (int *) alloca (max_uid * sizeof (int));
int *end = (int *) alloca (max_uid * sizeof (int));
int *start = (int *) xmalloc (max_uid * sizeof (int));
int *end = (int *) xmalloc (max_uid * sizeof (int));
enum bb_state *in_bb_p = (enum bb_state *)
alloca (max_uid * sizeof (enum bb_state));
xmalloc (max_uid * sizeof (enum bb_state));
basic_block bb;
for (i = 0; i < max_uid; ++i)
@ -410,6 +410,11 @@ print_rtl_graph_with_bb (base, suffix, rtx_first)
dump_for_graph = 0;
end_fct (fp);
/* Clean up. */
free (start);
free (end);
free (in_bb_p);
}
fclose (fp);

View File

@ -1449,7 +1449,7 @@ combine_movables (movables, nregs)
int nregs;
{
register struct movable *m;
char *matched_regs = (char *) alloca (nregs);
char *matched_regs = (char *) xmalloc (nregs);
enum machine_mode mode;
/* Regs that are set more than once are not allowed to match
@ -1552,6 +1552,9 @@ combine_movables (movables, nregs)
overlap: ;
}
}
/* Clean up. */
free (matched_regs);
}
/* Return 1 if regs X and Y will become the same if moved. */
@ -1753,11 +1756,8 @@ move_movables (movables, threshold, insn_count, loop_start, end, nregs)
/* Map of pseudo-register replacements to handle combining
when we move several insns that load the same value
into different pseudo-registers. */
rtx *reg_map = (rtx *) alloca (nregs * sizeof (rtx));
char *already_moved = (char *) alloca (nregs);
bzero (already_moved, nregs);
bzero ((char *) reg_map, nregs * sizeof (rtx));
rtx *reg_map = (rtx *) xcalloc (nregs, sizeof (rtx));
char *already_moved = (char *) xcalloc (nregs, sizeof (char));
num_movables = 0;
@ -2240,6 +2240,10 @@ move_movables (movables, threshold, insn_count, loop_start, end, nregs)
replace_regs (REG_NOTES (p), reg_map, nregs, 0);
INSN_CODE (p) = -1;
}
/* Clean up. */
free (reg_map);
free (already_moved);
}
#if 0
@ -3580,11 +3584,10 @@ count_loop_regs_set (from, to, may_not_move, single_usage, count_ptr, nregs)
int *count_ptr;
int nregs;
{
register rtx *last_set = (rtx *) alloca (nregs * sizeof (rtx));
register rtx *last_set = (rtx *) xcalloc (nregs, sizeof (rtx));
register rtx insn;
register int count = 0;
bzero ((char *) last_set, nregs * sizeof (rtx));
for (insn = from; insn != to; insn = NEXT_INSN (insn))
{
if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
@ -3614,6 +3617,9 @@ count_loop_regs_set (from, to, may_not_move, single_usage, count_ptr, nregs)
bzero ((char *) last_set, nregs * sizeof (rtx));
}
*count_ptr = count;
/* Clean up. */
free (last_set);
}
/* Given a loop that is bounded by LOOP_START and LOOP_END
@ -3770,7 +3776,7 @@ strength_reduce (scan_start, end, loop_top, insn_count,
/* ??? could set this to last value of threshold in move_movables */
int threshold = (loop_info->has_call ? 1 : 2) * (3 + n_non_fixed_regs);
/* Map of pseudo-register replacements. */
rtx *reg_map;
rtx *reg_map = NULL;
int reg_map_size;
int call_seen;
rtx test;
@ -3787,9 +3793,7 @@ strength_reduce (scan_start, end, loop_top, insn_count,
VARRAY_INT_INIT (reg_iv_type, max_reg_before_loop, "reg_iv_type");
VARRAY_GENERIC_PTR_INIT (reg_iv_info, max_reg_before_loop, "reg_iv_info");
reg_biv_class = (struct iv_class **)
alloca (max_reg_before_loop * sizeof (struct iv_class *));
bzero ((char *) reg_biv_class, (max_reg_before_loop
* sizeof (struct iv_class *)));
xcalloc (max_reg_before_loop, sizeof (struct iv_class *));
loop_iv_list = 0;
addr_placeholder = gen_reg_rtx (Pmode);
@ -4676,8 +4680,7 @@ strength_reduce (scan_start, end, loop_top, insn_count,
Some givs might have been made from biv increments, so look at
reg_iv_type for a suitable size. */
reg_map_size = reg_iv_type->num_elements;
reg_map = (rtx *) alloca (reg_map_size * sizeof (rtx));
bzero ((char *) reg_map, reg_map_size * sizeof (rtx));
reg_map = (rtx *) xcalloc (reg_map_size, sizeof (rtx));
/* Examine each iv class for feasibility of strength reduction/induction
variable elimination. */
@ -5300,6 +5303,9 @@ strength_reduce (scan_start, end, loop_top, insn_count,
egress:
VARRAY_FREE (reg_iv_type);
VARRAY_FREE (reg_iv_info);
free (reg_biv_class);
if (reg_map)
free (reg_map);
}
/* Return 1 if X is a valid source for an initial value (or as value being

View File

@ -160,6 +160,7 @@ static void output_arc_profiler PROTO((int, rtx));
static void instrument_arcs PROTO((rtx, int, FILE *));
static void output_gcov_string PROTO((const char *, long));
static int tablejump_entry_p PROTO((rtx, rtx));
static void compute_branch_probabilities PROTO((int, FILE *));
#ifndef LONG_TYPE_SIZE
#define LONG_TYPE_SIZE BITS_PER_WORD
@ -437,6 +438,293 @@ tablejump_entry_p (insn, label)
return 0;
}
/* Compute the branch probabilities for the various branches.
Annotate them accordingly.
NUM_BLOCKS is the number of entries in the global BB_GRAPH array.
DUMP_FILE, if non-null, receives debug statistics.  Arc execution
counts are read from the global DA_FILE when it is open, otherwise
taken to be zero.  Results are recorded as REG_BR_PROB notes on
branch insns and REG_EXEC_COUNT notes on block-first insns, and
accumulated into the total_* statistics globals.
NOTE(review): split out of branch_prob; assumes bb_graph has already
been built and the spanning tree marked (arcptr->on_tree).  */
static void
compute_branch_probabilities (num_blocks, dump_file)
int num_blocks;
FILE *dump_file;
{
int i;
/* Set to non-zero if we got bad count information.  */
int bad_counts = 0;
int num_arcs;
int changes;
int passes;
int prob;
int total;
int num_branches;
int num_never_executed;
/* Histogram of branch probabilities: 20 buckets of 5% each
(index = prob * 20 / REG_BR_PROB_BASE; see the dump loop below).  */
int hist_br_prob[20];
struct adj_list *arcptr;
/* For each arc not on the spanning tree, set its execution count from
the .da file. */
/* The first count in the .da file is the number of times that the function
was entered. This is the exec_count for block zero. */
num_arcs = 0;
for (i = 0; i < num_blocks; i++)
for (arcptr = bb_graph[i].succ; arcptr; arcptr = arcptr->succ_next)
if (! arcptr->on_tree)
{
num_arcs++;
if (da_file)
{
long value;
__read_long (&value, da_file, 8);
ARC_COUNT (arcptr) = value;
}
else
ARC_COUNT (arcptr) = 0;
/* Instrumented arcs are known; mark them and reduce the number of
still-unknown arcs at both endpoints.  */
arcptr->count_valid = 1;
bb_graph[i].succ_count--;
bb_graph[ARC_TARGET (arcptr)].pred_count--;
}
if (dump_file)
fprintf (dump_file, "%d arc counts read\n", num_arcs);
/* For every block in the file,
- if every exit/entrance arc has a known count, then set the block count
- if the block count is known, and every exit/entrance arc but one has
a known execution count, then set the count of the remaining arc
As arc counts are set, decrement the succ/pred count, but don't delete
the arc, that way we can easily tell when all arcs are known, or only
one arc is unknown. */
/* The order that the basic blocks are iterated through is important.
Since the code that finds spanning trees starts with block 0, low numbered
arcs are put on the spanning tree in preference to high numbered arcs.
Hence, most instrumented arcs are at the end. Graph solving works much
faster if we propagate numbers from the end to the start.
This takes an average of slightly more than 3 passes. */
/* Iterate to a fixed point: each pass propagates flow-conservation
facts; stop when a full pass makes no changes.  */
changes = 1;
passes = 0;
while (changes)
{
passes++;
changes = 0;
for (i = num_blocks - 1; i >= 0; i--)
{
struct bb_info *binfo = &bb_graph[i];
if (! binfo->count_valid)
{
/* Block count = sum of (all-known) outgoing arc counts...  */
if (binfo->succ_count == 0)
{
total = 0;
for (arcptr = binfo->succ; arcptr;
arcptr = arcptr->succ_next)
total += ARC_COUNT (arcptr);
binfo->exec_count = total;
binfo->count_valid = 1;
changes = 1;
}
/* ... or of (all-known) incoming arc counts.  */
else if (binfo->pred_count == 0)
{
total = 0;
for (arcptr = binfo->pred; arcptr;
arcptr = arcptr->pred_next)
total += ARC_COUNT (arcptr);
binfo->exec_count = total;
binfo->count_valid = 1;
changes = 1;
}
}
if (binfo->count_valid)
{
if (binfo->succ_count == 1)
{
total = 0;
/* One of the counts will be invalid, but it is zero,
so adding it in also doesn't hurt. */
for (arcptr = binfo->succ; arcptr;
arcptr = arcptr->succ_next)
total += ARC_COUNT (arcptr);
/* Calculate count for remaining arc by conservation. */
total = binfo->exec_count - total;
/* Search for the invalid arc, and set its count. */
for (arcptr = binfo->succ; arcptr;
arcptr = arcptr->succ_next)
if (! arcptr->count_valid)
break;
if (! arcptr)
abort ();
arcptr->count_valid = 1;
ARC_COUNT (arcptr) = total;
binfo->succ_count--;
bb_graph[ARC_TARGET (arcptr)].pred_count--;
changes = 1;
}
/* Symmetric case: one unknown incoming arc.  */
if (binfo->pred_count == 1)
{
total = 0;
/* One of the counts will be invalid, but it is zero,
so adding it in also doesn't hurt. */
for (arcptr = binfo->pred; arcptr;
arcptr = arcptr->pred_next)
total += ARC_COUNT (arcptr);
/* Calculate count for remaining arc by conservation. */
total = binfo->exec_count - total;
/* Search for the invalid arc, and set its count. */
for (arcptr = binfo->pred; arcptr;
arcptr = arcptr->pred_next)
if (! arcptr->count_valid)
break;
if (! arcptr)
abort ();
arcptr->count_valid = 1;
ARC_COUNT (arcptr) = total;
binfo->pred_count--;
bb_graph[ARC_SOURCE (arcptr)].succ_count--;
changes = 1;
}
}
}
}
total_num_passes += passes;
if (dump_file)
fprintf (dump_file, "Graph solving took %d passes.\n\n", passes);
/* If the graph has been correctly solved, every block will have a
succ and pred count of zero. */
for (i = 0; i < num_blocks; i++)
{
struct bb_info *binfo = &bb_graph[i];
if (binfo->succ_count || binfo->pred_count)
abort ();
}
/* For every arc, calculate its branch probability and add a reg_note
to the branch insn to indicate this. */
for (i = 0; i < 20; i++)
hist_br_prob[i] = 0;
num_never_executed = 0;
num_branches = 0;
for (i = 0; i < num_blocks; i++)
{
struct bb_info *binfo = &bb_graph[i];
total = binfo->exec_count;
for (arcptr = binfo->succ; arcptr; arcptr = arcptr->succ_next)
{
if (arcptr->branch_insn)
{
/* This calculates the branch probability as an integer between
0 and REG_BR_PROB_BASE, properly rounded to the nearest
integer. Perform the arithmetic in double to avoid
overflowing the range of ints. */
/* A block never executed (total == 0) yields prob == -1,
counted separately below.  */
if (total == 0)
prob = -1;
else
{
rtx pat = PATTERN (arcptr->branch_insn);
prob = (((double)ARC_COUNT (arcptr) * REG_BR_PROB_BASE)
+ (total >> 1)) / total;
if (prob < 0 || prob > REG_BR_PROB_BASE)
{
if (dump_file)
fprintf (dump_file, "bad count: prob for %d-%d thought to be %d (forcibly normalized)\n",
ARC_SOURCE (arcptr), ARC_TARGET (arcptr),
prob);
bad_counts = 1;
prob = REG_BR_PROB_BASE / 2;
}
/* Match up probability with JUMP pattern. */
if (GET_CODE (pat) == SET
&& GET_CODE (SET_SRC (pat)) == IF_THEN_ELSE)
{
if (ARC_TARGET (arcptr) == ARC_SOURCE (arcptr) + 1)
{
/* A fall through arc should never have a
branch insn. */
abort ();
}
else
{
/* This is the arc for the taken branch. */
if (GET_CODE (XEXP (SET_SRC (pat), 2)) != PC)
prob = REG_BR_PROB_BASE - prob;
}
}
}
if (prob == -1)
num_never_executed++;
else
{
/* Bucket the probability into the 5%-wide histogram;
clamp exactly-100% into the top bucket.  */
int index = prob * 20 / REG_BR_PROB_BASE;
if (index == 20)
index = 19;
hist_br_prob[index]++;
}
num_branches++;
REG_NOTES (arcptr->branch_insn)
= gen_rtx_EXPR_LIST (REG_BR_PROB, GEN_INT (prob),
REG_NOTES (arcptr->branch_insn));
}
}
/* Add a REG_EXEC_COUNT note to the first instruction of this block. */
if (! binfo->first_insn
|| GET_RTX_CLASS (GET_CODE (binfo->first_insn)) != 'i')
{
/* Block 0 is a fake block representing function entry, and does
not have a real first insn. The second last block might not
begin with a real insn. */
if (i == num_blocks - 1)
return_label_execution_count = total;
else if (i != 0 && i != num_blocks - 2)
abort ();
}
else
{
REG_NOTES (binfo->first_insn)
= gen_rtx_EXPR_LIST (REG_EXEC_COUNT, GEN_INT (total),
REG_NOTES (binfo->first_insn));
if (i == num_blocks - 1)
return_label_execution_count = total;
}
}
/* This should never happen. */
if (bad_counts)
warning ("Arc profiling: some arc counts were bad.");
if (dump_file)
{
fprintf (dump_file, "%d branches\n", num_branches);
fprintf (dump_file, "%d branches never executed\n",
num_never_executed);
/* The histogram is symmetric-folded: bucket i is paired with
bucket 19-i, giving ranges 0-5%, 5-10%, ..., 45-50%.  */
if (num_branches)
for (i = 0; i < 10; i++)
fprintf (dump_file, "%d%% branches in range %d-%d%%\n",
(hist_br_prob[i]+hist_br_prob[19-i])*100/num_branches,
5*i, 5*i+5);
total_num_branches += num_branches;
total_num_never_executed += num_never_executed;
for (i = 0; i < 20; i++)
total_hist_br_prob[i] += hist_br_prob[i];
}
}
/* Instrument and/or analyze program behavior based on program flow graph.
In either case, this function builds a flow graph for the function being
compiled. The flow graph is stored in BB_GRAPH.
@ -460,11 +748,7 @@ branch_prob (f, dump_file)
{
int i, num_blocks;
struct adj_list *arcptr;
int num_arcs, changes, passes;
int total, prob;
int hist_br_prob[20], num_never_executed, num_branches;
/* Set to non-zero if we got bad count information. */
int bad_counts = 0;
int num_arcs;
/* start of a function. */
if (flag_test_coverage)
@ -620,8 +904,8 @@ branch_prob (f, dump_file)
/* Create and initialize the arrays that will hold bb_graph
and execution count info. */
bb_graph = (struct bb_info *) alloca (num_blocks * sizeof (struct bb_info));
bzero ((char *) bb_graph, (sizeof (struct bb_info) * num_blocks));
bb_graph = (struct bb_info *) xcalloc (num_blocks,
sizeof (struct bb_info));
{
/* Scan the insns again:
@ -965,275 +1249,11 @@ branch_prob (f, dump_file)
}
/* Execute the rest only if doing branch probabilities. */
if (! flag_branch_probabilities)
return;
/* For each arc not on the spanning tree, set its execution count from
the .da file. */
/* The first count in the .da file is the number of times that the function
was entered. This is the exec_count for block zero. */
num_arcs = 0;
for (i = 0; i < num_blocks; i++)
for (arcptr = bb_graph[i].succ; arcptr; arcptr = arcptr->succ_next)
if (! arcptr->on_tree)
{
num_arcs++;
if (da_file)
{
long value;
__read_long (&value, da_file, 8);
ARC_COUNT (arcptr) = value;
}
else
ARC_COUNT (arcptr) = 0;
arcptr->count_valid = 1;
bb_graph[i].succ_count--;
bb_graph[ARC_TARGET (arcptr)].pred_count--;
}
if (dump_file)
fprintf (dump_file, "%d arc counts read\n", num_arcs);
/* For every block in the file,
- if every exit/entrance arc has a known count, then set the block count
- if the block count is known, and every exit/entrance arc but one has
a known execution count, then set the count of the remaining arc
As arc counts are set, decrement the succ/pred count, but don't delete
the arc, that way we can easily tell when all arcs are known, or only
one arc is unknown. */
/* The order that the basic blocks are iterated through is important.
Since the code that finds spanning trees starts with block 0, low numbered
arcs are put on the spanning tree in preference to high numbered arcs.
Hence, most instrumented arcs are at the end. Graph solving works much
faster if we propagate numbers from the end to the start.
This takes an average of slightly more than 3 passes. */
changes = 1;
passes = 0;
while (changes)
{
passes++;
changes = 0;
for (i = num_blocks - 1; i >= 0; i--)
{
struct bb_info *binfo = &bb_graph[i];
if (! binfo->count_valid)
{
if (binfo->succ_count == 0)
{
total = 0;
for (arcptr = binfo->succ; arcptr;
arcptr = arcptr->succ_next)
total += ARC_COUNT (arcptr);
binfo->exec_count = total;
binfo->count_valid = 1;
changes = 1;
}
else if (binfo->pred_count == 0)
{
total = 0;
for (arcptr = binfo->pred; arcptr;
arcptr = arcptr->pred_next)
total += ARC_COUNT (arcptr);
binfo->exec_count = total;
binfo->count_valid = 1;
changes = 1;
}
}
if (binfo->count_valid)
{
if (binfo->succ_count == 1)
{
total = 0;
/* One of the counts will be invalid, but it is zero,
so adding it in also doesn't hurt. */
for (arcptr = binfo->succ; arcptr;
arcptr = arcptr->succ_next)
total += ARC_COUNT (arcptr);
/* Calculate count for remaining arc by conservation. */
total = binfo->exec_count - total;
/* Search for the invalid arc, and set its count. */
for (arcptr = binfo->succ; arcptr;
arcptr = arcptr->succ_next)
if (! arcptr->count_valid)
break;
if (! arcptr)
abort ();
arcptr->count_valid = 1;
ARC_COUNT (arcptr) = total;
binfo->succ_count--;
bb_graph[ARC_TARGET (arcptr)].pred_count--;
changes = 1;
}
if (binfo->pred_count == 1)
{
total = 0;
/* One of the counts will be invalid, but it is zero,
so adding it in also doesn't hurt. */
for (arcptr = binfo->pred; arcptr;
arcptr = arcptr->pred_next)
total += ARC_COUNT (arcptr);
/* Calculate count for remaining arc by conservation. */
total = binfo->exec_count - total;
/* Search for the invalid arc, and set its count. */
for (arcptr = binfo->pred; arcptr;
arcptr = arcptr->pred_next)
if (! arcptr->count_valid)
break;
if (! arcptr)
abort ();
arcptr->count_valid = 1;
ARC_COUNT (arcptr) = total;
binfo->pred_count--;
bb_graph[ARC_SOURCE (arcptr)].succ_count--;
changes = 1;
}
}
}
}
total_num_passes += passes;
if (dump_file)
fprintf (dump_file, "Graph solving took %d passes.\n\n", passes);
/* If the graph has been correctly solved, every block will have a
succ and pred count of zero. */
for (i = 0; i < num_blocks; i++)
{
struct bb_info *binfo = &bb_graph[i];
if (binfo->succ_count || binfo->pred_count)
abort ();
}
/* For every arc, calculate its branch probability and add a reg_note
to the branch insn to indicate this. */
for (i = 0; i < 20; i++)
hist_br_prob[i] = 0;
num_never_executed = 0;
num_branches = 0;
for (i = 0; i < num_blocks; i++)
{
struct bb_info *binfo = &bb_graph[i];
total = binfo->exec_count;
for (arcptr = binfo->succ; arcptr; arcptr = arcptr->succ_next)
{
if (arcptr->branch_insn)
{
/* This calculates the branch probability as an integer between
0 and REG_BR_PROB_BASE, properly rounded to the nearest
integer. Perform the arithmetic in double to avoid
overflowing the range of ints. */
if (total == 0)
prob = -1;
else
{
rtx pat = PATTERN (arcptr->branch_insn);
prob = (((double)ARC_COUNT (arcptr) * REG_BR_PROB_BASE)
+ (total >> 1)) / total;
if (prob < 0 || prob > REG_BR_PROB_BASE)
{
if (dump_file)
fprintf (dump_file, "bad count: prob for %d-%d thought to be %d (forcibly normalized)\n",
ARC_SOURCE (arcptr), ARC_TARGET (arcptr),
prob);
bad_counts = 1;
prob = REG_BR_PROB_BASE / 2;
}
/* Match up probability with JUMP pattern. */
if (GET_CODE (pat) == SET
&& GET_CODE (SET_SRC (pat)) == IF_THEN_ELSE)
{
if (ARC_TARGET (arcptr) == ARC_SOURCE (arcptr) + 1)
{
/* A fall through arc should never have a
branch insn. */
abort ();
}
else
{
/* This is the arc for the taken branch. */
if (GET_CODE (XEXP (SET_SRC (pat), 2)) != PC)
prob = REG_BR_PROB_BASE - prob;
}
}
}
if (prob == -1)
num_never_executed++;
else
{
int index = prob * 20 / REG_BR_PROB_BASE;
if (index == 20)
index = 19;
hist_br_prob[index]++;
}
num_branches++;
REG_NOTES (arcptr->branch_insn)
= gen_rtx_EXPR_LIST (REG_BR_PROB, GEN_INT (prob),
REG_NOTES (arcptr->branch_insn));
}
}
/* Add a REG_EXEC_COUNT note to the first instruction of this block. */
if (! binfo->first_insn
|| GET_RTX_CLASS (GET_CODE (binfo->first_insn)) != 'i')
{
/* Block 0 is a fake block representing function entry, and does
not have a real first insn. The second last block might not
begin with a real insn. */
if (i == num_blocks - 1)
return_label_execution_count = total;
else if (i != 0 && i != num_blocks - 2)
abort ();
}
else
{
REG_NOTES (binfo->first_insn)
= gen_rtx_EXPR_LIST (REG_EXEC_COUNT, GEN_INT (total),
REG_NOTES (binfo->first_insn));
if (i == num_blocks - 1)
return_label_execution_count = total;
}
}
/* This should never happen. */
if (bad_counts)
warning ("Arc profiling: some arc counts were bad.");
if (dump_file)
{
fprintf (dump_file, "%d branches\n", num_branches);
fprintf (dump_file, "%d branches never executed\n",
num_never_executed);
if (num_branches)
for (i = 0; i < 10; i++)
fprintf (dump_file, "%d%% branches in range %d-%d%%\n",
(hist_br_prob[i]+hist_br_prob[19-i])*100/num_branches,
5*i, 5*i+5);
total_num_branches += num_branches;
total_num_never_executed += num_never_executed;
for (i = 0; i < 20; i++)
total_hist_br_prob[i] += hist_br_prob[i];
}
if (flag_branch_probabilities)
compute_branch_probabilities (num_blocks, dump_file);
/* Clean up. */
free (bb_graph);
}
/* Initialize a new arc.

View File

@ -965,7 +965,7 @@ regclass (f, nregs)
#ifdef FORBIDDEN_INC_DEC_CLASSES
in_inc_dec = (char *) alloca (nregs);
in_inc_dec = (char *) xmalloc (nregs);
/* Initialize information about which register classes can be used for
pseudos that are auto-incremented or auto-decremented. It would
@ -1109,6 +1109,9 @@ regclass (f, nregs)
}
}
#ifdef FORBIDDEN_INC_DEC_CLASSES
free (in_inc_dec);
#endif
free (costs);
}

View File

@ -1099,10 +1099,10 @@ regmove_optimize (f, nregs, regmove_dump_file)
can supress some optimizations in those zones. */
mark_flags_life_zones (discover_flags_reg ());
regno_src_regno = (int *)alloca (sizeof *regno_src_regno * nregs);
regno_src_regno = (int *) xmalloc (sizeof *regno_src_regno * nregs);
for (i = nregs; --i >= 0; ) regno_src_regno[i] = -1;
regmove_bb_head = (int *)alloca (sizeof (int) * (old_max_uid + 1));
regmove_bb_head = (int *) xmalloc (sizeof (int) * (old_max_uid + 1));
for (i = old_max_uid; i >= 0; i--) regmove_bb_head[i] = -1;
for (i = 0; i < n_basic_blocks; i++)
regmove_bb_head[INSN_UID (BLOCK_HEAD (i))] = i;
@ -1114,7 +1114,7 @@ regmove_optimize (f, nregs, regmove_dump_file)
for (pass = 0; pass <= 2; pass++)
{
if (! flag_regmove && pass >= flag_expensive_optimizations)
return;
goto done;
if (regmove_dump_file)
fprintf (regmove_dump_file, "Starting %s pass...\n",
@ -1574,6 +1574,11 @@ regmove_optimize (f, nregs, regmove_dump_file)
new = next, next = NEXT_INSN (new);
BLOCK_END (i) = new;
}
done:
/* Clean up. */
free (regno_src_regno);
free (regmove_bb_head);
}
/* Returns nonzero if INSN's pattern has matching constraints for any operand.

View File

@ -3240,7 +3240,7 @@ compile_file (name)
{
int len = list_length (globals);
tree *vec = (tree *) alloca (sizeof (tree) * len);
tree *vec = (tree *) xmalloc (sizeof (tree) * len);
int i;
tree decl;
@ -3267,6 +3267,9 @@ compile_file (name)
output_exception_table ();
check_global_declarations (vec, len);
/* Clean up. */
free (vec);
}
/* Write out any pending weak symbol declarations. */
@ -5289,18 +5292,6 @@ main (argc, argv)
--p;
progname = p;
#if defined (RLIMIT_STACK) && defined (HAVE_GETRLIMIT) && defined (HAVE_SETRLIMIT)
/* Get rid of any avoidable limit on stack size. */
{
struct rlimit rlim;
/* Set the stack limit huge so that alloca does not fail. */
getrlimit (RLIMIT_STACK, &rlim);
rlim.rlim_cur = rlim.rlim_max;
setrlimit (RLIMIT_STACK, &rlim);
}
#endif
#ifdef HAVE_LC_MESSAGES
setlocale (LC_MESSAGES, "");
#endif