coverage.h (coverage_counter_alloc): New function.

	* coverage.h (coverage_counter_alloc): New function.
	* function.h (struct function): Remove arc_profile flag.
	* coverage.c (fn_ident): Remove.
	(fn_b_ctrs, no_coverage): New.
	(get_coverage_counts): Use current_function_funcdef_no.
	(coverage_counter_alloc): New.
	(coverage_counter_ref): Adjust.
	(coverage_begin_output): Check no_coverage. Use
	current_function_funcdef_no.
	(coverage_end_function): Likewise.
	(create_coverage): Set no_coverage.  Set DECL_UNINLINABLE rather
	than clearing flag_inline_functions.  Do not clear arc_profile
	flag.
	* function.c (prepare_function_start): Do not set arc_profile
	flag.
	* profile.c (instrument_edges): Return number of instrumented
	edges. Use a for loop.
	(branch_prob): Call coverage_counter_alloc. Make BB_TO_GCOV_INDEX
	local to here and simplify. Use profile_arc_flag not arc_profile
	flag.
	(find_spanning_tree): Reformat.
	* toplev.c (rest_of_compilation): Use profile_arc_flag and
	flag_test_coverage rather than arc_profile flag.

From-SVN: r66695
Nathan Sidwell 2003-05-11 19:21:32 +00:00 committed by Nathan Sidwell
parent afb19ffb29
commit 6d70e6bee0
7 changed files with 127 additions and 69 deletions

gcc/ChangeLog
@@ -1,3 +1,29 @@
+2003-05-11  Nathan Sidwell  <nathan@codesourcery.com>
+
+	* coverage.h (coverage_counter_alloc): New function.
+	* function.h (struct function): Remove arc_profile flag.
+	* coverage.c (fn_ident): Remove.
+	(fn_b_ctrs, no_coverage): New.
+	(get_coverage_counts): Use current_function_funcdef_no.
+	(coverage_counter_alloc): New.
+	(coverage_counter_ref): Adjust.
+	(coverage_begin_output): Check no_coverage.  Use
+	current_function_funcdef_no.
+	(coverage_end_function): Likewise.
+	(create_coverage): Set no_coverage.  Set DECL_UNINLINABLE rather
+	than clearing flag_inline_functions.  Do not clear arc_profile
+	flag.
+	* function.c (prepare_function_start): Do not set arc_profile
+	flag.
+	* profile.c (instrument_edges): Return number of instrumented
+	edges.  Use a for loop.
+	(branch_prob): Call coverage_counter_alloc.  Make BB_TO_GCOV_INDEX
+	local to here and simplify.  Use profile_arc_flag not arc_profile
+	flag.
+	(find_spanning_tree): Reformat.
+	* toplev.c (rest_of_compilation): Use profile_arc_flag and
+	flag_test_coverage rather than arc_profile flag.
+
 2003-05-11  Gabriel Dos Reis <gdr@integrable-solutions.net>
 
 	* doc/invoke.texi (Wctor-dtor-privacy): Update documentation.

gcc/coverage.c
@@ -72,17 +72,18 @@ typedef struct counts_entry
 } counts_entry_t;
 
-static unsigned fn_ident = 1;
 static struct function_list *functions_head = 0;
 static struct function_list **functions_tail = &functions_head;
+static unsigned no_coverage = 0;
 
 /* Cumulative counter information for whole program.  */
 static unsigned prg_ctr_mask; /* Mask of counter types generated.  */
-static unsigned prg_n_ctrs[GCOV_COUNTERS];
+static unsigned prg_n_ctrs[GCOV_COUNTERS]; /* Total counters allocated.  */
 
 /* Counter information for current function.  */
-static unsigned fn_ctr_mask;
-static unsigned fn_n_ctrs[GCOV_COUNTERS];
+static unsigned fn_ctr_mask; /* Mask of counters used.  */
+static unsigned fn_n_ctrs[GCOV_COUNTERS]; /* Counters allocated.  */
+static unsigned fn_b_ctrs[GCOV_COUNTERS]; /* Allocation base.  */
 
 /* Name of the output file for coverage output file.  */
 static char *bbg_file_name;
 
@@ -313,7 +314,7 @@ get_coverage_counts (unsigned counter, unsigned expected,
       return NULL;
     }
 
-  elt.ident = fn_ident;
+  elt.ident = current_function_funcdef_no + 1;
   elt.ctr = counter;
   entry = htab_find (counts_hash, &elt);
   if (!entry)
@@ -337,15 +338,18 @@
   return entry->counts;
 }
 
-/* Generate a MEM rtl to access COUNTER NO .  */
+/* Allocate NUM counters of type COUNTER.  Returns non-zero if the
+   allocation succeeded.  */
 
-rtx
-coverage_counter_ref (unsigned counter, unsigned no)
+int
+coverage_counter_alloc (unsigned counter, unsigned num)
 {
-  unsigned gcov_size = tree_low_cst (TYPE_SIZE (GCOV_TYPE_NODE), 1);
-  enum machine_mode mode = mode_for_size (gcov_size, MODE_INT, 0);
-  rtx ref;
+  if (no_coverage)
+    return 0;
+
+  if (!num)
+    return 1;
 
   if (!ctr_labels[counter])
     {
       /* Generate and save a copy of this so it can be shared.  */
@@ -354,13 +358,24 @@ coverage_counter_ref (unsigned counter, unsigned no)
       ASM_GENERATE_INTERNAL_LABEL (buf, "LPBX", counter + 1);
       ctr_labels[counter] = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
     }
-  if (no + 1 > fn_n_ctrs[counter])
-    {
-      fn_n_ctrs[counter] = no + 1;
-      fn_ctr_mask |= 1 << counter;
-    }
+  fn_b_ctrs[counter] = fn_n_ctrs[counter];
+  fn_n_ctrs[counter] += num;
+  fn_ctr_mask |= 1 << counter;
+  return 1;
+}
 
-  no += prg_n_ctrs[counter];
+/* Generate a MEM rtl to access COUNTER NO.  */
+
+rtx
+coverage_counter_ref (unsigned counter, unsigned no)
+{
+  unsigned gcov_size = tree_low_cst (TYPE_SIZE (GCOV_TYPE_NODE), 1);
+  enum machine_mode mode = mode_for_size (gcov_size, MODE_INT, 0);
+  rtx ref;
+
+  if (no >= fn_n_ctrs[counter] - fn_b_ctrs[counter])
+    abort ();
+  no += prg_n_ctrs[counter] + fn_b_ctrs[counter];
   ref = plus_constant (ctr_labels[counter], gcov_size / BITS_PER_UNIT * no);
   ref = gen_rtx_MEM (mode, ref);
   set_mem_alias_set (ref, new_alias_set ());
@@ -415,6 +430,9 @@ compute_checksum ()
 int
 coverage_begin_output ()
 {
+  if (no_coverage)
+    return 0;
+
   if (!bbg_function_announced)
     {
       const char *file = DECL_SOURCE_FILE (current_function_decl);
@@ -435,7 +453,7 @@ coverage_begin_output ()
 
       /* Announce function */
       offset = gcov_write_tag (GCOV_TAG_FUNCTION);
-      gcov_write_unsigned (fn_ident);
+      gcov_write_unsigned (current_function_funcdef_no + 1);
       gcov_write_unsigned (compute_checksum ());
       gcov_write_string (IDENTIFIER_POINTER
			 (DECL_ASSEMBLER_NAME (current_function_decl)));
@@ -472,20 +490,18 @@ coverage_end_function ()
       functions_tail = &item->next;
       item->next = 0;
       /* It would be nice to use the unique source location. */
-      item->ident = fn_ident;
+      item->ident = current_function_funcdef_no + 1;
       item->checksum = compute_checksum ();
       for (i = 0; i != GCOV_COUNTERS; i++)
 	{
 	  item->n_ctrs[i] = fn_n_ctrs[i];
 	  prg_n_ctrs[i] += fn_n_ctrs[i];
-	  fn_n_ctrs[i] = 0;
+	  fn_n_ctrs[i] = fn_b_ctrs[i] = 0;
 	}
       prg_ctr_mask |= fn_ctr_mask;
       fn_ctr_mask = 0;
     }
   bbg_function_announced = 0;
-
-  fn_ident++;
 }
 
 /* Creates the gcov_fn_info RECORD_TYPE.  */
@@ -799,8 +815,9 @@ create_coverage ()
   char *ctor_name;
   tree ctor;
   rtx gcov_info_address;
-  int save_flag_inline_functions = flag_inline_functions;
+
+  no_coverage = 1; /* Disable any further coverage.  */
 
   if (!prg_ctr_mask)
     return;
 
@@ -830,6 +847,7 @@ create_coverage ()
   TREE_PUBLIC (ctor) = ! targetm.have_ctors_dtors;
   TREE_USED (ctor) = 1;
   DECL_RESULT (ctor) = build_decl (RESULT_DECL, NULL_TREE, void_type_node);
+  DECL_UNINLINABLE (ctor) = 1;
 
   ctor = (*lang_hooks.decls.pushdecl) (ctor);
   rest_of_decl_compilation (ctor, 0, 1, 0);
@@ -840,7 +858,6 @@ create_coverage ()
   init_function_start (ctor, input_filename, input_line);
   (*lang_hooks.decls.pushlevel) (0);
   expand_function_start (ctor, 0);
-  cfun->arc_profile = 0;
 
   /* Actually generate the code to call __gcov_init.  */
   gcov_info_address = force_reg (Pmode, XEXP (DECL_RTL (gcov_info), 0));
@@ -850,16 +867,8 @@ create_coverage ()
   expand_function_end (input_filename, input_line, 0);
   (*lang_hooks.decls.poplevel) (1, 0, 1);
 
-  /* Since ctor isn't in the list of globals, it would never be emitted
-     when it's considered to be 'safe' for inlining, so turn off
-     flag_inline_functions.  */
-  flag_inline_functions = 0;
-
   rest_of_compilation (ctor);
 
-  /* Reset flag_inline_functions to its original value.  */
-  flag_inline_functions = save_flag_inline_functions;
-
   if (! quiet_flag)
     fflush (asm_out_file);
   current_function_decl = NULL_TREE;
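
The interplay between coverage_counter_alloc and coverage_counter_ref above is easy to lose in the diff: each allocation records its base in fn_b_ctrs, and each reference is resolved relative to that base plus the counters already consumed by earlier functions. Below is a minimal standalone sketch of just that arithmetic; the names mirror the statics in coverage.c, but the gcov and RTL machinery is deliberately stubbed out.

#include <assert.h>
#include <stdio.h>

#define N_KINDS 4 /* stand-in for GCOV_COUNTERS */

static unsigned prg_n_ctrs[N_KINDS]; /* counters used by earlier functions */
static unsigned fn_n_ctrs[N_KINDS];  /* counters used by this function */
static unsigned fn_b_ctrs[N_KINDS];  /* base of the most recent allocation */

/* Reserve NUM slots of kind COUNTER; the label/rtl work of the real
   coverage_counter_alloc is omitted.  */
static int
counter_alloc (unsigned counter, unsigned num)
{
  if (!num)
    return 1;
  fn_b_ctrs[counter] = fn_n_ctrs[counter];
  fn_n_ctrs[counter] += num;
  return 1;
}

/* Resolve local index NO against the global counter array, as the
   rewritten coverage_counter_ref does before building its MEM.  */
static unsigned
counter_index (unsigned counter, unsigned no)
{
  assert (no < fn_n_ctrs[counter] - fn_b_ctrs[counter]);
  return prg_n_ctrs[counter] + fn_b_ctrs[counter] + no;
}

int
main (void)
{
  prg_n_ctrs[0] = 10;       /* earlier functions consumed 10 slots */

  counter_alloc (0, 3);     /* first block: base 0 */
  printf ("%u\n", counter_index (0, 2)); /* 10 + 0 + 2 = 12 */

  counter_alloc (0, 3);     /* second block: base moves to 3 */
  printf ("%u\n", counter_index (0, 0)); /* 10 + 3 + 0 = 13 */
  return 0;
}

The assert here plays the role of the new abort in coverage_counter_ref: referencing outside the most recent allocation is a hard error, which is what lets a pass reserve its counters up front and trust the layout.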

gcc/coverage.h
@@ -25,10 +25,21 @@ Software Foundation, 59 Temple Place - Suite 330, Boston, MA
 extern void coverage_init (const char *);
 extern void coverage_finish (void);
 
+/* Complete the coverage information for the current function.  Once
+   per function.  */
 extern void coverage_end_function (void);
+
+/* Start outputting coverage information for the current
+   function.  Repeatable per function.  */
 extern int coverage_begin_output (void);
+
+/* Allocate some counters.  Repeatable per function.  */
+extern int coverage_counter_alloc (unsigned /*counter*/, unsigned/*num*/);
+/* Use a counter from the most recent allocation.  */
 extern rtx coverage_counter_ref (unsigned /*counter*/, unsigned/*num*/);
+
+/* Get all the counters for the current function.  */
 extern gcov_type *get_coverage_counts (unsigned /*counter*/,
 				       unsigned /*expected*/,
 				       const struct gcov_ctr_summary **);
 
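
The comments added here imply a per-function calling protocol: announce the function (repeatable), reserve counters (repeatable), reference counters from the latest reservation, then finish the function exactly once. The stub harness below only demonstrates that order; every body is a hypothetical printf stand-in, not GCC's implementation (the real coverage_counter_ref, for instance, returns an rtx MEM).

#include <stdio.h>

#define GCOV_COUNTER_ARCS 0 /* stand-in value */

static int
coverage_begin_output (void)
{
  puts ("announce function in the graph file");
  return 1;
}

static int
coverage_counter_alloc (unsigned kind, unsigned num)
{
  printf ("reserve %u counters of kind %u\n", num, kind);
  return 1;
}

static void
coverage_counter_ref (unsigned kind, unsigned no)
{
  printf ("reference counter %u of kind %u in the latest block\n", no, kind);
}

static void
coverage_end_function (void)
{
  puts ("fold per-function counts into program totals");
}

int
main (void)
{
  unsigned i, num_instrumented = 3;

  if (coverage_begin_output ())
    puts ("...write block and arc records...");
  if (coverage_counter_alloc (GCOV_COUNTER_ARCS, num_instrumented))
    for (i = 0; i < num_instrumented; i++)
      coverage_counter_ref (GCOV_COUNTER_ARCS, i); /* emit an increment */
  coverage_end_function (); /* once per function */
  return 0;
}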

gcc/function.c
@@ -6434,8 +6434,6 @@ prepare_function_start ()
   current_function_funcdef_no = funcdef_no++;
 
-  cfun->arc_profile = profile_arc_flag || flag_test_coverage;
-
   cfun->function_frequency = FUNCTION_FREQUENCY_NORMAL;
 
   cfun->max_jumptable_ents = 0;

gcc/function.h
@@ -452,9 +452,6 @@ struct function GTY(())
      generated.  */
   unsigned int instrument_entry_exit : 1;
 
-  /* Nonzero if arc profiling should be done for the function.  */
-  unsigned int arc_profile : 1;
-
   /* Nonzero if profiling code should be generated.  */
   unsigned int profile : 1;

gcc/profile.c
@@ -84,12 +84,6 @@ struct bb_info {
 #define EDGE_INFO(e)  ((struct edge_info *) (e)->aux)
 #define BB_INFO(b)  ((struct bb_info *) (b)->aux)
 
-/* Keep all basic block indexes nonnegative in the gcov output.  Index 0
-   is used for entry block, last block exit block.  */
-#define BB_TO_GCOV_INDEX(bb) ((bb) == ENTRY_BLOCK_PTR ? 0		\
-			      : ((bb) == EXIT_BLOCK_PTR			\
-				 ? last_basic_block + 1 : (bb)->index + 1))
-
 /* Counter summary from the last set of coverage counts read.  */
 const struct gcov_ctr_summary *profile_info;
 
@@ -111,7 +105,7 @@ static int total_num_branches;
 /* Forward declarations.  */
 static void find_spanning_tree PARAMS ((struct edge_list *));
 static rtx gen_edge_profiler PARAMS ((int));
-static void instrument_edges PARAMS ((struct edge_list *));
+static unsigned instrument_edges PARAMS ((struct edge_list *));
 static void compute_branch_probabilities PARAMS ((void));
 static gcov_type * get_exec_counts PARAMS ((void));
 static basic_block find_group PARAMS ((basic_block));
@@ -123,40 +117,45 @@ static void union_groups PARAMS ((basic_block, basic_block));
    F is the first insn of the chain.
    NUM_BLOCKS is the number of basic blocks found in F.  */
 
-static void
+static unsigned
 instrument_edges (el)
      struct edge_list *el;
 {
-  int num_instr_edges = 0;
+  unsigned num_instr_edges = 0;
   int num_edges = NUM_EDGES (el);
   basic_block bb;
 
   remove_fake_edges ();
 
   FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
     {
-      edge e = bb->succ;
-      while (e)
+      edge e;
+
+      for (e = bb->succ; e; e = e->succ_next)
 	{
 	  struct edge_info *inf = EDGE_INFO (e);
+
 	  if (!inf->ignore && !inf->on_tree)
 	    {
+	      rtx edge_profile;
+
 	      if (e->flags & EDGE_ABNORMAL)
 		abort ();
 	      if (rtl_dump_file)
 		fprintf (rtl_dump_file, "Edge %d to %d instrumented%s\n",
			 e->src->index, e->dest->index,
			 EDGE_CRITICAL_P (e) ? " (and split)" : "");
-	      insert_insn_on_edge (
-			gen_edge_profiler (num_instr_edges++), e);
+	      edge_profile = gen_edge_profiler (num_instr_edges++);
+	      insert_insn_on_edge (edge_profile, e);
	      rebuild_jump_labels (e->insns);
 	    }
-	  e = e->succ_next;
 	}
     }
 
   total_num_blocks_created += num_edges;
   if (rtl_dump_file)
     fprintf (rtl_dump_file, "%d edges instrumented\n", num_instr_edges);
+
+  return num_instr_edges;
 }
@@ -353,9 +352,9 @@ compute_branch_probabilities ()
	  for (e = bb->pred; e; e = e->pred_next)
	    total += e->count;
 
-	  /* Seedgeh for the invalid edge, and set its count.  */
+	  /* Search for the invalid edge, and set its count.  */
	  for (e = bb->pred; e; e = e->pred_next)
-	    if (! EDGE_INFO (e)->count_valid && ! EDGE_INFO (e)->ignore)
+	    if (!EDGE_INFO (e)->count_valid && !EDGE_INFO (e)->ignore)
	      break;
 
	  /* Calculate count for remaining edge by conservation.  */
@@ -552,6 +551,7 @@ branch_prob ()
   basic_block bb;
   unsigned i;
   unsigned num_edges, ignored_edges;
+  unsigned num_instrumented;
   struct edge_list *el;
 
   total_num_times_called++;
@@ -644,18 +644,23 @@ branch_prob ()
      as possible to minimize number of edge splits necessary.  */
 
   find_spanning_tree (el);
 
   /* Fake edges that are not on the tree will not be instrumented, so
      mark them ignored.  */
-  for (i = 0; i < num_edges; i++)
+  for (num_instrumented = i = 0; i < num_edges; i++)
     {
       edge e = INDEX_EDGE (el, i);
       struct edge_info *inf = EDGE_INFO (e);
-      if ((e->flags & EDGE_FAKE) && !inf->ignore && !inf->on_tree)
+
+      if (inf->ignore || inf->on_tree)
+	/*NOP*/;
+      else if (e->flags & EDGE_FAKE)
 	{
 	  inf->ignore = 1;
 	  ignored_edges++;
 	}
+      else
+	num_instrumented++;
     }
 
   total_num_blocks += n_basic_blocks + 2;
@@ -684,6 +689,13 @@ branch_prob ()
       gcov_write_length (offset);
     }
 
+  /* Keep all basic block indexes nonnegative in the gcov output.
+     Index 0 is used for entry block, last index is for exit block.
+   */
+  ENTRY_BLOCK_PTR->index = -1;
+  EXIT_BLOCK_PTR->index = last_basic_block;
+#define BB_TO_GCOV_INDEX(bb) ((bb)->index + 1)
+
   /* Arcs */
   if (coverage_begin_output ())
     {
@@ -788,15 +800,21 @@ branch_prob ()
	    }
	}
     }
 
+  ENTRY_BLOCK_PTR->index = ENTRY_BLOCK;
+  EXIT_BLOCK_PTR->index = EXIT_BLOCK;
+#undef BB_TO_GCOV_INDEX
+
   if (flag_branch_probabilities)
     compute_branch_probabilities ();
 
   /* For each edge not on the spanning tree, add counting code as rtl.  */
-  if (cfun->arc_profile && profile_arc_flag)
+  if (profile_arc_flag
+      && coverage_counter_alloc (GCOV_COUNTER_ARCS, num_instrumented))
     {
-      instrument_edges (el);
+      unsigned n_instrumented = instrument_edges (el);
+
+      if (n_instrumented != num_instrumented)
+	abort ();
+
       /* Commit changes done by instrumentation.  */
       commit_edge_insertions_watch_calls ();
 
@@ -880,8 +898,7 @@ find_spanning_tree (el)
     {
       edge e = INDEX_EDGE (el, i);
       if (((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL | EDGE_FAKE))
-	   || e->dest == EXIT_BLOCK_PTR
-	   )
+	   || e->dest == EXIT_BLOCK_PTR)
	  && !EDGE_INFO (e)->ignore
	  && (find_group (e->src) != find_group (e->dest)))
	{
@@ -897,9 +914,8 @@
   for (i = 0; i < num_edges; i++)
     {
       edge e = INDEX_EDGE (el, i);
-      if ((EDGE_CRITICAL_P (e))
-	  && !EDGE_INFO (e)->ignore
-	  && (find_group (e->src) != find_group (e->dest)))
+      if (EDGE_CRITICAL_P (e) && !EDGE_INFO (e)->ignore
+	  && find_group (e->src) != find_group (e->dest))
	{
	  if (rtl_dump_file)
	    fprintf (rtl_dump_file, "Critical edge %d to %d put to tree\n",
@@ -913,8 +929,8 @@
   for (i = 0; i < num_edges; i++)
     {
       edge e = INDEX_EDGE (el, i);
-      if (find_group (e->src) != find_group (e->dest)
-	  && !EDGE_INFO (e)->ignore)
+      if (!EDGE_INFO (e)->ignore
+	  && find_group (e->src) != find_group (e->dest))
	{
	  if (rtl_dump_file)
	    fprintf (rtl_dump_file, "Normal edge %d to %d put to tree\n",
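
With this change branch_prob works in two phases: the classification loop shown earlier decides up front how many edges will be instrumented, coverage_counter_alloc reserves exactly that many arc counters, and instrument_edges must return the same total or the pass aborts. Below is a small standalone model of that classification, using made-up edge records in place of real CFG edges and flags.

#include <assert.h>
#include <stdio.h>

struct edge_stub
{
  int fake;    /* stand-in for EDGE_FAKE */
  int ignore;  /* stand-in for EDGE_INFO (e)->ignore */
  int on_tree; /* stand-in for EDGE_INFO (e)->on_tree */
};

int
main (void)
{
  struct edge_stub edges[] = {
    { 0, 0, 0 }, /* off tree, real: instrumented */
    { 0, 0, 1 }, /* on the spanning tree: skipped */
    { 1, 0, 0 }, /* fake and off tree: newly marked ignored */
    { 0, 1, 0 }  /* already ignored */
  };
  unsigned num_edges = sizeof edges / sizeof *edges;
  unsigned i, ignored_edges = 0, num_instrumented;

  /* Mirror of the rewritten classification loop in branch_prob.  */
  for (num_instrumented = i = 0; i < num_edges; i++)
    {
      struct edge_stub *inf = &edges[i];

      if (inf->ignore || inf->on_tree)
	/*NOP*/;
      else if (inf->fake)
	{
	  inf->ignore = 1;
	  ignored_edges++;
	}
      else
	num_instrumented++;
    }

  /* Counters can now be reserved before any code is emitted; the
     instrumentation pass must produce exactly this many increments.  */
  printf ("alloc %u arc counters, %u edges ignored\n",
	  num_instrumented, ignored_edges);
  assert (num_instrumented == 1 && ignored_edges == 1);
  return 0;
}

The point of the pre-count is that counter slots are reserved before any instrumentation code exists, so the abort in branch_prob is purely a consistency check between the two passes.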

gcc/toplev.c
@@ -3087,13 +3087,14 @@ rest_of_compilation (decl)
   close_dump_file (DFI_cfg, print_rtl_with_bb, insns);
 
   /* Do branch profiling and static profile estimation passes.  */
-  if (optimize > 0 || cfun->arc_profile || flag_branch_probabilities)
+  if (optimize > 0
+      || profile_arc_flag || flag_test_coverage || flag_branch_probabilities)
     {
       struct loops loops;
 
       timevar_push (TV_BRANCH_PROB);
       open_dump_file (DFI_bp, decl);
-      if (cfun->arc_profile || flag_branch_probabilities)
+      if (profile_arc_flag || flag_test_coverage || flag_branch_probabilities)
	branch_prob ();
 
       /* Discover and record the loop depth at the head of each basic