Commit 962e88a9 by Jan Hubicka, committed by Jan Hubicka

coverage.c (get_coverage_counts): Use current_function_decl.


	* coverage.c (get_coverage_counts): Use current_function_decl.
	* profile.c (read_thunk_profile): New function.
	(branch_prob): Add THUNK parameter.
	* tree-profile.c (tree_profiling): Handle thunks.
	* value-prof.c (init_node_map): Handle thunks.
	* value-prof.h (branch_prob): Update prototype.
	(read_thunk_profile): Declare.

	* g++.dg/tree-prof/devirt.C: Update testcase.

From-SVN: r267495
parent a5544970
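
Background for the diffs below: a thunk is the small this-adjusting stub the C++ front end emits when a virtual override must be reached through a base subobject at a non-zero offset. Thunks have no GIMPLE body, so before this commit they were neither instrumented at profile-generate time nor given counts at profile-use time. The following standalone program, an illustrative example that is not part of the commit, is the kind of code that produces such a thunk:

/* D::g overrides B::g while B sits at a non-zero offset inside D, so the
   compiler emits a this-adjusting thunk (mangled along the lines of
   _ZThn8_N1D1gEv) that subtracts the offset and jumps to D::g.  Calls
   through a B* dispatch via that thunk.  */
struct A { virtual int f () { return 1; } virtual ~A () {} };
struct B { virtual int g () { return 2; } virtual ~B () {} };
struct D : A, B { int g () override { return 3; } };

int
main ()
{
  D d;
  B *b = &d;            /* points at the B subobject inside D */
  return b->g () - 3;   /* dispatches through the thunk for D::g */
}

Compiled with -fprofile-generate and then -fprofile-use, this is the case the patch teaches the profiling machinery to handle.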
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
+2019-01-01  Jan Hubicka  <hubicka@ucw.cz>
+
+	* coverage.c (get_coverage_counts): Use current_function_decl.
+	* profile.c (read_thunk_profile): New function.
+	(branch_prob): Add THUNK parameter.
+	* tree-profile.c (tree_profiling): Handle thunks.
+	* value-prof.c (init_node_map): Handle thunks.
+	* value-prof.h (branch_prob): Update prototype.
+	(read_thunk_profile): Declare.
+
 2019-01-01  Jakub Jelinek  <jakub@redhat.com>
 
 	Update copyright years.
--- a/gcc/coverage.c
+++ b/gcc/coverage.c
@@ -329,7 +329,7 @@ get_coverage_counts (unsigned counter, unsigned cfg_checksum,
   else
     {
       gcc_assert (coverage_node_map_initialized_p ());
-      elt.ident = cgraph_node::get (cfun->decl)->profile_id;
+      elt.ident = cgraph_node::get (current_function_decl)->profile_id;
     }
   elt.ctr = counter;
   entry = counts_hash->find (&elt);
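
A note on the change above: read_thunk_profile (added in profile.c below) reads a thunk's counters before any struct function exists for it, so cfun is NULL at that point; the function sets current_function_decl by hand rather than pushing a cfun. Keying the lookup off current_function_decl instead of cfun->decl is what makes that possible.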
--- a/gcc/profile.c
+++ b/gcc/profile.c
@@ -963,6 +963,25 @@ compare_freqs (const void *p1, const void *p2)
   return e2->dest->index - e1->dest->index;
 }
 
+/* Only read execution count for thunks.  */
+
+void
+read_thunk_profile (struct cgraph_node *node)
+{
+  tree old = current_function_decl;
+  current_function_decl = node->decl;
+  gcov_type *counts = get_coverage_counts (GCOV_COUNTER_ARCS, 0, 0, 1);
+  if (counts)
+    {
+      node->callees->count = node->count
+	= profile_count::from_gcov_type (counts[0]);
+      free (counts);
+    }
+  current_function_decl = old;
+  return;
+}
+
 /* Instrument and/or analyze program behavior based on program the CFG.
 
    This function creates a representation of the control flow graph (of
@@ -983,7 +1002,7 @@ compare_freqs (const void *p1, const void *p2)
    Main entry point of this file.  */
 
 void
-branch_prob (void)
+branch_prob (bool thunk)
 {
   basic_block bb;
   unsigned i;
@@ -1000,118 +1019,121 @@ branch_prob (void)
 
   hash_set <location_triplet_hash> streamed_locations;
 
-  /* We can't handle cyclic regions constructed using abnormal edges.
-     To avoid these we replace every source of abnormal edge by a fake
-     edge from entry node and every destination by fake edge to exit.
-     This keeps graph acyclic and our calculation exact for all normal
-     edges except for exit and entrance ones.
-
-     We also add fake exit edges for each call and asm statement in the
-     basic, since it may not return.  */
-
-  FOR_EACH_BB_FN (bb, cfun)
-    {
-      int need_exit_edge = 0, need_entry_edge = 0;
-      int have_exit_edge = 0, have_entry_edge = 0;
-      edge e;
-      edge_iterator ei;
-
-      /* Functions returning multiple times are not handled by extra edges.
-         Instead we simply allow negative counts on edges from exit to the
-         block past call and corresponding probabilities.  We can't go
-         with the extra edges because that would result in flowgraph that
-         needs to have fake edges outside the spanning tree.  */
-
-      FOR_EACH_EDGE (e, ei, bb->succs)
-        {
-          gimple_stmt_iterator gsi;
-          gimple *last = NULL;
-
-          /* It may happen that there are compiler generated statements
-             without a locus at all.  Go through the basic block from the
-             last to the first statement looking for a locus.  */
-          for (gsi = gsi_last_nondebug_bb (bb);
-               !gsi_end_p (gsi);
-               gsi_prev_nondebug (&gsi))
-            {
-              last = gsi_stmt (gsi);
-              if (!RESERVED_LOCATION_P (gimple_location (last)))
-                break;
-            }
-
-          /* Edge with goto locus might get wrong coverage info unless
-             it is the only edge out of BB.
-             Don't do that when the locuses match, so
-             if (blah) goto something;
-             is not computed twice.  */
-          if (last
-              && gimple_has_location (last)
-              && !RESERVED_LOCATION_P (e->goto_locus)
-              && !single_succ_p (bb)
-              && (LOCATION_FILE (e->goto_locus)
-                  != LOCATION_FILE (gimple_location (last))
-                  || (LOCATION_LINE (e->goto_locus)
-                      != LOCATION_LINE (gimple_location (last)))))
-            {
-              basic_block new_bb = split_edge (e);
-              edge ne = single_succ_edge (new_bb);
-              ne->goto_locus = e->goto_locus;
-            }
-          if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
-              && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
-            need_exit_edge = 1;
-          if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
-            have_exit_edge = 1;
-        }
-      FOR_EACH_EDGE (e, ei, bb->preds)
-        {
-          if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
-              && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
-            need_entry_edge = 1;
-          if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
-            have_entry_edge = 1;
-        }
-
-      if (need_exit_edge && !have_exit_edge)
-        {
-          if (dump_file)
-            fprintf (dump_file, "Adding fake exit edge to bb %i\n",
-                     bb->index);
-          make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
-        }
-      if (need_entry_edge && !have_entry_edge)
-        {
-          if (dump_file)
-            fprintf (dump_file, "Adding fake entry edge to bb %i\n",
-                     bb->index);
-          make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), bb, EDGE_FAKE);
-          /* Avoid bbs that have both fake entry edge and also some
-             exit edge.  One of those edges wouldn't be added to the
-             spanning tree, but we can't instrument any of them.  */
-          if (have_exit_edge || need_exit_edge)
-            {
-              gimple_stmt_iterator gsi;
-              gimple *first;
-
-              gsi = gsi_start_nondebug_after_labels_bb (bb);
-              gcc_checking_assert (!gsi_end_p (gsi));
-              first = gsi_stmt (gsi);
-              /* Don't split the bbs containing __builtin_setjmp_receiver
-                 or ABNORMAL_DISPATCHER calls.  These are very
-                 special and don't expect anything to be inserted before
-                 them.  */
-              if (is_gimple_call (first)
-                  && (gimple_call_builtin_p (first, BUILT_IN_SETJMP_RECEIVER)
-                      || (gimple_call_flags (first) & ECF_RETURNS_TWICE)
-                      || (gimple_call_internal_p (first)
-                          && (gimple_call_internal_fn (first)
-                              == IFN_ABNORMAL_DISPATCHER))))
-                continue;
-
-              if (dump_file)
-                fprintf (dump_file, "Splitting bb %i after labels\n",
-                         bb->index);
-              split_block_after_labels (bb);
-            }
-        }
-    }
+  if (!thunk)
+    {
+      /* We can't handle cyclic regions constructed using abnormal edges.
+	 To avoid these we replace every source of abnormal edge by a fake
+	 edge from entry node and every destination by fake edge to exit.
+	 This keeps graph acyclic and our calculation exact for all normal
+	 edges except for exit and entrance ones.
+
+	 We also add fake exit edges for each call and asm statement in the
+	 basic, since it may not return.  */
+
+      FOR_EACH_BB_FN (bb, cfun)
+	{
+	  int need_exit_edge = 0, need_entry_edge = 0;
+	  int have_exit_edge = 0, have_entry_edge = 0;
+	  edge e;
+	  edge_iterator ei;
+
+	  /* Functions returning multiple times are not handled by extra
+	     edges.  Instead we simply allow negative counts on edges from
+	     exit to the block past call and corresponding probabilities.
+	     We can't go with the extra edges because that would result in
+	     flowgraph that needs to have fake edges outside the spanning
+	     tree.  */
+
+	  FOR_EACH_EDGE (e, ei, bb->succs)
+	    {
+	      gimple_stmt_iterator gsi;
+	      gimple *last = NULL;
+
+	      /* It may happen that there are compiler generated statements
+		 without a locus at all.  Go through the basic block from the
+		 last to the first statement looking for a locus.  */
+	      for (gsi = gsi_last_nondebug_bb (bb);
+		   !gsi_end_p (gsi);
+		   gsi_prev_nondebug (&gsi))
+		{
+		  last = gsi_stmt (gsi);
+		  if (!RESERVED_LOCATION_P (gimple_location (last)))
+		    break;
+		}
+
+	      /* Edge with goto locus might get wrong coverage info unless
+		 it is the only edge out of BB.
+		 Don't do that when the locuses match, so
+		 if (blah) goto something;
+		 is not computed twice.  */
+	      if (last
+		  && gimple_has_location (last)
+		  && !RESERVED_LOCATION_P (e->goto_locus)
+		  && !single_succ_p (bb)
+		  && (LOCATION_FILE (e->goto_locus)
+		      != LOCATION_FILE (gimple_location (last))
+		      || (LOCATION_LINE (e->goto_locus)
+			  != LOCATION_LINE (gimple_location (last)))))
+		{
+		  basic_block new_bb = split_edge (e);
+		  edge ne = single_succ_edge (new_bb);
+		  ne->goto_locus = e->goto_locus;
+		}
+	      if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
+		  && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
+		need_exit_edge = 1;
+	      if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
+		have_exit_edge = 1;
+	    }
+	  FOR_EACH_EDGE (e, ei, bb->preds)
+	    {
+	      if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
+		  && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
+		need_entry_edge = 1;
+	      if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
+		have_entry_edge = 1;
+	    }
+
+	  if (need_exit_edge && !have_exit_edge)
+	    {
+	      if (dump_file)
+		fprintf (dump_file, "Adding fake exit edge to bb %i\n",
+			 bb->index);
+	      make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
+	    }
+	  if (need_entry_edge && !have_entry_edge)
+	    {
+	      if (dump_file)
+		fprintf (dump_file, "Adding fake entry edge to bb %i\n",
+			 bb->index);
+	      make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), bb, EDGE_FAKE);
+	      /* Avoid bbs that have both fake entry edge and also some
+		 exit edge.  One of those edges wouldn't be added to the
+		 spanning tree, but we can't instrument any of them.  */
+	      if (have_exit_edge || need_exit_edge)
+		{
+		  gimple_stmt_iterator gsi;
+		  gimple *first;
+
+		  gsi = gsi_start_nondebug_after_labels_bb (bb);
+		  gcc_checking_assert (!gsi_end_p (gsi));
+		  first = gsi_stmt (gsi);
+		  /* Don't split the bbs containing __builtin_setjmp_receiver
+		     or ABNORMAL_DISPATCHER calls.  These are very
+		     special and don't expect anything to be inserted before
+		     them.  */
+		  if (is_gimple_call (first)
+		      && (gimple_call_builtin_p (first, BUILT_IN_SETJMP_RECEIVER)
+			  || (gimple_call_flags (first) & ECF_RETURNS_TWICE)
+			  || (gimple_call_internal_p (first)
+			      && (gimple_call_internal_fn (first)
+				  == IFN_ABNORMAL_DISPATCHER))))
+		    continue;
+
+		  if (dump_file)
+		    fprintf (dump_file, "Splitting bb %i after labels\n",
+			     bb->index);
+		  split_block_after_labels (bb);
+		}
+	    }
+	}
+    }
 
@@ -1143,7 +1165,18 @@ branch_prob (void)
      on the spanning tree.  We insert as many abnormal and critical edges
      as possible to minimize number of edge splits necessary.  */
 
-  find_spanning_tree (el);
+  if (!thunk)
+    find_spanning_tree (el);
+  else
+    {
+      edge e;
+      edge_iterator ei;
+      /* Keep only edge from entry block to be instrumented.  */
+      FOR_EACH_BB_FN (bb, cfun)
+	FOR_EACH_EDGE (e, ei, bb->succs)
+	  EDGE_INFO (e)->ignore = true;
+    }
 
   /* Fake edges that are not on the tree will not be instrumented, so
      mark them ignored.  */
@@ -1183,8 +1216,17 @@ branch_prob (void)
      the checksum in only once place, since it depends on the shape
      of the control flow which can change during
      various transformations.  */
-  cfg_checksum = coverage_compute_cfg_checksum (cfun);
-  lineno_checksum = coverage_compute_lineno_checksum ();
+  if (thunk)
+    {
+      /* At stream in time we do not have CFG, so we can not do checksums.  */
+      cfg_checksum = 0;
+      lineno_checksum = 0;
+    }
+  else
+    {
+      cfg_checksum = coverage_compute_cfg_checksum (cfun);
+      lineno_checksum = coverage_compute_lineno_checksum ();
+    }
 
   /* Write the data from which gcov can reconstruct the basic block
      graph and function line numbers (the gcno file).  */
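
Two details of the profile.c hunks are worth connecting. On the normal path, find_spanning_tree implements the classic arc-profiling optimization: only edges left off a spanning tree of the CFG need counters, since every on-tree count can be recovered by flow conservation (flow in equals flow out at each node). A thunk has no meaningful CFG, so instead every edge except the entry edge is marked ignored, leaving exactly one streamed arc counter, which is the counts[0] that read_thunk_profile later reads back. A minimal sketch of the flow-conservation arithmetic, illustrative only and not GCC code, on a diamond-shaped CFG:

/* Diamond CFG: entry->A, A->B, A->C, B->D, C->D, D->exit.
   Measuring the entry count and the single off-tree edge A->B is enough;
   every other edge count follows from flow conservation.  */
#include <cstdio>

int
main ()
{
  long entry = 100;   /* measured: executions of the function */
  long a_to_b = 37;   /* measured: the instrumented off-tree edge */

  long a_to_c = entry - a_to_b;     /* out of A: entry = A->B + A->C */
  long b_to_d = a_to_b;             /* B has one in-edge, one out-edge */
  long c_to_d = a_to_c;             /* likewise for C */
  long d_to_exit = b_to_d + c_to_d; /* into D: B->D + C->D */

  std::printf ("A->C=%ld B->D=%ld C->D=%ld D->exit=%ld\n",
               a_to_c, b_to_d, c_to_d, d_to_exit);
  return 0;
}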
--- a/gcc/testsuite/ChangeLog
+++ b/gcc/testsuite/ChangeLog
+2019-01-01  Jan Hubicka  <hubicka@ucw.cz>
+
+	* g++.dg/tree-prof/devirt.C: Update testcase.
+
 2019-01-01  Jakub Jelinek  <jakub@redhat.com>
 
 	Update copyright years.
--- a/gcc/testsuite/g++.dg/tree-prof/devirt.C
+++ b/gcc/testsuite/g++.dg/tree-prof/devirt.C
@@ -119,5 +119,5 @@ main ()
     __builtin_abort ();
 }
 
-/* { dg-final-use-not-autofdo { scan-tree-dump-times "folding virtual function call to virtual unsigned int mozPersonalDictionary::_ZThn16" 3 "dom3" } } */
-/* { dg-final-use-not-autofdo { scan-tree-dump-times "folding virtual function call to virtual unsigned int mozPersonalDictionary::AddRef" 3 "dom3" } } */
+/* { dg-final-use-not-autofdo { scan-tree-dump-times "folding virtual function call to virtual unsigned int mozPersonalDictionary::_ZThn16" 1 "dom3" } } */
+/* { dg-final-use-not-autofdo { scan-tree-dump-times "folding virtual function call to virtual unsigned int mozPersonalDictionary::AddRef" 1 "dom3" } } */
--- a/gcc/tree-profile.c
+++ b/gcc/tree-profile.c
@@ -739,7 +739,8 @@ tree_profiling (void)
 
   FOR_EACH_DEFINED_FUNCTION (node)
     {
-      if (!gimple_has_body_p (node->decl))
+      bool thunk = false;
+      if (!gimple_has_body_p (node->decl) && !node->thunk.thunk_p)
	continue;
 
       /* Don't profile functions produced for builtin stuff.  */
@@ -760,22 +761,43 @@ tree_profiling (void)
       if (!include_source_file_for_profile (file))
	continue;
 
+      if (node->thunk.thunk_p)
+	{
+	  /* We can not expand variadic thunks to Gimple.  */
+	  if (stdarg_p (TREE_TYPE (node->decl)))
+	    continue;
+	  thunk = true;
+	  /* When generate profile, expand thunk to gimple so it can be
+	     instrumented same way as other functions.  */
+	  if (profile_arc_flag)
+	    node->expand_thunk (false, true);
+	  /* Read cgraph profile but keep function as thunk at profile-use
+	     time.  */
+	  else
+	    {
+	      read_thunk_profile (node);
+	      continue;
+	    }
+	}
+
       push_cfun (DECL_STRUCT_FUNCTION (node->decl));
 
       if (dump_file)
	dump_function_header (dump_file, cfun->decl, dump_flags);
 
       /* Local pure-const may imply need to fixup the cfg.  */
-      if (execute_fixup_cfg () & TODO_cleanup_cfg)
+      if (gimple_has_body_p (node->decl)
+	  && (execute_fixup_cfg () & TODO_cleanup_cfg))
	cleanup_tree_cfg ();
 
-      branch_prob ();
+      branch_prob (thunk);
 
       if (! flag_branch_probabilities
	  && flag_profile_values)
	gimple_gen_ic_func_profiler ();
 
       if (flag_branch_probabilities
+	  && !thunk
	  && flag_profile_values
	  && flag_value_profile_transformations)
	gimple_value_profile_transformations ();
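
The stdarg_p early-exit in the hunk above covers the one thunk shape that cannot be handled under -fprofile-generate: a thunk for a variadic virtual, whose varargs cannot be re-expanded when the thunk is turned into GIMPLE (per the diff's own comment, "We can not expand variadic thunks to Gimple"). A minimal program that produces such a thunk, again an illustrative example rather than part of the commit:

#include <cstdarg>

/* Because log is variadic, the this-adjusting thunk for D::log in the
   B-in-D vtable is skipped by tree_profiling rather than expanded.  */
struct A { virtual ~A () {} };
struct B
{
  virtual int log (const char *fmt, ...) { return -1; }
  virtual ~B () {}
};
struct D : A, B
{
  int log (const char *fmt, ...) override
  {
    va_list ap;
    va_start (ap, fmt);
    int v = va_arg (ap, int);   /* consume one int argument */
    va_end (ap);
    return v;
  }
};

int
main ()
{
  D d;
  B *b = &d;                    /* dispatch goes through the thunk */
  return b->log ("%d", 0);
}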
--- a/gcc/value-prof.c
+++ b/gcc/value-prof.c
@@ -1188,7 +1188,7 @@ init_node_map (bool local)
   cgraph_node_map = new hash_map<profile_id_hash, cgraph_node *>;
 
   FOR_EACH_DEFINED_FUNCTION (n)
-    if (n->has_gimple_body_p ())
+    if (n->has_gimple_body_p () || n->thunk.thunk_p)
       {
	cgraph_node **val;
	if (local)
--- a/gcc/value-prof.h
+++ b/gcc/value-prof.h
@@ -112,7 +112,8 @@ extern struct cgraph_node* find_func_by_profile_id (int func_id);
 
 /* In profile.c.  */
 extern void init_branch_prob (void);
-extern void branch_prob (void);
+extern void branch_prob (bool);
+extern void read_thunk_profile (struct cgraph_node *);
 extern void end_branch_prob (void);
 
 #endif /* GCC_VALUE_PROF_H */