Commit e0704a46, authored by Jan Hubicka; committed by Jan Hubicka

cgraph.c: Include tree-gimple.h


	* cgraph.c: Include tree-gimple.h
	(cgraph_edge): Rename expr to stmt.
	(cgraph_create_edge): Likewise.
	(cgraph_clone_node): Likewise.
	* cgraph.h (cgraph_node): Use call_stmt instead of call_expr.
	* cgraphunit.c (record_call_1): Rename to ...
	(record_reference): ... this one; do not build edges.
	(cgraph_varpool_analyze_pending_decls): Call record_reference directly.
	(current_basic_block): Kill.
	(cgraph_create_edges): Rewrite to work on gimple statements nicely.
	(verify_cgraph_node): Likewise.
	* tree-inline.c (copy_body_r): Do not mess up with cgraph edges.
	(copy_bb): Mess up with cgraph edges here; simplify EH handling.
	(copy_edges_for_bb): Simplify EH handling.
	(expand_call_inline): Pass statement to cgraph_edge and
	cgraph_create_edge.

From-SVN: r100276
parent 0ff90076
2005-05-27 Jan Hubicka <jh@suse.cz>
* cgraph.c: Include tree-gimple.h
(cgraph_edge): Rename expr to stmt.
(cgraph_create_edge): Likewise.
(cgraph_clone_node): Likewise.
* cgraph.h (cgraph_node): Use call_stmt instead of call_expr.
* cgraphunit.c (record_call_1): Rename to ...
(record_reference): ... this one; do not build edges.
(cgraph_varpool_analyze_pending_decls): Call record_reference directly.
(current_basic_block): Kill.
(cgraph_create_edges): Rewrite to work on gimple statements nicely.
(verify_cgraph_node): Likewise.
* tree-inline.c (copy_body_r): Do not mess up with cgraph edges.
(copy_bb): Mess up with cgraph edges here; simplify EH handling.
(copy_edges_for_bb): Simplify EH handling.
(expand_call_inline): Pass statement to cgraph_edge and
cgraph_create_edge.
2005-05-27 Richard Guenther <rguenth@gcc.gnu.org>
* tree-inline.c (copy_body_r): Manually fold *& to deal
......
......@@ -97,6 +97,7 @@ The varpool data structure:
#include "varray.h"
#include "output.h"
#include "intl.h"
#include "tree-gimple.h"
static void cgraph_node_remove_callers (struct cgraph_node *node);
static inline void cgraph_edge_remove_caller (struct cgraph_edge *e);
......@@ -255,9 +256,9 @@ cgraph_node_for_asm (tree asmname)
return NULL;
}
/* Return callgraph edge representing CALL_EXPR. */
/* Return callgraph edge representing CALL_EXPR statement. */
struct cgraph_edge *
cgraph_edge (struct cgraph_node *node, tree call_expr)
cgraph_edge (struct cgraph_node *node, tree call_stmt)
{
struct cgraph_edge *e;
......@@ -267,7 +268,7 @@ cgraph_edge (struct cgraph_node *node, tree call_expr)
because we want to make possible having multiple cgraph nodes representing
different clones of the same body before the body is actually cloned. */
for (e = node->callees; e; e= e->next_callee)
if (e->call_expr == call_expr)
if (e->call_stmt == call_stmt)
break;
return e;
}
......@@ -276,17 +277,17 @@ cgraph_edge (struct cgraph_node *node, tree call_expr)
struct cgraph_edge *
cgraph_create_edge (struct cgraph_node *caller, struct cgraph_node *callee,
tree call_expr, gcov_type count, int nest)
tree call_stmt, gcov_type count, int nest)
{
struct cgraph_edge *edge = ggc_alloc (sizeof (struct cgraph_edge));
#ifdef ENABLE_CHECKING
struct cgraph_edge *e;
for (e = caller->callees; e; e = e->next_callee)
gcc_assert (e->call_expr != call_expr);
gcc_assert (e->call_stmt != call_stmt);
#endif
gcc_assert (TREE_CODE (call_expr) == CALL_EXPR);
gcc_assert (get_call_expr_in (call_stmt));
if (!DECL_SAVED_TREE (callee->decl))
edge->inline_failed = N_("function body not available");
......@@ -302,7 +303,7 @@ cgraph_create_edge (struct cgraph_node *caller, struct cgraph_node *callee,
edge->caller = caller;
edge->callee = callee;
edge->call_expr = call_expr;
edge->call_stmt = call_stmt;
edge->prev_caller = NULL;
edge->next_caller = callee->callers;
if (callee->callers)
......@@ -839,11 +840,11 @@ cgraph_function_possibly_inlined_p (tree decl)
/* Create clone of E in the node N represented by CALL_EXPR the callgraph. */
struct cgraph_edge *
cgraph_clone_edge (struct cgraph_edge *e, struct cgraph_node *n,
tree call_expr, int count_scale, int loop_nest)
tree call_stmt, int count_scale, int loop_nest)
{
struct cgraph_edge *new;
new = cgraph_create_edge (n, e->callee, call_expr,
new = cgraph_create_edge (n, e->callee, call_stmt,
e->count * count_scale / REG_BR_PROB_BASE,
e->loop_nest + loop_nest);
......@@ -880,7 +881,7 @@ cgraph_clone_node (struct cgraph_node *n, gcov_type count, int loop_nest)
n->count -= count;
for (e = n->callees;e; e=e->next_callee)
cgraph_clone_edge (e, new, e->call_expr, count_scale, loop_nest);
cgraph_clone_edge (e, new, e->call_stmt, count_scale, loop_nest);
new->next_clone = n->next_clone;
new->prev_clone = n;
......
......@@ -142,7 +142,7 @@ struct cgraph_edge GTY((chain_next ("%h.next_caller"), chain_prev ("%h.prev_call
struct cgraph_edge *next_caller;
struct cgraph_edge *prev_callee;
struct cgraph_edge *next_callee;
tree call_expr;
tree call_stmt;
PTR GTY ((skip (""))) aux;
/* When NULL, inline this call. When non-NULL, points to the explanation
why function was not inlined. */
......
......@@ -169,15 +169,14 @@ Software Foundation, 59 Temple Place - Suite 330, Boston, MA
static void cgraph_expand_all_functions (void);
static void cgraph_mark_functions_to_output (void);
static void cgraph_expand_function (struct cgraph_node *);
static tree record_call_1 (tree *, int *, void *);
static tree record_reference (tree *, int *, void *);
static void cgraph_mark_local_functions (void);
static void cgraph_analyze_function (struct cgraph_node *node);
static void cgraph_create_edges (struct cgraph_node *node, tree body);
/* Records tree nodes seen in cgraph_create_edges. Simply using
/* Records tree nodes seen in record_reference. Simply using
walk_tree_without_duplicates doesn't guarantee each node is visited
once because it gets a new htab upon each recursive call from
record_calls_1. */
record_reference itself. */
static struct pointer_set_t *visited_nodes;
static FILE *cgraph_dump_file;
......@@ -265,7 +264,12 @@ cgraph_varpool_analyze_pending_decls (void)
cgraph_varpool_first_unanalyzed_node = cgraph_varpool_first_unanalyzed_node->next_needed;
if (DECL_INITIAL (decl))
cgraph_create_edges (NULL, DECL_INITIAL (decl));
{
visited_nodes = pointer_set_create ();
walk_tree (&DECL_INITIAL (decl), record_reference, NULL, visited_nodes);
pointer_set_destroy (visited_nodes);
visited_nodes = NULL;
}
changed = true;
}
timevar_pop (TV_CGRAPH);
......@@ -435,9 +439,6 @@ cgraph_finalize_function (tree decl, bool nested)
do_warn_unused_parameter (decl);
}
/* Used only while constructing the callgraph. */
static basic_block current_basic_block;
void
cgraph_lower_function (struct cgraph_node *node)
{
......@@ -449,7 +450,7 @@ cgraph_lower_function (struct cgraph_node *node)
/* Walk tree and record all calls. Called via walk_tree. */
static tree
record_call_1 (tree *tp, int *walk_subtrees, void *data)
record_reference (tree *tp, int *walk_subtrees, void *data)
{
tree t = *tp;
......@@ -480,29 +481,6 @@ record_call_1 (tree *tp, int *walk_subtrees, void *data)
}
break;
case CALL_EXPR:
{
tree decl = get_callee_fndecl (*tp);
if (decl && TREE_CODE (decl) == FUNCTION_DECL)
{
cgraph_create_edge (data, cgraph_node (decl), *tp,
current_basic_block->count,
current_basic_block->loop_depth);
/* When we see a function call, we don't want to look at the
function reference in the ADDR_EXPR that is hanging from
the CALL_EXPR we're examining here, because we would
conclude incorrectly that the function's address could be
taken by something that is not a function call. So only
walk the function parameter list, skip the other subtrees. */
walk_tree (&TREE_OPERAND (*tp, 1), record_call_1, data,
visited_nodes);
*walk_subtrees = 0;
}
break;
}
default:
/* Save some cycles by not walking types and declaration as we
won't find anything useful there anyway. */
......@@ -525,97 +503,62 @@ record_call_1 (tree *tp, int *walk_subtrees, void *data)
static void
cgraph_create_edges (struct cgraph_node *node, tree body)
{
/* The nodes we're interested in are never shared, so walk
the tree ignoring duplicates. */
basic_block bb;
struct function *this_cfun = DECL_STRUCT_FUNCTION (body);
block_stmt_iterator bsi;
tree step;
visited_nodes = pointer_set_create ();
gcc_assert (current_basic_block == NULL);
if (TREE_CODE (body) == FUNCTION_DECL)
{
struct function *this_cfun = DECL_STRUCT_FUNCTION (body);
block_stmt_iterator bsi;
tree step;
/* Reach the trees by walking over the CFG, and note the
enclosing basic-blocks in the call edges. */
FOR_EACH_BB_FN (current_basic_block, this_cfun)
for (bsi = bsi_start (current_basic_block); !bsi_end_p (bsi); bsi_next (&bsi))
walk_tree (bsi_stmt_ptr (bsi), record_call_1, node, visited_nodes);
current_basic_block = NULL;
/* Walk over any private statics that may take addresses of functions. */
if (TREE_CODE (DECL_INITIAL (body)) == BLOCK)
{
for (step = BLOCK_VARS (DECL_INITIAL (body));
step;
step = TREE_CHAIN (step))
if (DECL_INITIAL (step))
walk_tree (&DECL_INITIAL (step), record_call_1, node, visited_nodes);
}
/* Also look here for private statics. */
if (DECL_STRUCT_FUNCTION (body))
for (step = DECL_STRUCT_FUNCTION (body)->unexpanded_var_list;
step;
step = TREE_CHAIN (step))
/* Reach the trees by walking over the CFG, and note the
enclosing basic-blocks in the call edges. */
FOR_EACH_BB_FN (bb, this_cfun)
for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
{
tree stmt = bsi_stmt (bsi);
tree call = get_call_expr_in (stmt);
tree decl;
if (call && (decl = get_callee_fndecl (call)))
{
tree decl = TREE_VALUE (step);
if (DECL_INITIAL (decl) && TREE_STATIC (decl))
walk_tree (&DECL_INITIAL (decl), record_call_1, node, visited_nodes);
cgraph_create_edge (node, cgraph_node (decl), stmt,
bb->count,
bb->loop_depth);
walk_tree (&TREE_OPERAND (call, 1),
record_reference, node, visited_nodes);
if (TREE_CODE (stmt) == MODIFY_EXPR)
walk_tree (&TREE_OPERAND (stmt, 0),
record_reference, node, visited_nodes);
}
else
walk_tree (bsi_stmt_ptr (bsi), record_reference, node, visited_nodes);
}
/* Walk over any private statics that may take addresses of functions. */
if (TREE_CODE (DECL_INITIAL (body)) == BLOCK)
{
for (step = BLOCK_VARS (DECL_INITIAL (body));
step;
step = TREE_CHAIN (step))
if (DECL_INITIAL (step))
walk_tree (&DECL_INITIAL (step), record_reference, node, visited_nodes);
}
else
walk_tree (&body, record_call_1, node, visited_nodes);
/* Also look here for private statics. */
if (DECL_STRUCT_FUNCTION (body))
for (step = DECL_STRUCT_FUNCTION (body)->unexpanded_var_list;
step;
step = TREE_CHAIN (step))
{
tree decl = TREE_VALUE (step);
if (DECL_INITIAL (decl) && TREE_STATIC (decl))
walk_tree (&DECL_INITIAL (decl), record_reference, node, visited_nodes);
}
pointer_set_destroy (visited_nodes);
visited_nodes = NULL;
}
static bool error_found;
/* Callback of verify_cgraph_node. Check that all call_exprs have
cgraph nodes. */
static tree
verify_cgraph_node_1 (tree *tp, int *walk_subtrees, void *data)
{
tree t = *tp;
tree decl;
if (TREE_CODE (t) == CALL_EXPR && (decl = get_callee_fndecl (t)))
{
struct cgraph_edge *e = cgraph_edge (data, t);
if (e)
{
if (e->aux)
{
error ("Shared call_expr:");
debug_tree (t);
error_found = true;
}
if (e->callee->decl != cgraph_node (decl)->decl)
{
error ("Edge points to wrong declaration:");
debug_tree (e->callee->decl);
fprintf (stderr," Instead of:");
debug_tree (decl);
}
e->aux = (void *)1;
}
else
{
error ("Missing callgraph edge for call expr:");
debug_tree (t);
error_found = true;
}
}
/* Save some cycles by not walking types and declaration as we
won't find anything useful there anyway. */
if (IS_TYPE_OR_DECL_P (*tp))
*walk_subtrees = 0;
return NULL_TREE;
}
/* Verify cgraph nodes of given cgraph node. */
void
......@@ -626,9 +569,9 @@ verify_cgraph_node (struct cgraph_node *node)
struct function *this_cfun = DECL_STRUCT_FUNCTION (node->decl);
basic_block this_block;
block_stmt_iterator bsi;
bool error_found = false;
timevar_push (TV_CGRAPH_VERIFY);
error_found = false;
for (e = node->callees; e; e = e->next_callee)
if (e->aux)
{
......@@ -694,7 +637,38 @@ verify_cgraph_node (struct cgraph_node *node)
enclosing basic-blocks in the call edges. */
FOR_EACH_BB_FN (this_block, this_cfun)
for (bsi = bsi_start (this_block); !bsi_end_p (bsi); bsi_next (&bsi))
walk_tree (bsi_stmt_ptr (bsi), verify_cgraph_node_1, node, visited_nodes);
{
tree stmt = bsi_stmt (bsi);
tree call = get_call_expr_in (stmt);
tree decl;
if (call && (decl = get_callee_fndecl (call)))
{
struct cgraph_edge *e = cgraph_edge (node, stmt);
if (e)
{
if (e->aux)
{
error ("Shared call_stmt:");
debug_generic_stmt (stmt);
error_found = true;
}
if (e->callee->decl != cgraph_node (decl)->decl)
{
error ("Edge points to wrong declaration:");
debug_tree (e->callee->decl);
fprintf (stderr," Instead of:");
debug_tree (decl);
}
e->aux = (void *)1;
}
else
{
error ("Missing callgraph edge for call stmt:");
debug_generic_stmt (stmt);
error_found = true;
}
}
}
pointer_set_destroy (visited_nodes);
visited_nodes = NULL;
}
......@@ -706,9 +680,10 @@ verify_cgraph_node (struct cgraph_node *node)
{
if (!e->aux)
{
error ("Edge %s->%s has no corresponding call_expr",
error ("Edge %s->%s has no corresponding call_stmt",
cgraph_node_name (e->caller),
cgraph_node_name (e->callee));
debug_generic_stmt (e->call_stmt);
error_found = true;
}
e->aux = 0;
......
......@@ -575,8 +575,6 @@ copy_body_r (tree *tp, int *walk_subtrees, void *data)
knows not to copy VAR_DECLs, etc., so this is safe. */
else
{
tree old_node = *tp;
/* Here we handle trees that are not completely rewritten.
First we detect some inlining-induced bogosities for
discarding. */
......@@ -636,41 +634,7 @@ copy_body_r (tree *tp, int *walk_subtrees, void *data)
&& IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (TREE_CODE (*tp))))
TREE_BLOCK (*tp) = id->block;
/* We're duplicating a CALL_EXPR. Find any corresponding
callgraph edges and update or duplicate them. */
if (TREE_CODE (*tp) == CALL_EXPR && id->node && get_callee_fndecl (*tp))
{
if (id->saving_p)
{
struct cgraph_node *node;
struct cgraph_edge *edge;
/* We're saving a copy of the body, so we'll update the
callgraph nodes in place. Note that we avoid
altering the original callgraph node; we begin with
the first clone. */
for (node = id->node->next_clone;
node;
node = node->next_clone)
{
edge = cgraph_edge (node, old_node);
gcc_assert (edge);
edge->call_expr = *tp;
}
}
else
{
struct cgraph_edge *edge;
/* We're cloning or inlining this body; duplicate the
associate callgraph nodes. */
edge = cgraph_edge (id->current_node, old_node);
if (edge)
cgraph_clone_edge (edge, id->node, *tp,
REG_BR_PROB_BASE, 1);
}
}
else if (TREE_CODE (*tp) == RESX_EXPR && id->eh_region_offset)
if (TREE_CODE (*tp) == RESX_EXPR && id->eh_region_offset)
TREE_OPERAND (*tp, 0) =
build_int_cst
(NULL_TREE,
......@@ -730,7 +694,43 @@ copy_bb (inline_data *id, basic_block bb, int frequency_scale, int count_scale)
this is signalled by making stmt pointer NULL. */
if (stmt)
{
tree call, decl;
bsi_insert_after (&copy_bsi, stmt, BSI_NEW_STMT);
call = get_call_expr_in (stmt);
/* We're duplicating a CALL_EXPR. Find any corresponding
callgraph edges and update or duplicate them. */
if (call && (decl = get_callee_fndecl (call)))
{
if (id->saving_p)
{
struct cgraph_node *node;
struct cgraph_edge *edge;
/* We're saving a copy of the body, so we'll update the
callgraph nodes in place. Note that we avoid
altering the original callgraph node; we begin with
the first clone. */
for (node = id->node->next_clone;
node;
node = node->next_clone)
{
edge = cgraph_edge (node, orig_stmt);
gcc_assert (edge);
edge->call_stmt = stmt;
}
}
else
{
struct cgraph_edge *edge;
/* We're cloning or inlining this body; duplicate the
associate callgraph nodes. */
edge = cgraph_edge (id->current_node, orig_stmt);
if (edge)
cgraph_clone_edge (edge, id->node, stmt,
REG_BR_PROB_BASE, 1);
}
}
/* If you think we can abort here, you are wrong.
There is no region 0 in tree land. */
gcc_assert (lookup_stmt_eh_region_fn (id->callee_cfun, orig_stmt)
......@@ -776,24 +776,24 @@ copy_edges_for_bb (basic_block bb, int count_scale)
/* Use the indices from the original blocks to create edges for the
new ones. */
FOR_EACH_EDGE (old_edge, ei, bb->succs)
{
edge new;
if (!(old_edge->flags & EDGE_EH))
{
edge new;
flags = old_edge->flags;
flags = old_edge->flags;
/* Return edges do get a FALLTHRU flag when the get inlined. */
if (old_edge->dest->index == EXIT_BLOCK && !old_edge->flags
&& old_edge->dest->aux != EXIT_BLOCK_PTR)
flags |= EDGE_FALLTHRU;
new = make_edge (new_bb, old_edge->dest->aux, flags);
new->count = old_edge->count * count_scale / REG_BR_PROB_BASE;
new->probability = old_edge->probability;
}
/* Return edges do get a FALLTHRU flag when the get inlined. */
if (old_edge->dest->index == EXIT_BLOCK && !old_edge->flags
&& old_edge->dest->aux != EXIT_BLOCK_PTR)
flags |= EDGE_FALLTHRU;
new = make_edge (new_bb, old_edge->dest->aux, flags);
new->count = old_edge->count * count_scale / REG_BR_PROB_BASE;
new->probability = old_edge->probability;
}
if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
return;
tree_purge_dead_eh_edges (new_bb);
for (bsi = bsi_start (new_bb); !bsi_end_p (bsi);)
{
tree copy_stmt;
......@@ -815,9 +815,7 @@ copy_edges_for_bb (basic_block bb, int count_scale)
into a COMPONENT_REF which doesn't. If the copy
can throw, the original could also throw. */
if (TREE_CODE (copy_stmt) == RESX_EXPR
|| (tree_could_throw_p (copy_stmt)
&& lookup_stmt_eh_region (copy_stmt) > 0))
if (tree_can_throw_internal (copy_stmt))
{
if (!bsi_end_p (bsi))
/* Note that bb's predecessor edges aren't necessarily
......@@ -1920,7 +1918,7 @@ expand_call_inline (basic_block bb, tree stmt, tree *tp, void *data)
if (!id->current_node->analyzed)
goto egress;
cg_edge = cgraph_edge (id->current_node, t);
cg_edge = cgraph_edge (id->current_node, stmt);
/* Constant propagation on argument done during previous inlining
may create new direct call. Produce an edge for it. */
......@@ -1933,7 +1931,7 @@ expand_call_inline (basic_block bb, tree stmt, tree *tp, void *data)
constant propagating arguments. In all other cases we hit a bug
(incorrect node sharing is most common reason for missing edges. */
gcc_assert (dest->needed || !flag_unit_at_a_time);
cgraph_create_edge (id->node, dest, t,
cgraph_create_edge (id->node, dest, stmt,
bb->count, bb->loop_depth)->inline_failed
= N_("originally indirect function call not considered for inlining");
goto egress;
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment