Commit a5b1779f by Jan Hubicka

ipa-inline-transform.c (can_remove_node_now_p): Move out of...

	* ipa-inline-transform.c (can_remove_node_now_p): Move out of...
	(clone_inlined_nodes): ... here.
	(inline_call): Use cgraph_function_or_thunk_node; redirect edge
	to real destination prior to inlining.
	* ipa-inline.c (caller_growth_limits, can_inline_edge_p,
	can_early_inline_edge_p, want_early_inline_function_p,
	want_inline_small_function_p, want_inline_self_recursive_call_p,
	want_inline_function_called_once_p,
	edge_badness, update_all_callee_keys, lookup_recursive_calls,
	add_new_edges_to_heap, inline_small_functions, flatten_function,
	inline_always_inline_functions, early_inline_small_functions): Use
	cgraph_function_or_thunk_node.
	* ipa-inline-analysis.c (evaluate_conditions_for_edge,
	dump_inline_edge_summary, estimate_function_body_sizes): Likewise.
	(do_estimate_growth_1): Break out from ...
	(do_estimate_growth): ... here; walk aliases.
	(inline_generate_summary): Skip aliases.

From-SVN: r174901
parent c1ae3ca5
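
The change that repeats across all three files is that a call edge's callee may now be an alias or a thunk rather than the function body itself, so each heuristic first resolves the edge through cgraph_function_or_thunk_node and only then looks at availability or the inline summary. A minimal sketch of that idiom, assuming a hypothetical helper (consider_edge_for_inlining is not part of the patch; the individual calls it makes mirror the ones used in the hunks below):

/* Sketch only: resolve the real callee behind aliases/thunks before
   looking at availability or the inline summary.  consider_edge_for_inlining
   is a hypothetical helper, not part of the patch.  */

static bool
consider_edge_for_inlining (struct cgraph_edge *e)
{
  enum availability avail;
  struct cgraph_node *callee
    = cgraph_function_or_thunk_node (e->callee, &avail);

  /* No body available, or the body may be replaced at link time: give up.  */
  if (!callee || !callee->analyzed || avail <= AVAIL_OVERWRITABLE)
    return false;

  /* Summaries are keyed by the resolved node, not by the alias.  */
  return inline_summary (callee)->inlinable;
}
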
gcc/ipa-inline-analysis.c
@@ -589,7 +589,8 @@ static clause_t
 evaluate_conditions_for_edge (struct cgraph_edge *e, bool inline_p)
 {
   clause_t clause = inline_p ? 0 : 1 << predicate_not_inlined_condition;
-  struct inline_summary *info = inline_summary (e->callee);
+  struct cgraph_node *callee = cgraph_function_or_thunk_node (e->callee, NULL);
+  struct inline_summary *info = inline_summary (callee);
   int i;
 
   if (ipa_node_params_vector && info->conds
@@ -615,7 +616,7 @@ evaluate_conditions_for_edge (struct cgraph_edge *e, bool inline_p)
           if (cst)
             VEC_replace (tree, known_vals, i, cst);
         }
-      clause = evaluate_conditions_for_known_args (e->callee,
+      clause = evaluate_conditions_for_known_args (callee,
                                                    inline_p, known_vals);
       VEC_free (tree, heap, known_vals);
     }
@@ -919,9 +920,10 @@ dump_inline_edge_summary (FILE * f, int indent, struct cgraph_node *node,
   for (edge = node->callees; edge; edge = edge->next_callee)
     {
       struct inline_edge_summary *es = inline_edge_summary (edge);
+      struct cgraph_node *callee = cgraph_function_or_thunk_node (edge->callee, NULL);
       fprintf (f, "%*s%s/%i %s\n%*s loop depth:%2i freq:%4i size:%2i time: %2i callee size:%2i stack:%2i",
-               indent, "", cgraph_node_name (edge->callee),
-               edge->callee->uid,
+               indent, "", cgraph_node_name (callee),
+               callee->uid,
                !edge->inline_failed ? "inlined"
                : cgraph_inline_failed_string (edge->inline_failed),
                indent, "",
@@ -929,8 +931,8 @@ dump_inline_edge_summary (FILE * f, int indent, struct cgraph_node *node,
                edge->frequency,
                es->call_stmt_size,
                es->call_stmt_time,
-               (int)inline_summary (edge->callee)->size,
-               (int)inline_summary (edge->callee)->estimated_stack_size);
+               (int)inline_summary (callee)->size,
+               (int)inline_summary (callee)->estimated_stack_size);
       if (es->predicate)
         {
           fprintf (f, " predicate: ");
@@ -942,10 +944,10 @@ dump_inline_edge_summary (FILE * f, int indent, struct cgraph_node *node,
         {
           fprintf (f, "%*sStack frame offset %i, callee self size %i, callee size %i\n",
                    indent+2, "",
-                   (int)inline_summary (edge->callee)->stack_frame_offset,
-                   (int)inline_summary (edge->callee)->estimated_self_stack_size,
-                   (int)inline_summary (edge->callee)->estimated_stack_size);
-          dump_inline_edge_summary (f, indent+2, edge->callee, info);
+                   (int)inline_summary (callee)->stack_frame_offset,
+                   (int)inline_summary (callee)->estimated_self_stack_size,
+                   (int)inline_summary (callee)->estimated_stack_size);
+          dump_inline_edge_summary (f, indent+2, callee, info);
         }
     }
   for (edge = node->indirect_calls; edge; edge = edge->next_callee)
@@ -1525,7 +1527,10 @@ estimate_function_body_sizes (struct cgraph_node *node, bool early)
           /* Do not inline calls where we cannot triviall work around
              mismatches in argument or return types.  */
           if (edge->callee
-              && !gimple_check_call_matching_types (stmt, edge->callee->decl))
+              && cgraph_function_or_thunk_node (edge->callee, NULL)
+              && !gimple_check_call_matching_types (stmt,
+                                                    cgraph_function_or_thunk_node (edge->callee,
+                                                                                   NULL)->decl))
             {
               edge->call_stmt_cannot_inline_p = true;
               gimple_call_set_cannot_inline (stmt, true);
@@ -2110,6 +2115,7 @@ int
 do_estimate_edge_growth (struct cgraph_edge *edge)
 {
   int size;
+  struct cgraph_node *callee;
 
   /* When we do caching, use do_estimate_edge_time to populate the entry.  */
@@ -2122,10 +2128,11 @@ do_estimate_edge_growth (struct cgraph_edge *edge)
       gcc_checking_assert (size);
       return size - (size > 0);
     }
+  callee = cgraph_function_or_thunk_node (edge->callee, NULL);
 
   /* Early inliner runs without caching, go ahead and do the dirty work.  */
   gcc_checking_assert (edge->inline_failed);
-  estimate_node_size_and_time (edge->callee,
+  estimate_node_size_and_time (callee,
                                evaluate_conditions_for_edge (edge, true),
                                &size, NULL);
   gcc_checking_assert (inline_edge_summary (edge)->call_stmt_size);
@@ -2171,15 +2178,20 @@ estimate_size_after_inlining (struct cgraph_node *node,
 }
 
 
-/* Estimate the growth caused by inlining NODE into all callees.  */
+struct growth_data
+{
+  bool self_recursive;
+  int growth;
+};
 
-int
-do_estimate_growth (struct cgraph_node *node)
+
+/* Worker for do_estimate_growth.  Collect growth for all callers.  */
+
+static bool
+do_estimate_growth_1 (struct cgraph_node *node, void *data)
 {
-  int growth = 0;
   struct cgraph_edge *e;
-  bool self_recursive = false;
-  struct inline_summary *info = inline_summary (node);
+  struct growth_data *d = (struct growth_data *) data;
 
   for (e = node->callers; e; e = e->next_caller)
     {
@@ -2188,27 +2200,40 @@ do_estimate_growth (struct cgraph_node *node)
       if (e->caller == node
           || (e->caller->global.inlined_to
               && e->caller->global.inlined_to == node))
-        self_recursive = true;
-      growth += estimate_edge_growth (e);
+        d->self_recursive = true;
+      d->growth += estimate_edge_growth (e);
     }
+  return false;
+}
+
+/* Estimate the growth caused by inlining NODE into all callees.  */
+
+int
+do_estimate_growth (struct cgraph_node *node)
+{
+  struct growth_data d = {0, false};
+  struct inline_summary *info = inline_summary (node);
+
+  cgraph_for_node_and_aliases (node, do_estimate_growth_1, &d, true);
 
   /* For self recursive functions the growth estimation really should be
      infinity.  We don't want to return very large values because the growth
      plays various roles in badness computation fractions.  Be sure to not
      return zero or negative growths. */
-  if (self_recursive)
-    growth = growth < info->size ? info->size : growth;
+  if (d.self_recursive)
+    d.growth = d.growth < info->size ? info->size : d.growth;
   else
     {
-      if (cgraph_will_be_removed_from_program_if_no_direct_calls (node)
-          && !DECL_EXTERNAL (node->decl))
-        growth -= info->size;
+      if (!DECL_EXTERNAL (node->decl)
+          && cgraph_will_be_removed_from_program_if_no_direct_calls (node))
+        d.growth -= info->size;
       /* COMDAT functions are very often not shared across multiple units since they
-         come from various template instantiations.  Take this into account.  */
+         come from various template instantiations.  Take this into account.
+         FIXME: allow also COMDATs with COMDAT aliases.  */
       else if (DECL_COMDAT (node->decl)
                && cgraph_can_remove_if_no_direct_calls_p (node))
-        growth -= (info->size
-                   * (100 - PARAM_VALUE (PARAM_COMDAT_SHARING_PROBABILITY)) + 50) / 100;
+        d.growth -= (info->size
+                     * (100 - PARAM_VALUE (PARAM_COMDAT_SHARING_PROBABILITY)) + 50) / 100;
     }
@@ -2216,9 +2241,9 @@ do_estimate_growth (struct cgraph_node *node)
     {
       if ((int)VEC_length (int, node_growth_cache) <= node->uid)
         VEC_safe_grow_cleared (int, heap, node_growth_cache, cgraph_max_uid);
-      VEC_replace (int, node_growth_cache, node->uid, growth + (growth >= 0));
+      VEC_replace (int, node_growth_cache, node->uid, d.growth + (d.growth >= 0));
     }
-  return growth;
+  return d.growth;
 }
@@ -2282,6 +2307,7 @@ inline_generate_summary (void)
   ipa_register_cgraph_hooks ();
 
   FOR_EACH_DEFINED_FUNCTION (node)
+    if (!node->alias)
       inline_analyze_function (node);
 }
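
The do_estimate_growth split above follows the callgraph's callback idiom: per-caller state is packed into a small struct and a worker runs over the node and each of its aliases via cgraph_for_node_and_aliases. A compressed sketch of the same idiom, with a hypothetical accumulator that merely counts callers (count_callers_data, count_callers_1 and count_all_callers are illustrative names, not part of the patch):

/* Sketch of the node-and-aliases walk used by do_estimate_growth above.  */

struct count_callers_data
{
  int callers;
};

static bool
count_callers_1 (struct cgraph_node *node, void *data)
{
  struct count_callers_data *d = (struct count_callers_data *) data;
  struct cgraph_edge *e;

  for (e = node->callers; e; e = e->next_caller)
    d->callers++;
  return false;   /* Returning false keeps the walk going.  */
}

static int
count_all_callers (struct cgraph_node *node)
{
  struct count_callers_data d = { 0 };

  /* The trailing bool argument is passed through exactly as in the hunk above.  */
  cgraph_for_node_and_aliases (node, count_callers_1, &d, true);
  return d.callers;
}
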
gcc/ipa-inline-transform.c
@@ -76,6 +76,35 @@ update_noncloned_frequencies (struct cgraph_node *node,
     }
 }
 
+/* We removed or are going to remove the last call to NODE.
+   Return true if we can and want proactively remove the NODE now.
+   This is important to do, since we want inliner to know when offline
+   copy of function was removed.  */
+
+static bool
+can_remove_node_now_p (struct cgraph_node *node)
+{
+  /* FIXME: When address is taken of DECL_EXTERNAL function we still
+     can remove its offline copy, but we would need to keep unanalyzed node in
+     the callgraph so references can point to it.  */
+  return (!node->address_taken
+          && cgraph_can_remove_if_no_direct_calls_p (node)
+          /* Inlining might enable more devirtualizing, so we want to remove
+             those only after all devirtualizable virtual calls are processed.
+             Lacking may edges in callgraph we just preserve them post
+             inlining.  */
+          && (!DECL_VIRTUAL_P (node->decl)
+              || (!DECL_COMDAT (node->decl)
+                  && !DECL_EXTERNAL (node->decl)))
+          /* Don't reuse if more than one function shares a comdat group.
+             If the other function(s) are needed, we need to emit even
+             this function out of line.  */
+          && !node->same_comdat_group
+          /* During early inlining some unanalyzed cgraph nodes might be in the
+             callgraph and they might reffer the function in question.  */
+          && !cgraph_new_nodes);
+}
+
 /* E is expected to be an edge being inlined.  Clone destination node of
    the edge and redirect it to the new clone.
@@ -97,25 +126,7 @@ clone_inlined_nodes (struct cgraph_edge *e, bool duplicate,
           /* Recursive inlining never wants the master clone to
              be overwritten.  */
           && update_original
-          /* FIXME: When address is taken of DECL_EXTERNAL function we still
-             can remove its offline copy, but we would need to keep unanalyzed
-             node in the callgraph so references can point to it.  */
-          && !e->callee->address_taken
-          && cgraph_can_remove_if_no_direct_calls_p (e->callee)
-          /* Inlining might enable more devirtualizing, so we want to remove
-             those only after all devirtualizable virtual calls are processed.
-             Lacking may edges in callgraph we just preserve them post
-             inlining.  */
-          && (!DECL_VIRTUAL_P (e->callee->decl)
-              || (!DECL_COMDAT (e->callee->decl)
-                  && !DECL_EXTERNAL (e->callee->decl)))
-          /* Don't reuse if more than one function shares a comdat group.
-             If the other function(s) are needed, we need to emit even
-             this function out of line.  */
-          && !e->callee->same_comdat_group
-          /* During early inlining some unanalyzed cgraph nodes might be in the
-             callgraph and they might reffer the function in question.  */
-          && !cgraph_new_nodes)
+          && can_remove_node_now_p (e->callee))
         {
           gcc_assert (!e->callee->global.inlined_to);
           if (e->callee->analyzed && !DECL_EXTERNAL (e->callee->decl))
@@ -164,19 +175,25 @@ inline_call (struct cgraph_edge *e, bool update_original,
   int old_size = 0, new_size = 0;
   struct cgraph_node *to = NULL;
   struct cgraph_edge *curr = e;
+  struct cgraph_node *callee = cgraph_function_or_thunk_node (e->callee, NULL);
 
   /* Don't inline inlined edges.  */
   gcc_assert (e->inline_failed);
   /* Don't even think of inlining inline clone.  */
-  gcc_assert (!e->callee->global.inlined_to);
+  gcc_assert (!callee->global.inlined_to);
 
   e->inline_failed = CIF_OK;
-  DECL_POSSIBLY_INLINED (e->callee->decl) = true;
+  DECL_POSSIBLY_INLINED (callee->decl) = true;
 
   to = e->caller;
   if (to->global.inlined_to)
     to = to->global.inlined_to;
 
+  /* If aliases are involved, redirect edge to the actual destination and
+     possibly remove the aliases.  */
+  if (e->callee != callee)
+    cgraph_redirect_edge_callee (e, callee);
+
   clone_inlined_nodes (e, true, update_original, overall_size);
 
   gcc_assert (curr->callee->global.inlined_to == to);
gcc/ipa-inline.c
@@ -137,7 +137,7 @@ static bool
 caller_growth_limits (struct cgraph_edge *e)
 {
   struct cgraph_node *to = e->caller;
-  struct cgraph_node *what = e->callee;
+  struct cgraph_node *what = cgraph_function_or_thunk_node (e->callee, NULL);
   int newsize;
   int limit = 0;
   HOST_WIDE_INT stack_size_limit = 0, inlined_stack;
@@ -237,22 +237,24 @@ static bool
 can_inline_edge_p (struct cgraph_edge *e, bool report)
 {
   bool inlinable = true;
+  enum availability avail;
+  struct cgraph_node *callee = cgraph_function_or_thunk_node (e->callee, &avail);
   tree caller_tree = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (e->caller->decl);
-  tree callee_tree = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (e->callee->decl);
+  tree callee_tree = callee ? DECL_FUNCTION_SPECIFIC_OPTIMIZATION (callee->decl) : NULL;
 
   gcc_assert (e->inline_failed);
 
-  if (!e->callee->analyzed)
+  if (!callee || !callee->analyzed)
     {
       e->inline_failed = CIF_BODY_NOT_AVAILABLE;
       inlinable = false;
     }
-  else if (!inline_summary (e->callee)->inlinable)
+  else if (!inline_summary (callee)->inlinable)
     {
       e->inline_failed = CIF_FUNCTION_NOT_INLINABLE;
       inlinable = false;
     }
-  else if (cgraph_function_body_availability (e->callee) <= AVAIL_OVERWRITABLE)
+  else if (avail <= AVAIL_OVERWRITABLE)
     {
       e->inline_failed = CIF_OVERWRITABLE;
       return false;
@@ -264,9 +266,9 @@ can_inline_edge_p (struct cgraph_edge *e, bool report)
     }
   /* Don't inline if the functions have different EH personalities.  */
   else if (DECL_FUNCTION_PERSONALITY (e->caller->decl)
-           && DECL_FUNCTION_PERSONALITY (e->callee->decl)
+           && DECL_FUNCTION_PERSONALITY (callee->decl)
            && (DECL_FUNCTION_PERSONALITY (e->caller->decl)
-               != DECL_FUNCTION_PERSONALITY (e->callee->decl)))
+               != DECL_FUNCTION_PERSONALITY (callee->decl)))
     {
       e->inline_failed = CIF_EH_PERSONALITY;
       inlinable = false;
@@ -275,9 +277,9 @@ can_inline_edge_p (struct cgraph_edge *e, bool report)
      caller cannot.
      FIXME: this is obviously wrong for LTO where STRUCT_FUNCTION is missing.
      Move the flag into cgraph node or mirror it in the inline summary.  */
-  else if (DECL_STRUCT_FUNCTION (e->callee->decl)
+  else if (DECL_STRUCT_FUNCTION (callee->decl)
            && DECL_STRUCT_FUNCTION
-                (e->callee->decl)->can_throw_non_call_exceptions
+                (callee->decl)->can_throw_non_call_exceptions
            && !(DECL_STRUCT_FUNCTION (e->caller->decl)
                 && DECL_STRUCT_FUNCTION
                      (e->caller->decl)->can_throw_non_call_exceptions))
@@ -287,13 +289,13 @@ can_inline_edge_p (struct cgraph_edge *e, bool report)
     }
   /* Check compatibility of target optimization options.  */
   else if (!targetm.target_option.can_inline_p (e->caller->decl,
-                                                e->callee->decl))
+                                                callee->decl))
     {
       e->inline_failed = CIF_TARGET_OPTION_MISMATCH;
       inlinable = false;
     }
   /* Check if caller growth allows the inlining.  */
-  else if (!DECL_DISREGARD_INLINE_LIMITS (e->callee->decl)
+  else if (!DECL_DISREGARD_INLINE_LIMITS (callee->decl)
            && !lookup_attribute ("flatten",
                                  DECL_ATTRIBUTES
                                    (e->caller->global.inlined_to
@@ -343,10 +345,12 @@ can_inline_edge_p (struct cgraph_edge *e, bool report)
 static bool
 can_early_inline_edge_p (struct cgraph_edge *e)
 {
+  struct cgraph_node *callee = cgraph_function_or_thunk_node (e->callee,
+                                                              NULL);
   /* Early inliner might get called at WPA stage when IPA pass adds new
      function.  In this case we can not really do any of early inlining
      because function bodies are missing.  */
-  if (!gimple_has_body_p (e->callee->decl))
+  if (!gimple_has_body_p (callee->decl))
     {
       e->inline_failed = CIF_BODY_NOT_AVAILABLE;
       return false;
@@ -356,7 +360,7 @@ can_early_inline_edge_p (struct cgraph_edge *e)
      the callee by early inliner, yet).  We don't have CIF code for this
      case; later we will re-do the decision in the real inliner.  */
   if (!gimple_in_ssa_p (DECL_STRUCT_FUNCTION (e->caller->decl))
-      || !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (e->callee->decl)))
+      || !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (callee->decl)))
     {
       if (dump_file)
         fprintf (dump_file, " edge not inlinable: not in SSA form\n");
@@ -388,10 +392,11 @@ static bool
 want_early_inline_function_p (struct cgraph_edge *e)
 {
   bool want_inline = true;
+  struct cgraph_node *callee = cgraph_function_or_thunk_node (e->callee, NULL);
 
-  if (DECL_DISREGARD_INLINE_LIMITS (e->callee->decl))
+  if (DECL_DISREGARD_INLINE_LIMITS (callee->decl))
     ;
-  else if (!DECL_DECLARED_INLINE_P (e->callee->decl)
+  else if (!DECL_DECLARED_INLINE_P (callee->decl)
            && !flag_inline_small_functions)
     {
       e->inline_failed = CIF_FUNCTION_NOT_INLINE_CANDIDATE;
@@ -410,18 +415,18 @@ want_early_inline_function_p (struct cgraph_edge *e)
         fprintf (dump_file, " will not early inline: %s/%i->%s/%i, "
                  "call is cold and code would grow by %i\n",
                  cgraph_node_name (e->caller), e->caller->uid,
-                 cgraph_node_name (e->callee), e->callee->uid,
+                 cgraph_node_name (callee), callee->uid,
                  growth);
       want_inline = false;
     }
-  else if (!leaf_node_p (e->callee)
+  else if (!leaf_node_p (callee)
            && growth > 0)
     {
       if (dump_file)
         fprintf (dump_file, " will not early inline: %s/%i->%s/%i, "
                  "callee is not leaf and code would grow by %i\n",
                  cgraph_node_name (e->caller), e->caller->uid,
-                 cgraph_node_name (e->callee), e->callee->uid,
+                 cgraph_node_name (callee), callee->uid,
                  growth);
       want_inline = false;
     }
@@ -431,7 +436,7 @@ want_early_inline_function_p (struct cgraph_edge *e)
         fprintf (dump_file, " will not early inline: %s/%i->%s/%i, "
                  "growth %i exceeds --param early-inlining-insns\n",
                  cgraph_node_name (e->caller), e->caller->uid,
-                 cgraph_node_name (e->callee), e->callee->uid,
+                 cgraph_node_name (callee), callee->uid,
                  growth);
       want_inline = false;
     }
@@ -446,10 +451,11 @@ static bool
 want_inline_small_function_p (struct cgraph_edge *e, bool report)
 {
   bool want_inline = true;
+  struct cgraph_node *callee = cgraph_function_or_thunk_node (e->callee, NULL);
 
-  if (DECL_DISREGARD_INLINE_LIMITS (e->callee->decl))
+  if (DECL_DISREGARD_INLINE_LIMITS (callee->decl))
     ;
-  else if (!DECL_DECLARED_INLINE_P (e->callee->decl)
+  else if (!DECL_DECLARED_INLINE_P (callee->decl)
            && !flag_inline_small_functions)
     {
       e->inline_failed = CIF_FUNCTION_NOT_INLINE_CANDIDATE;
@@ -461,19 +467,19 @@ want_inline_small_function_p (struct cgraph_edge *e, bool report)
 
       if (growth <= 0)
         ;
-      else if (DECL_DECLARED_INLINE_P (e->callee->decl)
+      else if (DECL_DECLARED_INLINE_P (callee->decl)
                && growth >= MAX_INLINE_INSNS_SINGLE)
         {
           e->inline_failed = CIF_MAX_INLINE_INSNS_SINGLE_LIMIT;
           want_inline = false;
         }
-      else if (!DECL_DECLARED_INLINE_P (e->callee->decl)
+      else if (!DECL_DECLARED_INLINE_P (callee->decl)
               && !flag_inline_functions)
        {
          e->inline_failed = CIF_NOT_DECLARED_INLINED;
          want_inline = false;
        }
-      else if (!DECL_DECLARED_INLINE_P (e->callee->decl)
+      else if (!DECL_DECLARED_INLINE_P (callee->decl)
               && growth >= MAX_INLINE_INSNS_AUTO)
        {
          e->inline_failed = CIF_MAX_INLINE_INSNS_AUTO_LIMIT;
@@ -495,7 +501,7 @@ want_inline_small_function_p (struct cgraph_edge *e, bool report)
          "good" calls, we will realize that keeping the function around is
          better.  */
       else if (!cgraph_maybe_hot_edge_p (e)
-               && (DECL_EXTERNAL (e->callee->decl)
+               && (DECL_EXTERNAL (callee->decl)
                    /* Unlike for functions called once, we play unsafe with
                       COMDATs.  We can allow that since we know functions
@@ -510,8 +516,8 @@ want_inline_small_function_p (struct cgraph_edge *e, bool report)
                       instead of
                       cgraph_will_be_removed_from_program_if_no_direct_calls  */
-                   || !cgraph_can_remove_if_no_direct_calls_p (e->callee)
-                   || estimate_growth (e->callee) > 0))
+                   || !cgraph_can_remove_if_no_direct_calls_p (callee)
+                   || estimate_growth (callee) > 0))
        {
          e->inline_failed = CIF_UNLIKELY_CALL;
          want_inline = false;
@@ -544,7 +550,7 @@ want_inline_self_recursive_call_p (struct cgraph_edge *edge,
   int caller_freq = CGRAPH_FREQ_BASE;
   int max_depth = PARAM_VALUE (PARAM_MAX_INLINE_RECURSIVE_DEPTH_AUTO);
 
-  if (DECL_DECLARED_INLINE_P (edge->callee->decl))
+  if (DECL_DECLARED_INLINE_P (edge->caller->decl))
     max_depth = PARAM_VALUE (PARAM_MAX_INLINE_RECURSIVE_DEPTH);
 
   if (!cgraph_maybe_hot_edge_p (edge))
@@ -644,6 +650,8 @@ want_inline_self_recursive_call_p (struct cgraph_edge *edge,
 static bool
 want_inline_function_called_once_p (struct cgraph_node *node)
 {
+  if (node->alias)
+    return false;
   /* Already inlined? */
   if (node->global.inlined_to)
     return false;
@@ -708,9 +716,11 @@ edge_badness (struct cgraph_edge *edge, bool dump)
 {
   gcov_type badness;
   int growth, time_growth;
-  struct inline_summary *callee_info = inline_summary (edge->callee);
+  struct cgraph_node *callee = cgraph_function_or_thunk_node (edge->callee,
+                                                              NULL);
+  struct inline_summary *callee_info = inline_summary (callee);
 
-  if (DECL_DISREGARD_INLINE_LIMITS (edge->callee->decl))
+  if (DECL_DISREGARD_INLINE_LIMITS (callee->decl))
     return INT_MIN;
 
   growth = estimate_edge_growth (edge);
@@ -720,7 +730,7 @@ edge_badness (struct cgraph_edge *edge, bool dump)
     {
      fprintf (dump_file, " Badness calculation for %s -> %s\n",
              cgraph_node_name (edge->caller),
-             cgraph_node_name (edge->callee));
+             cgraph_node_name (callee));
      fprintf (dump_file, " size growth %i, time growth %i\n",
              growth,
              time_growth);
@@ -808,7 +818,7 @@ edge_badness (struct cgraph_edge *edge, bool dump)
          We might mix the valud into the fraction by taking into account
          relative growth of the unit, but for now just add the number
          into resulting fraction.  */
-      growth_for_all = estimate_growth (edge->callee);
+      growth_for_all = estimate_growth (callee);
       badness += growth_for_all;
       if (badness > INT_MAX - 1)
         badness = INT_MAX - 1;
@@ -828,7 +838,7 @@ edge_badness (struct cgraph_edge *edge, bool dump)
   else
     {
       int nest = MIN (inline_edge_summary (edge)->loop_depth, 8);
-      badness = estimate_growth (edge->callee) * 256;
+      badness = estimate_growth (callee) * 256;
 
       /* Decrease badness if call is nested.  */
       if (badness > 0)
@@ -1002,13 +1012,16 @@ update_callee_keys (fibheap_t heap, struct cgraph_node *node,
        e = e->callee->callees;
       else
        {
+         enum availability avail;
+         struct cgraph_node *callee;
         /* We do not reset callee growth cache here.  Since we added a new call,
            growth chould have just increased and consequentely badness metric
            don't need updating.  */
         if (e->inline_failed
-            && inline_summary (e->callee)->inlinable
-            && cgraph_function_body_availability (e->callee) >= AVAIL_AVAILABLE
-            && !bitmap_bit_p (updated_nodes, e->callee->uid))
+            && (callee = cgraph_function_or_thunk_node (e->callee, &avail))
+            && inline_summary (callee)->inlinable
+            && cgraph_function_body_availability (callee) >= AVAIL_AVAILABLE
+            && !bitmap_bit_p (updated_nodes, callee->uid))
           {
             if (can_inline_edge_p (e, false)
                 && want_inline_small_function_p (e, false))
@@ -1044,7 +1057,6 @@ update_all_callee_keys (fibheap_t heap, struct cgraph_node *node,
                         bitmap updated_nodes)
 {
   struct cgraph_edge *e = node->callees;
-
   if (!e)
     return;
   while (true)
@@ -1052,11 +1064,14 @@ update_all_callee_keys (fibheap_t heap, struct cgraph_node *node,
        e = e->callee->callees;
       else
        {
+         struct cgraph_node *callee = cgraph_function_or_thunk_node (e->callee,
+                                                                     NULL);
+
         /* We inlined and thus callees might have different number of calls.
            Reset their caches  */
-        reset_node_growth_cache (e->callee);
+        reset_node_growth_cache (callee);
         if (e->inline_failed)
-          update_caller_keys (heap, e->callee, updated_nodes, e);
+          update_caller_keys (heap, callee, updated_nodes, e);
         if (e->next_callee)
           e = e->next_callee;
         else
@@ -1081,8 +1096,12 @@ lookup_recursive_calls (struct cgraph_node *node, struct cgraph_node *where,
                         fibheap_t heap)
 {
   struct cgraph_edge *e;
+  enum availability avail;
+
   for (e = where->callees; e; e = e->next_callee)
-    if (e->callee == node)
+    if (e->callee == node
+        || (cgraph_function_or_thunk_node (e->callee, &avail) == node
+            && avail > AVAIL_OVERWRITABLE))
       {
         /* When profile feedback is available, prioritize by expected number
            of calls.  */
@@ -1240,8 +1259,7 @@ add_new_edges_to_heap (fibheap_t heap, VEC (cgraph_edge_p, heap) *new_edges)
       struct cgraph_edge *edge = VEC_pop (cgraph_edge_p, new_edges);
 
       gcc_assert (!edge->aux);
-      if (inline_summary (edge->callee)->inlinable
-          && edge->inline_failed
+      if (edge->inline_failed
           && can_inline_edge_p (edge, true)
           && want_inline_small_function_p (edge, true))
         edge->aux = fibheap_insert (heap, edge_badness (edge, false), edge);
@@ -1283,10 +1301,14 @@ inline_small_functions (void)
   FOR_EACH_DEFINED_FUNCTION (node)
     if (!node->global.inlined_to)
       {
+       if (cgraph_function_with_gimple_body_p (node)
+           || node->thunk.thunk_p)
+         {
        struct inline_summary *info = inline_summary (node);
 
        if (!DECL_EXTERNAL (node->decl))
          initial_size += info->size;
+         }
 
        for (edge = node->callers; edge; edge = edge->next_caller)
          if (max_count < edge->count)
@@ -1355,14 +1377,14 @@ inline_small_functions (void)
       if (!can_inline_edge_p (edge, true))
        continue;
 
-      callee = edge->callee;
+      callee = cgraph_function_or_thunk_node (edge->callee, NULL);
       growth = estimate_edge_growth (edge);
       if (dump_file)
        {
         fprintf (dump_file,
                  "\nConsidering %s with %i size\n",
-                 cgraph_node_name (edge->callee),
-                 inline_summary (edge->callee)->size);
+                 cgraph_node_name (callee),
+                 inline_summary (callee)->size);
         fprintf (dump_file,
                  " to be inlined into %s in %s:%i\n"
                  " Estimated growth after inlined into all is %+i insns.\n"
@@ -1372,7 +1394,7 @@ inline_small_functions (void)
                  : gimple_filename ((const_gimple) edge->call_stmt),
                  flag_wpa ? -1
                  : gimple_lineno ((const_gimple) edge->call_stmt),
-                 estimate_growth (edge->callee),
+                 estimate_growth (callee),
                  badness,
                  edge->frequency / (double)CGRAPH_FREQ_BASE);
         if (edge->count)
@@ -1383,7 +1405,7 @@ inline_small_functions (void)
        }
 
       if (overall_size + growth > max_size
-          && !DECL_DISREGARD_INLINE_LIMITS (edge->callee->decl))
+          && !DECL_DISREGARD_INLINE_LIMITS (callee->decl))
        {
         edge->inline_failed = CIF_INLINE_UNIT_GROWTH_LIMIT;
         report_inline_failed_reason (edge);
@@ -1418,7 +1440,6 @@ inline_small_functions (void)
        }
       else
        {
-         struct cgraph_node *callee;
         struct cgraph_node *outer_node = NULL;
         int depth = 0;
 
@@ -1446,7 +1467,6 @@ inline_small_functions (void)
         else if (depth && dump_file)
           fprintf (dump_file, " Peeling recursion with depth %i\n", depth);
 
-         callee = edge->callee;
         gcc_checking_assert (!callee->global.inlined_to);
         inline_call (edge, true, &new_indirect_edges, &overall_size);
         if (flag_indirect_inlining)
@@ -1529,14 +1549,15 @@ flatten_function (struct cgraph_node *node, bool early)
   for (e = node->callees; e; e = e->next_callee)
     {
       struct cgraph_node *orig_callee;
+      struct cgraph_node *callee = cgraph_function_or_thunk_node (e->callee, NULL);
 
       /* We've hit cycle?  It is time to give up.  */
-      if (e->callee->aux)
+      if (callee->aux)
        {
         if (dump_file)
           fprintf (dump_file,
                    "Not inlining %s into %s to avoid cycle.\n",
-                   cgraph_node_name (e->callee),
+                   cgraph_node_name (callee),
                    cgraph_node_name (e->caller));
         e->inline_failed = CIF_RECURSIVE_INLINING;
         continue;
@@ -1546,7 +1567,7 @@ flatten_function (struct cgraph_node *node, bool early)
          it in order to fully flatten the leaves.  */
       if (!e->inline_failed)
        {
-         flatten_function (e->callee, early);
+         flatten_function (callee, early);
         continue;
        }
 
@@ -1566,7 +1587,7 @@ flatten_function (struct cgraph_node *node, bool early)
        }
 
      if (gimple_in_ssa_p (DECL_STRUCT_FUNCTION (node->decl))
-         != gimple_in_ssa_p (DECL_STRUCT_FUNCTION (e->callee->decl)))
+         != gimple_in_ssa_p (DECL_STRUCT_FUNCTION (callee->decl)))
        {
         if (dump_file)
           fprintf (dump_file, "Not inlining: SSA form does not match.\n");
@@ -1577,9 +1598,9 @@ flatten_function (struct cgraph_node *node, bool early)
         recursing through the original node if the node was cloned.  */
      if (dump_file)
        fprintf (dump_file, " Inlining %s into %s.\n",
-                cgraph_node_name (e->callee),
+                cgraph_node_name (callee),
                 cgraph_node_name (e->caller));
-      orig_callee = e->callee;
+      orig_callee = callee;
      inline_call (e, true, NULL, NULL);
      if (e->callee != orig_callee)
        orig_callee->aux = (void *) node;
@@ -1727,7 +1748,8 @@ inline_always_inline_functions (struct cgraph_node *node)
 
   for (e = node->callees; e; e = e->next_callee)
     {
-      if (!DECL_DISREGARD_INLINE_LIMITS (e->callee->decl))
+      struct cgraph_node *callee = cgraph_function_or_thunk_node (e->callee, NULL);
+      if (!DECL_DISREGARD_INLINE_LIMITS (callee->decl))
        continue;
 
      if (cgraph_edge_recursive_p (e))
@@ -1764,19 +1786,20 @@ early_inline_small_functions (struct cgraph_node *node)
 
   for (e = node->callees; e; e = e->next_callee)
     {
-      if (!inline_summary (e->callee)->inlinable
+      struct cgraph_node *callee = cgraph_function_or_thunk_node (e->callee, NULL);
+      if (!inline_summary (callee)->inlinable
          || !e->inline_failed)
        continue;
 
      /* Do not consider functions not declared inline.  */
-      if (!DECL_DECLARED_INLINE_P (e->callee->decl)
+      if (!DECL_DECLARED_INLINE_P (callee->decl)
          && !flag_inline_small_functions
          && !flag_inline_functions)
        continue;
 
      if (dump_file)
        fprintf (dump_file, "Considering inline candidate %s.\n",
-                cgraph_node_name (e->callee));
+                cgraph_node_name (callee));
 
      if (!can_early_inline_edge_p (e))
        continue;
@@ -1793,7 +1816,7 @@ early_inline_small_functions (struct cgraph_node *node)
      if (dump_file)
        fprintf (dump_file, " Inlining %s into %s.\n",
-                cgraph_node_name (e->callee),
+                cgraph_node_name (callee),
                 cgraph_node_name (e->caller));
 
      inline_call (e, true, NULL, NULL);
     inlined = true;