Commit 49d9c9d2, authored and committed by Jan Hubicka

ipa-inline-analysis.c (do_estimate_growth_1): Add support for capping the growth cumulated.


	* ipa-inline-analysis.c (do_estimate_growth_1): Add support for
	capping the growth cumulated.
	(offline_size): Break out from ...
	(estimate_growth): ... here.
	(check_callers): Add N, OFFLINE and MIN_SIZE and KNOWN_EDGE
	parameters.
	(growth_likely_positive): Turn to ...
	(growth_positive_p): Re-implement.
	* ipa-inline.h (growth_likely_positive): Remove.
	(growth_positive_p): Declare.
	* ipa-inline.c (want_inline_small_function_p): Use
	growth_positive_p.
	(want_inline_function_to_all_callers_p): Likewise.

From-SVN: r278007
parent e3bd08dd
2019-11-09  Jan Hubicka  <hubicka@ucw.cz>
* ipa-inline-analysis.c (do_estimate_growth_1): Add support for
capping the growth cumulated.
(offline_size): Break out from ...
(estimate_growth): ... here.
(check_callers): Add N, OFFLINE and MIN_SIZE and KNOWN_EDGE
parameters.
(growth_likely_positive): Turn to ...
(growth_positive_p): Re-implement.
* ipa-inline.h (growth_likely_positive): Remove.
(growth_positive_p): Declare.
* ipa-inline.c (want_inline_small_function_p): Use
growth_positive_p.
(want_inline_function_to_all_callers_p): Likewise.
2019-11-09 Jan Hubicka <hubicka@ucw.cz>
	* ipa-fnsummary.c (ipa_call_context::estimate_size_and_time): Fix
	calculation of min_size.
	(ipa_update_overall_fn_summary): Likewise.
...@@ -387,6 +387,7 @@ struct growth_data ...@@ -387,6 +387,7 @@ struct growth_data
bool self_recursive; bool self_recursive;
bool uninlinable; bool uninlinable;
int growth; int growth;
int cap;
}; };
...@@ -406,29 +407,57 @@ do_estimate_growth_1 (struct cgraph_node *node, void *data) ...@@ -406,29 +407,57 @@ do_estimate_growth_1 (struct cgraph_node *node, void *data)
|| !opt_for_fn (e->caller->decl, optimize)) || !opt_for_fn (e->caller->decl, optimize))
{ {
d->uninlinable = true; d->uninlinable = true;
if (d->cap < INT_MAX)
return true;
continue; continue;
} }
if (e->recursive_p ()) if (e->recursive_p ())
{ {
d->self_recursive = true; d->self_recursive = true;
if (d->cap < INT_MAX)
return true;
continue; continue;
} }
d->growth += estimate_edge_growth (e); d->growth += estimate_edge_growth (e);
if (d->growth > d->cap)
return true;
} }
return false; return false;
} }
/* Return estimated savings for eliminating offline copy of NODE by inlining
it everywhere. */
static int
offline_size (struct cgraph_node *node, ipa_size_summary *info)
{
if (!DECL_EXTERNAL (node->decl))
{
if (node->will_be_removed_from_program_if_no_direct_calls_p ())
return info->size;
/* COMDAT functions are very often not shared across multiple units
since they come from various template instantiations.
Take this into account. */
else if (DECL_COMDAT (node->decl)
&& node->can_remove_if_no_direct_calls_p ())
return (info->size
* (100 - PARAM_VALUE (PARAM_COMDAT_SHARING_PROBABILITY))
+ 50) / 100;
}
return 0;
}
/* Estimate the growth caused by inlining NODE into all callees. */ /* Estimate the growth caused by inlining NODE into all callees. */
int int
estimate_growth (struct cgraph_node *node) estimate_growth (struct cgraph_node *node)
{ {
struct growth_data d = { node, false, false, 0 }; struct growth_data d = { node, false, false, 0, INT_MAX };
class ipa_size_summary *info = ipa_size_summaries->get (node); ipa_size_summary *info = ipa_size_summaries->get (node);
node->call_for_symbol_and_aliases (do_estimate_growth_1, &d, true); if (node->call_for_symbol_and_aliases (do_estimate_growth_1, &d, true))
return 1;
/* For self recursive functions the growth estimation really should be /* For self recursive functions the growth estimation really should be
infinity. We don't want to return very large values because the growth infinity. We don't want to return very large values because the growth
...@@ -436,21 +465,8 @@ estimate_growth (struct cgraph_node *node) ...@@ -436,21 +465,8 @@ estimate_growth (struct cgraph_node *node)
return zero or negative growths. */ return zero or negative growths. */
if (d.self_recursive) if (d.self_recursive)
d.growth = d.growth < info->size ? info->size : d.growth; d.growth = d.growth < info->size ? info->size : d.growth;
else if (DECL_EXTERNAL (node->decl) || d.uninlinable) else if (!d.uninlinable)
; d.growth -= offline_size (node, info);
else
{
if (node->will_be_removed_from_program_if_no_direct_calls_p ())
d.growth -= info->size;
/* COMDAT functions are very often not shared across multiple units
since they come from various template instantiations.
Take this into account. */
else if (DECL_COMDAT (node->decl)
&& node->can_remove_if_no_direct_calls_p ())
d.growth -= (info->size
* (100 - PARAM_VALUE (PARAM_COMDAT_SHARING_PROBABILITY))
+ 50) / 100;
}
return d.growth; return d.growth;
} }
...@@ -458,7 +474,8 @@ estimate_growth (struct cgraph_node *node) ...@@ -458,7 +474,8 @@ estimate_growth (struct cgraph_node *node)
/* Verify if there are fewer than MAX_CALLERS. */ /* Verify if there are fewer than MAX_CALLERS. */
static bool static bool
check_callers (cgraph_node *node, int *max_callers) check_callers (cgraph_node *node, int *growth, int *n, int offline,
int min_size, struct cgraph_edge *known_edge)
{ {
ipa_ref *ref; ipa_ref *ref;
...@@ -467,70 +484,96 @@ check_callers (cgraph_node *node, int *max_callers) ...@@ -467,70 +484,96 @@ check_callers (cgraph_node *node, int *max_callers)
for (cgraph_edge *e = node->callers; e; e = e->next_caller) for (cgraph_edge *e = node->callers; e; e = e->next_caller)
{ {
(*max_callers)--; edge_growth_cache_entry *entry;
if (!*max_callers
|| cgraph_inline_failed_type (e->inline_failed) == CIF_FINAL_ERROR) if (e == known_edge)
continue;
if (cgraph_inline_failed_type (e->inline_failed) == CIF_FINAL_ERROR)
return true;
if (edge_growth_cache != NULL
&& (entry = edge_growth_cache->get (e)) != NULL
&& entry->size != 0)
*growth += entry->size - (entry->size > 0);
else
{
class ipa_call_summary *es = ipa_call_summaries->get (e);
if (!es)
return true;
*growth += min_size - es->call_stmt_size;
if (--(*n) < 0)
return false;
}
if (*growth > offline)
return true; return true;
} }
if (*n > 0)
FOR_EACH_ALIAS (node, ref) FOR_EACH_ALIAS (node, ref)
if (check_callers (dyn_cast <cgraph_node *> (ref->referring), max_callers)) if (check_callers (dyn_cast <cgraph_node *> (ref->referring), growth, n,
offline, min_size, known_edge))
return true; return true;
return false; return false;
} }
/* Make cheap estimation if growth of NODE is likely positive knowing /* Decide if growth of NODE is positive. This is cheaper than calculating
EDGE_GROWTH of one particular edge. actual growth. If edge growth of KNOWN_EDGE is known
We assume that most of other edges will have similar growth it is passed by EDGE_GROWTH. */
and skip computation if there are too many callers. */
bool bool
growth_likely_positive (struct cgraph_node *node, growth_positive_p (struct cgraph_node *node,
int edge_growth) struct cgraph_edge * known_edge, int edge_growth)
{ {
int max_callers;
struct cgraph_edge *e; struct cgraph_edge *e;
gcc_checking_assert (edge_growth > 0);
ipa_size_summary *s = ipa_size_summaries->get (node);
/* First quickly check if NODE is removable at all. */ /* First quickly check if NODE is removable at all. */
if (DECL_EXTERNAL (node->decl)) int offline = offline_size (node, s);
return true; if (offline <= 0 && known_edge && edge_growth > 0)
if (!node->can_remove_if_no_direct_calls_and_refs_p ()
|| node->address_taken)
return true; return true;
max_callers = ipa_size_summaries->get (node)->size * 4 / edge_growth + 2; int min_size = ipa_fn_summaries->get (node)->min_size;
int n = 10;
int min_growth = known_edge ? edge_growth : 0;
for (e = node->callers; e; e = e->next_caller) for (e = node->callers; e; e = e->next_caller)
{ {
max_callers--; edge_growth_cache_entry *entry;
if (!max_callers
|| cgraph_inline_failed_type (e->inline_failed) == CIF_FINAL_ERROR) if (cgraph_inline_failed_type (e->inline_failed) == CIF_FINAL_ERROR)
return true;
if (e == known_edge)
continue;
if (edge_growth_cache != NULL
&& (entry = edge_growth_cache->get (e)) != NULL
&& entry->size != 0)
min_growth += entry->size - (entry->size > 0);
else
{
class ipa_call_summary *es = ipa_call_summaries->get (e);
if (!es)
return true;
min_growth += min_size - es->call_stmt_size;
if (--n <= 0)
break;
}
if (min_growth > offline)
return true; return true;
} }
ipa_ref *ref; ipa_ref *ref;
if (n > 0)
FOR_EACH_ALIAS (node, ref) FOR_EACH_ALIAS (node, ref)
if (check_callers (dyn_cast <cgraph_node *> (ref->referring), &max_callers)) if (check_callers (dyn_cast <cgraph_node *> (ref->referring),
&min_growth, &n, offline, min_size, known_edge))
return true; return true;
/* Unlike for functions called once, we play unsafe with struct growth_data d = { node, false, false, 0, offline };
COMDATs. We can allow that since we know functions if (node->call_for_symbol_and_aliases (do_estimate_growth_1, &d, true))
in consideration are small (and thus risk is small) and
moreover grow estimates already accounts that COMDAT
functions may or may not disappear when eliminated from
current unit. With good probability making aggressive
choice in all units is going to make overall program
smaller. */
if (DECL_COMDAT (node->decl))
{
if (!node->can_remove_if_no_direct_calls_p ())
return true; return true;
} if (d.self_recursive || d.uninlinable)
else if (!node->will_be_removed_from_program_if_no_direct_calls_p ())
return true; return true;
return (d.growth > offline);
return estimate_growth (node) > 0;
} }
...@@ -883,9 +883,9 @@ want_inline_small_function_p (struct cgraph_edge *e, bool report) ...@@ -883,9 +883,9 @@ want_inline_small_function_p (struct cgraph_edge *e, bool report)
&& !opt_for_fn (e->caller->decl, flag_inline_functions) && !opt_for_fn (e->caller->decl, flag_inline_functions)
&& growth >= PARAM_VALUE (PARAM_MAX_INLINE_INSNS_SMALL)) && growth >= PARAM_VALUE (PARAM_MAX_INLINE_INSNS_SMALL))
{ {
/* growth_likely_positive is expensive, always test it last. */ /* growth_positive_p is expensive, always test it last. */
if (growth >= inline_insns_single (e->caller, false) if (growth >= inline_insns_single (e->caller, false)
|| growth_likely_positive (callee, growth)) || growth_positive_p (callee, e, growth))
{ {
e->inline_failed = CIF_NOT_DECLARED_INLINED; e->inline_failed = CIF_NOT_DECLARED_INLINED;
want_inline = false; want_inline = false;
...@@ -899,9 +899,9 @@ want_inline_small_function_p (struct cgraph_edge *e, bool report) ...@@ -899,9 +899,9 @@ want_inline_small_function_p (struct cgraph_edge *e, bool report)
|| growth >= inline_insns_auto (e->caller, true) || growth >= inline_insns_auto (e->caller, true)
|| !big_speedup_p (e))) || !big_speedup_p (e)))
{ {
/* growth_likely_positive is expensive, always test it last. */ /* growth_positive_p is expensive, always test it last. */
if (growth >= inline_insns_single (e->caller, false) if (growth >= inline_insns_single (e->caller, false)
|| growth_likely_positive (callee, growth)) || growth_positive_p (callee, e, growth))
{ {
if (opt_for_fn (e->caller->decl, optimize) >= 3) if (opt_for_fn (e->caller->decl, optimize) >= 3)
e->inline_failed = CIF_MAX_INLINE_INSNS_AUTO_LIMIT; e->inline_failed = CIF_MAX_INLINE_INSNS_AUTO_LIMIT;
...@@ -913,7 +913,7 @@ want_inline_small_function_p (struct cgraph_edge *e, bool report) ...@@ -913,7 +913,7 @@ want_inline_small_function_p (struct cgraph_edge *e, bool report)
/* If call is cold, do not inline when function body would grow. */ /* If call is cold, do not inline when function body would grow. */
else if (!e->maybe_hot_p () else if (!e->maybe_hot_p ()
&& (growth >= inline_insns_single (e->caller, false) && (growth >= inline_insns_single (e->caller, false)
|| growth_likely_positive (callee, growth))) || growth_positive_p (callee, e, growth)))
{ {
e->inline_failed = CIF_UNLIKELY_CALL; e->inline_failed = CIF_UNLIKELY_CALL;
want_inline = false; want_inline = false;
...@@ -1075,7 +1075,7 @@ want_inline_function_to_all_callers_p (struct cgraph_node *node, bool cold) ...@@ -1075,7 +1075,7 @@ want_inline_function_to_all_callers_p (struct cgraph_node *node, bool cold)
if (!node->call_for_symbol_and_aliases (has_caller_p, NULL, true)) if (!node->call_for_symbol_and_aliases (has_caller_p, NULL, true))
return false; return false;
/* Inlining into all callers would increase size? */ /* Inlining into all callers would increase size? */
if (estimate_growth (node) > 0) if (growth_positive_p (node, NULL, INT_MIN) > 0)
return false; return false;
/* All inlines must be possible. */ /* All inlines must be possible. */
if (node->call_for_symbol_and_aliases (check_callers, &has_hot_call, if (node->call_for_symbol_and_aliases (check_callers, &has_hot_call,
......
...@@ -44,7 +44,7 @@ extern fast_call_summary<edge_growth_cache_entry *, va_heap> *edge_growth_cache; ...@@ -44,7 +44,7 @@ extern fast_call_summary<edge_growth_cache_entry *, va_heap> *edge_growth_cache;
/* In ipa-inline-analysis.c */ /* In ipa-inline-analysis.c */
int estimate_size_after_inlining (struct cgraph_node *, struct cgraph_edge *); int estimate_size_after_inlining (struct cgraph_node *, struct cgraph_edge *);
int estimate_growth (struct cgraph_node *); int estimate_growth (struct cgraph_node *);
bool growth_likely_positive (struct cgraph_node *, int); bool growth_positive_p (struct cgraph_node *, struct cgraph_edge *, int);
int do_estimate_edge_size (struct cgraph_edge *edge); int do_estimate_edge_size (struct cgraph_edge *edge);
sreal do_estimate_edge_time (struct cgraph_edge *edge); sreal do_estimate_edge_time (struct cgraph_edge *edge);
ipa_hints do_estimate_edge_hints (struct cgraph_edge *edge); ipa_hints do_estimate_edge_hints (struct cgraph_edge *edge);
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment