Commit ec77d61f by Jan Hubicka

re PR lto/59468 (ICE on invalid C++ code with LTO in gimple_get_virt_method_for_binfo, at gimple-fold.c:3224)

	PR lto/59468
	* ipa-utils.h (possible_polymorphic_call_targets): Update prototype
	and wrapper.
	* ipa-devirt.c: Include demangle.h
	(odr_violation_reported): New static variable.
	(add_type_duplicate): Update odr_violations.
	(maybe_record_node): Add completep parameter; update it.
	(record_target_from_binfo): Add COMPLETEP parameter;
	update it as needed.
	(possible_polymorphic_call_targets_1): Likewise.
	(struct polymorphic_call_target_d): Add nonconstruction_targets;
	rename FINAL to COMPLETE.
	(record_targets_from_bases): Sanity check we found the binfo;
	fix COMPLETEP updating.
	(possible_polymorphic_call_targets): Add NONCONSTRUCTION_TARGETSP
	parameter; fix computing of COMPLETEP.
	(dump_possible_polymorphic_call_targets): Improve readability of dump;
	demangle names at LTO time.
	(ipa_devirt): Use nonconstruction_targets; Improve dumps.
	* gimple-fold.c (gimple_get_virt_method_for_vtable): Add can_refer
	parameter.
	(gimple_get_virt_method_for_binfo): Likewise.
	* gimple-fold.h (gimple_get_virt_method_for_binfo,
	gimple_get_virt_method_for_vtable): Update prototypes.

	PR lto/59468
	* g++.dg/ipa/devirt-27.C: New testcase.
	* g++.dg/ipa/devirt-26.C: New testcase.

From-SVN: r207702
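The kind of source the patch guards against can be sketched roughly as follows. This is a hypothetical ODR-violating pair of translation units, not the actual PR 59468 reproducer:

  // tu1.C -- hypothetical example only
  struct S { virtual int f (); };
  int S::f () { return 1; }
  int call (S *p) { return p->f (); }

  // tu2.C -- same class name, different virtual layout: an ODR violation
  struct S
  {
    virtual long g (double) { return 0; }
    virtual int f () { return 2; }
  };
  int call (S *);
  S s;
  int main () { return call (&s); }

  // g++ -O2 -flto tu1.C tu2.C
  // Mismatched vtable layouts like this could previously ICE in
  // gimple_get_virt_method_for_binfo; with this patch the violation is
  // recorded (odr_violation_reported) and the possible-target list is
  // treated as incomplete instead of tripping an assert.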
gcc/cp/decl2.c:
@@ -2660,7 +2660,6 @@ import_export_decl (tree decl)
 	  comdat_p = (targetm.cxx.class_data_always_comdat ()
 		      || (CLASSTYPE_KEY_METHOD (type)
 			  && DECL_DECLARED_INLINE_P (CLASSTYPE_KEY_METHOD (type))));
-	  mark_needed (decl);
 	  if (!flag_weak)
 	    {
 	      comdat_p = false;
gcc/gimple-fold.c:
@@ -3170,22 +3170,35 @@ fold_const_aggregate_ref (tree t)
 }

 /* Lookup virtual method with index TOKEN in a virtual table V
-   at OFFSET.  */
+   at OFFSET.
+   Set CAN_REFER if non-NULL to false if method
+   is not referable or if the virtual table is ill-formed (such as rewriten
+   by non-C++ produced symbol). Otherwise just return NULL in that calse.  */

 tree
 gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
 				   tree v,
-				   unsigned HOST_WIDE_INT offset)
+				   unsigned HOST_WIDE_INT offset,
+				   bool *can_refer)
 {
   tree vtable = v, init, fn;
   unsigned HOST_WIDE_INT size;
   unsigned HOST_WIDE_INT elt_size, access_index;
   tree domain_type;

+  if (can_refer)
+    *can_refer = true;
+
   /* First of all double check we have virtual table.  */
   if (TREE_CODE (v) != VAR_DECL
       || !DECL_VIRTUAL_P (v))
-    return NULL_TREE;
+    {
+      gcc_assert (in_lto_p);
+      /* Pass down that we lost track of the target.  */
+      if (can_refer)
+	*can_refer = false;
+      return NULL_TREE;
+    }

   init = ctor_for_folding (v);
@@ -3197,6 +3210,9 @@ gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
   if (init == error_mark_node)
     {
       gcc_assert (in_lto_p);
+      /* Pass down that we lost track of the target.  */
+      if (can_refer)
+	*can_refer = false;
       return NULL_TREE;
     }
   gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
@@ -3247,8 +3263,15 @@ gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
 	 ends up in other partition, because we found devirtualization
 	 possibility too late.  */
       if (!can_refer_decl_in_current_unit_p (fn, vtable))
-	return NULL_TREE;
+	{
+	  if (can_refer)
+	    {
+	      *can_refer = false;
+	      return fn;
+	    }
+	  return NULL_TREE;
+	}
     }

   /* Make sure we create a cgraph node for functions we'll reference.
      They can be non-existent if the reference comes from an entry
@@ -3261,10 +3284,14 @@ gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
    is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
    KNOWN_BINFO carries the binfo describing the true type of
-   OBJ_TYPE_REF_OBJECT(REF).  */
+   OBJ_TYPE_REF_OBJECT(REF).
+   Set CAN_REFER if non-NULL to false if method
+   is not referable or if the virtual table is ill-formed (such as rewriten
+   by non-C++ produced symbol). Otherwise just return NULL in that calse.  */

 tree
-gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo)
+gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
+				  bool *can_refer)
 {
   unsigned HOST_WIDE_INT offset;
   tree v;
@@ -3275,9 +3302,12 @@ gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo)
     return NULL_TREE;

   if (!vtable_pointer_value_to_vtable (v, &v, &offset))
-    return NULL_TREE;
+    {
+      if (can_refer)
+	*can_refer = false;
+      return NULL_TREE;
+    }

-  return gimple_get_virt_method_for_vtable (token, v, offset);
+  return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
 }

 /* Return true iff VAL is a gimple expression that is known to be
gcc/gimple-fold.h:
@@ -36,9 +36,11 @@ extern tree gimple_fold_stmt_to_constant_1 (gimple, tree (*) (tree));
 extern tree gimple_fold_stmt_to_constant (gimple, tree (*) (tree));
 extern tree fold_const_aggregate_ref_1 (tree, tree (*) (tree));
 extern tree fold_const_aggregate_ref (tree);
-extern tree gimple_get_virt_method_for_binfo (HOST_WIDE_INT, tree);
+extern tree gimple_get_virt_method_for_binfo (HOST_WIDE_INT, tree,
+					      bool *can_refer = NULL);
 extern tree gimple_get_virt_method_for_vtable (HOST_WIDE_INT, tree,
-						unsigned HOST_WIDE_INT);
+						unsigned HOST_WIDE_INT,
+						bool *can_refer = NULL);
 extern bool gimple_val_nonnegative_real_p (tree);
 extern tree gimple_fold_indirect_ref (tree);
 extern bool arith_code_with_undefined_signed_overflow (tree_code);
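Taken together, the new interface lets a caller tell "no target known" apart from "target known but not referable from this unit". A minimal caller-side sketch of the convention (the branch bodies are illustrative, not taken from the patch):

  bool can_refer;
  tree target = gimple_get_virt_method_for_binfo (otr_token, binfo, &can_refer);
  if (target && can_refer)
    ;  /* Safe to fold the OBJ_TYPE_REF call to TARGET.  */
  else if (!can_refer)
    ;  /* Target lost or vtable ill-formed: the list of possible targets
	  must be treated as incomplete (this is what maybe_record_node does).  */
  else
    ;  /* No target recorded for this token in the vtable.  */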
gcc/ipa-devirt.c:
@@ -128,6 +128,9 @@ along with GCC; see the file COPYING3.  If not see
 #include "ipa-inline.h"
 #include "diagnostic.h"
 #include "tree-dfa.h"
+#include "demangle.h"
+
+static bool odr_violation_reported = false;

 /* Dummy polymorphic call context.  */
@@ -297,6 +300,7 @@ add_type_duplicate (odr_type val, tree type)
   if (!types_compatible_p (val->type, type))
     {
       merge = false;
+      odr_violation_reported = true;
       if (BINFO_VTABLE (TYPE_BINFO (val->type))
 	  && warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (type)), 0,
 			 "type %qD violates one definition rule ",
@@ -332,6 +336,7 @@ add_type_duplicate (odr_type val, tree type)
       if (base_mismatch)
 	{
 	  merge = false;
+	  odr_violation_reported = true;
 	  if (warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (type)), 0,
 			  "type %qD violates one definition rule ",
@@ -594,17 +599,31 @@ build_type_inheritance_graph (void)
 }

 /* If TARGET has associated node, record it in the NODES array.
-   if TARGET can not be inserted (for example because its body was
-   already removed and there is no way to refer to it), clear COMPLETEP.  */
+   CAN_REFER specify if program can refer to the target directly.
+   if TARGET is unknown (NULL) or it can not be inserted (for example because
+   its body was already removed and there is no way to refer to it), clear
+   COMPLETEP.  */

 static void
 maybe_record_node (vec <cgraph_node *> &nodes,
 		   tree target, pointer_set_t *inserted,
+		   bool can_refer,
 		   bool *completep)
 {
   struct cgraph_node *target_node;
   enum built_in_function fcode;

+  if (!can_refer)
+    {
+      /* The only case when method of anonymous namespace becomes unreferable
+	 is when we completely optimized it out.  */
+      if (flag_ltrans
+	  || !target
+	  || !type_in_anonymous_namespace_p (DECL_CONTEXT (target)))
+	*completep = false;
+      return;
+    }
+
   if (!target
       /* Those are used to mark impossible scenarios.  */
       || (fcode = DECL_FUNCTION_CODE (target))
@@ -649,6 +668,8 @@ maybe_record_node (vec <cgraph_node *> &nodes,
    inserted.
    ANONYMOUS is true if BINFO is part of anonymous namespace.
+
+   Clear COMPLETEP when we hit unreferable target.
  */

 static void
@@ -661,7 +682,8 @@ record_target_from_binfo (vec <cgraph_node *> &nodes,
 			  HOST_WIDE_INT offset,
 			  pointer_set_t *inserted,
 			  pointer_set_t *matched_vtables,
-			  bool anonymous)
+			  bool anonymous,
+			  bool *completep)
 {
   tree type = BINFO_TYPE (binfo);
   int i;
@@ -692,6 +714,11 @@ record_target_from_binfo (vec <cgraph_node *> &nodes,
 	return;
       tree inner_binfo = get_binfo_at_offset (type_binfo,
 					      offset, otr_type);
+      if (!inner_binfo)
+	{
+	  gcc_assert (odr_violation_reported);
+	  return;
+	}
       /* For types in anonymous namespace first check if the respective vtable
 	 is alive. If not, we know the type can't be called.  */
       if (!flag_ltrans && anonymous)
@@ -708,9 +735,11 @@ record_target_from_binfo (vec <cgraph_node *> &nodes,
       gcc_assert (inner_binfo);
       if (!pointer_set_insert (matched_vtables, BINFO_VTABLE (inner_binfo)))
 	{
-	  tree target = gimple_get_virt_method_for_binfo (otr_token, inner_binfo);
-	  if (target)
-	    maybe_record_node (nodes, target, inserted, NULL);
+	  bool can_refer;
+	  tree target = gimple_get_virt_method_for_binfo (otr_token,
							  inner_binfo,
							  &can_refer);
+	  maybe_record_node (nodes, target, inserted, can_refer, completep);
 	}
       return;
     }
@@ -722,7 +751,7 @@ record_target_from_binfo (vec <cgraph_node *> &nodes,
       record_target_from_binfo (nodes, base_binfo, otr_type,
 				type_binfos,
 				otr_token, outer_type, offset, inserted,
-				matched_vtables, anonymous);
+				matched_vtables, anonymous, completep);
   if (BINFO_VTABLE (binfo))
     type_binfos.pop ();
 }
@@ -730,7 +759,8 @@ record_target_from_binfo (vec <cgraph_node *> &nodes,
 /* Lookup virtual methods matching OTR_TYPE (with OFFSET and OTR_TOKEN)
    of TYPE, insert them to NODES, recurse into derived nodes.
    INSERTED is used to avoid duplicate insertions of methods into NODES.
-   MATCHED_VTABLES are used to avoid duplicate walking vtables.  */
+   MATCHED_VTABLES are used to avoid duplicate walking vtables.
+   Clear COMPLETEP if unreferable target is found.  */

 static void
 possible_polymorphic_call_targets_1 (vec <cgraph_node *> &nodes,
@@ -740,7 +770,8 @@ possible_polymorphic_call_targets_1 (vec <cgraph_node *> &nodes,
 				     odr_type type,
 				     HOST_WIDE_INT otr_token,
 				     tree outer_type,
-				     HOST_WIDE_INT offset)
+				     HOST_WIDE_INT offset,
+				     bool *completep)
 {
   tree binfo = TYPE_BINFO (type->type);
   unsigned int i;
@@ -749,14 +780,14 @@ possible_polymorphic_call_targets_1 (vec <cgraph_node *> &nodes,
   record_target_from_binfo (nodes, binfo, otr_type, type_binfos, otr_token,
 			    outer_type, offset,
 			    inserted, matched_vtables,
-			    type->anonymous_namespace);
+			    type->anonymous_namespace, completep);
   type_binfos.release ();
   for (i = 0; i < type->derived_types.length (); i++)
     possible_polymorphic_call_targets_1 (nodes, inserted,
 					 matched_vtables,
 					 otr_type,
 					 type->derived_types[i],
-					 otr_token, outer_type, offset);
+					 otr_token, outer_type, offset, completep);
 }

 /* Cache of queries for polymorphic call targets.
@@ -771,7 +802,8 @@ struct polymorphic_call_target_d
   ipa_polymorphic_call_context context;
   odr_type type;
   vec <cgraph_node *> targets;
-  bool final;
+  int nonconstruction_targets;
+  bool complete;
 };

 /* Polymorphic call target cache helpers.  */
@@ -1282,7 +1314,7 @@ record_targets_from_bases (tree otr_type,
 			   HOST_WIDE_INT otr_token,
 			   tree outer_type,
 			   HOST_WIDE_INT offset,
-			   vec <cgraph_node *> nodes,
+			   vec <cgraph_node *> &nodes,
 			   pointer_set_t *inserted,
 			   pointer_set_t *matched_vtables,
 			   bool *completep)
@@ -1303,7 +1335,9 @@ record_targets_from_bases (tree otr_type,
 	  pos = int_bit_position (fld);
 	  size = tree_to_shwi (DECL_SIZE (fld));
-	  if (pos <= offset && (pos + size) > offset)
+	  if (pos <= offset && (pos + size) > offset
+	      /* Do not get confused by zero sized bases.  */
+	      && polymorphic_type_binfo_p (TYPE_BINFO (TREE_TYPE (fld))))
 	    break;
 	}
       /* Within a class type we should always find correcponding fields.  */
@@ -1317,16 +1351,19 @@ record_targets_from_bases (tree otr_type,
       base_binfo = get_binfo_at_offset (TYPE_BINFO (outer_type),
 					offset, otr_type);
+      if (!base_binfo)
+	{
+	  gcc_assert (odr_violation_reported);
+	  return;
+	}
       gcc_assert (base_binfo);
       if (!pointer_set_insert (matched_vtables, BINFO_VTABLE (base_binfo)))
 	{
-	  tree target = gimple_get_virt_method_for_binfo (otr_token, base_binfo);
-	  if (target)
-	    maybe_record_node (nodes, target, inserted, completep);
-	  /* The only way method in anonymous namespace can become unreferable
-	     is that it has been fully optimized out.  */
-	  else if (flag_ltrans || !type_in_anonymous_namespace_p (outer_type))
-	    *completep = false;
+	  bool can_refer;
+	  tree target = gimple_get_virt_method_for_binfo (otr_token,
							  base_binfo,
							  &can_refer);
+	  maybe_record_node (nodes, target, inserted, can_refer, completep);
 	  pointer_set_insert (matched_vtables, BINFO_VTABLE (base_binfo));
 	}
     }
@@ -1357,6 +1394,10 @@ devirt_variable_node_removal_hook (varpool_node *n,
    in the target cache.  If user needs to visit every target list
    just once, it can memoize them.

+   NONCONSTRUCTION_TARGETS specify number of targets with asumption that
+   the type is not in the construction.  Those targets appear first in the
+   vector returned.
+
    Returned vector is placed into cache.  It is NOT caller's responsibility
    to free it.  The vector can be freed on cgraph_remove_node call if
    the particular node is a virtual function present in the cache.  */
@@ -1366,7 +1407,8 @@ possible_polymorphic_call_targets (tree otr_type,
 				   HOST_WIDE_INT otr_token,
 				   ipa_polymorphic_call_context context,
 				   bool *completep,
-				   void **cache_token)
+				   void **cache_token,
+				   int *nonconstruction_targetsp)
 {
   static struct cgraph_node_hook_list *node_removal_hook_holder;
   pointer_set_t *inserted;
@@ -1377,12 +1419,15 @@ possible_polymorphic_call_targets (tree otr_type,
   polymorphic_call_target_d **slot;
   unsigned int i;
   tree binfo, target;
-  bool final;
+  bool complete;
+  bool can_refer;

   if (!odr_hash.is_created ())
     {
       if (completep)
 	*completep = false;
+      if (nonconstruction_targetsp)
+	*nonconstruction_targetsp = 0;
       return nodes;
     }
@@ -1406,7 +1451,7 @@ possible_polymorphic_call_targets (tree otr_type,
   /* If outer and inner type match, there are no bases to see.  */
   if (type == outer_type)
     context.maybe_in_construction = false;
-  /* If the type is final, there are no derivations.  */
+  /* If the type is complete, there are no derivations.  */
   if (TYPE_FINAL_P (outer_type->type))
     context.maybe_derived_type = false;
@@ -1434,11 +1479,13 @@ possible_polymorphic_call_targets (tree otr_type,
   if (*slot)
     {
       if (completep)
-	*completep = (*slot)->final;
+	*completep = (*slot)->complete;
+      if (nonconstruction_targetsp)
+	*nonconstruction_targetsp = (*slot)->nonconstruction_targets;
       return (*slot)->targets;
     }

-  final = true;
+  complete = true;

   /* Do actual search.  */
   timevar_push (TV_IPA_VIRTUAL_CALL);
@@ -1455,49 +1502,58 @@ possible_polymorphic_call_targets (tree otr_type,
   /* First see virtual method of type itself.  */
   binfo = get_binfo_at_offset (TYPE_BINFO (outer_type->type),
 			       context.offset, otr_type);
-  target = gimple_get_virt_method_for_binfo (otr_token, binfo);
-  if (target)
+  if (binfo)
+    target = gimple_get_virt_method_for_binfo (otr_token, binfo,
+					       &can_refer);
+  else
     {
-      maybe_record_node (nodes, target, inserted, &final);
+      gcc_assert (odr_violation_reported);
+      target = NULL;
+    }
+
+  maybe_record_node (nodes, target, inserted, can_refer, &complete);

-      /* In the case we get final method, we don't need
+  if (target)
+    {
+      /* In the case we get complete method, we don't need
 	 to walk derivations.  */
       if (DECL_FINAL_P (target))
 	context.maybe_derived_type = false;
     }
-  /* The only way method in anonymous namespace can become unreferable
-     is that it has been fully optimized out.  */
-  else if (flag_ltrans || !type->anonymous_namespace)
-    final = false;
+  else
+    gcc_assert (!complete);
+
   pointer_set_insert (matched_vtables, BINFO_VTABLE (binfo));

-  /* Next walk bases, if asked to.  */
-  if (context.maybe_in_construction)
-    record_targets_from_bases (otr_type, otr_token, outer_type->type,
-			       context.offset, nodes, inserted,
-			       matched_vtables, &final);
-
-  /* Finally walk recursively all derived types.  */
+  /* Next walk recursively all derived types.  */
   if (context.maybe_derived_type)
     {
       /* For anonymous namespace types we can attempt to build full type.
 	 All derivations must be in this unit (unless we see partial unit).  */
       if (!type->anonymous_namespace || flag_ltrans)
-	final = false;
+	complete = false;
       for (i = 0; i < outer_type->derived_types.length(); i++)
 	possible_polymorphic_call_targets_1 (nodes, inserted,
 					     matched_vtables,
 					     otr_type,
 					     outer_type->derived_types[i],
 					     otr_token, outer_type->type,
-					     context.offset);
+					     context.offset, &complete);
     }
+
+  /* Finally walk bases, if asked to.  */
+  (*slot)->nonconstruction_targets = nodes.length();
+  if (context.maybe_in_construction)
+    record_targets_from_bases (otr_type, otr_token, outer_type->type,
+			       context.offset, nodes, inserted,
+			       matched_vtables, &complete);
+
   (*slot)->targets = nodes;
-  (*slot)->final = final;
+  (*slot)->complete = complete;
   if (completep)
-    *completep = final;
+    *completep = complete;
+  if (nonconstruction_targetsp)
+    *nonconstruction_targetsp = (*slot)->nonconstruction_targets;

   pointer_set_destroy (inserted);
   pointer_set_destroy (matched_vtables);
@@ -1517,28 +1573,46 @@ dump_possible_polymorphic_call_targets (FILE *f,
   bool final;
   odr_type type = get_odr_type (otr_type, false);
   unsigned int i;
+  int nonconstruction;

   if (!type)
     return;
   targets = possible_polymorphic_call_targets (otr_type, otr_token,
 					       ctx,
-					       &final);
+					       &final, NULL, &nonconstruction);
   fprintf (f, " Targets of polymorphic call of type %i:", type->id);
   print_generic_expr (f, type->type, TDF_SLIM);
-  fprintf (f, " token %i\n"
-	   " Contained in type:",
-	   (int)otr_token);
-  print_generic_expr (f, ctx.outer_type, TDF_SLIM);
-  fprintf (f, " at offset "HOST_WIDE_INT_PRINT_DEC"\n"
-	   " %s%s%s\n ",
-	   ctx.offset,
-	   final ? "This is full list." :
+  fprintf (f, " token %i\n", (int)otr_token);
+  if (ctx.outer_type || ctx.offset)
+    {
+      fprintf (f, " Contained in type:");
+      print_generic_expr (f, ctx.outer_type, TDF_SLIM);
+      fprintf (f, " at offset "HOST_WIDE_INT_PRINT_DEC"\n",
+	       ctx.offset);
+    }
+  fprintf (f, " %s%s%s\n ",
+	   final ? "This is a complete list." :
 	   "This is partial list; extra targets may be defined in other units.",
 	   ctx.maybe_in_construction ? " (base types included)" : "",
 	   ctx.maybe_derived_type ? " (derived types included)" : "");
   for (i = 0; i < targets.length (); i++)
-    fprintf (f, " %s/%i", targets[i]->name (),
-	     targets[i]->order);
+    {
+      char *name = NULL;
+      if (i == (unsigned)nonconstruction)
+	fprintf (f, "\n If the type is in construction,"
+		 " then additional tarets are:\n"
+		 " ");
+      if (in_lto_p)
+	name = cplus_demangle_v3 (targets[i]->asm_name (), 0);
+      fprintf (f, " %s/%i", name ? name : targets[i]->name (), targets[i]->order);
+      if (in_lto_p)
+	free (name);
+      if (!targets[i]->definition)
+	fprintf (f, " (no definition%s)",
+		 DECL_DECLARED_INLINE_P (targets[i]->decl)
+		 ? " inline" : "");
+    }
   fprintf (f, "\n\n");
 }
@@ -1650,9 +1724,10 @@ ipa_devirt (void)
 	  struct cgraph_node *likely_target = NULL;
 	  void *cache_token;
 	  bool final;
+	  int nonconstruction_targets;
 	  vec <cgraph_node *>targets
 	     = possible_polymorphic_call_targets
-		  (e, &final, &cache_token);
+		  (e, &final, &cache_token, &nonconstruction_targets);
 	  unsigned int i;

 	  if (dump_file)
@@ -1664,14 +1739,14 @@ ipa_devirt (void)
 	  if (!cgraph_maybe_hot_edge_p (e))
 	    {
 	      if (dump_file)
-		fprintf (dump_file, "Call is cold\n");
+		fprintf (dump_file, "Call is cold\n\n");
 	      ncold++;
 	      continue;
 	    }
 	  if (e->speculative)
 	    {
 	      if (dump_file)
-		fprintf (dump_file, "Call is aready speculated\n");
+		fprintf (dump_file, "Call is aready speculated\n\n");
 	      nspeculated++;

 	      /* When dumping see if we agree with speculation.  */
@@ -1682,7 +1757,7 @@ ipa_devirt (void)
 				   cache_token))
 	    {
 	      if (dump_file)
-		fprintf (dump_file, "Target list is known to be useless\n");
+		fprintf (dump_file, "Target list is known to be useless\n\n");
 	      nmultiple++;
 	      continue;
 	    }
@@ -1691,10 +1766,13 @@ ipa_devirt (void)
 	      {
 		if (likely_target)
 		  {
-		    likely_target = NULL;
-		    if (dump_file)
-		      fprintf (dump_file, "More than one likely target\n");
-		    nmultiple++;
+		    if (i < (unsigned) nonconstruction_targets)
+		      {
+			likely_target = NULL;
+			if (dump_file)
+			  fprintf (dump_file, "More than one likely target\n\n");
+			nmultiple++;
+		      }
 		    break;
 		  }
 		likely_target = targets[i];
@@ -1714,12 +1792,12 @@ ipa_devirt (void)
 		  if (cgraph_function_or_thunk_node (e2->callee, NULL)
 		      == cgraph_function_or_thunk_node (likely_target, NULL))
 		    {
-		      fprintf (dump_file, "We agree with speculation\n");
+		      fprintf (dump_file, "We agree with speculation\n\n");
 		      nok++;
 		    }
 		  else
 		    {
-		      fprintf (dump_file, "We disagree with speculation\n");
+		      fprintf (dump_file, "We disagree with speculation\n\n");
 		      nwrong++;
 		    }
 		  continue;
@@ -1727,7 +1805,7 @@ ipa_devirt (void)
 	  if (!likely_target->definition)
 	    {
 	      if (dump_file)
-		fprintf (dump_file, "Target is not an definition\n");
+		fprintf (dump_file, "Target is not an definition\n\n");
 	      nnotdefined++;
 	      continue;
 	    }
@@ -1738,7 +1816,7 @@ ipa_devirt (void)
 	  if (DECL_EXTERNAL (likely_target->decl))
 	    {
 	      if (dump_file)
-		fprintf (dump_file, "Target is external\n");
+		fprintf (dump_file, "Target is external\n\n");
 	      nexternal++;
 	      continue;
 	    }
@@ -1747,7 +1825,7 @@ ipa_devirt (void)
 	      && symtab_can_be_discarded (likely_target))
 	    {
 	      if (dump_file)
-		fprintf (dump_file, "Target is overwritable\n");
+		fprintf (dump_file, "Target is overwritable\n\n");
 	      noverwritable++;
 	      continue;
 	    }
@@ -1755,7 +1833,7 @@ ipa_devirt (void)
 	    {
 	      if (dump_file)
 		fprintf (dump_file,
-			 "Speculatively devirtualizing call in %s/%i to %s/%i\n",
+			 "Speculatively devirtualizing call in %s/%i to %s/%i\n\n",
 			 n->name (), n->order,
 			 likely_target->name (),
 			 likely_target->order);
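For reference, the libiberty demangler that the dump code now calls at LTO time behaves as in this stand-alone sketch (the helper name and fallback handling are illustrative, not part of the patch):

  #include <stdio.h>
  #include <stdlib.h>
  #include "demangle.h"	/* libiberty header, as now included by ipa-devirt.c */

  /* cplus_demangle_v3 returns a freshly malloc'd demangled name, or NULL if
     the input is not a valid mangled C++ name; free () accepts the NULL.  */
  static void
  print_symbol (FILE *f, const char *asm_name)
  {
    char *name = cplus_demangle_v3 (asm_name, 0);
    fprintf (f, " %s", name ? name : asm_name);
    free (name);
  }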
gcc/ipa-utils.h:
@@ -76,7 +76,8 @@ vec <cgraph_node *>
 possible_polymorphic_call_targets (tree, HOST_WIDE_INT,
 				   ipa_polymorphic_call_context,
 				   bool *final = NULL,
-				   void **cache_token = NULL);
+				   void **cache_token = NULL,
+				   int *nonconstruction_targets = NULL);
 odr_type get_odr_type (tree, bool insert = false);
 void dump_possible_polymorphic_call_targets (FILE *, tree, HOST_WIDE_INT,
 					     const ipa_polymorphic_call_context &);
@@ -105,7 +106,8 @@ bool vtable_pointer_value_to_vtable (tree, tree *, unsigned HOST_WIDE_INT *);
 inline vec <cgraph_node *>
 possible_polymorphic_call_targets (struct cgraph_edge *e,
 				   bool *final = NULL,
-				   void **cache_token = NULL)
+				   void **cache_token = NULL,
+				   int *nonconstruction_targets = NULL)
 {
   gcc_checking_assert (e->indirect_info->polymorphic);
   ipa_polymorphic_call_context context = {e->indirect_info->offset,
@@ -115,7 +117,8 @@ possible_polymorphic_call_targets (struct cgraph_edge *e,
   return possible_polymorphic_call_targets (e->indirect_info->otr_type,
 					     e->indirect_info->otr_token,
 					     context,
-					     final, cache_token);
+					     final, cache_token,
+					     nonconstruction_targets);
 }

 /* Same as above but taking OBJ_TYPE_REF as an parameter.  */
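A pass might consume the extra count roughly as follows, loosely mirroring the ipa_devirt changes above (the loop body and comments are illustrative):

  bool final;
  void *cache_token;
  int nonconstruction_targets;
  vec <cgraph_node *> targets
    = possible_polymorphic_call_targets (e, &final, &cache_token,
					 &nonconstruction_targets);

  /* The first NONCONSTRUCTION_TARGETS entries are the candidates possible
     when the object is known not to be under construction; entries past that
     index were appended by record_targets_from_bases and only matter while a
     base-class constructor or destructor might still be running.  */
  for (unsigned int i = 0; i < targets.length (); i++)
    if (i < (unsigned) nonconstruction_targets)
      ;	/* Regular candidate.  */
    else
      ;	/* Candidate only during construction/destruction.  */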
First new testcase:

/* { dg-do compile } */
/* { dg-options "-O3 -fdump-ipa-devirt" } */
struct A
{
int a;
virtual int bar(void) {return a;}
};
struct B
{
virtual int foo(void) {return b;}
int b;
};
struct C: A,B
{
virtual int foo(void) {return a;}
};
struct C c;
int test(void)
{
struct C *d=&c;
struct B *b=d;
return d->foo()+b->foo();
}
/* The call to b->foo() is perfectly devirtualizable because C can not be in construction
when &c was used, but we can not analyze that so far. Test that we at least speculate
that type is in the construction. */
/* { dg-final { scan-ipa-dump "Speculatively devirtualizing" "devirt" } } */
/* { dg-final { cleanup-ipa-dump "devirt" } } */
Second new testcase:

/* { dg-do compile } */
/* { dg-options "-O3 -fdump-ipa-devirt -fdump-tree-optimized" } */
struct A
{
int a;
};
struct B
{
__attribute__ ((visibility("default")))
virtual int foo(void) {return 42;}
int b;
};
struct C: A,B
{
__attribute__ ((visibility("hidden")))
virtual int foo(void);
};
struct C c;
int test(void)
{
struct C *d=&c;
struct B *b=d;
return d->foo()+b->foo();
}
/* { dg-final { scan-tree-dump "OBJ_TYPE_REF" "optimized" } } */
/* { dg-final { cleanup-tree-dump "optimized" } } */