Commit 0a387eca by Richard Biener Committed by Richard Biener

lto.c (gimple_types, [...]): Remove old merging code and statistics.

2013-10-14  Richard Biener  <rguenther@suse.de>

	lto/
	* lto.c (gimple_types, type_hash_cache, struct type_pair_d,
	type_pair_cache, lookup_type_pair, struct sccs, next_dfs_num,
	gtc_next_dfs_num, compare_type_names_p, gtc_visit,
	gimple_types_compatible_p_1, gimple_types_compatible_p,
	visit, iterative_hash_name, struct type_hash_pair,
	type_hash_pair_compare, iterative_hash_gimple_type, gimple_type_hash,
	gimple_type_eq, gimple_register_type, num_not_merged_types,
	num_not_merged_types_in_same_scc, num_not_merged_types_trees,
	num_not_merged_types_in_same_scc_trees): Remove old merging code
	and statistics.
	(lto_read_decls): Do not run old merging code in parallel.
	(read_cgraph_and_symbols): Do not init/free old merging
	data structures.
	(print_lto_report_1): Do not report differences of old vs. new
	merging code.

From-SVN: r203533
parent 055416a8
2013-10-14 Richard Biener <rguenther@suse.de>
* lto.c (gimple_types, type_hash_cache, struct type_pair_d,
type_pair_cache, lookup_type_pair, struct sccs, next_dfs_num,
gtc_next_dfs_num, compare_type_names_p, gtc_visit,
gimple_types_compatible_p_1, gimple_types_compatible_p,
visit, iterative_hash_name, struct type_hash_pair,
type_hash_pair_compare, iterative_hash_gimple_type, gimple_type_hash,
gimple_type_eq, gimple_register_type, num_not_merged_types,
num_not_merged_types_in_same_scc, num_not_merged_types_trees,
num_not_merged_types_in_same_scc_trees): Remove old merging code
and statistics.
(lto_read_decls): Do not run old merging code in parallel.
(read_cgraph_and_symbols): Do not init/free old merging
data structures.
(print_lto_report_1): Do not report differences of old vs. new
merging code.
2013-10-14 Richard Biener <rguenther@suse.de>
* lto-lang.c (lto_init): Do not re-init canonical types here.
(lto_register_canonical_types): Move to ...
* lto.c (lto_register_canonical_types): ... here.
......
......@@ -676,1037 +676,6 @@ lto_register_canonical_types (tree node)
}
/* ??? Old hashing and merging code follows, we keep it for statistics
purposes for now. */
/* Global type table. FIXME, it should be possible to re-use some
of the type hashing routines in tree.c (type_hash_canon, type_hash_lookup,
etc), but those assume that types were built with the various
build_*_type routines which is not the case with the streamer. */
static GTY((if_marked ("ggc_marked_p"), param_is (union tree_node)))
htab_t gimple_types;
static GTY((if_marked ("tree_int_map_marked_p"), param_is (struct tree_int_map)))
htab_t type_hash_cache;
static hashval_t gimple_type_hash (const void *);
/* Structure used to maintain a cache of some type pairs compared by
   gimple_types_compatible_p when comparing aggregate types.  There are
   three possible values for SAME_P:
	-2: The pair (T1, T2) has just been inserted in the table.
	 0: T1 and T2 are different types.
	 1: T1 and T2 are the same type.  */

struct type_pair_d
{
  unsigned int uid1;	/* Smaller TYPE_UID of the pair.  */
  unsigned int uid2;	/* Larger TYPE_UID of the pair.  */
  signed char same_p;	/* Comparison state, see table above.  */
};
typedef struct type_pair_d *type_pair_t;

/* Number of slots in the direct-mapped pair cache below; a prime so
   the simple modulo hash spreads entries reasonably.  */
#define GIMPLE_TYPE_PAIR_SIZE 16381

/* Direct-mapped cache of type pairs; a colliding pair simply evicts
   the previous occupant of its slot.  */
struct type_pair_d *type_pair_cache;
/* Look up the pair of types T1 and T2 in the direct-mapped pair
   cache.  If the slot holds a different pair it is evicted and the
   entry is re-initialized with SAME_P set to -2 ("just inserted").  */

static inline type_pair_t
lookup_type_pair (tree t1, tree t2)
{
  unsigned int lo = TYPE_UID (t1);
  unsigned int hi = TYPE_UID (t2);

  /* Canonicalize the pair ordering so (A, B) and (B, A) map to the
     same slot.  */
  if (lo > hi)
    {
      unsigned int tmp = lo;
      lo = hi;
      hi = tmp;
    }
  gcc_checking_assert (lo != hi);

  /* Avoid the function-call overhead of iterative_hash_hashval_t;
     UIDs come from a limited range so a shift-and-add hash is good
     enough here.  */
  unsigned int slot
    = ((((unsigned HOST_WIDE_INT) lo << HOST_BITS_PER_WIDE_INT / 2) + hi)
       % GIMPLE_TYPE_PAIR_SIZE);

  type_pair_t entry = &type_pair_cache[slot];
  if (entry->uid1 == lo && entry->uid2 == hi)
    return entry;

  /* Evict whatever occupied the slot and start a fresh comparison.  */
  entry->uid1 = lo;
  entry->uid2 = hi;
  entry->same_p = -2;
  return entry;
}
/* Per pointer state for the SCC finding.  The on_sccstack flag
   is not strictly required, it is true when there is no hash value
   recorded for the type and false otherwise.  But querying that
   is slower.  */

struct sccs
{
  unsigned int dfsnum;		/* DFS preorder number (Tarjan's SCC).  */
  unsigned int low;		/* Lowest dfsnum reachable from here.  */
  bool on_sccstack;		/* True while this node is on the SCC stack.  */
  union {
    hashval_t hash;		/* Used by the type-hashing walk.  */
    signed char same_p;		/* Used by the type-comparison walk.  */
  } u;
};

/* Next DFS number for the hashing walk (iterative_hash_gimple_type).  */
static unsigned int next_dfs_num;
/* Next DFS number for the comparison walk (gimple_types_compatible_p).  */
static unsigned int gtc_next_dfs_num;
/* Return true if types T1 and T2 have the same name, or both are
   anonymous.  A TYPE_DECL name is reduced to its DECL_NAME so that
   the final test is identifier pointer equality.  */

static bool
compare_type_names_p (tree t1, tree t2)
{
  tree n1 = TYPE_NAME (t1);
  tree n2 = TYPE_NAME (t2);

  /* One named, one anonymous -> different; both anonymous -> same.  */
  if ((n1 != NULL_TREE) != (n2 != NULL_TREE))
    return false;
  if (!n1)
    return true;

  /* Either both should be a TYPE_DECL or both an IDENTIFIER_NODE.  */
  if (TREE_CODE (n1) != TREE_CODE (n2))
    return false;

  if (TREE_CODE (n1) == TYPE_DECL)
    n1 = DECL_NAME (n1);
  gcc_checking_assert (!n1 || TREE_CODE (n1) == IDENTIFIER_NODE);

  if (TREE_CODE (n2) == TYPE_DECL)
    n2 = DECL_NAME (n2);
  gcc_checking_assert (!n2 || TREE_CODE (n2) == IDENTIFIER_NODE);

  /* Identifiers are interned, so pointer equality decides.  */
  return n1 == n2;
}
static bool
gimple_types_compatible_p_1 (tree, tree, type_pair_t,
vec<type_pair_t> *,
struct pointer_map_t *, struct obstack *);
/* DFS visit the edge from the callers type pair with state *STATE to
   the pair T1, T2.  Update the merging status if it is not part of
   the SCC containing the callers pair and return it.
   SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done.
   The cheap structural pre-checks here mirror the ones in
   gimple_types_compatible_p so that trivially-different pairs never
   enter the SCC machinery.  */

static bool
gtc_visit (tree t1, tree t2,
	   struct sccs *state,
	   vec<type_pair_t> *sccstack,
	   struct pointer_map_t *sccstate,
	   struct obstack *sccstate_obstack)
{
  struct sccs *cstate = NULL;
  type_pair_t p;
  void **slot;

  /* Check first for the obvious case of pointer identity.  */
  if (t1 == t2)
    return true;

  /* Check that we have two types to compare.  */
  if (t1 == NULL_TREE || t2 == NULL_TREE)
    return false;

  /* Can't be the same type if the types don't have the same code.  */
  if (TREE_CODE (t1) != TREE_CODE (t2))
    return false;

  /* Can't be the same type if they have different CV qualifiers.  */
  if (TYPE_QUALS (t1) != TYPE_QUALS (t2))
    return false;

  if (TREE_ADDRESSABLE (t1) != TREE_ADDRESSABLE (t2))
    return false;

  /* Void types and nullptr types are always the same.  */
  if (TREE_CODE (t1) == VOID_TYPE
      || TREE_CODE (t1) == NULLPTR_TYPE)
    return true;

  /* Can't be the same type if they have different alignment or mode.  */
  if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2)
      || TYPE_MODE (t1) != TYPE_MODE (t2))
    return false;

  /* Do some simple checks before doing three hashtable queries.  */
  if (INTEGRAL_TYPE_P (t1)
      || SCALAR_FLOAT_TYPE_P (t1)
      || FIXED_POINT_TYPE_P (t1)
      || TREE_CODE (t1) == VECTOR_TYPE
      || TREE_CODE (t1) == COMPLEX_TYPE
      || TREE_CODE (t1) == OFFSET_TYPE
      || POINTER_TYPE_P (t1))
    {
      /* Can't be the same type if they have different sign or precision.  */
      if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2)
	  || TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
	return false;

      if (TREE_CODE (t1) == INTEGER_TYPE
	  && TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2))
	return false;

      /* That's all we need to check for float and fixed-point types.  */
      if (SCALAR_FLOAT_TYPE_P (t1)
	  || FIXED_POINT_TYPE_P (t1))
	return true;

      /* For other types fall through to more complex checks.  */
    }

  /* If the hash values of t1 and t2 are different the types can't
     possibly be the same.  This helps keeping the type-pair hashtable
     small, only tracking comparisons for hash collisions.  */
  if (gimple_type_hash (t1) != gimple_type_hash (t2))
    return false;

  /* Allocate a new cache entry for this comparison.  */
  p = lookup_type_pair (t1, t2);
  if (p->same_p == 0 || p->same_p == 1)
    {
      /* We have already decided whether T1 and T2 are the
	 same, return the cached result.  */
      return p->same_p == 1;
    }

  if ((slot = pointer_map_contains (sccstate, p)) != NULL)
    cstate = (struct sccs *)*slot;
  /* Not yet visited.  DFS recurse.  */
  if (!cstate)
    {
      gimple_types_compatible_p_1 (t1, t2, p,
				   sccstack, sccstate, sccstate_obstack);
      /* The recursion created and registered the state for P.  */
      cstate = (struct sccs *)* pointer_map_contains (sccstate, p);
      state->low = MIN (state->low, cstate->low);
    }

  /* If the type is still on the SCC stack adjust the parents low.  */
  if (cstate->dfsnum < state->dfsnum
      && cstate->on_sccstack)
    state->low = MIN (cstate->dfsnum, state->low);

  /* Return the current lattice value.  We start with an equality
     assumption so types part of a SCC will be optimistically
     treated equal unless proven otherwise.  */
  return cstate->u.same_p;
}
/* Worker for gimple_types_compatible.
   SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done.
   Implements the node-visit of a Tarjan SCC walk over the pair graph:
   the pair P = (T1, T2) starts out optimistically "equal" and is
   demoted to "different" if any structural check or recursive edge
   fails; when P closes an SCC, the final verdict is flushed to every
   pair popped off the stack.  */

static bool
gimple_types_compatible_p_1 (tree t1, tree t2, type_pair_t p,
			     vec<type_pair_t> *sccstack,
			     struct pointer_map_t *sccstate,
			     struct obstack *sccstate_obstack)
{
  struct sccs *state;

  /* Callers must hand us a freshly inserted, undecided pair.  */
  gcc_assert (p->same_p == -2);

  state = XOBNEW (sccstate_obstack, struct sccs);
  *pointer_map_insert (sccstate, p) = state;

  sccstack->safe_push (p);
  state->dfsnum = gtc_next_dfs_num++;
  state->low = state->dfsnum;
  state->on_sccstack = true;
  /* Start with an equality assumption.  As we DFS recurse into child
     SCCs this assumption may get revisited.  */
  state->u.same_p = 1;

  /* The struct tags shall compare equal.  */
  if (!compare_type_names_p (t1, t2))
    goto different_types;

  /* The main variant of both types should compare equal.  */
  if (TYPE_MAIN_VARIANT (t1) != t1
      || TYPE_MAIN_VARIANT (t2) != t2)
    {
      if (!gtc_visit (TYPE_MAIN_VARIANT (t1), TYPE_MAIN_VARIANT (t2),
		      state, sccstack, sccstate, sccstate_obstack))
	goto different_types;
    }

  /* We may not merge typedef types to the same type in different
     contexts.  */
  if (TYPE_NAME (t1)
      && TREE_CODE (TYPE_NAME (t1)) == TYPE_DECL
      && DECL_CONTEXT (TYPE_NAME (t1))
      && TYPE_P (DECL_CONTEXT (TYPE_NAME (t1))))
    {
      if (!gtc_visit (DECL_CONTEXT (TYPE_NAME (t1)),
		      DECL_CONTEXT (TYPE_NAME (t2)),
		      state, sccstack, sccstate, sccstate_obstack))
	goto different_types;
    }

  /* If their attributes are not the same they can't be the same type.  */
  if (!attribute_list_equal (TYPE_ATTRIBUTES (t1), TYPE_ATTRIBUTES (t2)))
    goto different_types;

  /* Do type-specific comparisons.  */
  switch (TREE_CODE (t1))
    {
    case VECTOR_TYPE:
    case COMPLEX_TYPE:
      if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
		      state, sccstack, sccstate, sccstate_obstack))
	goto different_types;
      goto same_types;

    case ARRAY_TYPE:
      /* Array types are the same if the element types are the same and
	 the number of elements are the same.  */
      if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
		      state, sccstack, sccstate, sccstate_obstack)
	  || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
	  || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
	goto different_types;
      else
	{
	  tree i1 = TYPE_DOMAIN (t1);
	  tree i2 = TYPE_DOMAIN (t2);

	  /* For an incomplete external array, the type domain can be
	     NULL_TREE.  Check this condition also.  */
	  if (i1 == NULL_TREE && i2 == NULL_TREE)
	    goto same_types;
	  else if (i1 == NULL_TREE || i2 == NULL_TREE)
	    goto different_types;
	  else
	    {
	      tree min1 = TYPE_MIN_VALUE (i1);
	      tree min2 = TYPE_MIN_VALUE (i2);
	      tree max1 = TYPE_MAX_VALUE (i1);
	      tree max2 = TYPE_MAX_VALUE (i2);

	      /* The minimum/maximum values have to be the same.  */
	      if ((min1 == min2
		   || (min1 && min2
		       && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
			    && TREE_CODE (min2) == PLACEHOLDER_EXPR)
			   || operand_equal_p (min1, min2, 0))))
		  && (max1 == max2
		      || (max1 && max2
			  && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
			       && TREE_CODE (max2) == PLACEHOLDER_EXPR)
			      || operand_equal_p (max1, max2, 0)))))
		goto same_types;
	      else
		goto different_types;
	    }
	}

    case METHOD_TYPE:
      /* Method types should belong to the same class.  */
      if (!gtc_visit (TYPE_METHOD_BASETYPE (t1), TYPE_METHOD_BASETYPE (t2),
		      state, sccstack, sccstate, sccstate_obstack))
	goto different_types;

      /* Fallthru  */

    case FUNCTION_TYPE:
      /* Function types are the same if the return type and arguments types
	 are the same.  */
      if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
		      state, sccstack, sccstate, sccstate_obstack))
	goto different_types;

      if (!comp_type_attributes (t1, t2))
	goto different_types;

      if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
	goto same_types;
      else
	{
	  tree parms1, parms2;

	  for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
	       parms1 && parms2;
	       parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
	    {
	      if (!gtc_visit (TREE_VALUE (parms1), TREE_VALUE (parms2),
			      state, sccstack, sccstate, sccstate_obstack))
		goto different_types;
	    }

	  /* Argument lists of different length cannot match.  */
	  if (parms1 || parms2)
	    goto different_types;

	  goto same_types;
	}

    case OFFSET_TYPE:
      {
	if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
			state, sccstack, sccstate, sccstate_obstack)
	    || !gtc_visit (TYPE_OFFSET_BASETYPE (t1),
			   TYPE_OFFSET_BASETYPE (t2),
			   state, sccstack, sccstate, sccstate_obstack))
	  goto different_types;

	goto same_types;
      }

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      {
	/* If the two pointers have different ref-all attributes,
	   they can't be the same type.  */
	if (TYPE_REF_CAN_ALIAS_ALL (t1) != TYPE_REF_CAN_ALIAS_ALL (t2))
	  goto different_types;

	/* Otherwise, pointer and reference types are the same if the
	   pointed-to types are the same.  */
	if (gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
		       state, sccstack, sccstate, sccstate_obstack))
	  goto same_types;

	goto different_types;
      }

    case INTEGER_TYPE:
    case BOOLEAN_TYPE:
      {
	tree min1 = TYPE_MIN_VALUE (t1);
	tree max1 = TYPE_MAX_VALUE (t1);
	tree min2 = TYPE_MIN_VALUE (t2);
	tree max2 = TYPE_MAX_VALUE (t2);
	bool min_equal_p = false;
	bool max_equal_p = false;

	/* If either type has a minimum value, the other type must
	   have the same.  */
	if (min1 == NULL_TREE && min2 == NULL_TREE)
	  min_equal_p = true;
	else if (min1 && min2 && operand_equal_p (min1, min2, 0))
	  min_equal_p = true;

	/* Likewise, if either type has a maximum value, the other
	   type must have the same.  */
	if (max1 == NULL_TREE && max2 == NULL_TREE)
	  max_equal_p = true;
	else if (max1 && max2 && operand_equal_p (max1, max2, 0))
	  max_equal_p = true;

	if (!min_equal_p || !max_equal_p)
	  goto different_types;

	goto same_types;
      }

    case ENUMERAL_TYPE:
      {
	/* FIXME lto, we cannot check bounds on enumeral types because
	   different front ends will produce different values.
	   In C, enumeral types are integers, while in C++ each element
	   will have its own symbolic value.  We should decide how enums
	   are to be represented in GIMPLE and have each front end lower
	   to that.  */
	tree v1, v2;

	/* For enumeral types, all the values must be the same.  */
	if (TYPE_VALUES (t1) == TYPE_VALUES (t2))
	  goto same_types;

	for (v1 = TYPE_VALUES (t1), v2 = TYPE_VALUES (t2);
	     v1 && v2;
	     v1 = TREE_CHAIN (v1), v2 = TREE_CHAIN (v2))
	  {
	    tree c1 = TREE_VALUE (v1);
	    tree c2 = TREE_VALUE (v2);

	    /* Enumerators may be wrapped in CONST_DECLs; compare the
	       underlying constant values.  */
	    if (TREE_CODE (c1) == CONST_DECL)
	      c1 = DECL_INITIAL (c1);

	    if (TREE_CODE (c2) == CONST_DECL)
	      c2 = DECL_INITIAL (c2);

	    if (tree_int_cst_equal (c1, c2) != 1)
	      goto different_types;

	    if (TREE_PURPOSE (v1) != TREE_PURPOSE (v2))
	      goto different_types;
	  }

	/* If one enumeration has more values than the other, they
	   are not the same.  */
	if (v1 || v2)
	  goto different_types;

	goto same_types;
      }

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	tree f1, f2;

	/* For aggregate types, all the fields must be the same.  */
	for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
	     f1 && f2;
	     f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
	  {
	    /* Different field kinds are not compatible.  */
	    if (TREE_CODE (f1) != TREE_CODE (f2))
	      goto different_types;

	    /* Field decls must have the same name and offset.  */
	    if (TREE_CODE (f1) == FIELD_DECL
		&& (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
		    || !gimple_compare_field_offset (f1, f2)))
	      goto different_types;

	    /* All entities should have the same name and type.  */
	    if (DECL_NAME (f1) != DECL_NAME (f2)
		|| !gtc_visit (TREE_TYPE (f1), TREE_TYPE (f2),
			       state, sccstack, sccstate, sccstate_obstack))
	      goto different_types;
	  }

	/* If one aggregate has more fields than the other, they
	   are not the same.  */
	if (f1 || f2)
	  goto different_types;

	goto same_types;
      }

    default:
      gcc_unreachable ();
    }

  /* Common exit path for types that are not compatible.  */
different_types:
  state->u.same_p = 0;
  goto pop;

  /* Common exit path for types that are compatible.  */
same_types:
  gcc_assert (state->u.same_p == 1);

pop:
  if (state->low == state->dfsnum)
    {
      type_pair_t x;

      /* Pop off the SCC and set its cache values to the final
	 comparison result.  */
      do
	{
	  struct sccs *cstate;
	  x = sccstack->pop ();
	  cstate = (struct sccs *)*pointer_map_contains (sccstate, x);
	  cstate->on_sccstack = false;
	  x->same_p = state->u.same_p;
	}
      while (x != p);
    }

  return state->u.same_p;
}
/* Return true iff T1 and T2 are structurally identical.  (The old
   FOR_MERGING_P distinction between incomplete and complete types has
   been removed; incomplete and complete variants are considered
   compatible here.)  Performs cheap structural pre-checks first and
   only falls back to the SCC-based deep comparison on hash
   collisions.  */

static bool
gimple_types_compatible_p (tree t1, tree t2)
{
  vec<type_pair_t> sccstack = vNULL;
  struct pointer_map_t *sccstate;
  struct obstack sccstate_obstack;
  type_pair_t p = NULL;
  bool res;

  /* Before starting to set up the SCC machinery handle simple cases.  */

  /* Check first for the obvious case of pointer identity.  */
  if (t1 == t2)
    return true;

  /* Check that we have two types to compare.  */
  if (t1 == NULL_TREE || t2 == NULL_TREE)
    return false;

  /* Can't be the same type if the types don't have the same code.  */
  if (TREE_CODE (t1) != TREE_CODE (t2))
    return false;

  /* Can't be the same type if they have different CV qualifiers.  */
  if (TYPE_QUALS (t1) != TYPE_QUALS (t2))
    return false;

  if (TREE_ADDRESSABLE (t1) != TREE_ADDRESSABLE (t2))
    return false;

  /* Void types and nullptr types are always the same.  */
  if (TREE_CODE (t1) == VOID_TYPE
      || TREE_CODE (t1) == NULLPTR_TYPE)
    return true;

  /* Can't be the same type if they have different alignment or mode.  */
  if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2)
      || TYPE_MODE (t1) != TYPE_MODE (t2))
    return false;

  /* Do some simple checks before doing three hashtable queries.  */
  if (INTEGRAL_TYPE_P (t1)
      || SCALAR_FLOAT_TYPE_P (t1)
      || FIXED_POINT_TYPE_P (t1)
      || TREE_CODE (t1) == VECTOR_TYPE
      || TREE_CODE (t1) == COMPLEX_TYPE
      || TREE_CODE (t1) == OFFSET_TYPE
      || POINTER_TYPE_P (t1))
    {
      /* Can't be the same type if they have different sign or precision.  */
      if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2)
	  || TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
	return false;

      if (TREE_CODE (t1) == INTEGER_TYPE
	  && TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2))
	return false;

      /* That's all we need to check for float and fixed-point types.  */
      if (SCALAR_FLOAT_TYPE_P (t1)
	  || FIXED_POINT_TYPE_P (t1))
	return true;

      /* For other types fall through to more complex checks.  */
    }

  /* If the hash values of t1 and t2 are different the types can't
     possibly be the same.  This helps keeping the type-pair hashtable
     small, only tracking comparisons for hash collisions.  */
  if (gimple_type_hash (t1) != gimple_type_hash (t2))
    return false;

  /* If we've visited this type pair before (in the case of aggregates
     with self-referential types), and we made a decision, return it.  */
  p = lookup_type_pair (t1, t2);
  if (p->same_p == 0 || p->same_p == 1)
    {
      /* We have already decided whether T1 and T2 are the
	 same, return the cached result.  */
      return p->same_p == 1;
    }

  /* Now set up the SCC machinery for the comparison.  */
  gtc_next_dfs_num = 1;
  sccstate = pointer_map_create ();
  gcc_obstack_init (&sccstate_obstack);
  res = gimple_types_compatible_p_1 (t1, t2, p,
				     &sccstack, sccstate, &sccstate_obstack);
  sccstack.release ();
  pointer_map_destroy (sccstate);
  obstack_free (&sccstate_obstack, NULL);

  return res;
}
static hashval_t
iterative_hash_gimple_type (tree, hashval_t, vec<tree> *,
struct pointer_map_t *, struct obstack *);
/* DFS visit the edge from the callers type with state *STATE to T.
   Update the callers type hash V with the hash for T if it is not part
   of the SCC containing the callers type and return it.
   SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done.  */

static hashval_t
visit (tree t, struct sccs *state, hashval_t v,
       vec<tree> *sccstack,
       struct pointer_map_t *sccstate,
       struct obstack *sccstate_obstack)
{
  struct sccs *cstate = NULL;
  struct tree_int_map m;
  void **slot;

  /* If there is a hash value recorded for this type then it can't
     possibly be part of our parent SCC.  Simply mix in its hash.  */
  m.base.from = t;
  if ((slot = htab_find_slot (type_hash_cache, &m, NO_INSERT))
      && *slot)
    return iterative_hash_hashval_t (((struct tree_int_map *) *slot)->to, v);

  if ((slot = pointer_map_contains (sccstate, t)) != NULL)
    cstate = (struct sccs *)*slot;
  if (!cstate)
    {
      hashval_t tem;
      /* Not yet visited.  DFS recurse.  */
      tem = iterative_hash_gimple_type (t, v,
					sccstack, sccstate, sccstate_obstack);
      /* The recursion registered the state for T; fetch it.  (A second
	 redundant NULL check of CSTATE was removed here -- CSTATE is
	 necessarily NULL on this path.)  */
      cstate = (struct sccs *)* pointer_map_contains (sccstate, t);
      state->low = MIN (state->low, cstate->low);
      /* If the type is no longer on the SCC stack and thus is not part
	 of the parents SCC mix in its hash value.  Otherwise we will
	 ignore the type for hashing purposes and return the unaltered
	 hash value.  */
      if (!cstate->on_sccstack)
	return tem;
    }
  if (cstate->dfsnum < state->dfsnum
      && cstate->on_sccstack)
    state->low = MIN (cstate->dfsnum, state->low);

  /* We are part of our parents SCC, skip this type during hashing
     and return the unaltered hash value.  */
  return v;
}
/* Mix NAME into the running hash value V and return the result.  A
   NULL name contributes nothing; a TYPE_DECL contributes its
   TREE_CODE plus its DECL_NAME identifier (if any); a bare identifier
   contributes its code and interned hash value.  */

static hashval_t
iterative_hash_name (tree name, hashval_t v)
{
  if (name == NULL_TREE)
    return v;

  v = iterative_hash_hashval_t (TREE_CODE (name), v);

  /* Reduce a TYPE_DECL to the identifier it declares.  */
  tree id = TREE_CODE (name) == TYPE_DECL ? DECL_NAME (name) : name;
  if (id == NULL_TREE)
    return v;

  gcc_assert (TREE_CODE (id) == IDENTIFIER_NODE);
  return iterative_hash_object (IDENTIFIER_HASH_VALUE (id), v);
}
/* A type, hashvalue pair for sorting SCC members so the final hash of
   an SCC is independent of visitation order.  */

struct type_hash_pair {
  tree type;		/* Member of the SCC.  */
  hashval_t hash;	/* Its provisional (order-dependent) hash.  */
};
/* qsort comparator ordering type_hash_pair entries by ascending hash
   value.  Entries with equal hashes compare as equal; the sort is
   therefore not stable for them and callers must compensate.  */

static int
type_hash_pair_compare (const void *a_, const void *b_)
{
  const struct type_hash_pair *a = (const struct type_hash_pair *) a_;
  const struct type_hash_pair *b = (const struct type_hash_pair *) b_;

  if (a->hash == b->hash)
    return 0;
  return a->hash < b->hash ? -1 : 1;
}
/* Returning a hash value for gimple type TYPE combined with VAL.
   SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done.

   To hash a type we end up hashing in types that are reachable.
   Through pointers we can end up with cycles which messes up the
   required property that we need to compute the same hash value
   for structurally equivalent types.  To avoid this we have to
   hash all types in a cycle (the SCC) in a commutative way.  The
   easiest way is to not mix in the hashes of the SCC members at
   all.  To make this work we have to delay setting the hash
   values of the SCC until it is complete.  */

static hashval_t
iterative_hash_gimple_type (tree type, hashval_t val,
			    vec<tree> *sccstack,
			    struct pointer_map_t *sccstate,
			    struct obstack *sccstate_obstack)
{
  hashval_t v;
  void **slot;
  struct sccs *state;

  /* Not visited during this DFS walk.  */
  gcc_checking_assert (!pointer_map_contains (sccstate, type));
  state = XOBNEW (sccstate_obstack, struct sccs);
  *pointer_map_insert (sccstate, type) = state;

  sccstack->safe_push (type);
  state->dfsnum = next_dfs_num++;
  state->low = state->dfsnum;
  state->on_sccstack = true;

  /* Combine a few common features of types so that types are grouped into
     smaller sets; when searching for existing matching types to merge,
     only existing types having the same features as the new type will be
     checked.  */
  v = iterative_hash_name (TYPE_NAME (type), 0);
  if (TYPE_NAME (type)
      && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
      && DECL_CONTEXT (TYPE_NAME (type))
      && TYPE_P (DECL_CONTEXT (TYPE_NAME (type))))
    v = visit (DECL_CONTEXT (TYPE_NAME (type)), state, v,
	       sccstack, sccstate, sccstate_obstack);

  /* Factor in the variant structure.  */
  if (TYPE_MAIN_VARIANT (type) != type)
    v = visit (TYPE_MAIN_VARIANT (type), state, v,
	       sccstack, sccstate, sccstate_obstack);

  v = iterative_hash_hashval_t (TREE_CODE (type), v);
  v = iterative_hash_hashval_t (TYPE_QUALS (type), v);
  v = iterative_hash_hashval_t (TREE_ADDRESSABLE (type), v);

  /* Do not hash the types size as this will cause differences in
     hash values for the complete vs. the incomplete type variant.  */

  /* Incorporate common features of numerical types.  */
  if (INTEGRAL_TYPE_P (type)
      || SCALAR_FLOAT_TYPE_P (type)
      || FIXED_POINT_TYPE_P (type))
    {
      v = iterative_hash_hashval_t (TYPE_PRECISION (type), v);
      v = iterative_hash_hashval_t (TYPE_MODE (type), v);
      v = iterative_hash_hashval_t (TYPE_UNSIGNED (type), v);
    }

  /* For pointer and reference types, fold in information about the type
     pointed to.  */
  if (POINTER_TYPE_P (type))
    v = visit (TREE_TYPE (type), state, v,
	       sccstack, sccstate, sccstate_obstack);

  /* For integer types hash the types min/max values and the string flag.  */
  if (TREE_CODE (type) == INTEGER_TYPE)
    {
      /* OMP lowering can introduce error_mark_node in place of
	 random local decls in types.  */
      if (TYPE_MIN_VALUE (type) != error_mark_node)
	v = iterative_hash_expr (TYPE_MIN_VALUE (type), v);
      if (TYPE_MAX_VALUE (type) != error_mark_node)
	v = iterative_hash_expr (TYPE_MAX_VALUE (type), v);
      v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
    }

  /* For array types hash the domain and the string flag.  */
  if (TREE_CODE (type) == ARRAY_TYPE && TYPE_DOMAIN (type))
    {
      v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
      v = visit (TYPE_DOMAIN (type), state, v,
		 sccstack, sccstate, sccstate_obstack);
    }

  /* Recurse for aggregates with a single element type.  */
  if (TREE_CODE (type) == ARRAY_TYPE
      || TREE_CODE (type) == COMPLEX_TYPE
      || TREE_CODE (type) == VECTOR_TYPE)
    v = visit (TREE_TYPE (type), state, v,
	       sccstack, sccstate, sccstate_obstack);

  /* Incorporate function return and argument types.  */
  if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
    {
      unsigned na;
      tree p;

      /* For method types also incorporate their parent class.  */
      if (TREE_CODE (type) == METHOD_TYPE)
	v = visit (TYPE_METHOD_BASETYPE (type), state, v,
		   sccstack, sccstate, sccstate_obstack);

      /* Check result and argument types.  */
      v = visit (TREE_TYPE (type), state, v,
		 sccstack, sccstate, sccstate_obstack);
      for (p = TYPE_ARG_TYPES (type), na = 0; p; p = TREE_CHAIN (p))
	{
	  v = visit (TREE_VALUE (p), state, v,
		     sccstack, sccstate, sccstate_obstack);
	  na++;
	}

      v = iterative_hash_hashval_t (na, v);
    }

  if (RECORD_OR_UNION_TYPE_P (type))
    {
      unsigned nf;
      tree f;

      for (f = TYPE_FIELDS (type), nf = 0; f; f = TREE_CHAIN (f))
	{
	  v = iterative_hash_name (DECL_NAME (f), v);
	  v = visit (TREE_TYPE (f), state, v,
		     sccstack, sccstate, sccstate_obstack);
	  nf++;
	}

      v = iterative_hash_hashval_t (nf, v);
    }

  /* Record hash for us.  */
  state->u.hash = v;

  /* See if we found an SCC.  */
  if (state->low == state->dfsnum)
    {
      tree x;
      struct tree_int_map *m;

      /* Pop off the SCC and set its hash values.  */
      x = sccstack->pop ();
      /* Optimize SCC size one.  */
      if (x == type)
	{
	  state->on_sccstack = false;
	  m = ggc_alloc_cleared_tree_int_map ();
	  m->base.from = x;
	  m->to = v;
	  slot = htab_find_slot (type_hash_cache, m, INSERT);
	  gcc_assert (!*slot);
	  *slot = (void *) m;
	}
      else
	{
	  struct sccs *cstate;
	  unsigned first, i, size, j;
	  struct type_hash_pair *pairs;
	  /* Pop off the SCC and build an array of type, hash pairs.  */
	  first = sccstack->length () - 1;
	  while ((*sccstack)[first] != type)
	    --first;
	  size = sccstack->length () - first + 1;
	  pairs = XALLOCAVEC (struct type_hash_pair, size);
	  i = 0;
	  cstate = (struct sccs *)*pointer_map_contains (sccstate, x);
	  cstate->on_sccstack = false;
	  pairs[i].type = x;
	  pairs[i].hash = cstate->u.hash;
	  do
	    {
	      x = sccstack->pop ();
	      cstate = (struct sccs *)*pointer_map_contains (sccstate, x);
	      cstate->on_sccstack = false;
	      ++i;
	      pairs[i].type = x;
	      pairs[i].hash = cstate->u.hash;
	    }
	  while (x != type);
	  gcc_assert (i + 1 == size);
	  /* Sort the arrays of type, hash pairs so that when we mix in
	     all members of the SCC the hash value becomes independent on
	     the order we visited the SCC.  Disregard hashes equal to
	     the hash of the type we mix into because we cannot guarantee
	     a stable sort for those across different TUs.  */
	  qsort (pairs, size, sizeof (struct type_hash_pair),
		 type_hash_pair_compare);
	  for (i = 0; i < size; ++i)
	    {
	      hashval_t hash;
	      m = ggc_alloc_cleared_tree_int_map ();
	      m->base.from = pairs[i].type;
	      hash = pairs[i].hash;
	      /* Skip same hashes.  */
	      for (j = i + 1; j < size && pairs[j].hash == pairs[i].hash; ++j)
		;
	      for (; j < size; ++j)
		hash = iterative_hash_hashval_t (pairs[j].hash, hash);
	      for (j = 0; pairs[j].hash != pairs[i].hash; ++j)
		hash = iterative_hash_hashval_t (pairs[j].hash, hash);
	      m->to = hash;
	      if (pairs[i].type == type)
		v = hash;
	      slot = htab_find_slot (type_hash_cache, m, INSERT);
	      gcc_assert (!*slot);
	      *slot = (void *) m;
	    }
	}
    }

  return iterative_hash_hashval_t (v, val);
}
/* Returns a hash value for P (assumed to be a type).  The hash value
   is computed using some distinguishing features of the type.  Note
   that we cannot use pointer hashing here as we may be dealing with
   two distinct instances of the same type.

   This function should produce the same hash value for two compatible
   types according to gimple_types_compatible_p.  */

static hashval_t
gimple_type_hash (const void *p)
{
  const_tree t = (const_tree) p;
  vec<tree> sccstack = vNULL;
  struct pointer_map_t *sccstate;
  struct obstack sccstate_obstack;
  hashval_t val;
  void **slot;
  struct tree_int_map m;

  /* Fast path: return the cached hash if this type was hashed before.  */
  m.base.from = CONST_CAST_TREE (t);
  if ((slot = htab_find_slot (type_hash_cache, &m, NO_INSERT))
      && *slot)
    return iterative_hash_hashval_t (((struct tree_int_map *) *slot)->to, 0);

  /* Perform a DFS walk and pre-hash all reachable types.  */
  next_dfs_num = 1;
  sccstate = pointer_map_create ();
  gcc_obstack_init (&sccstate_obstack);
  val = iterative_hash_gimple_type (CONST_CAST_TREE (t), 0,
				    &sccstack, sccstate, &sccstate_obstack);
  sccstack.release ();
  pointer_map_destroy (sccstate);
  obstack_free (&sccstate_obstack, NULL);

  return val;
}
/* Returns nonzero if P1 and P2 are equal. */
static int
gimple_type_eq (const void *p1, const void *p2)
{
const_tree t1 = (const_tree) p1;
const_tree t2 = (const_tree) p2;
return gimple_types_compatible_p (CONST_CAST_TREE (t1),
CONST_CAST_TREE (t2));
}
/* Register type T in the global type table gimple_types.  If a
   structurally-equal type was registered earlier, return that
   prevailing type; otherwise record T as the prevailing one and
   return T itself.  */

static tree
gimple_register_type (tree t)
{
  /* See if we already have an equivalent type registered.  */
  void **slot = htab_find_slot (gimple_types, t, INSERT);
  if (*slot != NULL)
    {
      tree prevailing = *(tree *) slot;
      if (prevailing != t)
	return prevailing;
    }

  /* If not, insert it to the cache and the hash.  */
  *slot = (void *) t;
  return t;
}
/* End of old merging code. */
/* Remember trees that contains references to declarations. */
static GTY(()) vec <tree, va_gc> *tree_with_vars;
......@@ -2145,10 +1114,6 @@ static struct obstack tree_scc_hash_obstack;
static unsigned long num_merged_types;
static unsigned long num_prevailing_types;
static unsigned long num_not_merged_types;
static unsigned long num_not_merged_types_in_same_scc;
static unsigned long num_not_merged_types_trees;
static unsigned long num_not_merged_types_in_same_scc_trees;
static unsigned long num_type_scc_trees;
static unsigned long total_scc_size;
static unsigned long num_sccs_read;
......@@ -2870,38 +1835,10 @@ lto_read_decls (struct lto_file_decl_data *decl_data, const void *data,
/* Do remaining fixup tasks for prevailing nodes. */
bool seen_type = false;
bool not_merged_type_same_scc = false;
bool not_merged_type_not_same_scc = false;
for (unsigned i = 0; i < len; ++i)
{
tree t = streamer_tree_cache_get_tree (data_in->reader_cache,
from + i);
/* For statistics, see if the old code would have merged
the type. */
if (TYPE_P (t)
&& (flag_lto_report || (flag_wpa && flag_lto_report_wpa)))
{
tree newt = gimple_register_type (t);
if (newt != t)
{
num_not_merged_types++;
unsigned j;
/* Check if we can never merge the types because
they are in the same SCC and thus the old
code was broken. */
for (j = 0; j < len; ++j)
if (i != j
&& streamer_tree_cache_get_tree
(data_in->reader_cache, from + j) == newt)
{
num_not_merged_types_in_same_scc++;
not_merged_type_same_scc = true;
break;
}
if (j == len)
not_merged_type_not_same_scc = true;
}
}
/* Reconstruct the type variant and pointer-to/reference-to
chains. */
if (TYPE_P (t))
......@@ -2938,13 +1875,6 @@ lto_read_decls (struct lto_file_decl_data *decl_data, const void *data,
vec_safe_push (tree_with_vars, t);
}
}
if (not_merged_type_same_scc)
{
num_not_merged_types_in_same_scc_trees += len;
num_not_merged_types_trees += len;
}
else if (not_merged_type_not_same_scc)
num_not_merged_types_trees += len;
if (seen_type)
num_type_scc_trees += len;
}
......@@ -3823,10 +2753,6 @@ read_cgraph_and_symbols (unsigned nfiles, const char **fnames)
tree_int_map_eq, NULL);
gimple_canonical_types = htab_create_ggc (16381, gimple_canonical_type_hash,
gimple_canonical_type_eq, 0);
type_hash_cache = htab_create_ggc (512, tree_int_map_hash,
tree_int_map_eq, NULL);
type_pair_cache = XCNEWVEC (struct type_pair_d, GIMPLE_TYPE_PAIR_SIZE);
gimple_types = htab_create_ggc (16381, gimple_type_hash, gimple_type_eq, 0);
gcc_obstack_init (&tree_scc_hash_obstack);
tree_scc_hash.create (4096);
......@@ -3887,12 +2813,6 @@ read_cgraph_and_symbols (unsigned nfiles, const char **fnames)
print_lto_report_1 ();
/* Free gimple type merging datastructures. */
htab_delete (gimple_types);
gimple_types = NULL;
htab_delete (type_hash_cache);
type_hash_cache = NULL;
free (type_pair_cache);
type_pair_cache = NULL;
tree_scc_hash.dispose ();
obstack_free (&tree_scc_hash_obstack, NULL);
htab_delete (gimple_canonical_types);
......@@ -4096,12 +3016,6 @@ print_lto_report_1 (void)
fprintf (stderr, "[%s] Merged %lu types\n", pfx, num_merged_types);
fprintf (stderr, "[%s] %lu types prevailed (%lu associated trees)\n",
pfx, num_prevailing_types, num_type_scc_trees);
fprintf (stderr, "[%s] Old merging code merges an additional %lu types"
" of which %lu are in the same SCC with their "
"prevailing variant (%lu and %lu associated trees)\n",
pfx, num_not_merged_types, num_not_merged_types_in_same_scc,
num_not_merged_types_trees,
num_not_merged_types_in_same_scc_trees);
fprintf (stderr, "[%s] GIMPLE canonical type table: size %ld, "
"%ld elements, %ld searches, %ld collisions (ratio: %f)\n", pfx,
(long) htab_size (gimple_canonical_types),
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment