Commit 842d4ee2 (2012-05-06) by Eric Botcazou <ebotcazou@adacore.com>, committed by Eric Botcazou

gigi.h (make_packable_type): Declare.

	* gcc-interface/gigi.h (make_packable_type): Declare.
	(make_type_from_size): Likewise.
	(relate_alias_sets): Likewise.
	(maybe_pad_type): Adjust.
	(init_gnat_to_gnu): Delete.
	(destroy_gnat_to_gnu): Likewise.
	(init_dummy_type): Likewise.
	(destroy_dummy_type): Likewise.
	(init_gnat_utils): Declare.
	(destroy_gnat_utils): Likewise.
	(ceil_pow2): New inline function.
	* gcc-interface/decl.c (gnat_to_gnu_entity): Use ceil_pow2.
	<object>: Pass True for the final processing of alignment and size.
	<E_Subprogram_Type>: Only create the TYPE_DECL for a padded return
	type if necessary.
	(round_up_to_align): Delete.
	(ceil_alignment): Likewise.
	(relate_alias_sets): Move to...
	(make_aligning_type): Likewise.
	(make_packable_type): Likewise.
	(maybe_pad_type): Likewise.
	(make_type_from_size): Likewise.
	* gcc-interface/utils.c (MAX_BITS_PER_WORD): Delete.
	(struct pad_type_hash): New type.
	(pad_type_hash_table): New static variable.
	(init_gnat_to_gnu): Merge into...
	(init_dummy_type): Likewise.
	(init_gnat_utils): ...this.  New function.
	(destroy_gnat_to_gnu): Merge into...
	(destroy_dummy_type): Likewise.
	(destroy_gnat_utils): ...this.  New function.
	(pad_type_hash_marked_p): New function.
	(pad_type_hash_hash): Likewise.
	(pad_type_hash_eq): Likewise.
	(relate_alias_sets): ...here.
	(make_aligning_type): Likewise.
	(make_packable_type): Likewise.
	(maybe_pad_type): Likewise.  Change same_rm_size parameter into
	set_rm_size; do not set TYPE_ADA_SIZE if it is false.  Do not set
	null as Ada size.  Do not set TYPE_VOLATILE on the padded type.  If it
	is complete and has constant size, canonicalize it.  Bail out earlier
	if a warning need not be issued.
	(make_type_from_size): Likewise.
	<INTEGER_TYPE>: Bail out if the size is too large.
	(gnat_types_compatible_p): Do not deal with padded types.
	(convert): Compare main variants for padded types.
	* gcc-interface/trans.c (gigi): Call {init|destroy}_gnat_utils.
	(gnat_to_gnu): Do not convert at the end for a call to a function that
	returns an unconstrained type with default discriminant.
	(Attribute_to_gnu) <Attr_Size>: Simplify handling of padded objects.
	* gcc-interface/utils2.c (build_binary_op) <MODIFY_EXPR>: Likewise.
	Do not use the padded type if it is BLKmode and the inner type is
	non-BLKmode.

From-SVN: r187206
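As background for the diff below: the new ceil_pow2 inline function added to gigi.h rounds a positive value up to the next power of two, using the same formula as the ceil_alignment helper it replaces in decl.c. A minimal standalone C sketch of that computation, for illustration only; floor_log2_sketch is a hypothetical stand-in for GCC's floor_log2 and unsigned long stands in for unsigned HOST_WIDE_INT:

#include <assert.h>

/* Position of the highest set bit; illustrative stand-in for GCC's
   floor_log2 (returns -1 for 0).  */
static inline int
floor_log2_sketch (unsigned long x)
{
  int n = -1;
  while (x)
    {
      x >>= 1;
      n++;
    }
  return n;
}

/* Smallest power of two greater than or equal to X, for X >= 1; the same
   formula as the ceil_pow2 function added to gigi.h in this commit.  */
static inline unsigned long
ceil_pow2_sketch (unsigned long x)
{
  return 1UL << (floor_log2_sketch (x - 1) + 1);
}

int
main (void)
{
  assert (ceil_pow2_sketch (1) == 1);   /* already a power of two */
  assert (ceil_pow2_sketch (24) == 32); /* e.g. a 24-bit size rounds up to 32 */
  assert (ceil_pow2_sketch (64) == 64);
  return 0;
}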
@@ -126,15 +126,6 @@ DEF_VEC_ALLOC_O(variant_desc,heap);
static GTY ((if_marked ("tree_int_map_marked_p"),
param_is (struct tree_int_map))) htab_t annotate_value_cache;
enum alias_set_op
{
ALIAS_SET_COPY,
ALIAS_SET_SUBSET,
ALIAS_SET_SUPERSET
};
static void relate_alias_sets (tree, tree, enum alias_set_op);
static bool allocatable_size_p (tree, bool);
static void prepend_one_attribute_to (struct attrib **,
enum attr_type, tree, tree, Node_Id);
@@ -144,7 +135,6 @@ static bool type_has_variable_size (tree);
static tree elaborate_expression_1 (tree, Entity_Id, tree, bool, bool);
static tree elaborate_expression_2 (tree, Entity_Id, tree, bool, bool,
unsigned int);
static tree make_packable_type (tree, bool);
static tree gnat_to_gnu_component_type (Entity_Id, bool, bool);
static tree gnat_to_gnu_param (Entity_Id, Mechanism_Type, Entity_Id, bool,
bool *);
@@ -165,9 +155,7 @@ static VEC(variant_desc,heap) *build_variant_list (tree,
VEC(variant_desc,heap) *);
static tree validate_size (Uint, tree, Entity_Id, enum tree_code, bool, bool);
static void set_rm_size (Uint, tree, Entity_Id);
static tree make_type_from_size (tree, tree, bool);
static unsigned int validate_alignment (Uint, Entity_Id, unsigned int);
static unsigned int ceil_alignment (unsigned HOST_WIDE_INT);
static void check_ok_for_atomic (tree, Entity_Id, bool);
static tree create_field_decl_from (tree, tree, tree, tree, tree,
VEC(subst_pair,heap) *);
@@ -838,7 +826,7 @@ gnat_to_gnu_entity (Entity_Id gnat_entity, tree gnu_expr, int definition)
else if (compare_tree_int (TYPE_SIZE (gnu_type), align_cap) > 0)
align = align_cap;
else
align = ceil_alignment (tree_low_cst (TYPE_SIZE (gnu_type), 1));
align = ceil_pow2 (tree_low_cst (TYPE_SIZE (gnu_type), 1));
/* But make sure not to under-align the object. */
if (align <= TYPE_ALIGN (gnu_type))
@@ -921,8 +909,7 @@ gnat_to_gnu_entity (Entity_Id gnat_entity, tree gnu_expr, int definition)
tree orig_type = gnu_type;
gnu_type = maybe_pad_type (gnu_type, gnu_size, align, gnat_entity,
false, false, definition,
gnu_size ? true : false);
false, false, definition, true);
/* If a padding record was made, declare it now since it will
never be declared otherwise. This is necessary to ensure
@@ -2942,7 +2929,7 @@ gnat_to_gnu_entity (Entity_Id gnat_entity, tree gnu_expr, int definition)
= validate_alignment (Alignment (gnat_entity), gnat_entity, 0);
else if (Is_Atomic (gnat_entity))
TYPE_ALIGN (gnu_type)
= esize >= BITS_PER_WORD ? BITS_PER_WORD : ceil_alignment (esize);
= esize >= BITS_PER_WORD ? BITS_PER_WORD : ceil_pow2 (esize);
/* If a type needs strict alignment, the minimum size will be the
type size instead of the RM size (see validate_size). Cap the
alignment, lest it causes this type size to become too large. */
@@ -4163,6 +4150,8 @@ gnat_to_gnu_entity (Entity_Id gnat_entity, tree gnu_expr, int definition)
mechanism to avoid copying too much data when it returns. */
if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (gnu_return_type)))
{
tree orig_type = gnu_return_type;
gnu_return_type
= maybe_pad_type (gnu_return_type,
max_size (TYPE_SIZE (gnu_return_type),
@@ -4172,8 +4161,11 @@ gnat_to_gnu_entity (Entity_Id gnat_entity, tree gnu_expr, int definition)
/* Declare it now since it will never be declared otherwise.
This is necessary to ensure that its subtrees are properly
marked. */
create_type_decl (TYPE_NAME (gnu_return_type), gnu_return_type,
NULL, true, debug_info_p, gnat_entity);
if (gnu_return_type != orig_type
&& !DECL_P (TYPE_NAME (gnu_return_type)))
create_type_decl (TYPE_NAME (gnu_return_type),
gnu_return_type, NULL, true,
debug_info_p, gnat_entity);
return_by_invisi_ref_p = true;
}
@@ -4700,7 +4692,7 @@ gnat_to_gnu_entity (Entity_Id gnat_entity, tree gnu_expr, int definition)
if (operand_equal_p (TYPE_SIZE (gnu_type), gnu_size, 0)
&& operand_equal_p (rm_size (gnu_type), gnu_size, 0))
gnu_size = 0;
gnu_size = NULL_TREE;
}
/* If the alignment hasn't already been processed and this is
@@ -4763,6 +4755,8 @@ gnat_to_gnu_entity (Entity_Id gnat_entity, tree gnu_expr, int definition)
gnu_entity_name = DECL_NAME (gnu_entity_name);
}
/* Now set the RM size of the type. We cannot do it before padding
because we need to accept arbitrary RM sizes on integral types. */
set_rm_size (RM_Size (gnat_entity), gnu_type, gnat_entity);
/* If we are at global level, GCC will have applied variable_size to
@@ -5843,83 +5837,6 @@ elaborate_entity (Entity_Id gnat_entity)
}
}
/* Relate the alias sets of GNU_NEW_TYPE and GNU_OLD_TYPE according to OP.
If this is a multi-dimensional array type, do this recursively.
OP may be
- ALIAS_SET_COPY: the new set is made a copy of the old one.
- ALIAS_SET_SUPERSET: the new set is made a superset of the old one.
- ALIAS_SET_SUBSET: the new set is made a subset of the old one. */
static void
relate_alias_sets (tree gnu_new_type, tree gnu_old_type, enum alias_set_op op)
{
/* Remove any padding from GNU_OLD_TYPE. It doesn't matter in the case
of a one-dimensional array, since the padding has the same alias set
as the field type, but if it's a multi-dimensional array, we need to
see the inner types. */
while (TREE_CODE (gnu_old_type) == RECORD_TYPE
&& (TYPE_JUSTIFIED_MODULAR_P (gnu_old_type)
|| TYPE_PADDING_P (gnu_old_type)))
gnu_old_type = TREE_TYPE (TYPE_FIELDS (gnu_old_type));
/* Unconstrained array types are deemed incomplete and would thus be given
alias set 0. Retrieve the underlying array type. */
if (TREE_CODE (gnu_old_type) == UNCONSTRAINED_ARRAY_TYPE)
gnu_old_type
= TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_old_type))));
if (TREE_CODE (gnu_new_type) == UNCONSTRAINED_ARRAY_TYPE)
gnu_new_type
= TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_new_type))));
if (TREE_CODE (gnu_new_type) == ARRAY_TYPE
&& TREE_CODE (TREE_TYPE (gnu_new_type)) == ARRAY_TYPE
&& TYPE_MULTI_ARRAY_P (TREE_TYPE (gnu_new_type)))
relate_alias_sets (TREE_TYPE (gnu_new_type), TREE_TYPE (gnu_old_type), op);
switch (op)
{
case ALIAS_SET_COPY:
/* The alias set shouldn't be copied between array types with different
aliasing settings because this can break the aliasing relationship
between the array type and its element type. */
#ifndef ENABLE_CHECKING
if (flag_strict_aliasing)
#endif
gcc_assert (!(TREE_CODE (gnu_new_type) == ARRAY_TYPE
&& TREE_CODE (gnu_old_type) == ARRAY_TYPE
&& TYPE_NONALIASED_COMPONENT (gnu_new_type)
!= TYPE_NONALIASED_COMPONENT (gnu_old_type)));
TYPE_ALIAS_SET (gnu_new_type) = get_alias_set (gnu_old_type);
break;
case ALIAS_SET_SUBSET:
case ALIAS_SET_SUPERSET:
{
alias_set_type old_set = get_alias_set (gnu_old_type);
alias_set_type new_set = get_alias_set (gnu_new_type);
/* Do nothing if the alias sets conflict. This ensures that we
never call record_alias_subset several times for the same pair
or at all for alias set 0. */
if (!alias_sets_conflict_p (old_set, new_set))
{
if (op == ALIAS_SET_SUBSET)
record_alias_subset (old_set, new_set);
else
record_alias_subset (new_set, old_set);
}
}
break;
default:
gcc_unreachable ();
}
record_component_aliases (gnu_new_type);
}
/* Return true if the size represented by GNU_SIZE can be handled by an
allocation. If STATIC_P is true, consider only what can be done with a
static allocation. */
@@ -6211,471 +6128,6 @@ elaborate_expression_2 (tree gnu_expr, Entity_Id gnat_entity, tree gnu_name,
unit_align);
}
/* Create a record type that contains a SIZE bytes long field of TYPE with a
starting bit position so that it is aligned to ALIGN bits, and leaving at
least ROOM bytes free before the field. BASE_ALIGN is the alignment the
record is guaranteed to get. */
tree
make_aligning_type (tree type, unsigned int align, tree size,
unsigned int base_align, int room)
{
/* We will be crafting a record type with one field at a position set to be
the next multiple of ALIGN past record'address + room bytes. We use a
record placeholder to express record'address. */
tree record_type = make_node (RECORD_TYPE);
tree record = build0 (PLACEHOLDER_EXPR, record_type);
tree record_addr_st
= convert (sizetype, build_unary_op (ADDR_EXPR, NULL_TREE, record));
/* The diagram below summarizes the shape of what we manipulate:
<--------- pos ---------->
{ +------------+-------------+-----------------+
record =>{ |############| ... | field (type) |
{ +------------+-------------+-----------------+
|<-- room -->|<- voffset ->|<---- size ----->|
o o
| |
record_addr vblock_addr
Every length is in sizetype bytes there, except "pos" which has to be
set as a bit position in the GCC tree for the record. */
tree room_st = size_int (room);
tree vblock_addr_st = size_binop (PLUS_EXPR, record_addr_st, room_st);
tree voffset_st, pos, field;
tree name = TYPE_NAME (type);
if (TREE_CODE (name) == TYPE_DECL)
name = DECL_NAME (name);
name = concat_name (name, "ALIGN");
TYPE_NAME (record_type) = name;
/* Compute VOFFSET and then POS. The next byte position multiple of some
alignment after some address is obtained by "and"ing the alignment minus
1 with the two's complement of the address. */
voffset_st = size_binop (BIT_AND_EXPR,
fold_build1 (NEGATE_EXPR, sizetype, vblock_addr_st),
size_int ((align / BITS_PER_UNIT) - 1));
/* POS = (ROOM + VOFFSET) * BIT_PER_UNIT, in bitsizetype. */
pos = size_binop (MULT_EXPR,
convert (bitsizetype,
size_binop (PLUS_EXPR, room_st, voffset_st)),
bitsize_unit_node);
/* Craft the GCC record representation. We exceptionally do everything
manually here because 1) our generic circuitry is not quite ready to
handle the complex position/size expressions we are setting up, 2) we
have a strong simplifying factor at hand: we know the maximum possible
value of voffset, and 3) we have to set/reset at least the sizes in
accordance with this maximum value anyway, as we need them to convey
what should be "alloc"ated for this type.
Use -1 as the 'addressable' indication for the field to prevent the
creation of a bitfield. We don't need one, it would have damaging
consequences on the alignment computation, and create_field_decl would
make one without this special argument, for instance because of the
complex position expression. */
field = create_field_decl (get_identifier ("F"), type, record_type, size,
pos, 1, -1);
TYPE_FIELDS (record_type) = field;
TYPE_ALIGN (record_type) = base_align;
TYPE_USER_ALIGN (record_type) = 1;
TYPE_SIZE (record_type)
= size_binop (PLUS_EXPR,
size_binop (MULT_EXPR, convert (bitsizetype, size),
bitsize_unit_node),
bitsize_int (align + room * BITS_PER_UNIT));
TYPE_SIZE_UNIT (record_type)
= size_binop (PLUS_EXPR, size,
size_int (room + align / BITS_PER_UNIT));
SET_TYPE_MODE (record_type, BLKmode);
relate_alias_sets (record_type, type, ALIAS_SET_COPY);
/* Declare it now since it will never be declared otherwise. This is
necessary to ensure that its subtrees are properly marked. */
create_type_decl (name, record_type, NULL, true, false, Empty);
return record_type;
}
/* Return the result of rounding T up to ALIGN. */
static inline unsigned HOST_WIDE_INT
round_up_to_align (unsigned HOST_WIDE_INT t, unsigned int align)
{
t += align - 1;
t /= align;
t *= align;
return t;
}
/* TYPE is a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE that is being used
as the field type of a packed record if IN_RECORD is true, or as the
component type of a packed array if IN_RECORD is false. See if we can
rewrite it either as a type that has a non-BLKmode, which we can pack
tighter in the packed record case, or as a smaller type. If so, return
the new type. If not, return the original type. */
static tree
make_packable_type (tree type, bool in_record)
{
unsigned HOST_WIDE_INT size = tree_low_cst (TYPE_SIZE (type), 1);
unsigned HOST_WIDE_INT new_size;
tree new_type, old_field, field_list = NULL_TREE;
/* No point in doing anything if the size is zero. */
if (size == 0)
return type;
new_type = make_node (TREE_CODE (type));
/* Copy the name and flags from the old type to that of the new.
Note that we rely on the pointer equality created here for
TYPE_NAME to look through conversions in various places. */
TYPE_NAME (new_type) = TYPE_NAME (type);
TYPE_JUSTIFIED_MODULAR_P (new_type) = TYPE_JUSTIFIED_MODULAR_P (type);
TYPE_CONTAINS_TEMPLATE_P (new_type) = TYPE_CONTAINS_TEMPLATE_P (type);
if (TREE_CODE (type) == RECORD_TYPE)
TYPE_PADDING_P (new_type) = TYPE_PADDING_P (type);
/* If we are in a record and have a small size, set the alignment to
try for an integral mode. Otherwise set it to try for a smaller
type with BLKmode. */
if (in_record && size <= MAX_FIXED_MODE_SIZE)
{
TYPE_ALIGN (new_type) = ceil_alignment (size);
new_size = round_up_to_align (size, TYPE_ALIGN (new_type));
}
else
{
unsigned HOST_WIDE_INT align;
/* Do not try to shrink the size if the RM size is not constant. */
if (TYPE_CONTAINS_TEMPLATE_P (type)
|| !host_integerp (TYPE_ADA_SIZE (type), 1))
return type;
/* Round the RM size up to a unit boundary to get the minimal size
for a BLKmode record. Give up if it's already the size. */
new_size = TREE_INT_CST_LOW (TYPE_ADA_SIZE (type));
new_size = round_up_to_align (new_size, BITS_PER_UNIT);
if (new_size == size)
return type;
align = new_size & -new_size;
TYPE_ALIGN (new_type) = MIN (TYPE_ALIGN (type), align);
}
TYPE_USER_ALIGN (new_type) = 1;
/* Now copy the fields, keeping the position and size as we don't want
to change the layout by propagating the packedness downwards. */
for (old_field = TYPE_FIELDS (type); old_field;
old_field = DECL_CHAIN (old_field))
{
tree new_field_type = TREE_TYPE (old_field);
tree new_field, new_size;
if (RECORD_OR_UNION_TYPE_P (new_field_type)
&& !TYPE_FAT_POINTER_P (new_field_type)
&& host_integerp (TYPE_SIZE (new_field_type), 1))
new_field_type = make_packable_type (new_field_type, true);
/* However, for the last field in a not already packed record type
that is of an aggregate type, we need to use the RM size in the
packable version of the record type, see finish_record_type. */
if (!DECL_CHAIN (old_field)
&& !TYPE_PACKED (type)
&& RECORD_OR_UNION_TYPE_P (new_field_type)
&& !TYPE_FAT_POINTER_P (new_field_type)
&& !TYPE_CONTAINS_TEMPLATE_P (new_field_type)
&& TYPE_ADA_SIZE (new_field_type))
new_size = TYPE_ADA_SIZE (new_field_type);
else
new_size = DECL_SIZE (old_field);
new_field
= create_field_decl (DECL_NAME (old_field), new_field_type, new_type,
new_size, bit_position (old_field),
TYPE_PACKED (type),
!DECL_NONADDRESSABLE_P (old_field));
DECL_INTERNAL_P (new_field) = DECL_INTERNAL_P (old_field);
SET_DECL_ORIGINAL_FIELD_TO_FIELD (new_field, old_field);
if (TREE_CODE (new_type) == QUAL_UNION_TYPE)
DECL_QUALIFIER (new_field) = DECL_QUALIFIER (old_field);
DECL_CHAIN (new_field) = field_list;
field_list = new_field;
}
finish_record_type (new_type, nreverse (field_list), 2, false);
relate_alias_sets (new_type, type, ALIAS_SET_COPY);
SET_DECL_PARALLEL_TYPE (TYPE_STUB_DECL (new_type),
DECL_PARALLEL_TYPE (TYPE_STUB_DECL (type)));
/* If this is a padding record, we never want to make the size smaller
than what was specified. For QUAL_UNION_TYPE, also copy the size. */
if (TYPE_IS_PADDING_P (type) || TREE_CODE (type) == QUAL_UNION_TYPE)
{
TYPE_SIZE (new_type) = TYPE_SIZE (type);
TYPE_SIZE_UNIT (new_type) = TYPE_SIZE_UNIT (type);
new_size = size;
}
else
{
TYPE_SIZE (new_type) = bitsize_int (new_size);
TYPE_SIZE_UNIT (new_type)
= size_int ((new_size + BITS_PER_UNIT - 1) / BITS_PER_UNIT);
}
if (!TYPE_CONTAINS_TEMPLATE_P (type))
SET_TYPE_ADA_SIZE (new_type, TYPE_ADA_SIZE (type));
compute_record_mode (new_type);
/* Try harder to get a packable type if necessary, for example
in case the record itself contains a BLKmode field. */
if (in_record && TYPE_MODE (new_type) == BLKmode)
SET_TYPE_MODE (new_type,
mode_for_size_tree (TYPE_SIZE (new_type), MODE_INT, 1));
/* If neither the mode nor the size has shrunk, return the old type. */
if (TYPE_MODE (new_type) == BLKmode && new_size >= size)
return type;
return new_type;
}
/* Ensure that TYPE has SIZE and ALIGN. Make and return a new padded type
if needed. We have already verified that SIZE and TYPE are large enough.
GNAT_ENTITY is used to name the resulting record and to issue a warning.
IS_COMPONENT_TYPE is true if this is being done for the component type
of an array. IS_USER_TYPE is true if we must complete the original type.
DEFINITION is true if this type is being defined. SAME_RM_SIZE is true
if the RM size of the resulting type is to be set to SIZE too; otherwise,
it's set to the RM size of the original type. */
tree
maybe_pad_type (tree type, tree size, unsigned int align,
Entity_Id gnat_entity, bool is_component_type,
bool is_user_type, bool definition, bool same_rm_size)
{
tree orig_rm_size = same_rm_size ? NULL_TREE : rm_size (type);
tree orig_size = TYPE_SIZE (type);
tree record, field;
/* If TYPE is a padded type, see if it agrees with any size and alignment
we were given. If so, return the original type. Otherwise, strip
off the padding, since we will either be returning the inner type
or repadding it. If no size or alignment is specified, use that of
the original padded type. */
if (TYPE_IS_PADDING_P (type))
{
if ((!size
|| operand_equal_p (round_up (size,
MAX (align, TYPE_ALIGN (type))),
round_up (TYPE_SIZE (type),
MAX (align, TYPE_ALIGN (type))),
0))
&& (align == 0 || align == TYPE_ALIGN (type)))
return type;
if (!size)
size = TYPE_SIZE (type);
if (align == 0)
align = TYPE_ALIGN (type);
type = TREE_TYPE (TYPE_FIELDS (type));
orig_size = TYPE_SIZE (type);
}
/* If the size is either not being changed or is being made smaller (which
is not done here and is only valid for bitfields anyway), show the size
isn't changing. Likewise, clear the alignment if it isn't being
changed. Then return if we aren't doing anything. */
if (size
&& (operand_equal_p (size, orig_size, 0)
|| (TREE_CODE (orig_size) == INTEGER_CST
&& tree_int_cst_lt (size, orig_size))))
size = NULL_TREE;
if (align == TYPE_ALIGN (type))
align = 0;
if (align == 0 && !size)
return type;
/* If requested, complete the original type and give it a name. */
if (is_user_type)
create_type_decl (get_entity_name (gnat_entity), type,
NULL, !Comes_From_Source (gnat_entity),
!(TYPE_NAME (type)
&& TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
&& DECL_IGNORED_P (TYPE_NAME (type))),
gnat_entity);
/* We used to modify the record in place in some cases, but that could
generate incorrect debugging information. So make a new record
type and name. */
record = make_node (RECORD_TYPE);
TYPE_PADDING_P (record) = 1;
if (Present (gnat_entity))
TYPE_NAME (record) = create_concat_name (gnat_entity, "PAD");
TYPE_VOLATILE (record)
= Present (gnat_entity) && Treat_As_Volatile (gnat_entity);
TYPE_ALIGN (record) = align;
TYPE_SIZE (record) = size ? size : orig_size;
TYPE_SIZE_UNIT (record)
= convert (sizetype,
size_binop (CEIL_DIV_EXPR, TYPE_SIZE (record),
bitsize_unit_node));
/* If we are changing the alignment and the input type is a record with
BLKmode and a small constant size, try to make a form that has an
integral mode. This might allow the padding record to also have an
integral mode, which will be much more efficient. There is no point
in doing so if a size is specified unless it is also a small constant
size and it is incorrect to do so if we cannot guarantee that the mode
will be naturally aligned since the field must always be addressable.
??? This might not always be a win when done for a stand-alone object:
since the nominal and the effective type of the object will now have
different modes, a VIEW_CONVERT_EXPR will be required for converting
between them and it might be hard to overcome afterwards, including
at the RTL level when the stand-alone object is accessed as a whole. */
if (align != 0
&& RECORD_OR_UNION_TYPE_P (type)
&& TYPE_MODE (type) == BLKmode
&& !TYPE_BY_REFERENCE_P (type)
&& TREE_CODE (orig_size) == INTEGER_CST
&& !TREE_OVERFLOW (orig_size)
&& compare_tree_int (orig_size, MAX_FIXED_MODE_SIZE) <= 0
&& (!size
|| (TREE_CODE (size) == INTEGER_CST
&& compare_tree_int (size, MAX_FIXED_MODE_SIZE) <= 0)))
{
tree packable_type = make_packable_type (type, true);
if (TYPE_MODE (packable_type) != BLKmode
&& align >= TYPE_ALIGN (packable_type))
type = packable_type;
}
/* Now create the field with the original size. */
field = create_field_decl (get_identifier ("F"), type, record, orig_size,
bitsize_zero_node, 0, 1);
DECL_INTERNAL_P (field) = 1;
/* Do not emit debug info until after the auxiliary record is built. */
finish_record_type (record, field, 1, false);
/* Set the same size for its RM size if requested; otherwise reuse
the RM size of the original type. */
SET_TYPE_ADA_SIZE (record, same_rm_size ? size : orig_rm_size);
/* Unless debugging information isn't being written for the input type,
write a record that shows what we are a subtype of and also make a
variable that indicates our size, if still variable. */
if (TREE_CODE (orig_size) != INTEGER_CST
&& TYPE_NAME (record)
&& TYPE_NAME (type)
&& !(TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
&& DECL_IGNORED_P (TYPE_NAME (type))))
{
tree marker = make_node (RECORD_TYPE);
tree name = TYPE_NAME (record);
tree orig_name = TYPE_NAME (type);
if (TREE_CODE (name) == TYPE_DECL)
name = DECL_NAME (name);
if (TREE_CODE (orig_name) == TYPE_DECL)
orig_name = DECL_NAME (orig_name);
TYPE_NAME (marker) = concat_name (name, "XVS");
finish_record_type (marker,
create_field_decl (orig_name,
build_reference_type (type),
marker, NULL_TREE, NULL_TREE,
0, 0),
0, true);
add_parallel_type (record, marker);
if (definition && size && TREE_CODE (size) != INTEGER_CST)
TYPE_SIZE_UNIT (marker)
= create_var_decl (concat_name (name, "XVZ"), NULL_TREE, sizetype,
TYPE_SIZE_UNIT (record), false, false, false,
false, NULL, gnat_entity);
}
rest_of_record_type_compilation (record);
/* If the size was widened explicitly, maybe give a warning. Take the
original size as the maximum size of the input if there was an
unconstrained record involved and round it up to the specified alignment,
if one was specified. But don't do it if we are just annotating types
and the type is tagged, since tagged types aren't fully laid out in this
mode. */
if (CONTAINS_PLACEHOLDER_P (orig_size))
orig_size = max_size (orig_size, true);
if (align)
orig_size = round_up (orig_size, align);
if (Present (gnat_entity)
&& size
&& TREE_CODE (size) != MAX_EXPR
&& TREE_CODE (size) != COND_EXPR
&& !operand_equal_p (size, orig_size, 0)
&& !(TREE_CODE (size) == INTEGER_CST
&& TREE_CODE (orig_size) == INTEGER_CST
&& (TREE_OVERFLOW (size)
|| TREE_OVERFLOW (orig_size)
|| tree_int_cst_lt (size, orig_size)))
&& !(type_annotate_only && Is_Tagged_Type (Etype (gnat_entity))))
{
Node_Id gnat_error_node = Empty;
if (Is_Packed_Array_Type (gnat_entity))
gnat_entity = Original_Array_Type (gnat_entity);
if ((Ekind (gnat_entity) == E_Component
|| Ekind (gnat_entity) == E_Discriminant)
&& Present (Component_Clause (gnat_entity)))
gnat_error_node = Last_Bit (Component_Clause (gnat_entity));
else if (Present (Size_Clause (gnat_entity)))
gnat_error_node = Expression (Size_Clause (gnat_entity));
/* Generate message only for entities that come from source, since
if we have an entity created by expansion, the message will be
generated for some other corresponding source entity. */
if (Comes_From_Source (gnat_entity))
{
if (Present (gnat_error_node))
post_error_ne_tree ("{^ }bits of & unused?",
gnat_error_node, gnat_entity,
size_diffop (size, orig_size));
else if (is_component_type)
post_error_ne_tree ("component of& padded{ by ^ bits}?",
gnat_entity, gnat_entity,
size_diffop (size, orig_size));
}
}
return record;
}
/* Given a GNU tree and a GNAT list of choices, generate an expression to test
the value passed against the list of choices. */
@@ -8245,95 +7697,6 @@ set_rm_size (Uint uint_size, tree gnu_type, Entity_Id gnat_entity)
SET_TYPE_ADA_SIZE (gnu_type, size);
}
/* Given a type TYPE, return a new type whose size is appropriate for SIZE.
If TYPE is the best type, return it. Otherwise, make a new type. We
only support new integral and pointer types. FOR_BIASED is true if
we are making a biased type. */
static tree
make_type_from_size (tree type, tree size_tree, bool for_biased)
{
unsigned HOST_WIDE_INT size;
bool biased_p;
tree new_type;
/* If size indicates an error, just return TYPE to avoid propagating
the error. Likewise if it's too large to represent. */
if (!size_tree || !host_integerp (size_tree, 1))
return type;
size = tree_low_cst (size_tree, 1);
switch (TREE_CODE (type))
{
case INTEGER_TYPE:
case ENUMERAL_TYPE:
case BOOLEAN_TYPE:
biased_p = (TREE_CODE (type) == INTEGER_TYPE
&& TYPE_BIASED_REPRESENTATION_P (type));
/* Integer types with precision 0 are forbidden. */
if (size == 0)
size = 1;
/* Only do something if the type is not a packed array type and
doesn't already have the proper size. */
if (TYPE_IS_PACKED_ARRAY_TYPE_P (type)
|| (TYPE_PRECISION (type) == size && biased_p == for_biased))
break;
biased_p |= for_biased;
if (size > LONG_LONG_TYPE_SIZE)
size = LONG_LONG_TYPE_SIZE;
if (TYPE_UNSIGNED (type) || biased_p)
new_type = make_unsigned_type (size);
else
new_type = make_signed_type (size);
TREE_TYPE (new_type) = TREE_TYPE (type) ? TREE_TYPE (type) : type;
SET_TYPE_RM_MIN_VALUE (new_type,
convert (TREE_TYPE (new_type),
TYPE_MIN_VALUE (type)));
SET_TYPE_RM_MAX_VALUE (new_type,
convert (TREE_TYPE (new_type),
TYPE_MAX_VALUE (type)));
/* Copy the name to show that it's essentially the same type and
not a subrange type. */
TYPE_NAME (new_type) = TYPE_NAME (type);
TYPE_BIASED_REPRESENTATION_P (new_type) = biased_p;
SET_TYPE_RM_SIZE (new_type, bitsize_int (size));
return new_type;
case RECORD_TYPE:
/* Do something if this is a fat pointer, in which case we
may need to return the thin pointer. */
if (TYPE_FAT_POINTER_P (type) && size < POINTER_SIZE * 2)
{
enum machine_mode p_mode = mode_for_size (size, MODE_INT, 0);
if (!targetm.valid_pointer_mode (p_mode))
p_mode = ptr_mode;
return
build_pointer_type_for_mode
(TYPE_OBJECT_RECORD_TYPE (TYPE_UNCONSTRAINED_ARRAY (type)),
p_mode, 0);
}
break;
case POINTER_TYPE:
/* Only do something if this is a thin pointer, in which case we
may need to return the fat pointer. */
if (TYPE_IS_THIN_POINTER_P (type) && size >= POINTER_SIZE * 2)
return
build_pointer_type (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)));
break;
default:
break;
}
return type;
}
/* ALIGNMENT is a Uint giving the alignment specified for GNAT_ENTITY,
a type or object whose present alignment is ALIGN. If this alignment is
valid, return it. Otherwise, give an error and return ALIGN. */
@@ -8426,14 +7789,6 @@ validate_alignment (Uint alignment, Entity_Id gnat_entity, unsigned int align)
return align;
}
/* Return the smallest alignment not less than SIZE. */
static unsigned int
ceil_alignment (unsigned HOST_WIDE_INT size)
{
return (unsigned int) 1 << (floor_log2 (size - 1) + 1);
}
/* Verify that OBJECT, a type or decl, is something we can implement
atomically. If not, give an error for GNAT_ENTITY. COMP_P is true
@@ -123,18 +123,48 @@ extern tree get_minimal_subprog_decl (Entity_Id gnat_entity);
extern tree make_aligning_type (tree type, unsigned int align, tree size,
unsigned int base_align, int room);
/* TYPE is a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE that is being used
as the field type of a packed record if IN_RECORD is true, or as the
component type of a packed array if IN_RECORD is false. See if we can
rewrite it either as a type that has a non-BLKmode, which we can pack
tighter in the packed record case, or as a smaller type. If so, return
the new type. If not, return the original type. */
extern tree make_packable_type (tree type, bool in_record);
/* Given a type TYPE, return a new type whose size is appropriate for SIZE.
If TYPE is the best type, return it. Otherwise, make a new type. We
only support new integral and pointer types. FOR_BIASED is true if
we are making a biased type. */
extern tree make_type_from_size (tree type, tree size_tree, bool for_biased);
/* Ensure that TYPE has SIZE and ALIGN. Make and return a new padded type
if needed. We have already verified that SIZE and TYPE are large enough.
GNAT_ENTITY is used to name the resulting record and to issue a warning.
IS_COMPONENT_TYPE is true if this is being done for the component type
of an array. IS_USER_TYPE is true if we must complete the original type.
DEFINITION is true if this type is being defined. SAME_RM_SIZE is true
if the RM size of the resulting type is to be set to SIZE too; otherwise,
it's set to the RM size of the original type. */
IS_COMPONENT_TYPE is true if this is being done for the component type of
an array. IS_USER_TYPE is true if the original type needs to be completed.
DEFINITION is true if this type is being defined. SET_RM_SIZE is true if
the RM size of the resulting type is to be set to SIZE too. */
extern tree maybe_pad_type (tree type, tree size, unsigned int align,
Entity_Id gnat_entity, bool is_component_type,
bool is_user_type, bool definition,
bool same_rm_size);
bool set_rm_size);
enum alias_set_op
{
ALIAS_SET_COPY,
ALIAS_SET_SUBSET,
ALIAS_SET_SUPERSET
};
/* Relate the alias sets of GNU_NEW_TYPE and GNU_OLD_TYPE according to OP.
If this is a multi-dimensional array type, do this recursively.
OP may be
- ALIAS_SET_COPY: the new set is made a copy of the old one.
- ALIAS_SET_SUPERSET: the new set is made a superset of the old one.
- ALIAS_SET_SUBSET: the new set is made a subset of the old one. */
extern void relate_alias_sets (tree gnu_new_type, tree gnu_old_type,
enum alias_set_op op);
/* Given a GNU tree and a GNAT list of choices, generate an expression to test
the value passed against the list of choices. */
@@ -497,11 +527,11 @@ extern tree convert_to_index_type (tree expr);
/* Routines created solely for the tree translator's sake. Their prototypes
can be changed as desired. */
/* Initialize the association of GNAT nodes to GCC trees. */
extern void init_gnat_to_gnu (void);
/* Initialize data structures of the utils.c module. */
extern void init_gnat_utils (void);
/* Destroy the association of GNAT nodes to GCC trees. */
extern void destroy_gnat_to_gnu (void);
/* Destroy data structures of the utils.c module. */
extern void destroy_gnat_utils (void);
/* GNAT_ENTITY is a GNAT tree node for a defining identifier.
GNU_DECL is the GCC tree which is to be associated with
@@ -519,12 +549,6 @@ extern tree get_gnu_tree (Entity_Id gnat_entity);
/* Return nonzero if a GCC tree has been associated with GNAT_ENTITY. */
extern bool present_gnu_tree (Entity_Id gnat_entity);
/* Initialize the association of GNAT nodes to GCC trees as dummies. */
extern void init_dummy_type (void);
/* Destroy the association of GNAT nodes to GCC trees as dummies. */
extern void destroy_dummy_type (void);
/* Make a dummy type corresponding to GNAT_TYPE. */
extern tree make_dummy_type (Entity_Id gnat_type);
@@ -1008,3 +1032,9 @@ extern void enumerate_modes (void (*f) (const char *, int, int, int, int, int,
/* Convenient shortcuts. */
#define VECTOR_TYPE_P(TYPE) (TREE_CODE (TYPE) == VECTOR_TYPE)
static inline unsigned HOST_WIDE_INT
ceil_pow2 (unsigned HOST_WIDE_INT x)
{
return (unsigned HOST_WIDE_INT) 1 << (floor_log2 (x - 1) + 1);
}
@@ -338,8 +338,7 @@ gigi (Node_Id gnat_root, int max_gnat_node, int number_name ATTRIBUTE_UNUSED,
/* Initialize ourselves. */
init_code_table ();
init_gnat_to_gnu ();
init_dummy_type ();
init_gnat_utils ();
/* If we are just annotating types, give VOID_TYPE zero sizes to avoid
errors. */
@@ -685,8 +684,7 @@ gigi (Node_Id gnat_root, int max_gnat_node, int number_name ATTRIBUTE_UNUSED,
}
/* Destroy ourselves. */
destroy_gnat_to_gnu ();
destroy_dummy_type ();
destroy_gnat_utils ();
/* We cannot track the location of errors past this point. */
error_gnat_node = Empty;
@@ -1501,34 +1499,25 @@ Attribute_to_gnu (Node_Id gnat_node, tree *gnu_result_type_p, int attribute)
gnu_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (gnu_type)));
}
/* If we're looking for the size of a field, return the field size.
Otherwise, if the prefix is an object, or if we're looking for
'Object_Size or 'Max_Size_In_Storage_Elements, the result is the
GCC size of the type. Otherwise, it is the RM size of the type. */
/* If we're looking for the size of a field, return the field size. */
if (TREE_CODE (gnu_prefix) == COMPONENT_REF)
gnu_result = DECL_SIZE (TREE_OPERAND (gnu_prefix, 1));
else if (TREE_CODE (gnu_prefix) != TYPE_DECL
/* Otherwise, if the prefix is an object, or if we are looking for
'Object_Size or 'Max_Size_In_Storage_Elements, the result is the
GCC size of the type. We make an exception for padded objects,
as we do not take into account alignment promotions for the size.
This is in keeping with the object case of gnat_to_gnu_entity. */
else if ((TREE_CODE (gnu_prefix) != TYPE_DECL
&& !(TYPE_IS_PADDING_P (gnu_type)
&& TREE_CODE (gnu_expr) == COMPONENT_REF))
|| attribute == Attr_Object_Size
|| attribute == Attr_Max_Size_In_Storage_Elements)
{
/* If the prefix is an object of a padded type, the GCC size isn't
relevant to the programmer. Normally what we want is the RM size,
which was set from the specified size, but if it was not set, we
want the size of the field. Using the MAX of those two produces
the right result in all cases. Don't use the size of the field
if it's self-referential, since that's never what's wanted. */
if (TREE_CODE (gnu_prefix) != TYPE_DECL
&& TYPE_IS_PADDING_P (gnu_type)
&& TREE_CODE (gnu_expr) == COMPONENT_REF)
{
gnu_result = rm_size (gnu_type);
if (!CONTAINS_PLACEHOLDER_P
(DECL_SIZE (TREE_OPERAND (gnu_expr, 1))))
gnu_result
= size_binop (MAX_EXPR, gnu_result,
DECL_SIZE (TREE_OPERAND (gnu_expr, 1)));
}
else if (Nkind (Prefix (gnat_node)) == N_Explicit_Dereference)
/* If this is a dereference and we have a special dynamic constrained
subtype on the prefix, use it to compute the size; otherwise, use
the designated subtype. */
if (Nkind (Prefix (gnat_node)) == N_Explicit_Dereference)
{
Node_Id gnat_deref = Prefix (gnat_node);
Node_Id gnat_actual_subtype
@@ -1547,12 +1536,12 @@ Attribute_to_gnu (Node_Id gnat_node, tree *gnu_result_type_p, int attribute)
get_identifier ("SIZE"),
false);
}
gnu_result = TYPE_SIZE (gnu_type);
}
else
gnu_result = TYPE_SIZE (gnu_type);
}
/* Otherwise, the result is the RM size of the type. */
else
gnu_result = rm_size (gnu_type);
@@ -6921,15 +6910,10 @@ gnat_to_gnu (Node_Id gnat_node)
else if (TREE_CODE (gnu_result) == CALL_EXPR
&& TYPE_IS_PADDING_P (TREE_TYPE (gnu_result))
&& TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_result)))
== gnu_result_type
&& CONTAINS_PLACEHOLDER_P (TYPE_SIZE (gnu_result_type)))
{
/* ??? We need to convert if the padded type has fixed size because
gnat_types_compatible_p will say that padded types are compatible
but the gimplifier will not and, therefore, will ultimately choke
if there isn't a conversion added early. */
if (TREE_CODE (TYPE_SIZE (TREE_TYPE (gnu_result))) == INTEGER_CST)
gnu_result = convert (gnu_result_type, gnu_result);
}
;
else if (TREE_TYPE (gnu_result) != gnu_result_type)
gnu_result = convert (gnu_result_type, gnu_result);
@@ -58,10 +58,6 @@
#include "ada-tree.h"
#include "gigi.h"
#ifndef MAX_BITS_PER_WORD
#define MAX_BITS_PER_WORD BITS_PER_WORD
#endif
/* If nonzero, pretend we are allocating at global level. */
int force_global;
@@ -215,6 +211,21 @@ static GTY(()) VEC(tree,gc) *global_renaming_pointers;
/* A chain of unused BLOCK nodes. */
static GTY((deletable)) tree free_block_chain;
static int pad_type_hash_marked_p (const void *p);
static hashval_t pad_type_hash_hash (const void *p);
static int pad_type_hash_eq (const void *p1, const void *p2);
/* A hash table of padded types. It is modelled on the generic type
hash table in tree.c, which must thus be used as a reference. */
struct GTY(()) pad_type_hash {
unsigned long hash;
tree type;
};
static GTY ((if_marked ("pad_type_hash_marked_p"),
param_is (struct pad_type_hash)))
htab_t pad_type_hash_table;
static tree merge_sizes (tree, tree, tree, bool, bool);
static tree compute_related_constant (tree, tree);
static tree split_plus (tree, tree *);
@@ -223,23 +234,43 @@ static tree convert_to_fat_pointer (tree, tree);
static bool potential_alignment_gap (tree, tree, tree);
static void process_attributes (tree, struct attrib *);
/* Initialize the association of GNAT nodes to GCC trees. */
/* Initialize data structures of the utils.c module. */
void
init_gnat_to_gnu (void)
init_gnat_utils (void)
{
/* Initialize the association of GNAT nodes to GCC trees. */
associate_gnat_to_gnu = ggc_alloc_cleared_vec_tree (max_gnat_nodes); associate_gnat_to_gnu = ggc_alloc_cleared_vec_tree (max_gnat_nodes);
/* Initialize the association of GNAT nodes to GCC trees as dummies. */
dummy_node_table = ggc_alloc_cleared_vec_tree (max_gnat_nodes);
/* Initialize the hash table of padded types. */
pad_type_hash_table = htab_create_ggc (512, pad_type_hash_hash,
pad_type_hash_eq, 0);
}
/* Destroy the association of GNAT nodes to GCC trees. */
/* Destroy data structures of the utils.c module. */
void
destroy_gnat_to_gnu (void)
destroy_gnat_utils (void)
{
/* Destroy the association of GNAT nodes to GCC trees. */
ggc_free (associate_gnat_to_gnu);
associate_gnat_to_gnu = NULL;
}
/* Destroy the association of GNAT nodes to GCC trees as dummies. */
ggc_free (dummy_node_table);
dummy_node_table = NULL;
/* Destroy the hash table of padded types. */
htab_delete (pad_type_hash_table);
pad_type_hash_table = NULL;
/* Invalidate the global renaming pointers. */
invalidate_global_renaming_pointers ();
}
/* GNAT_ENTITY is a GNAT tree node for an entity. Associate GNU_DECL, a GCC
tree node, with GNAT_ENTITY. If GNU_DECL is not a ..._DECL node, abort.
If NO_CHECK is true, the latter check is suppressed.
@@ -281,23 +312,6 @@ present_gnu_tree (Entity_Id gnat_entity)
return PRESENT_GNU_TREE (gnat_entity);
}
/* Initialize the association of GNAT nodes to GCC trees as dummies. */
void
init_dummy_type (void)
{
dummy_node_table = ggc_alloc_cleared_vec_tree (max_gnat_nodes);
}
/* Destroy the association of GNAT nodes to GCC trees as dummies. */
void
destroy_dummy_type (void)
{
ggc_free (dummy_node_table);
dummy_node_table = NULL;
}
/* Make a dummy type corresponding to GNAT_TYPE. */
tree
@@ -630,6 +644,702 @@ gnat_pushdecl (tree decl, Node_Id gnat_node)
}
}
/* Create a record type that contains a SIZE bytes long field of TYPE with a
starting bit position so that it is aligned to ALIGN bits, and leaving at
least ROOM bytes free before the field. BASE_ALIGN is the alignment the
record is guaranteed to get. */
tree
make_aligning_type (tree type, unsigned int align, tree size,
unsigned int base_align, int room)
{
/* We will be crafting a record type with one field at a position set to be
the next multiple of ALIGN past record'address + room bytes. We use a
record placeholder to express record'address. */
tree record_type = make_node (RECORD_TYPE);
tree record = build0 (PLACEHOLDER_EXPR, record_type);
tree record_addr_st
= convert (sizetype, build_unary_op (ADDR_EXPR, NULL_TREE, record));
/* The diagram below summarizes the shape of what we manipulate:
<--------- pos ---------->
{ +------------+-------------+-----------------+
record =>{ |############| ... | field (type) |
{ +------------+-------------+-----------------+
|<-- room -->|<- voffset ->|<---- size ----->|
o o
| |
record_addr vblock_addr
Every length is in sizetype bytes there, except "pos" which has to be
set as a bit position in the GCC tree for the record. */
tree room_st = size_int (room);
tree vblock_addr_st = size_binop (PLUS_EXPR, record_addr_st, room_st);
tree voffset_st, pos, field;
tree name = TYPE_NAME (type);
if (TREE_CODE (name) == TYPE_DECL)
name = DECL_NAME (name);
name = concat_name (name, "ALIGN");
TYPE_NAME (record_type) = name;
/* Compute VOFFSET and then POS. The next byte position multiple of some
alignment after some address is obtained by "and"ing the alignment minus
1 with the two's complement of the address. */
voffset_st = size_binop (BIT_AND_EXPR,
fold_build1 (NEGATE_EXPR, sizetype, vblock_addr_st),
size_int ((align / BITS_PER_UNIT) - 1));
/* POS = (ROOM + VOFFSET) * BIT_PER_UNIT, in bitsizetype. */
pos = size_binop (MULT_EXPR,
convert (bitsizetype,
size_binop (PLUS_EXPR, room_st, voffset_st)),
bitsize_unit_node);
/* Craft the GCC record representation. We exceptionally do everything
manually here because 1) our generic circuitry is not quite ready to
handle the complex position/size expressions we are setting up, 2) we
have a strong simplifying factor at hand: we know the maximum possible
value of voffset, and 3) we have to set/reset at least the sizes in
accordance with this maximum value anyway, as we need them to convey
what should be "alloc"ated for this type.
Use -1 as the 'addressable' indication for the field to prevent the
creation of a bitfield. We don't need one, it would have damaging
consequences on the alignment computation, and create_field_decl would
make one without this special argument, for instance because of the
complex position expression. */
field = create_field_decl (get_identifier ("F"), type, record_type, size,
pos, 1, -1);
TYPE_FIELDS (record_type) = field;
TYPE_ALIGN (record_type) = base_align;
TYPE_USER_ALIGN (record_type) = 1;
TYPE_SIZE (record_type)
= size_binop (PLUS_EXPR,
size_binop (MULT_EXPR, convert (bitsizetype, size),
bitsize_unit_node),
bitsize_int (align + room * BITS_PER_UNIT));
TYPE_SIZE_UNIT (record_type)
= size_binop (PLUS_EXPR, size,
size_int (room + align / BITS_PER_UNIT));
SET_TYPE_MODE (record_type, BLKmode);
relate_alias_sets (record_type, type, ALIAS_SET_COPY);
/* Declare it now since it will never be declared otherwise. This is
necessary to ensure that its subtrees are properly marked. */
create_type_decl (name, record_type, NULL, true, false, Empty);
return record_type;
}
/* TYPE is a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE that is being used
as the field type of a packed record if IN_RECORD is true, or as the
component type of a packed array if IN_RECORD is false. See if we can
rewrite it either as a type that has a non-BLKmode, which we can pack
tighter in the packed record case, or as a smaller type. If so, return
the new type. If not, return the original type. */
tree
make_packable_type (tree type, bool in_record)
{
unsigned HOST_WIDE_INT size = tree_low_cst (TYPE_SIZE (type), 1);
unsigned HOST_WIDE_INT new_size;
tree new_type, old_field, field_list = NULL_TREE;
unsigned int align;
/* No point in doing anything if the size is zero. */
if (size == 0)
return type;
new_type = make_node (TREE_CODE (type));
/* Copy the name and flags from the old type to that of the new.
Note that we rely on the pointer equality created here for
TYPE_NAME to look through conversions in various places. */
TYPE_NAME (new_type) = TYPE_NAME (type);
TYPE_JUSTIFIED_MODULAR_P (new_type) = TYPE_JUSTIFIED_MODULAR_P (type);
TYPE_CONTAINS_TEMPLATE_P (new_type) = TYPE_CONTAINS_TEMPLATE_P (type);
if (TREE_CODE (type) == RECORD_TYPE)
TYPE_PADDING_P (new_type) = TYPE_PADDING_P (type);
/* If we are in a record and have a small size, set the alignment to
try for an integral mode. Otherwise set it to try for a smaller
type with BLKmode. */
if (in_record && size <= MAX_FIXED_MODE_SIZE)
{
align = ceil_pow2 (size);
TYPE_ALIGN (new_type) = align;
new_size = (size + align - 1) & -align;
}
else
{
unsigned HOST_WIDE_INT align;
/* Do not try to shrink the size if the RM size is not constant. */
if (TYPE_CONTAINS_TEMPLATE_P (type)
|| !host_integerp (TYPE_ADA_SIZE (type), 1))
return type;
/* Round the RM size up to a unit boundary to get the minimal size
for a BLKmode record. Give up if it's already the size. */
new_size = TREE_INT_CST_LOW (TYPE_ADA_SIZE (type));
new_size = (new_size + BITS_PER_UNIT - 1) & -BITS_PER_UNIT;
if (new_size == size)
return type;
align = new_size & -new_size;
TYPE_ALIGN (new_type) = MIN (TYPE_ALIGN (type), align);
}
TYPE_USER_ALIGN (new_type) = 1;
/* Now copy the fields, keeping the position and size as we don't want
to change the layout by propagating the packedness downwards. */
for (old_field = TYPE_FIELDS (type); old_field;
old_field = DECL_CHAIN (old_field))
{
tree new_field_type = TREE_TYPE (old_field);
tree new_field, new_size;
if (RECORD_OR_UNION_TYPE_P (new_field_type)
&& !TYPE_FAT_POINTER_P (new_field_type)
&& host_integerp (TYPE_SIZE (new_field_type), 1))
new_field_type = make_packable_type (new_field_type, true);
/* However, for the last field in a not already packed record type
that is of an aggregate type, we need to use the RM size in the
packable version of the record type, see finish_record_type. */
if (!DECL_CHAIN (old_field)
&& !TYPE_PACKED (type)
&& RECORD_OR_UNION_TYPE_P (new_field_type)
&& !TYPE_FAT_POINTER_P (new_field_type)
&& !TYPE_CONTAINS_TEMPLATE_P (new_field_type)
&& TYPE_ADA_SIZE (new_field_type))
new_size = TYPE_ADA_SIZE (new_field_type);
else
new_size = DECL_SIZE (old_field);
new_field
= create_field_decl (DECL_NAME (old_field), new_field_type, new_type,
new_size, bit_position (old_field),
TYPE_PACKED (type),
!DECL_NONADDRESSABLE_P (old_field));
DECL_INTERNAL_P (new_field) = DECL_INTERNAL_P (old_field);
SET_DECL_ORIGINAL_FIELD_TO_FIELD (new_field, old_field);
if (TREE_CODE (new_type) == QUAL_UNION_TYPE)
DECL_QUALIFIER (new_field) = DECL_QUALIFIER (old_field);
DECL_CHAIN (new_field) = field_list;
field_list = new_field;
}
finish_record_type (new_type, nreverse (field_list), 2, false);
relate_alias_sets (new_type, type, ALIAS_SET_COPY);
SET_DECL_PARALLEL_TYPE (TYPE_STUB_DECL (new_type),
DECL_PARALLEL_TYPE (TYPE_STUB_DECL (type)));
/* If this is a padding record, we never want to make the size smaller
than what was specified. For QUAL_UNION_TYPE, also copy the size. */
if (TYPE_IS_PADDING_P (type) || TREE_CODE (type) == QUAL_UNION_TYPE)
{
TYPE_SIZE (new_type) = TYPE_SIZE (type);
TYPE_SIZE_UNIT (new_type) = TYPE_SIZE_UNIT (type);
new_size = size;
}
else
{
TYPE_SIZE (new_type) = bitsize_int (new_size);
TYPE_SIZE_UNIT (new_type)
= size_int ((new_size + BITS_PER_UNIT - 1) / BITS_PER_UNIT);
}
if (!TYPE_CONTAINS_TEMPLATE_P (type))
SET_TYPE_ADA_SIZE (new_type, TYPE_ADA_SIZE (type));
compute_record_mode (new_type);
/* Try harder to get a packable type if necessary, for example
in case the record itself contains a BLKmode field. */
if (in_record && TYPE_MODE (new_type) == BLKmode)
SET_TYPE_MODE (new_type,
mode_for_size_tree (TYPE_SIZE (new_type), MODE_INT, 1));
/* If neither the mode nor the size has shrunk, return the old type. */
if (TYPE_MODE (new_type) == BLKmode && new_size >= size)
return type;
return new_type;
}
/* Given a type TYPE, return a new type whose size is appropriate for SIZE.
If TYPE is the best type, return it. Otherwise, make a new type. We
only support new integral and pointer types. FOR_BIASED is true if
we are making a biased type. */
tree
make_type_from_size (tree type, tree size_tree, bool for_biased)
{
unsigned HOST_WIDE_INT size;
bool biased_p;
tree new_type;
/* If size indicates an error, just return TYPE to avoid propagating
the error. Likewise if it's too large to represent. */
if (!size_tree || !host_integerp (size_tree, 1))
return type;
size = tree_low_cst (size_tree, 1);
switch (TREE_CODE (type))
{
case INTEGER_TYPE:
case ENUMERAL_TYPE:
case BOOLEAN_TYPE:
biased_p = (TREE_CODE (type) == INTEGER_TYPE
&& TYPE_BIASED_REPRESENTATION_P (type));
/* Integer types with precision 0 are forbidden. */
if (size == 0)
size = 1;
/* Only do something if the type isn't a packed array type and doesn't
already have the proper size and the size isn't too large. */
if (TYPE_IS_PACKED_ARRAY_TYPE_P (type)
|| (TYPE_PRECISION (type) == size && biased_p == for_biased)
|| size > LONG_LONG_TYPE_SIZE)
break;
biased_p |= for_biased;
if (TYPE_UNSIGNED (type) || biased_p)
new_type = make_unsigned_type (size);
else
new_type = make_signed_type (size);
TREE_TYPE (new_type) = TREE_TYPE (type) ? TREE_TYPE (type) : type;
SET_TYPE_RM_MIN_VALUE (new_type,
convert (TREE_TYPE (new_type),
TYPE_MIN_VALUE (type)));
SET_TYPE_RM_MAX_VALUE (new_type,
convert (TREE_TYPE (new_type),
TYPE_MAX_VALUE (type)));
/* Copy the name to show that it's essentially the same type and
not a subrange type. */
TYPE_NAME (new_type) = TYPE_NAME (type);
TYPE_BIASED_REPRESENTATION_P (new_type) = biased_p;
SET_TYPE_RM_SIZE (new_type, bitsize_int (size));
return new_type;
case RECORD_TYPE:
/* Do something if this is a fat pointer, in which case we
may need to return the thin pointer. */
if (TYPE_FAT_POINTER_P (type) && size < POINTER_SIZE * 2)
{
enum machine_mode p_mode = mode_for_size (size, MODE_INT, 0);
if (!targetm.valid_pointer_mode (p_mode))
p_mode = ptr_mode;
return
build_pointer_type_for_mode
(TYPE_OBJECT_RECORD_TYPE (TYPE_UNCONSTRAINED_ARRAY (type)),
p_mode, 0);
}
break;
case POINTER_TYPE:
/* Only do something if this is a thin pointer, in which case we
may need to return the fat pointer. */
if (TYPE_IS_THIN_POINTER_P (type) && size >= POINTER_SIZE * 2)
return
build_pointer_type (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)));
break;
default:
break;
}
return type;
}
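/* For illustration of the INTEGER_TYPE case above: given a signed integer
   type and a size of 10 bits, a make_signed_type (10) node is built, the
   RM bounds and the name of the original type are copied onto it, and its
   RM size is set to 10; if FOR_BIASED is true, an unsigned 10-bit type with
   TYPE_BIASED_REPRESENTATION_P set is built instead.  The packed-array and
   too-large-size cases bail out early, as coded above.  */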
/* See if the data pointed to by the hash table slot is marked. */
static int
pad_type_hash_marked_p (const void *p)
{
const_tree const type = ((const struct pad_type_hash *) p)->type;
return ggc_marked_p (type);
}
/* Return the cached hash value. */
static hashval_t
pad_type_hash_hash (const void *p)
{
return ((const struct pad_type_hash *) p)->hash;
}
/* Return 1 iff the padded types are equivalent. */
static int
pad_type_hash_eq (const void *p1, const void *p2)
{
const struct pad_type_hash *const t1 = (const struct pad_type_hash *) p1;
const struct pad_type_hash *const t2 = (const struct pad_type_hash *) p2;
tree type1, type2;
if (t1->hash != t2->hash)
return 0;
type1 = t1->type;
type2 = t2->type;
/* We consider that the padded types are equivalent if they pad the same
type and have the same size, alignment and RM size. Taking the mode
into account is redundant since it is determined by the others. */
return
TREE_TYPE (TYPE_FIELDS (type1)) == TREE_TYPE (TYPE_FIELDS (type2))
&& TYPE_SIZE (type1) == TYPE_SIZE (type2)
&& TYPE_ALIGN (type1) == TYPE_ALIGN (type2)
&& TYPE_ADA_SIZE (type1) == TYPE_ADA_SIZE (type2);
}
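/* For illustration: two padding records built at different places that wrap
   the same inner type with the same TYPE_SIZE, TYPE_ALIGN and TYPE_ADA_SIZE
   hash identically and compare equal here, so maybe_pad_type below can
   return the already-registered node instead of creating a second, distinct
   padded type for the same layout.  */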
/* Ensure that TYPE has SIZE and ALIGN. Make and return a new padded type
if needed. We have already verified that SIZE and TYPE are large enough.
GNAT_ENTITY is used to name the resulting record and to issue a warning.
IS_COMPONENT_TYPE is true if this is being done for the component type of
an array. IS_USER_TYPE is true if the original type needs to be completed.
DEFINITION is true if this type is being defined. SET_RM_SIZE is true if
the RM size of the resulting type is to be set to SIZE too. */
tree
maybe_pad_type (tree type, tree size, unsigned int align,
Entity_Id gnat_entity, bool is_component_type,
bool is_user_type, bool definition, bool set_rm_size)
{
tree orig_size = TYPE_SIZE (type);
tree record, field;
/* If TYPE is a padded type, see if it agrees with any size and alignment
we were given. If so, return the original type. Otherwise, strip
off the padding, since we will either be returning the inner type
or repadding it. If no size or alignment is specified, use that of
the original padded type. */
if (TYPE_IS_PADDING_P (type))
{
if ((!size
|| operand_equal_p (round_up (size,
MAX (align, TYPE_ALIGN (type))),
round_up (TYPE_SIZE (type),
MAX (align, TYPE_ALIGN (type))),
0))
&& (align == 0 || align == TYPE_ALIGN (type)))
return type;
if (!size)
size = TYPE_SIZE (type);
if (align == 0)
align = TYPE_ALIGN (type);
type = TREE_TYPE (TYPE_FIELDS (type));
orig_size = TYPE_SIZE (type);
}
/* If the size is either not being changed or is being made smaller (which
is not done here and is only valid for bitfields anyway), show the size
isn't changing. Likewise, clear the alignment if it isn't being
changed. Then return if we aren't doing anything. */
if (size
&& (operand_equal_p (size, orig_size, 0)
|| (TREE_CODE (orig_size) == INTEGER_CST
&& tree_int_cst_lt (size, orig_size))))
size = NULL_TREE;
if (align == TYPE_ALIGN (type))
align = 0;
if (align == 0 && !size)
return type;
/* If requested, complete the original type and give it a name. */
if (is_user_type)
create_type_decl (get_entity_name (gnat_entity), type,
NULL, !Comes_From_Source (gnat_entity),
!(TYPE_NAME (type)
&& TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
&& DECL_IGNORED_P (TYPE_NAME (type))),
gnat_entity);
/* We used to modify the record in place in some cases, but that could
generate incorrect debugging information. So make a new record
type and name. */
record = make_node (RECORD_TYPE);
TYPE_PADDING_P (record) = 1;
if (Present (gnat_entity))
TYPE_NAME (record) = create_concat_name (gnat_entity, "PAD");
TYPE_ALIGN (record) = align;
TYPE_SIZE (record) = size ? size : orig_size;
TYPE_SIZE_UNIT (record)
= convert (sizetype,
size_binop (CEIL_DIV_EXPR, TYPE_SIZE (record),
bitsize_unit_node));
/* If we are changing the alignment and the input type is a record with
BLKmode and a small constant size, try to make a form that has an
integral mode. This might allow the padding record to also have an
integral mode, which will be much more efficient. There is no point
in doing so if a size is specified unless it is also a small constant
size and it is incorrect to do so if we cannot guarantee that the mode
will be naturally aligned since the field must always be addressable.
??? This might not always be a win when done for a stand-alone object:
since the nominal and the effective type of the object will now have
different modes, a VIEW_CONVERT_EXPR will be required for converting
between them and it might be hard to overcome afterwards, including
at the RTL level when the stand-alone object is accessed as a whole. */
if (align != 0
&& RECORD_OR_UNION_TYPE_P (type)
&& TYPE_MODE (type) == BLKmode
&& !TYPE_BY_REFERENCE_P (type)
&& TREE_CODE (orig_size) == INTEGER_CST
&& !TREE_OVERFLOW (orig_size)
&& compare_tree_int (orig_size, MAX_FIXED_MODE_SIZE) <= 0
&& (!size
|| (TREE_CODE (size) == INTEGER_CST
&& compare_tree_int (size, MAX_FIXED_MODE_SIZE) <= 0)))
{
tree packable_type = make_packable_type (type, true);
if (TYPE_MODE (packable_type) != BLKmode
&& align >= TYPE_ALIGN (packable_type))
type = packable_type;
}
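  /* For instance, a BLKmode record of three byte-sized components (24 bits)
     that is being given a 32-bit alignment may be rewritten by
     make_packable_type into a 32-bit form with an integral mode, which in
     turn allows the padding record built below to get an integral mode
     instead of BLKmode; the exact outcome depends on the modes available
     on the target.  */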
/* Now create the field with the original size. */
field = create_field_decl (get_identifier ("F"), type, record, orig_size,
bitsize_zero_node, 0, 1);
DECL_INTERNAL_P (field) = 1;
/* Do not emit debug info until after the auxiliary record is built. */
finish_record_type (record, field, 1, false);
/* Set the RM size if requested. */
if (set_rm_size)
{
SET_TYPE_ADA_SIZE (record, size ? size : orig_size);
/* If the padded type is complete and has constant size, we canonicalize
it by means of the hash table. This is consistent with the language
semantics and ensures that gigi and the middle-end have a common view
of these padded types. */
if (TREE_CONSTANT (TYPE_SIZE (record)))
{
hashval_t hashcode;
struct pad_type_hash in, *h;
void **loc;
hashcode = iterative_hash_object (TYPE_HASH (type), 0);
hashcode = iterative_hash_expr (TYPE_SIZE (record), hashcode);
hashcode = iterative_hash_hashval_t (TYPE_ALIGN (record), hashcode);
hashcode = iterative_hash_expr (TYPE_ADA_SIZE (record), hashcode);
in.hash = hashcode;
in.type = record;
h = (struct pad_type_hash *)
htab_find_with_hash (pad_type_hash_table, &in, hashcode);
if (h)
{
record = h->type;
goto built;
}
h = ggc_alloc_pad_type_hash ();
h->hash = hashcode;
h->type = record;
loc = htab_find_slot_with_hash (pad_type_hash_table, h, hashcode,
INSERT);
*loc = (void *)h;
}
}
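  /* The block above is the usual lookup-or-insert hash table idiom: the
     hash combines the inner type with the size, alignment and RM size of
     the new record; if an equivalent padded type was already registered,
     that node is reused and the jump to "built" skips re-emitting the
     debug-info parallel type for a mere duplicate.  */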
/* If debugging information is being written for the input type, write a
   record that shows what we are a subtype of and also make a variable
   that indicates our size, if it is still variable.  */
if (TREE_CODE (orig_size) != INTEGER_CST
&& TYPE_NAME (record)
&& TYPE_NAME (type)
&& !(TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
&& DECL_IGNORED_P (TYPE_NAME (type))))
{
tree marker = make_node (RECORD_TYPE);
tree name = TYPE_NAME (record);
tree orig_name = TYPE_NAME (type);
if (TREE_CODE (name) == TYPE_DECL)
name = DECL_NAME (name);
if (TREE_CODE (orig_name) == TYPE_DECL)
orig_name = DECL_NAME (orig_name);
TYPE_NAME (marker) = concat_name (name, "XVS");
finish_record_type (marker,
create_field_decl (orig_name,
build_reference_type (type),
marker, NULL_TREE, NULL_TREE,
0, 0),
0, true);
add_parallel_type (record, marker);
if (definition && size && TREE_CODE (size) != INTEGER_CST)
TYPE_SIZE_UNIT (marker)
= create_var_decl (concat_name (name, "XVZ"), NULL_TREE, sizetype,
TYPE_SIZE_UNIT (record), false, false, false,
false, NULL, gnat_entity);
}
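  /* For illustration: for a padded type named P, the parallel record above
     is typically named P___XVS in the GNAT debug encodings and carries a
     single field referencing the inner type, while the XVZ variable,
     created only when the type is being defined with a variable size,
     holds the size in bytes for the debugger.  */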
rest_of_record_type_compilation (record);
built:
/* If the size was widened explicitly, maybe give a warning. Take the
original size as the maximum size of the input if there was an
unconstrained record involved and round it up to the specified alignment,
if one was specified. But don't do it if we are just annotating types
and the type is tagged, since tagged types aren't fully laid out in this
mode. */
if (!size
|| TREE_CODE (size) == COND_EXPR
|| TREE_CODE (size) == MAX_EXPR
|| No (gnat_entity)
|| (type_annotate_only && Is_Tagged_Type (Etype (gnat_entity))))
return record;
if (CONTAINS_PLACEHOLDER_P (orig_size))
orig_size = max_size (orig_size, true);
if (align)
orig_size = round_up (orig_size, align);
if (!operand_equal_p (size, orig_size, 0)
&& !(TREE_CODE (size) == INTEGER_CST
&& TREE_CODE (orig_size) == INTEGER_CST
&& (TREE_OVERFLOW (size)
|| TREE_OVERFLOW (orig_size)
|| tree_int_cst_lt (size, orig_size))))
{
Node_Id gnat_error_node = Empty;
if (Is_Packed_Array_Type (gnat_entity))
gnat_entity = Original_Array_Type (gnat_entity);
if ((Ekind (gnat_entity) == E_Component
|| Ekind (gnat_entity) == E_Discriminant)
&& Present (Component_Clause (gnat_entity)))
gnat_error_node = Last_Bit (Component_Clause (gnat_entity));
else if (Present (Size_Clause (gnat_entity)))
gnat_error_node = Expression (Size_Clause (gnat_entity));
/* Generate message only for entities that come from source, since
if we have an entity created by expansion, the message will be
generated for some other corresponding source entity. */
if (Comes_From_Source (gnat_entity))
{
if (Present (gnat_error_node))
post_error_ne_tree ("{^ }bits of & unused?",
gnat_error_node, gnat_entity,
size_diffop (size, orig_size));
else if (is_component_type)
post_error_ne_tree ("component of& padded{ by ^ bits}?",
gnat_entity, gnat_entity,
size_diffop (size, orig_size));
}
}
return record;
}
/* Relate the alias sets of GNU_NEW_TYPE and GNU_OLD_TYPE according to OP.
If this is a multi-dimensional array type, do this recursively.
OP may be
- ALIAS_SET_COPY: the new set is made a copy of the old one.
- ALIAS_SET_SUPERSET: the new set is made a superset of the old one.
- ALIAS_SET_SUBSET: the new set is made a subset of the old one. */
void
relate_alias_sets (tree gnu_new_type, tree gnu_old_type, enum alias_set_op op)
{
/* Remove any padding from GNU_OLD_TYPE. It doesn't matter in the case
of a one-dimensional array, since the padding has the same alias set
as the field type, but if it's a multi-dimensional array, we need to
see the inner types. */
while (TREE_CODE (gnu_old_type) == RECORD_TYPE
&& (TYPE_JUSTIFIED_MODULAR_P (gnu_old_type)
|| TYPE_PADDING_P (gnu_old_type)))
gnu_old_type = TREE_TYPE (TYPE_FIELDS (gnu_old_type));
/* Unconstrained array types are deemed incomplete and would thus be given
alias set 0. Retrieve the underlying array type. */
if (TREE_CODE (gnu_old_type) == UNCONSTRAINED_ARRAY_TYPE)
gnu_old_type
= TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_old_type))));
if (TREE_CODE (gnu_new_type) == UNCONSTRAINED_ARRAY_TYPE)
gnu_new_type
= TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_new_type))));
if (TREE_CODE (gnu_new_type) == ARRAY_TYPE
&& TREE_CODE (TREE_TYPE (gnu_new_type)) == ARRAY_TYPE
&& TYPE_MULTI_ARRAY_P (TREE_TYPE (gnu_new_type)))
relate_alias_sets (TREE_TYPE (gnu_new_type), TREE_TYPE (gnu_old_type), op);
switch (op)
{
case ALIAS_SET_COPY:
/* The alias set shouldn't be copied between array types with different
aliasing settings because this can break the aliasing relationship
between the array type and its element type. */
#ifndef ENABLE_CHECKING
if (flag_strict_aliasing)
#endif
gcc_assert (!(TREE_CODE (gnu_new_type) == ARRAY_TYPE
&& TREE_CODE (gnu_old_type) == ARRAY_TYPE
&& TYPE_NONALIASED_COMPONENT (gnu_new_type)
!= TYPE_NONALIASED_COMPONENT (gnu_old_type)));
TYPE_ALIAS_SET (gnu_new_type) = get_alias_set (gnu_old_type);
break;
case ALIAS_SET_SUBSET:
case ALIAS_SET_SUPERSET:
{
alias_set_type old_set = get_alias_set (gnu_old_type);
alias_set_type new_set = get_alias_set (gnu_new_type);
/* Do nothing if the alias sets conflict. This ensures that we
never call record_alias_subset several times for the same pair
or at all for alias set 0. */
if (!alias_sets_conflict_p (old_set, new_set))
{
if (op == ALIAS_SET_SUBSET)
record_alias_subset (old_set, new_set);
else
record_alias_subset (new_set, old_set);
}
}
break;
default:
gcc_unreachable ();
}
record_component_aliases (gnu_new_type);
}
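/* For illustration: ALIAS_SET_COPY simply makes the new type reuse the old
   type's alias set (and the recursion above does the same for each inner
   dimension of a multi-dimensional array), whereas ALIAS_SET_SUBSET and
   ALIAS_SET_SUPERSET call record_alias_subset to register a containment
   relationship between the two existing sets, unless they already
   conflict.  */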
 /* Record TYPE as a builtin type for Ada.  NAME is the name of the type.
    ARTIFICIAL_P is true if it's a type that was generated by the compiler.  */
...
@@ -2224,14 +2934,6 @@ gnat_types_compatible_p (tree t1, tree t2)
           && gnat_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2)))))
     return 1;

-  /* Padding record types are also compatible if they pad the same
-     type and have the same constant size.  */
-  if (code == RECORD_TYPE
-      && TYPE_PADDING_P (t1) && TYPE_PADDING_P (t2)
-      && TREE_TYPE (TYPE_FIELDS (t1)) == TREE_TYPE (TYPE_FIELDS (t2))
-      && tree_int_cst_equal (TYPE_SIZE (t1), TYPE_SIZE (t2)))
-    return 1;
-
   return 0;
 }
...
@@ -3705,7 +4407,7 @@ convert (tree type, tree expr)
       && TYPE_PADDING_P (type) && TYPE_PADDING_P (etype)
       && (!TREE_CONSTANT (TYPE_SIZE (type))
           || !TREE_CONSTANT (TYPE_SIZE (etype))
-          || gnat_types_compatible_p (type, etype)
+          || TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
           || TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type)))
              == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (etype)))))
     ;
...
@@ -3734,8 +4436,8 @@ convert (tree type, tree expr)
   if (TREE_CODE (expr) == COMPONENT_REF
       && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
       && (!TREE_CONSTANT (TYPE_SIZE (type))
-          || gnat_types_compatible_p (type,
-                                      TREE_TYPE (TREE_OPERAND (expr, 0)))
+          || TYPE_MAIN_VARIANT (type)
+             == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (expr, 0)))
           || (ecode == RECORD_TYPE
               && TYPE_NAME (etype)
                  == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type))))))
...
@@ -789,16 +789,28 @@ build_binary_op (enum tree_code op_code, tree result_type,
       else if (TYPE_IS_PADDING_P (left_type)
                && TREE_CONSTANT (TYPE_SIZE (left_type))
                && ((TREE_CODE (right_operand) == COMPONENT_REF
-                    && TYPE_IS_PADDING_P
-                       (TREE_TYPE (TREE_OPERAND (right_operand, 0)))
-                    && gnat_types_compatible_p
-                       (left_type,
-                        TREE_TYPE (TREE_OPERAND (right_operand, 0))))
+                    && TYPE_MAIN_VARIANT (left_type)
+                       == TYPE_MAIN_VARIANT
+                          (TREE_TYPE (TREE_OPERAND (right_operand, 0))))
                    || (TREE_CODE (right_operand) == CONSTRUCTOR
                        && !CONTAINS_PLACEHOLDER_P
                            (DECL_SIZE (TYPE_FIELDS (left_type)))))
                && !integer_zerop (TYPE_SIZE (right_type)))
-        operation_type = left_type;
+        {
+          /* We make an exception for a BLKmode type padding a non-BLKmode
+             inner type and do the conversion of the LHS right away, since
+             unchecked_convert wouldn't do it properly.  */
+          if (TYPE_MODE (left_type) == BLKmode
+              && TYPE_MODE (right_type) != BLKmode
+              && TREE_CODE (right_operand) != CONSTRUCTOR)
+            {
+              operation_type = right_type;
+              left_operand = convert (operation_type, left_operand);
+              left_type = operation_type;
+            }
+          else
+            operation_type = left_type;
+        }

       /* If we have a call to a function that returns an unconstrained type
          with default discriminant on the RHS, use the RHS type (which is
...
2012-05-04 Eric Botcazou <ebotcazou@adacore.com>

* gcc.target/ia64/pr48496.c: New test.
* gcc.target/ia64/pr52657.c: Likewise.

2012-05-05 Manuel López-Ibáñez <manu@gcc.gnu.org>

PR c/43772
...
-- { dg-do compile }
with Discr36_Pkg;
package body Discr36 is
function N return Natural is begin return 0; end;
type Arr is array (1 .. N) of R;
function My_Func is new Discr36_Pkg.Func (Arr);
procedure Proc is
A : constant Arr := My_Func;
begin
null;
end;
end Discr36;
package Discr36 is
type R (D : Boolean := True) is record
case D is
when True => I : Integer;
when False => null;
end case;
end record;
function N return Natural;
end Discr36;
package body Discr36_Pkg is
function Func return T is
Ret : T;
pragma Warnings (Off, Ret);
begin
return Ret;
end;
end Discr36_Pkg;
package Discr36_Pkg is
generic
type T is private;
function Func return T;
end Discr36_Pkg;