Commit 1e0424d9 authored by Jakub Jelinek, committed by Jakub Jelinek

re PR debug/78839 (DWARF output different between GCC 5 and 6)

	PR debug/78839
	* dwarf2out.c (field_byte_offset): Restore the
	PCC_BITFIELD_TYPE_MATTERS behavior for INTEGER_CST DECL_FIELD_OFFSET
	and DECL_FIELD_BIT_OFFSET.  Use fold_build2 instead of build2 + fold.
	(analyze_variants_discr, gen_variant_part): Use fold_build2 instead
	of build2 + fold.

From-SVN: r244545
parent 8ddfdbc2
2017-01-17 Jakub Jelinek <jakub@redhat.com>
PR debug/78839
* dwarf2out.c (field_byte_offset): Restore the
PCC_BITFIELD_TYPE_MATTERS behavior for INTEGER_CST DECL_FIELD_OFFSET
and DECL_FIELD_BIT_OFFSET. Use fold_build2 instead of build2 + fold.
(analyze_variants_discr, gen_variant_part): Use fold_build2 instead
of build2 + fold.
2017-01-17 Eric Botcazou <ebotcazou@adacore.com>
PR ada/67205
......
...@@ -17980,10 +17980,6 @@ static dw_loc_descr_ref ...@@ -17980,10 +17980,6 @@ static dw_loc_descr_ref
field_byte_offset (const_tree decl, struct vlr_context *ctx, field_byte_offset (const_tree decl, struct vlr_context *ctx,
HOST_WIDE_INT *cst_offset) HOST_WIDE_INT *cst_offset)
{ {
offset_int object_offset_in_bits;
offset_int object_offset_in_bytes;
offset_int bitpos_int;
bool is_byte_offset_cst, is_bit_offset_cst;
tree tree_result; tree tree_result;
dw_loc_list_ref loc_result; dw_loc_list_ref loc_result;
...@@ -17994,20 +17990,21 @@ field_byte_offset (const_tree decl, struct vlr_context *ctx, ...@@ -17994,20 +17990,21 @@ field_byte_offset (const_tree decl, struct vlr_context *ctx,
else else
gcc_assert (TREE_CODE (decl) == FIELD_DECL); gcc_assert (TREE_CODE (decl) == FIELD_DECL);
is_bit_offset_cst = TREE_CODE (DECL_FIELD_BIT_OFFSET (decl)) != INTEGER_CST;
is_byte_offset_cst = TREE_CODE (DECL_FIELD_OFFSET (decl)) != INTEGER_CST;
/* We cannot handle variable bit offsets at the moment, so abort if it's the /* We cannot handle variable bit offsets at the moment, so abort if it's the
case. */ case. */
if (is_bit_offset_cst) if (TREE_CODE (DECL_FIELD_BIT_OFFSET (decl)) != INTEGER_CST)
return NULL; return NULL;
#ifdef PCC_BITFIELD_TYPE_MATTERS #ifdef PCC_BITFIELD_TYPE_MATTERS
/* We used to handle only constant offsets in all cases. Now, we handle /* We used to handle only constant offsets in all cases. Now, we handle
properly dynamic byte offsets only when PCC bitfield type doesn't properly dynamic byte offsets only when PCC bitfield type doesn't
matter. */ matter. */
if (PCC_BITFIELD_TYPE_MATTERS && is_byte_offset_cst && is_bit_offset_cst) if (PCC_BITFIELD_TYPE_MATTERS
&& TREE_CODE (DECL_FIELD_OFFSET (decl)) == INTEGER_CST)
{ {
offset_int object_offset_in_bits;
offset_int object_offset_in_bytes;
offset_int bitpos_int;
tree type; tree type;
tree field_size_tree; tree field_size_tree;
offset_int deepest_bitpos; offset_int deepest_bitpos;
...@@ -18102,13 +18099,23 @@ field_byte_offset (const_tree decl, struct vlr_context *ctx, ...@@ -18102,13 +18099,23 @@ field_byte_offset (const_tree decl, struct vlr_context *ctx,
object_offset_in_bits object_offset_in_bits
= round_up_to_align (object_offset_in_bits, decl_align_in_bits); = round_up_to_align (object_offset_in_bits, decl_align_in_bits);
} }
object_offset_in_bytes
= wi::lrshift (object_offset_in_bits, LOG2_BITS_PER_UNIT);
if (ctx->variant_part_offset == NULL_TREE)
{
*cst_offset = object_offset_in_bytes.to_shwi ();
return NULL;
}
tree_result = wide_int_to_tree (sizetype, object_offset_in_bytes);
} }
else
#endif /* PCC_BITFIELD_TYPE_MATTERS */ #endif /* PCC_BITFIELD_TYPE_MATTERS */
tree_result = byte_position (decl);
tree_result = byte_position (decl);
if (ctx->variant_part_offset != NULL_TREE) if (ctx->variant_part_offset != NULL_TREE)
tree_result = fold (build2 (PLUS_EXPR, TREE_TYPE (tree_result), tree_result = fold_build2 (PLUS_EXPR, TREE_TYPE (tree_result),
ctx->variant_part_offset, tree_result)); ctx->variant_part_offset, tree_result);
/* If the byte offset is a constant, it's simplier to handle a native /* If the byte offset is a constant, it's simplier to handle a native
constant rather than a DWARF expression. */ constant rather than a DWARF expression. */
...@@ -23744,14 +23751,12 @@ analyze_variants_discr (tree variant_part_decl, ...@@ -23744,14 +23751,12 @@ analyze_variants_discr (tree variant_part_decl,
if (!lower_cst_included) if (!lower_cst_included)
lower_cst lower_cst
= fold (build2 (PLUS_EXPR, TREE_TYPE (lower_cst), = fold_build2 (PLUS_EXPR, TREE_TYPE (lower_cst), lower_cst,
lower_cst, build_int_cst (TREE_TYPE (lower_cst), 1));
build_int_cst (TREE_TYPE (lower_cst), 1)));
if (!upper_cst_included) if (!upper_cst_included)
upper_cst upper_cst
= fold (build2 (MINUS_EXPR, TREE_TYPE (upper_cst), = fold_build2 (MINUS_EXPR, TREE_TYPE (upper_cst), upper_cst,
upper_cst, build_int_cst (TREE_TYPE (upper_cst), 1));
build_int_cst (TREE_TYPE (upper_cst), 1)));
if (!get_discr_value (lower_cst, if (!get_discr_value (lower_cst,
&new_node->dw_discr_lower_bound) &new_node->dw_discr_lower_bound)
...@@ -23922,8 +23927,8 @@ gen_variant_part (tree variant_part_decl, struct vlr_context *vlr_ctx, ...@@ -23922,8 +23927,8 @@ gen_variant_part (tree variant_part_decl, struct vlr_context *vlr_ctx,
we recurse. */ we recurse. */
vlr_sub_ctx.variant_part_offset vlr_sub_ctx.variant_part_offset
= fold (build2 (PLUS_EXPR, TREE_TYPE (variant_part_offset), = fold_build2 (PLUS_EXPR, TREE_TYPE (variant_part_offset),
variant_part_offset, byte_position (member))); variant_part_offset, byte_position (member));
gen_variant_part (member, &vlr_sub_ctx, variant_die); gen_variant_part (member, &vlr_sub_ctx, variant_die);
} }
else else
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment