Commit 9cbbba28 authored and committed by Eric Botcazou

stor-layout.c (bit_from_pos): Distribute conversion to bitsizetype into a PLUS_EXPR byte offset.

	* stor-layout.c (bit_from_pos): Distribute conversion to bitsizetype
	into a PLUS_EXPR byte offset.

	* tree-ssa-pre.c (can_value_number_call): Delete.
	(compute_avail): Skip all statements with side effects.
	<GIMPLE_CALL>: Skip calls to internal functions.

From-SVN: r187450
parent 5341ab80
2012-05-14 Eric Botcazou <ebotcazou@adacore.com>
* stor-layout.c (bit_from_pos): Distribute conversion to bitsizetype
into a PLUS_EXPR byte offset.
* tree-ssa-pre.c (can_value_number_call): Delete.
(compute_avail): Skip all statements with side effects.
<GIMPLE_CALL>: Skip calls to internal functions.
2012-05-13  Steven Bosscher  <steven@gcc.gnu.org>
	* config/pa/pa.md: Use define_c_enum for "unspec" and "unspecv".
......
@@ -786,25 +786,29 @@ start_record_layout (tree t)
 }
 
 /* Return the combined bit position for the byte offset OFFSET and the
-   bit position BITPOS.  */
+   bit position BITPOS.
+
+   These functions operate on byte and bit positions present in FIELD_DECLs
+   and assume that these expressions result in no (intermediate) overflow.
+   This assumption is necessary to fold the expressions as much as possible,
+   so as to avoid creating artificially variable-sized types in languages
+   supporting variable-sized types like Ada.  */
 tree
 bit_from_pos (tree offset, tree bitpos)
 {
+  if (TREE_CODE (offset) == PLUS_EXPR)
+    offset = size_binop (PLUS_EXPR,
+			 fold_convert (bitsizetype, TREE_OPERAND (offset, 0)),
+			 fold_convert (bitsizetype, TREE_OPERAND (offset, 1)));
+  else
+    offset = fold_convert (bitsizetype, offset);
   return size_binop (PLUS_EXPR, bitpos,
-		     size_binop (MULT_EXPR,
-				 fold_convert (bitsizetype, offset),
-				 bitsize_unit_node));
+		     size_binop (MULT_EXPR, offset, bitsize_unit_node));
 }
 /* Return the combined truncated byte position for the byte offset OFFSET and
-   the bit position BITPOS.
-
-   These functions operate on byte and bit positions as present in FIELD_DECLs
-   and assume that these expressions result in no (intermediate) overflow.
-   This assumption is necessary to fold the expressions as much as possible,
-   so as to avoid creating artificially variable-sized types in languages
-   supporting variable-sized types like Ada.  */
+   the bit position BITPOS.  */
 
 tree
 byte_from_pos (tree offset, tree bitpos)
......
@@ -2586,19 +2586,6 @@ compute_antic (void)
   sbitmap_free (changed_blocks);
 }
 
-/* Return true if we can value number the call in STMT.  This is true
-   if we have a pure or constant call to a real function.  */
-
-static bool
-can_value_number_call (gimple stmt)
-{
-  if (gimple_call_internal_p (stmt))
-    return false;
-  if (gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST))
-    return true;
-  return false;
-}
 /* Return true if OP is a tree which we can perform PRE on.
    This may not match the operations we can value number, but in
    a perfect world would.  */
@@ -3975,8 +3962,7 @@ compute_avail (void)
 	     or control flow.
 	     If this isn't a call or it is the last stmt in the
 	     basic-block then the CFG represents things correctly.  */
-	  if (is_gimple_call (stmt)
-	      && !stmt_ends_bb_p (stmt))
+	  if (is_gimple_call (stmt) && !stmt_ends_bb_p (stmt))
 	    {
 	      /* Non-looping const functions always return normally.
 		 Otherwise the call might not return or have side-effects
@@ -3998,8 +3984,7 @@ compute_avail (void)
 	      bitmap_value_insert_into_set (AVAIL_OUT (block), e);
 	    }
 
-	  if (gimple_has_volatile_ops (stmt)
-	      || stmt_could_throw_p (stmt))
+	  if (gimple_has_side_effects (stmt) || stmt_could_throw_p (stmt))
 	    continue;
 
 	  switch (gimple_code (stmt))
@@ -4017,7 +4002,8 @@ compute_avail (void)
 		pre_expr result = NULL;
 		VEC(vn_reference_op_s, heap) *ops = NULL;
 
-		if (!can_value_number_call (stmt))
+		/* We can value number only calls to real functions.  */
+		if (gimple_call_internal_p (stmt))
 		  continue;
 
 		copy_reference_ops_from_call (stmt, &ops);
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment