Commit 3bc27de7 by Richard Guenther Committed by Richard Biener

re PR tree-optimization/47365 (wrong code with -O -ftree-pre)

2011-01-21  Richard Guenther  <rguenther@suse.de>

	PR tree-optimization/47365
	* tree-ssa-sccvn.h (vn_lookup_kind): Declare.
	(vn_reference_lookup_pieces): Adjust.
	(vn_reference_lookup): Likewise.
	* tree-ssa-sccvn.c (vn_walk_kind): New static global.
	(vn_reference_lookup_3): Only look through kills if in
	VN_WALKREWRITE mode.
	(vn_reference_lookup_pieces): Adjust.
	(vn_reference_lookup): Likewise.
	(visit_reference_op_load): Likewise.
	(visit_reference_op_store): Likewise.
	* tree-ssa-pre.c (phi_translate_1): Use VN_WALK mode.
	(compute_avail): Likewise.
	(eliminate): Likewise.

	* gcc.dg/torture/pr47365.c: New testcase.
	* gcc.dg/tree-ssa/pr47392.c: Likewise.

From-SVN: r169089
parent 33e39b66
2011-01-21 Richard Guenther <rguenther@suse.de>
PR tree-optimization/47365
* tree-ssa-sccvn.h (vn_lookup_kind): Declare.
(vn_reference_lookup_pieces): Adjust.
(vn_reference_lookup): Likewise.
* tree-ssa-sccvn.c (vn_walk_kind): New static global.
(vn_reference_lookup_3): Only look through kills if in
VN_WALKREWRITE mode.
(vn_reference_lookup_pieces): Adjust.
(vn_reference_lookup): Likewise.
(visit_reference_op_load): Likewise.
(visit_reference_op_store): Likewise.
* tree-ssa-pre.c (phi_translate_1): Use VN_WALK mode.
(compute_avail): Likewise.
(eliminate): Likewise.
2011-01-21  Jakub Jelinek  <jakub@redhat.com>
* tree-ssa-live.c (remove_unused_scope_block_p): Don't remove
......
2011-01-21 Richard Guenther <rguenther@suse.de>
PR tree-optimization/47365
* gcc.dg/torture/pr47365.c: New testcase.
* gcc.dg/tree-ssa/pr47392.c: Likewise.
2011-01-21  Rainer Orth  <ro@CeBiTec.Uni-Bielefeld.DE>
* g++.dg/other/anon5.C: Skip on mips-sgi-irix*.
......
/* { dg-do run } */
/* Testcase for PR tree-optimization/47365: wrong code with -O -ftree-pre.
   Per the ChangeLog above, value numbering must only look through
   aggregate-copy kills in VN_WALKREWRITE mode.  */
struct A
{
int i;
};
/* B wraps a two-element array of A so that "b.a[0] = b.a[1]" below is an
   aggregate copy that kills b.a[0].  */
struct B
{
struct A a[2];
};
int i = 1;
struct B b = { 0, 3 };  /* b.a[0].i == 0, b.a[1].i == 3 initially.  */
static void
test ()
{
/* i == 1 and b.a[0].i == 0, so the branch is taken: swap the two
   elements, leaving b.a[0].i == 3 and b.a[1].i == 0.  */
if (b.a[0].i != i)
{
int t = b.a[0].i;
b.a[0] = b.a[1];  /* Aggregate copy that kills the earlier load of b.a[0].i.  */
b.a[1].i = t;
}
/* Neither check may fire after the swap; a miscompile that reuses the
   pre-swap values (looking through the aggregate-copy kill) aborts.  */
if (b.a[1].i == i)
__builtin_abort ();
if (b.a[0].i == 0)
__builtin_abort ();
}
int
main ()
{
/* Run the miscompile check; aborts on failure, exits 0 on success.  */
test ();
return 0;
}
/* { dg-do run } */
/* { dg-options "-O2 -fdump-tree-pre-stats" } */
struct A
{
int i;
};
struct B
{
struct A a[2];
};
int i = 1;
struct B b = { 0, 3 };
static void
test ()
{
/* i == 1 and b.a[0].i == 0, so the branch is taken: swap the two
   elements, leaving b.a[0].i == 3 and b.a[1].i == 0.  */
if (b.a[0].i != i)
{
int t = b.a[0].i;
b.a[0] = b.a[1];  /* Aggregate copy that kills the earlier load of b.a[0].i.  */
b.a[1].i = t;
}
/* Neither check may fire after the swap; a miscompile that reuses the
   pre-swap values (looking through the aggregate-copy kill) aborts.  */
if (b.a[1].i == i)
__builtin_abort ();
if (b.a[0].i == 0)
__builtin_abort ();
}
/* "hot" keeps main optimized for speed so PRE's do_insertion path is
   exercised — presumably why it differs from pr47365.c; see the
   do_regular_insertion hunk in this commit.  */
int __attribute__((hot))
main ()
{
/* Run the check; aborts on miscompile, exits 0 on success.  */
test ();
return 0;
}
/* Verify PRE performed exactly one elimination and clean up the dump.  */
/* { dg-final { scan-tree-dump "Eliminated: 1" "pre" } } */
/* { dg-final { cleanup-tree-dump "pre" } } */
...@@ -1681,7 +1681,7 @@ phi_translate_1 (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2, ...@@ -1681,7 +1681,7 @@ phi_translate_1 (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
tree result = vn_reference_lookup_pieces (newvuse, ref->set, tree result = vn_reference_lookup_pieces (newvuse, ref->set,
ref->type, ref->type,
newoperands, newoperands,
&newref, true); &newref, VN_WALK);
if (result) if (result)
VEC_free (vn_reference_op_s, heap, newoperands); VEC_free (vn_reference_op_s, heap, newoperands);
...@@ -2594,6 +2594,10 @@ compute_antic (void) ...@@ -2594,6 +2594,10 @@ compute_antic (void)
{ {
if (dump_file && (dump_flags & TDF_DETAILS)) if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, "Starting iteration %d\n", num_iterations); fprintf (dump_file, "Starting iteration %d\n", num_iterations);
/* ??? We need to clear our PHI translation cache here as the
ANTIC sets shrink and we restrict valid translations to
those having operands with leaders in ANTIC. Same below
for PA ANTIC computation. */
num_iterations++; num_iterations++;
changed = false; changed = false;
for (i = n_basic_blocks - NUM_FIXED_BLOCKS - 1; i >= 0; i--) for (i = n_basic_blocks - NUM_FIXED_BLOCKS - 1; i >= 0; i--)
...@@ -3607,10 +3611,22 @@ do_regular_insertion (basic_block block, basic_block dom) ...@@ -3607,10 +3611,22 @@ do_regular_insertion (basic_block block, basic_block dom)
already existing along every predecessor, and already existing along every predecessor, and
it's defined by some predecessor, it is it's defined by some predecessor, it is
partially redundant. */ partially redundant. */
if (!cant_insert && !all_same && by_some && do_insertion if (!cant_insert && !all_same && by_some)
&& dbg_cnt (treepre_insert))
{ {
if (insert_into_preds_of_block (block, get_expression_id (expr), if (!do_insertion)
{
if (dump_file && (dump_flags & TDF_DETAILS))
{
fprintf (dump_file, "Skipping partial redundancy for "
"expression ");
print_pre_expr (dump_file, expr);
fprintf (dump_file, " (%04d), no redundancy on to be "
"optimized for speed edge\n", val);
}
}
else if (dbg_cnt (treepre_insert)
&& insert_into_preds_of_block (block,
get_expression_id (expr),
avail)) avail))
new_stuff = true; new_stuff = true;
} }
...@@ -3999,7 +4015,7 @@ compute_avail (void) ...@@ -3999,7 +4015,7 @@ compute_avail (void)
copy_reference_ops_from_call (stmt, &ops); copy_reference_ops_from_call (stmt, &ops);
vn_reference_lookup_pieces (gimple_vuse (stmt), 0, vn_reference_lookup_pieces (gimple_vuse (stmt), 0,
gimple_expr_type (stmt), gimple_expr_type (stmt),
ops, &ref, false); ops, &ref, VN_NOWALK);
VEC_free (vn_reference_op_s, heap, ops); VEC_free (vn_reference_op_s, heap, ops);
if (!ref) if (!ref)
continue; continue;
...@@ -4069,7 +4085,7 @@ compute_avail (void) ...@@ -4069,7 +4085,7 @@ compute_avail (void)
vn_reference_lookup (gimple_assign_rhs1 (stmt), vn_reference_lookup (gimple_assign_rhs1 (stmt),
gimple_vuse (stmt), gimple_vuse (stmt),
true, &ref); VN_WALK, &ref);
if (!ref) if (!ref)
continue; continue;
...@@ -4313,7 +4329,7 @@ eliminate (void) ...@@ -4313,7 +4329,7 @@ eliminate (void)
tree rhs = gimple_assign_rhs1 (stmt); tree rhs = gimple_assign_rhs1 (stmt);
tree val; tree val;
val = vn_reference_lookup (gimple_assign_lhs (stmt), val = vn_reference_lookup (gimple_assign_lhs (stmt),
gimple_vuse (stmt), true, NULL); gimple_vuse (stmt), VN_WALK, NULL);
if (TREE_CODE (rhs) == SSA_NAME) if (TREE_CODE (rhs) == SSA_NAME)
rhs = VN_INFO (rhs)->valnum; rhs = VN_INFO (rhs)->valnum;
if (val if (val
......
...@@ -1243,6 +1243,7 @@ vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult) ...@@ -1243,6 +1243,7 @@ vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
} }
static tree *last_vuse_ptr; static tree *last_vuse_ptr;
static vn_lookup_kind vn_walk_kind;
/* Callback for walk_non_aliased_vuses. Adjusts the vn_reference_t VR_ /* Callback for walk_non_aliased_vuses. Adjusts the vn_reference_t VR_
with the current VUSE and performs the expression lookup. */ with the current VUSE and performs the expression lookup. */
...@@ -1379,7 +1380,8 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_) ...@@ -1379,7 +1380,8 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_)
/* For aggregate copies translate the reference through them if /* For aggregate copies translate the reference through them if
the copy kills ref. */ the copy kills ref. */
else if (gimple_assign_single_p (def_stmt) else if (vn_walk_kind == VN_WALKREWRITE
&& gimple_assign_single_p (def_stmt)
&& (DECL_P (gimple_assign_rhs1 (def_stmt)) && (DECL_P (gimple_assign_rhs1 (def_stmt))
|| TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
|| handled_component_p (gimple_assign_rhs1 (def_stmt)))) || handled_component_p (gimple_assign_rhs1 (def_stmt))))
...@@ -1473,7 +1475,7 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_) ...@@ -1473,7 +1475,7 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_)
tree tree
vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type, vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
VEC (vn_reference_op_s, heap) *operands, VEC (vn_reference_op_s, heap) *operands,
vn_reference_t *vnresult, bool maywalk) vn_reference_t *vnresult, vn_lookup_kind kind)
{ {
struct vn_reference_s vr1; struct vn_reference_s vr1;
vn_reference_t tmp; vn_reference_t tmp;
...@@ -1501,10 +1503,11 @@ vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type, ...@@ -1501,10 +1503,11 @@ vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
vn_reference_lookup_1 (&vr1, vnresult); vn_reference_lookup_1 (&vr1, vnresult);
if (!*vnresult if (!*vnresult
&& maywalk && kind != VN_NOWALK
&& vr1.vuse) && vr1.vuse)
{ {
ao_ref r; ao_ref r;
vn_walk_kind = kind;
if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands)) if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
*vnresult = *vnresult =
(vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse, (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
...@@ -1527,7 +1530,7 @@ vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type, ...@@ -1527,7 +1530,7 @@ vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
stored in the hashtable if one exists. */ stored in the hashtable if one exists. */
tree tree
vn_reference_lookup (tree op, tree vuse, bool maywalk, vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
vn_reference_t *vnresult) vn_reference_t *vnresult)
{ {
VEC (vn_reference_op_s, heap) *operands; VEC (vn_reference_op_s, heap) *operands;
...@@ -1545,12 +1548,13 @@ vn_reference_lookup (tree op, tree vuse, bool maywalk, ...@@ -1545,12 +1548,13 @@ vn_reference_lookup (tree op, tree vuse, bool maywalk,
if ((cst = fully_constant_vn_reference_p (&vr1))) if ((cst = fully_constant_vn_reference_p (&vr1)))
return cst; return cst;
if (maywalk if (kind != VN_NOWALK
&& vr1.vuse) && vr1.vuse)
{ {
vn_reference_t wvnresult; vn_reference_t wvnresult;
ao_ref r; ao_ref r;
ao_ref_init (&r, op); ao_ref_init (&r, op);
vn_walk_kind = kind;
wvnresult = wvnresult =
(vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse, (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
vn_reference_lookup_2, vn_reference_lookup_2,
...@@ -2257,14 +2261,14 @@ visit_reference_op_load (tree lhs, tree op, gimple stmt) ...@@ -2257,14 +2261,14 @@ visit_reference_op_load (tree lhs, tree op, gimple stmt)
last_vuse = gimple_vuse (stmt); last_vuse = gimple_vuse (stmt);
last_vuse_ptr = &last_vuse; last_vuse_ptr = &last_vuse;
result = vn_reference_lookup (op, gimple_vuse (stmt), true, NULL); result = vn_reference_lookup (op, gimple_vuse (stmt), VN_WALKREWRITE, NULL);
last_vuse_ptr = NULL; last_vuse_ptr = NULL;
/* If we have a VCE, try looking up its operand as it might be stored in /* If we have a VCE, try looking up its operand as it might be stored in
a different type. */ a different type. */
if (!result && TREE_CODE (op) == VIEW_CONVERT_EXPR) if (!result && TREE_CODE (op) == VIEW_CONVERT_EXPR)
result = vn_reference_lookup (TREE_OPERAND (op, 0), gimple_vuse (stmt), result = vn_reference_lookup (TREE_OPERAND (op, 0), gimple_vuse (stmt),
true, NULL); VN_WALKREWRITE, NULL);
/* We handle type-punning through unions by value-numbering based /* We handle type-punning through unions by value-numbering based
on offset and size of the access. Be prepared to handle a on offset and size of the access. Be prepared to handle a
...@@ -2375,7 +2379,7 @@ visit_reference_op_store (tree lhs, tree op, gimple stmt) ...@@ -2375,7 +2379,7 @@ visit_reference_op_store (tree lhs, tree op, gimple stmt)
Otherwise, the vdefs for the store are used when inserting into Otherwise, the vdefs for the store are used when inserting into
the table, since the store generates a new memory state. */ the table, since the store generates a new memory state. */
result = vn_reference_lookup (lhs, gimple_vuse (stmt), false, NULL); result = vn_reference_lookup (lhs, gimple_vuse (stmt), VN_NOWALK, NULL);
if (result) if (result)
{ {
......
...@@ -187,10 +187,11 @@ void copy_reference_ops_from_ref (tree, VEC(vn_reference_op_s, heap) **); ...@@ -187,10 +187,11 @@ void copy_reference_ops_from_ref (tree, VEC(vn_reference_op_s, heap) **);
void copy_reference_ops_from_call (gimple, VEC(vn_reference_op_s, heap) **); void copy_reference_ops_from_call (gimple, VEC(vn_reference_op_s, heap) **);
bool ao_ref_init_from_vn_reference (ao_ref *, alias_set_type, tree, bool ao_ref_init_from_vn_reference (ao_ref *, alias_set_type, tree,
VEC (vn_reference_op_s, heap) *); VEC (vn_reference_op_s, heap) *);
typedef enum { VN_NOWALK, VN_WALK, VN_WALKREWRITE } vn_lookup_kind;
tree vn_reference_lookup_pieces (tree, alias_set_type, tree, tree vn_reference_lookup_pieces (tree, alias_set_type, tree,
VEC (vn_reference_op_s, heap) *, VEC (vn_reference_op_s, heap) *,
vn_reference_t *, bool); vn_reference_t *, vn_lookup_kind);
tree vn_reference_lookup (tree, tree, bool, vn_reference_t *); tree vn_reference_lookup (tree, tree, vn_lookup_kind, vn_reference_t *);
vn_reference_t vn_reference_insert (tree, tree, tree); vn_reference_t vn_reference_insert (tree, tree, tree);
vn_reference_t vn_reference_insert_pieces (tree, alias_set_type, tree, vn_reference_t vn_reference_insert_pieces (tree, alias_set_type, tree,
VEC (vn_reference_op_s, heap) *, VEC (vn_reference_op_s, heap) *,
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment