Commit 5f0653a8 by Richard Biener

tree-optimization/91123 - restore redundant store removal

Redundant store removal in FRE was restricted for correctness reasons.
The following extends correctness fixes required to memcpy/aggregate
copy translation.  The main change is that we no longer insert
references rewritten to cover such aggregate copies into the hashtable
but the original one.

2020-02-04  Richard Biener  <rguenther@suse.de>

	PR tree-optimization/91123
	* tree-ssa-sccvn.c (vn_walk_cb_data::finish): New method.
	(vn_walk_cb_data::last_vuse): New member.
	(vn_walk_cb_data::saved_operands): Likewise.
	(vn_walk_cb_data::~vn_walk_cb_data): Release saved_operands.
	(vn_walk_cb_data::push_partial_def): Use finish.
	(vn_reference_lookup_2): Update last_vuse and use finish if
	we've saved operands.
	(vn_reference_lookup_3): Use finish and update calls to
	push_partial_defs everywhere.  When translating through
	memcpy or aggregate copies save off operands and alias-set.
	(eliminate_dom_walker::eliminate_stmt): Restore VN_WALKREWRITE
	operation for redundant store removal.

	* gcc.dg/tree-ssa/ssa-fre-85.c: New testcase.
parent fc98d038
2020-02-04 Richard Biener <rguenther@suse.de> 2020-02-04 Richard Biener <rguenther@suse.de>
PR tree-optimization/91123
* tree-ssa-sccvn.c (vn_walk_cb_data::finish): New method.
(vn_walk_cb_data::last_vuse): New member.
(vn_walk_cb_data::saved_operands): Likewise.
(vn_walk_cb_data::~vn_walk_cb_data): Release saved_operands.
(vn_walk_cb_data::push_partial_def): Use finish.
(vn_reference_lookup_2): Update last_vuse and use finish if
we've saved operands.
(vn_reference_lookup_3): Use finish and update calls to
push_partial_defs everywhere. When translating through
memcpy or aggregate copies save off operands and alias-set.
(eliminate_dom_walker::eliminate_stmt): Restore VN_WALKREWRITE
operation for redundant store removal.
2020-02-04 Richard Biener <rguenther@suse.de>
PR tree-optimization/92819 PR tree-optimization/92819
* tree-ssa-forwprop.c (simplify_vector_constructor): Avoid * tree-ssa-forwprop.c (simplify_vector_constructor): Avoid
generating more stmts than before. generating more stmts than before.
......
2020-02-04 Richard Biener <rguenther@suse.de> 2020-02-04 Richard Biener <rguenther@suse.de>
PR tree-optimization/91123
* gcc.dg/tree-ssa/ssa-fre-85.c: New testcase.
2020-02-04 Richard Biener <rguenther@suse.de>
PR tree-optimization/92819 PR tree-optimization/92819
* gcc.target/i386/pr92819.c: New testcase. * gcc.target/i386/pr92819.c: New testcase.
* gcc.target/i386/pr92803.c: Adjust. * gcc.target/i386/pr92803.c: Adjust.
......
/* { dg-do compile } */
/* { dg-options "-O -fstrict-aliasing -fdump-tree-fre1-details" } */
struct X { int i; int j; };
struct X x, y;
void foo ()
{
x.i = 1;
y = x;
y.i = 1; // redundant
}
/* { dg-final { scan-tree-dump "Deleted redundant store y.i" "fre1" } } */
...@@ -1687,26 +1687,30 @@ struct vn_walk_cb_data ...@@ -1687,26 +1687,30 @@ struct vn_walk_cb_data
{ {
vn_walk_cb_data (vn_reference_t vr_, tree orig_ref_, tree *last_vuse_ptr_, vn_walk_cb_data (vn_reference_t vr_, tree orig_ref_, tree *last_vuse_ptr_,
vn_lookup_kind vn_walk_kind_, bool tbaa_p_) vn_lookup_kind vn_walk_kind_, bool tbaa_p_)
: vr (vr_), last_vuse_ptr (last_vuse_ptr_), : vr (vr_), last_vuse_ptr (last_vuse_ptr_), last_vuse (NULL_TREE),
vn_walk_kind (vn_walk_kind_), tbaa_p (tbaa_p_), known_ranges (NULL) vn_walk_kind (vn_walk_kind_), tbaa_p (tbaa_p_),
saved_operands (vNULL), first_set (-2), known_ranges (NULL)
{ {
if (!last_vuse_ptr)
last_vuse_ptr = &last_vuse;
ao_ref_init (&orig_ref, orig_ref_); ao_ref_init (&orig_ref, orig_ref_);
} }
~vn_walk_cb_data (); ~vn_walk_cb_data ();
void *push_partial_def (const pd_data& pd, tree, void *finish (alias_set_type, tree);
alias_set_type, HOST_WIDE_INT); void *push_partial_def (const pd_data& pd, alias_set_type, HOST_WIDE_INT);
vn_reference_t vr; vn_reference_t vr;
ao_ref orig_ref; ao_ref orig_ref;
tree *last_vuse_ptr; tree *last_vuse_ptr;
tree last_vuse;
vn_lookup_kind vn_walk_kind; vn_lookup_kind vn_walk_kind;
bool tbaa_p; bool tbaa_p;
vec<vn_reference_op_s> saved_operands;
/* The VDEFs of partial defs we come along. */ /* The VDEFs of partial defs we come along. */
auto_vec<pd_data, 2> partial_defs; auto_vec<pd_data, 2> partial_defs;
/* The first defs range to avoid splay tree setup in most cases. */ /* The first defs range to avoid splay tree setup in most cases. */
pd_range first_range; pd_range first_range;
tree first_vuse;
alias_set_type first_set; alias_set_type first_set;
splay_tree known_ranges; splay_tree known_ranges;
obstack ranges_obstack; obstack ranges_obstack;
...@@ -1719,6 +1723,17 @@ vn_walk_cb_data::~vn_walk_cb_data () ...@@ -1719,6 +1723,17 @@ vn_walk_cb_data::~vn_walk_cb_data ()
splay_tree_delete (known_ranges); splay_tree_delete (known_ranges);
obstack_free (&ranges_obstack, NULL); obstack_free (&ranges_obstack, NULL);
} }
saved_operands.release ();
}
void *
vn_walk_cb_data::finish (alias_set_type set, tree val)
{
if (first_set != -2)
set = first_set;
return vn_reference_lookup_or_insert_for_pieces
(last_vuse, set, vr->type,
saved_operands.exists () ? saved_operands : vr->operands, val);
} }
/* pd_range splay-tree helpers. */ /* pd_range splay-tree helpers. */
...@@ -1753,7 +1768,7 @@ pd_tree_dealloc (void *, void *) ...@@ -1753,7 +1768,7 @@ pd_tree_dealloc (void *, void *)
on failure. */ on failure. */
void * void *
vn_walk_cb_data::push_partial_def (const pd_data &pd, tree vuse, vn_walk_cb_data::push_partial_def (const pd_data &pd,
alias_set_type set, HOST_WIDE_INT maxsizei) alias_set_type set, HOST_WIDE_INT maxsizei)
{ {
const HOST_WIDE_INT bufsize = 64; const HOST_WIDE_INT bufsize = 64;
...@@ -1774,7 +1789,6 @@ vn_walk_cb_data::push_partial_def (const pd_data &pd, tree vuse, ...@@ -1774,7 +1789,6 @@ vn_walk_cb_data::push_partial_def (const pd_data &pd, tree vuse,
partial_defs.safe_push (pd); partial_defs.safe_push (pd);
first_range.offset = pd.offset; first_range.offset = pd.offset;
first_range.size = pd.size; first_range.size = pd.size;
first_vuse = vuse;
first_set = set; first_set = set;
last_vuse_ptr = NULL; last_vuse_ptr = NULL;
/* Continue looking for partial defs. */ /* Continue looking for partial defs. */
...@@ -1908,8 +1922,7 @@ vn_walk_cb_data::push_partial_def (const pd_data &pd, tree vuse, ...@@ -1908,8 +1922,7 @@ vn_walk_cb_data::push_partial_def (const pd_data &pd, tree vuse,
"Successfully combined %u partial definitions\n", ndefs); "Successfully combined %u partial definitions\n", ndefs);
/* We are using the alias-set of the first store we encounter which /* We are using the alias-set of the first store we encounter which
should be appropriate here. */ should be appropriate here. */
return vn_reference_lookup_or_insert_for_pieces return finish (first_set, val);
(first_vuse, first_set, vr->type, vr->operands, val);
} }
else else
{ {
...@@ -1937,7 +1950,10 @@ vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse, void *data_) ...@@ -1937,7 +1950,10 @@ vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse, void *data_)
return NULL; return NULL;
if (data->last_vuse_ptr) if (data->last_vuse_ptr)
*data->last_vuse_ptr = vuse; {
*data->last_vuse_ptr = vuse;
data->last_vuse = vuse;
}
/* Fixup vuse and hash. */ /* Fixup vuse and hash. */
if (vr->vuse) if (vr->vuse)
...@@ -1949,7 +1965,11 @@ vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse, void *data_) ...@@ -1949,7 +1965,11 @@ vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse, void *data_)
hash = vr->hashcode; hash = vr->hashcode;
slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT); slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
if (slot) if (slot)
return *slot; {
if ((*slot)->result && data->saved_operands.exists ())
return data->finish (vr->set, (*slot)->result);
return *slot;
}
return NULL; return NULL;
} }
...@@ -2479,8 +2499,7 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_, ...@@ -2479,8 +2499,7 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_,
if (!val) if (!val)
return (void *)-1; return (void *)-1;
} }
return vn_reference_lookup_or_insert_for_pieces return data->finish (0, val);
(vuse, 0, vr->type, vr->operands, val);
} }
/* For now handle clearing memory with partial defs. */ /* For now handle clearing memory with partial defs. */
else if (known_eq (ref->size, maxsize) else if (known_eq (ref->size, maxsize)
...@@ -2495,7 +2514,7 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_, ...@@ -2495,7 +2514,7 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_,
pd.rhs = build_constructor (NULL_TREE, NULL); pd.rhs = build_constructor (NULL_TREE, NULL);
pd.offset = (offset2i - offseti) / BITS_PER_UNIT; pd.offset = (offset2i - offseti) / BITS_PER_UNIT;
pd.size = leni; pd.size = leni;
return data->push_partial_def (pd, vuse, 0, maxsizei); return data->push_partial_def (pd, 0, maxsizei);
} }
} }
...@@ -2534,8 +2553,7 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_, ...@@ -2534,8 +2553,7 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_,
if (gimple_clobber_p (def_stmt)) if (gimple_clobber_p (def_stmt))
return (void *)-1; return (void *)-1;
tree val = build_zero_cst (vr->type); tree val = build_zero_cst (vr->type);
return vn_reference_lookup_or_insert_for_pieces return data->finish (get_alias_set (lhs), val);
(vuse, get_alias_set (lhs), vr->type, vr->operands, val);
} }
else if (known_eq (ref->size, maxsize) else if (known_eq (ref->size, maxsize)
&& maxsize.is_constant (&maxsizei) && maxsize.is_constant (&maxsizei)
...@@ -2556,8 +2574,7 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_, ...@@ -2556,8 +2574,7 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_,
pd.rhs = gimple_assign_rhs1 (def_stmt); pd.rhs = gimple_assign_rhs1 (def_stmt);
pd.offset = (offset2i - offseti) / BITS_PER_UNIT; pd.offset = (offset2i - offseti) / BITS_PER_UNIT;
pd.size = size2i / BITS_PER_UNIT; pd.size = size2i / BITS_PER_UNIT;
return data->push_partial_def (pd, vuse, get_alias_set (lhs), return data->push_partial_def (pd, get_alias_set (lhs), maxsizei);
maxsizei);
} }
} }
} }
...@@ -2656,8 +2673,7 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_, ...@@ -2656,8 +2673,7 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_,
} }
if (val) if (val)
return vn_reference_lookup_or_insert_for_pieces return data->finish (get_alias_set (lhs), val);
(vuse, get_alias_set (lhs), vr->type, vr->operands, val);
} }
} }
else if (ranges_known_overlap_p (offseti, maxsizei, offset2i, size2i)) else if (ranges_known_overlap_p (offseti, maxsizei, offset2i, size2i))
...@@ -2669,8 +2685,7 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_, ...@@ -2669,8 +2685,7 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_,
pd.rhs = rhs; pd.rhs = rhs;
pd.offset = (offset2i - offseti) / BITS_PER_UNIT; pd.offset = (offset2i - offseti) / BITS_PER_UNIT;
pd.size = size2i / BITS_PER_UNIT; pd.size = size2i / BITS_PER_UNIT;
return data->push_partial_def (pd, vuse, get_alias_set (lhs), return data->push_partial_def (pd, get_alias_set (lhs), maxsizei);
maxsizei);
} }
} }
} }
...@@ -2738,9 +2753,7 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_, ...@@ -2738,9 +2753,7 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_,
if (val if (val
&& (TREE_CODE (val) != SSA_NAME && (TREE_CODE (val) != SSA_NAME
|| ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val))) || ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
return vn_reference_lookup_or_insert_for_pieces return data->finish (get_alias_set (lhs), val);
(vuse, get_alias_set (lhs), vr->type,
vr->operands, val);
} }
else if (maxsize.is_constant (&maxsizei) else if (maxsize.is_constant (&maxsizei)
&& maxsizei % BITS_PER_UNIT == 0 && maxsizei % BITS_PER_UNIT == 0
...@@ -2756,8 +2769,7 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_, ...@@ -2756,8 +2769,7 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_,
pd.rhs = SSA_VAL (def_rhs); pd.rhs = SSA_VAL (def_rhs);
pd.offset = (offset2i - offseti) / BITS_PER_UNIT; pd.offset = (offset2i - offseti) / BITS_PER_UNIT;
pd.size = size2i / BITS_PER_UNIT; pd.size = size2i / BITS_PER_UNIT;
return data->push_partial_def (pd, vuse, get_alias_set (lhs), return data->push_partial_def (pd, get_alias_set (lhs), maxsizei);
maxsizei);
} }
} }
} }
...@@ -2858,6 +2870,11 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_, ...@@ -2858,6 +2870,11 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_,
extra_off)); extra_off));
} }
/* Save the operands since we need to use the original ones for
the hash entry we use. */
if (!data->saved_operands.exists ())
data->saved_operands = vr->operands.copy ();
/* We need to pre-pend vr->operands[0..i] to rhs. */ /* We need to pre-pend vr->operands[0..i] to rhs. */
vec<vn_reference_op_s> old = vr->operands; vec<vn_reference_op_s> old = vr->operands;
if (i + 1 + rhs.length () > vr->operands.length ()) if (i + 1 + rhs.length () > vr->operands.length ())
...@@ -2876,8 +2893,7 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_, ...@@ -2876,8 +2893,7 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_,
if (val) if (val)
{ {
if (data->partial_defs.is_empty ()) if (data->partial_defs.is_empty ())
return vn_reference_lookup_or_insert_for_pieces return data->finish (get_alias_set (lhs), val);
(vuse, get_alias_set (lhs), vr->type, vr->operands, val);
/* This is the only interesting case for partial-def handling /* This is the only interesting case for partial-def handling
coming from targets that like to gimplify init-ctors as coming from targets that like to gimplify init-ctors as
aggregate copies from constant data like aarch64 for aggregate copies from constant data like aarch64 for
...@@ -2889,8 +2905,7 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_, ...@@ -2889,8 +2905,7 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_,
pd.rhs = val; pd.rhs = val;
pd.offset = 0; pd.offset = 0;
pd.size = maxsizei / BITS_PER_UNIT; pd.size = maxsizei / BITS_PER_UNIT;
return data->push_partial_def (pd, vuse, get_alias_set (lhs), return data->push_partial_def (pd, get_alias_set (lhs), maxsizei);
maxsizei);
} }
} }
...@@ -2914,6 +2929,9 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_, ...@@ -2914,6 +2929,9 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_,
/* Invalidate the original access path since it now contains /* Invalidate the original access path since it now contains
the wrong base. */ the wrong base. */
data->orig_ref.ref = NULL_TREE; data->orig_ref.ref = NULL_TREE;
/* Use the alias-set of this LHS for recording an eventual result. */
if (data->first_set == -2)
data->first_set = get_alias_set (lhs);
/* Keep looking for the adjusted *REF / VR pair. */ /* Keep looking for the adjusted *REF / VR pair. */
return NULL; return NULL;
...@@ -3034,6 +3052,11 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_, ...@@ -3034,6 +3052,11 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_,
if (!known_subrange_p (at, byte_maxsize, lhs_offset, copy_size)) if (!known_subrange_p (at, byte_maxsize, lhs_offset, copy_size))
return (void *)-1; return (void *)-1;
/* Save the operands since we need to use the original ones for
the hash entry we use. */
if (!data->saved_operands.exists ())
data->saved_operands = vr->operands.copy ();
/* Make room for 2 operands in the new reference. */ /* Make room for 2 operands in the new reference. */
if (vr->operands.length () < 2) if (vr->operands.length () < 2)
{ {
...@@ -3062,8 +3085,7 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_, ...@@ -3062,8 +3085,7 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_,
/* Try folding the new reference to a constant. */ /* Try folding the new reference to a constant. */
tree val = fully_constant_vn_reference_p (vr); tree val = fully_constant_vn_reference_p (vr);
if (val) if (val)
return vn_reference_lookup_or_insert_for_pieces return data->finish (0, val);
(vuse, 0, vr->type, vr->operands, val);
/* Adjust *ref from the new operands. */ /* Adjust *ref from the new operands. */
if (!ao_ref_init_from_vn_reference (&r, 0, vr->type, vr->operands)) if (!ao_ref_init_from_vn_reference (&r, 0, vr->type, vr->operands))
...@@ -3078,6 +3100,9 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_, ...@@ -3078,6 +3100,9 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_,
/* Invalidate the original access path since it now contains /* Invalidate the original access path since it now contains
the wrong base. */ the wrong base. */
data->orig_ref.ref = NULL_TREE; data->orig_ref.ref = NULL_TREE;
/* Use the alias-set of this stmt for recording an eventual result. */
if (data->first_set == -2)
data->first_set = 0;
/* Keep looking for the adjusted *REF / VR pair. */ /* Keep looking for the adjusted *REF / VR pair. */
return NULL; return NULL;
...@@ -5655,8 +5680,8 @@ eliminate_dom_walker::eliminate_stmt (basic_block b, gimple_stmt_iterator *gsi) ...@@ -5655,8 +5680,8 @@ eliminate_dom_walker::eliminate_stmt (basic_block b, gimple_stmt_iterator *gsi)
} }
tree val = NULL_TREE; tree val = NULL_TREE;
if (lookup_lhs) if (lookup_lhs)
val = vn_reference_lookup (lookup_lhs, gimple_vuse (stmt), VN_WALK, val = vn_reference_lookup (lookup_lhs, gimple_vuse (stmt),
&vnresult, false); VN_WALKREWRITE, &vnresult, false);
if (TREE_CODE (rhs) == SSA_NAME) if (TREE_CODE (rhs) == SSA_NAME)
rhs = VN_INFO (rhs)->valnum; rhs = VN_INFO (rhs)->valnum;
if (val if (val
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment