Commit 3d6fd7ce by Richard Biener

tree-optimization/93946 - fix bogus redundant store removal in FRE, DSE and DOM

This fixes a common mistake in removing a store that looks redundant but
is not because it changes the dynamic type of the memory and thus makes
a difference for following loads with TBAA.

2020-03-03  Richard Biener  <rguenther@suse.de>

	PR tree-optimization/93946
	* alias.h (refs_same_for_tbaa_p): Declare.
	* alias.c (refs_same_for_tbaa_p): New function.
	* tree-ssa-alias.c (ao_ref_alias_set): For a NULL ref return
	zero.
	* tree-ssa-scopedtables.h
	(avail_exprs_stack::lookup_avail_expr): Add output argument
	giving access to the hashtable entry.
	* tree-ssa-scopedtables.c (avail_exprs_stack::lookup_avail_expr):
	Likewise.
	* tree-ssa-dom.c: Include alias.h.
	(dom_opt_dom_walker::optimize_stmt): Validate TBAA state before
	removing redundant store.
	* tree-ssa-sccvn.h (vn_reference_s::base_set): New member.
	(ao_ref_init_from_vn_reference): Adjust prototype.
	(vn_reference_lookup_pieces): Likewise.
	(vn_reference_insert_pieces): Likewise.
	* tree-ssa-sccvn.c: Track base alias set in addition to alias
	set everywhere.
	(eliminate_dom_walker::eliminate_stmt): Also check base alias
	set when removing redundant stores.
	(visit_reference_op_store): Likewise.
	* dse.c (record_store): Adjust validity check for redundant
	store removal.

	* gcc.dg/torture/pr93946-1.c: New testcase.
	* gcc.dg/torture/pr93946-2.c: Likewise.
parent 01eb1bb0
2020-03-03 Richard Biener <rguenther@suse.de>
PR tree-optimization/93946
* alias.h (refs_same_for_tbaa_p): Declare.
* alias.c (refs_same_for_tbaa_p): New function.
* tree-ssa-alias.c (ao_ref_alias_set): For a NULL ref return
zero.
* tree-ssa-scopedtables.h
(avail_exprs_stack::lookup_avail_expr): Add output argument
giving access to the hashtable entry.
* tree-ssa-scopedtables.c (avail_exprs_stack::lookup_avail_expr):
Likewise.
* tree-ssa-dom.c: Include alias.h.
(dom_opt_dom_walker::optimize_stmt): Validate TBAA state before
removing redundant store.
* tree-ssa-sccvn.h (vn_reference_s::base_set): New member.
(ao_ref_init_from_vn_reference): Adjust prototype.
(vn_reference_lookup_pieces): Likewise.
(vn_reference_insert_pieces): Likewise.
* tree-ssa-sccvn.c: Track base alias set in addition to alias
set everywhere.
(eliminate_dom_walker::eliminate_stmt): Also check base alias
set when removing redundant stores.
(visit_reference_op_store): Likewise.
* dse.c (record_store): Adjust validity check for redundant
store removal.
2020-03-03 Jakub Jelinek <jakub@redhat.com>
PR target/26877
......
......@@ -368,6 +368,26 @@ rtx_refs_may_alias_p (const_rtx x, const_rtx mem, bool tbaa_p)
&& MEM_ALIAS_SET (mem) != 0);
}
/* Return true if the ref EARLIER behaves the same as LATER with respect
to TBAA for every memory reference that might follow LATER. */
bool
refs_same_for_tbaa_p (tree earlier, tree later)
{
ao_ref earlier_ref, later_ref;
ao_ref_init (&earlier_ref, earlier);
ao_ref_init (&later_ref, later);
/* The access alias-set of LATER must be the same as, or a subset of,
   that of EARLIER, so that every following load that is
   TBAA-compatible with LATER is also compatible with EARLIER.  */
alias_set_type earlier_set = ao_ref_alias_set (&earlier_ref);
alias_set_type later_set = ao_ref_alias_set (&later_ref);
if (!(earlier_set == later_set
|| alias_set_subset_of (later_set, earlier_set)))
return false;
/* Likewise for the alias-sets of the base objects.  */
alias_set_type later_base_set = ao_ref_base_alias_set (&later_ref);
alias_set_type earlier_base_set = ao_ref_base_alias_set (&earlier_ref);
return (earlier_base_set == later_base_set
|| alias_set_subset_of (later_base_set, earlier_base_set));
}
/* Returns a pointer to the alias set entry for ALIAS_SET, if there is
such an entry, or NULL otherwise. */
......
......@@ -38,6 +38,7 @@ extern void dump_alias_stats_in_alias_c (FILE *s);
tree reference_alias_ptr_type (tree);
bool alias_ptr_types_compatible_p (tree, tree);
int compare_base_decls (tree, tree);
bool refs_same_for_tbaa_p (tree, tree);
/* This alias set can be used to force a memory to conflict with all
other memories, creating a barrier across which no memory reference
......
......@@ -1540,9 +1540,12 @@ record_store (rtx body, bb_info_t bb_info)
width)
/* We can only remove the later store if the earlier aliases
at least all accesses the later one. */
&& (MEM_ALIAS_SET (mem) == MEM_ALIAS_SET (s_info->mem)
|| alias_set_subset_of (MEM_ALIAS_SET (mem),
MEM_ALIAS_SET (s_info->mem))))
&& ((MEM_ALIAS_SET (mem) == MEM_ALIAS_SET (s_info->mem)
|| alias_set_subset_of (MEM_ALIAS_SET (mem),
MEM_ALIAS_SET (s_info->mem)))
&& (!MEM_EXPR (s_info->mem)
|| refs_same_for_tbaa_p (MEM_EXPR (s_info->mem),
MEM_EXPR (mem)))))
{
if (GET_MODE (mem) == BLKmode)
{
......
2020-03-03 Richard Biener <rguenther@suse.de>
PR tree-optimization/93946
* gcc.dg/torture/pr93946-1.c: New testcase.
* gcc.dg/torture/pr93946-2.c: Likewise.
2020-03-03 Jakub Jelinek <jakub@redhat.com>
PR rtl-optimization/94002
......
/* { dg-do run } */
/* PR tree-optimization/93946: a store that looks redundant must not be
   removed when doing so would change the dynamic type of the memory,
   which matters for TBAA of following loads (see the commit message).  */
union U { long long i; long f; };
struct a {union U u;};
struct aa {struct a a;};
struct b {union U u;};
struct bb {struct b b;};
/* noipa keeps the aliasing of BV and PTR (both point at the same union
   object in main) hidden from interprocedural analysis.  */
long __attribute__((noipa))
foo (struct bb *bv, void *ptr)
{
struct aa *a = ptr;
struct bb *b = ptr;
bv->b.u.f = 1;
/* Stores zero through effective type 'struct aa'.  */
a->a.u.i = 0;
/* Stores the same value to the same location, but through 'struct bb';
   deleting this store as redundant would let TBAA disambiguate the
   load below against the 'struct aa' store and wrongly reuse 1.  */
b->b.u.f = 0;
return bv->b.u.f;
}
int
main ()
{
union C {struct aa aa; struct bb bb;} v;
/* The last store wins: the correct result is 0.  */
if (foo (&v.bb, &v) != 0)
__builtin_abort ();
return 0;
}
/* { dg-do run } */
/* { dg-additional-options "-fno-tree-dse" } */
union U { long long i; long f; };
struct a {union U u;};
struct aa {struct a a;};
struct b {union U u;};
struct bb {struct b b;};
long __attribute__((noipa))
foo (struct bb *bv, void *ptr)
{
struct aa *a = ptr;
struct bb *b = ptr;
bv->b.u.f = 1;
a->a.u.i = 0;
b->b.u.f = 0;
return bv->b.u.f;
}
int
main ()
{
union C {struct aa aa; struct bb bb;} v;
if (foo (&v.bb, &v) != 0)
__builtin_abort ();
return 0;
}
......@@ -697,6 +697,8 @@ ao_ref_alias_set (ao_ref *ref)
{
if (ref->ref_alias_set != -1)
return ref->ref_alias_set;
if (!ref->ref)
return 0;
ref->ref_alias_set = get_alias_set (ref->ref);
return ref->ref_alias_set;
}
......
......@@ -49,6 +49,7 @@ along with GCC; see the file COPYING3. If not see
#include "tree-vrp.h"
#include "vr-values.h"
#include "gimple-ssa-evrp-analyze.h"
#include "alias.h"
/* This file implements optimizations on the dominator tree. */
......@@ -2155,9 +2156,14 @@ dom_opt_dom_walker::optimize_stmt (basic_block bb, gimple_stmt_iterator *si,
else
new_stmt = gimple_build_assign (rhs, lhs);
gimple_set_vuse (new_stmt, gimple_vuse (stmt));
expr_hash_elt *elt = NULL;
cached_lhs = m_avail_exprs_stack->lookup_avail_expr (new_stmt, false,
false);
if (cached_lhs && operand_equal_p (rhs, cached_lhs, 0))
false, &elt);
if (cached_lhs
&& operand_equal_p (rhs, cached_lhs, 0)
&& refs_same_for_tbaa_p (elt->expr ()->kind == EXPR_SINGLE
? elt->expr ()->ops.single.rhs
: NULL_TREE, lhs))
{
basic_block bb = gimple_bb (stmt);
unlink_stmt_vdef (stmt);
......
......@@ -658,6 +658,17 @@ dse_optimize_redundant_stores (gimple *stmt)
{
int cnt = 0;
/* TBAA state of STMT, if it is a call it is effectively alias-set zero. */
alias_set_type earlier_set = 0;
alias_set_type earlier_base_set = 0;
if (is_gimple_assign (stmt))
{
ao_ref lhs_ref;
ao_ref_init (&lhs_ref, gimple_assign_lhs (stmt));
earlier_set = ao_ref_alias_set (&lhs_ref);
earlier_base_set = ao_ref_base_alias_set (&lhs_ref);
}
/* We could do something fairly complex and look through PHIs
like DSE_CLASSIFY_STORE, but it doesn't seem to be worth
the effort.
......@@ -698,10 +709,27 @@ dse_optimize_redundant_stores (gimple *stmt)
{
gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
if (is_gimple_assign (use_stmt))
delete_dead_or_redundant_assignment (&gsi, "redundant",
need_eh_cleanup);
{
ao_ref lhs_ref;
ao_ref_init (&lhs_ref, gimple_assign_lhs (use_stmt));
if ((earlier_set == ao_ref_alias_set (&lhs_ref)
|| alias_set_subset_of (ao_ref_alias_set (&lhs_ref),
earlier_set))
&& (earlier_base_set == ao_ref_base_alias_set (&lhs_ref)
|| alias_set_subset_of
(ao_ref_base_alias_set (&lhs_ref),
earlier_base_set)))
delete_dead_or_redundant_assignment (&gsi, "redundant",
need_eh_cleanup);
}
else if (is_gimple_call (use_stmt))
delete_dead_or_redundant_call (&gsi, "redundant");
{
if ((earlier_set == 0
|| alias_set_subset_of (0, earlier_set))
&& (earlier_base_set == 0
|| alias_set_subset_of (0, earlier_base_set)))
delete_dead_or_redundant_call (&gsi, "redundant");
}
else
gcc_unreachable ();
}
......
......@@ -1140,7 +1140,8 @@ fully_constant_expression (pre_expr e)
static tree
translate_vuse_through_block (vec<vn_reference_op_s> operands,
alias_set_type set, tree type, tree vuse,
alias_set_type set, alias_set_type base_set,
tree type, tree vuse,
basic_block phiblock,
basic_block block, bool *same_valid)
{
......@@ -1156,7 +1157,8 @@ translate_vuse_through_block (vec<vn_reference_op_s> operands,
return vuse;
unsigned int cnt = param_sccvn_max_alias_queries_per_access;
use_oracle = ao_ref_init_from_vn_reference (&ref, set, type, operands);
use_oracle = ao_ref_init_from_vn_reference (&ref, set, base_set,
type, operands);
/* Use the alias-oracle to find either the PHI node in this block,
the first VUSE used in this block that is equivalent to vuse or
......@@ -1535,7 +1537,8 @@ phi_translate_1 (bitmap_set_t dest,
{
newvuse = translate_vuse_through_block (newoperands.exists ()
? newoperands : operands,
ref->set, ref->type,
ref->set, ref->base_set,
ref->type,
vuse, phiblock, pred,
changed
? NULL : &same_valid);
......@@ -1551,6 +1554,7 @@ phi_translate_1 (bitmap_set_t dest,
unsigned int new_val_id;
tree result = vn_reference_lookup_pieces (newvuse, ref->set,
ref->base_set,
ref->type,
newoperands.exists ()
? newoperands : operands,
......@@ -1608,7 +1612,7 @@ phi_translate_1 (bitmap_set_t dest,
if (!newoperands.exists ())
newoperands = operands.copy ();
newref = vn_reference_insert_pieces (newvuse, ref->set,
ref->type,
ref->base_set, ref->type,
newoperands,
result, new_val_id);
newoperands = vNULL;
......@@ -1827,8 +1831,8 @@ value_dies_in_block_x (pre_expr expr, basic_block block)
/* Init ref only if we really need it. */
if (ref.base == NULL_TREE
&& !ao_ref_init_from_vn_reference (&ref, refx->set, refx->type,
refx->operands))
&& !ao_ref_init_from_vn_reference (&ref, refx->set, refx->base_set,
refx->type, refx->operands))
{
res = true;
break;
......@@ -3914,12 +3918,16 @@ compute_avail (void)
case VN_REFERENCE:
{
tree rhs1 = gimple_assign_rhs1 (stmt);
alias_set_type set = get_alias_set (rhs1);
ao_ref rhs1_ref;
ao_ref_init (&rhs1_ref, rhs1);
alias_set_type set = ao_ref_alias_set (&rhs1_ref);
alias_set_type base_set
= ao_ref_base_alias_set (&rhs1_ref);
vec<vn_reference_op_s> operands
= vn_reference_operands_for_lookup (rhs1);
vn_reference_t ref;
vn_reference_lookup_pieces (gimple_vuse (stmt), set,
TREE_TYPE (rhs1),
base_set, TREE_TYPE (rhs1),
operands, &ref, VN_WALK);
if (!ref)
{
......
......@@ -367,7 +367,8 @@ static void init_vn_nary_op_from_pieces (vn_nary_op_t, unsigned int,
enum tree_code, tree, tree *);
static tree vn_lookup_simplify_result (gimple_match_op *);
static vn_reference_t vn_reference_lookup_or_insert_for_pieces
(tree, alias_set_type, tree, vec<vn_reference_op_s, va_heap>, tree);
(tree, alias_set_type, alias_set_type, tree,
vec<vn_reference_op_s, va_heap>, tree);
/* Return whether there is value numbering information for a given SSA name. */
......@@ -982,8 +983,8 @@ copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
bool
ao_ref_init_from_vn_reference (ao_ref *ref,
alias_set_type set, tree type,
vec<vn_reference_op_s> ops)
alias_set_type set, alias_set_type base_set,
tree type, vec<vn_reference_op_s> ops)
{
vn_reference_op_t op;
unsigned i;
......@@ -993,7 +994,6 @@ ao_ref_init_from_vn_reference (ao_ref *ref,
poly_offset_int max_size;
poly_offset_int size = -1;
tree size_tree = NULL_TREE;
alias_set_type base_alias_set = -1;
/* First get the final access size from just the outermost expression. */
op = &ops[0];
......@@ -1050,7 +1050,6 @@ ao_ref_init_from_vn_reference (ao_ref *ref,
/* Record the base objects. */
case MEM_REF:
base_alias_set = get_deref_alias_set (op->op0);
*op0_p = build2 (MEM_REF, op->type,
NULL_TREE, op->op0);
MR_DEPENDENCE_CLIQUE (*op0_p) = op->clique;
......@@ -1140,10 +1139,7 @@ ao_ref_init_from_vn_reference (ao_ref *ref,
ref->ref = NULL_TREE;
ref->base = base;
ref->ref_alias_set = set;
if (base_alias_set != -1)
ref->base_alias_set = base_alias_set;
else
ref->base_alias_set = get_alias_set (base);
ref->base_alias_set = base_set;
/* We discount volatiles from value-numbering elsewhere. */
ref->volatile_p = false;
......@@ -1689,15 +1685,17 @@ struct vn_walk_cb_data
vn_lookup_kind vn_walk_kind_, bool tbaa_p_)
: vr (vr_), last_vuse_ptr (last_vuse_ptr_), last_vuse (NULL_TREE),
vn_walk_kind (vn_walk_kind_), tbaa_p (tbaa_p_),
saved_operands (vNULL), first_set (-2), known_ranges (NULL)
saved_operands (vNULL), first_set (-2), first_base_set (-2),
known_ranges (NULL)
{
if (!last_vuse_ptr)
last_vuse_ptr = &last_vuse;
ao_ref_init (&orig_ref, orig_ref_);
}
~vn_walk_cb_data ();
void *finish (alias_set_type, tree);
void *push_partial_def (const pd_data& pd, alias_set_type, HOST_WIDE_INT);
void *finish (alias_set_type, alias_set_type, tree);
void *push_partial_def (const pd_data& pd,
alias_set_type, alias_set_type, HOST_WIDE_INT);
vn_reference_t vr;
ao_ref orig_ref;
......@@ -1712,6 +1710,7 @@ struct vn_walk_cb_data
/* The first defs range to avoid splay tree setup in most cases. */
pd_range first_range;
alias_set_type first_set;
alias_set_type first_base_set;
splay_tree known_ranges;
obstack ranges_obstack;
};
......@@ -1727,12 +1726,15 @@ vn_walk_cb_data::~vn_walk_cb_data ()
}
void *
vn_walk_cb_data::finish (alias_set_type set, tree val)
vn_walk_cb_data::finish (alias_set_type set, alias_set_type base_set, tree val)
{
if (first_set != -2)
set = first_set;
{
set = first_set;
base_set = first_base_set;
}
return vn_reference_lookup_or_insert_for_pieces
(last_vuse, set, vr->type,
(last_vuse, set, base_set, vr->type,
saved_operands.exists () ? saved_operands : vr->operands, val);
}
......@@ -1769,7 +1771,8 @@ pd_tree_dealloc (void *, void *)
void *
vn_walk_cb_data::push_partial_def (const pd_data &pd,
alias_set_type set, HOST_WIDE_INT maxsizei)
alias_set_type set, alias_set_type base_set,
HOST_WIDE_INT maxsizei)
{
const HOST_WIDE_INT bufsize = 64;
/* We're using a fixed buffer for encoding so fail early if the object
......@@ -1794,6 +1797,7 @@ vn_walk_cb_data::push_partial_def (const pd_data &pd,
first_range.offset = pd.offset;
first_range.size = pd.size;
first_set = set;
first_base_set = base_set;
last_vuse_ptr = NULL;
/* Continue looking for partial defs. */
return NULL;
......@@ -2062,7 +2066,7 @@ vn_walk_cb_data::push_partial_def (const pd_data &pd,
"Successfully combined %u partial definitions\n", ndefs);
/* We are using the alias-set of the first store we encounter which
should be appropriate here. */
return finish (first_set, val);
return finish (first_set, first_base_set, val);
}
else
{
......@@ -2107,7 +2111,7 @@ vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse, void *data_)
if (slot)
{
if ((*slot)->result && data->saved_operands.exists ())
return data->finish (vr->set, (*slot)->result);
return data->finish (vr->set, vr->base_set, (*slot)->result);
return *slot;
}
......@@ -2121,6 +2125,7 @@ vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse, void *data_)
static vn_reference_t
vn_reference_lookup_or_insert_for_pieces (tree vuse,
alias_set_type set,
alias_set_type base_set,
tree type,
vec<vn_reference_op_s,
va_heap> operands,
......@@ -2133,6 +2138,7 @@ vn_reference_lookup_or_insert_for_pieces (tree vuse,
vr1.operands = operands;
vr1.type = type;
vr1.set = set;
vr1.base_set = base_set;
vr1.hashcode = vn_reference_compute_hash (&vr1);
if (vn_reference_lookup_1 (&vr1, &result))
return result;
......@@ -2140,7 +2146,7 @@ vn_reference_lookup_or_insert_for_pieces (tree vuse,
value_id = VN_INFO (value)->value_id;
else
value_id = get_or_alloc_constant_value_id (value);
return vn_reference_insert_pieces (vuse, set, type,
return vn_reference_insert_pieces (vuse, set, base_set, type,
operands.copy (), value, value_id);
}
......@@ -2407,22 +2413,16 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_,
lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything, true);
}
vn_context_bb = saved_rpo_bb;
if (valueized_anything)
ao_ref_init (&lhs_ref, lhs);
lhs_ref_ok = true;
if (valueized_anything
&& ao_ref_init_from_vn_reference
(&lhs_ref, ao_ref_alias_set (&lhs_ref),
ao_ref_base_alias_set (&lhs_ref), TREE_TYPE (lhs), lhs_ops)
&& !refs_may_alias_p_1 (ref, &lhs_ref, data->tbaa_p))
{
lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref,
get_alias_set (lhs),
TREE_TYPE (lhs), lhs_ops);
if (lhs_ref_ok
&& !refs_may_alias_p_1 (ref, &lhs_ref, data->tbaa_p))
{
*disambiguate_only = TR_VALUEIZE_AND_DISAMBIGUATE;
return NULL;
}
}
else
{
ao_ref_init (&lhs_ref, lhs);
lhs_ref_ok = true;
*disambiguate_only = TR_VALUEIZE_AND_DISAMBIGUATE;
return NULL;
}
/* Besides valueizing the LHS we can also use access-path based
......@@ -2673,7 +2673,7 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_,
if (!val)
return (void *)-1;
}
return data->finish (0, val);
return data->finish (0, 0, val);
}
/* For now handle clearing memory with partial defs. */
else if (known_eq (ref->size, maxsize)
......@@ -2691,7 +2691,7 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_,
pd.rhs = build_constructor (NULL_TREE, NULL);
pd.offset = offset2i - offseti;
pd.size = leni << LOG2_BITS_PER_UNIT;
return data->push_partial_def (pd, 0, maxsizei);
return data->push_partial_def (pd, 0, 0, maxsizei);
}
}
......@@ -2701,22 +2701,14 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_,
&& gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
&& CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
{
tree lhs = gimple_assign_lhs (def_stmt);
tree base2;
poly_int64 offset2, size2, maxsize2;
HOST_WIDE_INT offset2i, size2i;
bool reverse;
if (lhs_ref_ok)
{
base2 = ao_ref_base (&lhs_ref);
offset2 = lhs_ref.offset;
size2 = lhs_ref.size;
maxsize2 = lhs_ref.max_size;
reverse = reverse_storage_order_for_component_p (lhs);
}
else
base2 = get_ref_base_and_extent (lhs,
&offset2, &size2, &maxsize2, &reverse);
gcc_assert (lhs_ref_ok);
base2 = ao_ref_base (&lhs_ref);
offset2 = lhs_ref.offset;
size2 = lhs_ref.size;
maxsize2 = lhs_ref.max_size;
if (known_size_p (maxsize2)
&& known_eq (maxsize2, size2)
&& adjust_offsets_for_equal_base_address (base, &offset,
......@@ -2730,7 +2722,8 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_,
if (gimple_clobber_p (def_stmt))
return (void *)-1;
tree val = build_zero_cst (vr->type);
return data->finish (get_alias_set (lhs), val);
return data->finish (ao_ref_alias_set (&lhs_ref),
ao_ref_base_alias_set (&lhs_ref), val);
}
else if (known_eq (ref->size, maxsize)
&& maxsize.is_constant (&maxsizei)
......@@ -2747,7 +2740,9 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_,
pd.rhs = gimple_assign_rhs1 (def_stmt);
pd.offset = offset2i - offseti;
pd.size = size2i;
return data->push_partial_def (pd, get_alias_set (lhs), maxsizei);
return data->push_partial_def (pd, ao_ref_alias_set (&lhs_ref),
ao_ref_base_alias_set (&lhs_ref),
maxsizei);
}
}
}
......@@ -2774,17 +2769,12 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_,
poly_int64 offset2, size2, maxsize2;
HOST_WIDE_INT offset2i, size2i;
bool reverse;
if (lhs_ref_ok)
{
base2 = ao_ref_base (&lhs_ref);
offset2 = lhs_ref.offset;
size2 = lhs_ref.size;
maxsize2 = lhs_ref.max_size;
reverse = reverse_storage_order_for_component_p (lhs);
}
else
base2 = get_ref_base_and_extent (lhs,
&offset2, &size2, &maxsize2, &reverse);
gcc_assert (lhs_ref_ok);
base2 = ao_ref_base (&lhs_ref);
offset2 = lhs_ref.offset;
size2 = lhs_ref.size;
maxsize2 = lhs_ref.max_size;
reverse = reverse_storage_order_for_component_p (lhs);
if (base2
&& !reverse
&& !storage_order_barrier_p (lhs)
......@@ -2887,7 +2877,8 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_,
}
if (val)
return data->finish (get_alias_set (lhs), val);
return data->finish (ao_ref_alias_set (&lhs_ref),
ao_ref_base_alias_set (&lhs_ref), val);
}
}
else if (ranges_known_overlap_p (offseti, maxsizei, offset2i,
......@@ -2900,7 +2891,9 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_,
pd.rhs = rhs;
pd.offset = offset2i - offseti;
pd.size = size2i;
return data->push_partial_def (pd, get_alias_set (lhs), maxsizei);
return data->push_partial_def (pd, ao_ref_alias_set (&lhs_ref),
ao_ref_base_alias_set (&lhs_ref),
maxsizei);
}
}
}
......@@ -2918,17 +2911,12 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_,
poly_int64 offset2, size2, maxsize2;
HOST_WIDE_INT offset2i, size2i, offseti;
bool reverse;
if (lhs_ref_ok)
{
base2 = ao_ref_base (&lhs_ref);
offset2 = lhs_ref.offset;
size2 = lhs_ref.size;
maxsize2 = lhs_ref.max_size;
reverse = reverse_storage_order_for_component_p (lhs);
}
else
base2 = get_ref_base_and_extent (lhs,
&offset2, &size2, &maxsize2, &reverse);
gcc_assert (lhs_ref_ok);
base2 = ao_ref_base (&lhs_ref);
offset2 = lhs_ref.offset;
size2 = lhs_ref.size;
maxsize2 = lhs_ref.max_size;
reverse = reverse_storage_order_for_component_p (lhs);
tree def_rhs = gimple_assign_rhs1 (def_stmt);
if (!reverse
&& !storage_order_barrier_p (lhs)
......@@ -2968,7 +2956,8 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_,
if (val
&& (TREE_CODE (val) != SSA_NAME
|| ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
return data->finish (get_alias_set (lhs), val);
return data->finish (ao_ref_alias_set (&lhs_ref),
ao_ref_base_alias_set (&lhs_ref), val);
}
else if (maxsize.is_constant (&maxsizei)
&& offset.is_constant (&offseti)
......@@ -2980,7 +2969,9 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_,
pd.rhs = SSA_VAL (def_rhs);
pd.offset = offset2i - offseti;
pd.size = size2i;
return data->push_partial_def (pd, get_alias_set (lhs), maxsizei);
return data->push_partial_def (pd, ao_ref_alias_set (&lhs_ref),
ao_ref_base_alias_set (&lhs_ref),
maxsizei);
}
}
}
......@@ -2993,15 +2984,13 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_,
|| TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
|| handled_component_p (gimple_assign_rhs1 (def_stmt))))
{
tree lhs = gimple_assign_lhs (def_stmt);
tree base2;
int i, j, k;
auto_vec<vn_reference_op_s> rhs;
vn_reference_op_t vro;
ao_ref r;
if (!lhs_ref_ok)
return (void *)-1;
gcc_assert (lhs_ref_ok);
/* See if the assignment kills REF. */
base2 = ao_ref_base (&lhs_ref);
......@@ -3104,7 +3093,8 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_,
if (val)
{
if (data->partial_defs.is_empty ())
return data->finish (get_alias_set (lhs), val);
return data->finish (ao_ref_alias_set (&lhs_ref),
ao_ref_base_alias_set (&lhs_ref), val);
/* This is the only interesting case for partial-def handling
coming from targets that like to gimplify init-ctors as
aggregate copies from constant data like aarch64 for
......@@ -3115,7 +3105,8 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_,
pd.rhs = val;
pd.offset = 0;
pd.size = maxsizei;
return data->push_partial_def (pd, get_alias_set (lhs),
return data->push_partial_def (pd, ao_ref_alias_set (&lhs_ref),
ao_ref_base_alias_set (&lhs_ref),
maxsizei);
}
}
......@@ -3127,7 +3118,10 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_,
return (void *)-1;
/* Adjust *ref from the new operands. */
if (!ao_ref_init_from_vn_reference (&r, get_alias_set (rhs1),
ao_ref rhs1_ref;
ao_ref_init (&rhs1_ref, rhs1);
if (!ao_ref_init_from_vn_reference (&r, ao_ref_alias_set (&rhs1_ref),
ao_ref_base_alias_set (&rhs1_ref),
vr->type, vr->operands))
return (void *)-1;
/* This can happen with bitfields. */
......@@ -3142,7 +3136,10 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_,
data->orig_ref.ref = NULL_TREE;
/* Use the alias-set of this LHS for recording an eventual result. */
if (data->first_set == -2)
data->first_set = get_alias_set (lhs);
{
data->first_set = ao_ref_alias_set (&lhs_ref);
data->first_base_set = ao_ref_base_alias_set (&lhs_ref);
}
/* Keep looking for the adjusted *REF / VR pair. */
return NULL;
......@@ -3302,10 +3299,10 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_,
/* Try folding the new reference to a constant. */
tree val = fully_constant_vn_reference_p (vr);
if (val)
return data->finish (0, val);
return data->finish (0, 0, val);
/* Adjust *ref from the new operands. */
if (!ao_ref_init_from_vn_reference (&r, 0, vr->type, vr->operands))
if (!ao_ref_init_from_vn_reference (&r, 0, 0, vr->type, vr->operands))
return (void *)-1;
/* This can happen with bitfields. */
if (maybe_ne (ref->size, r.size))
......@@ -3319,7 +3316,10 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_,
data->orig_ref.ref = NULL_TREE;
/* Use the alias-set of this stmt for recording an eventual result. */
if (data->first_set == -2)
data->first_set = 0;
{
data->first_set = 0;
data->first_base_set = 0;
}
/* Keep looking for the adjusted *REF / VR pair. */
return NULL;
......@@ -3346,7 +3346,8 @@ vn_reference_operands_for_lookup (tree op)
vn_reference_t stored in the hashtable if something is found. */
tree
vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
vn_reference_lookup_pieces (tree vuse, alias_set_type set,
alias_set_type base_set, tree type,
vec<vn_reference_op_s> operands,
vn_reference_t *vnresult, vn_lookup_kind kind)
{
......@@ -3369,6 +3370,7 @@ vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
= valueize_refs (shared_lookup_references);
vr1.type = type;
vr1.set = set;
vr1.base_set = base_set;
vr1.hashcode = vn_reference_compute_hash (&vr1);
if ((cst = fully_constant_vn_reference_p (&vr1)))
return cst;
......@@ -3381,7 +3383,7 @@ vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
ao_ref r;
unsigned limit = param_sccvn_max_alias_queries_per_access;
vn_walk_cb_data data (&vr1, NULL_TREE, NULL, kind, true);
if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
if (ao_ref_init_from_vn_reference (&r, set, base_set, type, vr1.operands))
*vnresult =
(vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse, true,
vn_reference_lookup_2,
......@@ -3420,7 +3422,10 @@ vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
vr1.operands = operands
= valueize_shared_reference_ops_from_ref (op, &valuezied_anything);
vr1.type = TREE_TYPE (op);
vr1.set = get_alias_set (op);
ao_ref op_ref;
ao_ref_init (&op_ref, op);
vr1.set = ao_ref_alias_set (&op_ref);
vr1.base_set = ao_ref_base_alias_set (&op_ref);
vr1.hashcode = vn_reference_compute_hash (&vr1);
if ((cst = fully_constant_vn_reference_p (&vr1)))
return cst;
......@@ -3434,8 +3439,8 @@ vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
/* Make sure to use a valueized reference if we valueized anything.
Otherwise preserve the full reference for advanced TBAA. */
if (!valuezied_anything
|| !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
vr1.operands))
|| !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.base_set,
vr1.type, vr1.operands))
ao_ref_init (&r, op);
vn_walk_cb_data data (&vr1, r.ref ? NULL_TREE : op,
last_vuse_ptr, kind, tbaa_p);
......@@ -3476,6 +3481,7 @@ vn_reference_lookup_call (gcall *call, vn_reference_t *vnresult,
vr->operands = valueize_shared_reference_ops_from_call (call);
vr->type = gimple_expr_type (call);
vr->set = 0;
vr->base_set = 0;
vr->hashcode = vn_reference_compute_hash (vr);
vn_reference_lookup_1 (vr, vnresult);
}
......@@ -3497,7 +3503,10 @@ vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
vr1->vuse = vuse_ssa_val (vuse);
vr1->operands = valueize_shared_reference_ops_from_ref (op, &tem).copy ();
vr1->type = TREE_TYPE (op);
vr1->set = get_alias_set (op);
ao_ref op_ref;
ao_ref_init (&op_ref, op);
vr1->set = ao_ref_alias_set (&op_ref);
vr1->base_set = ao_ref_base_alias_set (&op_ref);
vr1->hashcode = vn_reference_compute_hash (vr1);
vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
vr1->result_vdef = vdef;
......@@ -3539,7 +3548,8 @@ vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
structure we created. */
vn_reference_t
vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
vn_reference_insert_pieces (tree vuse, alias_set_type set,
alias_set_type base_set, tree type,
vec<vn_reference_op_s> operands,
tree result, unsigned int value_id)
......@@ -3553,6 +3563,7 @@ vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
vr1->operands = valueize_refs (operands);
vr1->type = type;
vr1->set = set;
vr1->base_set = base_set;
vr1->hashcode = vn_reference_compute_hash (vr1);
if (result && TREE_CODE (result) == SSA_NAME)
result = SSA_VAL (result);
......@@ -4735,6 +4746,7 @@ visit_reference_op_call (tree lhs, gcall *stmt)
vr2->operands = vr1.operands.copy ();
vr2->type = vr1.type;
vr2->set = vr1.set;
vr2->base_set = vr1.base_set;
vr2->hashcode = vr1.hashcode;
vr2->result = lhs;
vr2->result_vdef = vdef_val;
......@@ -4839,9 +4851,14 @@ visit_reference_op_store (tree lhs, tree op, gimple *stmt)
{
/* If the TBAA state isn't compatible for downstream reads
we cannot value-number the VDEFs the same. */
alias_set_type set = get_alias_set (lhs);
if (vnresult->set != set
&& ! alias_set_subset_of (set, vnresult->set))
ao_ref lhs_ref;
ao_ref_init (&lhs_ref, lhs);
alias_set_type set = ao_ref_alias_set (&lhs_ref);
alias_set_type base_set = ao_ref_base_alias_set (&lhs_ref);
if ((vnresult->set != set
&& ! alias_set_subset_of (set, vnresult->set))
|| (vnresult->base_set != base_set
&& ! alias_set_subset_of (base_set, vnresult->base_set)))
resultsame = false;
}
}
......@@ -5915,10 +5932,15 @@ eliminate_dom_walker::eliminate_stmt (basic_block b, gimple_stmt_iterator *gsi)
/* We can only remove the later store if the former aliases
at least all accesses the later one does or if the store
was to readonly memory storing the same value. */
alias_set_type set = get_alias_set (lhs);
ao_ref lhs_ref;
ao_ref_init (&lhs_ref, lhs);
alias_set_type set = ao_ref_alias_set (&lhs_ref);
alias_set_type base_set = ao_ref_base_alias_set (&lhs_ref);
if (! vnresult
|| vnresult->set == set
|| alias_set_subset_of (set, vnresult->set))
|| ((vnresult->set == set
|| alias_set_subset_of (set, vnresult->set))
&& (vnresult->base_set == base_set
|| alias_set_subset_of (base_set, vnresult->base_set))))
{
if (dump_file && (dump_flags & TDF_DETAILS))
{
......
......@@ -143,6 +143,7 @@ typedef struct vn_reference_s
hashval_t hashcode;
tree vuse;
alias_set_type set;
alias_set_type base_set;
tree type;
vec<vn_reference_op_s> operands;
tree result;
......@@ -249,17 +250,17 @@ tree vn_nary_op_lookup_pieces (unsigned int, enum tree_code,
tree, tree *, vn_nary_op_t *);
vn_nary_op_t vn_nary_op_insert_pieces (unsigned int, enum tree_code,
tree, tree *, tree, unsigned int);
bool ao_ref_init_from_vn_reference (ao_ref *, alias_set_type, tree,
vec<vn_reference_op_s> );
bool ao_ref_init_from_vn_reference (ao_ref *, alias_set_type, alias_set_type,
tree, vec<vn_reference_op_s> );
vec<vn_reference_op_s> vn_reference_operands_for_lookup (tree);
tree vn_reference_lookup_pieces (tree, alias_set_type, tree,
tree vn_reference_lookup_pieces (tree, alias_set_type, alias_set_type, tree,
vec<vn_reference_op_s> ,
vn_reference_t *, vn_lookup_kind);
tree vn_reference_lookup (tree, tree, vn_lookup_kind, vn_reference_t *, bool,
tree * = NULL);
void vn_reference_lookup_call (gcall *, vn_reference_t *, vn_reference_t);
vn_reference_t vn_reference_insert_pieces (tree, alias_set_type, tree,
vec<vn_reference_op_s> ,
vn_reference_t vn_reference_insert_pieces (tree, alias_set_type, alias_set_type,
tree, vec<vn_reference_op_s>,
tree, unsigned int);
bool vn_nary_op_eq (const_vn_nary_op_t const vno1,
......
......@@ -223,7 +223,8 @@ avail_exprs_stack::simplify_binary_operation (gimple *stmt,
we finish processing this block and its children. */
tree
avail_exprs_stack::lookup_avail_expr (gimple *stmt, bool insert, bool tbaa_p)
avail_exprs_stack::lookup_avail_expr (gimple *stmt, bool insert, bool tbaa_p,
expr_hash_elt **elt)
{
expr_hash_elt **slot;
tree lhs;
......@@ -317,6 +318,8 @@ avail_exprs_stack::lookup_avail_expr (gimple *stmt, bool insert, bool tbaa_p)
/* Extract the LHS of the assignment so that it can be used as the current
definition of another variable. */
lhs = (*slot)->lhs ();
if (elt)
*elt = *slot;
/* Valueize the result. */
if (TREE_CODE (lhs) == SSA_NAME)
......
......@@ -148,7 +148,7 @@ class avail_exprs_stack
/* Lookup and conditionally insert an expression into the table,
recording enough information to unwind as needed. */
tree lookup_avail_expr (gimple *, bool, bool);
tree lookup_avail_expr (gimple *, bool, bool, expr_hash_elt ** = NULL);
void record_cond (cond_equivalence *);
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment