Commit 1284e2b1 by Richard Biener Committed by Richard Biener

re PR tree-optimization/90883 (Generated code is worse if returned struct is unnamed)

2019-10-11  Richard Biener  <rguenther@suse.de>

	PR tree-optimization/90883
	PR tree-optimization/91091
	* tree-ssa-sccvn.c (vn_reference_lookup_3): Use correct
	alias-sets both for recording VN table entries and continuing
	walking after translating through copies.  Handle same-sized
	reads from SSA names by returning the plain SSA name.
	(eliminate_dom_walker::eliminate_stmt): Properly handle
	non-size precision stores in redundant store elimination.

	* gcc.dg/torture/20191011-1.c: New testcase.
	* gcc.dg/tree-ssa/ssa-fre-82.c: Likewise.
	* gcc.dg/tree-ssa/ssa-fre-83.c: Likewise.
	* gcc.dg/tree-ssa/redundant-assign-zero-1.c: Disable FRE.
	* gcc.dg/tree-ssa/redundant-assign-zero-2.c: Likewise.

From-SVN: r276882
parent e5207f1a
2019-10-11 Richard Biener <rguenther@suse.de>
PR tree-optimization/90883
PR tree-optimization/91091
* tree-ssa-sccvn.c (vn_reference_lookup_3): Use correct
alias-sets both for recording VN table entries and continuing
walking after translating through copies. Handle same-sized
reads from SSA names by returning the plain SSA name.
(eliminate_dom_walker::eliminate_stmt): Properly handle
non-size precision stores in redundant store elimination.
2019-10-11 Jan Hubicka <hubicka@ucw.cz>
* ggc-page.c (release_pages): Output statistics when !quiet_flag.
......
2019-10-11 Richard Biener <rguenther@suse.de>
PR tree-optimization/90883
PR tree-optimization/91091
* gcc.dg/torture/20191011-1.c: New testcase.
* gcc.dg/tree-ssa/ssa-fre-82.c: Likewise.
* gcc.dg/tree-ssa/ssa-fre-83.c: Likewise.
* gcc.dg/tree-ssa/redundant-assign-zero-1.c: Disable FRE.
* gcc.dg/tree-ssa/redundant-assign-zero-2.c: Likewise.
2019-10-11 Eric Botcazou <ebotcazou@adacore.com>
* gnat.dg/specs/size_clause4.ads: New test.
......
/* { dg-do run } */
/* { dg-additional-options "-fgimple -fstrict-aliasing" } */
#if __SIZEOF_INT__ != __SIZEOF_FLOAT__
int main() { return 0; }
#else
struct X { int i; };
float f;
/* GIMPLE-FE reproducer (PR90883/91091): start the pipeline at FRE and
   make sure value-numbering does not forward the 0.0f store to the
   load of f — the intervening store through p may alias f (main
   passes &f), so the re-read must observe 1.0f.  */
int __GIMPLE (ssa,startwith("fre")) __attribute__((noipa))
foo (float *p)
{
struct X x;
float tem;
int _2;
__BB(2):
f = 0.0f; /* store zero to the global */
__MEM <float> (p_1(D)) = 1.0f; /* may-alias store through P */
x = __VIEW_CONVERT <struct X> (f); /* re-read f as struct X */
_2 = x.i;
return _2;
}
/* Driver: pass the address of the global f itself so the store through
   the pointer in foo aliases the direct store to f.  A zero result
   means FRE wrongly forwarded the 0.0f store.  */
int
main ()
{
  int res = foo (&f);
  if (res == 0)
    __builtin_abort ();
  return 0;
}
#endif
/* { dg-do compile } */
/* { dg-options "-O2 -fdump-tree-dse-details" } */
/* { dg-options "-O2 -fno-tree-fre -fdump-tree-dse-details" } */
void blah (char *);
......
/* { dg-do compile } */
/* { dg-options "-O2 -fdump-tree-dse-details" } */
/* { dg-options "-O2 -fno-tree-fre -fdump-tree-dse-details" } */
#include <string.h>
......
/* { dg-do run } */
/* { dg-options "-O -fdump-tree-fre1-details" } */
struct S { _Bool x; };
/* The memset writes the byte 1 over every byte of *s, including the
   _Bool member x, so the following store of 1 to s->x is redundant and
   FRE should delete it (the fre1 dump is scanned for that).  The tricky
   part is that _Bool has 1-bit precision but byte size.  */
void
foo (struct S *s)
{
__builtin_memset (s, 1, sizeof (struct S));
s->x = 1;
}
/* Runtime check: after foo, the byte backing s.x must still be exactly
   1 (what memset wrote), not some other non-zero value.  */
int
main ()
{
  struct S s;
  char byte;
  foo (&s);
  __builtin_memcpy (&byte, &s.x, 1);
  if (byte != 1)
    __builtin_abort ();
  return 0;
}
/* { dg-final { scan-tree-dump "Deleted redundant store" "fre1" } } */
/* { dg-do compile } */
/* { dg-options "-O -fdump-tree-fre1-details" } */
struct X
{
int a : 1;
int b : 1;
} x;
/* Bit-field case: the second x.a = 1 and x.b = v repeat the first pair
   exactly, so FRE should delete both as redundant stores (counted by
   the scan-tree-dump-times check below together with bar).  */
void foo (int v)
{
x.a = 1;
x.b = v;
x.a = 1;
x.b = v;
}
struct Y
{
_Bool a;
_Bool b;
} y;
/* Same pattern with _Bool members (byte-sized but 1-bit precision):
   the repeated y.a = 1 and y.b = v stores are redundant and FRE must
   recognize that despite the non-mode-precision type.  */
void bar (int v)
{
y.a = 1;
y.b = v;
y.a = 1;
y.b = v;
}
/* { dg-final { scan-tree-dump-times "Deleted redundant store" 4 "fre1" } } */
......@@ -1877,8 +1877,10 @@ vn_walk_cb_data::push_partial_def (const pd_data &pd, tree vuse,
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file,
"Successfully combined %u partial definitions\n", ndefs);
/* ??? If we track partial defs alias-set we could use that if it
is the same for all. Use zero for now. */
return vn_reference_lookup_or_insert_for_pieces
(first_vuse, vr->set, vr->type, vr->operands, val);
(first_vuse, 0, vr->type, vr->operands, val);
}
else
{
......@@ -2331,9 +2333,7 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_,
}
}
/* If we are looking for redundant stores do not create new hashtable
entries from aliasing defs with made up alias-sets. */
if (*disambiguate_only > TR_TRANSLATE || !data->tbaa_p)
if (*disambiguate_only > TR_TRANSLATE)
return (void *)-1;
/* If we cannot constrain the size of the reference we cannot
......@@ -2449,7 +2449,7 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_,
return (void *)-1;
}
return vn_reference_lookup_or_insert_for_pieces
(vuse, vr->set, vr->type, vr->operands, val);
(vuse, 0, vr->type, vr->operands, val);
}
/* For now handle clearing memory with partial defs. */
else if (known_eq (ref->size, maxsize)
......@@ -2499,7 +2499,7 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_,
{
tree val = build_zero_cst (vr->type);
return vn_reference_lookup_or_insert_for_pieces
(vuse, vr->set, vr->type, vr->operands, val);
(vuse, get_alias_set (lhs), vr->type, vr->operands, val);
}
else if (known_eq (ref->size, maxsize)
&& maxsize.is_constant (&maxsizei)
......@@ -2614,7 +2614,7 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_,
if (val)
return vn_reference_lookup_or_insert_for_pieces
(vuse, vr->set, vr->type, vr->operands, val);
(vuse, get_alias_set (lhs), vr->type, vr->operands, val);
}
}
else if (ranges_known_overlap_p (offseti, maxsizei, offset2i, size2i))
......@@ -2672,23 +2672,26 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_,
according to endianness. */
&& (! INTEGRAL_TYPE_P (vr->type)
|| known_eq (ref->size, TYPE_PRECISION (vr->type)))
&& multiple_p (ref->size, BITS_PER_UNIT)
&& (! INTEGRAL_TYPE_P (TREE_TYPE (def_rhs))
|| type_has_mode_precision_p (TREE_TYPE (def_rhs))))
&& multiple_p (ref->size, BITS_PER_UNIT))
{
gimple_match_op op (gimple_match_cond::UNCOND,
BIT_FIELD_REF, vr->type,
vn_valueize (def_rhs),
bitsize_int (ref->size),
bitsize_int (offset - offset2));
tree val = vn_nary_build_or_lookup (&op);
if (val
&& (TREE_CODE (val) != SSA_NAME
|| ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
if (known_eq (ref->size, size2))
return vn_reference_lookup_or_insert_for_pieces
(vuse, get_alias_set (lhs), vr->type, vr->operands,
SSA_VAL (def_rhs));
else if (! INTEGRAL_TYPE_P (TREE_TYPE (def_rhs))
|| type_has_mode_precision_p (TREE_TYPE (def_rhs)))
{
vn_reference_t res = vn_reference_lookup_or_insert_for_pieces
(vuse, vr->set, vr->type, vr->operands, val);
return res;
gimple_match_op op (gimple_match_cond::UNCOND,
BIT_FIELD_REF, vr->type,
vn_valueize (def_rhs),
bitsize_int (ref->size),
bitsize_int (offset - offset2));
tree val = vn_nary_build_or_lookup (&op);
if (val
&& (TREE_CODE (val) != SSA_NAME
|| ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
return vn_reference_lookup_or_insert_for_pieces
(vuse, get_alias_set (lhs), vr->type, vr->operands, val);
}
}
}
......@@ -2770,7 +2773,8 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_,
}
/* Now re-write REF to be based on the rhs of the assignment. */
copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
tree rhs1 = gimple_assign_rhs1 (def_stmt);
copy_reference_ops_from_ref (rhs1, &rhs);
/* Apply an extra offset to the inner MEM_REF of the RHS. */
if (maybe_ne (extra_off, 0))
......@@ -2806,7 +2810,7 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_,
{
if (data->partial_defs.is_empty ())
return vn_reference_lookup_or_insert_for_pieces
(vuse, vr->set, vr->type, vr->operands, val);
(vuse, get_alias_set (rhs1), vr->type, vr->operands, val);
/* This is the only interesting case for partial-def handling
coming from targets that like to gimplify init-ctors as
aggregate copies from constant data like aarch64 for
......@@ -2829,7 +2833,8 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_,
return (void *)-1;
/* Adjust *ref from the new operands. */
if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
if (!ao_ref_init_from_vn_reference (&r, get_alias_set (rhs1),
vr->type, vr->operands))
return (void *)-1;
/* This can happen with bitfields. */
if (maybe_ne (ref->size, r.size))
......@@ -2990,10 +2995,10 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_,
tree val = fully_constant_vn_reference_p (vr);
if (val)
return vn_reference_lookup_or_insert_for_pieces
(vuse, vr->set, vr->type, vr->operands, val);
(vuse, 0, vr->type, vr->operands, val);
/* Adjust *ref from the new operands. */
if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
if (!ao_ref_init_from_vn_reference (&r, 0, vr->type, vr->operands))
return (void *)-1;
/* This can happen with bitfields. */
if (maybe_ne (ref->size, r.size))
......@@ -5539,8 +5544,48 @@ eliminate_dom_walker::eliminate_stmt (basic_block b, gimple_stmt_iterator *gsi)
tree val;
tree rhs = gimple_assign_rhs1 (stmt);
vn_reference_t vnresult;
val = vn_reference_lookup (lhs, gimple_vuse (stmt), VN_WALKREWRITE,
&vnresult, false);
/* ??? gcc.dg/torture/pr91445.c shows that we lookup a boolean
typed load of a byte known to be 0x11 as 1 so a store of
a boolean 1 is detected as redundant. Because of this we
have to make sure to lookup with a ref where its size
matches the precision. */
tree lookup_lhs = lhs;
if (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
&& (TREE_CODE (lhs) != COMPONENT_REF
|| !DECL_BIT_FIELD_TYPE (TREE_OPERAND (lhs, 1)))
&& !type_has_mode_precision_p (TREE_TYPE (lhs)))
{
if (TREE_CODE (lhs) == COMPONENT_REF
|| TREE_CODE (lhs) == MEM_REF)
{
tree ltype = build_nonstandard_integer_type
(TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (lhs))),
TYPE_UNSIGNED (TREE_TYPE (lhs)));
if (TREE_CODE (lhs) == COMPONENT_REF)
{
tree foff = component_ref_field_offset (lhs);
tree f = TREE_OPERAND (lhs, 1);
if (!poly_int_tree_p (foff))
lookup_lhs = NULL_TREE;
else
lookup_lhs = build3 (BIT_FIELD_REF, ltype,
TREE_OPERAND (lhs, 0),
TYPE_SIZE (TREE_TYPE (lhs)),
bit_from_pos
(foff, DECL_FIELD_BIT_OFFSET (f)));
}
else
lookup_lhs = build2 (MEM_REF, ltype,
TREE_OPERAND (lhs, 0),
TREE_OPERAND (lhs, 1));
}
else
lookup_lhs = NULL_TREE;
}
val = NULL_TREE;
if (lookup_lhs)
val = vn_reference_lookup (lookup_lhs, gimple_vuse (stmt), VN_WALK,
&vnresult, false);
if (TREE_CODE (rhs) == SSA_NAME)
rhs = VN_INFO (rhs)->valnum;
if (val
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment