Commit ebc1b29e, authored and committed by Richard Biener

re PR middle-end/71002 (-fstrict-aliasing breaks Boost's short string optimization implementation)

2016-05-11  Richard Biener  <rguenther@suse.de>

	PR middle-end/71002
	* alias.c (reference_alias_ptr_type): Preserve alias-set zero
	if the langhook insists on it.
	* fold-const.c (make_bit_field_ref): Add arg for the original
	reference and preserve its alias-set.
	(decode_field_reference): Take exp by reference and adjust it
	to the original memory reference.
	(optimize_bit_field_compare): Adjust callers.
	(fold_truth_andor_1): Likewise.
	* gimplify.c (gimplify_expr): Adjust in-SSA form test.

	* g++.dg/torture/pr71002.C: New testcase.

From-SVN: r236117
parent 98ccd1d7
2016-05-11 Richard Biener <rguenther@suse.de>
PR middle-end/71002
* alias.c (reference_alias_ptr_type): Preserve alias-set zero
if the langhook insists on it.
* fold-const.c (make_bit_field_ref): Add arg for the original
reference and preserve its alias-set.
(decode_field_reference): Take exp by reference and adjust it
to the original memory reference.
(optimize_bit_field_compare): Adjust callers.
(fold_truth_andor_1): Likewise.
* gimplify.c (gimplify_expr): Adjust in-SSA form test.
2016-05-11 Ilya Enkovich <ilya.enkovich@intel.com>
PR middle-end/70807
......
......@@ -769,6 +769,10 @@ reference_alias_ptr_type_1 (tree *t)
tree
reference_alias_ptr_type (tree t)
{
/* If the frontend assigns this alias-set zero, preserve that. */
if (lang_hooks.get_alias_set (t) == 0)
return ptr_type_node;
tree ptype = reference_alias_ptr_type_1 (&t);
/* If there is a given pointer type for aliasing purposes, return it. */
if (ptype != NULL_TREE)
......
......@@ -117,14 +117,8 @@ static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
HOST_WIDE_INT, HOST_WIDE_INT, int, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
HOST_WIDE_INT *,
machine_mode *, int *, int *, int *,
tree *, tree *);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
......@@ -3803,15 +3797,23 @@ distribute_real_division (location_t loc, enum tree_code code, tree type,
/* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero
and uses reverse storage order if REVERSEP is nonzero. */
and uses reverse storage order if REVERSEP is nonzero. ORIG_INNER
is the original memory reference used to preserve the alias set of
the access. */
static tree
make_bit_field_ref (location_t loc, tree inner, tree type,
make_bit_field_ref (location_t loc, tree inner, tree orig_inner, tree type,
HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
int unsignedp, int reversep)
{
tree result, bftype;
if (get_alias_set (inner) != get_alias_set (orig_inner))
inner = fold_build2 (MEM_REF, TREE_TYPE (inner),
build_fold_addr_expr (inner),
build_int_cst
(reference_alias_ptr_type (orig_inner), 0));
if (bitpos == 0 && !reversep)
{
tree size = TYPE_SIZE (TREE_TYPE (inner));
......@@ -3937,13 +3939,13 @@ optimize_bit_field_compare (location_t loc, enum tree_code code,
and return. */
return fold_build2_loc (loc, code, compare_type,
fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
make_bit_field_ref (loc, linner,
make_bit_field_ref (loc, linner, lhs,
unsigned_type,
nbitsize, nbitpos,
1, lreversep),
mask),
fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
make_bit_field_ref (loc, rinner,
make_bit_field_ref (loc, rinner, rhs,
unsigned_type,
nbitsize, nbitpos,
1, rreversep),
......@@ -3988,8 +3990,8 @@ optimize_bit_field_compare (location_t loc, enum tree_code code,
/* Make a new bitfield reference, shift the constant over the
appropriate number of bits and mask it with the computed mask
(in case this was a signed field). If we changed it, make a new one. */
lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1,
lreversep);
lhs = make_bit_field_ref (loc, linner, lhs, unsigned_type,
nbitsize, nbitpos, 1, lreversep);
rhs = const_binop (BIT_AND_EXPR,
const_binop (LSHIFT_EXPR,
......@@ -4028,11 +4030,12 @@ optimize_bit_field_compare (location_t loc, enum tree_code code,
do anything with. */
static tree
decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
decode_field_reference (location_t loc, tree *exp_, HOST_WIDE_INT *pbitsize,
HOST_WIDE_INT *pbitpos, machine_mode *pmode,
int *punsignedp, int *preversep, int *pvolatilep,
tree *pmask, tree *pand_mask)
{
tree exp = *exp_;
tree outer_type = 0;
tree and_mask = 0;
tree mask, inner, offset;
......@@ -4069,6 +4072,8 @@ decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
|| TREE_CODE (inner) == PLACEHOLDER_EXPR)
return 0;
*exp_ = exp;
/* If the number of bits in the reference is the same as the bitsize of
the outer type, then the outer type gives the signedness. Otherwise
(in case of a small bitfield) the signedness is unchanged. */
......@@ -5677,19 +5682,19 @@ fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
ll_reversep = lr_reversep = rl_reversep = rr_reversep = 0;
volatilep = 0;
ll_inner = decode_field_reference (loc, ll_arg,
ll_inner = decode_field_reference (loc, &ll_arg,
&ll_bitsize, &ll_bitpos, &ll_mode,
&ll_unsignedp, &ll_reversep, &volatilep,
&ll_mask, &ll_and_mask);
lr_inner = decode_field_reference (loc, lr_arg,
lr_inner = decode_field_reference (loc, &lr_arg,
&lr_bitsize, &lr_bitpos, &lr_mode,
&lr_unsignedp, &lr_reversep, &volatilep,
&lr_mask, &lr_and_mask);
rl_inner = decode_field_reference (loc, rl_arg,
rl_inner = decode_field_reference (loc, &rl_arg,
&rl_bitsize, &rl_bitpos, &rl_mode,
&rl_unsignedp, &rl_reversep, &volatilep,
&rl_mask, &rl_and_mask);
rr_inner = decode_field_reference (loc, rr_arg,
rr_inner = decode_field_reference (loc, &rr_arg,
&rr_bitsize, &rr_bitpos, &rr_mode,
&rr_unsignedp, &rr_reversep, &volatilep,
&rr_mask, &rr_and_mask);
......@@ -5851,12 +5856,14 @@ fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
{
lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
lhs = make_bit_field_ref (loc, ll_inner, ll_arg,
lntype, lnbitsize, lnbitpos,
ll_unsignedp || rl_unsignedp, ll_reversep);
if (! all_ones_mask_p (ll_mask, lnbitsize))
lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
rhs = make_bit_field_ref (loc, lr_inner, lr_arg,
rntype, rnbitsize, rnbitpos,
lr_unsignedp || rr_unsignedp, lr_reversep);
if (! all_ones_mask_p (lr_mask, rnbitsize))
rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
......@@ -5878,11 +5885,11 @@ fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
{
tree type;
lhs = make_bit_field_ref (loc, ll_inner, lntype,
lhs = make_bit_field_ref (loc, ll_inner, ll_arg, lntype,
ll_bitsize + rl_bitsize,
MIN (ll_bitpos, rl_bitpos),
ll_unsignedp, ll_reversep);
rhs = make_bit_field_ref (loc, lr_inner, rntype,
rhs = make_bit_field_ref (loc, lr_inner, lr_arg, rntype,
lr_bitsize + rr_bitsize,
MIN (lr_bitpos, rr_bitpos),
lr_unsignedp, lr_reversep);
......@@ -5947,7 +5954,8 @@ fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
reference we will make. Unless the mask is all ones the width of
that field, perform the mask operation. Then compare with the
merged constant. */
result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
result = make_bit_field_ref (loc, ll_inner, ll_arg,
lntype, lnbitsize, lnbitpos,
ll_unsignedp || rl_unsignedp, ll_reversep);
ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
......
......@@ -10452,7 +10452,7 @@ gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
in suitable form. Re-gimplifying would mark the address
operand addressable. Always gimplify when not in SSA form
as we still may have to gimplify decls with value-exprs. */
if (!gimplify_ctxp || !gimplify_ctxp->into_ssa
if (!gimplify_ctxp || !gimple_in_ssa_p (cfun)
|| !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p, 0)))
{
ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
......
2016-05-11 Richard Biener <rguenther@suse.de>
PR middle-end/71002
* g++.dg/torture/pr71002.C: New testcase.
2016-05-11 Ilya Enkovich <ilya.enkovich@intel.com>
PR middle-end/70807
......
// { dg-do run }
using size_t = __SIZE_TYPE__;
// Minimal placement operator new: constructs at caller-supplied storage,
// so no allocation happens; simply return the given pointer.
inline void* operator new(size_t, void* p) noexcept
{ return p; }
// Matching placement operator delete (called only if a placement-new'd
// constructor throws); deliberately a no-op since the storage is not owned.
inline void operator delete(void*, void*)
{ }
// "Long" (heap-backed) string representation: a 1-bit discriminator,
// a length packed into the remaining bits of a size_t, plus capacity
// and a pointer to the heap buffer.
// NOTE(review): the leading 1-bit is_short field lines up with
// short_header::is_short when both views overlay the same repr_t storage,
// so either view can test the discriminator — this overlap is what the
// strict-aliasing folding in PR71002 miscompiled.
struct long_t
{
size_t is_short : 1;
size_t length : (__SIZEOF_SIZE_T__ * __CHAR_BIT__ - 1);
size_t capacity;
char* pointer;
};
// Raw storage for a long_t: a byte array of the right size together with
// an empty, suitably-aligned member so the bytes can legally hold a long_t
// constructed via placement new.
union long_raw_t {
unsigned char data[sizeof(long_t)];
struct __attribute__((aligned(alignof(long_t)))) { } align;
};
// Header byte of the "short" (inline) representation: the same 1-bit
// discriminator as long_t::is_short, plus a 7-bit inline length.
struct short_header
{
unsigned char is_short : 1;
unsigned char length : (__CHAR_BIT__ - 1);
};
// "Short" string representation: header byte followed by up to 23 bytes of
// inline character data (including the NUL terminator).
struct short_t
{
short_header h;
char data[23];
};
// Discriminated storage for the two string representations.  The short
// form is an ordinary union member; the long form is obtained by punning
// the raw bytes through void* casts and placement new (see swap_data).
// These casts are exactly the pattern that -fstrict-aliasing broke in
// PR middle-end/71002.
union repr_t
{
long_raw_t r;
short_t s;
const short_t& short_repr() const
{ return s; }
const long_t& long_repr() const
{ return *static_cast<const long_t*>(static_cast<const void*>(&r)); }
short_t& short_repr()
{ return s; }
long_t& long_repr()
{ return *static_cast<long_t*>(static_cast<void*>(&r)); }
};
// Minimal string with short-string optimization, modeled on Boost's
// implementation (the subject of PR middle-end/71002).  Strings of at
// most 22 characters are stored inline (short_t); longer ones go to a
// heap buffer tracked by a long_t overlaid on the raw union bytes.
class string
{
public:
// Default-construct as an empty short string.
string()
{
short_t& s = m_repr.short_repr();
s.h.is_short = 1;
s.h.length = 0;
s.data[0] = '\0';
}
// Construct from a C string, choosing long or short representation by
// whether length + 1 fits in the 23-byte inline buffer.
string(const char* str)
{
size_t length = __builtin_strlen(str);
if (length + 1 > 23) {
long_t& l = m_repr.long_repr();
l.is_short = 0;
l.length = length;
l.capacity = length + 1;
l.pointer = new char[l.capacity];
__builtin_memcpy(l.pointer, str, length + 1);
} else {
short_t& s = m_repr.short_repr();
s.h.is_short = 1;
s.h.length = length;
__builtin_memcpy(s.data, str, length + 1);
}
}
// Move-construct: start empty, then exchange representations.
string(string&& other)
: string{}
{
swap_data(other);
}
// Only the long representation owns heap memory.
~string()
{
if (!is_short()) {
delete[] m_repr.long_repr().pointer;
}
}
// Dispatch on the discriminator bit to the matching length field.  The
// two branches read the length through different union views; folding
// them with the wrong alias set was the original miscompile.
size_t length() const
{ return is_short() ? short_length() : long_length(); }
private:
// Read the discriminator through the short view regardless of which
// representation is active (the bit overlays long_t::is_short).
bool is_short() const
{ return m_repr.s.h.is_short != 0; }
size_t short_length() const
{ return m_repr.short_repr().h.length; }
size_t long_length() const
{ return m_repr.long_repr().length; }
// Exchange representations with OTHER, handling all four short/long
// combinations.  Mixed cases end the old object's lifetime explicitly
// and placement-new the other representation into the union bytes.
void swap_data(string& other)
{
if (is_short()) {
if (other.is_short()) {
repr_t tmp(m_repr);
m_repr = other.m_repr;
other.m_repr = tmp;
} else {
short_t short_backup(m_repr.short_repr());
m_repr.short_repr().~short_t();
::new(&m_repr.long_repr()) long_t(other.m_repr.long_repr());
other.m_repr.long_repr().~long_t();
::new(&other.m_repr.short_repr()) short_t(short_backup);
}
} else {
if (other.is_short()) {
short_t short_backup(other.m_repr.short_repr());
other.m_repr.short_repr().~short_t();
::new(&other.m_repr.long_repr()) long_t(m_repr.long_repr());
m_repr.long_repr().~long_t();
::new(&m_repr.short_repr()) short_t(short_backup);
} else {
long_t tmp(m_repr.long_repr());
m_repr.long_repr() = other.m_repr.long_repr();
other.m_repr.long_repr() = tmp;
}
}
}
repr_t m_repr;
};
// Wrapper that moves a string in and caches its length.  The constructor is
// noinline so the move + length() sequence is compiled as a unit the
// optimizer cannot see through, forcing the folding path under test.
struct foo
{
__attribute__((noinline))
foo(string str)
: m_str{static_cast<string&&>(str)},
m_len{m_str.length()}
{ }
string m_str;
size_t m_len;
};
// Driver: the 43-character literal (length + 1 > 23) forces the long
// representation.  A correct build caches the real length; the PR71002
// miscompile made m_len come out 0, so abort on that symptom.
int main()
{
foo f{"the quick brown fox jumps over the lazy dog"};
if (f.m_len == 0) {
__builtin_abort();
}
return 0;
}
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment