Commit dc808df6 by Alexandre Oliva (committed by Alexandre Oliva)

re PR middle-end/22156 (bit-field copying regressed)

PR middle-end/22156
* tree-sra.c (struct sra_elt): Add in_bitfld_block.  Remove
all_no_warning.
(struct sra_walk_fns): Remove use_all parameter from use.
(sra_hash_tree): Handle BIT_FIELD_REFs.
(sra_elt_hash): Don't hash bitfld blocks.
(sra_elt_eq): Skip them in parent compares as well.  Handle
BIT_FIELD_REFs.
(sra_walk_expr): Don't maintain or pass down use_all_p.
(scan_use): Remove use_all parameter.
(scalarize_use): Likewise.  Re-expand assignment to
BIT_FIELD_REF of gimple_reg.  De-scalarize before input or
output, and re-scalarize after output.  Don't mark anything
for no warning.
(scalarize_ldst): Adjust.
(sra_walk_gimple_modify_stmt): Likewise.
(build_element_name_1): Handle BIT_FIELD_REFs.
(instantiate_element): Don't warn for any element whose parent
is used as a whole.
(instantiate_missing_elements_1): Return the sra_elt.
(canon_type_for_field): New.
(try_instantiate_multiple_fields): New.
(instantiate_missing_elements): Use them.
(mark_no_warning): Removed.
(generate_one_element_ref): Handle BIT_FIELD_REFs.
(REPLDUP, sra_build_elt_assignment): New.
(generate_copy_inout): Use them.
(generate_element_copy): Likewise.  Handle bitfld differences.
(generate_element_zero): Don't recurse for blocks.  Use
sra_build_elt_assignment.
(generate_one_element_int): Take elt instead of var.  Use
sra_build_elt_assignment.
(generate_element_init_1): Adjust.
(scalarize_use, scalarize_copy): Use REPLDUP.
(scalarize_ldst): Move assert before dereference.
(dump_sra_elt_name): Handle BIT_FIELD_REFs.

From-SVN: r123524
parent 21f7aaa4
2007-04-05 Alexandre Oliva <aoliva@redhat.com>
PR middle-end/22156
* tree-sra.c (struct sra_elt): Add in_bitfld_block. Remove
all_no_warning.
(struct sra_walk_fns): Remove use_all parameter from use.
(sra_hash_tree): Handle BIT_FIELD_REFs.
(sra_elt_hash): Don't hash bitfld blocks.
(sra_elt_eq): Skip them in parent compares as well. Handle
BIT_FIELD_REFs.
(sra_walk_expr): Don't maintain or pass down use_all_p.
(scan_use): Remove use_all parameter.
(scalarize_use): Likewise. Re-expand assignment to
BIT_FIELD_REF of gimple_reg. De-scalarize before input or
output, and re-scalarize after output. Don't mark anything
for no warning.
(scalarize_ldst): Adjust.
(sra_walk_gimple_modify_stmt): Likewise.
(build_element_name_1): Handle BIT_FIELD_REFs.
(instantiate_element): Don't warn for any element whose parent
is used as a whole.
(instantiate_missing_elements_1): Return the sra_elt.
(canon_type_for_field): New.
(try_instantiate_multiple_fields): New.
(instantiate_missing_elements): Use them.
(mark_no_warning): Removed.
(generate_one_element_ref): Handle BIT_FIELD_REFs.
(REPLDUP, sra_build_elt_assignment): New.
(generate_copy_inout): Use them.
(generate_element_copy): Likewise. Handle bitfld differences.
(generate_element_zero): Don't recurse for blocks. Use
sra_build_elt_assignment.
(generate_one_element_int): Take elt instead of var. Use
sra_build_elt_assignment.
(generate_element_init_1): Adjust.
(scalarize_use, scalarize_copy): Use REPLDUP.
(scalarize_ldst): Move assert before dereference.
(dump_sra_elt_name): Handle BIT_FIELD_REFs.
2007-04-05  Steven Bosscher  <steven@gcc.gnu.org>

	* regmove.c: Fix unused variable warnings due to previous commit.
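Before the diff, a sketch of the transformation this patch aims at. The struct and names below are invented for illustration; only the shape of the output follows the patch:

    /* Given adjacent bit-fields that fit in one alignment word:  */
    struct S { unsigned a : 8; unsigned b : 8; unsigned c : 16; };

    /* Prior to this patch, SRA gave each field its own replacement
       variable, so copying one struct S to another decomposed into
       three separate bit-field loads and stores.  With this patch,
       the three fields are grouped into one 32-bit block element,
       conceptually BIT_FIELD_REF <x, 32, 0>, with a single word-sized
       replacement variable; "x = y" then moves one word, and
       "x.b = v" becomes a mask-shift-or on the block variable.  */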
@@ -147,6 +147,10 @@ struct sra_elt
   /* True if there is BIT_FIELD_REF on the lhs with a vector. */
   bool is_vector_lhs;
+
+  /* 1 if the element is a field that is part of a block, 2 if the field
+     is the block itself, 0 if it's neither.  */
+  char in_bitfld_block;
 };
 
 #define IS_ELEMENT_FOR_GROUP(ELEMENT) (TREE_CODE (ELEMENT) == RANGE_EXPR)
@@ -461,6 +465,12 @@ sra_hash_tree (tree t)
       h = iterative_hash_expr (DECL_FIELD_BIT_OFFSET (t), h);
       break;
 
+    case BIT_FIELD_REF:
+      /* Don't take operand 0 into account, that's our parent.  */
+      h = iterative_hash_expr (TREE_OPERAND (t, 1), 0);
+      h = iterative_hash_expr (TREE_OPERAND (t, 2), h);
+      break;
+
     default:
       gcc_unreachable ();
     }
@@ -479,12 +489,14 @@ sra_elt_hash (const void *x)
 
   h = sra_hash_tree (e->element);
 
-  /* Take into account everything back up the chain.  Given that chain
-     lengths are rarely very long, this should be acceptable.  If we
-     truly identify this as a performance problem, it should work to
-     hash the pointer value "e->parent".  */
+  /* Take into account everything except bitfield blocks back up the
+     chain.  Given that chain lengths are rarely very long, this
+     should be acceptable.  If we truly identify this as a performance
+     problem, it should work to hash the pointer value
+     "e->parent".  */
   for (p = e->parent; p ; p = p->parent)
-    h = (h * 65521) ^ sra_hash_tree (p->element);
+    if (!p->in_bitfld_block)
+      h = (h * 65521) ^ sra_hash_tree (p->element);
 
   return h;
 }
@@ -497,8 +509,17 @@ sra_elt_eq (const void *x, const void *y)
   const struct sra_elt *a = x;
   const struct sra_elt *b = y;
   tree ae, be;
+  const struct sra_elt *ap = a->parent;
+  const struct sra_elt *bp = b->parent;
 
-  if (a->parent != b->parent)
+  if (ap)
+    while (ap->in_bitfld_block)
+      ap = ap->parent;
+  if (bp)
+    while (bp->in_bitfld_block)
+      bp = bp->parent;
+
+  if (ap != bp)
     return false;
 
   ae = a->element;
@@ -533,6 +554,11 @@ sra_elt_eq (const void *x, const void *y)
	return false;
       return fields_compatible_p (ae, be);
 
+    case BIT_FIELD_REF:
+      return
+	tree_int_cst_equal (TREE_OPERAND (ae, 1), TREE_OPERAND (be, 1))
+	&& tree_int_cst_equal (TREE_OPERAND (ae, 2), TREE_OPERAND (be, 2));
+
     default:
       gcc_unreachable ();
     }
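The two hunks above keep hashing and equality consistent for the new element kind: a BIT_FIELD_REF element is identified purely by its size and position operands, never by operand 0, and bitfield blocks are transparent in the parent chain. A hypothetical sketch (real tree-building helpers, invented values):

    tree type = unsigned_type_node;
    /* Operand 0 (the parent object) is deliberately NULL here;
       sra_hash_tree and sra_elt_eq never look at it.  */
    tree a = build3 (BIT_FIELD_REF, type, NULL_TREE,
                     bitsize_int (8), bitsize_int (16));
    tree b = build3 (BIT_FIELD_REF, type, NULL_TREE,
                     bitsize_int (8), bitsize_int (16));
    /* a != b as pointers, yet sra_hash_tree (a) == sra_hash_tree (b),
       and elements carrying them compare equal in sra_elt_eq, so the
       lookup table treats both as the same 8-bit field at bit 16.  */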
@@ -671,10 +697,9 @@ struct sra_walk_fns
   /* Invoked when ELT is required as a unit.  Note that ELT might refer to
      a leaf node, in which case this is a simple scalar reference.  *EXPR_P
      points to the location of the expression.  IS_OUTPUT is true if this
-     is a left-hand-side reference.  USE_ALL is true if we saw something we
-     couldn't quite identify and had to force the use of the entire object.  */
+     is a left-hand-side reference.  */
   void (*use) (struct sra_elt *elt, tree *expr_p,
-	       block_stmt_iterator *bsi, bool is_output, bool use_all);
+	       block_stmt_iterator *bsi, bool is_output);
 
   /* Invoked when we have a copy between two scalarizable references.  */
   void (*copy) (struct sra_elt *lhs_elt, struct sra_elt *rhs_elt,
@@ -728,7 +753,6 @@ sra_walk_expr (tree *expr_p, block_stmt_iterator *bsi, bool is_output,
   tree expr = *expr_p;
   tree inner = expr;
   bool disable_scalarization = false;
-  bool use_all_p = false;
 
   /* We're looking to collect a reference expression between EXPR and INNER,
      such that INNER is a scalarizable decl and all other nodes through EXPR
@@ -749,7 +773,7 @@ sra_walk_expr (tree *expr_p, block_stmt_iterator *bsi, bool is_output,
	  if (disable_scalarization)
	    elt->cannot_scalarize = true;
	  else
-	    fns->use (elt, expr_p, bsi, is_output, use_all_p);
+	    fns->use (elt, expr_p, bsi, is_output);
	}
       return;
@@ -836,7 +860,6 @@ sra_walk_expr (tree *expr_p, block_stmt_iterator *bsi, bool is_output,
     use_all:
       expr_p = &TREE_OPERAND (inner, 0);
       inner = expr = *expr_p;
-      use_all_p = true;
       break;
 
     default:
@@ -884,11 +907,14 @@ sra_walk_asm_expr (tree expr, block_stmt_iterator *bsi,
   sra_walk_tree_list (ASM_OUTPUTS (expr), bsi, true, fns);
 }
 
+static void sra_replace (block_stmt_iterator *bsi, tree list);
+static tree sra_build_elt_assignment (struct sra_elt *elt, tree src);
+
 /* Walk a GIMPLE_MODIFY_STMT and categorize the assignment appropriately.  */
 
 static void
 sra_walk_gimple_modify_stmt (tree expr, block_stmt_iterator *bsi,
			      const struct sra_walk_fns *fns)
 {
   struct sra_elt *lhs_elt, *rhs_elt;
   tree lhs, rhs;
@@ -911,7 +937,7 @@ sra_walk_gimple_modify_stmt (tree expr, block_stmt_iterator *bsi,
       if (!rhs_elt->is_scalar && !TREE_SIDE_EFFECTS (lhs))
	fns->ldst (rhs_elt, lhs, bsi, false);
       else
-	fns->use (rhs_elt, &GIMPLE_STMT_OPERAND (expr, 1), bsi, false, false);
+	fns->use (rhs_elt, &GIMPLE_STMT_OPERAND (expr, 1), bsi, false);
     }
 
   /* If it isn't scalarizable, there may be scalarizable variables within, so
@@ -958,7 +984,9 @@ sra_walk_gimple_modify_stmt (tree expr, block_stmt_iterator *bsi,
   /* Otherwise we're being used in some context that requires the
      aggregate to be seen as a whole.  Invoke USE.  */
   else
-    fns->use (lhs_elt, &GIMPLE_STMT_OPERAND (expr, 0), bsi, true, false);
+    {
+      fns->use (lhs_elt, &GIMPLE_STMT_OPERAND (expr, 0), bsi, true);
+    }
 }
 
 /* Similarly to above, LHS_ELT being null only means that the LHS as a
@@ -1069,7 +1097,7 @@ find_candidates_for_sra (void)
 static void
 scan_use (struct sra_elt *elt, tree *expr_p ATTRIBUTE_UNUSED,
	  block_stmt_iterator *bsi ATTRIBUTE_UNUSED,
-	  bool is_output ATTRIBUTE_UNUSED, bool use_all ATTRIBUTE_UNUSED)
+	  bool is_output ATTRIBUTE_UNUSED)
 {
   elt->n_uses += 1;
 }
@@ -1177,6 +1205,15 @@ build_element_name_1 (struct sra_elt *elt)
       sprintf (buffer, HOST_WIDE_INT_PRINT_DEC, TREE_INT_CST_LOW (t));
       obstack_grow (&sra_obstack, buffer, strlen (buffer));
     }
+  else if (TREE_CODE (t) == BIT_FIELD_REF)
+    {
+      sprintf (buffer, "B" HOST_WIDE_INT_PRINT_DEC,
+	       tree_low_cst (TREE_OPERAND (t, 2), 1));
+      obstack_grow (&sra_obstack, buffer, strlen (buffer));
+      sprintf (buffer, "F" HOST_WIDE_INT_PRINT_DEC,
+	       tree_low_cst (TREE_OPERAND (t, 1), 1));
+      obstack_grow (&sra_obstack, buffer, strlen (buffer));
+    }
   else
     {
       tree name = DECL_NAME (t);
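A block element's name thus encodes its bit position and width. A hedged example of what the new branch produces:

    /* For BIT_FIELD_REF <x, 32, 0>, operand 2 (position) is 0 and
       operand 1 (width) is 32, so the fragment is "B0F32" and the
       replacement variable gets a name along the lines of x$B0F32
       (the exact separator comes from build_element_name).  */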
@@ -1209,9 +1246,12 @@ instantiate_element (struct sra_elt *elt)
 {
   struct sra_elt *base_elt;
   tree var, base;
+  bool nowarn = TREE_NO_WARNING (elt->element);
 
   for (base_elt = elt; base_elt->parent; base_elt = base_elt->parent)
-    continue;
+    if (!nowarn)
+      nowarn = base_elt->parent->n_uses
+	       || TREE_NO_WARNING (base_elt->parent->element);
   base = base_elt->element;
 
   elt->replacement = var = make_rename_temp (elt->type, "SR");
@@ -1240,9 +1280,7 @@ instantiate_element (struct sra_elt *elt)
       DECL_DEBUG_EXPR_IS_FROM (var) = 1;
 
       DECL_IGNORED_P (var) = 0;
-      TREE_NO_WARNING (var) = TREE_NO_WARNING (base);
-      if (elt->element && TREE_NO_WARNING (elt->element))
-	TREE_NO_WARNING (var) = 1;
+      TREE_NO_WARNING (var) = nowarn;
     }
   else
     {
@@ -1337,7 +1375,7 @@ sum_instantiated_sizes (struct sra_elt *elt, unsigned HOST_WIDE_INT *sizep)
 
 static void instantiate_missing_elements (struct sra_elt *elt);
 
-static void
+static struct sra_elt *
 instantiate_missing_elements_1 (struct sra_elt *elt, tree child, tree type)
 {
   struct sra_elt *sub = lookup_element (elt, child, type, INSERT);
@@ -1348,6 +1386,262 @@ instantiate_missing_elements_1 (struct sra_elt *elt, tree child, tree type)
     }
   else
     instantiate_missing_elements (sub);
+
+  return sub;
+}
+
+/* Obtain the canonical type for field F of ELEMENT.  */
+
+static tree
+canon_type_for_field (tree f, tree element)
+{
+  tree field_type = TREE_TYPE (f);
+
+  /* canonicalize_component_ref() unwidens some bit-field types (not
+     marked as DECL_BIT_FIELD in C++), so we must do the same, lest we
+     may introduce type mismatches.  */
+  if (INTEGRAL_TYPE_P (field_type)
+      && DECL_MODE (f) != TYPE_MODE (field_type))
+    field_type = TREE_TYPE (get_unwidened (build3 (COMPONENT_REF,
+						   field_type,
+						   element,
+						   f, NULL_TREE),
+					   NULL_TREE));
+
+  return field_type;
+}
+
+/* Look for adjacent fields of ELT starting at F that we'd like to
+   scalarize as a single variable.  Return the last field of the
+   group.  */
+
+static tree
+try_instantiate_multiple_fields (struct sra_elt *elt, tree f)
+{
+  unsigned HOST_WIDE_INT align, oalign, word, bit, size, alchk;
+  enum machine_mode mode;
+  tree first = f, prev;
+  tree type, var;
+  struct sra_elt *block;
+
+  if (!is_sra_scalar_type (TREE_TYPE (f))
+      || !host_integerp (DECL_FIELD_OFFSET (f), 1)
+      || !host_integerp (DECL_FIELD_BIT_OFFSET (f), 1)
+      || !host_integerp (DECL_SIZE (f), 1)
+      || lookup_element (elt, f, NULL, NO_INSERT))
+    return f;
+
+  /* Taking the alignment of elt->element is not enough, since it
+     might be just an array index or some such.  */
+  for (block = elt; block; block = block->parent)
+    if (DECL_P (block->element))
+      {
+	align = DECL_ALIGN (block->element);
+	break;
+      }
+  gcc_assert (block);
+
+  oalign = DECL_OFFSET_ALIGN (f);
+  word = tree_low_cst (DECL_FIELD_OFFSET (f), 1);
+  bit = tree_low_cst (DECL_FIELD_BIT_OFFSET (f), 1);
+  size = tree_low_cst (DECL_SIZE (f), 1);
+
+  if (align > oalign)
+    align = oalign;
+
+  alchk = align - 1;
+  alchk = ~alchk;
+
+  if ((bit & alchk) != ((bit + size - 1) & alchk))
+    return f;
+
+  /* Find adjacent fields in the same alignment word.  */
+
+  for (prev = f, f = TREE_CHAIN (f);
+       f && TREE_CODE (f) == FIELD_DECL
+	 && is_sra_scalar_type (TREE_TYPE (f))
+	 && host_integerp (DECL_FIELD_OFFSET (f), 1)
+	 && host_integerp (DECL_FIELD_BIT_OFFSET (f), 1)
+	 && host_integerp (DECL_SIZE (f), 1)
+	 && (HOST_WIDE_INT)word == tree_low_cst (DECL_FIELD_OFFSET (f), 1)
+	 && !lookup_element (elt, f, NULL, NO_INSERT);
+       prev = f, f = TREE_CHAIN (f))
+    {
+      unsigned HOST_WIDE_INT nbit, nsize;
+
+      nbit = tree_low_cst (DECL_FIELD_BIT_OFFSET (f), 1);
+      nsize = tree_low_cst (DECL_SIZE (f), 1);
+
+      if (bit + size == nbit)
+	{
+	  if ((bit & alchk) != ((nbit + nsize - 1) & alchk))
+	    break;
+	  size += nsize;
+	}
+      else if (nbit + nsize == bit)
+	{
+	  if ((nbit & alchk) != ((bit + size - 1) & alchk))
+	    break;
+	  bit = nbit;
+	  size += nsize;
+	}
+      else
+	break;
+    }
+
+  f = prev;
+
+  if (f == first)
+    return f;
+
+  gcc_assert ((bit & alchk) == ((bit + size - 1) & alchk));
+
+  /* Try to widen the bit range so as to cover padding bits as well.  */
+
+  if ((bit & ~alchk) || size != align)
+    {
+      unsigned HOST_WIDE_INT mbit = bit & alchk;
+      unsigned HOST_WIDE_INT msize = align;
+
+      for (f = TYPE_FIELDS (elt->type);
+	   f; f = TREE_CHAIN (f))
+	{
+	  unsigned HOST_WIDE_INT fword, fbit, fsize;
+
+	  /* Skip the fields from first to prev.  */
+	  if (f == first)
+	    {
+	      f = prev;
+	      continue;
+	    }
+
+	  if (!(TREE_CODE (f) == FIELD_DECL
+		&& host_integerp (DECL_FIELD_OFFSET (f), 1)
+		&& host_integerp (DECL_FIELD_BIT_OFFSET (f), 1)))
+	    continue;
+
+	  fword = tree_low_cst (DECL_FIELD_OFFSET (f), 1);
+	  /* If we're past the selected word, we're fine.  */
+	  if (word < fword)
+	    continue;
+
+	  fbit = tree_low_cst (DECL_FIELD_BIT_OFFSET (f), 1);
+
+	  if (host_integerp (DECL_SIZE (f), 1))
+	    fsize = tree_low_cst (DECL_SIZE (f), 1);
+	  else
+	    /* Assume a variable-sized field takes up all space till
+	       the end of the word.  ??? Endianness issues?  */
+	    fsize = align - fbit;
+
+	  if (fword < word)
+	    {
+	      /* A large field might start at a previous word and
+		 extend into the selected word.  Exclude those
+		 bits.  ??? Endianness issues?  */
+	      HOST_WIDE_INT diff = fbit + fsize
+		- (HOST_WIDE_INT)((word - fword) * BITS_PER_UNIT + mbit);
+
+	      if (diff <= 0)
+		continue;
+
+	      mbit += diff;
+	      msize -= diff;
+	    }
+	  else
+	    {
+	      gcc_assert (fword == word);
+
+	      /* Non-overlapping, great.  */
+	      if (fbit + fsize <= mbit
+		  || mbit + msize <= fbit)
+		continue;
+
+	      if (fbit <= mbit)
+		{
+		  unsigned HOST_WIDE_INT diff = fbit + fsize - mbit;
+		  mbit += diff;
+		  msize -= diff;
+		}
+	      else if (fbit > mbit)
+		msize -= (mbit + msize - fbit);
+	      else
+		gcc_unreachable ();
+	    }
+	}
+
+      bit = mbit;
+      size = msize;
+    }
+
+  /* Now we know the bit range we're interested in.  Find the smallest
+     machine mode we can use to access it.  */
+
+  for (mode = smallest_mode_for_size (size, MODE_INT);
+       ;
+       mode = GET_MODE_WIDER_MODE (mode))
+    {
+      gcc_assert (mode != VOIDmode);
+
+      alchk = GET_MODE_PRECISION (mode) - 1;
+      alchk = ~alchk;
+
+      if ((bit & alchk) == ((bit + size - 1) & alchk))
+	break;
+    }
+
+  gcc_assert (~alchk < align);
+
+  /* Create the field group as a single variable.  */
+  type = lang_hooks.types.type_for_mode (mode, 1);
+  gcc_assert (type);
+  var = build3 (BIT_FIELD_REF, type, NULL_TREE,
+		bitsize_int (size),
+		bitsize_int (word * BITS_PER_UNIT + bit));
+  BIT_FIELD_REF_UNSIGNED (var) = 1;
+
+  block = instantiate_missing_elements_1 (elt, var, type);
+  gcc_assert (block && block->is_scalar);
+
+  var = block->replacement;
+
+  if (((word * BITS_PER_UNIT + bit) & ~alchk)
+      || (HOST_WIDE_INT)size != tree_low_cst (DECL_SIZE (var), 1))
+    {
+      block->replacement = build3 (BIT_FIELD_REF,
+				   TREE_TYPE (block->element), var,
+				   bitsize_int (size),
+				   bitsize_int ((word * BITS_PER_UNIT
+						 + bit) & ~alchk));
+      BIT_FIELD_REF_UNSIGNED (block->replacement) = 1;
+      TREE_NO_WARNING (block->replacement) = 1;
+    }
+
+  block->in_bitfld_block = 2;
+
+  /* Add the member fields to the group, such that they access
+     portions of the group variable.  */
+
+  for (f = first; f != TREE_CHAIN (prev); f = TREE_CHAIN (f))
+    {
+      tree field_type = canon_type_for_field (f, elt->element);
+      struct sra_elt *fld = lookup_element (block, f, field_type, INSERT);
+
+      gcc_assert (fld && fld->is_scalar && !fld->replacement);
+
+      fld->replacement = build3 (BIT_FIELD_REF, field_type, var,
+				 DECL_SIZE (f),
+				 bitsize_int
+				 ((word * BITS_PER_UNIT
+				   + (TREE_INT_CST_LOW
+				      (DECL_FIELD_BIT_OFFSET (f))))
+				  & ~alchk));
+      BIT_FIELD_REF_UNSIGNED (fld->replacement) = TYPE_UNSIGNED (field_type);
+      TREE_NO_WARNING (block->replacement) = 1;
+      fld->in_bitfld_block = 1;
+    }
+
+  return prev;
 }
 
 static void
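To make the grouping logic concrete, a worked example under an assumed 32-bit alignment word; the struct is invented:

    struct s { unsigned a : 8; unsigned b : 8; unsigned c : 8; unsigned d : 8; };

    /* All four fields are SRA-scalar, sit in the same 32-bit alignment
       word (bit offsets 0, 8, 16, 24, size 8 each), and none has been
       instantiated individually, so the scan loop accumulates
       bit = 0, size = 32.  smallest_mode_for_size (32, MODE_INT)
       yields SImode, lang_hooks.types.type_for_mode gives a 32-bit
       unsigned type, and the group becomes the single element
       BIT_FIELD_REF <s, 32, 0> with in_bitfld_block = 2.  Fields a..d
       are registered as its children (in_bitfld_block = 1), each
       replaced by an 8-bit BIT_FIELD_REF into the block variable.  */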
@@ -1363,21 +1657,17 @@ instantiate_missing_elements (struct sra_elt *elt)
	for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
	  if (TREE_CODE (f) == FIELD_DECL)
	    {
-	      tree field_type = TREE_TYPE (f);
-
-	      /* canonicalize_component_ref() unwidens some bit-field
-		 types (not marked as DECL_BIT_FIELD in C++), so we
-		 must do the same, lest we may introduce type
-		 mismatches.  */
-	      if (INTEGRAL_TYPE_P (field_type)
-		  && DECL_MODE (f) != TYPE_MODE (field_type))
-		field_type = TREE_TYPE (get_unwidened (build3 (COMPONENT_REF,
-							       field_type,
-							       elt->element,
-							       f, NULL_TREE),
-						       NULL_TREE));
-
-	      instantiate_missing_elements_1 (elt, f, field_type);
+	      tree last = try_instantiate_multiple_fields (elt, f);
+
+	      if (last != f)
+		{
+		  f = last;
+		  continue;
+		}
+
+	      instantiate_missing_elements_1 (elt, f,
					      canon_type_for_field
					      (f, elt->element));
	    }
	break;
      }
@@ -1689,6 +1979,16 @@ generate_one_element_ref (struct sra_elt *elt, tree base)
       {
	tree field = elt->element;
 
+	/* We can't test elt->in_bitfld_blk here because, when this is
+	   called from instantiate_element, we haven't set this field
+	   yet.  */
+	if (TREE_CODE (field) == BIT_FIELD_REF)
+	  {
+	    tree ret = copy_node (field);
+	    TREE_OPERAND (ret, 0) = base;
+	    return ret;
+	  }
+
	/* Watch out for compatible records with differing field lists.  */
	if (DECL_FIELD_CONTEXT (field) != TYPE_MAIN_VARIANT (TREE_TYPE (base)))
	  field = find_compatible_field (TREE_TYPE (base), field);
@@ -1741,6 +2041,126 @@ sra_build_assignment (tree dst, tree src)
   return build_gimple_modify_stmt (dst, src);
 }
 
+/* BIT_FIELD_REFs must not be shared.  sra_build_elt_assignment()
+   takes care of assignments, but we must create copies for uses.  */
+#define REPLDUP(t) (TREE_CODE (t) != BIT_FIELD_REF ? (t) : copy_node (t))
+
+static tree
+sra_build_elt_assignment (struct sra_elt *elt, tree src)
+{
+  tree dst = elt->replacement;
+  tree var, type, tmp, tmp2, tmp3;
+  tree list, stmt;
+  tree cst, cst2, mask;
+  tree minshift, maxshift;
+
+  if (TREE_CODE (dst) != BIT_FIELD_REF
+      || !elt->in_bitfld_block)
+    return sra_build_assignment (REPLDUP (dst), src);
+
+  var = TREE_OPERAND (dst, 0);
+
+  /* Try to widen the assignment to the entire variable.
+     We need the source to be a BIT_FIELD_REF as well, such that, for
+     BIT_FIELD_REF<d,sz,dp> = BIT_FIELD_REF<s,sz,sp>,
+     if sp >= dp, we can turn it into
+     d = BIT_FIELD_REF<s,sp+sz,sp-dp>.  */
+  if (elt->in_bitfld_block == 2
+      && TREE_CODE (src) == BIT_FIELD_REF
+      && !tree_int_cst_lt (TREE_OPERAND (src, 2), TREE_OPERAND (dst, 2)))
+    {
+      src = fold_build3 (BIT_FIELD_REF, TREE_TYPE (var),
+			 TREE_OPERAND (src, 0),
+			 size_binop (PLUS_EXPR, TREE_OPERAND (src, 1),
+				     TREE_OPERAND (dst, 2)),
+			 size_binop (MINUS_EXPR, TREE_OPERAND (src, 2),
+				     TREE_OPERAND (dst, 2)));
+      BIT_FIELD_REF_UNSIGNED (src) = 1;
+
+      return sra_build_assignment (var, src);
+    }
+
+  if (!is_gimple_reg (var))
+    return sra_build_assignment (REPLDUP (dst), src);
+
+  list = alloc_stmt_list ();
+
+  cst = TREE_OPERAND (dst, 2);
+  if (WORDS_BIG_ENDIAN)
+    {
+      cst = size_binop (MINUS_EXPR, DECL_SIZE (var), cst);
+      maxshift = cst;
+    }
+  else
+    minshift = cst;
+
+  cst2 = size_binop (PLUS_EXPR, TREE_OPERAND (dst, 1),
+		     TREE_OPERAND (dst, 2));
+  if (WORDS_BIG_ENDIAN)
+    {
+      cst2 = size_binop (MINUS_EXPR, DECL_SIZE (var), cst2);
+      minshift = cst2;
+    }
+  else
+    maxshift = cst2;
+
+  type = TREE_TYPE (var);
+
+  mask = build_int_cst_wide (type, 1, 0);
+  cst = int_const_binop (LSHIFT_EXPR, mask, maxshift, 1);
+  cst2 = int_const_binop (LSHIFT_EXPR, mask, minshift, 1);
+  mask = int_const_binop (MINUS_EXPR, cst, cst2, 1);
+  mask = fold_build1 (BIT_NOT_EXPR, type, mask);
+
+  if (!WORDS_BIG_ENDIAN)
+    cst2 = TREE_OPERAND (dst, 2);
+
+  tmp = make_rename_temp (type, "SR");
+  stmt = build_gimple_modify_stmt (tmp,
+				   fold_build2 (BIT_AND_EXPR, type,
+						var, mask));
+  append_to_statement_list (stmt, &list);
+
+  if (is_gimple_reg (src))
+    tmp2 = src;
+  else
+    {
+      tmp2 = make_rename_temp (TREE_TYPE (src), "SR");
+      stmt = sra_build_assignment (tmp2, src);
+      append_to_statement_list (stmt, &list);
+    }
+
+  if (!TYPE_UNSIGNED (TREE_TYPE (tmp2))
+      || TYPE_MAIN_VARIANT (TREE_TYPE (tmp2)) != TYPE_MAIN_VARIANT (type))
+    {
+      tmp3 = make_rename_temp (type, "SR");
+      tmp2 = fold_build3 (BIT_FIELD_REF, type, tmp2, TREE_OPERAND (dst, 1),
+			  bitsize_int (0));
+      if (TREE_CODE (tmp2) == BIT_FIELD_REF)
+	BIT_FIELD_REF_UNSIGNED (tmp2) = 1;
+      stmt = sra_build_assignment (tmp3, tmp2);
+      append_to_statement_list (stmt, &list);
+      tmp2 = tmp3;
+    }
+
+  if (!integer_zerop (minshift))
+    {
+      tmp3 = make_rename_temp (type, "SR");
+      stmt = build_gimple_modify_stmt (tmp3,
+				       fold_build2 (LSHIFT_EXPR, type,
+						    tmp2, minshift));
+      append_to_statement_list (stmt, &list);
+      tmp2 = tmp3;
+    }
+
+  stmt = build_gimple_modify_stmt (var,
+				   fold_build2 (BIT_IOR_EXPR, type,
+						tmp, tmp2));
+  append_to_statement_list (stmt, &list);
+
+  return list;
+}
+
 /* Generate a set of assignment statements in *LIST_P to copy all
    instantiated elements under ELT to or from the equivalent structure
    rooted at EXPR.  COPY_OUT controls the direction of the copy, with
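The statement list built by sra_build_elt_assignment amounts to a read-modify-write of the block variable. A runnable C model of the !WORDS_BIG_ENDIAN case, with invented names (the pass emits GIMPLE statements, not C, and narrows SRC via a BIT_FIELD_REF only when types mismatch; here it is done unconditionally for simplicity):

    #include <stdint.h>

    /* Write the low SIZE bits of SRC at bit offset POS of the 32-bit
       block VAR, mirroring minshift = pos, maxshift = pos + size.  */
    static uint32_t
    bitfld_store (uint32_t var, uint32_t src, unsigned pos, unsigned size)
    {
      uint32_t lo   = (uint32_t) 1 << pos;           /* cst2 = 1 << minshift  */
      uint32_t hi   = pos + size < 32
                      ? (uint32_t) 1 << (pos + size) /* cst = 1 << maxshift   */
                      : 0;                           /* wraps; hi - lo still
                                                        covers bits pos..31   */
      uint32_t mask = ~(hi - lo);                    /* BIT_NOT_EXPR of field */

      uint32_t tmp  = var & mask;                    /* keep bits outside     */
      if (size < 32)
        src &= ((uint32_t) 1 << size) - 1;           /* the BIT_FIELD_REF
                                                        narrowing step        */
      return tmp | (src << pos);                     /* LSHIFT + BIT_IOR      */
    }

For example, bitfld_store (0xffffffff, 0xab, 8, 8) yields 0xffffabff, the effect of BIT_FIELD_REF <var, 8, 8> = 0xab.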
@@ -1771,9 +2191,9 @@ generate_copy_inout (struct sra_elt *elt, bool copy_out, tree expr,
   else if (elt->replacement)
     {
       if (copy_out)
-	t = sra_build_assignment (elt->replacement, expr);
+	t = sra_build_elt_assignment (elt, expr);
       else
-	t = sra_build_assignment (expr, elt->replacement);
+	t = sra_build_assignment (expr, REPLDUP (elt->replacement));
       append_to_statement_list (t, list_p);
     }
   else
@@ -1798,6 +2218,19 @@ generate_element_copy (struct sra_elt *dst, struct sra_elt *src, tree *list_p)
   FOR_EACH_ACTUAL_CHILD (dc, dst)
     {
       sc = lookup_element (src, dc->element, NULL, NO_INSERT);
+      if (!sc && dc->in_bitfld_block == 2)
+	{
+	  struct sra_elt *dcs;
+
+	  FOR_EACH_ACTUAL_CHILD (dcs, dc)
+	    {
+	      sc = lookup_element (src, dcs->element, NULL, NO_INSERT);
+	      gcc_assert (sc);
+	      generate_element_copy (dcs, sc, list_p);
+	    }
+
+	  continue;
+	}
       gcc_assert (sc);
       generate_element_copy (dc, sc, list_p);
     }
@@ -1808,7 +2241,7 @@ generate_element_copy (struct sra_elt *dst, struct sra_elt *src, tree *list_p)
 
       gcc_assert (src->replacement);
 
-      t = sra_build_assignment (dst->replacement, src->replacement);
+      t = sra_build_elt_assignment (dst, REPLDUP (src->replacement));
       append_to_statement_list (t, list_p);
     }
 }
@@ -1829,8 +2262,9 @@ generate_element_zero (struct sra_elt *elt, tree *list_p)
       return;
     }
 
-  FOR_EACH_ACTUAL_CHILD (c, elt)
-    generate_element_zero (c, list_p);
+  if (!elt->in_bitfld_block)
+    FOR_EACH_ACTUAL_CHILD (c, elt)
+      generate_element_zero (c, list_p);
 
   if (elt->replacement)
     {
@@ -1839,7 +2273,7 @@ generate_element_zero (struct sra_elt *elt, tree *list_p)
       gcc_assert (elt->is_scalar);
 
       t = fold_convert (elt->type, integer_zero_node);
-      t = sra_build_assignment (elt->replacement, t);
+      t = sra_build_elt_assignment (elt, t);
       append_to_statement_list (t, list_p);
     }
 }
@@ -1848,10 +2282,10 @@ generate_element_zero (struct sra_elt *elt, tree *list_p)
    Add the result to *LIST_P.  */
 
 static void
-generate_one_element_init (tree var, tree init, tree *list_p)
+generate_one_element_init (struct sra_elt *elt, tree init, tree *list_p)
 {
   /* The replacement can be almost arbitrarily complex.  Gimplify.  */
-  tree stmt = sra_build_assignment (var, init);
+  tree stmt = sra_build_elt_assignment (elt, init);
   gimplify_and_add (stmt, list_p);
 }
@@ -1880,7 +2314,7 @@ generate_element_init_1 (struct sra_elt *elt, tree init, tree *list_p)
     {
       if (elt->replacement)
	{
-	  generate_one_element_init (elt->replacement, init, list_p);
+	  generate_one_element_init (elt, init, list_p);
	  elt->visited = true;
	}
       return result;
@@ -2039,7 +2473,7 @@ sra_replace (block_stmt_iterator *bsi, tree list)
 
 static void
 scalarize_use (struct sra_elt *elt, tree *expr_p, block_stmt_iterator *bsi,
-	       bool is_output, bool use_all)
+	       bool is_output)
 {
   tree list = NULL, stmt = bsi_stmt (*bsi);
@@ -2048,8 +2482,27 @@ scalarize_use (struct sra_elt *elt, tree *expr_p, block_stmt_iterator *bsi,
       /* If we have a replacement, then updating the reference is as
	 simple as modifying the existing statement in place.  */
       if (is_output)
-	mark_all_v_defs (stmt);
-      *expr_p = elt->replacement;
+	{
+	  if (TREE_CODE (elt->replacement) == BIT_FIELD_REF
+	      && is_gimple_reg (TREE_OPERAND (elt->replacement, 0))
+	      && TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
+	      && &GIMPLE_STMT_OPERAND (stmt, 0) == expr_p)
+	    {
+	      tree newstmt = sra_build_elt_assignment
+		(elt, GIMPLE_STMT_OPERAND (stmt, 1));
+	      if (TREE_CODE (newstmt) != STATEMENT_LIST)
+		{
+		  tree list = alloc_stmt_list ();
+		  append_to_statement_list (newstmt, &list);
+		  newstmt = list;
+		}
+	      sra_replace (bsi, newstmt);
+	      return;
+	    }
+
+	  mark_all_v_defs (stmt);
+	}
+      *expr_p = REPLDUP (elt->replacement);
       update_stmt (stmt);
     }
   else
@@ -2067,17 +2520,23 @@ scalarize_use (struct sra_elt *elt, tree *expr_p, block_stmt_iterator *bsi,
	 This optimization would be most effective if sra_walk_function
	 processed the blocks in dominator order.  */
 
-      generate_copy_inout (elt, is_output, generate_element_ref (elt), &list);
-      if (list == NULL)
-	return;
-      mark_all_v_defs (list);
-      if (is_output)
-	sra_insert_after (bsi, list);
-      else
+      generate_copy_inout (elt, false, generate_element_ref (elt), &list);
+      if (list)
	{
+	  mark_all_v_defs (list);
	  sra_insert_before (bsi, list);
-	  if (use_all)
-	    mark_no_warning (elt);
	}
+
+      if (is_output)
+	{
+	  list = NULL;
+	  generate_copy_inout (elt, true, generate_element_ref (elt), &list);
+	  if (list)
+	    {
+	      mark_all_v_defs (list);
+	      sra_insert_after (bsi, list);
+	    }
+	}
     }
 }
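Net effect of the rewritten block: scalar replacements are flushed into the aggregate before the statement, and reloaded from it afterwards when the statement writes the aggregate. Schematically (pseudo-GIMPLE, invented names):

    /* STMT uses <aggr> as a whole; SR_1, SR_2 are its replacements.  */
    <aggr>.f1 = SR_1;      /* copy-in, inserted before STMT           */
    <aggr>.f2 = SR_2;
    STMT;
    SR_1 = <aggr>.f1;      /* copy-out, inserted after STMT,          */
    SR_2 = <aggr>.f2;      /* only when is_output is true             */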
@@ -2101,7 +2560,7 @@ scalarize_copy (struct sra_elt *lhs_elt, struct sra_elt *rhs_elt,
       gcc_assert (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT);
 
       GIMPLE_STMT_OPERAND (stmt, 0) = lhs_elt->replacement;
-      GIMPLE_STMT_OPERAND (stmt, 1) = rhs_elt->replacement;
+      GIMPLE_STMT_OPERAND (stmt, 1) = REPLDUP (rhs_elt->replacement);
       update_stmt (stmt);
     }
   else if (lhs_elt->use_block_copy || rhs_elt->use_block_copy)
@@ -2243,7 +2702,7 @@ scalarize_ldst (struct sra_elt *elt, tree other,
     {
       /* Since ELT is not fully instantiated, we have to leave the
	  block copy in place.  Treat this as a USE.  */
-      scalarize_use (elt, NULL, bsi, is_output, false);
+      scalarize_use (elt, NULL, bsi, is_output);
     }
   else
     {
@@ -2255,8 +2714,8 @@ scalarize_ldst (struct sra_elt *elt, tree other,
       mark_all_v_defs (stmt);
       generate_copy_inout (elt, is_output, other, &list);
-      mark_all_v_defs (list);
       gcc_assert (list);
+      mark_all_v_defs (list);
 
       /* Preserve EH semantics.  */
       if (stmt_ends_bb_p (stmt))
@@ -2352,6 +2811,10 @@ dump_sra_elt_name (FILE *f, struct sra_elt *elt)
	  fputc ('.', f);
	print_generic_expr (f, elt->element, dump_flags);
       }
+  else if (TREE_CODE (elt->element) == BIT_FIELD_REF)
+    fprintf (f, "$B" HOST_WIDE_INT_PRINT_DEC "F" HOST_WIDE_INT_PRINT_DEC,
+	     tree_low_cst (TREE_OPERAND (elt->element, 2), 1),
+	     tree_low_cst (TREE_OPERAND (elt->element, 1), 1));
   else if (TREE_CODE (elt->element) == RANGE_EXPR)
     fprintf (f, "["HOST_WIDE_INT_PRINT_DEC".."HOST_WIDE_INT_PRINT_DEC"]",
	     TREE_INT_CST_LOW (TREE_OPERAND (elt->element, 0)),