Commit 6bec9271 by Richard Guenther, committed by Richard Biener

tree-flow.h (okay_component_ref_for_subvars): Remove.

2005-12-15  Richard Guenther  <rguenther@suse.de>

	* tree-flow.h (okay_component_ref_for_subvars): Remove.
	(get_ref_base_and_extent): Declare.
	* tree-dfa.c (okay_component_ref_for_subvars): Remove.
	(get_ref_base_and_extent): New function.
	* tree-ssa-alias.c (find_used_portions): Use it.
	* tree-ssa-structalias.c (get_constraint_for_component_ref):
	Likewise.
	* tree-ssa-operands.c (get_expr_operands): Likewise.

From-SVN: r108568
Parent: cd6dba21
gcc/ChangeLog
+2005-12-15  Richard Guenther  <rguenther@suse.de>
+
+	* tree-flow.h (okay_component_ref_for_subvars): Remove.
+	(get_ref_base_and_extent): Declare.
+	* tree-dfa.c (okay_component_ref_for_subvars): Remove.
+	(get_ref_base_and_extent): New function.
+	* tree-ssa-alias.c (find_used_portions): Use it.
+	* tree-ssa-structalias.c (get_constraint_for_component_ref):
+	Likewise.
+	* tree-ssa-operands.c (get_expr_operands): Likewise.
+
 2005-12-15  Paolo Bonzini  <bonzini@gnu.org>
 
 	* combine.c: Remove force_to_mode's fourth parameter.
gcc/tree-dfa.c
@@ -792,45 +792,157 @@ find_new_referenced_vars (tree *stmt_p)
 }
 
-/* If REF is a COMPONENT_REF for a structure that can have sub-variables, and
-   we know where REF is accessing, return the variable in REF that has the
-   sub-variables.  If the return value is not NULL, POFFSET will be the
-   offset, in bits, of REF inside the return value, and PSIZE will be the
-   size, in bits, of REF inside the return value.  */
+/* If REF is a handled component reference for a structure, return the
+   base variable.  The access range is delimited by bit positions *POFFSET and
+   *POFFSET + *PMAX_SIZE.  The access size is *PSIZE bits.  If either
+   *PSIZE or *PMAX_SIZE is -1, they could not be determined.  If *PSIZE
+   and *PMAX_SIZE are equal, the access is non-variable.  */
 
 tree
-okay_component_ref_for_subvars (tree ref, unsigned HOST_WIDE_INT *poffset,
-                                unsigned HOST_WIDE_INT *psize)
+get_ref_base_and_extent (tree exp, HOST_WIDE_INT *poffset,
+                         HOST_WIDE_INT *psize,
+                         HOST_WIDE_INT *pmax_size)
 {
-  tree result = NULL;
-  HOST_WIDE_INT bitsize;
-  HOST_WIDE_INT bitpos;
-  tree offset;
-  enum machine_mode mode;
-  int unsignedp;
-  int volatilep;
+  HOST_WIDE_INT bitsize = -1;
+  HOST_WIDE_INT maxsize = -1;
+  tree size_tree = NULL_TREE;
+  tree bit_offset = bitsize_zero_node;
 
-  gcc_assert (!SSA_VAR_P (ref));
-  *poffset = 0;
-  *psize = (unsigned int) -1;
+  gcc_assert (!SSA_VAR_P (exp));
 
-  if (ref_contains_array_ref (ref))
-    return result;
-  ref = get_inner_reference (ref, &bitsize, &bitpos, &offset, &mode,
-                             &unsignedp, &volatilep, false);
-  if (TREE_CODE (ref) == INDIRECT_REF)
-    return result;
-  else if (offset == NULL && bitsize != -1 && SSA_VAR_P (ref))
+  /* First get the final access size from just the outermost expression.  */
+  if (TREE_CODE (exp) == COMPONENT_REF)
+    size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
+  else if (TREE_CODE (exp) == BIT_FIELD_REF)
+    size_tree = TREE_OPERAND (exp, 1);
+  else
     {
-      *poffset = bitpos;
-      *psize = bitsize;
-      if (get_subvars_for_var (ref) != NULL)
-        return ref;
+      enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
+      if (mode == BLKmode)
+        size_tree = TYPE_SIZE (TREE_TYPE (exp));
+      else
+        bitsize = GET_MODE_BITSIZE (mode);
     }
-  else if (SSA_VAR_P (ref))
+  if (size_tree != NULL_TREE)
     {
-      if (get_subvars_for_var (ref) != NULL)
-        return ref;
+      if (! host_integerp (size_tree, 1))
+        bitsize = -1;
+      else
+        bitsize = TREE_INT_CST_LOW (size_tree);
     }
-  return NULL_TREE;
+
+  /* Initially, maxsize is the same as the accessed element size.
+     In the following it will only grow (or become -1).  */
+  maxsize = bitsize;
+
+  /* Compute cumulative bit-offset for nested component-refs and array-refs,
+     and find the ultimate containing object.  */
+  while (1)
+    {
+      switch (TREE_CODE (exp))
+        {
+        case BIT_FIELD_REF:
+          bit_offset = size_binop (PLUS_EXPR, bit_offset,
+                                   TREE_OPERAND (exp, 2));
+          break;
+
+        case COMPONENT_REF:
+          {
+            tree field = TREE_OPERAND (exp, 1);
+            tree this_offset = component_ref_field_offset (exp);
+
+            if (this_offset && TREE_CODE (this_offset) == INTEGER_CST)
+              {
+                this_offset = size_binop (MULT_EXPR,
+                                          fold_convert (bitsizetype,
+                                                        this_offset),
+                                          bitsize_unit_node);
+                bit_offset = size_binop (PLUS_EXPR,
+                                         bit_offset, this_offset);
+                bit_offset = size_binop (PLUS_EXPR, bit_offset,
+                                         DECL_FIELD_BIT_OFFSET (field));
+              }
+            else
+              {
+                tree csize = TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)));
+                /* We need to adjust maxsize to the whole structure bitsize.
+                   But we can subtract any constant offset seen sofar,
+                   because that would get us out of the structure otherwise.  */
+                if (maxsize != -1
+                    && csize && host_integerp (csize, 1))
+                  {
+                    maxsize = (TREE_INT_CST_LOW (csize)
+                               - TREE_INT_CST_LOW (bit_offset));
+                  }
+                else
+                  maxsize = -1;
+              }
+          }
+          break;
+
+        case ARRAY_REF:
+        case ARRAY_RANGE_REF:
+          {
+            tree index = TREE_OPERAND (exp, 1);
+            tree low_bound = array_ref_low_bound (exp);
+            tree unit_size = array_ref_element_size (exp);
+
+            if (! integer_zerop (low_bound))
+              index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
+                                   index, low_bound);
+            index = size_binop (MULT_EXPR,
+                                fold_convert (sizetype, index), unit_size);
+            if (TREE_CODE (index) == INTEGER_CST)
+              {
+                index = size_binop (MULT_EXPR,
+                                    fold_convert (bitsizetype, index),
+                                    bitsize_unit_node);
+                bit_offset = size_binop (PLUS_EXPR, bit_offset, index);
+              }
+            else
+              {
+                tree asize = TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)));
+                /* We need to adjust maxsize to the whole array bitsize.
+                   But we can subtract any constant offset seen sofar,
+                   because that would get us outside of the array otherwise.  */
+                if (maxsize != -1
+                    && asize && host_integerp (asize, 1))
+                  {
+                    maxsize = (TREE_INT_CST_LOW (asize)
+                               - TREE_INT_CST_LOW (bit_offset));
+                  }
+                else
+                  maxsize = -1;
+              }
+          }
+          break;
+
+        case REALPART_EXPR:
+          break;
+
+        case IMAGPART_EXPR:
+          bit_offset = size_binop (PLUS_EXPR, bit_offset,
+                                   bitsize_int (bitsize));
+          break;
+
+        case VIEW_CONVERT_EXPR:
+          /* ???  We probably should give up here and bail out.  */
+          break;
+
+        default:
+          goto done;
+        }
+
+      exp = TREE_OPERAND (exp, 0);
+    }
+ done:
+
+  /* ???  Due to negative offsets in ARRAY_REF we can end up with
+     negative bit_offset here.  We might want to store a zero offset
+     in this case.  */
+  *poffset = TREE_INT_CST_LOW (bit_offset);
+  *psize = bitsize;
+  *pmax_size = maxsize;
+
+  return exp;
 }
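To make the new interface concrete, here is a small standalone C program (not part of the patch; the struct names are invented for the example) that computes the values get_ref_base_and_extent would report for a constant-index access and for a variable-index access, using plain offsetof/sizeof arithmetic. The variable-index case shows how the constant offset components still accumulate while the maximum extent is widened, at the variable ARRAY_REF, to the remainder of the enclosing array.

/* Illustration only, not GCC source: the (offset, size, max_size) triples
   the new function reports, modelled with offsetof/sizeof arithmetic.
   "struct inner"/"struct outer" are invented for this example.  */
#include <stdio.h>
#include <stddef.h>
#include <limits.h>

struct inner { int a; int b; };
struct outer { char pad; struct inner in[4]; };

int
main (void)
{
  /* o.in[2].b: every step of the reference has a constant offset, so the
     reported size and max_size agree (a non-variable access).  */
  size_t off_bits = (offsetof (struct outer, in)
                     + 2 * sizeof (struct inner)
                     + offsetof (struct inner, b)) * CHAR_BIT;
  size_t size_bits = sizeof (int) * CHAR_BIT;
  printf ("o.in[2].b: offset=%zu size=%zu max_size=%zu\n",
          off_bits, size_bits, size_bits);

  /* o.in[i].b with a variable i: the constant parts (.in and .b) still
     accumulate into the offset, but at the variable ARRAY_REF max_size is
     widened to the rest of the array, so offset + max_size reaches the end
     of in[].  */
  size_t var_off_bits = (offsetof (struct outer, in)
                         + offsetof (struct inner, b)) * CHAR_BIT;
  size_t var_max_bits = (sizeof (struct inner [4])
                         - offsetof (struct inner, b)) * CHAR_BIT;
  printf ("o.in[i].b: offset=%zu size=%zu max_size=%zu\n",
          var_off_bits, size_bits, var_max_bits);
  return 0;
}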
gcc/tree-flow.h
@@ -592,8 +592,8 @@ static inline subvar_t get_subvars_for_var (tree);
 static inline tree get_subvar_at (tree, unsigned HOST_WIDE_INT);
 static inline bool ref_contains_array_ref (tree);
 static inline bool array_ref_contains_indirect_ref (tree);
-extern tree okay_component_ref_for_subvars (tree, unsigned HOST_WIDE_INT *,
-                                            unsigned HOST_WIDE_INT *);
+extern tree get_ref_base_and_extent (tree, HOST_WIDE_INT *,
+                                     HOST_WIDE_INT *, HOST_WIDE_INT *);
 static inline bool var_can_have_subvars (tree);
 static inline bool overlap_subvar (unsigned HOST_WIDE_INT,
                                    unsigned HOST_WIDE_INT,
gcc/tree-ssa-alias.c
@@ -2695,16 +2695,14 @@ find_used_portions (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
     case COMPONENT_REF:
       {
        HOST_WIDE_INT bitsize;
+       HOST_WIDE_INT bitmaxsize;
        HOST_WIDE_INT bitpos;
-       tree offset;
-       enum machine_mode mode;
-       int unsignedp;
-       int volatilep;
        tree ref;
 
-       ref = get_inner_reference (*tp, &bitsize, &bitpos, &offset, &mode,
-                                  &unsignedp, &volatilep, false);
-       if (DECL_P (ref) && offset == NULL && bitsize != -1)
+       ref = get_ref_base_and_extent (*tp, &bitpos, &bitsize, &bitmaxsize);
+       if (DECL_P (ref)
+           && var_can_have_subvars (ref)
+           && bitmaxsize != -1)
          {
            size_t uid = DECL_UID (ref);
            used_part_t up;
@@ -2712,37 +2710,18 @@ find_used_portions (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
            if (bitpos <= up->minused)
              up->minused = bitpos;
-           if ((bitpos + bitsize >= up->maxused))
-             up->maxused = bitpos + bitsize;
+           if ((bitpos + bitmaxsize >= up->maxused))
+             up->maxused = bitpos + bitmaxsize;
 
-           up->explicit_uses = true;
+           if (bitsize == bitmaxsize)
+             up->explicit_uses = true;
+           else
+             up->implicit_uses = true;
            up_insert (uid, up);
            *walk_subtrees = 0;
            return NULL_TREE;
          }
-       else if (DECL_P (ref))
-         {
-           if (DECL_SIZE (ref)
-               && var_can_have_subvars (ref)
-               && TREE_CODE (DECL_SIZE (ref)) == INTEGER_CST)
-             {
-               used_part_t up;
-               size_t uid = DECL_UID (ref);
-
-               up = get_or_create_used_part_for (uid);
-
-               up->minused = 0;
-               up->maxused = TREE_INT_CST_LOW (DECL_SIZE (ref));
-               up->implicit_uses = true;
-
-               up_insert (uid, up);
-               *walk_subtrees = 0;
-               return NULL_TREE;
-             }
-         }
       }
       break;
      /* This is here to make sure we mark the entire base variable as used
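A standalone sketch of the bookkeeping find_used_portions now does with the conservative extent, again not GCC source and with invented names: the used bit range of a variable is widened by bitmaxsize, and the use is recorded as explicit only when the access extent is exact (bitsize == bitmaxsize).

/* Model of the per-variable used-portion bookkeeping; struct and function
   names are invented for this sketch.  */
#include <stdbool.h>
#include <stdio.h>

struct used_part
{
  long minused;         /* lowest bit position seen */
  long maxused;         /* one past the highest bit position seen */
  bool explicit_uses;   /* some access had bitsize == bitmaxsize */
  bool implicit_uses;   /* some access had a variable extent */
};

static void
record_access (struct used_part *up, long bitpos, long bitsize, long bitmaxsize)
{
  if (bitpos <= up->minused)
    up->minused = bitpos;
  if (bitpos + bitmaxsize >= up->maxused)
    up->maxused = bitpos + bitmaxsize;
  if (bitsize == bitmaxsize)
    up->explicit_uses = true;   /* exact field access */
  else
    up->implicit_uses = true;   /* e.g. variable array index */
}

int
main (void)
{
  /* Start with an empty range: minused high, maxused low.  */
  struct used_part up = { 1000000, 0, false, false };
  record_access (&up, 64, 32, 32);    /* exact access, like s.in[2].b */
  record_access (&up, 64, 32, 224);   /* variable extent, like s.in[i].b */
  printf ("used bits [%ld, %ld) explicit=%d implicit=%d\n",
          up.minused, up.maxused, up.explicit_uses, up.implicit_uses);
  return 0;
}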
gcc/tree-ssa-operands.c
@@ -1133,25 +1133,26 @@ get_expr_operands (tree stmt, tree *expr_p, int flags)
     case IMAGPART_EXPR:
       {
        tree ref;
-       unsigned HOST_WIDE_INT offset, size;
+       HOST_WIDE_INT offset, size, maxsize;
 
        /* This component ref becomes an access to all of the subvariables
           it can touch, if we can determine that, but *NOT* the real one.
           If we can't determine which fields we could touch, the recursion
           will eventually get to a variable and add *all* of its subvars, or
           whatever is the minimum correct subset.  */
-       ref = okay_component_ref_for_subvars (expr, &offset, &size);
-       if (ref)
+       ref = get_ref_base_and_extent (expr, &offset, &size, &maxsize);
+       if (SSA_VAR_P (ref) && get_subvars_for_var (ref))
          {
            subvar_t svars = get_subvars_for_var (ref);
            subvar_t sv;
            for (sv = svars; sv; sv = sv->next)
              {
                bool exact;
-               if (overlap_subvar (offset, size, sv, &exact))
+               if (overlap_subvar (offset, maxsize, sv, &exact))
                  {
                    int subvar_flags = flags;
-                   if (!exact)
+                   if (!exact
+                       || size != maxsize)
                      subvar_flags &= ~opf_kill_def;
                    add_stmt_operand (&sv->var, s_ann, subvar_flags);
                  }
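And a last standalone sketch, with simplified stand-ins for subvar_t and overlap_subvar, of the decision the operand scanner now makes per field subvariable: an operand is added for every subvariable overlapping [offset, offset + maxsize), and the killing-definition flag is kept only when the overlap is exact and the access extent itself is exact (size == maxsize).

/* Not GCC source: invented types model the subvariable overlap test and the
   kill-def decision from the hunk above.  */
#include <stdbool.h>
#include <stdio.h>

struct subvar { const char *name; long offset; long size; };

/* Does the access [access_off, access_off + access_size) touch SV?
   Sets *EXACT when it covers SV exactly.  */
static bool
overlaps (const struct subvar *sv, long access_off, long access_size,
          bool *exact)
{
  bool hit = access_off < sv->offset + sv->size
             && sv->offset < access_off + access_size;
  *exact = hit && access_off == sv->offset && access_size == sv->size;
  return hit;
}

int
main (void)
{
  struct subvar fields[] = { { "s.a", 0, 32 }, { "s.b", 32, 32 } };
  long offset = 32, size = 32, maxsize = 32;   /* exact write to s.b */

  for (unsigned i = 0; i < 2; i++)
    {
      bool exact;
      if (overlaps (&fields[i], offset, maxsize, &exact))
        {
          /* Keep the killing def only for an exact, non-variable access.  */
          bool kill_def = exact && size == maxsize;
          printf ("%s: operand added, %s\n", fields[i].name,
                  kill_def ? "killing def" : "may-def only");
        }
    }
  return 0;
}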
gcc/tree-ssa-structalias.c
@@ -2348,11 +2348,8 @@ get_constraint_for_component_ref (tree t, VEC(ce_s, heap) **results,
 {
   tree orig_t = t;
   HOST_WIDE_INT bitsize = -1;
+  HOST_WIDE_INT bitmaxsize = -1;
   HOST_WIDE_INT bitpos;
-  tree offset = NULL_TREE;
-  enum machine_mode mode;
-  int unsignedp;
-  int volatilep;
   tree forzero;
   struct constraint_expr *result;
   unsigned int beforelength = VEC_length (ce_s, *results);
@@ -2374,8 +2371,7 @@ get_constraint_for_component_ref (tree t, VEC(ce_s, heap) **results,
       return;
     }
 
-  t = get_inner_reference (t, &bitsize, &bitpos, &offset, &mode,
-                           &unsignedp, &volatilep, false);
+  t = get_ref_base_and_extent (t, &bitpos, &bitsize, &bitmaxsize);
   get_constraint_for (t, results, anyoffset);
   result = VEC_last (ce_s, *results);
@@ -2386,11 +2382,11 @@ get_constraint_for_component_ref (tree t, VEC(ce_s, heap) **results,
       result->type = SCALAR;
 
       /* If we know where this goes, then yay. Otherwise, booo. */
-      if (offset == NULL && bitsize != -1)
+      if (bitmaxsize != -1
+          && bitsize == bitmaxsize)
        {
          result->offset = bitpos;
        }
       /* FIXME: Handle the DEREF case.  */
       else if (anyoffset && result->type != DEREF)
        {