Commit afe84921 by Richard Henderson, committed by Richard Henderson

expr.c (get_inner_reference): Handle REAL/IMAGPART_EXPR.

        * expr.c (get_inner_reference): Handle REAL/IMAGPART_EXPR.
        (handled_component_p): Likewise.
        * alias.c (can_address_p): Reformat and simplify.  Handle
        REAL/IMAGPART_EXPR.  Do not disable addressability based on
        alias set zero.
        * fold-const.c (build_fold_addr_expr_with_type): Remove duplicate
        check for REAL/IMAGPART_EXPR.
        * gimplify.c (gimplify_compound_lval): Likewise.
        * tree-cfg.c (verify_expr): Likewise.
        * tree-gimple.c (is_gimple_addressable, get_base_address): Likewise.
        * tree-nested.c (build_addr, convert_nonlocal_reference): Likewise.
        (convert_local_reference): Likewise.
        * tree-ssa-loop-ivopts.c (prepare_decl_rtl): Likewise.

From-SVN: r91511
parent f2978871
2004-11-29 Richard Henderson <rth@redhat.com>
* expr.c (get_inner_reference): Handle REAL/IMAGPART_EXPR.
(handled_component_p): Likewise.
* alias.c (can_address_p): Reformat and simplify. Handle
REAL/IMAGPART_EXPR. Do not disable addressability based on
alias set zero.
* fold-const.c (build_fold_addr_expr_with_type): Remove duplicate
check for REAL/IMAGPART_EXPR.
* gimplify.c (gimplify_compound_lval): Likewise.
* tree-cfg.c (verify_expr): Likewise.
* tree-gimple.c (is_gimple_addressable, get_base_address): Likewise.
* tree-nested.c (build_addr, convert_nonlocal_reference): Likewise.
(convert_local_reference): Likewise.
* tree-ssa-loop-ivopts.c (prepare_decl_rtl): Likewise.
2004-11-30 Alan Modra <amodra@bigpond.net.au>
* expr.c (emit_group_load_1): Don't die on const_int orig_src.
......
......@@ -384,30 +384,36 @@ find_base_decl (tree t)
/* Return nonzero if all interior references in T are addressable, i.e.
   a pointer into the middle of T may legitimately be formed.  Walks the
   whole reference chain; stops early on the first non-addressable step.

   NOTE(review): this is the post-commit (r91511) form of the function.
   The old version recursed and also refused addressability when the
   containing type had alias set zero; the new version iterates, handles
   REAL/IMAGPART_EXPR, and no longer consults get_alias_set.  */
int
can_address_p (tree t)
{
  /* Walk down the reference chain one component at a time.  */
  while (1)
    {
      /* If we're at the end, it is vacuously addressable.  */
      if (!handled_component_p (t))
	return true;

      switch (TREE_CODE (t))
	{
	case COMPONENT_REF:
	  /* Fields are addressable unless marked nonaddressable.  */
	  if (DECL_NONADDRESSABLE_P (TREE_OPERAND (t, 1)))
	    return false;
	  break;

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  /* Likewise for array elements.  */
	  if (TYPE_NONALIASED_COMPONENT (TREE_TYPE (TREE_OPERAND (t, 0))))
	    return false;
	  break;

	case REALPART_EXPR:
	case IMAGPART_EXPR:
	  /* The real/imaginary halves of a complex value are always
	     addressable if the whole is.  */
	  break;

	default:
	  /* Bitfields and casts are never addressable.  */
	  return false;
	}

      t = TREE_OPERAND (t, 0);
    }
}
/* Return the alias set for T, which may be either a type or an
......
......@@ -5311,65 +5311,90 @@ get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
and find the ultimate containing object. */
while (1)
{
if (TREE_CODE (exp) == BIT_FIELD_REF)
bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
else if (TREE_CODE (exp) == COMPONENT_REF)
switch (TREE_CODE (exp))
{
tree field = TREE_OPERAND (exp, 1);
tree this_offset = component_ref_field_offset (exp);
case BIT_FIELD_REF:
bit_offset = size_binop (PLUS_EXPR, bit_offset,
TREE_OPERAND (exp, 2));
break;
/* If this field hasn't been filled in yet, don't go
past it. This should only happen when folding expressions
made during type construction. */
if (this_offset == 0)
break;
case COMPONENT_REF:
{
tree field = TREE_OPERAND (exp, 1);
tree this_offset = component_ref_field_offset (exp);
offset = size_binop (PLUS_EXPR, offset, this_offset);
bit_offset = size_binop (PLUS_EXPR, bit_offset,
DECL_FIELD_BIT_OFFSET (field));
/* If this field hasn't been filled in yet, don't go past it.
This should only happen when folding expressions made during
type construction. */
if (this_offset == 0)
break;
/* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
}
offset = size_binop (PLUS_EXPR, offset, this_offset);
bit_offset = size_binop (PLUS_EXPR, bit_offset,
DECL_FIELD_BIT_OFFSET (field));
else if (TREE_CODE (exp) == ARRAY_REF
|| TREE_CODE (exp) == ARRAY_RANGE_REF)
{
tree index = TREE_OPERAND (exp, 1);
tree low_bound = array_ref_low_bound (exp);
tree unit_size = array_ref_element_size (exp);
/* We assume all arrays have sizes that are a multiple of a byte.
First subtract the lower bound, if any, in the type of the
index, then convert to sizetype and multiply by the size of the
array element. */
if (! integer_zerop (low_bound))
index = fold (build2 (MINUS_EXPR, TREE_TYPE (index),
index, low_bound));
offset = size_binop (PLUS_EXPR, offset,
size_binop (MULT_EXPR,
convert (sizetype, index),
unit_size));
}
/* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
}
break;
/* We can go inside most conversions: all NON_VALUE_EXPRs, all normal
conversions that don't change the mode, and all view conversions
except those that need to "step up" the alignment. */
else if (TREE_CODE (exp) != NON_LVALUE_EXPR
&& ! (TREE_CODE (exp) == VIEW_CONVERT_EXPR
&& ! ((TYPE_ALIGN (TREE_TYPE (exp))
> TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
&& STRICT_ALIGNMENT
&& (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
< BIGGEST_ALIGNMENT)
&& (TYPE_ALIGN_OK (TREE_TYPE (exp))
|| TYPE_ALIGN_OK (TREE_TYPE
(TREE_OPERAND (exp, 0))))))
&& ! ((TREE_CODE (exp) == NOP_EXPR
|| TREE_CODE (exp) == CONVERT_EXPR)
&& (TYPE_MODE (TREE_TYPE (exp))
== TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
break;
case ARRAY_REF:
case ARRAY_RANGE_REF:
{
tree index = TREE_OPERAND (exp, 1);
tree low_bound = array_ref_low_bound (exp);
tree unit_size = array_ref_element_size (exp);
/* We assume all arrays have sizes that are a multiple of a byte.
First subtract the lower bound, if any, in the type of the
index, then convert to sizetype and multiply by the size of
the array element. */
if (! integer_zerop (low_bound))
index = fold (build2 (MINUS_EXPR, TREE_TYPE (index),
index, low_bound));
offset = size_binop (PLUS_EXPR, offset,
size_binop (MULT_EXPR,
convert (sizetype, index),
unit_size));
}
break;
case REALPART_EXPR:
bit_offset = bitsize_zero_node;
break;
case IMAGPART_EXPR:
bit_offset = build_int_cst (bitsizetype, *pbitsize);
break;
/* We can go inside most conversions: all NON_VALUE_EXPRs, all normal
conversions that don't change the mode, and all view conversions
except those that need to "step up" the alignment. */
case NON_LVALUE_EXPR:
break;
case NOP_EXPR:
case CONVERT_EXPR:
if (TYPE_MODE (TREE_TYPE (exp))
!= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
goto done;
break;
case VIEW_CONVERT_EXPR:
if ((TYPE_ALIGN (TREE_TYPE (exp))
> TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
&& STRICT_ALIGNMENT
&& (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
< BIGGEST_ALIGNMENT)
&& (TYPE_ALIGN_OK (TREE_TYPE (exp))
|| TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
goto done;
break;
default:
goto done;
}
/* If any reference in the chain is volatile, the effect is volatile. */
if (TREE_THIS_VOLATILE (exp))
......@@ -5377,6 +5402,7 @@ get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
exp = TREE_OPERAND (exp, 0);
}
done:
/* If OFFSET is constant, see if we can return the whole thing as a
constant bit position. Otherwise, split it up. */
......@@ -5499,6 +5525,8 @@ handled_component_p (tree t)
case ARRAY_RANGE_REF:
case NON_LVALUE_EXPR:
case VIEW_CONVERT_EXPR:
case REALPART_EXPR:
case IMAGPART_EXPR:
return 1;
/* ??? Sure they are handled, but get_inner_reference may return
......
......@@ -10799,9 +10799,7 @@ build_fold_addr_expr_with_type (tree t, tree ptrtype)
{
tree base = t;
while (handled_component_p (base)
|| TREE_CODE (base) == REALPART_EXPR
|| TREE_CODE (base) == IMAGPART_EXPR)
while (handled_component_p (base))
base = TREE_OPERAND (base, 0);
if (DECL_P (base))
TREE_ADDRESSABLE (base) = 1;
......
......@@ -1427,12 +1427,8 @@ gimplify_compound_lval (tree *expr_p, tree *pre_p,
it VARRAY_TREE. */
VARRAY_GENERIC_PTR_NOGC_INIT (stack, 10, "stack");
/* We can either handle REALPART_EXPR, IMAGEPART_EXPR anything that
handled_components can deal with. */
for (p = expr_p;
(handled_component_p (*p)
|| TREE_CODE (*p) == REALPART_EXPR || TREE_CODE (*p) == IMAGPART_EXPR);
p = &TREE_OPERAND (*p, 0))
/* We can handle anything that get_inner_reference can deal with. */
for (p = expr_p; handled_component_p (*p); p = &TREE_OPERAND (*p, 0))
VARRAY_PUSH_GENERIC_PTR_NOGC (stack, *p);
gcc_assert (VARRAY_ACTIVE_SIZE (stack));
......
......@@ -3238,9 +3238,7 @@ verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
tree) and ensure that any variable used as a prefix is marked
addressable. */
for (x = TREE_OPERAND (t, 0);
(handled_component_p (x)
|| TREE_CODE (x) == REALPART_EXPR
|| TREE_CODE (x) == IMAGPART_EXPR);
handled_component_p (x);
x = TREE_OPERAND (x, 0))
;
......@@ -3288,8 +3286,7 @@ verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
that determine where to reference is either a constant or a variable,
verify that the base is valid, and then show we've already checked
the subtrees. */
while (TREE_CODE (t) == REALPART_EXPR || TREE_CODE (t) == IMAGPART_EXPR
|| handled_component_p (t))
while (handled_component_p (t))
{
if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
CHECK_OP (2, "Invalid COMPONENT_REF offset operator");
......
......@@ -161,10 +161,7 @@ bool
is_gimple_addressable (tree t)
{
return (is_gimple_id (t) || handled_component_p (t)
|| TREE_CODE (t) == REALPART_EXPR
|| TREE_CODE (t) == IMAGPART_EXPR
|| INDIRECT_REF_P (t));
}
/* Return true if T is function invariant. Or rather a restricted
......@@ -430,8 +427,7 @@ get_call_expr_in (tree t)
tree
get_base_address (tree t)
{
while (TREE_CODE (t) == REALPART_EXPR || TREE_CODE (t) == IMAGPART_EXPR
|| handled_component_p (t))
while (handled_component_p (t))
t = TREE_OPERAND (t, 0);
if (SSA_VAR_P (t)
......
......@@ -155,8 +155,7 @@ build_addr (tree exp)
{
tree base = exp;
while (TREE_CODE (base) == REALPART_EXPR || TREE_CODE (base) == IMAGPART_EXPR
|| handled_component_p (base))
while (handled_component_p (base))
base = TREE_OPERAND (base, 0);
if (DECL_P (base))
......@@ -849,9 +848,7 @@ convert_nonlocal_reference (tree *tp, int *walk_subtrees, void *data)
anything that describes the references. Otherwise, we lose track
of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value. */
wi->val_only = true;
for (; handled_component_p (t)
|| TREE_CODE (t) == REALPART_EXPR || TREE_CODE (t) == IMAGPART_EXPR;
tp = &TREE_OPERAND (t, 0), t = *tp)
for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
{
if (TREE_CODE (t) == COMPONENT_REF)
walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference, wi,
......@@ -966,9 +963,7 @@ convert_local_reference (tree *tp, int *walk_subtrees, void *data)
anything that describes the references. Otherwise, we lose track
of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value. */
wi->val_only = true;
for (; handled_component_p (t)
|| TREE_CODE (t) == REALPART_EXPR || TREE_CODE (t) == IMAGPART_EXPR;
tp = &TREE_OPERAND (t, 0), t = *tp)
for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
{
if (TREE_CODE (t) == COMPONENT_REF)
walk_tree (&TREE_OPERAND (t, 2), convert_local_reference, wi,
......
......@@ -2162,10 +2162,9 @@ prepare_decl_rtl (tree *expr_p, int *ws, void *data)
{
case ADDR_EXPR:
for (expr_p = &TREE_OPERAND (*expr_p, 0);
(handled_component_p (*expr_p)
|| TREE_CODE (*expr_p) == REALPART_EXPR
|| TREE_CODE (*expr_p) == IMAGPART_EXPR);
expr_p = &TREE_OPERAND (*expr_p, 0));
handled_component_p (*expr_p);
expr_p = &TREE_OPERAND (*expr_p, 0))
continue;
obj = *expr_p;
if (DECL_P (obj))
x = produce_memory_decl_rtl (obj, regno);
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment