Commit f2918c18 by Andrew MacLeod

tree-dfa.h (get_addr_base_and_unit_offset_1): Move from here.


	* tree-dfa.h (get_addr_base_and_unit_offset_1): Move from here.
	* tree-dfa.c (get_addr_base_and_unit_offset_1): To here.

From-SVN: r211757
gcc/ChangeLog:

2014-06-17  Andrew MacLeod  <amacleod@redhat.com>

	* tree-dfa.h (get_addr_base_and_unit_offset_1): Move from here.
	* tree-dfa.c (get_addr_base_and_unit_offset_1): To here.

2014-06-17  Xinliang David Li  <davidxl@google.com>

	* tree-pretty-print.c (dump_function_header): Print cgraph uid.

gcc/tree-dfa.c
@@ -664,6 +664,145 @@ get_ref_base_and_extent (tree exp, HOST_WIDE_INT *poffset,
/* Returns the base object and a constant BITS_PER_UNIT offset in *POFFSET that
   denotes the starting address of the memory access EXP.
   Returns NULL_TREE if the offset is not constant or any component
   is not BITS_PER_UNIT-aligned.
   VALUEIZE if non-NULL is used to valueize SSA names.  It should return
   its argument or a constant if the argument is known to be constant.  */

tree
get_addr_base_and_unit_offset_1 (tree exp, HOST_WIDE_INT *poffset,
                                 tree (*valueize) (tree))
{
  HOST_WIDE_INT byte_offset = 0;

  /* Compute cumulative byte-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  while (1)
    {
      switch (TREE_CODE (exp))
        {
        case BIT_FIELD_REF:
          {
            HOST_WIDE_INT this_off = TREE_INT_CST_LOW (TREE_OPERAND (exp, 2));
            if (this_off % BITS_PER_UNIT)
              return NULL_TREE;
            byte_offset += this_off / BITS_PER_UNIT;
          }
          break;

        case COMPONENT_REF:
          {
            tree field = TREE_OPERAND (exp, 1);
            tree this_offset = component_ref_field_offset (exp);
            HOST_WIDE_INT hthis_offset;

            if (!this_offset
                || TREE_CODE (this_offset) != INTEGER_CST
                || (TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field))
                    % BITS_PER_UNIT))
              return NULL_TREE;

            hthis_offset = TREE_INT_CST_LOW (this_offset);
            hthis_offset += (TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field))
                             / BITS_PER_UNIT);
            byte_offset += hthis_offset;
          }
          break;

        case ARRAY_REF:
        case ARRAY_RANGE_REF:
          {
            tree index = TREE_OPERAND (exp, 1);
            tree low_bound, unit_size;

            if (valueize
                && TREE_CODE (index) == SSA_NAME)
              index = (*valueize) (index);

            /* If the resulting bit-offset is constant, track it.  */
            if (TREE_CODE (index) == INTEGER_CST
                && (low_bound = array_ref_low_bound (exp),
                    TREE_CODE (low_bound) == INTEGER_CST)
                && (unit_size = array_ref_element_size (exp),
                    TREE_CODE (unit_size) == INTEGER_CST))
              {
                offset_int woffset
                  = wi::sext (wi::to_offset (index) - wi::to_offset (low_bound),
                              TYPE_PRECISION (TREE_TYPE (index)));
                woffset *= wi::to_offset (unit_size);
                byte_offset += woffset.to_shwi ();
              }
            else
              return NULL_TREE;
          }
          break;

        case REALPART_EXPR:
          break;

        case IMAGPART_EXPR:
          byte_offset += TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (exp)));
          break;

        case VIEW_CONVERT_EXPR:
          break;

        case MEM_REF:
          {
            tree base = TREE_OPERAND (exp, 0);
            if (valueize
                && TREE_CODE (base) == SSA_NAME)
              base = (*valueize) (base);

            /* Hand back the decl for MEM[&decl, off].  */
            if (TREE_CODE (base) == ADDR_EXPR)
              {
                if (!integer_zerop (TREE_OPERAND (exp, 1)))
                  {
                    offset_int off = mem_ref_offset (exp);
                    byte_offset += off.to_short_addr ();
                  }
                exp = TREE_OPERAND (base, 0);
              }
            goto done;
          }

        case TARGET_MEM_REF:
          {
            tree base = TREE_OPERAND (exp, 0);
            if (valueize
                && TREE_CODE (base) == SSA_NAME)
              base = (*valueize) (base);

            /* Hand back the decl for MEM[&decl, off].  */
            if (TREE_CODE (base) == ADDR_EXPR)
              {
                if (TMR_INDEX (exp) || TMR_INDEX2 (exp))
                  return NULL_TREE;
                if (!integer_zerop (TMR_OFFSET (exp)))
                  {
                    offset_int off = mem_ref_offset (exp);
                    byte_offset += off.to_short_addr ();
                  }
                exp = TREE_OPERAND (base, 0);
              }
            goto done;
          }

        default:
          goto done;
        }

      exp = TREE_OPERAND (exp, 0);
    }

 done:
  *poffset = byte_offset;
  return exp;
}
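
Usage sketch (hypothetical caller, not part of this commit): the VALUEIZE
callback lets a caller substitute known constants for SSA names; an identity
callback, like passing NULL, disables that substitution.

/* Hypothetical caller sketch.  identity_valueize hands back its argument
   unchanged; a real callback would return a constant when the SSA name is
   known to be constant.  */

static tree
identity_valueize (tree t)
{
  return t;
}

static tree
base_and_byte_offset (tree ref, HOST_WIDE_INT *poffset)
{
  /* Returns the base object of REF and sets *POFFSET to the constant byte
     offset, or returns NULL_TREE if any component is not constant or not
     byte-aligned.  */
  return get_addr_base_and_unit_offset_1 (ref, poffset, identity_valueize);
}
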
/* Returns the base object and a constant BITS_PER_UNIT offset in *POFFSET that
   denotes the starting address of the memory access EXP.
   Returns NULL_TREE if the offset is not constant or any component
   is not BITS_PER_UNIT-aligned.  */

tree
......
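
For comparison, a sketch of calling the plain wrapper (its body is elided
above; the caller shown here is hypothetical): it takes no callback, so SSA
names are not valueized.

/* Hypothetical caller: ADDR is an ADDR_EXPR; recover the base object and
   constant byte offset of the address it takes, if any.  */

static tree
addr_base_if_constant (tree addr, HOST_WIDE_INT *byte_off)
{
  return get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0), byte_off);
}
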
gcc/tree-dfa.h
@@ -31,152 +31,11 @@ extern void set_ssa_default_def (struct function *, tree, tree);
extern tree get_or_create_ssa_default_def (struct function *, tree);
extern tree get_ref_base_and_extent (tree, HOST_WIDE_INT *,
                                     HOST_WIDE_INT *, HOST_WIDE_INT *);
extern tree get_addr_base_and_unit_offset_1 (tree, HOST_WIDE_INT *,
                                             tree (*) (tree));
extern tree get_addr_base_and_unit_offset (tree, HOST_WIDE_INT *);
extern bool stmt_references_abnormal_ssa_name (gimple);
extern void dump_enumerated_decls (FILE *, int);
/* Returns the base object and a constant BITS_PER_UNIT offset in *POFFSET that
   denotes the starting address of the memory access EXP.
   Returns NULL_TREE if the offset is not constant or any component
   is not BITS_PER_UNIT-aligned.
   VALUEIZE if non-NULL is used to valueize SSA names.  It should return
   its argument or a constant if the argument is known to be constant.  */

/* ??? This is a static inline here to avoid the overhead of the indirect calls
   to VALUEIZE.  But is this overhead really that significant?  And should we
   perhaps just rely on WHOPR to specialize the function?  */

static inline tree
get_addr_base_and_unit_offset_1 (tree exp, HOST_WIDE_INT *poffset,
                                 tree (*valueize) (tree))
{
  HOST_WIDE_INT byte_offset = 0;

  /* Compute cumulative byte-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  while (1)
    {
      switch (TREE_CODE (exp))
        {
        case BIT_FIELD_REF:
          {
            HOST_WIDE_INT this_off = TREE_INT_CST_LOW (TREE_OPERAND (exp, 2));
            if (this_off % BITS_PER_UNIT)
              return NULL_TREE;
            byte_offset += this_off / BITS_PER_UNIT;
          }
          break;

        case COMPONENT_REF:
          {
            tree field = TREE_OPERAND (exp, 1);
            tree this_offset = component_ref_field_offset (exp);
            HOST_WIDE_INT hthis_offset;

            if (!this_offset
                || TREE_CODE (this_offset) != INTEGER_CST
                || (TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field))
                    % BITS_PER_UNIT))
              return NULL_TREE;

            hthis_offset = TREE_INT_CST_LOW (this_offset);
            hthis_offset += (TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field))
                             / BITS_PER_UNIT);
            byte_offset += hthis_offset;
          }
          break;

        case ARRAY_REF:
        case ARRAY_RANGE_REF:
          {
            tree index = TREE_OPERAND (exp, 1);
            tree low_bound, unit_size;

            if (valueize
                && TREE_CODE (index) == SSA_NAME)
              index = (*valueize) (index);

            /* If the resulting bit-offset is constant, track it.  */
            if (TREE_CODE (index) == INTEGER_CST
                && (low_bound = array_ref_low_bound (exp),
                    TREE_CODE (low_bound) == INTEGER_CST)
                && (unit_size = array_ref_element_size (exp),
                    TREE_CODE (unit_size) == INTEGER_CST))
              {
                offset_int woffset
                  = wi::sext (wi::to_offset (index) - wi::to_offset (low_bound),
                              TYPE_PRECISION (TREE_TYPE (index)));
                woffset *= wi::to_offset (unit_size);
                byte_offset += woffset.to_shwi ();
              }
            else
              return NULL_TREE;
          }
          break;

        case REALPART_EXPR:
          break;

        case IMAGPART_EXPR:
          byte_offset += TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (exp)));
          break;

        case VIEW_CONVERT_EXPR:
          break;

        case MEM_REF:
          {
            tree base = TREE_OPERAND (exp, 0);
            if (valueize
                && TREE_CODE (base) == SSA_NAME)
              base = (*valueize) (base);

            /* Hand back the decl for MEM[&decl, off].  */
            if (TREE_CODE (base) == ADDR_EXPR)
              {
                if (!integer_zerop (TREE_OPERAND (exp, 1)))
                  {
                    offset_int off = mem_ref_offset (exp);
                    byte_offset += off.to_short_addr ();
                  }
                exp = TREE_OPERAND (base, 0);
              }
            goto done;
          }

        case TARGET_MEM_REF:
          {
            tree base = TREE_OPERAND (exp, 0);
            if (valueize
                && TREE_CODE (base) == SSA_NAME)
              base = (*valueize) (base);

            /* Hand back the decl for MEM[&decl, off].  */
            if (TREE_CODE (base) == ADDR_EXPR)
              {
                if (TMR_INDEX (exp) || TMR_INDEX2 (exp))
                  return NULL_TREE;
                if (!integer_zerop (TMR_OFFSET (exp)))
                  {
                    offset_int off = mem_ref_offset (exp);
                    byte_offset += off.to_short_addr ();
                  }
                exp = TREE_OPERAND (base, 0);
              }
            goto done;
          }

        default:
          goto done;
        }

      exp = TREE_OPERAND (exp, 0);
    }

 done:
  *poffset = byte_offset;
  return exp;
}
#endif /* GCC_TREE_DFA_H */