Commit 1da68f56 by Richard Kenner Committed by Richard Kenner

alias.c (alias_sets_conflict_p): New function.

	* alias.c (alias_sets_conflict_p): New function.
	(mems_in_disjoint_alias_sets_p): Use it.
	(readonly_fields_p): Moved from expr.c; check for record type.
	(objects_must_conflict_p): New function.
	* calls.c (expand_call): Use assign_temp as much as possible, use
	readonly variant if assigned once, and don't set memory attributes.
	(emit_library_call_value_1, store_one_arg): Likewise.
	* integrate.c (expand_inline_function): Likewise.
	* stmt.c (expand_asm_operands, expand_return): Likewise.
	* expr.c (copy_blkmode_from_reg, store_constructor): Likewise.
	(store_field, save_noncopied_parts, expand_expr): Likewise.
	(expand_expr_unaligned): Likewise.
	(readonly_fields_p): Moved to alias.c.
	(safe_from_p): Rework handling of SAVE_EXPR.
	MEMs only conflict if alias sets conflict; likewise for INDIRECT_REF.
	* function.c (struct temp_slot): Delete field ALIAS_SET; add TYPE.
	(assign_stack_for_temp): Use objects_must_conflict_p.
	Set all memory attributes from type, if specified.
	(mark_temp_slot): Mark TYPE field.
	* tree.h (alias_sets_conflict_p, readonly_fields_p): New decls.
	(objects_must_conflict_p): Likewise.

	* stmt.c (expand_decl): Don't use assign_stack_temp in error case.
	(add_case_node): No need to copy nodes anymore.

From-SVN: r38559
parent 2e9ab75d
2000-12-30 Richard Kenner <kenner@vlsi1.ultra.nyu.edu>
* alias.c (alias_sets_conflict_p): New function.
(mems_in_disjoint_alias_sets_p): Use it.
(readonly_fields_p): Moved from expr.c; check for record type.
(objects_must_conflict_p): New function.
* calls.c (expand_call): Use assign_temp as much as possible, use
readonly variant if assigned once, and don't set memory attributes.
(emit_library_call_value_1, store_one_arg): Likewise.
* integrate.c (expand_inline_function): Likewise.
* stmt.c (expand_asm_operands, expand_return): Likewise.
* expr.c (copy_blkmode_from_reg, store_constructor): Likewise.
(store_field, save_noncopied_parts, expand_expr): Likewise.
(expand_expr_unaligned): Likewise.
(readonly_fields_p): Moved to alias.c.
(safe_from_p): Rework handling of SAVE_EXPR.
MEMs only conflict if alias sets conflict; likewise for INDIRECT_REF.
* function.c (struct temp_slot): Delete field ALIAS_SET; add TYPE.
(assign_stack_for_temp): Use objects_must_conflict_p.
Set all memory attributes from type, if specified.
(mark_temp_slot): Mark TYPE field.
* tree.h (alias_sets_conflict_p, readonly_fields_p): New decls.
(objects_must_conflict_p): Likewise.
* stmt.c (expand_decl): Don't use assign_stack_temp in error case.
(add_case_node): No need to copy nodes anymore.
2000-12-30 Alexandre Oliva <aoliva@redhat.com>
* config/sh/sh.c (split_branches): Don't dereference re-computed
`beyond' before checking it's non-NULL.
2000-12-29 Robert Lipe <robertl@sco.com>
Remove COFF support from i?86-pc-sco3.2v5.
......
......@@ -211,8 +211,6 @@ mems_in_disjoint_alias_sets_p (mem1, mem2)
rtx mem1;
rtx mem2;
{
alias_set_entry ase;
#ifdef ENABLE_CHECKING
/* Perform a basic sanity check. Namely, that there are no alias sets
if we're not using strict aliasing. This helps to catch bugs
......@@ -226,49 +224,112 @@ mems_in_disjoint_alias_sets_p (mem1, mem2)
abort ();
#endif
/* If we have no alias set information for one of the MEMs, we have to
assume it can alias anything. */
if (MEM_ALIAS_SET (mem1) == 0 || MEM_ALIAS_SET (mem2) == 0)
return 0;
return ! alias_sets_conflict_p (MEM_ALIAS_SET (mem1), MEM_ALIAS_SET (mem2));
}
/* If the two alias sets are the same, they may alias. */
if (MEM_ALIAS_SET (mem1) == MEM_ALIAS_SET (mem2))
return 0;
/* Callback for splay_tree_foreach: insert NODE into the splay tree given
by DATA (a splay_tree passed through the void * callback parameter).
Used by record_alias_subset to copy every child entry of one alias-set
node into another. Always returns 0 so that iteration over the
remaining nodes continues. */
static int
insert_subset_children (node, data)
splay_tree_node node;
void *data;
{
splay_tree_insert ((splay_tree) data, node->key, node->value);
return 0;
}
/* Return 1 if the two specified alias sets may conflict. */
int
alias_sets_conflict_p (set1, set2)
HOST_WIDE_INT set1, set2;
{
alias_set_entry ase;
/* If have no alias set information for one of the operands, we have
to assume it can alias anything. */
if (set1 == 0 || set2 == 0
/* If the two alias sets are the same, they may alias. */
|| set1 == set2)
return 1;
/* See if the first alias set is a subset of the second. */
ase = get_alias_set_entry (MEM_ALIAS_SET (mem1));
ase = get_alias_set_entry (set1);
if (ase != 0
&& (ase->has_zero_child
|| splay_tree_lookup (ase->children,
(splay_tree_key) MEM_ALIAS_SET (mem2))))
return 0;
(splay_tree_key) set2)))
return 1;
/* Now do the same, but with the alias sets reversed. */
ase = get_alias_set_entry (MEM_ALIAS_SET (mem2));
ase = get_alias_set_entry (set2);
if (ase != 0
&& (ase->has_zero_child
|| splay_tree_lookup (ase->children,
(splay_tree_key) MEM_ALIAS_SET (mem1))))
return 0;
(splay_tree_key) set1)))
return 1;
/* The two MEMs are in distinct alias sets, and neither one is the
/* The two alias sets are distinct and neither one is the
child of the other. Therefore, they cannot alias. */
return 1;
return 0;
}
/* Return 1 if TYPE is a RECORD_TYPE, UNION_TYPE, or QUAL_UNION_TYPE and
   has any readonly fields.  If any of the fields have types that
   contain readonly fields, return true as well.  Any other kind of
   type yields 0.  */

int
readonly_fields_p (type)
     tree type;
{
  tree field;

  /* Only record-like types can have readonly fields at all.  */
  if (TREE_CODE (type) != RECORD_TYPE && TREE_CODE (type) != UNION_TYPE
      && TREE_CODE (type) != QUAL_UNION_TYPE)
    return 0;

  /* A field counts if it is itself marked readonly, or if its own type
     recursively contains a readonly field.  */
  for (field = TYPE_FIELDS (type); field != 0; field = TREE_CHAIN (field))
    if (TREE_CODE (field) == FIELD_DECL
	&& (TREE_READONLY (field)
	    || readonly_fields_p (TREE_TYPE (field))))
      return 1;

  return 0;
}
/* Return 1 if any MEM object of type T1 will always conflict (using the
dependency routines in this file) with any MEM object of type T2.
This is used when allocating temporary storage. If T1 and/or T2 are
NULL_TREE, it means we know nothing about the storage.
A zero return means objects of the two types might be independent,
so (for example) a temporary slot holding one type cannot safely be
reused for the other. */
int
objects_must_conflict_p (t1, t2)
tree t1, t2;
{
/* If they are the same type, they must conflict. */
if (t1 == t2
/* Likewise if both are volatile. */
|| (t1 != 0 && TYPE_VOLATILE (t1) && t2 != 0 && TYPE_VOLATILE (t2)))
return 1;
/* We now know they are different types. If one or both has readonly fields
or if one is readonly and the other not, they may not conflict.
Likewise if one is aggregate and the other is scalar. */
if ((t1 != 0 && readonly_fields_p (t1))
|| (t2 != 0 && readonly_fields_p (t2))
|| ((t1 != 0 && TYPE_READONLY (t1))
!= (t2 != 0 && TYPE_READONLY (t2)))
|| ((t1 != 0 && AGGREGATE_TYPE_P (t1))
!= (t2 != 0 && AGGREGATE_TYPE_P (t2))))
return 0;
/* Otherwise they conflict only if the alias sets conflict. An unknown
(null) type is passed as alias set 0, which alias_sets_conflict_p
treats as a wildcard that may alias anything. */
return alias_sets_conflict_p (t1 ? get_alias_set (t1) : 0,
t2 ? get_alias_set (t2) : 0);
}
/* T is an expression with pointer type. Find the DECL on which this
expression is based. (For example, in `a[i]' this would be `a'.)
If there is no such DECL, or a unique decl cannot be determined,
......
......@@ -2260,16 +2260,11 @@ expand_call (exp, target, ignore)
structure_value_addr = XEXP (target, 0);
else
{
rtx d;
/* For variable-sized objects, we must be called with a target
specified. If we were to allocate space on the stack here,
we would have no way of knowing when to free it. */
rtx d = assign_temp (TREE_TYPE (exp), 1, 1, 1);
if (struct_value_size < 0)
abort ();
d = assign_temp (TREE_TYPE (exp), 1, 1, 1);
mark_temp_addr_taken (d);
structure_value_addr = XEXP (d, 0);
target = 0;
......@@ -3230,18 +3225,20 @@ expand_call (exp, target, ignore)
The Irix 6 ABI has examples of this. */
else if (GET_CODE (valreg) == PARALLEL)
{
int bytes = int_size_in_bytes (TREE_TYPE (exp));
if (target == 0)
{
target = assign_stack_temp (TYPE_MODE (TREE_TYPE (exp)),
bytes, 0);
MEM_SET_IN_STRUCT_P (target, AGGREGATE_TYPE_P (TREE_TYPE (exp)));
/* This will only be assigned once, so it can be readonly. */
tree nt = build_qualified_type (TREE_TYPE (exp),
(TYPE_QUALS (TREE_TYPE (exp))
| TYPE_QUAL_CONST));
target = assign_temp (nt, 0, 1, 1);
preserve_temp_slots (target);
}
if (! rtx_equal_p (target, valreg))
emit_group_store (target, valreg, bytes,
emit_group_store (target, valreg,
int_size_in_bytes (TREE_TYPE (exp)),
TYPE_ALIGN (TREE_TYPE (exp)));
/* We can not support sibling calls for this case. */
......@@ -3562,7 +3559,7 @@ emit_library_call_value_1 (retval, orgfun, value, fn_type, outmode, nargs, p)
if (value != 0 && GET_CODE (value) == MEM)
mem_value = value;
else
mem_value = assign_stack_temp (outmode, GET_MODE_SIZE (outmode), 0);
mem_value = assign_temp (type_for_mode (outmode, 0), 0, 1, 1);
#endif
/* This call returns a big structure. */
......@@ -3666,7 +3663,8 @@ emit_library_call_value_1 (retval, orgfun, value, fn_type, outmode, nargs, p)
{
/* We do not support FUNCTION_ARG_CALLEE_COPIES here since it can
be viewed as just an efficiency improvement. */
rtx slot = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
rtx slot = assign_temp (type_for_mode (mode, 0), 0, 1, 1);
call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
gen_rtx_USE (VOIDmode, slot),
call_fusage);
......@@ -4339,15 +4337,15 @@ store_one_arg (arg, argblock, flags, variable_size, reg_parm_stack_space)
if (save_mode == BLKmode)
{
arg->save_area = assign_stack_temp (BLKmode,
arg->size.constant, 0);
MEM_SET_IN_STRUCT_P (arg->save_area,
AGGREGATE_TYPE_P (TREE_TYPE
(arg->tree_value)));
tree ot = TREE_TYPE (arg->tree_value);
tree nt = build_qualified_type (ot, (TYPE_QUALS (ot)
| TYPE_QUAL_CONST));
arg->save_area = assign_temp (nt, 0, 1, 1);
preserve_temp_slots (arg->save_area);
emit_block_move (validize_mem (arg->save_area), stack_area,
GEN_INT (arg->size.constant),
PARM_BOUNDARY);
expr_size (arg->tree_value),
MIN (PARM_BOUNDARY, TYPE_ALIGN (nt)));
}
else
{
......
......@@ -193,17 +193,11 @@ struct temp_slot
int align;
/* The size, in units, of the slot. */
HOST_WIDE_INT size;
/* The alias set for the slot. If the alias set is zero, we don't
know anything about the alias set of the slot. We must only
reuse a slot if it is assigned an object of the same alias set.
Otherwise, the rest of the compiler may assume that the new use
of the slot cannot alias the old use of the slot, which is
false. If the slot has alias set zero, then we can't reuse the
slot at all, since we have no idea what alias set may have been
imposed on the memory. For example, if the stack slot is the
call frame for an inlined function, we have no idea what alias
sets will be assigned to various pieces of the call frame. */
HOST_WIDE_INT alias_set;
/* The type of the object in the slot, or zero if it doesn't correspond
to a type. We use this to determine whether a slot can be reused.
It can be reused if objects of the type of the new slot will always
conflict with objects of the type of the old slot. */
tree type;
/* The value of `sequence_rtl_expr' when this temporary is allocated. */
tree rtl_expr;
/* Non-zero if this temporary is currently in use. */
......@@ -658,7 +652,6 @@ assign_stack_temp_for_type (mode, size, keep, type)
tree type;
{
int align;
HOST_WIDE_INT alias_set;
struct temp_slot *p, *best_p = 0;
/* If SIZE is -1 it means that somebody tried to allocate a temporary
......@@ -666,14 +659,6 @@ assign_stack_temp_for_type (mode, size, keep, type)
if (size == -1)
abort ();
/* If we know the alias set for the memory that will be used, use
it. If there's no TYPE, then we don't know anything about the
alias set for the memory. */
if (type)
alias_set = get_alias_set (type);
else
alias_set = 0;
if (mode == BLKmode)
align = BIGGEST_ALIGNMENT;
else
......@@ -691,8 +676,7 @@ assign_stack_temp_for_type (mode, size, keep, type)
for (p = temp_slots; p; p = p->next)
if (p->align >= align && p->size >= size && GET_MODE (p->slot) == mode
&& ! p->in_use
&& (! flag_strict_aliasing
|| (alias_set && p->alias_set == alias_set))
&& objects_must_conflict_p (p->type, type)
&& (best_p == 0 || best_p->size > p->size
|| (best_p->size == p->size && best_p->align > p->align)))
{
......@@ -728,7 +712,7 @@ assign_stack_temp_for_type (mode, size, keep, type)
p->align = best_p->align;
p->address = 0;
p->rtl_expr = 0;
p->alias_set = best_p->alias_set;
p->type = best_p->type;
p->next = temp_slots;
temp_slots = p;
......@@ -766,7 +750,6 @@ assign_stack_temp_for_type (mode, size, keep, type)
align);
p->align = align;
p->alias_set = alias_set;
/* The following slot size computation is necessary because we don't
know the actual size of the temporary slot until assign_stack_local
......@@ -797,6 +780,7 @@ assign_stack_temp_for_type (mode, size, keep, type)
p->in_use = 1;
p->addr_taken = 0;
p->rtl_expr = seq_rtl_expr;
p->type = type;
if (keep == 2)
{
......@@ -819,10 +803,23 @@ assign_stack_temp_for_type (mode, size, keep, type)
RTX_UNCHANGING_P (p->slot) = 0;
MEM_IN_STRUCT_P (p->slot) = 0;
MEM_SCALAR_P (p->slot) = 0;
MEM_ALIAS_SET (p->slot) = alias_set;
MEM_VOLATILE_P (p->slot) = 0;
/* If we know the alias set for the memory that will be used, use
it. If there's no TYPE, then we don't know anything about the
alias set for the memory. */
if (type)
MEM_ALIAS_SET (p->slot) = get_alias_set (type);
else
MEM_ALIAS_SET (p->slot) = 0;
/* If a type is specified, set the relevant flags. */
if (type != 0)
MEM_SET_IN_STRUCT_P (p->slot, AGGREGATE_TYPE_P (type));
{
RTX_UNCHANGING_P (p->slot) = TYPE_READONLY (type);
MEM_VOLATILE_P (p->slot) = TYPE_VOLATILE (type);
MEM_SET_IN_STRUCT_P (p->slot, AGGREGATE_TYPE_P (type));
}
return p->slot;
}
......@@ -1509,6 +1506,7 @@ put_reg_into_stack (function, reg, type, promoted_mode, decl_mode, volatile_p,
/* Make sure that all refs to the variable, previously made
when it was a register, are fixed up to be valid again.
See function above for meaning of arguments. */
static void
schedule_fixup_var_refs (function, reg, type, promoted_mode, ht)
struct function *function;
......@@ -7453,6 +7451,7 @@ mark_temp_slot (t)
ggc_mark_rtx (t->slot);
ggc_mark_rtx (t->address);
ggc_mark_tree (t->rtl_expr);
ggc_mark_tree (t->type);
t = t->next;
}
......
......@@ -716,14 +716,9 @@ expand_inline_function (fndecl, parms, target, ignore, type,
if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
&& REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
{
rtx stack_slot
= assign_stack_temp (TYPE_MODE (TREE_TYPE (arg)),
int_size_in_bytes (TREE_TYPE (arg)), 1);
MEM_SET_IN_STRUCT_P (stack_slot,
AGGREGATE_TYPE_P (TREE_TYPE (arg)));
rtx stack_slot = assign_temp (TREE_TYPE (arg), 1, 1, 1);
store_expr (arg, stack_slot, 0);
arg_vals[i] = XEXP (stack_slot, 0);
invisiref = 1;
}
......
......@@ -1757,7 +1757,10 @@ expand_asm_operands (string, outputs, inputs, clobbers, vol, filename, line)
|| GET_CODE (op) == CONCAT)
{
tree type = TREE_TYPE (TREE_VALUE (tail));
rtx memloc = assign_temp (type, 1, 1, 1);
tree qual_type = build_qualified_type (type,
(TYPE_QUALS (type)
| TYPE_QUAL_CONST));
rtx memloc = assign_temp (qual_type, 1, 1, 1);
emit_move_insn (memloc, op);
op = memloc;
......@@ -3100,8 +3103,10 @@ expand_return (retval)
{
/* Calculate the return value into a temporary (usually a pseudo
reg). */
val = assign_temp (TREE_TYPE (DECL_RESULT (current_function_decl)),
0, 0, 1);
tree ot = TREE_TYPE (DECL_RESULT (current_function_decl));
tree nt = build_qualified_type (ot, TYPE_QUALS (ot) | TYPE_QUAL_CONST);
val = assign_temp (nt, 0, 0, 1);
val = expand_expr (retval_rhs, val, GET_MODE (val), 0);
val = force_not_mem (val);
emit_queue ();
......@@ -3822,12 +3827,13 @@ expand_decl (decl)
if (type == error_mark_node)
DECL_RTL (decl) = gen_rtx_MEM (BLKmode, const0_rtx);
else if (DECL_SIZE (decl) == 0)
/* Variable with incomplete type. */
{
if (DECL_INITIAL (decl) == 0)
/* Error message was already done; now avoid a crash. */
DECL_RTL (decl) = assign_stack_temp (DECL_MODE (decl), 0, 1);
DECL_RTL (decl) = gen_rtx_MEM (BLKmode, const0_rtx);
else
/* An initializer is going to decide the size of this array.
Until we know the size, represent its address with a reg. */
......@@ -4735,18 +4741,16 @@ add_case_node (low, high, label, duplicate)
}
}
/* Add this label to the chain, and succeed.
Copy LOW, HIGH so they are on temporary rather than momentary
obstack and will thus survive till the end of the case statement. */
/* Add this label to the chain, and succeed. */
r = (struct case_node *) xmalloc (sizeof (struct case_node));
r->low = copy_node (low);
r->low = low;
/* If the bounds are equal, turn this into the one-value case. */
if (tree_int_cst_equal (low, high))
r->high = r->low;
else
r->high = copy_node (high);
r->high = high;
r->code_label = label;
expand_label (label);
......
......@@ -2623,6 +2623,10 @@ extern void rest_of_type_compilation PARAMS ((tree, int));
/* In alias.c */
extern void record_component_aliases PARAMS ((tree));
extern HOST_WIDE_INT get_alias_set PARAMS ((tree));
extern int alias_sets_conflict_p PARAMS ((HOST_WIDE_INT,
HOST_WIDE_INT));
extern int readonly_fields_p PARAMS ((tree));
extern int objects_must_conflict_p PARAMS ((tree, tree));
/* In c-common.c */
extern HOST_WIDE_INT lang_get_alias_set PARAMS ((tree));
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment