Commit 36edd3cc, authored and committed by Bernd Schmidt

Change memory management and constant pool handling for nested functions to match that of normal functions; add init_machine_status mechanism.

From-SVN: r29101
parent 1b63ada4
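For orientation before the per-file hunks: the commit retires the save_386_machine_status / restore_386_machine_status copying of global backend state and instead gives every function its own struct machine_function, allocated through a new init_machine_status hook that the middle end invokes from prepare_function_start. A minimal standalone sketch of that hook pattern follows; the types are simplified stand-ins, ix86_like_init_machine_status is illustrative (the committed function is ix86_init_machine_status, shown in a hunk below), and calloc stands in for GCC's xmalloc.

#include <stdio.h>
#include <stdlib.h>

/* Simplified stand-ins for GCC's types from function.h and the backend;
   only the shape of the mechanism is modeled here.  */
struct machine_function { int stack_locals[4]; };
struct function { struct machine_function *machine; };

/* The new hook: a backend assigns it once (the i386 port does so in
   override_options), and the middle end calls it for every function.  */
static void (*init_machine_status) (struct function *);

static void
ix86_like_init_machine_status (struct function *p)
{
  /* Fresh, zeroed per-function backend state, as ix86_init_machine_status
     allocates with xmalloc in the hunk below.  */
  p->machine = calloc (1, sizeof (struct machine_function));
}

static void
prepare_function_start (struct function *f)
{
  /* Mirrors the call added to prepare_function_start in function.c.  */
  if (init_machine_status)
    (*init_machine_status) (f);
}

int
main (void)
{
  struct function outer = { 0 }, nested = { 0 };

  init_machine_status = ix86_like_init_machine_status;
  prepare_function_start (&outer);
  prepare_function_start (&nested);
  /* Each function, nested or not, owns its state; nothing has to be
     saved and restored around nested-function compilation.  */
  printf ("distinct state: %d\n", outer.machine != nested.machine);
  free (outer.machine);
  free (nested.machine);
  return 0;
}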
......@@ -168,6 +168,18 @@ enum reg_class const regclass_map[FIRST_PSEUDO_REGISTER] =
struct rtx_def *ix86_compare_op0 = NULL_RTX;
struct rtx_def *ix86_compare_op1 = NULL_RTX;
#define MAX_386_STACK_LOCALS 2
/* Define the structure for the machine field in struct function. */
struct machine_function
{
rtx stack_locals[(int) MAX_MACHINE_MODE][MAX_386_STACK_LOCALS];
};
static int pic_label_no = 0;
#define ix86_stack_locals (current_function->machine->stack_locals)
/* which cpu are we scheduling for */
enum processor_type ix86_cpu;
......@@ -235,6 +247,7 @@ static void ix86_dump_ppro_packet PROTO ((FILE *));
static void ix86_reorder_insn PROTO ((rtx *, rtx *));
static rtx * ix86_pent_find_pair PROTO ((rtx *, rtx *, enum attr_pent_pair,
rtx));
static void ix86_init_machine_status PROTO ((struct function *));
struct ix86_address
{
......@@ -338,6 +351,9 @@ override_options ()
target_flags |= processor_target_table[ix86_cpu].target_enable;
target_flags &= ~processor_target_table[ix86_cpu].target_disable;
/* Arrange to set up i386_stack_locals for all functions. */
init_machine_status = ix86_init_machine_status;
/* Validate registers in register allocation order. */
if (ix86_reg_alloc_order)
{
......@@ -4940,58 +4956,23 @@ ix86_expand_strlensi_unroll_1 (out, align_rtx, scratch)
emit_label (end_0_label);
}
#define MAX_386_STACK_LOCALS 2
static rtx ix86_stack_locals[(int) MAX_MACHINE_MODE][MAX_386_STACK_LOCALS];
/* Define the structure for the machine field in struct function. */
struct machine_function
{
rtx ix86_stack_locals[(int) MAX_MACHINE_MODE][MAX_386_STACK_LOCALS];
};
/* Functions to save and restore ix86_stack_locals.
These will be called, via pointer variables,
from push_function_context and pop_function_context. */
void
save_386_machine_status (p)
struct function *p;
{
p->machine
= (struct machine_function *) xmalloc (sizeof (struct machine_function));
bcopy ((char *) ix86_stack_locals, (char *) p->machine->ix86_stack_locals,
sizeof ix86_stack_locals);
}
void
restore_386_machine_status (p)
struct function *p;
{
bcopy ((char *) p->machine->ix86_stack_locals, (char *) ix86_stack_locals,
sizeof ix86_stack_locals);
free (p->machine);
p->machine = NULL;
}
/* Clear stack slot assignments remembered from previous functions.
This is called from INIT_EXPANDERS once before RTL is emitted for each
function. */
void
clear_386_stack_locals ()
static void
ix86_init_machine_status (p)
struct function *p;
{
enum machine_mode mode;
int n;
p->machine
= (struct machine_function *) xmalloc (sizeof (struct machine_function));
for (mode = VOIDmode; (int) mode < (int) MAX_MACHINE_MODE;
mode = (enum machine_mode) ((int) mode + 1))
for (n = 0; n < MAX_386_STACK_LOCALS; n++)
ix86_stack_locals[(int) mode][n] = NULL_RTX;
/* Arrange to save and restore ix86_stack_locals around nested functions. */
save_machine_status = save_386_machine_status;
restore_machine_status = restore_386_machine_status;
}
/* Return a MEM corresponding to a stack slot with mode MODE.
......
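The accessor this comment introduces (assign_386_stack_local, per the i386.h declarations below) is elided from this hunk. As an illustrative sketch only, not the verbatim committed body, a lazy per-function slot cache built on the remapped ix86_stack_locals macro would look roughly like this, with all types and helpers (rtx, NULL_RTX, assign_stack_local, GET_MODE_SIZE) coming from GCC's own headers:

/* Illustrative sketch.  With ix86_stack_locals now expanding to
   current_function->machine->stack_locals, a slot is created on first
   use and cached in the per-function array.  */
rtx
assign_386_stack_local (mode, n)
     enum machine_mode mode;
     int n;
{
  if (n < 0 || n >= MAX_386_STACK_LOCALS)
    abort ();

  if (ix86_stack_locals[(int) mode][n] == NULL_RTX)
    ix86_stack_locals[(int) mode][n]
      = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);

  return ix86_stack_locals[(int) mode][n];
}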
......@@ -1603,13 +1603,6 @@ do \
} \
while (0)
/* Initialize data used by insn expanders. This is called from
init_emit, once for each function, before code is generated.
For 386, clear stack slot assignments remembered from previous
functions. */
#define INIT_EXPANDERS clear_386_stack_locals ()
/* The `FINALIZE_PIC' macro serves as a hook to emit these special
codes once the function is being compiled into assembly code, but
not before. (It is not done before, because in the case of
......@@ -2498,9 +2491,6 @@ extern void ix86_split_ashrdi PROTO((rtx *, rtx));
extern void ix86_split_lshrdi PROTO((rtx *, rtx));
extern void ix86_expand_strlensi_unroll_1 PROTO((rtx, rtx, rtx));
extern void save_386_machine_status PROTO((struct function *));
extern void restore_386_machine_status PROTO((struct function *));
extern void clear_386_stack_locals PROTO((void));
extern rtx assign_386_stack_local PROTO((enum machine_mode, int));
extern int ix86_attr_length_default PROTO((rtx));
......
......@@ -117,6 +117,7 @@ static int virtuals_instantiated;
/* These variables hold pointers to functions to
save and restore machine-specific data,
in push_function_context and pop_function_context. */
void (*init_machine_status) PROTO((struct function *));
void (*save_machine_status) PROTO((struct function *));
void (*restore_machine_status) PROTO((struct function *));
......@@ -300,7 +301,15 @@ void
push_function_context_to (context)
tree context;
{
struct function *p;
struct function *p, *context_data;
if (context)
{
context_data = (context == current_function_decl
? current_function
: find_function_data (context));
context_data->contains_functions = 1;
}
if (current_function == 0)
init_dummy_function_start ();
......@@ -311,8 +320,7 @@ push_function_context_to (context)
p->decl = current_function_decl;
p->fixup_var_refs_queue = 0;
save_tree_status (p, context);
save_varasm_status (p, context);
save_tree_status (p);
if (save_lang_status)
(*save_lang_status) (p);
if (save_machine_status)
......@@ -340,14 +348,11 @@ pop_function_context_from (context)
current_function = p;
outer_function_chain = p->next;
current_function_contains_functions
|= p->inline_obstacks || context == current_function_decl;
current_function_decl = p->decl;
reg_renumber = 0;
restore_tree_status (p, context);
restore_tree_status (p);
restore_emit_status (p);
restore_varasm_status (p);
if (restore_machine_status)
(*restore_machine_status) (p);
......@@ -365,7 +370,8 @@ pop_function_context_from (context)
virtuals_instantiated = 0;
}
void pop_function_context ()
void
pop_function_context ()
{
pop_function_context_from (current_function_decl);
}
......@@ -5576,7 +5582,7 @@ prepare_function_start ()
/* We haven't done register allocation yet. */
reg_renumber = 0;
init_const_rtx_hash_table ();
init_varasm_status (current_function);
/* Set if a call to setjmp is seen. */
current_function_calls_setjmp = 0;
......@@ -5640,6 +5646,9 @@ prepare_function_start ()
inhibit_defer_pop = 0;
current_function_outgoing_args_size = 0;
if (init_machine_status)
(*init_machine_status) (current_function);
}
/* Initialize the rtl expansion mechanism so that we can do simple things
......
......@@ -182,6 +182,7 @@ struct function
struct stmt_status *stmt;
struct expr_status *expr;
struct emit_status *emit;
struct varasm_status *varasm;
/* For function.c. */
......@@ -415,12 +416,9 @@ struct function
struct obstack *expression_obstack;
struct obstack *saveable_obstack;
struct obstack *rtl_obstack;
struct simple_obstack_stack *inline_obstacks;
/* For integrate.c. We duplicate some of the fields so that
save_for_inline_copying can keep two versions. */
/* For integrate.c. */
int inlinable;
struct emit_status *inl_emit;
/* This is in fact an rtvec. */
void *original_arg_vector;
tree original_decl_initial;
......@@ -448,13 +446,6 @@ struct function
/* If some insns can be deferred to the delay slots of the epilogue, the
delay list for them is recorded here. */
rtx epilogue_delay_list;
/* For varasm. */
struct constant_descriptor **const_rtx_hash_table;
struct pool_sym **const_rtx_sym_hash_table;
struct pool_constant *first_pool, *last_pool;
int pool_offset;
rtx const_double_chain;
};
extern struct function *current_function;
......@@ -541,6 +532,7 @@ extern HOST_WIDE_INT get_func_frame_size PROTO((struct function *));
/* These variables hold pointers to functions to
save and restore machine-specific data,
in push_function_context and pop_function_context. */
extern void (*init_machine_status) PROTO((struct function *));
extern void (*save_machine_status) PROTO((struct function *));
extern void (*restore_machine_status) PROTO((struct function *));
......@@ -549,10 +541,8 @@ extern void (*save_lang_status) PROTO((struct function *));
extern void (*restore_lang_status) PROTO((struct function *));
/* Save and restore status information for a nested function. */
extern void save_tree_status PROTO((struct function *, tree));
extern void restore_tree_status PROTO((struct function *, tree));
extern void save_varasm_status PROTO((struct function *, tree));
extern void restore_varasm_status PROTO((struct function *));
extern void save_tree_status PROTO((struct function *));
extern void restore_tree_status PROTO((struct function *));
extern void restore_emit_status PROTO((struct function *));
extern rtx get_first_block_beg PROTO((void));
......
......@@ -60,22 +60,15 @@ extern struct obstack *function_maybepermanent_obstack;
: (8 * (8 + list_length (DECL_ARGUMENTS (DECL)))))
#endif
static rtvec initialize_for_inline PROTO((tree, int));
static rtvec initialize_for_inline PROTO((tree));
static void adjust_copied_decl_tree PROTO((tree));
static tree copy_decl_list PROTO((tree));
static tree copy_decl_tree PROTO((tree));
static void copy_decl_rtls PROTO((tree));
static void save_constants PROTO((rtx *));
static void note_modified_parmregs PROTO((rtx, rtx));
static rtx copy_for_inline PROTO((rtx));
static void integrate_parm_decls PROTO((tree, struct inline_remap *,
rtvec));
static void integrate_decl_tree PROTO((tree, int,
struct inline_remap *));
static void save_constants_in_decl_trees PROTO ((tree));
static void subst_constants PROTO((rtx *, rtx,
struct inline_remap *));
static void restore_constants PROTO((rtx *));
static void set_block_origin_self PROTO((tree));
static void set_decl_origin_self PROTO((tree));
static void set_block_abstract_flags PROTO((tree, int));
......@@ -95,6 +88,11 @@ static tree copy_and_set_decl_abstract_origin PROTO((tree));
int inline_max_insns = 10000;
/* Used by copy_rtx_and_substitute; this indicates whether the function is
called for the purpose of inlining or some other purpose (e.g. loop
unrolling). This affects how constant pool references are handled.
This variable points to the struct function of the function being inlined. */
static struct function *inlining = 0;
/* Returns the Ith entry in the label_map contained in MAP. If the
Ith entry has not yet been set, return a fresh label. This function
......@@ -231,63 +229,20 @@ function_cannot_inline_p (fndecl)
return 0;
}
/* Variables used within save_for_inline. */
/* Mapping from old pseudo-register to new pseudo-registers.
The first element of this map is reg_map[FIRST_PSEUDO_REGISTER].
It is allocated in `save_for_inline' and `expand_inline_function',
and deallocated on exit from each of those routines. */
static rtx *reg_map;
/* Mapping from old code-labels to new code-labels.
The first element of this map is label_map[min_labelno].
It is allocated in `save_for_inline' and `expand_inline_function',
and deallocated on exit from each of those routines. */
static rtx *label_map;
/* Mapping from old insn uid's to copied insns.
It is allocated in `save_for_inline' and `expand_inline_function',
and deallocated on exit from each of those routines. */
static rtx *insn_map;
/* Map pseudo reg number into the PARM_DECL for the parm living in the reg.
Zero for a reg that isn't a parm's home.
Only reg numbers less than max_parm_reg are mapped here. */
static tree *parmdecl_map;
/* When an insn is being copied by copy_for_inline,
this is nonzero if we have copied an ASM_OPERANDS.
In that case, it is the original input-operand vector. */
static rtvec orig_asm_operands_vector;
/* When an insn is being copied by copy_for_inline,
this is nonzero if we have copied an ASM_OPERANDS.
In that case, it is the copied input-operand vector. */
static rtvec copy_asm_operands_vector;
/* Likewise, this is the copied constraints vector. */
static rtvec copy_asm_constraints_vector;
/* In save_for_inline, nonzero if past the parm-initialization insns. */
static int in_nonparm_insns;
/* subroutines passed to duplicate_eh_handlers to map exception labels */
static rtx
save_for_inline_eh_labelmap (label)
rtx label;
{
int index = CODE_LABEL_NUMBER (label);
return label_map[index];
}
/* Subroutine for `save_for_inline{copying,nocopy}'. Performs initialization
/* Subroutine for `save_for_inline_nocopy'. Performs initialization
needed to save FNDECL's insns and info for future inline expansion. */
static rtvec
initialize_for_inline (fndecl, copy)
initialize_for_inline (fndecl)
tree fndecl;
int copy;
{
int i;
rtvec arg_vector;
......@@ -302,7 +257,6 @@ initialize_for_inline (fndecl, copy)
parms = TREE_CHAIN (parms), i++)
{
rtx p = DECL_RTL (parms);
int copied_incoming = 0;
/* If we have (mem (addressof (mem ...))), use the inner MEM since
otherwise the copy_rtx call below will not unshare the MEM since
......@@ -311,25 +265,6 @@ initialize_for_inline (fndecl, copy)
&& GET_CODE (XEXP (XEXP (p, 0), 0)) == MEM)
p = XEXP (XEXP (p, 0), 0);
if (GET_CODE (p) == MEM && copy)
{
/* Copy the rtl so that modifications of the addresses
later in compilation won't affect this arg_vector.
Virtual register instantiation can screw the address
of the rtl. */
rtx new = copy_rtx (p);
/* Don't leave the old copy anywhere in this decl. */
if (DECL_RTL (parms) == DECL_INCOMING_RTL (parms)
|| (GET_CODE (DECL_RTL (parms)) == MEM
&& GET_CODE (DECL_INCOMING_RTL (parms)) == MEM
&& (XEXP (DECL_RTL (parms), 0)
== XEXP (DECL_INCOMING_RTL (parms), 0))))
DECL_INCOMING_RTL (parms) = new, copied_incoming = 1;
DECL_RTL (parms) = new;
}
RTVEC_ELT (arg_vector, i) = p;
if (GET_CODE (p) == REG)
......@@ -348,23 +283,6 @@ initialize_for_inline (fndecl, copy)
/* This flag is cleared later
if the function ever modifies the value of the parm. */
TREE_READONLY (parms) = 1;
/* Copy DECL_INCOMING_RTL if not done already. This can
happen if DECL_RTL is a reg. */
if (copy && ! copied_incoming)
{
p = DECL_INCOMING_RTL (parms);
/* If we have (mem (addressof (mem ...))), use the inner MEM since
otherwise the copy_rtx call below will not unshare the MEM since
it shares ADDRESSOF. */
if (GET_CODE (p) == MEM && GET_CODE (XEXP (p, 0)) == ADDRESSOF
&& GET_CODE (XEXP (XEXP (p, 0), 0)) == MEM)
p = XEXP (XEXP (p, 0), 0);
if (GET_CODE (p) == MEM)
DECL_INCOMING_RTL (parms) = copy_rtx (p);
}
}
return arg_vector;
......@@ -394,314 +312,6 @@ adjust_copied_decl_tree (block)
adjust_copied_decl_tree (subblock);
}
/* Make the insns and PARM_DECLs of the current function permanent
and record other information in DECL_SAVED_INSNS to allow inlining
of this function in subsequent calls.
This function is called when we are going to immediately compile
the insns for FNDECL. The insns in maybepermanent_obstack cannot be
modified by the compilation process, so we copy all of them to
new storage and consider the new insns to be the insn chain to be
compiled. Our caller (rest_of_compilation) saves the original
DECL_INITIAL and DECL_ARGUMENTS; here we copy them. */
/* ??? The nonlocal_label list should be adjusted also. However, since
a function that contains a nested function never gets inlined currently,
the nonlocal_label list will always be empty, so we don't worry about
it for now. */
void
save_for_inline_copying (fndecl)
tree fndecl;
{
rtvec argvec;
rtx new_first_insn, new_last_insn, insn;
int max_labelno, min_labelno, i, len;
int max_reg;
int max_uid;
rtx first_nonparm_insn;
char *new, *new1;
rtx *new_parm_reg_stack_loc;
rtx *new2;
struct emit_status *es
= (struct emit_status *) xmalloc (sizeof (struct emit_status));
/* Make and emit a return-label if we have not already done so.
Do this before recording the bounds on label numbers. */
if (return_label == 0)
{
return_label = gen_label_rtx ();
emit_label (return_label);
}
*es = *current_function->emit;
/* Get some bounds on the labels and registers used. */
max_labelno = max_label_num ();
min_labelno = get_first_label_num ();
max_reg = max_reg_num ();
/* Set up PARMDECL_MAP which maps pseudo-reg number to its PARM_DECL.
Later we set TREE_READONLY to 0 if the parm is modified inside the fn.
Also set up ARG_VECTOR, which holds the unmodified DECL_RTX values
for the parms, prior to elimination of virtual registers.
These values are needed for substituting parms properly. */
parmdecl_map = (tree *) alloca (max_parm_reg * sizeof (tree));
argvec = initialize_for_inline (fndecl, 1);
if (current_function_uses_const_pool)
{
/* Replace any constant pool references with the actual constant. We
will put the constants back in the copy made below. */
for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
{
save_constants (&PATTERN (insn));
if (REG_NOTES (insn))
save_constants (&REG_NOTES (insn));
}
/* Also scan all decls, and replace any constant pool references with the
actual constant. */
save_constants_in_decl_trees (DECL_INITIAL (fndecl));
/* Clear out the constant pool so that we can recreate it with the
copied constants below. */
init_const_rtx_hash_table ();
clear_const_double_mem ();
}
max_uid = get_max_uid ();
/* We have now allocated all that needs to be allocated permanently
on the rtx obstack. Set our high-water mark, so that we
can free the rest of this when the time comes. */
preserve_data ();
/* Copy the chain insns of this function.
Install the copied chain as the insns of this function,
for continued compilation;
the original chain is recorded as the DECL_SAVED_INSNS
for inlining future calls. */
/* If there are insns that copy parms from the stack into pseudo registers,
those insns are not copied. `expand_inline_function' must
emit the correct code to handle such things. */
insn = get_insns ();
if (GET_CODE (insn) != NOTE)
abort ();
new_first_insn = rtx_alloc (NOTE);
NOTE_SOURCE_FILE (new_first_insn) = NOTE_SOURCE_FILE (insn);
NOTE_LINE_NUMBER (new_first_insn) = NOTE_LINE_NUMBER (insn);
INSN_UID (new_first_insn) = INSN_UID (insn);
PREV_INSN (new_first_insn) = NULL;
NEXT_INSN (new_first_insn) = NULL;
new_last_insn = new_first_insn;
/* Each pseudo-reg in the old insn chain must have a unique rtx in the copy.
Make these new rtx's now, and install them in regno_reg_rtx, so they
will be the official pseudo-reg rtx's for the rest of compilation. */
reg_map = (rtx *) savealloc (es->regno_pointer_flag_length * sizeof (rtx));
len = sizeof (struct rtx_def) + (GET_RTX_LENGTH (REG) - 1) * sizeof (rtunion);
for (i = max_reg - 1; i > LAST_VIRTUAL_REGISTER; i--)
reg_map[i] = (rtx)obstack_copy (function_maybepermanent_obstack,
regno_reg_rtx[i], len);
es->x_regno_reg_rtx = reg_map;
/* Put copies of all the virtual register rtx into the new regno_reg_rtx. */
init_virtual_regs (es);
/* Likewise each label rtx must have a unique rtx as its copy. */
/* We used to use alloca here, but the size of what it would try to
allocate would occasionally cause it to exceed the stack limit and
cause unpredictable core dumps. Some examples were > 2Mb in size. */
label_map = (rtx *) xmalloc ((max_labelno) * sizeof (rtx));
for (i = min_labelno; i < max_labelno; i++)
label_map[i] = gen_label_rtx ();
/* Likewise for parm_reg_stack_slot. */
new_parm_reg_stack_loc = (rtx *) savealloc (max_parm_reg * sizeof (rtx));
for (i = 0; i < max_parm_reg; i++)
new_parm_reg_stack_loc[i] = copy_for_inline (parm_reg_stack_loc[i]);
parm_reg_stack_loc = new_parm_reg_stack_loc;
/* Record the mapping of old insns to copied insns. */
insn_map = (rtx *) alloca (max_uid * sizeof (rtx));
bzero ((char *) insn_map, max_uid * sizeof (rtx));
/* Get the insn which signals the end of parameter setup code. */
first_nonparm_insn = get_first_nonparm_insn ();
/* Copy any entries in regno_reg_rtx or DECL_RTLs that reference MEM
(the former occurs when a variable has its address taken)
since these may be shared and can be changed by virtual
register instantiation. DECL_RTL values for our arguments
have already been copied by initialize_for_inline. */
for (i = LAST_VIRTUAL_REGISTER + 1; i < max_reg; i++)
if (GET_CODE (regno_reg_rtx[i]) == MEM)
XEXP (regno_reg_rtx[i], 0)
= copy_for_inline (XEXP (regno_reg_rtx[i], 0));
/* Copy the parm_reg_stack_loc array, and substitute for all of the rtx
contained in it. */
new2 = (rtx *) savealloc (max_parm_reg * sizeof (rtx));
bcopy ((char *) parm_reg_stack_loc, (char *) new2,
max_parm_reg * sizeof (rtx));
parm_reg_stack_loc = new2;
for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; ++i)
if (parm_reg_stack_loc[i])
parm_reg_stack_loc[i] = copy_for_inline (parm_reg_stack_loc[i]);
/* Copy the tree of subblocks of the function, and the decls in them.
We will use the copy for compiling this function, then restore the original
subblocks and decls for use when inlining this function.
Several parts of the compiler modify BLOCK trees. In particular,
instantiate_virtual_regs will instantiate any virtual regs
mentioned in the DECL_RTLs of the decls, and loop
unrolling will replicate any BLOCK trees inside an unrolled loop.
The modified subblocks or DECL_RTLs would be incorrect for the original rtl
which we will use for inlining. The rtl might even contain pseudoregs
whose space has been freed. */
DECL_INITIAL (fndecl) = copy_decl_tree (DECL_INITIAL (fndecl));
DECL_ARGUMENTS (fndecl) = copy_decl_list (DECL_ARGUMENTS (fndecl));
/* Now copy each DECL_RTL which is a MEM,
so it is safe to modify their addresses. */
copy_decl_rtls (DECL_INITIAL (fndecl));
/* The fndecl node acts as its own progenitor, so mark it as such. */
DECL_ABSTRACT_ORIGIN (fndecl) = fndecl;
/* Now copy the chain of insns. Do this twice. The first copy the insn
itself and its body. The second time copy of REG_NOTES. This is because
a REG_NOTE may have a forward pointer to another insn. */
for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
{
rtx copy;
orig_asm_operands_vector = 0;
if (insn == first_nonparm_insn)
in_nonparm_insns = 1;
switch (GET_CODE (insn))
{
case NOTE:
/* No need to keep these. */
if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED)
continue;
copy = rtx_alloc (NOTE);
NOTE_LINE_NUMBER (copy) = NOTE_LINE_NUMBER (insn);
if (NOTE_LINE_NUMBER (insn) != NOTE_INSN_BLOCK_END)
NOTE_SOURCE_FILE (copy) = NOTE_SOURCE_FILE (insn);
else
{
NOTE_SOURCE_FILE (insn) = (char *) copy;
NOTE_SOURCE_FILE (copy) = 0;
}
if (NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_BEG
|| NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_END)
{
int new_region = CODE_LABEL_NUMBER
(label_map[NOTE_BLOCK_NUMBER (copy)]);
/* we have to duplicate the handlers for the original */
if (NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_BEG)
duplicate_eh_handlers (NOTE_BLOCK_NUMBER (copy), new_region,
save_for_inline_eh_labelmap);
/* We have to forward these both to match the new exception
region. */
NOTE_BLOCK_NUMBER (copy) = new_region;
}
RTX_INTEGRATED_P (copy) = RTX_INTEGRATED_P (insn);
break;
case INSN:
case JUMP_INSN:
case CALL_INSN:
copy = rtx_alloc (GET_CODE (insn));
if (GET_CODE (insn) == CALL_INSN)
CALL_INSN_FUNCTION_USAGE (copy)
= copy_for_inline (CALL_INSN_FUNCTION_USAGE (insn));
PATTERN (copy) = copy_for_inline (PATTERN (insn));
INSN_CODE (copy) = -1;
LOG_LINKS (copy) = NULL_RTX;
RTX_INTEGRATED_P (copy) = RTX_INTEGRATED_P (insn);
break;
case CODE_LABEL:
copy = label_map[CODE_LABEL_NUMBER (insn)];
LABEL_NAME (copy) = LABEL_NAME (insn);
break;
case BARRIER:
copy = rtx_alloc (BARRIER);
break;
default:
abort ();
}
INSN_UID (copy) = INSN_UID (insn);
insn_map[INSN_UID (insn)] = copy;
NEXT_INSN (new_last_insn) = copy;
PREV_INSN (copy) = new_last_insn;
new_last_insn = copy;
}
adjust_copied_decl_tree (DECL_INITIAL (fndecl));
/* Now copy the REG_NOTES. */
for (insn = NEXT_INSN (get_insns ()); insn; insn = NEXT_INSN (insn))
if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
&& insn_map[INSN_UID(insn)])
REG_NOTES (insn_map[INSN_UID (insn)])
= copy_for_inline (REG_NOTES (insn));
NEXT_INSN (new_last_insn) = NULL;
/* Make new versions of the register tables. */
new = (char *) savealloc (es->regno_pointer_flag_length);
memcpy (new, es->regno_pointer_flag, es->regno_pointer_flag_length);
new1 = (char *) savealloc (es->regno_pointer_flag_length);
memcpy (new1, es->regno_pointer_align, es->regno_pointer_flag_length);
es->regno_pointer_flag = new;
es->regno_pointer_align = new1;
free (label_map);
current_function->inl_max_label_num = max_label_num ();
current_function->inl_last_parm_insn = current_function->x_last_parm_insn;
current_function->original_arg_vector = argvec;
current_function->original_decl_initial = DECL_INITIAL (fndecl);
/* Use the copy we made for compiling the function now, and
use the original values for inlining. */
current_function->inl_emit = current_function->emit;
current_function->emit = es;
set_new_first_and_last_insn (new_first_insn, new_last_insn);
DECL_SAVED_INSNS (fndecl) = current_function;
}
/* Copy NODE (as with copy_node). NODE must be a DECL. Set the
DECL_ABSTRACT_ORIGIN for the new node accordingly. */
......@@ -723,81 +333,6 @@ copy_and_set_decl_abstract_origin (node)
return copy;
}
/* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
For example, this can copy a list made of TREE_LIST nodes. While copying,
set DECL_ABSTRACT_ORIGIN appropriately. */
static tree
copy_decl_list (list)
tree list;
{
tree head;
register tree prev, next;
if (list == 0)
return 0;
head = prev = copy_and_set_decl_abstract_origin (list);
next = TREE_CHAIN (list);
while (next)
{
register tree copy;
copy = copy_and_set_decl_abstract_origin (next);
TREE_CHAIN (prev) = copy;
prev = copy;
next = TREE_CHAIN (next);
}
return head;
}
/* Make a copy of the entire tree of blocks BLOCK, and return it. */
static tree
copy_decl_tree (block)
tree block;
{
tree t, vars, subblocks;
vars = copy_decl_list (BLOCK_VARS (block));
subblocks = 0;
/* Process all subblocks. */
for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
{
tree copy = copy_decl_tree (t);
TREE_CHAIN (copy) = subblocks;
subblocks = copy;
}
t = copy_node (block);
BLOCK_VARS (t) = vars;
BLOCK_SUBBLOCKS (t) = nreverse (subblocks);
/* If the BLOCK being cloned is already marked as having been instantiated
from something else, then leave that `origin' marking alone. Otherwise,
mark the clone as having originated from the BLOCK we are cloning. */
if (BLOCK_ABSTRACT_ORIGIN (t) == NULL_TREE)
BLOCK_ABSTRACT_ORIGIN (t) = block;
return t;
}
/* Copy DECL_RTLs in all decls in the given BLOCK node. */
static void
copy_decl_rtls (block)
tree block;
{
tree t;
for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
if (DECL_RTL (t) && GET_CODE (DECL_RTL (t)) == MEM)
DECL_RTL (t) = copy_for_inline (DECL_RTL (t));
/* Process all subblocks. */
for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
copy_decl_rtls (t);
}
/* Make the insns and PARM_DECLs of the current function permanent
and record other information in DECL_SAVED_INSNS to allow inlining
of this function in subsequent calls.
......@@ -841,7 +376,7 @@ save_for_inline_nocopy (fndecl)
emit_label (return_label);
}
argvec = initialize_for_inline (fndecl, 0);
argvec = initialize_for_inline (fndecl);
/* If there are insns that copy parms from the stack into pseudo registers,
those insns are not copied. `expand_inline_function' must
......@@ -867,25 +402,9 @@ save_for_inline_nocopy (fndecl)
in_nonparm_insns = 1;
if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
{
if (current_function_uses_const_pool)
{
/* Replace any constant pool references with the actual constant.
We will put the constant back if we need to write the
function out after all. */
save_constants (&PATTERN (insn));
if (REG_NOTES (insn))
save_constants (&REG_NOTES (insn));
}
/* Record what interesting things happen to our parameters. */
note_stores (PATTERN (insn), note_modified_parmregs);
}
}
/* Also scan all decls, and replace any constant pool references with the
actual constant. */
save_constants_in_decl_trees (DECL_INITIAL (fndecl));
/* We have now allocated all that needs to be allocated permanently
on the rtx obstack. Set our high-water mark, so that we
......@@ -893,7 +412,6 @@ save_for_inline_nocopy (fndecl)
preserve_data ();
current_function->inl_emit = current_function->emit;
current_function->inl_max_label_num = max_label_num ();
current_function->inl_last_parm_insn = current_function->x_last_parm_insn;
current_function->original_arg_vector = argvec;
......@@ -901,87 +419,6 @@ save_for_inline_nocopy (fndecl)
DECL_SAVED_INSNS (fndecl) = current_function;
}
/* Given PX, a pointer into an insn, search for references to the constant
pool. Replace each with a CONST that has the mode of the original
constant, contains the constant, and has RTX_INTEGRATED_P set.
Similarly, constant pool addresses not enclosed in a MEM are replaced
with an ADDRESS and CONST rtx which also gives the constant, its
mode, the mode of the address, and has RTX_INTEGRATED_P set. */
static void
save_constants (px)
rtx *px;
{
rtx x;
int i, j;
again:
x = *px;
/* If this is a CONST_DOUBLE, don't try to fix things up in
CONST_DOUBLE_MEM, because this is an infinite recursion. */
if (GET_CODE (x) == CONST_DOUBLE)
return;
else if (GET_CODE (x) == MEM && GET_CODE (XEXP (x, 0)) == SYMBOL_REF
&& CONSTANT_POOL_ADDRESS_P (XEXP (x,0)))
{
enum machine_mode const_mode = get_pool_mode (XEXP (x, 0));
rtx new = gen_rtx_CONST (const_mode, get_pool_constant (XEXP (x, 0)));
RTX_INTEGRATED_P (new) = 1;
/* If the MEM was in a different mode than the constant (perhaps we
were only looking at the low-order part), surround it with a
SUBREG so we can save both modes. */
if (GET_MODE (x) != const_mode)
{
new = gen_rtx_SUBREG (GET_MODE (x), new, 0);
RTX_INTEGRATED_P (new) = 1;
}
*px = new;
save_constants (&XEXP (*px, 0));
}
else if (GET_CODE (x) == SYMBOL_REF
&& CONSTANT_POOL_ADDRESS_P (x))
{
*px = gen_rtx_ADDRESS (GET_MODE (x),
gen_rtx_CONST (get_pool_mode (x),
get_pool_constant (x)));
save_constants (&XEXP (*px, 0));
RTX_INTEGRATED_P (*px) = 1;
}
else
{
const char *fmt = GET_RTX_FORMAT (GET_CODE (x));
int len = GET_RTX_LENGTH (GET_CODE (x));
for (i = len-1; i >= 0; i--)
{
switch (fmt[i])
{
case 'E':
for (j = 0; j < XVECLEN (x, i); j++)
save_constants (&XVECEXP (x, i, j));
break;
case 'e':
if (XEXP (x, i) == 0)
continue;
if (i == 0)
{
/* Hack tail-recursion here. */
px = &XEXP (x, 0);
goto again;
}
save_constants (&XEXP (x, i));
break;
}
}
}
}
/* Note whether a parameter is modified or not. */
static void
......@@ -996,262 +433,6 @@ note_modified_parmregs (reg, x)
TREE_READONLY (parmdecl_map[REGNO (reg)]) = 0;
}
/* Copy the rtx ORIG recursively, replacing pseudo-regs and labels
according to `reg_map' and `label_map'. The original rtl insns
will be saved for inlining; this is used to make a copy
which is used to finish compiling the inline function itself.
If we find a "saved" constant pool entry, one which was replaced with
the value of the constant, convert it back to a constant pool entry.
Since the pool wasn't touched, this should simply restore the old
address.
All other kinds of rtx are copied except those that can never be
changed during compilation. */
static rtx
copy_for_inline (orig)
rtx orig;
{
register rtx x = orig;
register rtx new;
register int i;
register enum rtx_code code;
register const char *format_ptr;
if (x == 0)
return x;
code = GET_CODE (x);
/* These types may be freely shared. */
switch (code)
{
case QUEUED:
case CONST_INT:
case PC:
case CC0:
return x;
case SYMBOL_REF:
if (! SYMBOL_REF_NEED_ADJUST (x))
return x;
return rethrow_symbol_map (x, save_for_inline_eh_labelmap);
case CONST_DOUBLE:
/* We have to make a new CONST_DOUBLE to ensure that we account for
it correctly. Using the old CONST_DOUBLE_MEM data is wrong. */
if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
{
REAL_VALUE_TYPE d;
REAL_VALUE_FROM_CONST_DOUBLE (d, x);
return CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (x));
}
else
return immed_double_const (CONST_DOUBLE_LOW (x), CONST_DOUBLE_HIGH (x),
VOIDmode);
case CONST:
/* Get constant pool entry for constant in the pool. */
if (RTX_INTEGRATED_P (x))
return validize_mem (force_const_mem (GET_MODE (x),
copy_for_inline (XEXP (x, 0))));
break;
case SUBREG:
/* Get constant pool entry, but access in different mode. */
if (RTX_INTEGRATED_P (x))
{
new = force_const_mem (GET_MODE (SUBREG_REG (x)),
copy_for_inline (XEXP (SUBREG_REG (x), 0)));
PUT_MODE (new, GET_MODE (x));
return validize_mem (new);
}
break;
case ADDRESS:
/* If not special for constant pool error. Else get constant pool
address. */
if (! RTX_INTEGRATED_P (x))
abort ();
new = force_const_mem (GET_MODE (XEXP (x, 0)),
copy_for_inline (XEXP (XEXP (x, 0), 0)));
new = XEXP (new, 0);
#ifdef POINTERS_EXTEND_UNSIGNED
if (GET_MODE (new) != GET_MODE (x))
new = convert_memory_address (GET_MODE (x), new);
#endif
return new;
case ASM_OPERANDS:
/* If a single asm insn contains multiple output operands
then it contains multiple ASM_OPERANDS rtx's that share operand 3.
We must make sure that the copied insn continues to share it. */
if (orig_asm_operands_vector == XVEC (orig, 3))
{
x = rtx_alloc (ASM_OPERANDS);
x->volatil = orig->volatil;
XSTR (x, 0) = XSTR (orig, 0);
XSTR (x, 1) = XSTR (orig, 1);
XINT (x, 2) = XINT (orig, 2);
XVEC (x, 3) = copy_asm_operands_vector;
XVEC (x, 4) = copy_asm_constraints_vector;
XSTR (x, 5) = XSTR (orig, 5);
XINT (x, 6) = XINT (orig, 6);
return x;
}
break;
case MEM:
/* A MEM is usually allowed to be shared if its address is constant
or is a constant plus one of the special registers.
We do not allow sharing of addresses that are either a special
register or the sum of a constant and a special register because
it is possible for unshare_all_rtl to copy the address into memory
that won't be saved. Although the MEM can safely be shared, and
won't be copied there, the address itself cannot be shared, and may
need to be copied.
There are also two exceptions with constants: The first is if the
constant is a LABEL_REF or the sum of the LABEL_REF
and an integer. This case can happen if we have an inline
function that supplies a constant operand to the call of another
inline function that uses it in a switch statement. In this case,
we will be replacing the LABEL_REF, so we have to replace this MEM
as well.
The second case is if we have a (const (plus (address ..) ...)).
In that case we need to put back the address of the constant pool
entry. */
if (CONSTANT_ADDRESS_P (XEXP (x, 0))
&& GET_CODE (XEXP (x, 0)) != LABEL_REF
&& ! (GET_CODE (XEXP (x, 0)) == CONST
&& (GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS
&& ((GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
== LABEL_REF)
|| (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
== ADDRESS)))))
return x;
break;
case LABEL_REF:
/* If this is a non-local label, just make a new LABEL_REF.
Otherwise, use the new label as well. */
x = gen_rtx_LABEL_REF (GET_MODE (orig),
LABEL_REF_NONLOCAL_P (orig) ? XEXP (orig, 0)
: label_map[CODE_LABEL_NUMBER (XEXP (orig, 0))]);
LABEL_REF_NONLOCAL_P (x) = LABEL_REF_NONLOCAL_P (orig);
LABEL_OUTSIDE_LOOP_P (x) = LABEL_OUTSIDE_LOOP_P (orig);
return x;
case REG:
if (REGNO (x) > LAST_VIRTUAL_REGISTER)
return reg_map [REGNO (x)];
else
return x;
case SET:
/* If a parm that gets modified lives in a pseudo-reg,
clear its TREE_READONLY to prevent certain optimizations. */
{
rtx dest = SET_DEST (x);
while (GET_CODE (dest) == STRICT_LOW_PART
|| GET_CODE (dest) == ZERO_EXTRACT
|| GET_CODE (dest) == SUBREG)
dest = XEXP (dest, 0);
if (GET_CODE (dest) == REG
&& REGNO (dest) < max_parm_reg
&& REGNO (dest) >= FIRST_PSEUDO_REGISTER
&& parmdecl_map[REGNO (dest)] != 0
/* The insn to load an arg pseudo from a stack slot
does not count as modifying it. */
&& in_nonparm_insns)
TREE_READONLY (parmdecl_map[REGNO (dest)]) = 0;
}
break;
#if 0 /* This is a good idea, but here is the wrong place for it. */
/* Arrange that CONST_INTs always appear as the second operand
if they appear, and that `frame_pointer_rtx' or `arg_pointer_rtx'
always appear as the first. */
case PLUS:
if (GET_CODE (XEXP (x, 0)) == CONST_INT
|| (XEXP (x, 1) == frame_pointer_rtx
|| (ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
&& XEXP (x, 1) == arg_pointer_rtx)))
{
rtx t = XEXP (x, 0);
XEXP (x, 0) = XEXP (x, 1);
XEXP (x, 1) = t;
}
break;
#endif
default:
break;
}
/* Replace this rtx with a copy of itself. */
x = rtx_alloc (code);
bcopy ((char *) orig, (char *) x,
(sizeof (*x) - sizeof (x->fld)
+ sizeof (x->fld[0]) * GET_RTX_LENGTH (code)));
/* Now scan the subexpressions recursively.
We can store any replaced subexpressions directly into X
since we know X is not shared! Any vectors in X
must be copied if X was copied. */
format_ptr = GET_RTX_FORMAT (code);
for (i = 0; i < GET_RTX_LENGTH (code); i++)
{
switch (*format_ptr++)
{
case 'e':
XEXP (x, i) = copy_for_inline (XEXP (x, i));
break;
case 'u':
/* Change any references to old-insns to point to the
corresponding copied insns. */
XEXP (x, i) = insn_map[INSN_UID (XEXP (x, i))];
break;
case 'E':
if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
{
register int j;
XVEC (x, i) = gen_rtvec_v (XVECLEN (x, i), XVEC (x, i)->elem);
for (j = 0; j < XVECLEN (x, i); j++)
XVECEXP (x, i, j)
= copy_for_inline (XVECEXP (x, i, j));
}
break;
}
}
if (code == ASM_OPERANDS && orig_asm_operands_vector == 0)
{
orig_asm_operands_vector = XVEC (orig, 3);
copy_asm_operands_vector = XVEC (x, 3);
copy_asm_constraints_vector = XVEC (x, 4);
}
return x;
}
/* Unfortunately, we need a global copy of const_equiv map for communication
with a function called from note_stores. Be *very* careful that this
is used properly in the presence of recursion. */
......@@ -1326,9 +507,10 @@ expand_inline_function (fndecl, parms, target, ignore, type,
tree type;
rtx structure_value_addr;
{
struct function *inlining_previous;
struct function *inl_f = DECL_SAVED_INSNS (fndecl);
tree formal, actual, block;
rtx parm_insns = inl_f->inl_emit->x_first_insn;
rtx parm_insns = inl_f->emit->x_first_insn;
rtx insns = (inl_f->inl_last_parm_insn
? NEXT_INSN (inl_f->inl_last_parm_insn)
: parm_insns);
......@@ -1337,7 +519,7 @@ expand_inline_function (fndecl, parms, target, ignore, type,
rtx insn;
int max_regno;
register int i;
int min_labelno = inl_f->inl_emit->x_first_label_num;
int min_labelno = inl_f->emit->x_first_label_num;
int max_labelno = inl_f->inl_max_label_num;
int nargs;
rtx local_return_label = 0;
......@@ -1357,7 +539,7 @@ expand_inline_function (fndecl, parms, target, ignore, type,
rtx *real_label_map = 0;
/* Allow for equivalences of the pseudos we make for virtual fp and ap. */
max_regno = inl_f->inl_emit->x_reg_rtx_no + 3;
max_regno = inl_f->emit->x_reg_rtx_no + 3;
if (max_regno < FIRST_PSEUDO_REGISTER)
abort ();
......@@ -1500,7 +682,7 @@ expand_inline_function (fndecl, parms, target, ignore, type,
= (rtx *) xmalloc ((max_labelno) * sizeof (rtx));
map->label_map = real_label_map;
inl_max_uid = (inl_f->inl_emit->x_cur_insn_uid + 1);
inl_max_uid = (inl_f->emit->x_cur_insn_uid + 1);
map->insn_map = (rtx *) alloca (inl_max_uid * sizeof (rtx));
bzero ((char *) map->insn_map, inl_max_uid * sizeof (rtx));
map->min_insnno = 0;
......@@ -1536,8 +718,8 @@ expand_inline_function (fndecl, parms, target, ignore, type,
if (map->insns_at_start == 0)
map->insns_at_start = emit_note (NULL_PTR, NOTE_INSN_DELETED);
map->regno_pointer_flag = inl_f->inl_emit->regno_pointer_flag;
map->regno_pointer_align = inl_f->inl_emit->regno_pointer_align;
map->regno_pointer_flag = inl_f->emit->regno_pointer_flag;
map->regno_pointer_align = inl_f->emit->regno_pointer_align;
/* Update the outgoing argument size to allow for those in the inlined
function. */
......@@ -1625,6 +807,12 @@ expand_inline_function (fndecl, parms, target, ignore, type,
abort ();
}
/* Tell copy_rtx_and_substitute to handle constant pool SYMBOL_REFs
specially. This function can be called recursively, so we need to
save the previous value. */
inlining_previous = inlining;
inlining = inl_f;
/* Now do the parameters that will be placed in memory. */
for (formal = DECL_ARGUMENTS (fndecl), i = 0;
......@@ -2132,6 +1320,7 @@ expand_inline_function (fndecl, parms, target, ignore, type,
free (real_label_map);
if (map)
VARRAY_FREE (map->const_equiv_varray);
inlining = inlining_previous;
return target;
}
......@@ -2237,23 +1426,6 @@ integrate_decl_tree (let, level, map)
}
}
/* Given a BLOCK node LET, search for all DECL_RTL fields, and pass them
through save_constants. */
static void
save_constants_in_decl_trees (let)
tree let;
{
tree t;
for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
if (DECL_RTL (t) != 0)
save_constants (&DECL_RTL (t));
for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
save_constants_in_decl_trees (t);
}
/* Create a new copy of an rtx.
Recursively copies the operands of the rtx,
except for those few rtx codes that are sharable.
......@@ -2506,8 +1678,38 @@ copy_rtx_and_substitute (orig, map)
remapped label. Otherwise, symbols are returned unchanged. */
if (CONSTANT_POOL_ADDRESS_P (orig))
{
rtx constant = get_pool_constant (orig);
if (GET_CODE (constant) == LABEL_REF)
struct function *f = inlining ? inlining : current_function;
rtx constant = get_pool_constant_for_function (f, orig);
enum machine_mode const_mode = get_pool_mode_for_function (f, orig);
if (inlining)
{
rtx temp = force_const_mem (const_mode,
copy_rtx_and_substitute (constant, map));
#if 0
/* Legitimizing the address here is incorrect.
Since we had a SYMBOL_REF before, we can assume it is valid
to have one in this position in the insn.
Also, change_address may create new registers. These
registers will not have valid reg_map entries. This can
cause try_constants() to fail because it assumes that all
registers in the rtx have valid reg_map entries, and it may
end up replacing one of these new registers with junk. */
if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
temp = change_address (temp, GET_MODE (temp), XEXP (temp, 0));
#endif
temp = XEXP (temp, 0);
#ifdef POINTERS_EXTEND_UNSIGNED
if (GET_MODE (temp) != GET_MODE (orig))
temp = convert_memory_address (GET_MODE (orig), temp);
#endif
return temp;
}
else if (GET_CODE (constant) == LABEL_REF)
return XEXP (force_const_mem (GET_MODE (orig),
copy_rtx_and_substitute (constant,
map)),
......@@ -2542,62 +1744,8 @@ copy_rtx_and_substitute (orig, map)
/* Make new constant pool entry for a constant
that was in the pool of the inline function. */
if (RTX_INTEGRATED_P (orig))
{
/* If this was an address of a constant pool entry that itself
had to be placed in the constant pool, it might not be a
valid address. So the recursive call below might turn it
into a register. In that case, it isn't a constant any
more, so return it. This has the potential of changing a
MEM into a REG, but we'll assume that it is safe. */
temp = copy_rtx_and_substitute (XEXP (orig, 0), map);
if (! CONSTANT_P (temp))
return temp;
return validize_mem (force_const_mem (GET_MODE (orig), temp));
}
break;
case ADDRESS:
/* If from constant pool address, make new constant pool entry and
return its address. */
if (! RTX_INTEGRATED_P (orig))
abort ();
temp
= force_const_mem (GET_MODE (XEXP (orig, 0)),
copy_rtx_and_substitute (XEXP (XEXP (orig, 0), 0),
map));
#if 0
/* Legitimizing the address here is incorrect.
The only ADDRESS rtx's that can reach here are ones created by
save_constants. Hence the operand of the ADDRESS is always valid
in this position of the instruction, since the original rtx without
the ADDRESS was valid.
The reason we don't legitimize the address here is that on the
Sparc, the caller may have a (high ...) surrounding this ADDRESS.
This code forces the operand of the address to a register, which
fails because we can not take the HIGH part of a register.
Also, change_address may create new registers. These registers
will not have valid reg_map entries. This can cause try_constants()
to fail because it assumes that all registers in the rtx have valid
reg_map entries, and it may end up replacing one of these new
registers with junk. */
if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
temp = change_address (temp, GET_MODE (temp), XEXP (temp, 0));
#endif
temp = XEXP (temp, 0);
#ifdef POINTERS_EXTEND_UNSIGNED
if (GET_MODE (temp) != GET_MODE (orig))
temp = convert_memory_address (GET_MODE (orig), temp);
#endif
return temp;
break;
case ASM_OPERANDS:
/* If a single asm insn contains multiple output operands
......@@ -2666,6 +1814,23 @@ copy_rtx_and_substitute (orig, map)
break;
case MEM:
if (inlining
&& GET_CODE (XEXP (orig, 0)) == SYMBOL_REF
&& CONSTANT_POOL_ADDRESS_P (XEXP (orig, 0)))
{
enum machine_mode const_mode = get_pool_mode_for_function (inlining, XEXP (orig, 0));
rtx constant = get_pool_constant_for_function (inlining, XEXP (orig, 0));
constant = copy_rtx_and_substitute (constant, map);
/* If this was an address of a constant pool entry that itself
had to be placed in the constant pool, it might not be a
valid address. So the recursive call might have turned it
into a register. In that case, it isn't a constant any
more, so return it. This has the potential of changing a
MEM into a REG, but we'll assume that it is safe. */
if (! CONSTANT_P (constant))
return constant;
return validize_mem (force_const_mem (const_mode, constant));
}
copy = rtx_alloc (MEM);
PUT_MODE (copy, mode);
XEXP (copy, 0) = copy_rtx_and_substitute (XEXP (orig, 0), map);
......@@ -3122,88 +2287,6 @@ mark_stores (dest, x)
}
}
/* If any CONST expressions with RTX_INTEGRATED_P are present in the rtx
pointed to by PX, they represent constants in the constant pool.
Replace these with a new memory reference obtained from force_const_mem.
Similarly, ADDRESS expressions with RTX_INTEGRATED_P represent the
address of a constant pool entry. Replace them with the address of
a new constant pool entry obtained from force_const_mem. */
static void
restore_constants (px)
rtx *px;
{
rtx x = *px;
int i, j;
const char *fmt;
if (x == 0)
return;
if (GET_CODE (x) == CONST_DOUBLE)
{
/* We have to make a new CONST_DOUBLE to ensure that we account for
it correctly. Using the old CONST_DOUBLE_MEM data is wrong. */
if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
{
REAL_VALUE_TYPE d;
REAL_VALUE_FROM_CONST_DOUBLE (d, x);
*px = CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (x));
}
else
*px = immed_double_const (CONST_DOUBLE_LOW (x), CONST_DOUBLE_HIGH (x),
VOIDmode);
}
else if (RTX_INTEGRATED_P (x) && GET_CODE (x) == CONST)
{
restore_constants (&XEXP (x, 0));
*px = validize_mem (force_const_mem (GET_MODE (x), XEXP (x, 0)));
}
else if (RTX_INTEGRATED_P (x) && GET_CODE (x) == SUBREG)
{
/* This must be (subreg/i:M1 (const/i:M2 ...) 0). */
rtx new = XEXP (SUBREG_REG (x), 0);
restore_constants (&new);
new = force_const_mem (GET_MODE (SUBREG_REG (x)), new);
PUT_MODE (new, GET_MODE (x));
*px = validize_mem (new);
}
else if (RTX_INTEGRATED_P (x) && GET_CODE (x) == ADDRESS)
{
rtx new = XEXP (force_const_mem (GET_MODE (XEXP (x, 0)),
XEXP (XEXP (x, 0), 0)),
0);
#ifdef POINTERS_EXTEND_UNSIGNED
if (GET_MODE (new) != GET_MODE (x))
new = convert_memory_address (GET_MODE (x), new);
#endif
*px = new;
}
else
{
fmt = GET_RTX_FORMAT (GET_CODE (x));
for (i = 0; i < GET_RTX_LENGTH (GET_CODE (x)); i++)
{
switch (*fmt++)
{
case 'E':
for (j = 0; j < XVECLEN (x, i); j++)
restore_constants (&XVECEXP (x, i, j));
break;
case 'e':
restore_constants (&XEXP (x, i));
break;
}
}
}
}
/* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
that it points to the node itself, thus indicating that the node is its
......@@ -3331,26 +2414,16 @@ void
output_inline_function (fndecl)
tree fndecl;
{
struct function *curf = current_function;
struct function *f = DECL_SAVED_INSNS (fndecl);
rtx last;
/* Things we allocate from here on are part of this function, not
permanent. */
temporary_allocation ();
current_function = f;
current_function_decl = fndecl;
clear_emit_caches ();
/* Find last insn and rebuild the constant pool. */
init_const_rtx_hash_table ();
for (last = get_insns (); NEXT_INSN (last); last = NEXT_INSN (last))
{
if (GET_RTX_CLASS (GET_CODE (last)) == 'i')
{
restore_constants (&PATTERN (last));
restore_constants (&REG_NOTES (last));
}
}
/* Things we allocate from here on are part of this function, not
permanent. */
temporary_allocation ();
set_new_last_label_num (f->inl_max_label_num);
......@@ -3375,6 +2448,6 @@ output_inline_function (fndecl)
/* Compile this function all the way down to assembly code. */
rest_of_compilation (fndecl);
current_function = 0;
current_function_decl = 0;
current_function = curf;
current_function_decl = curf ? curf->decl : 0;
}
......@@ -309,9 +309,6 @@ extern void defer_addressed_constants PROTO((void));
and output now all those that have been deferred. */
extern void output_deferred_addressed_constants PROTO((void));
/* Initialize constant pool hashing for next function. */
extern void init_const_rtx_hash_table PROTO((void));
/* Return the size of the constant pool. */
extern int get_pool_size PROTO((void));
......
......@@ -21,6 +21,8 @@ Boston, MA 02111-1307, USA. */
#ifndef _RTL_H
#define _RTL_H
struct function;
#include "machmode.h"
#undef FFS /* Some systems predefine this symbol; don't let it interfere. */
......@@ -1001,6 +1003,8 @@ extern rtx force_const_mem PROTO((enum machine_mode, rtx));
extern rtx force_reg PROTO((enum machine_mode, rtx));
extern rtx get_pool_constant PROTO((rtx));
extern enum machine_mode get_pool_mode PROTO((rtx));
extern rtx get_pool_constant_for_function PROTO((struct function *, rtx));
extern enum machine_mode get_pool_mode_for_function PROTO((struct function *, rtx));
extern int get_pool_offset PROTO((rtx));
extern rtx simplify_subtraction PROTO((rtx));
extern rtx assign_stack_local PROTO((enum machine_mode,
......
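The varasm.c definitions behind these two new declarations are not part of this excerpt. Since the constant pool now hangs off struct function (see the varasm_status hunk at the end), a plausible shape for them, assuming a lookup helper find_pool_constant and pool_constant fields named constant and mode (all hypothetical here), is:

/* Hypothetical sketch; find_pool_constant is an assumed helper that
   resolves ADDR in F's per-function pool tables rather than in the
   current function's.  */
rtx
get_pool_constant_for_function (f, addr)
     struct function *f;
     rtx addr;
{
  return find_pool_constant (f, addr)->constant;
}

enum machine_mode
get_pool_mode_for_function (f, addr)
     struct function *f;
     rtx addr;
{
  return find_pool_constant (f, addr)->mode;
}

This is what lets copy_rtx_and_substitute (above) read constants out of the inlined function's pool while emitting into the caller's.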
......@@ -3648,30 +3648,16 @@ rest_of_compilation (decl)
compile it by itself, defer decision till end of compilation.
finish_compilation will call rest_of_compilation again
for those functions that need to be output. Also defer those
functions that we are supposed to defer. We cannot defer
functions containing nested functions since the nested function
data is in our non-saved obstack. We cannot defer nested
functions for the same reason. */
/* If this is a nested inline, remove ADDRESSOF now so we can
finish compiling ourselves. Otherwise, wait until EOF.
We have to do this because the purge_addressof transformation
changes the DECL_RTL for many variables, which confuses integrate.
Also, save_for_inline_copying can be very expensive. */
functions that we are supposed to defer. */
if (inlinable)
{
if (decl_function_context (decl))
purge_addressof (insns);
else
DECL_DEFER_OUTPUT (decl) = 1;
}
if (! current_function_contains_functions
&& (DECL_DEFER_OUTPUT (decl)
if (DECL_DEFER_OUTPUT (decl)
|| (DECL_INLINE (decl)
&& ((! TREE_PUBLIC (decl) && ! TREE_ADDRESSABLE (decl)
&& ! flag_keep_inline_functions)
|| DECL_EXTERNAL (decl)))))
|| DECL_EXTERNAL (decl))))
{
DECL_DEFER_OUTPUT (decl) = 1;
......@@ -3720,40 +3706,6 @@ rest_of_compilation (decl)
goto exit_rest_of_compilation;
}
/* If we have to compile the function now, save its rtl and subdecls
so that its compilation will not affect what others get. */
if (inlinable || DECL_DEFER_OUTPUT (decl))
{
#ifdef DWARF_DEBUGGING_INFO
/* Generate the DWARF info for the "abstract" instance of
a function which we will generate an out-of-line instance
of almost immediately (and which we may also later generate
various inlined instances of). */
if (write_symbols == DWARF_DEBUG)
{
set_decl_abstract_flags (decl, 1);
TIMEVAR (symout_time, dwarfout_file_scope_decl (decl, 0));
set_decl_abstract_flags (decl, 0);
}
#endif
#ifdef DWARF2_DEBUGGING_INFO
/* Generate the DWARF2 info for the "abstract" instance of
a function which we will generate an out-of-line instance
of almost immediately (and which we may also later generate
various inlined instances of). */
if (write_symbols == DWARF2_DEBUG)
{
set_decl_abstract_flags (decl, 1);
TIMEVAR (symout_time, dwarf2out_decl (decl));
set_decl_abstract_flags (decl, 0);
}
#endif
saved_block_tree = DECL_INITIAL (decl);
saved_arguments = DECL_ARGUMENTS (decl);
TIMEVAR (integration_time, save_for_inline_copying (decl));
DECL_SAVED_INSNS (decl)->inlinable = inlinable;
}
/* If specified extern inline but we aren't inlining it, we are
done. This goes for anything that gets here with DECL_EXTERNAL
set, not just things with DECL_INLINE. */
......
......@@ -64,21 +64,6 @@ struct obstack *function_maybepermanent_obstack;
struct obstack maybepermanent_obstack;
/* This is a list of function_maybepermanent_obstacks for top-level inline
functions that are compiled in the middle of compiling other functions. */
struct simple_obstack_stack *toplev_inline_obstacks;
/* Former elements of toplev_inline_obstacks that have been recycled. */
struct simple_obstack_stack *extra_inline_obstacks;
/* This is a list of function_maybepermanent_obstacks for inline functions
nested in the current function that were compiled in the middle of
compiling other functions. */
struct simple_obstack_stack *inline_obstacks;
/* The contents of the current function definition are allocated
in this obstack, and all are freed at the end of the function.
For top-level functions, this is temporary_obstack.
......@@ -331,9 +316,8 @@ gcc_obstack_init (obstack)
compile; if it isn't current_function_decl, we have to play some games. */
void
save_tree_status (p, context)
save_tree_status (p)
struct function *p;
tree context;
{
p->all_types_permanent = all_types_permanent;
p->momentary_stack = momentary_stack;
......@@ -347,50 +331,10 @@ save_tree_status (p, context)
p->expression_obstack = expression_obstack;
p->saveable_obstack = saveable_obstack;
p->rtl_obstack = rtl_obstack;
p->inline_obstacks = inline_obstacks;
if (current_function_decl && context == current_function_decl)
/* Objects that need to be saved in this function can be in the nonsaved
obstack of the enclosing function since they can't possibly be needed
once it has returned. */
function_maybepermanent_obstack = function_obstack;
else
{
/* We're compiling a function which isn't nested in the current
function. We need to create a new maybepermanent_obstack for this
function, since it can't go onto any of the existing obstacks. */
struct simple_obstack_stack **head;
struct simple_obstack_stack *current;
if (context == NULL_TREE)
head = &toplev_inline_obstacks;
else
{
struct function *f = find_function_data (context);
head = &f->inline_obstacks;
}
if (context == NULL_TREE && extra_inline_obstacks)
{
current = extra_inline_obstacks;
extra_inline_obstacks = current->next;
}
else
{
current = ((struct simple_obstack_stack *)
xmalloc (sizeof (struct simple_obstack_stack)));
current->obstack
function_maybepermanent_obstack
= (struct obstack *) xmalloc (sizeof (struct obstack));
gcc_obstack_init (current->obstack);
}
function_maybepermanent_obstack = current->obstack;
current->next = *head;
*head = current;
}
gcc_obstack_init (function_maybepermanent_obstack);
maybepermanent_firstobj
= (char *) obstack_finish (function_maybepermanent_obstack);
......@@ -410,9 +354,8 @@ save_tree_status (p, context)
This is used after a nested function. */
void
restore_tree_status (p, context)
restore_tree_status (p)
struct function *p;
tree context;
{
all_types_permanent = p->all_types_permanent;
momentary_stack = p->momentary_stack;
......@@ -420,41 +363,16 @@ restore_tree_status (p, context)
obstack_free (&momentary_obstack, momentary_function_firstobj);
/* Free saveable storage used by the function just compiled and not
saved.
CAUTION: This is in function_obstack of the containing function.
So we must be sure that we never allocate from that obstack during
the compilation of a nested function if we expect it to survive
past the nested function's end. */
saved. */
obstack_free (function_maybepermanent_obstack, maybepermanent_firstobj);
/* If we were compiling a toplevel function, we can free this space now. */
if (context == NULL_TREE)
{
obstack_free (&temporary_obstack, temporary_firstobj);
obstack_free (&momentary_obstack, momentary_function_firstobj);
}
/* If we were compiling a toplevel function that we don't actually want
to save anything from, return the obstack to the pool. */
if (context == NULL_TREE
&& obstack_empty_p (function_maybepermanent_obstack))
{
struct simple_obstack_stack *current, **p = &toplev_inline_obstacks;
if ((*p) != NULL)
{
while ((*p)->obstack != function_maybepermanent_obstack)
p = &((*p)->next);
current = *p;
*p = current->next;
current->next = extra_inline_obstacks;
extra_inline_obstacks = current;
}
}
obstack_free (function_obstack, 0);
if (obstack_empty_p (function_maybepermanent_obstack))
free (function_maybepermanent_obstack);
free (function_obstack);
temporary_firstobj = p->temporary_firstobj;
......@@ -467,7 +385,6 @@ restore_tree_status (p, context)
expression_obstack = p->expression_obstack;
saveable_obstack = p->saveable_obstack;
rtl_obstack = p->rtl_obstack;
inline_obstacks = p->inline_obstacks;
}
/* Start allocating on the temporary (per function) obstack.
......@@ -484,7 +401,6 @@ temporary_allocation ()
expression_obstack = function_obstack;
rtl_obstack = saveable_obstack = function_maybepermanent_obstack;
momentary_stack = 0;
inline_obstacks = 0;
}
/* Start allocating on the permanent obstack but don't
......@@ -612,17 +528,6 @@ permanent_allocation (function_end)
obstack_free (function_maybepermanent_obstack, maybepermanent_firstobj);
obstack_free (&temp_decl_obstack, temp_decl_firstobj);
/* Free up the maybepermanent_obstacks for any of our nested functions
which were compiled at a lower level. */
while (inline_obstacks)
{
struct simple_obstack_stack *current = inline_obstacks;
inline_obstacks = current->next;
obstack_free (current->obstack, 0);
free (current->obstack);
free (current);
}
current_obstack = &permanent_obstack;
expression_obstack = &permanent_obstack;
rtl_obstack = saveable_obstack = &permanent_obstack;
......@@ -4751,32 +4656,6 @@ decl_type_context (decl)
return NULL_TREE;
}
/* Print debugging information about the size of the
toplev_inline_obstacks. */
void
print_inline_obstack_statistics ()
{
struct simple_obstack_stack *current = toplev_inline_obstacks;
int n_obstacks = 0;
int n_alloc = 0;
int n_chunks = 0;
for (; current; current = current->next, ++n_obstacks)
{
struct obstack *o = current->obstack;
struct _obstack_chunk *chunk = o->chunk;
n_alloc += o->next_free - chunk->contents;
chunk = chunk->prev;
++n_chunks;
for (; chunk; chunk = chunk->prev, ++n_chunks)
n_alloc += chunk->limit - &chunk->contents[0];
}
fprintf (stderr, "inline obstacks: %d obstacks, %d bytes, %d chunks\n",
n_obstacks, n_alloc, n_chunks);
}
/* Print debugging information about the obstack O, named STR. */
void
......@@ -4835,7 +4714,6 @@ dump_tree_statistics ()
print_obstack_statistics ("temporary_obstack", &temporary_obstack);
print_obstack_statistics ("momentary_obstack", &momentary_obstack);
print_obstack_statistics ("temp_decl_obstack", &temp_decl_obstack);
print_inline_obstack_statistics ();
print_lang_statistics ();
}
......
......@@ -85,6 +85,48 @@ extern struct obstack *rtl_obstack;
extern struct obstack permanent_obstack;
#define obstack_chunk_alloc xmalloc
struct addr_const;
struct constant_descriptor;
struct rtx_const;
struct pool_constant;
#define MAX_RTX_HASH_TABLE 61
struct varasm_status
{
/* Hash facility for making memory-constants
from constant rtl-expressions. It is used on RISC machines
where immediate integer arguments and constant addresses are restricted
so that such constants must be stored in memory.
This pool of constants is reinitialized for each function
so each function gets its own constants-pool that comes right before
it. */
struct constant_descriptor **x_const_rtx_hash_table;
struct pool_sym **x_const_rtx_sym_hash_table;
/* Pointers to first and last constant in pool. */
struct pool_constant *x_first_pool, *x_last_pool;
/* Current offset in constant pool (does not include any machine-specific
header).  */
int x_pool_offset;
/* Chain of all CONST_DOUBLE rtx's constructed for the current function.
They are chained through the CONST_DOUBLE_CHAIN.
A CONST_DOUBLE rtx has CONST_DOUBLE_MEM != cc0_rtx iff it is on this chain.
In that case, CONST_DOUBLE_MEM is either a MEM,
or const0_rtx if no MEM has been made for this CONST_DOUBLE yet. */
rtx x_const_double_chain;
};
#define const_rtx_hash_table (current_function->varasm->x_const_rtx_hash_table)
#define const_rtx_sym_hash_table (current_function->varasm->x_const_rtx_sym_hash_table)
#define first_pool (current_function->varasm->x_first_pool)
#define last_pool (current_function->varasm->x_last_pool)
#define pool_offset (current_function->varasm->x_pool_offset)
#define const_double_chain (current_function->varasm->x_const_double_chain)
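The macros above preserve the old global spellings while the storage moves into the per-function structure. A minimal sketch of that pattern, with hypothetical *_demo names in place of GCC's types:

#include <string.h>

struct varasm_state_demo
{
  int x_pool_offset;
};

struct function_demo
{
  struct varasm_state_demo *varasm;
};

static struct function_demo *current_function_demo;

/* The old code said `pool_offset`; the macro keeps that spelling while
   the storage now lives in the per-function structure.  */
#define pool_offset_demo (current_function_demo->varasm->x_pool_offset)

int
main (void)
{
  struct varasm_state_demo v;
  struct function_demo f;

  memset (&v, 0, sizeof v);
  f.varasm = &v;
  current_function_demo = &f;

  pool_offset_demo += 16;    /* updates this function's private copy */
  return pool_offset_demo == 16 ? 0 : 1;
}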
/* Number for making the label on the next
constant that is stored in memory. */
......@@ -111,11 +153,6 @@ tree last_assemble_variable_decl;
static int function_defined;
struct addr_const;
struct constant_descriptor;
struct rtx_const;
struct pool_constant;
static const char *strip_reg_name PROTO((const char *));
static int contains_pointers_p PROTO((tree));
static void decode_addr_const PROTO((tree, struct addr_const *));
......@@ -134,7 +171,7 @@ static int compare_constant_rtx PROTO((enum machine_mode, rtx,
struct constant_descriptor *));
static struct constant_descriptor *record_constant_rtx PROTO((enum machine_mode,
rtx));
static struct pool_constant *find_pool_constant PROTO((rtx));
static struct pool_constant *find_pool_constant PROTO((struct function *, rtx));
static void mark_constant_pool PROTO((void));
static void mark_constants PROTO((rtx));
static int output_addressed_constants PROTO((tree));
......@@ -1960,17 +1997,6 @@ assemble_real (d, mode)
/* Here we combine duplicate floating constants to make
CONST_DOUBLE rtx's, and force those out to memory when necessary. */
/* Chain of all CONST_DOUBLE rtx's constructed for the current function.
They are chained through the CONST_DOUBLE_CHAIN.
A CONST_DOUBLE rtx has CONST_DOUBLE_MEM != cc0_rtx iff it is on this chain.
In that case, CONST_DOUBLE_MEM is either a MEM,
or const0_rtx if no MEM has been made for this CONST_DOUBLE yet.
(CONST_DOUBLE_MEM is used only for top-level functions.
See force_const_mem for explanation.) */
static rtx const_double_chain;
/* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair of ints.
For an integer, I0 is the low-order word and I1 is the high-order word.
For a real number, I0 is the word with the low address
......@@ -2045,7 +2071,7 @@ immed_double_const (i0, i1, mode)
/* Search the chain for an existing CONST_DOUBLE with the right value.
If one is found, return it. */
if (current_function != 0)
for (r = const_double_chain; r; r = CONST_DOUBLE_CHAIN (r))
if (CONST_DOUBLE_LOW (r) == i0 && CONST_DOUBLE_HIGH (r) == i1
&& GET_MODE (r) == mode)
......@@ -2064,9 +2090,8 @@ immed_double_const (i0, i1, mode)
r = gen_rtx_CONST_DOUBLE (mode, NULL_RTX, i0, i1);
pop_obstacks ();
/* Don't touch const_double_chain in nested function; see force_const_mem.
Also, don't touch it if not inside any function. */
if (outer_function_chain == 0 && current_function_decl != 0)
/* Don't touch const_double_chain if not inside any function. */
if (current_function_decl != 0)
{
CONST_DOUBLE_CHAIN (r) = const_double_chain;
const_double_chain = r;
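The two lines above push the new CONST_DOUBLE onto the per-function chain after the linear search earlier in the function found no match. A reduced sketch of that search-then-push interning, with a hypothetical fake_cd standing in for rtx and its chain field playing the role of CONST_DOUBLE_CHAIN:

#include <stdio.h>
#include <stdlib.h>

struct fake_cd
{
  long lo, hi;              /* cf. CONST_DOUBLE_LOW / CONST_DOUBLE_HIGH */
  struct fake_cd *chain;    /* cf. CONST_DOUBLE_CHAIN */
};

static struct fake_cd *chain_head;   /* cf. const_double_chain */

static struct fake_cd *
intern (long lo, long hi)
{
  struct fake_cd *r;

  /* Search the chain for an existing node with the right value.  */
  for (r = chain_head; r; r = r->chain)
    if (r->lo == lo && r->hi == hi)
      return r;

  /* None found: allocate one and push it on the head of the chain.  */
  r = malloc (sizeof *r);
  if (!r)
    abort ();
  r->lo = lo;
  r->hi = hi;
  r->chain = chain_head;
  chain_head = r;
  return r;
}

int
main (void)
{
  struct fake_cd *a = intern (1, 2);
  struct fake_cd *b = intern (1, 2);
  printf ("%s\n", a == b ? "shared" : "distinct");  /* prints "shared" */
  return 0;
}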
......@@ -2118,7 +2143,7 @@ immed_real_const_1 (d, mode)
/* Search the chain for an existing CONST_DOUBLE with the right value.
If one is found, return it. */
if (current_function != 0)
for (r = const_double_chain; r; r = CONST_DOUBLE_CHAIN (r))
if (! bcmp ((char *) &CONST_DOUBLE_LOW (r), (char *) &u, sizeof u)
&& GET_MODE (r) == mode)
......@@ -2131,17 +2156,15 @@ immed_real_const_1 (d, mode)
we will be leaving this constant on the chain, so we cannot tolerate
freed memory. So switch to saveable_obstack for this allocation
and then switch back if we were in current_obstack. */
push_obstacks_nochange ();
rtl_in_saveable_obstack ();
r = rtx_alloc (CONST_DOUBLE);
pop_obstacks ();
PUT_MODE (r, mode);
bcopy ((char *) &u, (char *) &CONST_DOUBLE_LOW (r), sizeof u);
pop_obstacks ();
/* Don't touch const_double_chain in nested function; see force_const_mem.
Also, don't touch it if not inside any function. */
if (outer_function_chain == 0 && current_function_decl != 0)
/* Don't touch const_double_chain if not inside any function. */
if (current_function_decl != 0)
{
CONST_DOUBLE_CHAIN (r) = const_double_chain;
const_double_chain = r;
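The hunk above wraps the CONST_DOUBLE allocation in push_obstacks_nochange / rtl_in_saveable_obstack / pop_obstacks so the node lands on the longer-lived obstack. A standalone sketch of that save/retarget/restore discipline, assuming GNU obstack.h; the three helpers here are simplified stand-ins, not GCC's implementations:

#include <obstack.h>
#include <stdlib.h>

static void *
xmalloc_demo (size_t n)
{
  void *p = malloc (n);
  if (!p)
    abort ();
  return p;
}

#define obstack_chunk_alloc xmalloc_demo
#define obstack_chunk_free free

static struct obstack frame_obstack, saveable_demo;
static struct obstack *alloc_target;   /* plays rtl_obstack */
static struct obstack *saved[8];
static int depth;

/* Remember the current target without changing it.  */
static void push_nochange (void) { saved[depth++] = alloc_target; }
/* Point allocation at the longer-lived obstack.  */
static void in_saveable (void)   { alloc_target = &saveable_demo; }
/* Restore whatever was in effect before.  */
static void pop_state (void)     { alloc_target = saved[--depth]; }

int
main (void)
{
  obstack_init (&frame_obstack);
  obstack_init (&saveable_demo);
  alloc_target = &frame_obstack;

  push_nochange ();
  in_saveable ();
  void *r = obstack_alloc (alloc_target, 32);  /* survives the frame */
  pop_state ();

  return (r && alloc_target == &frame_obstack) ? 0 : 1;
}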
......@@ -2175,11 +2198,6 @@ clear_const_double_mem ()
{
register rtx r, next;
/* Don't touch CONST_DOUBLE_MEM for nested functions.
See force_const_mem for explanation. */
if (outer_function_chain != 0)
return;
for (r = const_double_chain; r; r = next)
{
next = CONST_DOUBLE_CHAIN (r);
......@@ -3110,20 +3128,6 @@ output_constant_def_contents (exp, reloc, labelno)
}
/* Similar hash facility for making memory-constants
from constant rtl-expressions. It is used on RISC machines
where immediate integer arguments and constant addresses are restricted
so that such constants must be stored in memory.
This pool of constants is reinitialized for each function
so each function gets its own constants-pool that comes right before it.
All structures allocated here are discarded when functions are saved for
inlining, so they do not need to be allocated permanently. */
#define MAX_RTX_HASH_TABLE 61
static struct constant_descriptor **const_rtx_hash_table;
/* Structure to represent sufficient information about a constant so that
it can be output when the constant pool is output, so that function
integration can be done, and to simplify handling on machines that reference
......@@ -3141,15 +3145,6 @@ struct pool_constant
int mark;
};
/* Pointers to first and last constant in pool. */
static struct pool_constant *first_pool, *last_pool;
/* Current offset in constant pool (does not include any machine-specific
header).  */
static int pool_offset;
/* Structure used to maintain hash table mapping symbols used to their
corresponding constants. */
......@@ -3160,63 +3155,35 @@ struct pool_sym
struct pool_sym *next;
};
static struct pool_sym **const_rtx_sym_hash_table;
/* Hash code for a SYMBOL_REF with CONSTANT_POOL_ADDRESS_P true.
The argument is XSTR (... , 0) */
#define SYMHASH(LABEL) \
((((unsigned long) (LABEL)) & ((1 << HASHBITS) - 1)) % MAX_RTX_HASH_TABLE)
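Note that SYMHASH hashes the label's address, not its characters; that is consistent because find_pool_constant below compares with pointer equality (sym->label == label) and labels are shared. A standalone illustration, assuming HASHBITS is 30 as defined elsewhere in varasm.c of this era:

#include <stdio.h>

#define HASHBITS_DEMO 30
#define MAX_RTX_HASH_TABLE_DEMO 61
#define SYMHASH_DEMO(LABEL) \
  ((((unsigned long) (LABEL)) & ((1 << HASHBITS_DEMO) - 1)) \
   % MAX_RTX_HASH_TABLE_DEMO)

int
main (void)
{
  static const char label[] = "*.LC0";
  /* The pointer value, masked to HASHBITS bits, picks one of 61 buckets.  */
  printf ("bucket = %lu\n", SYMHASH_DEMO (label));
  return 0;
}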
/* Initialize constant pool hashing for next function. */
/* Initialize constant pool hashing for a new function. */
void
init_const_rtx_hash_table ()
init_varasm_status (f)
struct function *f;
{
const_rtx_hash_table
struct varasm_status *p;
p = (struct varasm_status *) xmalloc (sizeof (struct varasm_status));
f->varasm = p;
p->x_const_rtx_hash_table
= ((struct constant_descriptor **)
oballoc (MAX_RTX_HASH_TABLE * sizeof (struct constant_descriptor *)));
const_rtx_sym_hash_table
xmalloc (MAX_RTX_HASH_TABLE * sizeof (struct constant_descriptor *)));
p->x_const_rtx_sym_hash_table
= ((struct pool_sym **)
oballoc (MAX_RTX_HASH_TABLE * sizeof (struct pool_sym *)));
bzero ((char *) const_rtx_hash_table,
xmalloc (MAX_RTX_HASH_TABLE * sizeof (struct pool_sym *)));
bzero ((char *) p->x_const_rtx_hash_table,
MAX_RTX_HASH_TABLE * sizeof (struct constant_descriptor *));
bzero ((char *) const_rtx_sym_hash_table,
bzero ((char *) p->x_const_rtx_sym_hash_table,
MAX_RTX_HASH_TABLE * sizeof (struct pool_sym *));
first_pool = last_pool = 0;
pool_offset = 0;
}
/* Save and restore status for a nested function. */
void
save_varasm_status (p, context)
struct function *p;
tree context;
{
p->const_rtx_hash_table = const_rtx_hash_table;
p->const_rtx_sym_hash_table = const_rtx_sym_hash_table;
p->first_pool = first_pool;
p->last_pool = last_pool;
p->pool_offset = pool_offset;
p->const_double_chain = const_double_chain;
/* If we are pushing to toplevel, we can't reuse const_double_chain. */
if (context == NULL_TREE)
const_double_chain = 0;
}
void
restore_varasm_status (p)
struct function *p;
{
const_rtx_hash_table = p->const_rtx_hash_table;
const_rtx_sym_hash_table = p->const_rtx_sym_hash_table;
first_pool = p->first_pool;
last_pool = p->last_pool;
pool_offset = p->pool_offset;
const_double_chain = p->const_double_chain;
p->x_first_pool = p->x_last_pool = 0;
p->x_pool_offset = 0;
p->x_const_double_chain = 0;
}
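With init_varasm_status allocating fresh state for every function, the deleted save/restore pair has nothing left to do: compiling a nested function no longer clobbers the outer function's globals. A reduced sketch of that lifetime model, with hypothetical *_demo types:

#include <stdio.h>
#include <stdlib.h>

struct va_demo { int pool_offset; };
struct fn_demo { struct va_demo *varasm; };

static struct fn_demo *
begin_function (void)                  /* cf. init_varasm_status */
{
  struct fn_demo *f = malloc (sizeof *f);
  if (!f)
    abort ();
  f->varasm = calloc (1, sizeof *f->varasm);  /* fields start at zero */
  if (!f->varasm)
    abort ();
  return f;
}

int
main (void)
{
  struct fn_demo *outer = begin_function ();
  outer->varasm->pool_offset = 48;

  /* "Compile" a nested function: new state, no globals to save.  */
  struct fn_demo *nested = begin_function ();
  nested->varasm->pool_offset = 8;

  printf ("outer=%d nested=%d\n",
          outer->varasm->pool_offset, nested->varasm->pool_offset);
  return 0;
}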
enum kind { RTX_DOUBLE, RTX_INT };
......@@ -3439,11 +3406,6 @@ force_const_mem (mode, x)
modes in an alternating fashion, we will allocate a lot of different
memory locations, but this should be extremely rare. */
/* Don't use CONST_DOUBLE_MEM in a nested function.
Nested functions have their own constant pools,
so they can't share the same values in CONST_DOUBLE_MEM
with the containing function. */
if (outer_function_chain == 0)
if (GET_CODE (x) == CONST_DOUBLE
&& GET_CODE (CONST_DOUBLE_MEM (x)) == MEM
&& GET_MODE (CONST_DOUBLE_MEM (x)) == mode)
......@@ -3558,7 +3520,6 @@ force_const_mem (mode, x)
CONSTANT_POOL_ADDRESS_P (XEXP (def, 0)) = 1;
current_function_uses_const_pool = 1;
if (outer_function_chain == 0)
if (GET_CODE (x) == CONST_DOUBLE)
{
if (CONST_DOUBLE_MEM (x) == cc0_rtx)
......@@ -3576,13 +3537,14 @@ force_const_mem (mode, x)
the corresponding pool_constant structure. */
static struct pool_constant *
find_pool_constant (addr)
find_pool_constant (f, addr)
struct function *f;
rtx addr;
{
struct pool_sym *sym;
char *label = XSTR (addr, 0);
for (sym = const_rtx_sym_hash_table[SYMHASH (label)]; sym; sym = sym->next)
for (sym = f->varasm->x_const_rtx_sym_hash_table[SYMHASH (label)]; sym; sym = sym->next)
if (sym->label == label)
return sym->pool;
......@@ -3595,7 +3557,17 @@ rtx
get_pool_constant (addr)
rtx addr;
{
return (find_pool_constant (addr))->constant;
return (find_pool_constant (current_function, addr))->constant;
}
/* Likewise, but for the constant pool of a specific function. */
rtx
get_pool_constant_for_function (f, addr)
struct function *f;
rtx addr;
{
return (find_pool_constant (f, addr))->constant;
}
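Threading the struct function argument through find_pool_constant is what makes the new *_for_function entry points one-liners. A reduced sketch of that shape, with hypothetical *_demo types in place of the pool structures:

#include <stddef.h>
#include <stdio.h>

#define TABLE_SIZE_DEMO 61

struct entry_demo
{
  const char *label;
  int value;
  struct entry_demo *next;
};

struct fn_state_demo
{
  struct entry_demo *table[TABLE_SIZE_DEMO];
};

static struct fn_state_demo *current_fn_demo;

static unsigned
bucket_demo (const char *label)
{
  /* Pointer-identity hash, as in SYMHASH.  */
  return (unsigned) ((size_t) label % TABLE_SIZE_DEMO);
}

/* The lookup takes the owning function's state explicitly ...  */
static struct entry_demo *
find_entry (struct fn_state_demo *f, const char *label)
{
  struct entry_demo *e;
  for (e = f->table[bucket_demo (label)]; e; e = e->next)
    if (e->label == label)
      return e;
  return NULL;
}

/* ... so the global-based entry point is a one-line wrapper,  */
static int
get_value (const char *label)
{
  return find_entry (current_fn_demo, label)->value;
}

/* ... and another function's pool needs no save/restore to reach.  */
static int
get_value_for_function (struct fn_state_demo *f, const char *label)
{
  return find_entry (f, label)->value;
}

int
main (void)
{
  static struct fn_state_demo f;
  static struct entry_demo e = { NULL, 42, NULL };
  static const char label[] = "*.LC0";

  e.label = label;
  f.table[bucket_demo (label)] = &e;
  current_fn_demo = &f;

  printf ("%d %d\n", get_value (label), get_value_for_function (&f, label));
  return 0;
}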
/* Similar, return the mode. */
......@@ -3604,7 +3576,15 @@ enum machine_mode
get_pool_mode (addr)
rtx addr;
{
return (find_pool_constant (addr))->mode;
return (find_pool_constant (current_function, addr))->mode;
}
enum machine_mode
get_pool_mode_for_function (f, addr)
struct function *f;
rtx addr;
{
return (find_pool_constant (f, addr))->mode;
}
/* Similar, return the offset in the constant pool. */
......@@ -3613,7 +3593,7 @@ int
get_pool_offset (addr)
rtx addr;
{
return (find_pool_constant (addr))->offset;
return (find_pool_constant (current_function, addr))->offset;
}
/* Return the size of the constant pool. */
......@@ -3786,14 +3766,11 @@ mark_constants (x)
if (GET_CODE (x) == SYMBOL_REF)
{
if (CONSTANT_POOL_ADDRESS_P (x))
find_pool_constant (x)->mark = 1;
find_pool_constant (current_function, x)->mark = 1;
return;
}
/* Never search inside a CONST_DOUBLE, because CONST_DOUBLE_MEM may be
a MEM, but does not constitute a use of that MEM. This is particularly
important inside a nested function, because CONST_DOUBLE_MEM may be
a reference to a MEM in the parent's constant pool. See the comment
in force_const_mem. */
a MEM, but does not constitute a use of that MEM. */
else if (GET_CODE (x) == CONST_DOUBLE)
return;
......