Commit 1e5bd841 by Bernd Schmidt Committed by Jeff Law

reload1.c (reload): Break out several subroutines and make some variables global.

	* reload1.c (reload): Break out several subroutines and make some
	variables global.
	(calculate_needs_all_insns): New function, broken out of reload.
	(calculate_needs): Likewise.
	(find_reload_regs): Likewise.
	(find_group): Likewise.
	(find_tworeg_group): Likewise.
	(something_needs_reloads): New global variable, formerly in reload.
	(something_needs_elimination): Likewise.
	(caller_save_spill_class): Likewise.
	(caller_save_group_size): Likewise.
	(max_needs): Likewise.
	(group_size): Likewise.
	(max_groups): Likewise.
	(max_nongroups): Likewise.
	(group_mode): Likewise.
	(max_needs_insn): Likewise.
	(max_groups_insn): Likewise.
	(max_nongroups_insn): Likewise.
	(failure): Likewise.

From-SVN: r22367
parent 5a0a1a66
Wed Sep 9 21:58:41 1998 Bernd Schmidt <crux@pool.informatik.rwth-aachen.de>
* reload1.c (reload): Break out several subroutines and make some
variables global.
(calculate_needs_all_insns): New function, broken out of reload.
(calculate_needs): Likewise.
(find_reload_regs): Likewise.
(find_group): Likewise.
(find_tworeg_group): Likewise.
(something_needs_reloads): New global variable, formerly in reload.
(something_needs_elimination): Likewise.
(caller_save_spill_class): Likewise.
(caller_save_group_size): Likewise.
(max_needs): Likewise.
(group_size): Likewise.
(max_groups): Likewise.
(max_nongroups): Likewise.
(group_mode): Likewise.
(max_needs_insn): Likewise.
(max_groups_insn): Likewise.
(max_nongroups_insn): Likewise.
(failure): Likewise.
* print-rtl.c (print_rtx): For MEMs, print MEM_ALIAS_SET.
Wed Sep 9 13:14:41 1998 Richard Henderson <rth@cygnus.com>
......
...@@ -351,6 +351,11 @@ static int num_labels; ...@@ -351,6 +351,11 @@ static int num_labels;
struct hard_reg_n_uses { int regno; int uses; }; struct hard_reg_n_uses { int regno; int uses; };
static int calculate_needs_all_insns PROTO((rtx, int));
static int calculate_needs PROTO((int, rtx, rtx, int));
static int find_reload_regs PROTO((int, FILE *));
static int find_tworeg_group PROTO((int, int, FILE *));
static int find_group PROTO((int, int, FILE *));
static int possible_group_p PROTO((int, int *)); static int possible_group_p PROTO((int, int *));
static void count_possible_groups PROTO((int *, enum machine_mode *, static void count_possible_groups PROTO((int *, enum machine_mode *,
int *, int)); int *, int));
...@@ -511,6 +516,49 @@ init_reload () ...@@ -511,6 +516,49 @@ init_reload ()
} }
} }
/* Global variables used by reload and its subroutines. */
/* Set during calculate_needs if an insn needs reloading. */
static int something_needs_reloads;
/* Set during calculate_needs if an insn needs register elimination. */
static int something_needs_elimination;
/* Indicate whether caller saves need a spill register. */
static enum reg_class caller_save_spill_class = NO_REGS;
static int caller_save_group_size = 1;
/* For each class, number of reload regs needed in that class.
This is the maximum over all insns of the needs in that class
of the individual insn. */
static int max_needs[N_REG_CLASSES];
/* For each class, size of group of consecutive regs
that is needed for the reloads of this class. */
static int group_size[N_REG_CLASSES];
/* For each class, max number of consecutive groups needed.
(Each group contains group_size[CLASS] consecutive registers.) */
static int max_groups[N_REG_CLASSES];
/* For each class, max number needed of regs that don't belong
to any of the groups. */
static int max_nongroups[N_REG_CLASSES];
/* For each class, the machine mode which requires consecutive
groups of regs of that class.
If two different modes ever require groups of one class,
they must be the same size and equally restrictive for that class,
otherwise we can't handle the complexity. */
static enum machine_mode group_mode[N_REG_CLASSES];
/* Record the insn where each maximum need is first found. */
static rtx max_needs_insn[N_REG_CLASSES];
static rtx max_groups_insn[N_REG_CLASSES];
static rtx max_nongroups_insn[N_REG_CLASSES];
/* Nonzero means we couldn't get enough spill regs. */
static int failure;
/* Main entry point for the reload pass. /* Main entry point for the reload pass.
FIRST is the first insn of the function being compiled. FIRST is the first insn of the function being compiled.
...@@ -535,8 +583,7 @@ reload (first, global, dumpfile) ...@@ -535,8 +583,7 @@ reload (first, global, dumpfile)
int global; int global;
FILE *dumpfile; FILE *dumpfile;
{ {
register int class; register int i, j;
register int i, j, k;
register rtx insn; register rtx insn;
register struct elim_table *ep; register struct elim_table *ep;
...@@ -546,24 +593,18 @@ reload (first, global, dumpfile) ...@@ -546,24 +593,18 @@ reload (first, global, dumpfile)
int (*real_at_ptr)[NUM_ELIMINABLE_REGS]; int (*real_at_ptr)[NUM_ELIMINABLE_REGS];
int something_changed; int something_changed;
int something_needs_reloads;
int something_needs_elimination;
int new_basic_block_needs;
enum reg_class caller_save_spill_class = NO_REGS;
int caller_save_group_size = 1;
/* Nonzero means we couldn't get enough spill regs. */
int failure = 0;
/* The basic block number currently being processed for INSN. */
int this_block;
/* Make sure even insns with volatile mem refs are recognizable. */ /* Make sure even insns with volatile mem refs are recognizable. */
init_recog (); init_recog ();
failure = 0;
/* Enable find_equiv_reg to distinguish insns made by reload. */ /* Enable find_equiv_reg to distinguish insns made by reload. */
reload_first_uid = get_max_uid (); reload_first_uid = get_max_uid ();
caller_save_spill_class = NO_REGS;
caller_save_group_size = 1;
for (i = 0; i < N_REG_CLASSES; i++) for (i = 0; i < N_REG_CLASSES; i++)
basic_block_needs[i] = 0; basic_block_needs[i] = 0;
...@@ -864,31 +905,6 @@ reload (first, global, dumpfile) ...@@ -864,31 +905,6 @@ reload (first, global, dumpfile)
something_needs_elimination = 0; something_needs_elimination = 0;
while (something_changed) while (something_changed)
{ {
rtx after_call = 0;
/* For each class, number of reload regs needed in that class.
This is the maximum over all insns of the needs in that class
of the individual insn. */
int max_needs[N_REG_CLASSES];
/* For each class, size of group of consecutive regs
that is needed for the reloads of this class. */
int group_size[N_REG_CLASSES];
/* For each class, max number of consecutive groups needed.
(Each group contains group_size[CLASS] consecutive registers.) */
int max_groups[N_REG_CLASSES];
/* For each class, max number needed of regs that don't belong
to any of the groups. */
int max_nongroups[N_REG_CLASSES];
/* For each class, the machine mode which requires consecutive
groups of regs of that class.
If two different modes ever require groups of one class,
they must be the same size and equally restrictive for that class,
otherwise we can't handle the complexity. */
enum machine_mode group_mode[N_REG_CLASSES];
/* Record the insn where each maximum need is first found. */
rtx max_needs_insn[N_REG_CLASSES];
rtx max_groups_insn[N_REG_CLASSES];
rtx max_nongroups_insn[N_REG_CLASSES];
rtx x; rtx x;
HOST_WIDE_INT starting_frame_size; HOST_WIDE_INT starting_frame_size;
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
...@@ -907,13 +923,6 @@ reload (first, global, dumpfile) ...@@ -907,13 +923,6 @@ reload (first, global, dumpfile)
for (i = 0; i < N_REG_CLASSES; i++) for (i = 0; i < N_REG_CLASSES; i++)
group_mode[i] = VOIDmode; group_mode[i] = VOIDmode;
/* Keep track of which basic blocks are needing the reloads. */
this_block = 0;
/* Remember whether any element of basic_block_needs
changes from 0 to 1 in this pass. */
new_basic_block_needs = 0;
/* Round size of stack frame to BIGGEST_ALIGNMENT. This must be done /* Round size of stack frame to BIGGEST_ALIGNMENT. This must be done
here because the stack size may be a part of the offset computation here because the stack size may be a part of the offset computation
for register elimination, and there might have been new stack slots for register elimination, and there might have been new stack slots
...@@ -1025,142 +1034,543 @@ reload (first, global, dumpfile) ...@@ -1025,142 +1034,543 @@ reload (first, global, dumpfile)
group_size[(int) caller_save_spill_class] = caller_save_group_size; group_size[(int) caller_save_spill_class] = caller_save_group_size;
} }
/* Compute the most additional registers needed by any instruction. something_changed |= calculate_needs_all_insns (first, global);
Collect information separately for each class of regs. */
for (insn = first; insn; insn = NEXT_INSN (insn)) /* If we allocated any new memory locations, make another pass
since it might have changed elimination offsets. */
if (starting_frame_size != get_frame_size ())
something_changed = 1;
if (dumpfile)
for (i = 0; i < N_REG_CLASSES; i++)
{ {
if (global && this_block + 1 < n_basic_blocks if (max_needs[i] > 0)
&& insn == basic_block_head[this_block+1]) fprintf (dumpfile,
++this_block; ";; Need %d reg%s of class %s (for insn %d).\n",
max_needs[i], max_needs[i] == 1 ? "" : "s",
reg_class_names[i], INSN_UID (max_needs_insn[i]));
if (max_nongroups[i] > 0)
fprintf (dumpfile,
";; Need %d nongroup reg%s of class %s (for insn %d).\n",
max_nongroups[i], max_nongroups[i] == 1 ? "" : "s",
reg_class_names[i], INSN_UID (max_nongroups_insn[i]));
if (max_groups[i] > 0)
fprintf (dumpfile,
";; Need %d group%s (%smode) of class %s (for insn %d).\n",
max_groups[i], max_groups[i] == 1 ? "" : "s",
mode_name[(int) group_mode[i]],
reg_class_names[i], INSN_UID (max_groups_insn[i]));
}
/* If this is a label, a JUMP_INSN, or has REG_NOTES (which /* If we have caller-saves, set up the save areas and see if caller-save
might include REG_LABEL), we need to see what effects this will need a spill register. */
has on the known offsets at labels. */
if (GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == JUMP_INSN if (caller_save_needed)
|| (GET_RTX_CLASS (GET_CODE (insn)) == 'i' {
&& REG_NOTES (insn) != 0)) /* Set the offsets for setup_save_areas. */
set_label_offsets (insn, insn, 0); for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
ep++)
ep->previous_offset = ep->max_offset;
if (GET_RTX_CLASS (GET_CODE (insn)) == 'i') if ( ! setup_save_areas (&something_changed)
&& caller_save_spill_class == NO_REGS)
{ {
/* Nonzero means don't use a reload reg that overlaps /* The class we will need depends on whether the machine
the place where a function value can be returned. */ supports the sum of two registers for an address; see
rtx avoid_return_reg = 0; find_address_reloads for details. */
rtx old_body = PATTERN (insn); caller_save_spill_class
int old_code = INSN_CODE (insn); = double_reg_address_ok ? INDEX_REG_CLASS : BASE_REG_CLASS;
rtx old_notes = REG_NOTES (insn); caller_save_group_size
int did_elimination = 0; = CLASS_MAX_NREGS (caller_save_spill_class, Pmode);
something_changed = 1;
}
}
/* To compute the number of reload registers of each class /* See if anything that happened changes which eliminations are valid.
needed for an insn, we must simulate what choose_reload_regs For example, on the Sparc, whether or not the frame pointer can
can do. We do this by splitting an insn into an "input" and be eliminated can depend on what registers have been used. We need
an "output" part. RELOAD_OTHER reloads are used in both. not check some conditions again (such as flag_omit_frame_pointer)
The input part uses those reloads, RELOAD_FOR_INPUT reloads, since they can't have changed. */
which must be live over the entire input section of reloads,
and the maximum of all the RELOAD_FOR_INPUT_ADDRESS and
RELOAD_FOR_OPERAND_ADDRESS reloads, which conflict with the
inputs.
The registers needed for output are RELOAD_OTHER and for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
RELOAD_FOR_OUTPUT, which are live for the entire output if ((ep->from == HARD_FRAME_POINTER_REGNUM && FRAME_POINTER_REQUIRED)
portion, and the maximum of all the RELOAD_FOR_OUTPUT_ADDRESS #ifdef ELIMINABLE_REGS
reloads for each operand. || ! CAN_ELIMINATE (ep->from, ep->to)
#endif
)
ep->can_eliminate = 0;
The total number of registers needed is the maximum of the /* Look for the case where we have discovered that we can't replace
inputs and outputs. */ register A with register B and that means that we will now be
trying to replace register A with register C. This means we can
no longer replace register C with register B and we need to disable
such an elimination, if it exists. This occurs often with A == ap,
B == sp, and C == fp. */
struct needs for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
{ {
/* [0] is normal, [1] is nongroup. */ struct elim_table *op;
int regs[2][N_REG_CLASSES]; register int new_to = -1;
int groups[N_REG_CLASSES];
};
/* Each `struct needs' corresponds to one RELOAD_... type. */
struct {
struct needs other;
struct needs input;
struct needs output;
struct needs insn;
struct needs other_addr;
struct needs op_addr;
struct needs op_addr_reload;
struct needs in_addr[MAX_RECOG_OPERANDS];
struct needs in_addr_addr[MAX_RECOG_OPERANDS];
struct needs out_addr[MAX_RECOG_OPERANDS];
struct needs out_addr_addr[MAX_RECOG_OPERANDS];
} insn_needs;
/* If needed, eliminate any eliminable registers. */
if (num_eliminable)
did_elimination = eliminate_regs_in_insn (insn, 0);
/* Set avoid_return_reg if this is an insn if (! ep->can_eliminate && ep->can_eliminate_previous)
that might use the value of a function call. */
if (SMALL_REGISTER_CLASSES && GET_CODE (insn) == CALL_INSN)
{ {
if (GET_CODE (PATTERN (insn)) == SET) /* Find the current elimination for ep->from, if there is a
after_call = SET_DEST (PATTERN (insn)); new one. */
else if (GET_CODE (PATTERN (insn)) == PARALLEL for (op = reg_eliminate;
&& GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET) op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0)); if (op->from == ep->from && op->can_eliminate)
else
after_call = 0;
}
else if (SMALL_REGISTER_CLASSES && after_call != 0
&& !(GET_CODE (PATTERN (insn)) == SET
&& SET_DEST (PATTERN (insn)) == stack_pointer_rtx)
&& GET_CODE (PATTERN (insn)) != USE)
{ {
if (reg_referenced_p (after_call, PATTERN (insn))) new_to = op->to;
avoid_return_reg = after_call; break;
after_call = 0;
} }
/* Analyze the instruction. */ /* See if there is an elimination of NEW_TO -> EP->TO. If so,
find_reloads (insn, 0, spill_indirect_levels, global, disable it. */
spill_reg_order); for (op = reg_eliminate;
op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
/* Remember for later shortcuts which insns had any reloads or if (op->from == new_to && op->to == ep->to)
register eliminations. op->can_eliminate = 0;
}
}
One might think that it would be worthwhile to mark insns /* See if any registers that we thought we could eliminate the previous
that need register replacements but not reloads, but this is time are no longer eliminable. If so, something has changed and we
not safe because find_reloads may do some manipulation of must spill the register. Also, recompute the number of eliminable
the insn (such as swapping commutative operands), which would registers and see if the frame pointer is needed; it is if there is
be lost when we restore the old pattern after register no elimination of the frame pointer that we can perform. */
replacement. So the actions of find_reloads must be redone in
subsequent passes or in reload_as_needed.
However, it is safe to mark insns that need reloads frame_pointer_needed = 1;
but not register replacement. */ for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
{
if (ep->can_eliminate && ep->from == FRAME_POINTER_REGNUM
&& ep->to != HARD_FRAME_POINTER_REGNUM)
frame_pointer_needed = 0;
PUT_MODE (insn, (did_elimination ? QImode if (! ep->can_eliminate && ep->can_eliminate_previous)
: n_reloads ? HImode {
: GET_MODE (insn) == DImode ? DImode ep->can_eliminate_previous = 0;
: VOIDmode)); spill_hard_reg (ep->from, global, dumpfile, 1);
something_changed = 1;
num_eliminable--;
}
}
/* Discard any register replacements done. */ #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
if (did_elimination) /* If we didn't need a frame pointer last time, but we do now, spill
the hard frame pointer. */
if (frame_pointer_needed && ! previous_frame_pointer_needed)
{ {
obstack_free (&reload_obstack, reload_firstobj); spill_hard_reg (HARD_FRAME_POINTER_REGNUM, global, dumpfile, 1);
PATTERN (insn) = old_body; something_changed = 1;
INSN_CODE (insn) = old_code;
REG_NOTES (insn) = old_notes;
something_needs_elimination = 1;
} }
#endif
/* If this insn has no reloads, we need not do anything except /* If all needs are met, we win. */
in the case of a CALL_INSN when we have caller-saves and
caller-save needs reloads. */
if (n_reloads == 0 for (i = 0; i < N_REG_CLASSES; i++)
&& ! (GET_CODE (insn) == CALL_INSN if (max_needs[i] > 0 || max_groups[i] > 0 || max_nongroups[i] > 0)
&& caller_save_spill_class != NO_REGS)) break;
continue; if (i == N_REG_CLASSES && ! something_changed)
break;
/* Not all needs are met; must spill some hard regs. */
/* Put all registers spilled so far back in potential_reload_regs, but
put them at the front, since we've already spilled most of the
pseudos in them (we might have left some pseudos unspilled if they
were in a block that didn't need any spill registers of a conflicting
class. We used to try to mark off the need for those registers,
but doing so properly is very complex and reallocating them is the
simpler approach. First, "pack" potential_reload_regs by pushing
any nonnegative entries towards the end. That will leave room
for the registers we already spilled.
Also, undo the marking of the spill registers from the last time
around in FORBIDDEN_REGS since we will be probably be allocating
them again below.
??? It is theoretically possible that we might end up not using one
of our previously-spilled registers in this allocation, even though
they are at the head of the list. It's not clear what to do about
this, but it was no better before, when we marked off the needs met
by the previously-spilled registers. With the current code, globals
can be allocated into these registers, but locals cannot. */
if (n_spills)
{
for (i = j = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--)
if (potential_reload_regs[i] != -1)
potential_reload_regs[j--] = potential_reload_regs[i];
for (i = 0; i < n_spills; i++)
{
potential_reload_regs[i] = spill_regs[i];
spill_reg_order[spill_regs[i]] = -1;
CLEAR_HARD_REG_BIT (forbidden_regs, spill_regs[i]);
}
n_spills = 0;
}
something_changed |= find_reload_regs (global, dumpfile);
if (failure)
goto failed;
}
/* If global-alloc was run, notify it of any register eliminations we have
done. */
if (global)
for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
if (ep->can_eliminate)
mark_elimination (ep->from, ep->to);
/* Insert code to save and restore call-clobbered hard regs
around calls. Tell if what mode to use so that we will process
those insns in reload_as_needed if we have to. */
if (caller_save_needed)
save_call_clobbered_regs (num_eliminable ? QImode
: caller_save_spill_class != NO_REGS ? HImode
: VOIDmode);
/* If a pseudo has no hard reg, delete the insns that made the equivalence.
If that insn didn't set the register (i.e., it copied the register to
memory), just delete that insn instead of the equivalencing insn plus
anything now dead. If we call delete_dead_insn on that insn, we may
delete the insn that actually sets the register if the register die
there and that is incorrect. */
for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
if (reg_renumber[i] < 0 && reg_equiv_init[i] != 0
&& GET_CODE (reg_equiv_init[i]) != NOTE)
{
if (reg_set_p (regno_reg_rtx[i], PATTERN (reg_equiv_init[i])))
delete_dead_insn (reg_equiv_init[i]);
else
{
PUT_CODE (reg_equiv_init[i], NOTE);
NOTE_SOURCE_FILE (reg_equiv_init[i]) = 0;
NOTE_LINE_NUMBER (reg_equiv_init[i]) = NOTE_INSN_DELETED;
}
}
/* Use the reload registers where necessary
by generating move instructions to move the must-be-register
values into or out of the reload registers. */
if (something_needs_reloads || something_needs_elimination
|| (caller_save_needed && num_eliminable)
|| caller_save_spill_class != NO_REGS)
reload_as_needed (first, global);
/* If we were able to eliminate the frame pointer, show that it is no
longer live at the start of any basic block. If it ls live by
virtue of being in a pseudo, that pseudo will be marked live
and hence the frame pointer will be known to be live via that
pseudo. */
if (! frame_pointer_needed)
for (i = 0; i < n_basic_blocks; i++)
CLEAR_REGNO_REG_SET (basic_block_live_at_start[i],
HARD_FRAME_POINTER_REGNUM);
/* Come here (with failure set nonzero) if we can't get enough spill regs
and we decide not to abort about it. */
failed:
reload_in_progress = 0;
/* Now eliminate all pseudo regs by modifying them into
their equivalent memory references.
The REG-rtx's for the pseudos are modified in place,
so all insns that used to refer to them now refer to memory.
For a reg that has a reg_equiv_address, all those insns
were changed by reloading so that no insns refer to it any longer;
but the DECL_RTL of a variable decl may refer to it,
and if so this causes the debugging info to mention the variable. */
for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
{
rtx addr = 0;
int in_struct = 0;
int is_readonly = 0;
if (reg_equiv_memory_loc[i])
{
in_struct = MEM_IN_STRUCT_P (reg_equiv_memory_loc[i]);
is_readonly = RTX_UNCHANGING_P (reg_equiv_memory_loc[i]);
}
if (reg_equiv_mem[i])
addr = XEXP (reg_equiv_mem[i], 0);
if (reg_equiv_address[i])
addr = reg_equiv_address[i];
if (addr)
{
if (reg_renumber[i] < 0)
{
rtx reg = regno_reg_rtx[i];
XEXP (reg, 0) = addr;
REG_USERVAR_P (reg) = 0;
RTX_UNCHANGING_P (reg) = is_readonly;
MEM_IN_STRUCT_P (reg) = in_struct;
/* We have no alias information about this newly created
MEM. */
MEM_ALIAS_SET (reg) = 0;
PUT_CODE (reg, MEM);
}
else if (reg_equiv_mem[i])
XEXP (reg_equiv_mem[i], 0) = addr;
}
}
/* Make a pass over all the insns and delete all USEs which we inserted
only to tag a REG_EQUAL note on them; if PRESERVE_DEATH_INFO_REGNO_P
is defined, also remove death notes for things that are no longer
registers or no longer die in the insn (e.g., an input and output
pseudo being tied). */
for (insn = first; insn; insn = NEXT_INSN (insn))
if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
{
#ifdef PRESERVE_DEATH_INFO_REGNO_P
rtx note, next;
#endif
if (GET_CODE (PATTERN (insn)) == USE
&& find_reg_note (insn, REG_EQUAL, NULL_RTX))
{
PUT_CODE (insn, NOTE);
NOTE_SOURCE_FILE (insn) = 0;
NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
continue;
}
#ifdef PRESERVE_DEATH_INFO_REGNO_P
for (note = REG_NOTES (insn); note; note = next)
{
next = XEXP (note, 1);
if (REG_NOTE_KIND (note) == REG_DEAD
&& (GET_CODE (XEXP (note, 0)) != REG
|| reg_set_p (XEXP (note, 0), PATTERN (insn))))
remove_note (insn, note);
}
#endif
}
/* If we are doing stack checking, give a warning if this function's
frame size is larger than we expect. */
if (flag_stack_check && ! STACK_CHECK_BUILTIN)
{
HOST_WIDE_INT size = get_frame_size () + STACK_CHECK_FIXED_FRAME_SIZE;
for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
if (regs_ever_live[i] && ! fixed_regs[i] && call_used_regs[i])
size += UNITS_PER_WORD;
if (size > STACK_CHECK_MAX_FRAME_SIZE)
warning ("frame size too large for reliable stack checking");
}
/* Indicate that we no longer have known memory locations or constants. */
reg_equiv_constant = 0;
reg_equiv_memory_loc = 0;
if (real_known_ptr)
free (real_known_ptr);
if (real_at_ptr)
free (real_at_ptr);
if (scratch_list)
free (scratch_list);
scratch_list = 0;
if (scratch_block)
free (scratch_block);
scratch_block = 0;
free (reg_equiv_constant);
free (reg_equiv_memory_loc);
free (reg_equiv_mem);
free (reg_equiv_init);
free (reg_equiv_address);
free (reg_max_ref_width);
CLEAR_HARD_REG_SET (used_spill_regs);
for (i = 0; i < n_spills; i++)
SET_HARD_REG_BIT (used_spill_regs, spill_regs[i]);
return failure;
}
/* Walk the insns of the current function, starting with FIRST, and collect
information about the need to do register elimination and the need to
perform reloads.

FIRST is the first insn of the function; GLOBAL is nonzero when global
register allocation was run (so basic-block need tracking applies).

Returns nonzero if anything changed that requires another pass of the
caller's fixpoint loop (propagated up from calculate_needs).

Side effects on reload globals: may set something_needs_elimination, and
(via calculate_needs) something_needs_reloads and the max_needs/max_groups
arrays. */
static int
calculate_needs_all_insns (first, global)
rtx first;
int global;
{
rtx insn;
int something_changed = 0;
/* Destination of the most recent function-call value, if it still needs
to be avoided as a reload register (SMALL_REGISTER_CLASSES only). */
rtx after_call = 0;
/* Keep track of which basic blocks are needing the reloads. */
int this_block = 0;
/* Compute the most additional registers needed by any instruction.
Collect information separately for each class of regs. */
for (insn = first; insn; insn = NEXT_INSN (insn))
{
/* Advance THIS_BLOCK when we cross into the next basic block. */
if (global && this_block + 1 < n_basic_blocks
&& insn == basic_block_head[this_block+1])
++this_block;
/* If this is a label, a JUMP_INSN, or has REG_NOTES (which
might include REG_LABEL), we need to see what effects this
has on the known offsets at labels. */
if (GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == JUMP_INSN
|| (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
&& REG_NOTES (insn) != 0))
set_label_offsets (insn, insn, 0);
if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
{
/* Save the insn's pattern, code, and notes so that a trial
register elimination can be undone below. */
rtx old_body = PATTERN (insn);
int old_code = INSN_CODE (insn);
rtx old_notes = REG_NOTES (insn);
int did_elimination = 0;
/* Nonzero means don't use a reload reg that overlaps
the place where a function value can be returned. */
rtx avoid_return_reg = 0;
/* Set avoid_return_reg if this is an insn
that might use the value of a function call. */
if (SMALL_REGISTER_CLASSES && GET_CODE (insn) == CALL_INSN)
{
/* Record where the call's value lands so the next insn that
references it can avoid that register for reloads. */
if (GET_CODE (PATTERN (insn)) == SET)
after_call = SET_DEST (PATTERN (insn));
else if (GET_CODE (PATTERN (insn)) == PARALLEL
&& GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
else
after_call = 0;
}
else if (SMALL_REGISTER_CLASSES && after_call != 0
&& !(GET_CODE (PATTERN (insn)) == SET
&& SET_DEST (PATTERN (insn)) == stack_pointer_rtx)
&& GET_CODE (PATTERN (insn)) != USE)
{
/* First real use of the call value: forbid its register as a
reload reg for this insn, then stop tracking it. */
if (reg_referenced_p (after_call, PATTERN (insn)))
avoid_return_reg = after_call;
after_call = 0;
}
/* If needed, eliminate any eliminable registers. */
if (num_eliminable)
did_elimination = eliminate_regs_in_insn (insn, 0);
/* Analyze the instruction. */
find_reloads (insn, 0, spill_indirect_levels, global,
spill_reg_order);
/* Remember for later shortcuts which insns had any reloads or
register eliminations.
One might think that it would be worthwhile to mark insns
that need register replacements but not reloads, but this is
not safe because find_reloads may do some manipulation of
the insn (such as swapping commutative operands), which would
be lost when we restore the old pattern after register
replacement. So the actions of find_reloads must be redone in
subsequent passes or in reload_as_needed.
However, it is safe to mark insns that need reloads
but not register replacement. */
/* Encode what this insn needs in its mode: QImode = elimination,
HImode = reloads, VOIDmode = neither (DImode is preserved);
reload_as_needed decodes this later. */
PUT_MODE (insn, (did_elimination ? QImode
: n_reloads ? HImode
: GET_MODE (insn) == DImode ? DImode
: VOIDmode));
/* Discard any register replacements done. */
if (did_elimination)
{
/* Restore the saved pattern/code/notes; the elimination is
only being counted here, not committed. */
obstack_free (&reload_obstack, reload_firstobj);
PATTERN (insn) = old_body;
INSN_CODE (insn) = old_code;
REG_NOTES (insn) = old_notes;
something_needs_elimination = 1;
}
/* If this insn has no reloads, we need not do anything except
in the case of a CALL_INSN when we have caller-saves and
caller-save needs reloads. */
if (n_reloads != 0
|| (GET_CODE (insn) == CALL_INSN
&& caller_save_spill_class != NO_REGS))
something_changed |= calculate_needs (this_block, insn,
avoid_return_reg, global);
}
/* NOTE(review): stale comment from before this code was broken out of
reload -- the former `continue' statement became the n_reloads
condition guarding the calculate_needs call above. */
}
return something_changed;
}
/* To compute the number of reload registers of each class
needed for an insn, we must simulate what choose_reload_regs
can do. We do this by splitting an insn into an "input" and
an "output" part. RELOAD_OTHER reloads are used in both.
The input part uses those reloads, RELOAD_FOR_INPUT reloads,
which must be live over the entire input section of reloads,
and the maximum of all the RELOAD_FOR_INPUT_ADDRESS and
RELOAD_FOR_OPERAND_ADDRESS reloads, which conflict with the
inputs.
The registers needed for output are RELOAD_OTHER and
RELOAD_FOR_OUTPUT, which are live for the entire output
portion, and the maximum of all the RELOAD_FOR_OUTPUT_ADDRESS
reloads for each operand.
The total number of registers needed is the maximum of the
inputs and outputs. */
static int
calculate_needs (this_block, insn, avoid_return_reg, global)
int this_block;
rtx insn, avoid_return_reg;
int global;
{
int something_changed = 0;
int i;
struct needs
{
/* [0] is normal, [1] is nongroup. */
int regs[2][N_REG_CLASSES];
int groups[N_REG_CLASSES];
};
/* Each `struct needs' corresponds to one RELOAD_... type. */
struct {
struct needs other;
struct needs input;
struct needs output;
struct needs insn;
struct needs other_addr;
struct needs op_addr;
struct needs op_addr_reload;
struct needs in_addr[MAX_RECOG_OPERANDS];
struct needs in_addr_addr[MAX_RECOG_OPERANDS];
struct needs out_addr[MAX_RECOG_OPERANDS];
struct needs out_addr_addr[MAX_RECOG_OPERANDS];
} insn_needs;
something_needs_reloads = 1; something_needs_reloads = 1;
bzero ((char *) &insn_needs, sizeof insn_needs); bzero ((char *) &insn_needs, sizeof insn_needs);
...@@ -1193,7 +1603,7 @@ reload (first, global, dumpfile) ...@@ -1193,7 +1603,7 @@ reload (first, global, dumpfile)
if (global && ! basic_block_needs[(int) class][this_block]) if (global && ! basic_block_needs[(int) class][this_block])
{ {
basic_block_needs[(int) class][this_block] = 1; basic_block_needs[(int) class][this_block] = 1;
new_basic_block_needs = 1; something_changed = 1;
} }
mode = reload_inmode[i]; mode = reload_inmode[i];
...@@ -1294,22 +1704,19 @@ reload (first, global, dumpfile) ...@@ -1294,22 +1704,19 @@ reload (first, global, dumpfile)
for (i = 0; i < N_REG_CLASSES; i++) for (i = 0; i < N_REG_CLASSES; i++)
{ {
int in_max, out_max; int j, in_max, out_max;
/* Compute normal and nongroup needs. */ /* Compute normal and nongroup needs. */
for (j = 0; j <= 1; j++) for (j = 0; j <= 1; j++)
{ {
for (in_max = 0, out_max = 0, k = 0; int k;
k < reload_n_operands; k++) for (in_max = 0, out_max = 0, k = 0; k < reload_n_operands; k++)
{ {
in_max in_max = MAX (in_max,
= MAX (in_max,
(insn_needs.in_addr[k].regs[j][i] (insn_needs.in_addr[k].regs[j][i]
+ insn_needs.in_addr_addr[k].regs[j][i])); + insn_needs.in_addr_addr[k].regs[j][i]));
out_max out_max = MAX (out_max, insn_needs.out_addr[k].regs[j][i]);
= MAX (out_max, insn_needs.out_addr[k].regs[j][i]); out_max = MAX (out_max,
out_max
= MAX (out_max,
insn_needs.out_addr_addr[k].regs[j][i]); insn_needs.out_addr_addr[k].regs[j][i]);
} }
...@@ -1340,16 +1747,12 @@ reload (first, global, dumpfile) ...@@ -1340,16 +1747,12 @@ reload (first, global, dumpfile)
} }
/* Now compute group needs. */ /* Now compute group needs. */
for (in_max = 0, out_max = 0, j = 0; for (in_max = 0, out_max = 0, j = 0; j < reload_n_operands; j++)
j < reload_n_operands; j++)
{ {
in_max = MAX (in_max, insn_needs.in_addr[j].groups[i]); in_max = MAX (in_max, insn_needs.in_addr[j].groups[i]);
in_max = MAX (in_max, in_max = MAX (in_max, insn_needs.in_addr_addr[j].groups[i]);
insn_needs.in_addr_addr[j].groups[i]); out_max = MAX (out_max, insn_needs.out_addr[j].groups[i]);
out_max out_max = MAX (out_max, insn_needs.out_addr_addr[j].groups[i]);
= MAX (out_max, insn_needs.out_addr[j].groups[i]);
out_max
= MAX (out_max, insn_needs.out_addr_addr[j].groups[i]);
} }
in_max = MAX (MAX (insn_needs.op_addr.groups[i], in_max = MAX (MAX (insn_needs.op_addr.groups[i],
...@@ -1389,6 +1792,7 @@ reload (first, global, dumpfile) ...@@ -1389,6 +1792,7 @@ reload (first, global, dumpfile)
if (GET_CODE (insn) == CALL_INSN if (GET_CODE (insn) == CALL_INSN
&& caller_save_spill_class != NO_REGS) && caller_save_spill_class != NO_REGS)
{ {
int j;
/* See if this register would conflict with any reload that /* See if this register would conflict with any reload that
needs a group or any reload that needs a nongroup. */ needs a group or any reload that needs a nongroup. */
int nongroup_need = 0; int nongroup_need = 0;
...@@ -1430,356 +1834,144 @@ reload (first, global, dumpfile) ...@@ -1430,356 +1834,144 @@ reload (first, global, dumpfile)
if (global if (global
&& ! (basic_block_needs[(int) caller_save_spill_class] && ! (basic_block_needs[(int) caller_save_spill_class]
[this_block])) [this_block]))
{
basic_block_needs[(int) caller_save_spill_class]
[this_block] = 1;
new_basic_block_needs = 1;
}
}
/* If this insn stores the value of a function call,
and that value is in a register that has been spilled,
and if the insn needs a reload in a class
that might use that register as the reload register,
then add an extra need in that class.
This makes sure we have a register available that does
not overlap the return value. */
if (SMALL_REGISTER_CLASSES && avoid_return_reg)
{
int regno = REGNO (avoid_return_reg);
int nregs
= HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
int r;
int basic_needs[N_REG_CLASSES], basic_groups[N_REG_CLASSES];
/* First compute the "basic needs", which counts a
need only in the smallest class in which it
is required. */
bcopy ((char *) insn_needs.other.regs[0],
(char *) basic_needs, sizeof basic_needs);
bcopy ((char *) insn_needs.other.groups,
(char *) basic_groups, sizeof basic_groups);
for (i = 0; i < N_REG_CLASSES; i++)
{
enum reg_class *p;
if (basic_needs[i] >= 0)
for (p = reg_class_superclasses[i];
*p != LIM_REG_CLASSES; p++)
basic_needs[(int) *p] -= basic_needs[i];
if (basic_groups[i] >= 0)
for (p = reg_class_superclasses[i];
*p != LIM_REG_CLASSES; p++)
basic_groups[(int) *p] -= basic_groups[i];
}
/* Now count extra regs if there might be a conflict with
the return value register. */
for (r = regno; r < regno + nregs; r++)
if (spill_reg_order[r] >= 0)
for (i = 0; i < N_REG_CLASSES; i++)
if (TEST_HARD_REG_BIT (reg_class_contents[i], r))
{
if (basic_needs[i] > 0)
{
enum reg_class *p;
insn_needs.other.regs[0][i]++;
p = reg_class_superclasses[i];
while (*p != LIM_REG_CLASSES)
insn_needs.other.regs[0][(int) *p++]++;
}
if (basic_groups[i] > 0)
{
enum reg_class *p;
insn_needs.other.groups[i]++;
p = reg_class_superclasses[i];
while (*p != LIM_REG_CLASSES)
insn_needs.other.groups[(int) *p++]++;
}
}
}
/* For each class, collect maximum need of any insn. */
for (i = 0; i < N_REG_CLASSES; i++)
{
if (max_needs[i] < insn_needs.other.regs[0][i])
{
max_needs[i] = insn_needs.other.regs[0][i];
max_needs_insn[i] = insn;
}
if (max_groups[i] < insn_needs.other.groups[i])
{
max_groups[i] = insn_needs.other.groups[i];
max_groups_insn[i] = insn;
}
if (max_nongroups[i] < insn_needs.other.regs[1][i])
{
max_nongroups[i] = insn_needs.other.regs[1][i];
max_nongroups_insn[i] = insn;
}
}
}
/* Note that there is a continue statement above. */
}
/* If we allocated any new memory locations, make another pass
since it might have changed elimination offsets. */
if (starting_frame_size != get_frame_size ())
something_changed = 1;
if (dumpfile)
for (i = 0; i < N_REG_CLASSES; i++)
{
if (max_needs[i] > 0)
fprintf (dumpfile,
";; Need %d reg%s of class %s (for insn %d).\n",
max_needs[i], max_needs[i] == 1 ? "" : "s",
reg_class_names[i], INSN_UID (max_needs_insn[i]));
if (max_nongroups[i] > 0)
fprintf (dumpfile,
";; Need %d nongroup reg%s of class %s (for insn %d).\n",
max_nongroups[i], max_nongroups[i] == 1 ? "" : "s",
reg_class_names[i], INSN_UID (max_nongroups_insn[i]));
if (max_groups[i] > 0)
fprintf (dumpfile,
";; Need %d group%s (%smode) of class %s (for insn %d).\n",
max_groups[i], max_groups[i] == 1 ? "" : "s",
mode_name[(int) group_mode[i]],
reg_class_names[i], INSN_UID (max_groups_insn[i]));
}
/* If we have caller-saves, set up the save areas and see if caller-save
will need a spill register. */
if (caller_save_needed)
{
/* Set the offsets for setup_save_areas. */
for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
ep++)
ep->previous_offset = ep->max_offset;
if ( ! setup_save_areas (&something_changed)
&& caller_save_spill_class == NO_REGS)
{
/* The class we will need depends on whether the machine
supports the sum of two registers for an address; see
find_address_reloads for details. */
caller_save_spill_class
= double_reg_address_ok ? INDEX_REG_CLASS : BASE_REG_CLASS;
caller_save_group_size
= CLASS_MAX_NREGS (caller_save_spill_class, Pmode);
something_changed = 1;
}
}
/* See if anything that happened changes which eliminations are valid.
For example, on the Sparc, whether or not the frame pointer can
be eliminated can depend on what registers have been used. We need
not check some conditions again (such as flag_omit_frame_pointer)
since they can't have changed. */
for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
if ((ep->from == HARD_FRAME_POINTER_REGNUM && FRAME_POINTER_REQUIRED)
#ifdef ELIMINABLE_REGS
|| ! CAN_ELIMINATE (ep->from, ep->to)
#endif
)
ep->can_eliminate = 0;
/* Look for the case where we have discovered that we can't replace
register A with register B and that means that we will now be
trying to replace register A with register C. This means we can
no longer replace register C with register B and we need to disable
such an elimination, if it exists. This occurs often with A == ap,
B == sp, and C == fp. */
for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
{
struct elim_table *op;
register int new_to = -1;
if (! ep->can_eliminate && ep->can_eliminate_previous)
{
/* Find the current elimination for ep->from, if there is a
new one. */
for (op = reg_eliminate;
op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
if (op->from == ep->from && op->can_eliminate)
{
new_to = op->to;
break;
}
/* See if there is an elimination of NEW_TO -> EP->TO. If so,
disable it. */
for (op = reg_eliminate;
op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
if (op->from == new_to && op->to == ep->to)
op->can_eliminate = 0;
}
}
/* See if any registers that we thought we could eliminate the previous
time are no longer eliminable. If so, something has changed and we
must spill the register. Also, recompute the number of eliminable
registers and see if the frame pointer is needed; it is if there is
no elimination of the frame pointer that we can perform. */
frame_pointer_needed = 1;
for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
{
if (ep->can_eliminate && ep->from == FRAME_POINTER_REGNUM
&& ep->to != HARD_FRAME_POINTER_REGNUM)
frame_pointer_needed = 0;
if (! ep->can_eliminate && ep->can_eliminate_previous)
{ {
ep->can_eliminate_previous = 0; basic_block_needs[(int) caller_save_spill_class]
spill_hard_reg (ep->from, global, dumpfile, 1); [this_block] = 1;
something_changed = 1; something_changed = 1;
num_eliminable--;
} }
} }
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM /* If this insn stores the value of a function call,
/* If we didn't need a frame pointer last time, but we do now, spill and that value is in a register that has been spilled,
the hard frame pointer. */ and if the insn needs a reload in a class
if (frame_pointer_needed && ! previous_frame_pointer_needed) that might use that register as the reload register,
then add an extra need in that class.
This makes sure we have a register available that does
not overlap the return value. */
if (SMALL_REGISTER_CLASSES && avoid_return_reg)
{ {
spill_hard_reg (HARD_FRAME_POINTER_REGNUM, global, dumpfile, 1); int regno = REGNO (avoid_return_reg);
something_changed = 1; int nregs
} = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
#endif int r;
int basic_needs[N_REG_CLASSES], basic_groups[N_REG_CLASSES];
/* If all needs are met, we win. */ /* First compute the "basic needs", which counts a
need only in the smallest class in which it
is required. */
for (i = 0; i < N_REG_CLASSES; i++) bcopy ((char *) insn_needs.other.regs[0],
if (max_needs[i] > 0 || max_groups[i] > 0 || max_nongroups[i] > 0) (char *) basic_needs, sizeof basic_needs);
break; bcopy ((char *) insn_needs.other.groups,
if (i == N_REG_CLASSES && !new_basic_block_needs && ! something_changed) (char *) basic_groups, sizeof basic_groups);
break;
/* Not all needs are met; must spill some hard regs. */ for (i = 0; i < N_REG_CLASSES; i++)
{
enum reg_class *p;
/* Put all registers spilled so far back in potential_reload_regs, but if (basic_needs[i] >= 0)
put them at the front, since we've already spilled most of the for (p = reg_class_superclasses[i];
pseudos in them (we might have left some pseudos unspilled if they *p != LIM_REG_CLASSES; p++)
were in a block that didn't need any spill registers of a conflicting basic_needs[(int) *p] -= basic_needs[i];
class. We used to try to mark off the need for those registers,
but doing so properly is very complex and reallocating them is the
simpler approach. First, "pack" potential_reload_regs by pushing
any nonnegative entries towards the end. That will leave room
for the registers we already spilled.
Also, undo the marking of the spill registers from the last time if (basic_groups[i] >= 0)
around in FORBIDDEN_REGS since we will be probably be allocating for (p = reg_class_superclasses[i];
them again below. *p != LIM_REG_CLASSES; p++)
basic_groups[(int) *p] -= basic_groups[i];
}
??? It is theoretically possible that we might end up not using one /* Now count extra regs if there might be a conflict with
of our previously-spilled registers in this allocation, even though the return value register. */
they are at the head of the list. It's not clear what to do about
this, but it was no better before, when we marked off the needs met
by the previously-spilled registers. With the current code, globals
can be allocated into these registers, but locals cannot. */
if (n_spills) for (r = regno; r < regno + nregs; r++)
if (spill_reg_order[r] >= 0)
for (i = 0; i < N_REG_CLASSES; i++)
if (TEST_HARD_REG_BIT (reg_class_contents[i], r))
{ {
for (i = j = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--) if (basic_needs[i] > 0)
if (potential_reload_regs[i] != -1)
potential_reload_regs[j--] = potential_reload_regs[i];
for (i = 0; i < n_spills; i++)
{ {
potential_reload_regs[i] = spill_regs[i]; enum reg_class *p;
spill_reg_order[spill_regs[i]] = -1;
CLEAR_HARD_REG_BIT (forbidden_regs, spill_regs[i]);
}
n_spills = 0; insn_needs.other.regs[0][i]++;
p = reg_class_superclasses[i];
while (*p != LIM_REG_CLASSES)
insn_needs.other.regs[0][(int) *p++]++;
} }
if (basic_groups[i] > 0)
{
enum reg_class *p;
/* Now find more reload regs to satisfy the remaining need insn_needs.other.groups[i]++;
Do it by ascending class number, since otherwise a reg p = reg_class_superclasses[i];
might be spilled for a big class and might fail to count while (*p != LIM_REG_CLASSES)
for a smaller class even though it belongs to that class. insn_needs.other.groups[(int) *p++]++;
}
Count spilled regs in `spills', and add entries to }
`spill_regs' and `spill_reg_order'. }
??? Note there is a problem here.
When there is a need for a group in a high-numbered class,
and also need for non-group regs that come from a lower class,
the non-group regs are chosen first. If there aren't many regs,
they might leave no room for a group.
This was happening on the 386. To fix it, we added the code
that calls possible_group_p, so that the lower class won't
break up the last possible group.
Really fixing the problem would require changes above
in counting the regs already spilled, and in choose_reload_regs.
It might be hard to avoid introducing bugs there. */
CLEAR_HARD_REG_SET (counted_for_groups); /* For each class, collect maximum need of any insn. */
CLEAR_HARD_REG_SET (counted_for_nongroups);
for (class = 0; class < N_REG_CLASSES; class++) for (i = 0; i < N_REG_CLASSES; i++)
{ {
/* First get the groups of registers. if (max_needs[i] < insn_needs.other.regs[0][i])
If we got single registers first, we might fragment
possible groups. */
while (max_groups[class] > 0)
{ {
/* If any single spilled regs happen to form groups, max_needs[i] = insn_needs.other.regs[0][i];
count them now. Maybe we don't really need max_needs_insn[i] = insn;
to spill another group. */ }
count_possible_groups (group_size, group_mode, max_groups, if (max_groups[i] < insn_needs.other.groups[i])
class); {
max_groups[i] = insn_needs.other.groups[i];
max_groups_insn[i] = insn;
}
if (max_nongroups[i] < insn_needs.other.regs[1][i])
{
max_nongroups[i] = insn_needs.other.regs[1][i];
max_nongroups_insn[i] = insn;
}
}
return something_changed;
}
if (max_groups[class] <= 0) /* Find a group of exactly 2 registers.
break;
/* Groups of size 2 (the only groups used on most machines) First try to fill out the group by spilling a single register which
are treated specially. */ would allow completion of the group.
if (group_size[class] == 2)
{ Then try to create a new group from a pair of registers, neither of
which are explicitly used.
Then try to create a group from any pair of registers. */
static int
find_tworeg_group (global, class, dumpfile)
int global;
int class;
FILE *dumpfile;
{
int i;
/* First, look for a register that will complete a group. */ /* First, look for a register that will complete a group. */
for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
{ {
int other; int j, other;
j = potential_reload_regs[i]; j = potential_reload_regs[i];
if (j >= 0 && ! TEST_HARD_REG_BIT (bad_spill_regs, j) if (j >= 0 && ! TEST_HARD_REG_BIT (bad_spill_regs, j)
&& && ((j > 0 && (other = j - 1, spill_reg_order[other] >= 0)
((j > 0 && (other = j - 1, spill_reg_order[other] >= 0)
&& TEST_HARD_REG_BIT (reg_class_contents[class], j) && TEST_HARD_REG_BIT (reg_class_contents[class], j)
&& TEST_HARD_REG_BIT (reg_class_contents[class], other) && TEST_HARD_REG_BIT (reg_class_contents[class], other)
&& HARD_REGNO_MODE_OK (other, group_mode[class]) && HARD_REGNO_MODE_OK (other, group_mode[class])
&& ! TEST_HARD_REG_BIT (counted_for_nongroups, && ! TEST_HARD_REG_BIT (counted_for_nongroups, other)
other)
/* We don't want one part of another group. /* We don't want one part of another group.
We could get "two groups" that overlap! */ We could get "two groups" that overlap! */
&& ! TEST_HARD_REG_BIT (counted_for_groups, other)) && ! TEST_HARD_REG_BIT (counted_for_groups, other))
|| || (j < FIRST_PSEUDO_REGISTER - 1
(j < FIRST_PSEUDO_REGISTER - 1
&& (other = j + 1, spill_reg_order[other] >= 0) && (other = j + 1, spill_reg_order[other] >= 0)
&& TEST_HARD_REG_BIT (reg_class_contents[class], j) && TEST_HARD_REG_BIT (reg_class_contents[class], j)
&& TEST_HARD_REG_BIT (reg_class_contents[class], other) && TEST_HARD_REG_BIT (reg_class_contents[class], other)
&& HARD_REGNO_MODE_OK (j, group_mode[class]) && HARD_REGNO_MODE_OK (j, group_mode[class])
&& ! TEST_HARD_REG_BIT (counted_for_nongroups, && ! TEST_HARD_REG_BIT (counted_for_nongroups, other)
other) && ! TEST_HARD_REG_BIT (counted_for_groups, other))))
&& ! TEST_HARD_REG_BIT (counted_for_groups,
other))))
{ {
register enum reg_class *p; register enum reg_class *p;
...@@ -1805,7 +1997,7 @@ reload (first, global, dumpfile) ...@@ -1805,7 +1997,7 @@ reload (first, global, dumpfile)
if (SMALL_REGISTER_CLASSES && i == FIRST_PSEUDO_REGISTER) if (SMALL_REGISTER_CLASSES && i == FIRST_PSEUDO_REGISTER)
for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
{ {
int k; int j, k;
j = potential_reload_regs[i]; j = potential_reload_regs[i];
/* Verify that J+1 is a potential reload reg. */ /* Verify that J+1 is a potential reload reg. */
for (k = 0; k < FIRST_PSEUDO_REGISTER; k++) for (k = 0; k < FIRST_PSEUDO_REGISTER; k++)
...@@ -1830,7 +2022,7 @@ reload (first, global, dumpfile) ...@@ -1830,7 +2022,7 @@ reload (first, global, dumpfile)
if (i == FIRST_PSEUDO_REGISTER) if (i == FIRST_PSEUDO_REGISTER)
for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
{ {
int k; int j, k;
j = potential_reload_regs[i]; j = potential_reload_regs[i];
/* Verify that J+1 is a potential reload reg. */ /* Verify that J+1 is a potential reload reg. */
for (k = 0; k < FIRST_PSEUDO_REGISTER; k++) for (k = 0; k < FIRST_PSEUDO_REGISTER; k++)
...@@ -1842,8 +2034,7 @@ reload (first, global, dumpfile) ...@@ -1842,8 +2034,7 @@ reload (first, global, dumpfile)
&& TEST_HARD_REG_BIT (reg_class_contents[class], j) && TEST_HARD_REG_BIT (reg_class_contents[class], j)
&& TEST_HARD_REG_BIT (reg_class_contents[class], j + 1) && TEST_HARD_REG_BIT (reg_class_contents[class], j + 1)
&& HARD_REGNO_MODE_OK (j, group_mode[class]) && HARD_REGNO_MODE_OK (j, group_mode[class])
&& ! TEST_HARD_REG_BIT (counted_for_nongroups, && ! TEST_HARD_REG_BIT (counted_for_nongroups, j + 1)
j + 1)
&& ! TEST_HARD_REG_BIT (bad_spill_regs, j + 1)) && ! TEST_HARD_REG_BIT (bad_spill_regs, j + 1))
break; break;
} }
...@@ -1851,26 +2042,31 @@ reload (first, global, dumpfile) ...@@ -1851,26 +2042,31 @@ reload (first, global, dumpfile)
/* I should be the index in potential_reload_regs /* I should be the index in potential_reload_regs
of the new reload reg we have found. */ of the new reload reg we have found. */
if (i >= FIRST_PSEUDO_REGISTER) if (i < FIRST_PSEUDO_REGISTER)
{ return new_spill_reg (i, class, max_needs, NULL_PTR,
global, dumpfile);
/* There are no groups left to spill. */ /* There are no groups left to spill. */
spill_failure (max_groups_insn[class]); spill_failure (max_groups_insn[class]);
failure = 1; failure = 1;
goto failed; return 1;
} }
else
something_changed /* Find a group of more than 2 registers.
|= new_spill_reg (i, class, max_needs, NULL_PTR, Look for a sufficient sequence of unspilled registers, and spill them all
global, dumpfile); at once. */
} static int
else find_group (global, class, dumpfile)
{ int global;
/* For groups of more than 2 registers, int class;
look for a sufficient sequence of unspilled registers, FILE *dumpfile;
and spill them all at once. */ {
int something_changed = 0;
int i;
for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
{ {
int k; int j, k;
j = potential_reload_regs[i]; j = potential_reload_regs[i];
if (j >= 0 if (j >= 0
...@@ -1886,72 +2082,125 @@ reload (first, global, dumpfile) ...@@ -1886,72 +2082,125 @@ reload (first, global, dumpfile)
/* We got a full sequence, so spill them all. */ /* We got a full sequence, so spill them all. */
if (k == group_size[class]) if (k == group_size[class])
{ {
register enum reg_class *p; register enum reg_class *p;
for (k = 0; k < group_size[class]; k++) for (k = 0; k < group_size[class]; k++)
{
int idx;
SET_HARD_REG_BIT (counted_for_groups, j + k);
for (idx = 0; idx < FIRST_PSEUDO_REGISTER; idx++)
if (potential_reload_regs[idx] == j + k)
break;
something_changed |= new_spill_reg (idx, class, max_needs,
NULL_PTR, global,
dumpfile);
}
/* We have found one that will complete a group,
so count off one group as provided. */
max_groups[class]--;
p = reg_class_superclasses[class];
while (*p != LIM_REG_CLASSES)
{
if (group_size [(int) *p]
<= group_size [class])
max_groups[(int) *p]--;
p++;
}
return something_changed;
}
}
}
/* There are no groups left. */
spill_failure (max_groups_insn[class]);
failure = 1;
return 1;
}
/* Find more reload regs to satisfy the remaining need.
Do it by ascending class number, since otherwise a reg
might be spilled for a big class and might fail to count
for a smaller class even though it belongs to that class.
Count spilled regs in `spills', and add entries to
`spill_regs' and `spill_reg_order'.
??? Note there is a problem here.
When there is a need for a group in a high-numbered class,
and also need for non-group regs that come from a lower class,
the non-group regs are chosen first. If there aren't many regs,
they might leave no room for a group.
This was happening on the 386. To fix it, we added the code
that calls possible_group_p, so that the lower class won't
break up the last possible group.
Really fixing the problem would require changes above
in counting the regs already spilled, and in choose_reload_regs.
It might be hard to avoid introducing bugs there. */
static int
find_reload_regs (global, dumpfile)
int global;
FILE *dumpfile;
{
int class;
int something_changed = 0;
CLEAR_HARD_REG_SET (counted_for_groups);
CLEAR_HARD_REG_SET (counted_for_nongroups);
for (class = 0; class < N_REG_CLASSES; class++)
{
/* First get the groups of registers.
If we got single registers first, we might fragment
possible groups. */
while (max_groups[class] > 0)
{ {
int idx; /* If any single spilled regs happen to form groups,
SET_HARD_REG_BIT (counted_for_groups, j + k); count them now. Maybe we don't really need
for (idx = 0; idx < FIRST_PSEUDO_REGISTER; idx++) to spill another group. */
if (potential_reload_regs[idx] == j + k) count_possible_groups (group_size, group_mode, max_groups, class);
break;
something_changed
|= new_spill_reg (idx, class,
max_needs, NULL_PTR,
global, dumpfile);
}
/* We have found one that will complete a group, if (max_groups[class] <= 0)
so count off one group as provided. */
max_groups[class]--;
p = reg_class_superclasses[class];
while (*p != LIM_REG_CLASSES)
{
if (group_size [(int) *p]
<= group_size [class])
max_groups[(int) *p]--;
p++;
}
break; break;
}
} /* Groups of size 2 (the only groups used on most machines)
} are treated specially. */
/* We couldn't find any registers for this reload. if (group_size[class] == 2)
Avoid going into an infinite loop. */ something_changed |= find_tworeg_group (global, class, dumpfile);
if (i >= FIRST_PSEUDO_REGISTER) else
{ something_changed |= find_group (global, class, dumpfile);
/* There are no groups left. */
spill_failure (max_groups_insn[class]); if (failure)
failure = 1; return 1;
goto failed;
}
}
} }
/* Now similarly satisfy all need for single registers. */ /* Now similarly satisfy all need for single registers. */
while (max_needs[class] > 0 || max_nongroups[class] > 0) while (max_needs[class] > 0 || max_nongroups[class] > 0)
{ {
int i;
/* If we spilled enough regs, but they weren't counted /* If we spilled enough regs, but they weren't counted
against the non-group need, see if we can count them now. against the non-group need, see if we can count them now.
If so, we can avoid some actual spilling. */ If so, we can avoid some actual spilling. */
if (max_needs[class] <= 0 && max_nongroups[class] > 0) if (max_needs[class] <= 0 && max_nongroups[class] > 0)
for (i = 0; i < n_spills; i++) for (i = 0; i < n_spills; i++)
if (TEST_HARD_REG_BIT (reg_class_contents[class], {
spill_regs[i]) int regno = spill_regs[i];
&& !TEST_HARD_REG_BIT (counted_for_groups, if (TEST_HARD_REG_BIT (reg_class_contents[class], regno)
spill_regs[i]) && !TEST_HARD_REG_BIT (counted_for_groups, regno)
&& !TEST_HARD_REG_BIT (counted_for_nongroups, && !TEST_HARD_REG_BIT (counted_for_nongroups, regno)
spill_regs[i])
&& max_nongroups[class] > 0) && max_nongroups[class] > 0)
{ {
register enum reg_class *p; register enum reg_class *p;
SET_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]); SET_HARD_REG_BIT (counted_for_nongroups, regno);
max_nongroups[class]--; max_nongroups[class]--;
p = reg_class_superclasses[class]; p = reg_class_superclasses[class];
while (*p != LIM_REG_CLASSES) while (*p != LIM_REG_CLASSES)
max_nongroups[(int) *p++]--; max_nongroups[(int) *p++]--;
} }
}
if (max_needs[class] <= 0 && max_nongroups[class] <= 0) if (max_needs[class] <= 0 && max_nongroups[class] <= 0)
break; break;
...@@ -1960,9 +2209,10 @@ reload (first, global, dumpfile) ...@@ -1960,9 +2209,10 @@ reload (first, global, dumpfile)
Find the most preferred one that's in this class. */ Find the most preferred one that's in this class. */
for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
if (potential_reload_regs[i] >= 0 {
&& TEST_HARD_REG_BIT (reg_class_contents[class], int regno = potential_reload_regs[i];
potential_reload_regs[i]) if (regno >= 0
&& TEST_HARD_REG_BIT (reg_class_contents[class], regno)
/* If this reg will not be available for groups, /* If this reg will not be available for groups,
pick one that does not foreclose possible groups. pick one that does not foreclose possible groups.
This is a kludge, and not very general, This is a kludge, and not very general,
...@@ -1970,8 +2220,9 @@ reload (first, global, dumpfile) ...@@ -1970,8 +2220,9 @@ reload (first, global, dumpfile)
and the problem should not occur on machines with and the problem should not occur on machines with
more registers. */ more registers. */
&& (max_nongroups[class] == 0 && (max_nongroups[class] == 0
|| possible_group_p (potential_reload_regs[i], max_groups))) || possible_group_p (regno, max_groups)))
break; break;
}
/* If we couldn't get a register, try to get one even if we /* If we couldn't get a register, try to get one even if we
might foreclose possible groups. This may cause problems might foreclose possible groups. This may cause problems
...@@ -1999,203 +2250,18 @@ reload (first, global, dumpfile) ...@@ -1999,203 +2250,18 @@ reload (first, global, dumpfile)
spill_failure (max_needs[class] > 0 ? max_needs_insn[class] spill_failure (max_needs[class] > 0 ? max_needs_insn[class]
: max_nongroups_insn[class]); : max_nongroups_insn[class]);
failure = 1; failure = 1;
goto failed; return 1;
}
else
something_changed
|= new_spill_reg (i, class, max_needs, max_nongroups,
global, dumpfile);
}
}
} }
/* If global-alloc was run, notify it of any register eliminations we have
done. */
if (global)
for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
if (ep->can_eliminate)
mark_elimination (ep->from, ep->to);
/* Insert code to save and restore call-clobbered hard regs
around calls. Tell if what mode to use so that we will process
those insns in reload_as_needed if we have to. */
if (caller_save_needed)
save_call_clobbered_regs (num_eliminable ? QImode
: caller_save_spill_class != NO_REGS ? HImode
: VOIDmode);
/* If a pseudo has no hard reg, delete the insns that made the equivalence.
If that insn didn't set the register (i.e., it copied the register to
memory), just delete that insn instead of the equivalencing insn plus
anything now dead. If we call delete_dead_insn on that insn, we may
delete the insn that actually sets the register if the register die
there and that is incorrect. */
for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
if (reg_renumber[i] < 0 && reg_equiv_init[i] != 0
&& GET_CODE (reg_equiv_init[i]) != NOTE)
{
if (reg_set_p (regno_reg_rtx[i], PATTERN (reg_equiv_init[i])))
delete_dead_insn (reg_equiv_init[i]);
else else
{ something_changed |= new_spill_reg (i, class, max_needs,
PUT_CODE (reg_equiv_init[i], NOTE); max_nongroups, global,
NOTE_SOURCE_FILE (reg_equiv_init[i]) = 0; dumpfile);
NOTE_LINE_NUMBER (reg_equiv_init[i]) = NOTE_INSN_DELETED;
}
}
/* Use the reload registers where necessary
by generating move instructions to move the must-be-register
values into or out of the reload registers. */
if (something_needs_reloads || something_needs_elimination
|| (caller_save_needed && num_eliminable)
|| caller_save_spill_class != NO_REGS)
reload_as_needed (first, global);
/* If we were able to eliminate the frame pointer, show that it is no
longer live at the start of any basic block. If it ls live by
virtue of being in a pseudo, that pseudo will be marked live
and hence the frame pointer will be known to be live via that
pseudo. */
if (! frame_pointer_needed)
for (i = 0; i < n_basic_blocks; i++)
CLEAR_REGNO_REG_SET (basic_block_live_at_start[i],
HARD_FRAME_POINTER_REGNUM);
/* Come here (with failure set nonzero) if we can't get enough spill regs
and we decide not to abort about it. */
failed:
reload_in_progress = 0;
/* Now eliminate all pseudo regs by modifying them into
their equivalent memory references.
The REG-rtx's for the pseudos are modified in place,
so all insns that used to refer to them now refer to memory.
For a reg that has a reg_equiv_address, all those insns
were changed by reloading so that no insns refer to it any longer;
but the DECL_RTL of a variable decl may refer to it,
and if so this causes the debugging info to mention the variable. */
for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
{
rtx addr = 0;
int in_struct = 0;
int is_readonly = 0;
if (reg_equiv_memory_loc[i])
{
in_struct = MEM_IN_STRUCT_P (reg_equiv_memory_loc[i]);
is_readonly = RTX_UNCHANGING_P (reg_equiv_memory_loc[i]);
}
if (reg_equiv_mem[i])
addr = XEXP (reg_equiv_mem[i], 0);
if (reg_equiv_address[i])
addr = reg_equiv_address[i];
if (addr)
{
if (reg_renumber[i] < 0)
{
rtx reg = regno_reg_rtx[i];
XEXP (reg, 0) = addr;
REG_USERVAR_P (reg) = 0;
RTX_UNCHANGING_P (reg) = is_readonly;
MEM_IN_STRUCT_P (reg) = in_struct;
/* We have no alias information about this newly created
MEM. */
MEM_ALIAS_SET (reg) = 0;
PUT_CODE (reg, MEM);
}
else if (reg_equiv_mem[i])
XEXP (reg_equiv_mem[i], 0) = addr;
}
}
/* Make a pass over all the insns and delete all USEs which we inserted
only to tag a REG_EQUAL note on them; if PRESERVE_DEATH_INFO_REGNO_P
is defined, also remove death notes for things that are no longer
registers or no longer die in the insn (e.g., an input and output
pseudo being tied). */
for (insn = first; insn; insn = NEXT_INSN (insn))
if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
{
#ifdef PRESERVE_DEATH_INFO_REGNO_P
rtx note, next;
#endif
if (GET_CODE (PATTERN (insn)) == USE
&& find_reg_note (insn, REG_EQUAL, NULL_RTX))
{
PUT_CODE (insn, NOTE);
NOTE_SOURCE_FILE (insn) = 0;
NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
continue;
}
#ifdef PRESERVE_DEATH_INFO_REGNO_P
for (note = REG_NOTES (insn); note; note = next)
{
next = XEXP (note, 1);
if (REG_NOTE_KIND (note) == REG_DEAD
&& (GET_CODE (XEXP (note, 0)) != REG
|| reg_set_p (XEXP (note, 0), PATTERN (insn))))
remove_note (insn, note);
}
#endif
} }
/* If we are doing stack checking, give a warning if this function's
frame size is larger than we expect. */
if (flag_stack_check && ! STACK_CHECK_BUILTIN)
{
HOST_WIDE_INT size = get_frame_size () + STACK_CHECK_FIXED_FRAME_SIZE;
for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
if (regs_ever_live[i] && ! fixed_regs[i] && call_used_regs[i])
size += UNITS_PER_WORD;
if (size > STACK_CHECK_MAX_FRAME_SIZE)
warning ("frame size too large for reliable stack checking");
} }
return something_changed;
/* Indicate that we no longer have known memory locations or constants. */
reg_equiv_constant = 0;
reg_equiv_memory_loc = 0;
if (real_known_ptr)
free (real_known_ptr);
if (real_at_ptr)
free (real_at_ptr);
if (scratch_list)
free (scratch_list);
scratch_list = 0;
if (scratch_block)
free (scratch_block);
scratch_block = 0;
free (reg_equiv_constant);
free (reg_equiv_memory_loc);
free (reg_equiv_mem);
free (reg_equiv_init);
free (reg_equiv_address);
free (reg_max_ref_width);
CLEAR_HARD_REG_SET (used_spill_regs);
for (i = 0; i < n_spills; i++)
SET_HARD_REG_BIT (used_spill_regs, spill_regs[i]);
return failure;
} }
/* Nonzero if, after spilling reg REGNO for non-groups, /* Nonzero if, after spilling reg REGNO for non-groups,
it will still be possible to find a group if we still need one. */ it will still be possible to find a group if we still need one. */
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment