Commit 165ccc54 by Trevor Saunders

reorg.c: use vec<rtx_insn *> instead of rtx_insn_list for the delay insn list

gcc/ChangeLog:

2015-10-06  Trevor Saunders  <tbsaunde+gcc@tbsaunde.org>

	* reorg.c (emit_delay_sequence): Store list of delay slot insns
	in a vector instead of rtx_insn_list.
	(add_to_delay_list): Likewise.
	(delete_from_delay_slot): Likewise.
	(optimize_skip): Likewise.
	(redirect_with_delay_list_safe_p): Likewise.
	(check_annul_list_true_false): Likewise.
	(steal_delay_list_from_target): Likewise.
	(steal_delay_list_from_fallthrough): Likewise.
	(redundant_insn): Likewise.
	(fill_simple_delay_slots): Likewise.
	(fill_slots_from_thread): Likewise.
	(fill_eager_delay_slots): Likewise.
	(relax_delay_slots): Likewise.

From-SVN: r228558
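
The entire patch is one mechanical transformation: the chain of delay-slot insns that reorg threads through these routines moves from GCC's singly linked rtx_insn_list (one INSN_LIST node per element) to a vec<rtx_insn *>, typically a stack-allocated auto_vec passed by pointer where a function appends and by const reference where it only reads. As a rough standalone illustration of why this simplifies the code (std::vector stands in for GCC's vec here, and the names are invented for the sketch, not taken from the patch):

    #include <vector>

    struct insn { insn *next = nullptr; };  // stand-in for rtx_insn / INSN_LIST

    // Before: appending walks the whole chain, and the possibly-new head
    // must be returned and reassigned at every call site.
    insn *add_to_list (insn *i, insn *list)
    {
      if (!list)
        return i;
      insn *p = list;
      while (p->next)
        p = p->next;
      p->next = i;
      return list;
    }

    // After: the caller owns the vector, the append is amortized O(1),
    // and there is nothing to return.
    void add_to_list (insn *i, std::vector<insn *> *list)
    {
      list->push_back (i);
    }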
@@ -213,38 +213,38 @@ static int resource_conflicts_p (struct resources *, struct resources *);
 static int insn_references_resource_p (rtx, struct resources *, bool);
 static int insn_sets_resource_p (rtx, struct resources *, bool);
 static rtx_code_label *find_end_label (rtx);
-static rtx_insn *emit_delay_sequence (rtx_insn *, rtx_insn_list *, int);
-static rtx_insn_list *add_to_delay_list (rtx_insn *, rtx_insn_list *);
+static rtx_insn *emit_delay_sequence (rtx_insn *, const vec<rtx_insn *> &,
+                                      int);
+static void add_to_delay_list (rtx_insn *, vec<rtx_insn *> *);
 static rtx_insn *delete_from_delay_slot (rtx_insn *);
 static void delete_scheduled_jump (rtx_insn *);
 static void note_delay_statistics (int, int);
 #if defined(ANNUL_IFFALSE_SLOTS) || defined(ANNUL_IFTRUE_SLOTS)
-static rtx_insn_list *optimize_skip (rtx_jump_insn *);
+static void optimize_skip (rtx_jump_insn *, vec<rtx_insn *> *);
 #endif
 static int get_jump_flags (const rtx_insn *, rtx);
 static int mostly_true_jump (rtx);
 static rtx get_branch_condition (const rtx_insn *, rtx);
 static int condition_dominates_p (rtx, const rtx_insn *);
 static int redirect_with_delay_slots_safe_p (rtx_insn *, rtx, rtx);
-static int redirect_with_delay_list_safe_p (rtx_insn *, rtx, rtx_insn_list *);
-static int check_annul_list_true_false (int, rtx);
-static rtx_insn_list *steal_delay_list_from_target (rtx_insn *, rtx,
-                                                    rtx_sequence *,
-                                                    rtx_insn_list *,
-                                                    struct resources *,
-                                                    struct resources *,
-                                                    struct resources *,
-                                                    int, int *, int *,
-                                                    rtx *);
-static rtx_insn_list *steal_delay_list_from_fallthrough (rtx_insn *, rtx,
-                                                         rtx_sequence *,
-                                                         rtx_insn_list *,
-                                                         struct resources *,
-                                                         struct resources *,
-                                                         struct resources *,
-                                                         int, int *, int *);
+static int redirect_with_delay_list_safe_p (rtx_insn *, rtx,
+                                            const vec<rtx_insn *> &);
+static int check_annul_list_true_false (int, const vec<rtx_insn *> &);
+static void steal_delay_list_from_target (rtx_insn *, rtx, rtx_sequence *,
+                                          vec<rtx_insn *> *,
+                                          struct resources *,
+                                          struct resources *,
+                                          struct resources *,
+                                          int, int *, int *,
+                                          rtx *);
+static void steal_delay_list_from_fallthrough (rtx_insn *, rtx, rtx_sequence *,
+                                               vec<rtx_insn *> *,
+                                               struct resources *,
+                                               struct resources *,
+                                               struct resources *,
+                                               int, int *, int *);
 static void try_merge_delay_insns (rtx_insn *, rtx_insn *);
-static rtx redundant_insn (rtx, rtx_insn *, rtx);
+static rtx redundant_insn (rtx, rtx_insn *, const vec<rtx_insn *> &);
 static int own_thread_p (rtx, rtx, int);
 static void update_block (rtx_insn *, rtx);
 static int reorg_redirect_jump (rtx_jump_insn *, rtx);
@@ -252,9 +252,9 @@ static void update_reg_dead_notes (rtx_insn *, rtx_insn *);
 static void fix_reg_dead_note (rtx, rtx);
 static void update_reg_unused_notes (rtx, rtx);
 static void fill_simple_delay_slots (int);
-static rtx_insn_list *fill_slots_from_thread (rtx_jump_insn *, rtx, rtx, rtx,
-                                              int, int, int, int,
-                                              int *, rtx_insn_list *);
+static void fill_slots_from_thread (rtx_jump_insn *, rtx, rtx, rtx,
+                                    int, int, int, int,
+                                    int *, vec<rtx_insn *> *);
 static void fill_eager_delay_slots (void);
 static void relax_delay_slots (rtx_insn *);
 static void make_return_insns (rtx_insn *);
@@ -504,7 +504,7 @@ find_end_label (rtx kind)
    Returns the insn containing the SEQUENCE that replaces INSN.  */
 
 static rtx_insn *
-emit_delay_sequence (rtx_insn *insn, rtx_insn_list *list, int length)
+emit_delay_sequence (rtx_insn *insn, const vec<rtx_insn *> &list, int length)
 {
   /* Allocate the rtvec to hold the insns and the SEQUENCE.  */
   rtvec seqv = rtvec_alloc (length + 1);
@@ -523,12 +523,14 @@ emit_delay_sequence (rtx_insn *insn, rtx_insn_list *list, int length)
   SET_NEXT_INSN (insn) = SET_PREV_INSN (insn) = NULL;
 
   /* Build our SEQUENCE and rebuild the insn chain.  */
-  int i = 1;
   start_sequence ();
   XVECEXP (seq, 0, 0) = emit_insn (insn);
-  for (rtx_insn_list *li = list; li; li = li->next (), i++)
+
+  unsigned int delay_insns = list.length ();
+  gcc_assert (delay_insns == (unsigned int) length);
+  for (unsigned int i = 0; i < delay_insns; i++)
     {
-      rtx_insn *tem = li->insn ();
+      rtx_insn *tem = list[i];
       rtx note, next;
 
       /* Show that this copy of the insn isn't deleted.  */
@@ -537,7 +539,7 @@ emit_delay_sequence (rtx_insn *insn, rtx_insn_list *list, int length)
       /* Unlink insn from its original place, and re-emit it into
          the sequence.  */
       SET_NEXT_INSN (tem) = SET_PREV_INSN (tem) = NULL;
-      XVECEXP (seq, 0, i) = emit_insn (tem);
+      XVECEXP (seq, 0, i + 1) = emit_insn (tem);
 
       /* SPARC assembler, for instance, emit warning when debug info is output
          into the delay slot.  */
@@ -569,7 +571,6 @@ emit_delay_sequence (rtx_insn *insn, rtx_insn_list *list, int length)
         }
     }
   end_sequence ();
-  gcc_assert (i == length + 1);
 
   /* Splice our SEQUENCE into the insn stream where INSN used to be.  */
   add_insn_after (seq_insn, after, NULL);
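
Note the index shift above: element 0 of the SEQUENCE holds the branch insn itself and elements 1..LENGTH hold the delay insns. The old loop started its counter at 1 alongside the list walk and verified i == length + 1 afterwards; the new loop indexes the vector from 0, stores into slot i + 1, and asserts the length up front, which checks the same invariant earlier:

    XVECEXP (seq, 0, 0)     = emit_insn (insn);     /* the branch itself */
    XVECEXP (seq, 0, i + 1) = emit_insn (list[i]);  /* i-th delay insn */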
@@ -580,24 +581,13 @@ emit_delay_sequence (rtx_insn *insn, rtx_insn_list *list, int length)
 /* Add INSN to DELAY_LIST and return the head of the new list.  The list must
    be in the order in which the insns are to be executed.  */
 
-static rtx_insn_list *
-add_to_delay_list (rtx_insn *insn, rtx_insn_list *delay_list)
+static void
+add_to_delay_list (rtx_insn *insn, vec<rtx_insn *> *delay_list)
 {
-  /* If we have an empty list, just make a new list element.  If
-     INSN has its block number recorded, clear it since we may
+  /* If INSN has its block number recorded, clear it since we may
      be moving the insn to a new block.  */
-  if (delay_list == 0)
-    {
-      clear_hashed_info_for_insn (insn);
-      return gen_rtx_INSN_LIST (VOIDmode, insn, NULL_RTX);
-    }
-
-  /* Otherwise this must be an INSN_LIST.  Add INSN to the end of the
-     list.  */
-  XEXP (delay_list, 1) = add_to_delay_list (insn, delay_list->next ());
-
-  return delay_list;
+  clear_hashed_info_for_insn (insn);
+  delay_list->safe_push (insn);
 }
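
This hunk is the heart of the cleanup. The old add_to_delay_list recursed to the tail of the INSN_LIST, so growing a delay list meant re-walking it on every append, and every caller had to write delay_list = add_to_delay_list (...) to capture a possibly new head. With the vector, safe_push appends in amortized constant time, and call sites throughout the rest of the patch collapse to the pattern:

    /* delay_list is now an auto_vec<rtx_insn *, 5> owned by the caller.  */
    add_to_delay_list (trial, &delay_list);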
 /* Delete INSN from the delay slot of the insn that it is in, which may
@@ -608,7 +598,6 @@ delete_from_delay_slot (rtx_insn *insn)
 {
   rtx_insn *trial, *seq_insn, *prev;
   rtx_sequence *seq;
-  rtx_insn_list *delay_list = 0;
   int i;
   int had_barrier = 0;
 
@@ -629,10 +618,11 @@ delete_from_delay_slot (rtx_insn *insn)
 
   /* Create a delay list consisting of all the insns other than the one
      we are deleting (unless we were the only one).  */
+  auto_vec<rtx_insn *, 5> delay_list;
   if (seq->len () > 2)
     for (i = 1; i < seq->len (); i++)
       if (seq->insn (i) != insn)
-        delay_list = add_to_delay_list (seq->insn (i), delay_list);
+        add_to_delay_list (seq->insn (i), &delay_list);
 
   /* Delete the old SEQUENCE, re-emit the insn that used to have the delay
      list, and rebuild the delay list if non-empty.  */
@@ -647,7 +637,7 @@ delete_from_delay_slot (rtx_insn *insn)
 
   /* If there are any delay insns, remit them.  Otherwise clear the
      annul flag.  */
-  if (delay_list)
+  if (!delay_list.is_empty ())
     trial = emit_delay_sequence (trial, delay_list, XVECLEN (seq, 0) - 2);
   else if (JUMP_P (trial))
     INSN_ANNULLED_BRANCH_P (trial) = 0;
@@ -761,12 +751,11 @@ note_delay_statistics (int slots_filled, int index)
    This should be expanded to skip over N insns, where N is the number
    of delay slots required.  */
 
-static rtx_insn_list *
-optimize_skip (rtx_jump_insn *insn)
+static void
+optimize_skip (rtx_jump_insn *insn, vec<rtx_insn *> *delay_list)
 {
   rtx_insn *trial = next_nonnote_insn (insn);
   rtx_insn *next_trial = next_active_insn (trial);
-  rtx_insn_list *delay_list = 0;
   int flags;
 
   flags = get_jump_flags (insn, JUMP_LABEL (insn));
@@ -778,7 +767,7 @@ optimize_skip (rtx_jump_insn *insn)
       || (! eligible_for_annul_false (insn, 0, trial, flags)
           && ! eligible_for_annul_true (insn, 0, trial, flags))
       || can_throw_internal (trial))
-    return 0;
+    return;
 
   /* There are two cases where we are just executing one insn (we assume
      here that a branch requires only one insn; this should be generalized
@@ -796,10 +785,10 @@ optimize_skip (rtx_jump_insn *insn)
           if (invert_jump (insn, JUMP_LABEL (insn), 1))
             INSN_FROM_TARGET_P (trial) = 1;
           else if (! eligible_for_annul_true (insn, 0, trial, flags))
-            return 0;
+            return;
         }
 
-      delay_list = add_to_delay_list (trial, NULL);
+      add_to_delay_list (trial, delay_list);
       next_trial = next_active_insn (trial);
       update_block (trial, trial);
       delete_related_insns (trial);
@@ -828,8 +817,6 @@ optimize_skip (rtx_jump_insn *insn)
           INSN_ANNULLED_BRANCH_P (insn) = 1;
         }
     }
-
-  return delay_list;
 }
 #endif
@@ -1007,32 +994,31 @@ redirect_with_delay_slots_safe_p (rtx_insn *jump, rtx newlabel, rtx seq)
 
 static int
 redirect_with_delay_list_safe_p (rtx_insn *jump, rtx newlabel,
-                                 rtx_insn_list *delay_list)
+                                 const vec<rtx_insn *> &delay_list)
 {
-  int flags, i;
-  rtx_insn_list *li;
-
   /* Make sure all the insns in DELAY_LIST would still be
      valid after threading the jump.  If they are still
      valid, then return nonzero.  */
 
-  flags = get_jump_flags (jump, newlabel);
-  for (li = delay_list, i = 0; li; li = li->next (), i++)
+  int flags = get_jump_flags (jump, newlabel);
+  unsigned int delay_insns = delay_list.length ();
+  unsigned int i = 0;
+  for (; i < delay_insns; i++)
     if (! (
 #ifdef ANNUL_IFFALSE_SLOTS
            (INSN_ANNULLED_BRANCH_P (jump)
-            && INSN_FROM_TARGET_P (li->insn ()))
-           ? eligible_for_annul_false (jump, i, li->insn (), flags) :
+            && INSN_FROM_TARGET_P (delay_list[i]))
+           ? eligible_for_annul_false (jump, i, delay_list[i], flags) :
 #endif
 #ifdef ANNUL_IFTRUE_SLOTS
           (INSN_ANNULLED_BRANCH_P (jump)
-           && ! INSN_FROM_TARGET_P (XEXP (li, 0)))
-          ? eligible_for_annul_true (jump, i, li->insn (), flags) :
+           && ! INSN_FROM_TARGET_P (delay_list[i]))
+          ? eligible_for_annul_true (jump, i, delay_list[i], flags) :
 #endif
-          eligible_for_delay (jump, i, li->insn (), flags)))
+          eligible_for_delay (jump, i, delay_list[i], flags)))
       break;
 
-  return (li == NULL);
+  return i == delay_insns;
 }
 /* DELAY_LIST is a list of insns that have already been placed into delay
@@ -1040,21 +1026,15 @@ redirect_with_delay_list_safe_p (rtx_insn *jump, rtx newlabel,
    If not, return 0; otherwise return 1.  */
 
 static int
-check_annul_list_true_false (int annul_true_p, rtx delay_list)
+check_annul_list_true_false (int annul_true_p,
+                             const vec<rtx_insn *> &delay_list)
 {
-  rtx temp;
+  rtx_insn *trial;
+  unsigned int i;
 
-  if (delay_list)
-    {
-      for (temp = delay_list; temp; temp = XEXP (temp, 1))
-        {
-          rtx trial = XEXP (temp, 0);
-
-          if ((annul_true_p && INSN_FROM_TARGET_P (trial))
-              || (!annul_true_p && !INSN_FROM_TARGET_P (trial)))
-            return 0;
-        }
-    }
+  FOR_EACH_VEC_ELT (delay_list, i, trial)
+    if ((annul_true_p && INSN_FROM_TARGET_P (trial))
+        || (!annul_true_p && !INSN_FROM_TARGET_P (trial)))
+      return 0;
 
   return 1;
 }
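
FOR_EACH_VEC_ELT is the stock iteration macro from GCC's vec.h, binding the index and the element on each pass; it expands to roughly the following (quoted from memory of vec.h, so treat it as a sketch rather than the exact definition):

    #define FOR_EACH_VEC_ELT(V, I, P) \
      for (I = 0; (V).iterate ((I), &(P)); ++(I))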
@@ -1079,9 +1059,9 @@ check_annul_list_true_false (int annul_true_p, rtx delay_list)
    PNEW_THREAD points to a location that is to receive the place at which
    execution should continue.  */
 
-static rtx_insn_list *
+static void
 steal_delay_list_from_target (rtx_insn *insn, rtx condition, rtx_sequence *seq,
-                              rtx_insn_list *delay_list, struct resources *sets,
+                              vec<rtx_insn *> *delay_list, resources *sets,
                               struct resources *needed,
                               struct resources *other_needed,
                               int slots_to_fill, int *pslots_filled,
@@ -1089,7 +1069,7 @@ steal_delay_list_from_target (rtx_insn *insn, rtx condition, rtx_sequence *seq,
 {
   int slots_remaining = slots_to_fill - *pslots_filled;
   int total_slots_filled = *pslots_filled;
-  rtx_insn_list *new_delay_list = 0;
+  auto_vec<rtx_insn *, 5> new_delay_list;
   int must_annul = *pannul_p;
   int used_annul = 0;
   int i;
@@ -1113,25 +1093,25 @@ steal_delay_list_from_target (rtx_insn *insn, rtx condition, rtx_sequence *seq,
      will effect the direction of the jump in the sequence.  */
 
   CLEAR_RESOURCE (&cc_set);
-  for (rtx_insn_list *temp = delay_list; temp; temp = temp->next ())
-    {
-      rtx_insn *trial = temp->insn ();
 
+  rtx_insn *trial;
+  FOR_EACH_VEC_ELT (*delay_list, i, trial)
+    {
       mark_set_resources (trial, &cc_set, 0, MARK_SRC_DEST_CALL);
       if (insn_references_resource_p (seq->insn (0), &cc_set, false))
-        return delay_list;
+        return;
     }
 
   if (XVECLEN (seq, 0) - 1 > slots_remaining
       || ! condition_dominates_p (condition, seq->insn (0))
      || ! single_set (seq->insn (0)))
-    return delay_list;
+    return;
 
   /* On some targets, branches with delay slots can have a limited
      displacement.  Give the back end a chance to tell us we can't do
      this.  */
   if (! targetm.can_follow_jump (insn, seq->insn (0)))
-    return delay_list;
+    return;
 
   redundant = XALLOCAVEC (bool, XVECLEN (seq, 0));
   for (i = 1; i < seq->len (); i++)
@@ -1149,7 +1129,7 @@ steal_delay_list_from_target (rtx_insn *insn, rtx condition, rtx_sequence *seq,
              in SEQ, we cannot use it.  */
           || (INSN_ANNULLED_BRANCH_P (seq->insn (0))
               && ! INSN_FROM_TARGET_P (trial)))
-        return delay_list;
+        return;
 
       /* If this insn was already done (usually in a previous delay slot),
         pretend we put it in our delay slot.  */
@@ -1166,9 +1146,9 @@ steal_delay_list_from_target (rtx_insn *insn, rtx condition, rtx_sequence *seq,
                || (! insn_sets_resource_p (trial, other_needed, false)
                    && ! may_trap_or_fault_p (PATTERN (trial)))))
           ? eligible_for_delay (insn, total_slots_filled, trial, flags)
-         : (must_annul || (delay_list == NULL && new_delay_list == NULL))
+         : (must_annul || (delay_list->is_empty () && new_delay_list.is_empty ()))
            && (must_annul = 1,
-               check_annul_list_true_false (0, delay_list)
+               check_annul_list_true_false (0, *delay_list)
                && check_annul_list_true_false (0, new_delay_list)
                && eligible_for_annul_false (insn, total_slots_filled,
                                             trial, flags)))
@@ -1177,14 +1157,14 @@ steal_delay_list_from_target (rtx_insn *insn, rtx condition, rtx_sequence *seq,
            used_annul = 1;
          rtx_insn *temp = copy_delay_slot_insn (trial);
          INSN_FROM_TARGET_P (temp) = 1;
-         new_delay_list = add_to_delay_list (temp, new_delay_list);
+         add_to_delay_list (temp, &new_delay_list);
          total_slots_filled++;
 
          if (--slots_remaining == 0)
            break;
        }
       else
-        return delay_list;
+        return;
     }
 
   /* Record the effect of the instructions that were redundant and which
@@ -1202,13 +1182,9 @@ steal_delay_list_from_target (rtx_insn *insn, rtx condition, rtx_sequence *seq,
   if (used_annul)
     *pannul_p = 1;
 
-  if (delay_list == 0)
-    return new_delay_list;
-
-  for (rtx_insn_list *temp = new_delay_list; temp; temp = temp->next ())
-    delay_list = add_to_delay_list (temp->insn (), delay_list);
-
-  return delay_list;
+  rtx_insn *temp;
+  FOR_EACH_VEC_ELT (new_delay_list, i, temp)
+    add_to_delay_list (temp, delay_list);
 }
 /* Similar to steal_delay_list_from_target except that SEQ is on the
@@ -1216,10 +1192,10 @@ steal_delay_list_from_target (rtx_insn *insn, rtx condition, rtx_sequence *seq,
    of SEQ is an unconditional branch.  In that case we steal its delay slot
    for INSN since unconditional branches are much easier to fill.  */
 
-static rtx_insn_list *
+static void
 steal_delay_list_from_fallthrough (rtx_insn *insn, rtx condition,
                                    rtx_sequence *seq,
-                                   rtx_insn_list *delay_list,
+                                   vec<rtx_insn *> *delay_list,
                                    struct resources *sets,
                                    struct resources *needed,
                                    struct resources *other_needed,
@@ -1237,7 +1213,7 @@ steal_delay_list_from_fallthrough (rtx_insn *insn, rtx condition,
      unconditional branch.  */
 
   if (! simplejump_or_return_p (seq->insn (0)))
-    return delay_list;
+    return;
 
   for (i = 1; i < seq->len (); i++)
     {
@@ -1253,7 +1229,7 @@ steal_delay_list_from_fallthrough (rtx_insn *insn, rtx condition,
        break;
 
       /* If this insn was already done, we don't need it.  */
-      if (redundant_insn (trial, insn, delay_list))
+      if (redundant_insn (trial, insn, *delay_list))
        {
          update_block (trial, insn);
          delete_from_delay_slot (trial);
@@ -1265,14 +1241,14 @@ steal_delay_list_from_fallthrough (rtx_insn *insn, rtx condition,
               || (! insn_sets_resource_p (trial, other_needed, false)
                   && ! may_trap_or_fault_p (PATTERN (trial)))))
          ? eligible_for_delay (insn, *pslots_filled, trial, flags)
-         : (must_annul || delay_list == NULL) && (must_annul = 1,
-            check_annul_list_true_false (1, delay_list)
+         : (must_annul || delay_list->is_empty ()) && (must_annul = 1,
+            check_annul_list_true_false (1, *delay_list)
            && eligible_for_annul_true (insn, *pslots_filled, trial, flags)))
        {
          if (must_annul)
            used_annul = 1;
          delete_from_delay_slot (trial);
-         delay_list = add_to_delay_list (trial, delay_list);
+         add_to_delay_list (trial, delay_list);
 
          if (++(*pslots_filled) == slots_to_fill)
            break;
@@ -1283,7 +1259,6 @@ steal_delay_list_from_fallthrough (rtx_insn *insn, rtx condition,
 
   if (used_annul)
     *pannul_p = 1;
-  return delay_list;
 }
 /* Try merging insns starting at THREAD which match exactly the insns in
@@ -1500,7 +1475,7 @@ try_merge_delay_insns (rtx_insn *insn, rtx_insn *thread)
    gain in rare cases.  */
 
 static rtx
-redundant_insn (rtx insn, rtx_insn *target, rtx delay_list)
+redundant_insn (rtx insn, rtx_insn *target, const vec<rtx_insn *> &delay_list)
 {
   rtx target_main = target;
   rtx ipat = PATTERN (insn);
@@ -1602,12 +1577,11 @@ redundant_insn (rtx insn, rtx_insn *target, rtx delay_list)
   /* This insn isn't redundant if it conflicts with an insn that either is
      or will be in a delay slot of TARGET.  */
 
-  while (delay_list)
-    {
-      if (insn_sets_resource_p (XEXP (delay_list, 0), &needed, true))
-        return 0;
-      delay_list = XEXP (delay_list, 1);
-    }
+  unsigned int j;
+  rtx_insn *temp;
+  FOR_EACH_VEC_ELT (delay_list, j, temp)
+    if (insn_sets_resource_p (temp, &needed, true))
+      return 0;
 
   if (NONJUMP_INSN_P (target) && GET_CODE (PATTERN (target)) == SEQUENCE)
     for (i = 1; i < XVECLEN (PATTERN (target), 0); i++)
@@ -1928,7 +1902,7 @@ fill_simple_delay_slots (int non_jumps_p)
   int num_unfilled_slots = unfilled_slots_next - unfilled_slots_base;
   struct resources needed, set;
   int slots_to_fill, slots_filled;
-  rtx_insn_list *delay_list;
+  auto_vec<rtx_insn *, 5> delay_list;
 
   for (i = 0; i < num_unfilled_slots; i++)
     {
@@ -1984,7 +1958,7 @@ fill_simple_delay_slots (int non_jumps_p)
         CALL_INSNs.  */
 
       slots_filled = 0;
-      delay_list = 0;
+      delay_list.truncate (0);
 
       if (JUMP_P (insn))
        flags = get_jump_flags (insn, JUMP_LABEL (insn));
@@ -2000,7 +1974,7 @@ fill_simple_delay_slots (int non_jumps_p)
        {
          rtx_insn **tmp;
          slots_filled++;
-         delay_list = add_to_delay_list (trial, delay_list);
+         add_to_delay_list (trial, &delay_list);
 
          /* TRIAL may have had its delay slot filled, then unfilled.  When
            the delay slot is unfilled, TRIAL is placed back on the unfilled
@@ -2093,8 +2067,7 @@ fill_simple_delay_slots (int non_jumps_p)
                     tail, of the list.  */
 
                  update_reg_dead_notes (trial, insn);
-                 delay_list = gen_rtx_INSN_LIST (VOIDmode,
-                                                 trial, delay_list);
+                 delay_list.safe_insert (0, trial);
                  update_block (trial, trial);
                  delete_related_insns (trial);
                  if (slots_to_fill == ++slots_filled)
@@ -2125,13 +2098,13 @@ fill_simple_delay_slots (int non_jumps_p)
       /* Try to optimize case of jumping around a single insn.  */
 #if defined(ANNUL_IFFALSE_SLOTS) || defined(ANNUL_IFTRUE_SLOTS)
       if (slots_filled != slots_to_fill
-         && delay_list == 0
+         && delay_list.is_empty ()
          && JUMP_P (insn)
         && (condjump_p (insn) || condjump_in_parallel_p (insn))
         && !ANY_RETURN_P (JUMP_LABEL (insn)))
        {
-         delay_list = optimize_skip (as_a <rtx_jump_insn *> (insn));
-         if (delay_list)
+         optimize_skip (as_a <rtx_jump_insn *> (insn), &delay_list);
+         if (!delay_list.is_empty ())
            slots_filled += 1;
        }
 #endif
@@ -2219,7 +2192,7 @@ fill_simple_delay_slots (int non_jumps_p)
              && ! can_throw_internal (trial))
            {
              next_trial = next_nonnote_insn (trial);
-             delay_list = add_to_delay_list (trial, delay_list);
+             add_to_delay_list (trial, &delay_list);
 
              if (HAVE_cc0 && reg_mentioned_p (cc0_rtx, pat))
                link_cc0_insns (trial);
@@ -2275,9 +2248,8 @@ fill_simple_delay_slots (int non_jumps_p)
              if (new_label)
                {
-                 delay_list
-                   = add_to_delay_list (copy_delay_slot_insn (next_trial),
-                                        delay_list);
+                 add_to_delay_list (copy_delay_slot_insn (next_trial),
+                                    &delay_list);
                  slots_filled++;
                  reorg_redirect_jump (as_a <rtx_jump_insn *> (trial),
                                       new_label);
@@ -2291,16 +2263,13 @@ fill_simple_delay_slots (int non_jumps_p)
       if ((jump_insn = dyn_cast <rtx_jump_insn *> (insn))
          && simplejump_p (jump_insn)
          && slots_filled != slots_to_fill)
-       delay_list
-         = fill_slots_from_thread (jump_insn, const_true_rtx,
-                                   next_active_insn (JUMP_LABEL (insn)),
-                                   NULL, 1, 1,
-                                   own_thread_p (JUMP_LABEL (insn),
-                                                 JUMP_LABEL (insn), 0),
-                                   slots_to_fill, &slots_filled,
-                                   delay_list);
+       fill_slots_from_thread (jump_insn, const_true_rtx,
+                               next_active_insn (JUMP_LABEL (insn)), NULL, 1,
+                               1, own_thread_p (JUMP_LABEL (insn),
+                                                JUMP_LABEL (insn), 0),
+                               slots_to_fill, &slots_filled, &delay_list);
 
-      if (delay_list)
+      if (!delay_list.is_empty ())
        unfilled_slots_base[i]
          = emit_delay_sequence (insn, delay_list, slots_filled);
@@ -2395,11 +2364,11 @@ follow_jumps (rtx label, rtx_insn *jump, bool *crossing)
    case, we can only take insns from the head of the thread for our delay
    slot.  We then adjust the jump to point after the insns we have taken.  */
 
-static rtx_insn_list *
+static void
 fill_slots_from_thread (rtx_jump_insn *insn, rtx condition,
                        rtx thread_or_return, rtx opposite_thread, int likely,
                        int thread_if_true, int own_thread, int slots_to_fill,
-                       int *pslots_filled, rtx_insn_list *delay_list)
+                       int *pslots_filled, vec<rtx_insn *> *delay_list)
 {
   rtx new_thread;
   struct resources opposite_needed, set, needed;
@@ -2417,7 +2386,7 @@ fill_slots_from_thread (rtx_jump_insn *insn, rtx condition,
   /* If our thread is the end of subroutine, we can't get any delay
      insns from that.  */
   if (thread_or_return == NULL_RTX || ANY_RETURN_P (thread_or_return))
-    return delay_list;
+    return;
 
   rtx_insn *thread = as_a <rtx_insn *> (thread_or_return);
@@ -2479,7 +2448,7 @@ fill_slots_from_thread (rtx_jump_insn *insn, rtx condition,
       /* If TRIAL is redundant with some insn before INSN, we don't
         actually need to add it to the delay list; we can merely pretend
        we did.  */
-      if ((prior_insn = redundant_insn (trial, insn, delay_list)))
+      if ((prior_insn = redundant_insn (trial, insn, *delay_list)))
        {
         fix_reg_dead_note (prior_insn, insn);
         if (own_thread)
@@ -2540,10 +2509,10 @@ fill_slots_from_thread (rtx_jump_insn *insn, rtx condition,
                  if (thread == old_trial)
                    thread = trial;
                  pat = PATTERN (trial);
-                 if ((must_annul || delay_list == NULL) && (thread_if_true
-                     ? check_annul_list_true_false (0, delay_list)
+                 if ((must_annul || delay_list->is_empty ()) && (thread_if_true
+                     ? check_annul_list_true_false (0, *delay_list)
                      && eligible_for_annul_false (insn, *pslots_filled, trial, flags)
-                     : check_annul_list_true_false (1, delay_list)
+                     : check_annul_list_true_false (1, *delay_list)
                      && eligible_for_annul_true (insn, *pslots_filled, trial, flags)))
                    {
                      rtx_insn *temp;
@@ -2616,7 +2585,7 @@ fill_slots_from_thread (rtx_jump_insn *insn, rtx condition,
              if (thread_if_true)
                INSN_FROM_TARGET_P (temp) = 1;
 
-             delay_list = add_to_delay_list (temp, delay_list);
+             add_to_delay_list (temp, delay_list);
 
              if (slots_to_fill == ++(*pslots_filled))
                {
@@ -2631,7 +2600,7 @@ fill_slots_from_thread (rtx_jump_insn *insn, rtx condition,
                                                    &set, true)
                      && (prior_insn
                          = redundant_insn (new_thread, insn,
-                                           delay_list)))
+                                           *delay_list)))
                    {
                      /* We know we do not own the thread, so no need
                         to call update_block and delete_insn.  */
@@ -2702,24 +2671,21 @@ fill_slots_from_thread (rtx_jump_insn *insn, rtx condition,
         so we can only do this if we have taken everything up to here.  */
       if (thread_if_true && trial == new_thread)
        {
-         delay_list
-           = steal_delay_list_from_target (insn, condition, sequence,
-                                           delay_list, &set, &needed,
-                                           &opposite_needed, slots_to_fill,
-                                           pslots_filled, &must_annul,
-                                           &new_thread);
+         steal_delay_list_from_target (insn, condition, sequence,
+                                       delay_list, &set, &needed,
+                                       &opposite_needed, slots_to_fill,
+                                       pslots_filled, &must_annul,
+                                       &new_thread);
          /* If we owned the thread and are told that it branched
            elsewhere, make sure we own the thread at the new location.  */
          if (own_thread && trial != new_thread)
           own_thread = own_thread_p (new_thread, new_thread, 0);
        }
       else if (! thread_if_true)
-       delay_list
-         = steal_delay_list_from_fallthrough (insn, condition,
-                                              sequence,
-                                              delay_list, &set, &needed,
-                                              &opposite_needed, slots_to_fill,
-                                              pslots_filled, &must_annul);
+       steal_delay_list_from_fallthrough (insn, condition, sequence,
+                                          delay_list, &set, &needed,
+                                          &opposite_needed, slots_to_fill,
+                                          pslots_filled, &must_annul);
     }
 
   /* If we haven't found anything for this delay slot and it is very
@@ -2728,7 +2694,7 @@ fill_slots_from_thread (rtx_jump_insn *insn, rtx condition,
      depend on the destination register.  If so, try to place the opposite
      arithmetic insn after the jump insn and put the arithmetic insn in the
      delay slot.  If we can't do this, return.  */
-  if (delay_list == 0 && likely
+  if (delay_list->is_empty () && likely
      && new_thread && !ANY_RETURN_P (new_thread)
      && NONJUMP_INSN_P (new_thread)
      && !RTX_FRAME_RELATED_P (new_thread)
@@ -2748,7 +2714,7 @@ fill_slots_from_thread (rtx_jump_insn *insn, rtx condition,
          || GET_CODE (pat) != SET
          || ! eligible_for_delay (insn, 0, trial, flags)
          || can_throw_internal (trial))
-       return 0;
+       return;
 
       dest = SET_DEST (pat), src = SET_SRC (pat);
       if ((GET_CODE (src) == PLUS || GET_CODE (src) == MINUS)
@@ -2779,7 +2745,7 @@ fill_slots_from_thread (rtx_jump_insn *insn, rtx condition,
               !constrain_operands (1, get_preferred_alternatives (ninsn))))
            {
              delete_related_insns (ninsn);
-             return 0;
+             return;
            }
 
          if (own_thread)
@@ -2800,12 +2766,12 @@ fill_slots_from_thread (rtx_jump_insn *insn, rtx condition,
          if (thread_if_true)
            INSN_FROM_TARGET_P (ninsn) = 1;
 
-         delay_list = add_to_delay_list (ninsn, NULL);
+         add_to_delay_list (ninsn, delay_list);
          (*pslots_filled)++;
        }
     }
 
-  if (delay_list && must_annul)
+  if (!delay_list->is_empty () && must_annul)
     INSN_ANNULLED_BRANCH_P (insn) = 1;
 
   /* If we are to branch into the middle of this thread, find an appropriate
@@ -2821,7 +2787,7 @@ fill_slots_from_thread (rtx_jump_insn *insn, rtx condition,
       if (new_thread && simplejump_or_return_p (new_thread)
          && redirect_with_delay_list_safe_p (insn,
                                              JUMP_LABEL (new_thread),
-                                             delay_list))
+                                             *delay_list))
        new_thread = follow_jumps (JUMP_LABEL (new_thread), insn,
                                   &crossing);
@@ -2840,8 +2806,6 @@ fill_slots_from_thread (rtx_jump_insn *insn, rtx condition,
          CROSSING_JUMP_P (insn) = 1;
        }
     }
-
-  return delay_list;
 }
 /* Make another attempt to find insns to place in delay slots.
@@ -2866,7 +2830,7 @@ fill_eager_delay_slots (void)
       rtx condition;
       rtx target_label, insn_at_target;
       rtx_insn *fallthrough_insn;
-      rtx_insn_list *delay_list = 0;
+      auto_vec<rtx_insn *, 5> delay_list;
       rtx_jump_insn *jump_insn;
       int own_target;
       int own_fallthrough;
@@ -2927,13 +2891,12 @@ fill_eager_delay_slots (void)
 
       if (prediction > 0)
        {
-         delay_list
-           = fill_slots_from_thread (jump_insn, condition, insn_at_target,
-                                     fallthrough_insn, prediction == 2, 1,
-                                     own_target,
-                                     slots_to_fill, &slots_filled, delay_list);
+         fill_slots_from_thread (jump_insn, condition, insn_at_target,
+                                 fallthrough_insn, prediction == 2, 1,
+                                 own_target,
+                                 slots_to_fill, &slots_filled, &delay_list);
 
-         if (delay_list == 0 && own_fallthrough)
+         if (delay_list.is_empty () && own_fallthrough)
            {
              /* Even though we didn't find anything for delay slots,
                 we might have found a redundant insn which we deleted
@@ -2942,35 +2905,26 @@ fill_eager_delay_slots (void)
              target_label = JUMP_LABEL (jump_insn);
              insn_at_target = first_active_target_insn (target_label);
 
-             delay_list
-               = fill_slots_from_thread (jump_insn, condition,
-                                         fallthrough_insn,
-                                         insn_at_target, 0, 0,
-                                         own_fallthrough,
-                                         slots_to_fill, &slots_filled,
-                                         delay_list);
+             fill_slots_from_thread (jump_insn, condition, fallthrough_insn,
+                                     insn_at_target, 0, 0, own_fallthrough,
+                                     slots_to_fill, &slots_filled,
+                                     &delay_list);
            }
        }
       else
        {
          if (own_fallthrough)
-           delay_list
-             = fill_slots_from_thread (jump_insn, condition, fallthrough_insn,
-                                       insn_at_target, 0, 0,
-                                       own_fallthrough,
-                                       slots_to_fill, &slots_filled,
-                                       delay_list);
+           fill_slots_from_thread (jump_insn, condition, fallthrough_insn,
+                                   insn_at_target, 0, 0, own_fallthrough,
+                                   slots_to_fill, &slots_filled, &delay_list);
 
-         if (delay_list == 0)
-           delay_list
-             = fill_slots_from_thread (jump_insn, condition, insn_at_target,
-                                       next_active_insn (insn), 0, 1,
-                                       own_target,
-                                       slots_to_fill, &slots_filled,
-                                       delay_list);
+         if (delay_list.is_empty ())
+           fill_slots_from_thread (jump_insn, condition, insn_at_target,
+                                   next_active_insn (insn), 0, 1, own_target,
+                                   slots_to_fill, &slots_filled, &delay_list);
        }
 
-      if (delay_list)
+      if (!delay_list.is_empty ())
        unfilled_slots_base[i]
          = emit_delay_sequence (jump_insn, delay_list, slots_filled);
@@ -3304,7 +3258,7 @@ relax_delay_slots (rtx_insn *first)
       /* See if the first insn in the delay slot is redundant with some
         previous insn.  Remove it from the delay slot if so; then set up
        to reprocess this insn.  */
-      if (redundant_insn (pat->insn (1), delay_insn, 0))
+      if (redundant_insn (pat->insn (1), delay_insn, vNULL))
        {
         update_block (pat->insn (1), insn);
         delete_from_delay_slot (pat->insn (1));
@@ -3390,7 +3344,7 @@ relax_delay_slots (rtx_insn *first)
        liveness info.  */
       trial = next_real_insn (target_label);
       if (trial && GET_CODE (PATTERN (trial)) != SEQUENCE
-         && redundant_insn (trial, insn, 0)
+         && redundant_insn (trial, insn, vNULL)
         && ! can_throw_internal (trial))
        {
         /* Figure out where to emit the special USE insn so we don't
@@ -3423,7 +3377,7 @@ relax_delay_slots (rtx_insn *first)
         && trial_seq->len () == 2
         && JUMP_P (trial_seq->insn (0))
         && simplejump_or_return_p (trial_seq->insn (0))
-         && redundant_insn (trial_seq->insn (1), insn, 0))
+         && redundant_insn (trial_seq->insn (1), insn, vNULL))
        {
         target_label = JUMP_LABEL (trial_seq->insn (0));
         if (ANY_RETURN_P (target_label))
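
The three relax_delay_slots call sites above used to pass a literal 0 where no delay list exists; with the parameter now const vec<rtx_insn *> &, they pass vNULL, vec.h's constant that converts to an empty vec of any element type. That only suits read-only parameters like redundant_insn's; a caller that accumulates insns still declares its own vector, as elsewhere in the patch (fragment echoing the diff, not new API):

    auto_vec<rtx_insn *, 5> delay_list;               /* writable, caller-owned */
    rtx prior = redundant_insn (trial, insn, vNULL);  /* read-only, empty list */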