Commit 4241ecb0 by Jörn Rennecke Committed by Joern Rennecke

sh.c (max_labelno_before_reorg): New variable.

	* sh.c (max_labelno_before_reorg): New variable.
	(sh_reorg): Initialize it.
	(find_barrier): Check max_labelno_before_reorg before using
	label_to_alignment.  Take length of explicit alignment insns
	into account.  When seeing a UNSPECV_CONST_END, return it.

	* sh.c (fixup_mova): Set mode of affected label to QImode.
	(untangle_mova): New function.
	(find_barrier): Use it.  Check mode of label before decrementing
	num_mova.
	(sh_reorg): Likewise.
	Set mode of all insns back to VOIDmode.

	* sh.c (MOVA_LABELREF): New macro.
	(mova_p, fixup_mova, sh_reorg): Use it.
	(find_barrier, sh_reorg): Don't count num_mova back to 0 unless
	ADDR_DIFF_VEC matches mova.

From-SVN: r112173
parent 79252435
2006-03-17 Jörn Rennecke <joern.rennecke@st.com>
* sh.c (max_labelno_before_reorg): New variable.
(sh_reorg): Initialize it.
(find_barrier): Check max_labelno_before_reorg before using
label_to_alignment. Take length of explicit alignment insns
into account. When seeing a UNSPECV_CONST_END, return it.
* sh.c (fixup_mova): Set mode of affected label to QImode.
(untangle_mova): New function.
(find_barrier): Use it. Check mode of label before decrementing
num_mova.
(sh_reorg): Likewise.
Set mode of all insns back to VOIDmode.
* sh.c (MOVA_LABELREF): New macro.
(mova_p, fixup_mova, sh_reorg): Use it.
(find_barrier, sh_reorg): Don't count num_mova back to 0 unless
ADDR_DIFF_VEC matches mova.
2006-03-17 Steven Bosscher <stevenb.gcc@gmail.com>
* dwarf2asm.c (dw2_asm_output_offset): Mark 'base' argument
......
......@@ -3048,6 +3048,8 @@ static int pool_size;
static rtx pool_window_label;
static int pool_window_last;
static int max_labelno_before_reorg;
/* ??? If we need a constant in HImode which is the truncated value of a
constant we need in SImode, we could combine the two entries thus saving
two bytes. Is this common enough to be worth the effort of implementing
......@@ -3332,6 +3334,8 @@ hi_const (rtx src)
&& INTVAL (src) <= 32767);
}
#define MOVA_LABELREF(mova) XVECEXP (SET_SRC (PATTERN (mova)), 0, 0)
/* Nonzero if the insn is a move instruction which needs to be fixed. */
/* ??? For a DImode/DFmode moves, we don't need to fix it if each half of the
......@@ -3391,16 +3395,17 @@ mova_p (rtx insn)
&& GET_CODE (SET_SRC (PATTERN (insn))) == UNSPEC
&& XINT (SET_SRC (PATTERN (insn)), 1) == UNSPEC_MOVA
/* Don't match mova_const. */
&& GET_CODE (XVECEXP (SET_SRC (PATTERN (insn)), 0, 0)) == LABEL_REF);
&& GET_CODE (MOVA_LABELREF (insn)) == LABEL_REF);
}
/* Fix up a mova from a switch that went out of range. */
static void
fixup_mova (rtx mova)
{
PUT_MODE (XEXP (MOVA_LABELREF (mova), 0), QImode);
if (! flag_pic)
{
SET_SRC (PATTERN (mova)) = XVECEXP (SET_SRC (PATTERN (mova)), 0, 0);
SET_SRC (PATTERN (mova)) = MOVA_LABELREF (mova);
INSN_CODE (mova) = -1;
}
else
......@@ -3434,6 +3439,53 @@ fixup_mova (rtx mova)
}
}
/* NEW_MOVA is a mova we've just encountered while scanning forward. Update
*num_mova, and check if the new mova is not nested within the first one.
Return 0 if *first_mova was replaced, 1 if new_mova was replaced,
2 if new_mova has been assigned to *first_mova, -1 otherwise.  */
static int
untangle_mova (int *num_mova, rtx *first_mova, rtx new_mova)
{
int n_addr;
int f_target, n_target;
/* Insn addresses are only available when optimizing (shorten_branches
has run); without them we cannot do range checks here.  */
if (optimize)
{
n_addr = INSN_ADDRESSES (INSN_UID (new_mova));
n_target = INSN_ADDRESSES (INSN_UID (XEXP (MOVA_LABELREF (new_mova), 0)));
/* A mova can only reach forward, and at most 1022 bytes ahead
(presumably the SH pc-relative addressing range -- TODO confirm
against the mova instruction encoding).  If the target label is
behind us or out of reach, the mova is already broken.  */
if (n_addr > n_target || n_addr + 1022 < n_target)
{
/* Change the mova into a load.
broken_move will then return true for it. */
fixup_mova (new_mova);
return 1;
}
}
/* First mova seen in this scan: record it and report assignment.  */
if (!(*num_mova)++)
{
*first_mova = new_mova;
return 2;
}
/* Without addresses we cannot compare the two movas; likewise if the
first mova's target is not before the new one's, they are not
tangled.  Keep both (count stays incremented).  */
if (!optimize
|| ((f_target
= INSN_ADDRESSES (INSN_UID (XEXP (MOVA_LABELREF (*first_mova), 0))))
>= n_target))
return -1;
/* The new mova is nested within the first one: only one can survive.
Fix up whichever has the longer insn-to-target distance, since that
one is more at risk of going out of range.  */
(*num_mova)--;
if (f_target - INSN_ADDRESSES (INSN_UID (*first_mova))
> n_target - n_addr)
{
fixup_mova (*first_mova);
return 0;
}
else
{
fixup_mova (new_mova);
return 1;
}
}
/* Find the last barrier from insn FROM which is close enough to hold the
constant pool. If we can't find one, then create one near the end of
the range. */
......@@ -3477,7 +3529,12 @@ find_barrier (int num_mova, rtx mova, rtx from)
int inc = get_attr_length (from);
int new_align = 1;
if (GET_CODE (from) == CODE_LABEL)
/* If this is a label that existed at the time of the compute_alignments
call, determine the alignment. N.B. When find_barrier recurses for
an out-of-reach mova, we might see labels at the start of previously
inserted constant tables. */
if (GET_CODE (from) == CODE_LABEL
&& CODE_LABEL_NUMBER (from) <= max_labelno_before_reorg)
{
if (optimize)
new_align = 1 << label_to_alignment (from);
......@@ -3487,6 +3544,22 @@ find_barrier (int num_mova, rtx mova, rtx from)
new_align = 1;
inc = 0;
}
/* In case we are scanning a constant table because of recursion, check
for explicit alignments. If the table is long, we might be forced
to emit the new table in front of it; the length of the alignment
might be the last straw. */
else if (GET_CODE (from) == INSN
&& GET_CODE (PATTERN (from)) == UNSPEC_VOLATILE
&& XINT (PATTERN (from), 1) == UNSPECV_ALIGN)
new_align = INTVAL (XVECEXP (PATTERN (from), 0, 0));
/* When we find the end of a constant table, paste the new constant
at the end. That is better than putting it in front because
this way, we don't need extra alignment for adding a 4-byte-aligned
mov(a) label to a 2/4 or 8/4 byte aligned table. */
else if (GET_CODE (from) == INSN
&& GET_CODE (PATTERN (from)) == UNSPEC_VOLATILE
&& XINT (PATTERN (from), 1) == UNSPECV_CONST_END)
return from;
if (GET_CODE (from) == BARRIER)
{
......@@ -3551,11 +3624,16 @@ find_barrier (int num_mova, rtx mova, rtx from)
if (mova_p (from))
{
if (! num_mova++)
switch (untangle_mova (&num_mova, &mova, from))
{
case 0: return find_barrier (0, 0, mova);
case 2:
{
leading_mova = 0;
mova = from;
barrier_before_mova = good_barrier ? good_barrier : found_barrier;
barrier_before_mova
= good_barrier ? good_barrier : found_barrier;
}
default: break;
}
if (found_si > count_si)
count_si = found_si;
......@@ -3564,7 +3642,10 @@ find_barrier (int num_mova, rtx mova, rtx from)
&& (GET_CODE (PATTERN (from)) == ADDR_VEC
|| GET_CODE (PATTERN (from)) == ADDR_DIFF_VEC))
{
if (num_mova)
if ((num_mova > 1 && GET_MODE (prev_nonnote_insn (from)) == VOIDmode)
|| (num_mova
&& (prev_nonnote_insn (from)
== XEXP (MOVA_LABELREF (mova), 0))))
num_mova--;
if (barrier_align (next_real_insn (from)) == align_jumps_log)
{
......@@ -4284,6 +4365,7 @@ sh_reorg (void)
rtx r0_inc_rtx = gen_rtx_POST_INC (Pmode, r0_rtx);
first = get_insns ();
max_labelno_before_reorg = max_label_num ();
/* We must split call insns before introducing `mova's. If we're
optimizing, they'll have already been split. Otherwise, make
......@@ -4545,21 +4627,23 @@ sh_reorg (void)
below the switch table. Check if that has happened.
We only have the addresses available when optimizing; but then,
this check shouldn't be needed when not optimizing. */
rtx label_ref = XVECEXP (SET_SRC (PATTERN (insn)), 0, 0);
if (optimize
&& (INSN_ADDRESSES (INSN_UID (insn))
> INSN_ADDRESSES (INSN_UID (XEXP (label_ref, 0)))))
if (!untangle_mova (&num_mova, &mova, insn))
{
/* Change the mova into a load.
broken_move will then return true for it. */
fixup_mova (insn);
insn = mova;
num_mova = 0;
}
else if (! num_mova++)
mova = insn;
}
else if (GET_CODE (insn) == JUMP_INSN
&& GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
&& num_mova)
&& num_mova
/* ??? loop invariant motion can also move a mova out of a
loop. Since loop does this code motion anyway, maybe we
should wrap UNSPEC_MOVA into a CONST, so that reload can
move it back. */
&& ((num_mova > 1
&& GET_MODE (prev_nonnote_insn (insn)) == VOIDmode)
|| (prev_nonnote_insn (insn)
== XEXP (MOVA_LABELREF (mova), 0))))
{
rtx scan;
int total;
......@@ -4717,6 +4801,8 @@ sh_reorg (void)
}
}
free_alloc_pool (label_ref_list_pool);
for (insn = first; insn; insn = NEXT_INSN (insn))
PUT_MODE (insn, VOIDmode);
mdep_reorg_phase = SH_SHORTEN_BRANCHES1;
INSN_ADDRESSES_FREE ();
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment