Commit f5b9e7c9 by Nathan Sidwell, committed by Nathan Sidwell

sh.c (print_operand_address): Use gcc_assert and gcc_unreachable as appropriate.

	* config/sh/sh.c (print_operand_address): Use gcc_assert and
	gcc_unreachable as appropriate.
	(print_operand, prepare_move_operands, prepare_scc_operands,
	output_movedouble, output_branch, shift_insns_rtx, gen_shifty_op,
	gen_shl_and, shl_sext_kind, gen_datalabel_ref, dump_table,
	fixup_mova, gen_far_branch, sh_reorg, split_branches,
	final_prescan_insn, output_stack_adjust, sh_expand_epilogue,
	sh_set_return_address, sh_setup_incoming_varargs,
	initial_elimination_offset, sh_pch_valid_p, get_free_reg,
	sh_expand_builtin, sh_output_mi_thunk, extract_sfunc_addr,
	check_use_sfunc_addr): Likewise.
	* config/sh/netbsd-elf.h (FUNCTION_PROFILER): Likewise.
	* config/sh/sh.h (ASM_MAYBE_OUTPUT_ENCODED_ADDR_RTX): Likewise.
	* config/sh/symbian.c (sh_symbian_mark_dllexport,
	sh_symbian_mark_dllimport): Likewise.
	* config/sh/sh.md: Likewise.
	(movdicc, call_pop, call_value_pop, casesi_worker_1,
	casesi_worker_2, casesi_shift_media, casesi_load_media,
	return_media): Likewise.

From-SVN: r99425
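The conversion throughout the patch is mechanical: a guarded abort () becomes a gcc_assert of the condition that must hold (the old test, inverted), and an abort () in a default case or other dead path becomes gcc_unreachable (). The sketch below shows the idiom only; op_kind and op_name are made-up names, and the two macros are simplified stand-ins for the real ones, which are defined in gcc/system.h and report file, line and function through fancy_abort.

/* Illustrative only; simplified stand-ins for GCC's checking macros.  */
#include <stdio.h>
#include <stdlib.h>

#define gcc_assert(EXPR) ((void) ((EXPR) ? 0 : (abort (), 0)))
#define gcc_unreachable() (abort ())

enum op_kind { OP_ADD, OP_SUB };

static const char *
op_name (enum op_kind code)
{
  /* Old style:  if (code != OP_ADD && code != OP_SUB) abort ();
     New style:  assert the invariant directly.  */
  gcc_assert (code == OP_ADD || code == OP_SUB);

  switch (code)
    {
    case OP_ADD:
      return "add";
    case OP_SUB:
      return "sub";
    default:
      /* Old style: abort ();  new style: mark the path as impossible.  */
      gcc_unreachable ();
    }
}

int
main (void)
{
  printf ("%s\n", op_name (OP_SUB));
  return 0;
}

The gain over a bare abort () is that the assertion states the invariant at the point of use, and with the real macros a failure is reported as an internal compiler error naming the function, file and line rather than a bare SIGABRT.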
parent 4581ba9d
gcc/ChangeLog
2005-05-09 Nathan Sidwell <nathan@codesourcery.com>
* config/sh/sh.c (print_operand_address): Use gcc_assert and
gcc_unreachable as appropriate.
(print_operand, prepare_move_operands, prepare_scc_operands,
output_movedouble, output_branch, shift_insns_rtx, gen_shifty_op,
gen_shl_and, shl_sext_kind, gen_datalabel_ref, dump_table,
fixup_mova, gen_far_branch, sh_reorg, split_branches,
final_prescan_insn, output_stack_adjust, sh_expand_epilogue,
sh_set_return_address, sh_setup_incoming_varargs,
initial_elimination_offset, sh_pch_valid_p, get_free_reg,
sh_expand_builtin, sh_output_mi_thunk, extract_sfunc_addr,
check_use_sfunc_addr): Likewise.
* config/sh/netbsd-elf.h (FUNCTION_PROFILER): Likewise.
* config/sh/sh.h (ASM_MAYBE_OUTPUT_ENCODED_ADDR_RTX): Likewise.
* config/sh/symbian.c (sh_symbian_mark_dllexport,
sh_symbian_mark_dllimport): Likewise.
* config/sh/sh.md: Likewise.
(movdicc, call_pop, call_value_pop, casesi_worker_1,
casesi_worker_2, casesi_shift_media, casesi_load_media,
return_media): Likewise.
2005-05-08 Roger Sayle <roger@eyesopen.com>
PR inline-asm/8788
......
gcc/config/sh/netbsd-elf.h
......@@ -89,15 +89,10 @@ Boston, MA 02111-1307, USA. */
#define FUNCTION_PROFILER(STREAM,LABELNO) \
do \
{ \
if (TARGET_SHMEDIA32) \
if (TARGET_SHMEDIA32 || TARGET_SHMEDIA64) \
{ \
/* FIXME */ \
abort (); \
} \
else if (TARGET_SHMEDIA64) \
{ \
/* FIXME */ \
abort (); \
sorry ("unimplemented-shmedia profiling"); \
} \
else \
{ \
......
gcc/config/sh/sh.c
......@@ -518,8 +518,7 @@ print_operand_address (FILE *stream, rtx x)
}
default:
debug_rtx (x);
abort ();
gcc_unreachable ();
}
}
break;
......@@ -646,8 +645,7 @@ print_operand (FILE *stream, rtx x, int code)
break;
case 'm':
if (GET_CODE (x) != MEM)
abort ();
gcc_assert (GET_CODE (x) == MEM);
x = XEXP (x, 0);
switch (GET_CODE (x))
{
......@@ -664,13 +662,12 @@ print_operand (FILE *stream, rtx x, int code)
break;
default:
abort ();
gcc_unreachable ();
}
break;
case 'd':
if (GET_CODE (x) != REG || GET_MODE (x) != V2SFmode)
abort ();
gcc_assert (GET_CODE (x) == REG && GET_MODE (x) == V2SFmode);
fprintf ((stream), "d%s", reg_names[REGNO (x)] + 1);
break;
......@@ -700,9 +697,8 @@ print_operand (FILE *stream, rtx x, int code)
subreg:SI of the DImode register. Maybe reload should be
fixed so as to apply alter_subreg to such loads? */
case SUBREG:
if (SUBREG_BYTE (x) != 0
|| GET_CODE (SUBREG_REG (x)) != REG)
abort ();
gcc_assert (SUBREG_BYTE (x) == 0
&& GET_CODE (SUBREG_REG (x)) == REG);
x = SUBREG_REG (x);
/* Fall through. */
......@@ -1078,7 +1074,7 @@ prepare_move_operands (rtx operands[], enum machine_mode mode)
break;
default:
abort ();
gcc_unreachable ();
}
operands[1] = op1;
}
......@@ -1101,7 +1097,7 @@ prepare_scc_operands (enum rtx_code code)
{
case NE:
/* It isn't possible to handle this case. */
abort ();
gcc_unreachable ();
case LT:
code = GT;
break;
......@@ -1239,12 +1235,17 @@ output_movedouble (rtx insn ATTRIBUTE_UNUSED, rtx operands[],
int dreg = REGNO (dst);
rtx inside = XEXP (src, 0);
if (GET_CODE (inside) == REG)
switch (GET_CODE (inside))
{
case REG:
ptrreg = REGNO (inside);
else if (GET_CODE (inside) == SUBREG)
break;
case SUBREG:
ptrreg = subreg_regno (inside);
else if (GET_CODE (inside) == PLUS)
{
break;
case PLUS:
ptrreg = REGNO (XEXP (inside, 0));
/* ??? A r0+REG address shouldn't be possible here, because it isn't
an offsettable address. Unfortunately, offsettable addresses use
......@@ -1253,15 +1254,16 @@ output_movedouble (rtx insn ATTRIBUTE_UNUSED, rtx operands[],
supported, so we can't use the 'o' constraint.
Thus we must check for and handle r0+REG addresses here.
We punt for now, since this is likely very rare. */
if (GET_CODE (XEXP (inside, 1)) == REG)
abort ();
}
else if (GET_CODE (inside) == LABEL_REF)
gcc_assert (GET_CODE (XEXP (inside, 1)) != REG);
break;
case LABEL_REF:
return "mov.l %1,%0\n\tmov.l %1+4,%T0";
else if (GET_CODE (inside) == POST_INC)
case POST_INC:
return "mov.l %1,%0\n\tmov.l %1,%T0";
else
abort ();
default:
gcc_unreachable ();
}
/* Work out the safe way to copy. Copy into the second half first. */
if (dreg == ptrreg)
......@@ -1430,9 +1432,9 @@ output_branch (int logic, rtx insn, rtx *operands)
{
int label = lf++;
if (final_sequence
&& INSN_ANNULLED_BRANCH_P (XVECEXP (final_sequence, 0, 0)))
abort ();
gcc_assert (!final_sequence
|| !(INSN_ANNULLED_BRANCH_P
(XVECEXP (final_sequence, 0, 0))));
asm_fprintf (asm_out_file, "b%s%ss\t%LLF%d\n",
logic ? "f" : "t",
ASSEMBLER_DIALECT ? "/" : ".", label);
......@@ -1459,7 +1461,7 @@ output_branch (int logic, rtx insn, rtx *operands)
/* There should be no longer branches now - that would
indicate that something has destroyed the branches set
up in machine_dependent_reorg. */
abort ();
gcc_unreachable ();
}
}
......@@ -1663,7 +1665,7 @@ shift_insns_rtx (rtx insn)
case ASHIFT:
return shift_insns[shift_count];
default:
abort ();
gcc_unreachable ();
}
}
......@@ -2023,7 +2025,7 @@ gen_shifty_op (int code, rtx *operands)
else if (value == 0)
{
/* This can happen when not optimizing. We must output something here
to prevent the compiler from aborting in final.c after the try_split
to prevent the compiler from dying in final.c after the try_split
call. */
emit_insn (gen_nop ());
return;
......@@ -2389,8 +2391,7 @@ gen_shl_and (rtx dest, rtx left_rtx, rtx mask_rtx, rtx source)
/* Cases 3 and 4 should be handled by this split
only while combining */
if (kind > 2)
abort ();
gcc_assert (kind <= 2);
if (right)
{
emit_insn (gen_lshrsi3 (dest, source, GEN_INT (right)));
......@@ -2457,8 +2458,7 @@ shl_sext_kind (rtx left_rtx, rtx size_rtx, int *costp)
left = INTVAL (left_rtx);
size = INTVAL (size_rtx);
insize = size - left;
if (insize <= 0)
abort ();
gcc_assert (insize > 0);
/* Default to left / right shift. */
kind = 0;
best_cost = shift_insns[32 - insize] + ashiftrt_insns[32 - size];
......@@ -2686,8 +2686,7 @@ gen_datalabel_ref (rtx sym)
gen_rtvec (1, sym),
UNSPEC_DATALABEL));
if (GET_CODE (sym) != SYMBOL_REF)
abort ();
gcc_assert (GET_CODE (sym) == SYMBOL_REF);
return sym;
}
......@@ -2965,8 +2964,7 @@ dump_table (rtx start, rtx barrier)
scan);
break;
default:
abort ();
break;
gcc_unreachable ();
}
if (p->mode != HImode)
......@@ -3018,8 +3016,7 @@ dump_table (rtx start, rtx barrier)
scan);
break;
default:
abort ();
break;
gcc_unreachable ();
}
if (p->mode != HImode)
......@@ -3130,10 +3127,9 @@ fixup_mova (rtx mova)
do
{
worker = NEXT_INSN (worker);
if (! worker
|| GET_CODE (worker) == CODE_LABEL
|| GET_CODE (worker) == JUMP_INSN)
abort ();
gcc_assert (worker
&& GET_CODE (worker) != CODE_LABEL
&& GET_CODE (worker) != JUMP_INSN);
} while (recog_memoized (worker) != CODE_FOR_casesi_worker_1);
wpat = PATTERN (worker);
wpat0 = XVECEXP (wpat, 0, 0);
......@@ -3772,6 +3768,7 @@ gen_far_branch (struct far_branch *bp)
rtx insn = bp->insert_place;
rtx jump;
rtx label = gen_label_rtx ();
int ok;
emit_label_after (label, insn);
if (bp->far_label)
......@@ -3790,8 +3787,9 @@ gen_far_branch (struct far_branch *bp)
emit_barrier_after (jump);
emit_label_after (bp->near_label, insn);
JUMP_LABEL (jump) = bp->far_label;
if (! invert_jump (insn, label, 1))
abort ();
ok = invert_jump (insn, label, 1);
gcc_assert (ok);
/* If we are branching around a jump (rather than a return), prevent
reorg from using an insn from the jump target as the delay slot insn -
when reorg did this, it pessimized code (we rather hide the delay slot)
......@@ -4365,9 +4363,8 @@ sh_reorg (void)
- 1);
rtx clobber = *clobberp;
if (GET_CODE (clobber) != CLOBBER
|| ! rtx_equal_p (XEXP (clobber, 0), r0_rtx))
abort ();
gcc_assert (GET_CODE (clobber) == CLOBBER
&& rtx_equal_p (XEXP (clobber, 0), r0_rtx));
if (last_float
&& reg_set_between_p (r0_rtx, last_float_move, scan))
......@@ -4501,6 +4498,7 @@ split_branches (rtx first)
rtx insn;
struct far_branch **uid_branch, *far_branch_list = 0;
int max_uid = get_max_uid ();
int ok;
/* Find out which branches are out of range. */
shorten_branches (first);
......@@ -4590,8 +4588,8 @@ split_branches (rtx first)
bp->insert_place = insn;
bp->address = addr;
}
if (! redirect_jump (insn, label, 1))
abort ();
ok = redirect_jump (insn, label, 1);
gcc_assert (ok);
}
else
{
......@@ -4737,19 +4735,29 @@ final_prescan_insn (rtx insn, rtx *opvec ATTRIBUTE_UNUSED,
rtx pattern;
pattern = PATTERN (insn);
if (GET_CODE (pattern) == PARALLEL)
switch (GET_CODE (pattern))
{
case PARALLEL:
pattern = XVECEXP (pattern, 0, 0);
if (GET_CODE (pattern) == CALL
|| (GET_CODE (pattern) == SET
&& (GET_CODE (SET_SRC (pattern)) == CALL
|| get_attr_type (insn) == TYPE_SFUNC)))
break;
case SET:
if (GET_CODE (SET_SRC (pattern)) != CALL
&& get_attr_type (insn) != TYPE_SFUNC)
{
targetm.asm_out.internal_label
(asm_out_file, "L", CODE_LABEL_NUMBER (XEXP (note, 0)));
break;
}
/* else FALLTHROUGH */
case CALL:
asm_fprintf (asm_out_file, "\t.uses %LL%d\n",
CODE_LABEL_NUMBER (XEXP (note, 0)));
else if (GET_CODE (pattern) == SET)
(*targetm.asm_out.internal_label) (asm_out_file, "L",
CODE_LABEL_NUMBER (XEXP (note, 0)));
else
abort ();
break;
default:
gcc_unreachable ();
}
}
}
}
......@@ -4819,8 +4827,7 @@ output_stack_adjust (int size, rtx reg, int epilogue_p,
/* This test is bogus, as output_stack_adjust is used to re-align the
stack. */
#if 0
if (size % align)
abort ();
gcc_assert (!(size % align));
#endif
if (CONST_OK_FOR_ADD (size))
......@@ -4843,7 +4850,7 @@ output_stack_adjust (int size, rtx reg, int epilogue_p,
/* If TEMP is invalid, we could temporarily save a general
register to MACL. However, there is currently no need
to handle this case, so just abort when we see it. */
to handle this case, so just die when we see it. */
if (epilogue_p < 0
|| current_function_interrupt
|| ! call_really_used_regs[temp] || fixed_regs[temp])
......@@ -4890,18 +4897,22 @@ output_stack_adjust (int size, rtx reg, int epilogue_p,
temp = scavenge_reg (live_regs_mask);
if (temp < 0)
{
rtx adj_reg, tmp_reg, mem;
/* If we reached here, the most likely case is the (sibcall)
epilogue for non SHmedia. Put a special push/pop sequence
for such case as the last resort. This looks lengthy but
would not be problem because it seems to be very rare. */
if (! TARGET_SHMEDIA && epilogue_p)
{
rtx adj_reg, tmp_reg, mem;
would not be problem because it seems to be very
rare. */
gcc_assert (!TARGET_SHMEDIA && epilogue_p);
/* ??? There is still the slight possibility that r4 or
r5 have been reserved as fixed registers or assigned
as global registers, and they change during an
interrupt. There are possible ways to handle this:
/* ??? There is still the slight possibility that r4 or r5
have been reserved as fixed registers or assigned as
global registers, and they change during an interrupt.
There are possible ways to handle this:
- If we are adjusting the frame pointer (r14), we can do
with a single temp register and an ordinary push / pop
on the stack.
......@@ -4909,14 +4920,13 @@ output_stack_adjust (int size, rtx reg, int epilogue_p,
fixed or globals) for the temps we need. We might
also grab r14 if we are adjusting the stack pointer.
If we can't find enough available registers, issue
a diagnostic and abort - the user must have reserved
a diagnostic and die - the user must have reserved
way too many registers.
But since all this is rather unlikely to happen and
would require extra testing, we just abort if r4 / r5
would require extra testing, we just die if r4 / r5
are not available. */
if (fixed_regs[4] || fixed_regs[5]
|| global_regs[4] || global_regs[5])
abort ();
gcc_assert (!fixed_regs[4] && !fixed_regs[5]
&& !global_regs[4] && !global_regs[5]);
adj_reg = gen_rtx_REG (GET_MODE (reg), 4);
tmp_reg = gen_rtx_REG (GET_MODE (reg), 5);
......@@ -4935,9 +4945,6 @@ output_stack_adjust (int size, rtx reg, int epilogue_p,
emit_move_insn (tmp_reg, mem);
return;
}
else
abort ();
}
const_reg = gen_rtx_REG (GET_MODE (reg), temp);
/* If SIZE is negative, subtract the positive value.
......@@ -5590,8 +5597,7 @@ sh_expand_prologue (void)
GO_IF_LEGITIMATE_ADDRESS (mode, XEXP (mem_rtx, 0), try_pre_dec);
if (! r0)
abort ();
gcc_assert (r0);
mem_rtx = NULL_RTX;
try_pre_dec:
......@@ -5661,10 +5667,9 @@ sh_expand_prologue (void)
registers or for special registers without pre-dec
memory addresses, since we store their values in r0
first. */
if (TARGET_REGISTER_P (reg)
|| ((reg == PR_REG || SPECIAL_REGISTER_P (reg))
&& mem_rtx != pre_dec))
abort ();
gcc_assert (!TARGET_REGISTER_P (reg)
&& ((reg != PR_REG && !SPECIAL_REGISTER_P (reg))
|| mem_rtx == pre_dec));
addr_ok:
orig_reg_rtx = reg_rtx;
......@@ -5680,8 +5685,8 @@ sh_expand_prologue (void)
{
offset_in_r0 = -1;
sp_in_r0 = 0;
if (refers_to_regno_p (R0_REG, R0_REG+1, mem_rtx, (rtx *) 0))
abort ();
gcc_assert (!refers_to_regno_p
(R0_REG, R0_REG+1, mem_rtx, (rtx *) 0));
}
if (*++tmp_pnt <= 0)
......@@ -5729,8 +5734,7 @@ sh_expand_prologue (void)
}
}
if (entry->offset != d_rounding)
abort ();
gcc_assert (entry->offset == d_rounding);
}
else
push_regs (&live_regs_mask, current_function_interrupt);
......@@ -5974,9 +5978,8 @@ sh_expand_epilogue (bool sibcall_p)
stack_pointer_rtx,
r0));
if ((reg == PR_REG || SPECIAL_REGISTER_P (reg))
&& mem_rtx != post_inc)
abort ();
gcc_assert ((reg != PR_REG && !SPECIAL_REGISTER_P (reg))
|| mem_rtx == post_inc);
addr_ok:
if ((reg == PR_REG || SPECIAL_REGISTER_P (reg))
......@@ -6005,8 +6008,7 @@ sh_expand_epilogue (bool sibcall_p)
REG_NOTES (insn));
}
if (entry->offset + offset_base != d + d_rounding)
abort ();
gcc_assert (entry->offset + offset_base == d + d_rounding);
}
else /* ! TARGET_SH5 */
{
......@@ -6121,7 +6123,7 @@ sh_set_return_address (rtx ra, rtx tmp)
goto found;
/* We can't find pr register. */
abort ();
gcc_unreachable ();
found:
offset = entry->offset - offset;
......@@ -6989,8 +6991,7 @@ sh_setup_incoming_varargs (CUMULATIVE_ARGS *ca,
int *pretend_arg_size,
int second_time ATTRIBUTE_UNUSED)
{
if (! current_function_stdarg)
abort ();
gcc_assert (current_function_stdarg);
if (TARGET_VARARGS_PRETEND_ARGS (current_function_decl))
{
int named_parm_regs, anon_parm_regs;
......@@ -7064,9 +7065,8 @@ initial_elimination_offset (int from, int to)
if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
return 0;
if (from == RETURN_ADDRESS_POINTER_REGNUM
&& (to == FRAME_POINTER_REGNUM || to == STACK_POINTER_REGNUM))
{
gcc_assert (from == RETURN_ADDRESS_POINTER_REGNUM
&& (to == FRAME_POINTER_REGNUM || to == STACK_POINTER_REGNUM));
if (TARGET_SH5)
{
int n = total_saved_regs_space;
......@@ -7089,13 +7089,10 @@ initial_elimination_offset (int from, int to)
target_flags = save_flags;
return entry->offset;
}
abort ();
gcc_unreachable ();
}
else
return total_auto_space;
}
abort ();
}
/* Handle machine specific pragmas to be semi-compatible with Renesas
......@@ -7353,7 +7350,7 @@ sh_pch_valid_p (const void *data_p, size_t len)
goto make_message;
}
}
abort ();
gcc_unreachable ();
}
data += sizeof (target_flags);
len -= sizeof (target_flags);
......@@ -8260,10 +8257,8 @@ get_free_reg (HARD_REG_SET regs_live)
/* Hard reg 1 is live; since this is a SMALL_REGISTER_CLASSES target,
there shouldn't be anything but a jump before the function end. */
if (! TEST_HARD_REG_BIT (regs_live, 7))
gcc_assert (!TEST_HARD_REG_BIT (regs_live, 7));
return gen_rtx_REG (Pmode, 7);
abort ();
}
/* This function will set the fpscr from memory.
......@@ -9538,7 +9533,7 @@ sh_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
pat = (*insn_data[d->icode].genfun) (op[0], op[1], op[2], op[3]);
break;
default:
abort ();
gcc_unreachable ();
}
if (! pat)
return 0;
......@@ -9817,7 +9812,7 @@ sh_output_mi_thunk (FILE *file, tree thunk_fndecl ATTRIBUTE_UNUSED,
offset_addr = scratch0;
}
else
abort (); /* FIXME */
gcc_unreachable (); /* FIXME */
emit_load_ptr (scratch0, offset_addr);
if (Pmode != ptr_mode)
......@@ -9982,9 +9977,8 @@ extract_sfunc_addr (rtx insn)
&& GENERAL_REGISTER_P (true_regnum (XEXP (part, 0))))
return XEXP (part, 0);
}
if (GET_CODE (XVECEXP (pattern, 0, 0)) == UNSPEC_VOLATILE)
gcc_assert (GET_CODE (XVECEXP (pattern, 0, 0)) == UNSPEC_VOLATILE);
return XVECEXP (XVECEXP (pattern, 0, 0), 0, 1);
abort ();
}
/* Verify that the register in use_sfunc_addr still agrees with the address
......@@ -10010,7 +10004,7 @@ check_use_sfunc_addr (rtx insn, rtx reg)
continue;
return rtx_equal_p (extract_sfunc_addr (insn), reg);
}
abort ();
gcc_unreachable ();
}
/* Returns 1 if OP is a MEM that can be source of a simple move operation. */
......
gcc/config/sh/sh.h
......@@ -3475,8 +3475,7 @@ extern int rtx_equal_function_value_matters;
if (((ENCODING) & 0xf) != DW_EH_PE_sdata4 \
&& ((ENCODING) & 0xf) != DW_EH_PE_sdata8) \
{ \
if (GET_CODE (ADDR) != SYMBOL_REF) \
abort (); \
gcc_assert (GET_CODE (ADDR) == SYMBOL_REF); \
SYMBOL_REF_FLAGS (ADDR) |= SYMBOL_FLAG_FUNCTION; \
if (0) goto DONE; \
} \
......
gcc/config/sh/sh.md
......@@ -889,7 +889,7 @@
FAIL;
default:
abort ();
gcc_unreachable ();
}
}
}")
......@@ -1950,7 +1950,7 @@
[(const_int 0)]
"
{
if (INTVAL (operands[2]) == (unsigned) 0xffffffff)
if ((unsigned)INTVAL (operands[2]) == (unsigned) 0xffffffff)
emit_insn (gen_mshflo_l_di (operands[0], operands[1], CONST0_RTX (DImode)));
else
emit_insn (gen_mshfhi_l_di (operands[0], CONST0_RTX (DImode), operands[1]));
......@@ -2022,8 +2022,7 @@
offset = SUBREG_BYTE (operands[0]);
operands[0] = SUBREG_REG (operands[0]);
}
if (GET_CODE (operands[0]) != REG)
abort ();
gcc_assert (GET_CODE (operands[0]) == REG);
if (! TARGET_LITTLE_ENDIAN)
offset += 8 - GET_MODE_SIZE (inmode);
operands[5] = gen_rtx_SUBREG (inmode, operands[0], offset);
......@@ -3812,14 +3811,20 @@
&& GET_CODE (XEXP (operands[1], 0)) == POST_INC))
FAIL;
if (GET_CODE (operands[0]) == REG)
switch (GET_CODE (operands[0]))
{
case REG:
regno = REGNO (operands[0]);
else if (GET_CODE (operands[0]) == SUBREG)
break;
case SUBREG:
regno = subreg_regno (operands[0]);
else if (GET_CODE (operands[0]) == MEM)
break;
case MEM:
regno = -1;
else
abort ();
break;
default:
gcc_unreachable ();
}
if (regno == -1
|| ! refers_to_regno_p (regno, regno + 1, operands[1], 0))
......@@ -4186,11 +4191,12 @@
operands[2] = immed_double_const ((unsigned long) values[endian]
| ((HOST_WIDE_INT) values[1 - endian]
<< 32), 0, DImode);
else if (HOST_BITS_PER_WIDE_INT == 32)
else
{
gcc_assert (HOST_BITS_PER_WIDE_INT == 32);
operands[2] = immed_double_const (values[endian], values[1 - endian],
DImode);
else
abort ();
}
operands[3] = gen_rtx_REG (DImode, true_regnum (operands[0]));
}")
......@@ -4558,14 +4564,20 @@
&& GET_CODE (XEXP (operands[1], 0)) == POST_INC))
FAIL;
if (GET_CODE (operands[0]) == REG)
switch (GET_CODE (operands[0]))
{
case REG:
regno = REGNO (operands[0]);
else if (GET_CODE (operands[0]) == SUBREG)
break;
case SUBREG:
regno = subreg_regno (operands[0]);
else if (GET_CODE (operands[0]) == MEM)
break;
case MEM:
regno = -1;
else
abort ();
break;
default:
gcc_unreachable ();
}
if (regno == -1
|| ! refers_to_regno_p (regno, regno + 1, operands[1], 0))
......@@ -5959,19 +5971,21 @@
"TARGET_SHCOMPACT"
"
{
if (operands[2] && INTVAL (operands[2]))
{
rtx cookie_rtx = operands[2];
long cookie = INTVAL (cookie_rtx);
rtx func = XEXP (operands[0], 0);
rtx cookie_rtx;
long cookie;
rtx func;
rtx r0, r1;
gcc_assert (operands[2] && INTVAL (operands[2]));
cookie_rtx = operands[2];
cookie = INTVAL (cookie_rtx);
func = XEXP (operands[0], 0);
if (flag_pic)
{
if (GET_CODE (func) == SYMBOL_REF && ! SYMBOL_REF_LOCAL_P (func))
{
rtx reg = gen_reg_rtx (Pmode);
emit_insn (gen_symGOTPLT2reg (reg, func));
func = reg;
}
......@@ -6008,9 +6022,6 @@
(operands[0], operands[1], operands[2], operands[3]));
DONE;
}
abort ();
}")
(define_expand "call_value"
......@@ -6430,13 +6441,16 @@
"TARGET_SHCOMPACT"
"
{
if (TARGET_SHCOMPACT && operands[3] && INTVAL (operands[3]))
{
rtx cookie_rtx = operands[3];
long cookie = INTVAL (cookie_rtx);
rtx func = XEXP (operands[1], 0);
rtx cookie_rtx;
long cookie;
rtx func;
rtx r0, r1;
gcc_assert (TARGET_SHCOMPACT && operands[3] && INTVAL (operands[3]));
cookie_rtx = operands[3];
cookie = INTVAL (cookie_rtx);
func = XEXP (operands[1], 0);
if (flag_pic)
{
if (GET_CODE (func) == SYMBOL_REF && ! SYMBOL_REF_LOCAL_P (func))
......@@ -6481,9 +6495,6 @@
operands[3], operands[4]));
DONE;
}
abort ();
}")
(define_expand "sibcall_epilogue"
......@@ -7145,8 +7156,7 @@ mov.l\\t1f,r0\\n\\
{
rtx diff_vec = PATTERN (next_real_insn (operands[2]));
if (GET_CODE (diff_vec) != ADDR_DIFF_VEC)
abort ();
gcc_assert (GET_CODE (diff_vec) == ADDR_DIFF_VEC);
switch (GET_MODE (diff_vec))
{
......@@ -7159,7 +7169,7 @@ mov.l\\t1f,r0\\n\\
return \"mov.b @(r0,%1),%0\;extu.b %0,%0\";
return \"mov.b @(r0,%1),%0\";
default:
abort ();
gcc_unreachable ();
}
}"
[(set_attr "length" "4")])
......@@ -7177,8 +7187,7 @@ mov.l\\t1f,r0\\n\\
rtx diff_vec = PATTERN (next_real_insn (operands[2]));
const char *load;
if (GET_CODE (diff_vec) != ADDR_DIFF_VEC)
abort ();
gcc_assert (GET_CODE (diff_vec) == ADDR_DIFF_VEC);
switch (GET_MODE (diff_vec))
{
......@@ -7195,7 +7204,7 @@ mov.l\\t1f,r0\\n\\
load = \"mov.b @(r0,%1),%0\";
break;
default:
abort ();
gcc_unreachable ();
}
output_asm_insn (\"add\tr0,%1\;mova\t%O3,r0\\n\", operands);
return load;
......@@ -7212,8 +7221,7 @@ mov.l\\t1f,r0\\n\\
{
rtx diff_vec = PATTERN (next_real_insn (operands[2]));
if (GET_CODE (diff_vec) != ADDR_DIFF_VEC)
abort ();
gcc_assert (GET_CODE (diff_vec) == ADDR_DIFF_VEC);
switch (GET_MODE (diff_vec))
{
......@@ -7226,7 +7234,7 @@ mov.l\\t1f,r0\\n\\
return \"\";
return \"add %1, r63, %0\";
default:
abort ();
gcc_unreachable ();
}
}"
[(set_attr "type" "arith_media")])
......@@ -7241,8 +7249,7 @@ mov.l\\t1f,r0\\n\\
{
rtx diff_vec = PATTERN (next_real_insn (operands[3]));
if (GET_CODE (diff_vec) != ADDR_DIFF_VEC)
abort ();
gcc_assert (GET_CODE (diff_vec) == ADDR_DIFF_VEC);
switch (GET_MODE (diff_vec))
{
......@@ -7259,7 +7266,7 @@ mov.l\\t1f,r0\\n\\
return \"ldx.ub %1, %2, %0\";
return \"ldx.b %1, %2, %0\";
default:
abort ();
gcc_unreachable ();
}
}"
[(set_attr "type" "load_media")])
......@@ -7348,8 +7355,7 @@ mov.l\\t1f,r0\\n\\
{
rtx r18 = gen_rtx_REG (DImode, PR_MEDIA_REG);
if (! call_really_used_regs[TR0_REG] || fixed_regs[TR0_REG])
abort ();
gcc_assert (call_really_used_regs[TR0_REG] && !fixed_regs[TR0_REG]);
tr_regno = TR0_REG;
tr = gen_rtx_REG (DImode, tr_regno);
emit_move_insn (tr, r18);
......
gcc/config/sh/symbian.c
......@@ -216,14 +216,10 @@ sh_symbian_mark_dllexport (tree decl)
tree idp;
rtlname = XEXP (DECL_RTL (decl), 0);
if (GET_CODE (rtlname) == SYMBOL_REF)
if (GET_CODE (rtlname) == MEM)
rtlname = XEXP (rtlname, 0);
gcc_assert (GET_CODE (rtlname) == SYMBOL_REF);
oldname = XSTR (rtlname, 0);
else if (GET_CODE (rtlname) == MEM
&& GET_CODE (XEXP (rtlname, 0)) == SYMBOL_REF)
oldname = XSTR (XEXP (rtlname, 0), 0);
else
abort ();
if (sh_symbian_dllimport_name_p (oldname))
{
......@@ -265,14 +261,10 @@ sh_symbian_mark_dllimport (tree decl)
rtx newrtl;
rtlname = XEXP (DECL_RTL (decl), 0);
if (GET_CODE (rtlname) == SYMBOL_REF)
if (GET_CODE (rtlname) == MEM)
rtlname = XEXP (rtlname, 0);
gcc_assert (GET_CODE (rtlname) == SYMBOL_REF);
oldname = XSTR (rtlname, 0);
else if (GET_CODE (rtlname) == MEM
&& GET_CODE (XEXP (rtlname, 0)) == SYMBOL_REF)
oldname = XSTR (XEXP (rtlname, 0), 0);
else
abort ();
if (sh_symbian_dllexport_name_p (oldname))
{
......