Commit 9b901d50 by Richard Earnshaw

arm.c (arm_const_double_inline_cost): Handle any constant by using gen_lowpart and gen_highpart_mode.

	* arm.c (arm_const_double_inline_cost): Handle any constant by
	using gen_lowpart and gen_highpart_mode.
	(note_invalid_constants): All constants in an RTX with a constraint
	that permits memory are now pushed to the constant pool.
	(output_move_double): Delete code to handle reg to reg and
	constant to reg moves.
	(const_double_needs_minipool, output_mov_immediate): Delete.
	* arm.h (EXTRA_CONSTRAINT_STR_ARM): All 'D' variants now handle
	CONST_INT and CONST_VECTOR.
	* arm.md (ANY64): New mode macro.
	(arm_movdi): Split reg-reg and const-reg moves.  Simplify constraints.
	(movdf_soft_insn): Split reg-reg and const-reg moves.
	(split patterns for 64-bit constant and register moves): New.
	* cirrus.md (cirrus_arm_movdi): Split reg-reg and const-reg moves.
	(cirrus_movdf_hard_insn): Likewise.
	* fpa.md (movdf_fpa): Likewise.
	* iwmmxt.md (iwmmxt_arm_movdi): Likewise.
	(movv8qi_internal, movv4hi_internal, movv2si_internal): Fix
	constraints.
	(movv2si_internal_2): Likewise.
	* vfp.md (arm_movdi_vfp): Split reg-reg and const-reg moves.
	(movdf_vfp): Likewise.
	* arm-protos.h (output_mov_immediate): Delete prototype.

From-SVN: r97778
parent 7443a71d
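The rewritten arm_const_double_inline_cost (see the arm.c hunk below) splits any 64-bit constant into its low and high 32-bit words and sums the number of instructions needed to synthesize each word. The following self-contained C sketch only mirrors that structure outside of GCC; cost_of_word is a simplified, hypothetical stand-in for arm_gen_constant's insn counting, not the real ARM immediate cost model.

#include <stdint.h>
#include <stdio.h>

/* Hypothetical stand-in for arm_gen_constant: count how many ARM
   data-processing insns a 32-bit word might need.  Here we just count
   8-bit chunks that contain set bits, which is only a rough bound.  */
static int cost_of_word (uint32_t w)
{
  int cost = 0;
  for (int shift = 0; shift < 32; shift += 8)
    if ((w >> shift) & 0xff)
      cost++;
  return cost ? cost : 1;	/* Even zero needs one MOV.  */
}

/* Mirror the shape of the new arm_const_double_inline_cost: split the
   64-bit value into low and high 32-bit words (as gen_lowpart and
   gen_highpart_mode would) and sum the per-word costs.  */
static int inline_cost (uint64_t val)
{
  uint32_t low = (uint32_t) val;
  uint32_t high = (uint32_t) (val >> 32);
  return cost_of_word (low) + cost_of_word (high);
}

int main (void)
{
  /* A constant that is cheap to build inline...  */
  printf ("cost(0x00000001000000ff) = %d\n",
	  inline_cost (0x00000001000000ffULL));
  /* ...and one that would rather go to the constant pool.  */
  printf ("cost(0x123456789abcdef0) = %d\n",
	  inline_cost (0x123456789abcdef0ULL));
  return 0;
}

Constants whose summed cost stays at or below the 3-or-4 insn threshold are now split inline by a new arm.md pattern; everything else is pushed to the constant pool.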
arm-protos.h:

@@ -100,7 +100,6 @@ extern const char *output_mov_long_double_arm_from_arm (rtx *);
 extern const char *output_mov_double_fpa_from_arm (rtx *);
 extern const char *output_mov_double_arm_from_fpa (rtx *);
 extern const char *output_move_double (rtx *);
-extern const char *output_mov_immediate (rtx *);
 extern const char *output_add_immediate (rtx *);
 extern const char *arithmetic_instr (rtx, int);
 extern void output_ascii_pseudo_op (FILE *, const unsigned char *, int);
arm.c:

@@ -7390,57 +7390,37 @@ push_minipool_fix (rtx insn, HOST_WIDE_INT address, rtx *loc,
   minipool_fix_tail = fix;
 }
 
-/* Return the cost of synthesizing the const_double VAL inline.
+/* Return the cost of synthesizing a 64-bit constant VAL inline.
    Returns the number of insns needed, or 99 if we don't know how to
    do it.  */
 int
 arm_const_double_inline_cost (rtx val)
 {
-  long parts[2];
-
-  if (GET_MODE (val) == DFmode)
-    {
-      REAL_VALUE_TYPE r;
-
-      if (!TARGET_SOFT_FLOAT)
-        return 99;
-
-      REAL_VALUE_FROM_CONST_DOUBLE (r, val);
-      REAL_VALUE_TO_TARGET_DOUBLE (r, parts);
-    }
-  else if (GET_MODE (val) != VOIDmode)
-    return 99;
-  else
-    {
-      parts[0] = CONST_DOUBLE_LOW (val);
-      parts[1] = CONST_DOUBLE_HIGH (val);
-    }
-
-  return (arm_gen_constant (SET, SImode, NULL_RTX, parts[0],
-                            NULL_RTX, NULL_RTX, 0, 0)
-          + arm_gen_constant (SET, SImode, NULL_RTX, parts[1],
-                              NULL_RTX, NULL_RTX, 0, 0));
-}
-
-/* Determine if a CONST_DOUBLE should be pushed to the minipool */
-static bool
-const_double_needs_minipool (rtx val)
-{
-  /* thumb only knows to load a CONST_DOUBLE from memory at the moment */
-  if (TARGET_THUMB)
-    return true;
-
-  /* Don't push anything to the minipool if a CONST_DOUBLE can be built with
-     a few ALU insns directly. On balance, the optimum is likely to be around
-     3 insns, except when there are no load delay slots where it should be 4.
-     When optimizing for size, a limit of 3 allows saving at least one word
-     except for cases where a single minipool entry could be shared more than
-     2 times which is rather unlikely to outweight the overall savings. */
-  return (arm_const_double_inline_cost (val)
-          > ((optimize_size || arm_ld_sched) ? 3 : 4));
+  rtx lowpart, highpart;
+  enum machine_mode mode;
+
+  mode = GET_MODE (val);
+
+  if (mode == VOIDmode)
+    mode = DImode;
+
+  gcc_assert (GET_MODE_SIZE (mode) == 8);
+
+  lowpart = gen_lowpart (SImode, val);
+  highpart = gen_highpart_mode (SImode, mode, val);
+
+  gcc_assert (GET_CODE (lowpart) == CONST_INT);
+  gcc_assert (GET_CODE (highpart) == CONST_INT);
+
+  return (arm_gen_constant (SET, SImode, NULL_RTX, INTVAL (lowpart),
+                            NULL_RTX, NULL_RTX, 0, 0)
+          + arm_gen_constant (SET, SImode, NULL_RTX, INTVAL (highpart),
+                              NULL_RTX, NULL_RTX, 0, 0));
 }
 
 /* Scan INSN and note any of its operands that need fixing.
    If DO_PUSHES is false we do not actually push any of the fixups
-   needed.  The function returns TRUE is any fixups were needed/pushed.
+   needed.  The function returns TRUE if any fixups were needed/pushed.
    This is used by arm_memory_load_p() which needs to know about loads
    of constants that will be converted into minipool loads.  */
 static bool
@@ -7457,7 +7437,8 @@ note_invalid_constants (rtx insn, HOST_WIDE_INT address, int do_pushes)
   if (recog_data.n_alternatives == 0)
     return false;
 
-  /* Fill in recog_op_alt with information about the constraints of this insn.  */
+  /* Fill in recog_op_alt with information about the constraints of
+     this insn.  */
   preprocess_constraints ();
 
   for (opno = 0; opno < recog_data.n_operands; opno++)
@@ -7474,9 +7455,7 @@ note_invalid_constants (rtx insn, HOST_WIDE_INT address, int do_pushes)
        {
          rtx op = recog_data.operand[opno];
 
-         if (CONSTANT_P (op)
-             && (GET_CODE (op) != CONST_DOUBLE
-                 || const_double_needs_minipool (op)))
+         if (CONSTANT_P (op))
            {
              if (do_pushes)
                push_minipool_fix (insn, address, recog_data.operand_loc[opno],
@@ -8072,175 +8051,7 @@ output_move_double (rtx *operands)
       otherops[0] = gen_rtx_REG (SImode, 1 + reg0);
 
-      if (code1 == REG)
-        {
-          int reg1 = REGNO (operands[1]);
-          if (reg1 == IP_REGNUM)
-            abort ();
-
-          /* Ensure the second source is not overwritten.  */
-          if (reg1 == reg0 + (WORDS_BIG_ENDIAN ? -1 : 1))
-            output_asm_insn ("mov%?\t%Q0, %Q1\n\tmov%?\t%R0, %R1", operands);
-          else
-            output_asm_insn ("mov%?\t%R0, %R1\n\tmov%?\t%Q0, %Q1", operands);
-        }
-      else if (code1 == CONST_VECTOR)
-        {
-          HOST_WIDE_INT hint = 0;
-
-          switch (GET_MODE (operands[1]))
-            {
-            case V2SImode:
-              otherops[1] = GEN_INT (INTVAL (CONST_VECTOR_ELT (operands[1], 1)));
-              operands[1] = GEN_INT (INTVAL (CONST_VECTOR_ELT (operands[1], 0)));
-              break;
-
-            case V4HImode:
-              if (BYTES_BIG_ENDIAN)
-                {
-                  hint = INTVAL (CONST_VECTOR_ELT (operands[1], 2));
-                  hint <<= 16;
-                  hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 3));
-                }
-              else
-                {
-                  hint = INTVAL (CONST_VECTOR_ELT (operands[1], 3));
-                  hint <<= 16;
-                  hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 2));
-                }
-
-              otherops[1] = GEN_INT (hint);
-              hint = 0;
-
-              if (BYTES_BIG_ENDIAN)
-                {
-                  hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 0));
-                  hint <<= 16;
-                  hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 1));
-                }
-              else
-                {
-                  hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 1));
-                  hint <<= 16;
-                  hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 0));
-                }
-
-              operands[1] = GEN_INT (hint);
-              break;
-
-            case V8QImode:
-              if (BYTES_BIG_ENDIAN)
-                {
-                  hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 4));
-                  hint <<= 8;
-                  hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 5));
-                  hint <<= 8;
-                  hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 6));
-                  hint <<= 8;
-                  hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 7));
-                }
-              else
-                {
-                  hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 7));
-                  hint <<= 8;
-                  hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 6));
-                  hint <<= 8;
-                  hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 5));
-                  hint <<= 8;
-                  hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 4));
-                }
-
-              otherops[1] = GEN_INT (hint);
-              hint = 0;
-
-              if (BYTES_BIG_ENDIAN)
-                {
-                  hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 0));
-                  hint <<= 8;
-                  hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 1));
-                  hint <<= 8;
-                  hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 2));
-                  hint <<= 8;
-                  hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 3));
-                }
-              else
-                {
-                  hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 3));
-                  hint <<= 8;
-                  hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 2));
-                  hint <<= 8;
-                  hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 1));
-                  hint <<= 8;
-                  hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 0));
-                }
-
-              operands[1] = GEN_INT (hint);
-              break;
-
-            default:
-              abort ();
-            }
-          output_mov_immediate (operands);
-          output_mov_immediate (otherops);
-        }
-      else if (code1 == CONST_DOUBLE)
-        {
-          if (GET_MODE (operands[1]) == DFmode)
-            {
-              REAL_VALUE_TYPE r;
-              long l[2];
-
-              REAL_VALUE_FROM_CONST_DOUBLE (r, operands[1]);
-              REAL_VALUE_TO_TARGET_DOUBLE (r, l);
-              otherops[1] = GEN_INT (l[1]);
-              operands[1] = GEN_INT (l[0]);
-            }
-          else if (GET_MODE (operands[1]) != VOIDmode)
-            abort ();
-          else if (WORDS_BIG_ENDIAN)
-            {
-              otherops[1] = GEN_INT (CONST_DOUBLE_LOW (operands[1]));
-              operands[1] = GEN_INT (CONST_DOUBLE_HIGH (operands[1]));
-            }
-          else
-            {
-              otherops[1] = GEN_INT (CONST_DOUBLE_HIGH (operands[1]));
-              operands[1] = GEN_INT (CONST_DOUBLE_LOW (operands[1]));
-            }
-
-          output_mov_immediate (operands);
-          output_mov_immediate (otherops);
-        }
-      else if (code1 == CONST_INT)
-        {
-#if HOST_BITS_PER_WIDE_INT > 32
-          /* If HOST_WIDE_INT is more than 32 bits, the intval tells us
-             what the upper word is.  */
-          if (WORDS_BIG_ENDIAN)
-            {
-              otherops[1] = GEN_INT (ARM_SIGN_EXTEND (INTVAL (operands[1])));
-              operands[1] = GEN_INT (INTVAL (operands[1]) >> 32);
-            }
-          else
-            {
-              otherops[1] = GEN_INT (INTVAL (operands[1]) >> 32);
-              operands[1] = GEN_INT (ARM_SIGN_EXTEND (INTVAL (operands[1])));
-            }
-#else
-          /* Sign extend the intval into the high-order word.  */
-          if (WORDS_BIG_ENDIAN)
-            {
-              otherops[1] = operands[1];
-              operands[1] = (INTVAL (operands[1]) < 0
-                             ? constm1_rtx : const0_rtx);
-            }
-          else
-            otherops[1] = INTVAL (operands[1]) < 0 ? constm1_rtx : const0_rtx;
-#endif
-          output_mov_immediate (otherops);
-          output_mov_immediate (operands);
-        }
-      else if (code1 == MEM)
+      if (code1 == MEM)
        {
          switch (GET_CODE (XEXP (operands[1], 0)))
            {
@@ -8479,43 +8290,6 @@ output_move_double (rtx *operands)
   return "";
 }
 
-/* Output an arbitrary MOV reg, #n.
-   OPERANDS[0] is a register.  OPERANDS[1] is a const_int.  */
-const char *
-output_mov_immediate (rtx *operands)
-{
-  HOST_WIDE_INT n = INTVAL (operands[1]);
-
-  /* Try to use one MOV.  */
-  if (const_ok_for_arm (n))
-    output_asm_insn ("mov%?\t%0, %1", operands);
-
-  /* Try to use one MVN.  */
-  else if (const_ok_for_arm (~n))
-    {
-      operands[1] = GEN_INT (~n);
-      output_asm_insn ("mvn%?\t%0, %1", operands);
-    }
-  else
-    {
-      int n_ones = 0;
-      int i;
-
-      /* If all else fails, make it out of ORRs or BICs as appropriate.  */
-      for (i = 0; i < 32; i++)
-        if (n & 1 << i)
-          n_ones++;
-
-      if (n_ones > 16)  /* Shorter to use MVN with BIC in this case.  */
-        output_multi_immediate (operands, "mvn%?\t%0, %1", "bic%?\t%0, %0, %1", 1, ~ n);
-      else
-        output_multi_immediate (operands, "mov%?\t%0, %1", "orr%?\t%0, %0, %1", 1, n);
-    }
-
-  return "";
-}
-
 /* Output an ADD r, s, #n where n may be too big for one instruction.
    If adding zero to one register, output nothing.  */
 const char *
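The deleted output_mov_immediate above encoded a simple selection rule: use a single MOV when the value is a valid ARM immediate, a single MVN when its complement is, and otherwise build the value with a MOV/ORR chain, or with an MVN/BIC chain when more than half of the 32 bits are set. A standalone C sketch of that decision follows; is_arm_immediate is a deliberately simplified stand-in for const_ok_for_arm (the real predicate also accepts any 8-bit pattern rotated by an even amount), so the classifications are illustrative only.

#include <stdint.h>
#include <stdio.h>

/* Simplified stand-in for const_ok_for_arm: accept only plain 8-bit
   values.  */
static int is_arm_immediate (uint32_t n)
{
  return n <= 0xff;
}

/* Count the set bits in N.  */
static int popcount32 (uint32_t n)
{
  int ones = 0;
  for (int i = 0; i < 32; i++)
    if (n & (1u << i))
      ones++;
  return ones;
}

/* Mirror the structure of the deleted output_mov_immediate: pick the
   instruction sequence used to materialise N in a register.  */
static const char *mov_strategy (uint32_t n)
{
  if (is_arm_immediate (n))
    return "single MOV";
  if (is_arm_immediate (~n))
    return "single MVN";
  /* Mostly-ones constants are shorter as MVN followed by BICs.  */
  return popcount32 (n) > 16 ? "MVN + BIC chain" : "MOV + ORR chain";
}

int main (void)
{
  printf ("0x000000c8 -> %s\n", mov_strategy (0x000000c8u));
  printf ("0xffffff3f -> %s\n", mov_strategy (0xffffff3fu));
  printf ("0x00123456 -> %s\n", mov_strategy (0x00123456u));
  printf ("0xfff43f2f -> %s\n", mov_strategy (0xfff43f2fu));
  return 0;
}

After this patch the same decisions are made earlier, by arm_split_constant when the new arm.md split fires, so the assembler-output helper is no longer needed.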
arm.h:

@@ -1323,7 +1323,9 @@ enum reg_class
    'Uq' is an address valid for ldrsb.  */
 
 #define EXTRA_CONSTRAINT_STR_ARM(OP, C, STR) \
-  (((C) == 'D') ? (GET_CODE (OP) == CONST_DOUBLE \
+  (((C) == 'D') ? ((GET_CODE (OP) == CONST_DOUBLE \
+                    || GET_CODE (OP) == CONST_INT \
+                    || GET_CODE (OP) == CONST_VECTOR) \
                    && (((STR)[1] == 'a' \
                         && arm_const_double_inline_cost (OP) == 2) \
                        || ((STR)[1] == 'b' \
arm.md:

@@ -289,6 +289,18 @@
 ;; distant label.  Only applicable to Thumb code.
 (define_attr "far_jump" "yes,no" (const_string "no"))
 
+;;---------------------------------------------------------------------------
+;; Mode macros
+
+; A list of modes that are exactly 64 bits in size.  We use this to expand
+; some splits that are the same for all modes when operating on ARM
+; registers.
+(define_mode_macro ANY64 [DI DF V8QI V4HI V2SI V2SF])
+
+;;---------------------------------------------------------------------------
+;; Predicates
+
 (include "predicates.md")
 
 ;;---------------------------------------------------------------------------
@@ -4137,13 +4149,21 @@
 )
 
 (define_insn "*arm_movdi"
   [(set (match_operand:DI 0 "nonimmediate_di_operand" "=r, r, r, r, m")
-        (match_operand:DI 1 "di_operand" "rIKDa,Db,Dc,mi,r"))]
+        (match_operand:DI 1 "di_operand" "rDa,Db,Dc,mi,r"))]
   "TARGET_ARM
   && !(TARGET_HARD_FLOAT && (TARGET_MAVERICK || TARGET_VFP))
   && !TARGET_IWMMXT"
   "*
-  return (output_move_double (operands));
+  switch (which_alternative)
+    {
+    case 0:
+    case 1:
+    case 2:
+      return \"#\";
+    default:
+      return output_move_double (operands);
+    }
   "
   [(set_attr "length" "8,12,16,8,8")
   (set_attr "type" "*,*,*,load2,store2")
@@ -4151,6 +4171,53 @@
   (set_attr "neg_pool_range" "*,*,*,1008,*")]
 )
 
+(define_split
+  [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
+        (match_operand:ANY64 1 "const_double_operand" ""))]
+  "TARGET_ARM
+   && reload_completed
+   && (arm_const_double_inline_cost (operands[1])
+       <= ((optimize_size || arm_ld_sched) ? 3 : 4))"
+  [(const_int 0)]
+  "
+  arm_split_constant (SET, SImode, curr_insn,
+                      INTVAL (gen_lowpart (SImode, operands[1])),
+                      gen_lowpart (SImode, operands[0]), NULL_RTX, 0);
+  arm_split_constant (SET, SImode, curr_insn,
+                      INTVAL (gen_highpart_mode (SImode,
+                                                 GET_MODE (operands[0]),
+                                                 operands[1])),
+                      gen_highpart (SImode, operands[0]), NULL_RTX, 0);
+  DONE;
+  "
+)
+
+(define_split
+  [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
+        (match_operand:ANY64 1 "arm_general_register_operand" ""))]
+  "TARGET_EITHER && reload_completed"
+  [(set (match_dup 0) (match_dup 1))
+   (set (match_dup 2) (match_dup 3))]
+  "
+  operands[2] = gen_highpart (SImode, operands[0]);
+  operands[3] = gen_highpart (SImode, operands[1]);
+  operands[0] = gen_lowpart (SImode, operands[0]);
+  operands[1] = gen_lowpart (SImode, operands[1]);
+
+  /* Handle a partial overlap.  */
+  if (rtx_equal_p (operands[0], operands[3]))
+    {
+      rtx tmp0 = operands[0];
+      rtx tmp1 = operands[1];
+
+      operands[0] = operands[2];
+      operands[1] = operands[3];
+      operands[2] = tmp0;
+      operands[3] = tmp1;
+    }
+  "
+)
+
 ;; We can't actually do base+index doubleword loads if the index and
 ;; destination overlap.  Split here so that we at least have chance to
 ;; schedule.
@@ -5150,7 +5217,17 @@
        (match_operand:DF 1 "soft_df_operand" "rDa,Db,Dc,mF,r"))]
   "TARGET_ARM && TARGET_SOFT_FLOAT
   "
-  "* return output_move_double (operands);"
+  "*
+  switch (which_alternative)
+    {
+    case 0:
+    case 1:
+    case 2:
+      return \"#\";
+    default:
+      return output_move_double (operands);
+    }
+  "
   [(set_attr "length" "8,12,16,8,8")
   (set_attr "type" "*,*,*,load2,store2")
   (set_attr "pool_range" "1020")
cirrus.md:

@@ -377,9 +377,10 @@
   switch (which_alternative)
     {
     case 0:
+      return \"#\";
     case 1:
     case 2:
-      return (output_move_double (operands));
+      return output_move_double (operands);
 
     case 3: return \"cfmv64lr%?\\t%V0, %Q1\;cfmv64hr%?\\t%V0, %R1\";
     case 4: return \"cfmvr64l%?\\t%Q0, %V1\;cfmvr64h%?\\t%R0, %V1\";
@@ -460,7 +461,8 @@
     {
     case 0: return \"ldm%?ia\\t%m1, %M0\\t%@ double\";
     case 1: return \"stm%?ia\\t%m0, %M1\\t%@ double\";
-    case 2: case 3: case 4: return output_move_double (operands);
+    case 2: return \"#\";
+    case 3: case 4: return output_move_double (operands);
     case 5: return \"cfcpyd%?\\t%V0, %V1\";
     case 6: return \"cfldrd%?\\t%V0, %1\";
     case 7: return \"cfmvdlr\\t%V0, %Q1\;cfmvdhr%?\\t%V0, %R1\";
fpa.md:

@@ -563,7 +563,8 @@
     default:
     case 0: return \"ldm%?ia\\t%m1, %M0\\t%@ double\";
     case 1: return \"stm%?ia\\t%m0, %M1\\t%@ double\";
-    case 2: case 3: case 4: return output_move_double (operands);
+    case 2: return \"#\";
+    case 3: case 4: return output_move_double (operands);
     case 5: return \"mvf%?d\\t%0, %1\";
     case 6: return \"mnf%?d\\t%0, #%N1\";
     case 7: return \"ldf%?d\\t%0, %1\";
iwmmxt.md:

@@ -73,6 +73,8 @@
     {
     default:
       return output_move_double (operands);
+    case 0:
+      return \"#\";
     case 3:
       return \"wmov%?\\t%0,%1\";
     case 4:
@@ -155,7 +157,7 @@
 
 (define_insn "movv8qi_internal"
   [(set (match_operand:V8QI 0 "nonimmediate_operand" "=y,m,y,?r,?y,?r")
-        (match_operand:V8QI 1 "general_operand" "y,y,m,y,r,i"))]
+        (match_operand:V8QI 1 "general_operand" "y,y,mi,y,r,mi"))]
   "TARGET_REALLY_IWMMXT"
   "*
   switch (which_alternative)
@@ -175,7 +177,7 @@
 
 (define_insn "movv4hi_internal"
   [(set (match_operand:V4HI 0 "nonimmediate_operand" "=y,m,y,?r,?y,?r")
-        (match_operand:V4HI 1 "general_operand" "y,y,m,y,r,i"))]
+        (match_operand:V4HI 1 "general_operand" "y,y,mi,y,r,mi"))]
   "TARGET_REALLY_IWMMXT"
   "*
   switch (which_alternative)
@@ -195,7 +197,7 @@
 
 (define_insn "movv2si_internal"
   [(set (match_operand:V2SI 0 "nonimmediate_operand" "=y,m,y,?r,?y,?r")
-        (match_operand:V2SI 1 "general_operand" "y,y,m,y,r,i"))]
+        (match_operand:V2SI 1 "general_operand" "y,y,mi,y,r,mi"))]
   "TARGET_REALLY_IWMMXT"
   "*
   switch (which_alternative)
@@ -220,7 +222,7 @@
 ;; deliberately omitted.
 (define_insn "movv2si_internal_2"
   [(set (match_operand:V2SI 0 "nonimmediate_operand" "=?r")
-        (match_operand 1 "immediate_operand" "i"))]
+        (match_operand 1 "immediate_operand" "mi"))]
   "TARGET_REALLY_IWMMXT"
   "* return output_move_double (operands);"
   [(set_attr "predicable" "yes")
vfp.md:

@@ -142,8 +142,11 @@
   "*
   switch (which_alternative)
     {
-    case 0: case 1: case 2:
-      return (output_move_double (operands));
+    case 0:
+      return \"#\";
+    case 1:
+    case 2:
+      return output_move_double (operands);
     case 3:
       return \"fmdrr%?\\t%P0, %1\\t%@ int\";
     case 4:
@@ -203,7 +206,7 @@
       return \"fmdrr%?\\t%P0, %Q1, %R1\";
     case 1:
       return \"fmrrd%?\\t%Q0, %R0, %P1\";
-    case 2: case 3: case 7:
+    case 2: case 3:
       return output_move_double (operands);
     case 4:
       return \"fldd%?\\t%P0, %1\";
@@ -211,6 +214,8 @@
       return \"fstd%?\\t%P1, %0\";
     case 6:
       return \"fcpyd%?\\t%P0, %P1\";
+    case 7:
+      return \"#\";
     default:
       abort ();
     }