Commit bb8df8a6 by Eric Christopher

rs6000.c (mems_ok_for_quad_peep): Rewrite.

2005-08-10  Eric Christopher  <echristo@apple.com>

        * config/rs6000/rs6000.c (mems_ok_for_quad_peep): Rewrite.
        * config/rs6000/rs6000.md (*lfq_power2, *stfq_power2): Use
        V2DFmode.

From-SVN: r102956
parent 4f8dbd34
2005-08-10  Eric Christopher  <echristo@apple.com>

        * config/rs6000/rs6000.c (mems_ok_for_quad_peep): Rewrite.
        * config/rs6000/rs6000.md (*lfq_power2, *stfq_power2): Use
        V2DFmode.

2005-08-10  Andrew Pinski  <pinskia@physics.uc.edu>

        * config/darwin.c (machopic_indirect_data_reference): Use a new register
@@ -74,7 +80,7 @@
2005-08-09  Dorit Nuzman  <dorit@il.ibm.com>

        * tree-vect-transform.c (vect_create_epilog_for_reduction): Set
        BIT_FIELD_REF_UNSIGNED for newly created BIT_FIELD_REFs.

2005-08-09  Richard Guenther  <rguenther@suse.de>
...
@@ -1975,7 +1975,7 @@ int
num_insns_constant (rtx op, enum machine_mode mode)
{
HOST_WIDE_INT low, high;
switch (GET_CODE (op))
{
case CONST_INT:
@@ -1992,7 +1992,7 @@ num_insns_constant (rtx op, enum machine_mode mode)
{
long l;
REAL_VALUE_TYPE rv;
REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
REAL_VALUE_TO_TARGET_SINGLE (rv, l);
return num_insns_constant_wide ((HOST_WIDE_INT) l);
@@ -2007,7 +2007,7 @@ num_insns_constant (rtx op, enum machine_mode mode)
{
long l[2];
REAL_VALUE_TYPE rv;
REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
high = l[WORDS_BIG_ENDIAN == 0];
@@ -2022,18 +2022,18 @@ num_insns_constant (rtx op, enum machine_mode mode)
if ((high == 0 && low >= 0)
|| (high == -1 && low < 0))
return num_insns_constant_wide (low);
else if (mask_operand (op, mode))
return 2;
else if (low == 0)
return num_insns_constant_wide (high) + 1;
else
return (num_insns_constant_wide (high)
+ num_insns_constant_wide (low) + 1);
}
default:
gcc_unreachable ();
}
@@ -2128,7 +2128,7 @@ output_vec_const_move (rtx *operands)
return "vxor %0,%0,%0";
gcc_assert (easy_vector_constant (vec, mode));
operands[1] = GEN_INT (cst);
switch (mode)
{
@@ -2162,14 +2162,14 @@ output_vec_const_move (rtx *operands)
}
else if (EASY_VECTOR_15_ADD_SELF (cst))
return "#";
default:
gcc_unreachable ();
}
}
gcc_assert (TARGET_SPE);
/* Vector constant 0 is handled as a splitter of V2SI, and in the
pattern of V1DI, V4HI, and V2SF.
@@ -2355,7 +2355,7 @@ mask64_1or2_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED,
{
HOST_WIDE_INT c, lsb;
bool one_ok;
c = INTVAL (op);
/* Disallow all zeros.  */
@@ -2366,7 +2366,7 @@ mask64_1or2_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED,
AND there are zero, one or two transitions in the _whole_ of
C.  */
one_ok = !(c & ~(HOST_WIDE_INT)0xffffffff);
/* We don't change the number of transitions by inverting,
so make sure we start with the LS bit zero.  */
if (c & 1)
@@ -2501,7 +2501,7 @@ rs6000_special_round_type_align (tree type, int computed, int specified)
{
tree field = TYPE_FIELDS (type);
/* Skip all non field decls */
while (field != NULL && TREE_CODE (field) != FIELD_DECL)
field = TREE_CHAIN (field);
@@ -3574,10 +3574,10 @@ rs6000_emit_set_const (rtx dest, enum machine_mode mode,
dest = gen_reg_rtx (mode);
emit_insn (gen_rtx_SET (VOIDmode, dest, source));
return dest;
case SImode:
result = no_new_pseudos ? dest : gen_reg_rtx (SImode);
emit_insn (gen_rtx_SET (VOIDmode, result,
GEN_INT (INTVAL (source)
& (~ (HOST_WIDE_INT) 0xffff))));
@@ -3594,7 +3594,7 @@ rs6000_emit_set_const (rtx dest, enum machine_mode mode,
c0 = INTVAL (source);
c1 = -(c0 < 0);
break;
case CONST_DOUBLE:
#if HOST_BITS_PER_WIDE_INT >= 64
c0 = CONST_DOUBLE_LOW (source);
@@ -3758,7 +3758,7 @@ rs6000_emit_move (rtx dest, rtx source, enum machine_mode mode)
|| CONST_DOUBLE_LOW (operands[1]) < 0)
&& (CONST_DOUBLE_HIGH (operands[1]) != -1
|| CONST_DOUBLE_LOW (operands[1]) >= 0)));
/* Check if GCC is setting up a block move that will end up using FP
registers as temporaries.  We must make sure this is acceptable.  */
if (GET_CODE (operands[0]) == MEM
@@ -4549,7 +4549,7 @@ function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
grovel through the fields for these too.  */
cum->intoffset = 0;
rs6000_darwin64_record_arg_advance_recurse (cum, type, 0);
rs6000_darwin64_record_arg_advance_flush (cum,
size * BITS_PER_UNIT);
}
}
@@ -4711,7 +4711,7 @@ rs6000_spe_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
structure between cum->intoffset and bitpos to integer registers.  */
static void
rs6000_darwin64_record_arg_flush (CUMULATIVE_ARGS *cum,
HOST_WIDE_INT bitpos, rtx rvec[], int *k)
{
enum machine_mode mode;
@@ -4755,7 +4755,7 @@ rs6000_darwin64_record_arg_flush (CUMULATIVE_ARGS *cum,
if (intregs > 0 && intregs > GP_ARG_NUM_REG - this_regno)
cum->use_stack = 1;
intregs = MIN (intregs, GP_ARG_NUM_REG - this_regno);
if (intregs <= 0)
return;
@@ -4779,7 +4779,7 @@ rs6000_darwin64_record_arg_flush (CUMULATIVE_ARGS *cum,
/* Recursive workhorse for the following.  */
static void
rs6000_darwin64_record_arg_recurse (CUMULATIVE_ARGS *cum, tree type,
HOST_WIDE_INT startbitpos, rtx rvec[],
int *k)
{
@@ -4813,7 +4813,7 @@ rs6000_darwin64_record_arg_recurse (CUMULATIVE_ARGS *cum, tree type,
#endif
rs6000_darwin64_record_arg_flush (cum, bitpos, rvec, k);
rvec[(*k)++]
= gen_rtx_EXPR_LIST (VOIDmode,
gen_rtx_REG (mode, cum->fregno++),
GEN_INT (bitpos / BITS_PER_UNIT));
if (mode == TFmode)
@@ -4823,8 +4823,8 @@ rs6000_darwin64_record_arg_recurse (CUMULATIVE_ARGS *cum, tree type,
{
rs6000_darwin64_record_arg_flush (cum, bitpos, rvec, k);
rvec[(*k)++]
= gen_rtx_EXPR_LIST (VOIDmode,
gen_rtx_REG (mode, cum->vregno++),
GEN_INT (bitpos / BITS_PER_UNIT));
}
else if (cum->intoffset == -1)
@@ -4837,7 +4837,7 @@ rs6000_darwin64_record_arg_recurse (CUMULATIVE_ARGS *cum, tree type,
being passed by value, along with the offset of where the
register's value may be found in the block.  FP fields go in FP
register, vector fields go in vector registers, and everything
else goes in int registers, packed as in memory.
This code is also used for function return values.  RETVAL indicates
whether this is the case.
@@ -5249,7 +5249,7 @@ rs6000_arg_partial_bytes (CUMULATIVE_ARGS *cum, enum machine_mode mode,
/* If we are passing this arg in the fixed parameter save area
(gprs or memory) as well as fprs, then this function should
return the number of bytes passed in the parameter save area
rather than bytes passed in fprs.  */
&& !(type
&& (cum->nargs_prototype <= 0
|| (DEFAULT_ABI == ABI_AIX
@@ -7736,13 +7736,13 @@ rs6000_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
}
gcc_assert (TARGET_ALTIVEC || TARGET_SPE);
/* Handle simple unary operations.  */
d = (struct builtin_description *) bdesc_1arg;
for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
if (d->code == fcode)
return rs6000_expand_unop_builtin (d->icode, arglist, target);
/* Handle simple binary operations.  */
d = (struct builtin_description *) bdesc_2arg;
for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
@@ -7754,7 +7754,7 @@ rs6000_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
if (d->code == fcode)
return rs6000_expand_ternop_builtin (d->icode, arglist, target);
gcc_unreachable ();
}
@@ -8698,7 +8698,7 @@ rs6000_common_init_builtins (void)
mode2 = insn_data[d->icode].operand[2].mode;
mode3 = insn_data[d->icode].operand[3].mode;
}
/* When all four are of the same mode.  */
if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
{
@@ -8795,7 +8795,7 @@ rs6000_common_init_builtins (void)
mode2 = VOIDmode;
}
else
{
if (d->name == 0 || d->icode == CODE_FOR_nothing)
continue;
@@ -8945,7 +8945,7 @@ rs6000_common_init_builtins (void)
{
if (d->name == 0 || d->icode == CODE_FOR_nothing)
continue;
mode0 = insn_data[d->icode].operand[0].mode;
mode1 = insn_data[d->icode].operand[1].mode;
}
@@ -9647,8 +9647,8 @@ int
 mems_ok_for_quad_peep (rtx mem1, rtx mem2)
 {
   rtx addr1, addr2;
-  unsigned int reg1;
-  int offset1;
+  unsigned int reg1, reg2;
+  int offset1, offset2;

   /* The mems cannot be volatile.  */
   if (MEM_VOLATILE_P (mem1) || MEM_VOLATILE_P (mem2))
@@ -9681,23 +9681,36 @@ mems_ok_for_quad_peep (rtx mem1, rtx mem2)
       offset1 = 0;
     }

-  /* Make sure the second address is a (mem (plus (reg) (const_int)))
-     or if it is (mem (reg)) then make sure that offset1 is -8 and the same
-     register as addr1.  */
-  if (offset1 == -8 && GET_CODE (addr2) == REG && reg1 == REGNO (addr2))
-    return 1;
-  if (GET_CODE (addr2) != PLUS)
-    return 0;
-
-  if (GET_CODE (XEXP (addr2, 0)) != REG
-      || GET_CODE (XEXP (addr2, 1)) != CONST_INT)
-    return 0;
-
-  if (reg1 != REGNO (XEXP (addr2, 0)))
+  /* And now for the second addr.  */
+  if (GET_CODE (addr2) == PLUS)
+    {
+      /* If not a REG, return zero.  */
+      if (GET_CODE (XEXP (addr2, 0)) != REG)
+        return 0;
+      else
+        {
+          reg2 = REGNO (XEXP (addr2, 0));
+          /* The offset must be constant.  */
+          if (GET_CODE (XEXP (addr2, 1)) != CONST_INT)
+            return 0;
+          offset2 = INTVAL (XEXP (addr2, 1));
+        }
+    }
+  else if (GET_CODE (addr2) != REG)
+    return 0;
+  else
+    {
+      reg2 = REGNO (addr2);
+      /* This was a simple (mem (reg)) expression.  Offset is 0.  */
+      offset2 = 0;
+    }
+
+  /* Both of these must have the same base register.  */
+  if (reg1 != reg2)
     return 0;

   /* The offset for the second addr must be 8 more than the first addr.  */
-  if (INTVAL (XEXP (addr2, 1)) != offset1 + 8)
+  if (offset2 != offset1 + 8)
     return 0;

   /* All the tests passed.  addr1 and addr2 are valid for lfq or stfq
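For readers skimming the hunk above: after the rewrite both addresses are decomposed the same way into a base register plus a constant offset (a bare (mem (reg)) counts as offset 0), and the pair is accepted only when the base registers match and the second offset is exactly 8 bytes past the first. Below is a minimal standalone sketch of that acceptance condition using plain integers rather than RTL; the helper name and the sample register/offset values are illustrative assumptions, not taken from the patch.

/* Sketch only: mirrors the post-rewrite acceptance condition of
   mems_ok_for_quad_peep on already-extracted (register, offset) pairs.  */
#include <stdio.h>

static int
quad_offsets_ok (unsigned int reg1, int offset1,
                 unsigned int reg2, int offset2)
{
  if (reg1 != reg2)
    return 0;                      /* both addresses need the same base register */
  return offset2 == offset1 + 8;   /* second access is the adjacent doubleword */
}

int
main (void)
{
  printf ("%d\n", quad_offsets_ok (3, 0, 3, 8));   /* 1: 0(r3) then 8(r3) */
  printf ("%d\n", quad_offsets_ok (3, -8, 3, 0));  /* 1: -8(r3) then 0(r3), the old special case */
  printf ("%d\n", quad_offsets_ok (3, 0, 4, 8));   /* 0: different base registers */
  return 0;
}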
@@ -10617,7 +10630,7 @@ print_operand (FILE *file, rtx x, int code)
{
gcc_assert (GET_CODE (tmp) == PLUS
&& GET_CODE (XEXP (tmp, 1)) == REG);
if (REGNO (XEXP (tmp, 0)) == 0)
fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
reg_names[ REGNO (XEXP (tmp, 0)) ]);
@@ -10918,7 +10931,7 @@ rs6000_generate_compare (enum rtx_code code)
gcc_unreachable ();
}
break;
case GT: case GTU: case UNGT: case UNGE: case GE: case GEU:
switch (op_mode)
{
@@ -10929,7 +10942,7 @@ rs6000_generate_compare (enum rtx_code code)
: gen_cmpsfgt_gpr (compare_result, rs6000_compare_op0,
rs6000_compare_op1);
break;
case DFmode:
cmp = flag_unsafe_math_optimizations
? gen_tstdfgt_gpr (compare_result, rs6000_compare_op0,
@@ -10942,7 +10955,7 @@ rs6000_generate_compare (enum rtx_code code)
gcc_unreachable ();
}
break;
case LT: case LTU: case UNLT: case UNLE: case LE: case LEU:
switch (op_mode)
{
@@ -10953,7 +10966,7 @@ rs6000_generate_compare (enum rtx_code code)
: gen_cmpsflt_gpr (compare_result, rs6000_compare_op0,
rs6000_compare_op1);
break;
case DFmode:
cmp = flag_unsafe_math_optimizations
? gen_tstdflt_gpr (compare_result, rs6000_compare_op0,
@@ -11577,7 +11590,7 @@ rs6000_emit_vector_select (rtx dest, rtx op1, rtx op2, rtx mask)
temp = gen_reg_rtx (dest_mode);
/* For each vector element, select op1 when mask is 1 otherwise
select op2.  */
t = gen_rtx_SET (VOIDmode, temp,
gen_rtx_UNSPEC (dest_mode,
@@ -11867,7 +11880,7 @@ rs6000_emit_minmax (rtx dest, enum rtx_code code, rtx op0, rtx op1)
(set M (CODE:MODE M OP))
If not NULL, BEFORE is atomically set to M before the operation, and
AFTER is set to M after the operation (that is, (CODE:MODE M OP)).
If SYNC_P then a memory barrier is emitted before the operation.
Either OP or M may be wrapped in a NOT operation.  */
void
@@ -11881,10 +11894,10 @@ rs6000_emit_sync (enum rtx_code code, enum machine_mode mode,
rtvec vec;
HOST_WIDE_INT imask = GET_MODE_MASK (mode);
rtx shift = NULL_RTX;
if (sync_p)
emit_insn (gen_memory_barrier ());
if (GET_CODE (m) == NOT)
used_m = XEXP (m, 0);
else
@@ -11901,14 +11914,14 @@ rs6000_emit_sync (enum rtx_code code, enum machine_mode mode,
int ishift = 0;
if (BYTES_BIG_ENDIAN)
ishift = GET_MODE_BITSIZE (SImode) - GET_MODE_BITSIZE (mode);
shift = GEN_INT (ishift);
}
else
{
rtx addrSI, aligned_addr;
int shift_mask = mode == QImode ? 0x18 : 0x10;
addrSI = force_reg (SImode, gen_lowpart_common (SImode,
XEXP (used_m, 0)));
shift = gen_reg_rtx (SImode);
@@ -11961,7 +11974,7 @@ rs6000_emit_sync (enum rtx_code code, enum machine_mode mode,
case MINUS:
{
rtx mask;
newop = expand_binop (SImode, and_optab,
oldop, GEN_INT (imask), NULL_RTX,
1, OPTAB_LIB_WIDEN);
@@ -12016,7 +12029,7 @@ rs6000_emit_sync (enum rtx_code code, enum machine_mode mode,
if (after == NULL_RTX)
after = gen_reg_rtx (used_mode);
}
if ((code == PLUS || code == MINUS || GET_CODE (m) == NOT)
&& used_mode != mode)
the_op = op; /* Computed above.  */
@@ -12109,7 +12122,7 @@ emit_store_conditional (enum machine_mode mode, rtx res, rtx mem, rtx val)
}
/* Expand an an atomic fetch-and-operate pattern.  CODE is the binary operation
to perform.  MEM is the memory on which to operate.  VAL is the second
operand of the binary operator.  BEFORE and AFTER are optional locations to
return the value of MEM either before of after the operation.  SCRATCH is
a scratch register.  */
@@ -13361,7 +13374,7 @@ rs6000_emit_load_toc_table (int fromprolog)
else
{
gcc_assert (DEFAULT_ABI == ABI_AIX);
if (TARGET_32BIT)
insn = emit_insn (gen_load_toc_aix_si (dest));
else
@@ -17767,7 +17780,7 @@ rs6000_machopic_legitimize_pic_address (rtx orig, enum machine_mode mode,
return orig;
gcc_assert (GET_CODE (XEXP (orig, 0)) == PLUS);
/* Use a different reg for the intermediate value, as
it will be marked UNCHANGING.  */
reg_temp = no_new_pseudos ? reg : gen_reg_rtx (Pmode);
@@ -17776,7 +17789,7 @@ rs6000_machopic_legitimize_pic_address (rtx orig, enum machine_mode mode,
offset =
rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
Pmode, reg);
if (GET_CODE (offset) == CONST_INT)
{
if (SMALL_INT (offset))
@@ -18990,8 +19003,8 @@ rs6000_vector_mode_supported_p (enum machine_mode mode)
return false;
}
/* Target hook for invalid_arg_for_unprototyped_fn. */
static const char *
invalid_arg_for_unprototyped_fn (tree typelist, tree funcdecl, tree val)
{
return (!rs6000_darwin64_abi
...
@@ -4830,7 +4830,7 @@
(match_dup 3)
(match_dup 4)))]
"TARGET_PPC_GFXOPT && TARGET_HARD_FLOAT && TARGET_FPRS
&& !HONOR_NANS (SFmode) && !HONOR_SIGNED_ZEROS (SFmode)"
{
operands[3] = gen_reg_rtx (SFmode);
operands[4] = gen_reg_rtx (SFmode);
@@ -9144,11 +9144,11 @@
 ;; Peephole to convert two consecutive FP loads or stores into lfq/stfq.

 (define_insn "*lfq_power2"
-  [(set (match_operand:TF 0 "gpc_reg_operand" "=f")
-        (match_operand:TF 1 "memory_operand" ""))]
+  [(set (match_operand:V2DF 0 "gpc_reg_operand" "=f")
+        (match_operand:V2DF 1 "memory_operand" ""))]
   "TARGET_POWER2
    && TARGET_HARD_FLOAT && TARGET_FPRS"
   "lfq%U1%X1 %0,%1")

 (define_peephole2
   [(set (match_operand:DF 0 "gpc_reg_operand" "")
@@ -9160,13 +9160,13 @@
    && registers_ok_for_quad_peep (operands[0], operands[2])
    && mems_ok_for_quad_peep (operands[1], operands[3])"
   [(set (match_dup 0)
         (match_dup 1))]
-  "operands[1] = widen_memory_access (operands[1], TFmode, 0);
-   operands[0] = gen_rtx_REG (TFmode, REGNO (operands[0]));")
+  "operands[1] = widen_memory_access (operands[1], V2DFmode, 0);
+   operands[0] = gen_rtx_REG (V2DFmode, REGNO (operands[0]));")

 (define_insn "*stfq_power2"
-  [(set (match_operand:TF 0 "memory_operand" "")
-        (match_operand:TF 1 "gpc_reg_operand" "f"))]
+  [(set (match_operand:V2DF 0 "memory_operand" "")
+        (match_operand:V2DF 1 "gpc_reg_operand" "f"))]
   "TARGET_POWER2
    && TARGET_HARD_FLOAT && TARGET_FPRS"
   "stfq%U0%X0 %1,%0")
@@ -9183,8 +9183,8 @@
    && mems_ok_for_quad_peep (operands[0], operands[2])"
   [(set (match_dup 0)
         (match_dup 1))]
-  "operands[0] = widen_memory_access (operands[0], TFmode, 0);
-   operands[1] = gen_rtx_REG (TFmode, REGNO (operands[1]));")
+  "operands[0] = widen_memory_access (operands[0], V2DFmode, 0);
+   operands[1] = gen_rtx_REG (V2DFmode, REGNO (operands[1]));")

 ;; after inserting conditional returns we can sometimes have
 ;; unnecessary register moves.  Unfortunately we cannot have a
...
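For context on the rs6000.md hunks above: the *lfq_power2 and *stfq_power2 patterns, together with the define_peephole2s, fuse two adjacent DFmode accesses into a single quad load or store, and after this patch the fused access is described as V2DFmode (two packed doubles) rather than TFmode. The C fragment below is a hypothetical illustration of source code that can give the peephole something to match on a POWER2 target; whether it actually fires depends on register allocation and on the offset check in mems_ok_for_quad_peep, and the struct and function names are made up for this sketch.

/* Hypothetical example, not part of the patch.  Copying two adjacent
   doubles tends to expand to lfd/stfd pairs at offsets 0 and 8 from the
   same base register, which is the shape the peepholes look for before
   emitting lfq/stfq.  */
struct dpair { double a, b; };

void
copy_dpair (struct dpair *dst, const struct dpair *src)
{
  dst->a = src->a;   /* lfd fX,0(rSRC) ... stfd fX,0(rDST) */
  dst->b = src->b;   /* lfd fY,8(rSRC) ... stfd fY,8(rDST) */
}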