Commit b282a8d3 by H.J. Lu, committed by H.J. Lu

i386.c (bdesc_ptest): Removed.

2008-05-10  H.J. Lu  <hongjiu.lu@intel.com>

	* config/i386/i386.c (bdesc_ptest): Removed.
	(ix86_builtin_type): Add INT_FTYPE_V2DI_V2DI_PTEST.
	(bdesc_args): Add __builtin_ia32_ptestz128,
	__builtin_ia32_ptestc128 and __builtin_ia32_ptestnzc128.
	(ix86_init_mmx_sse_builtins): Updated.
	(ix86_expand_args_builtin): Handle INT_FTYPE_V2DI_V2DI_PTEST.
	(ix86_expand_builtin): Updated.

From-SVN: r135144
parent 17d23165
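For context, the three ptest builtins this commit moves into bdesc_args back the SSE4.1 vector-test intrinsics declared in GCC's smmintrin.h. A minimal usage sketch (the intrinsic names and the ZF/CF semantics are standard SSE4.1 and not part of this diff; assumes a compiler with -msse4.1 support):

#include <smmintrin.h>   /* SSE4.1 intrinsics; compile with -msse4.1.  */

/* _mm_testz_si128   -> __builtin_ia32_ptestz128   (ZF: (a & b) == 0)
   _mm_testc_si128   -> __builtin_ia32_ptestc128   (CF: (~a & b) == 0)
   _mm_testnzc_si128 -> __builtin_ia32_ptestnzc128 (ZF == 0 && CF == 0)  */
int
classify (__m128i a, __m128i b)
{
  if (_mm_testz_si128 (a, b))
    return 0;                       /* a and b share no set bits.  */
  if (_mm_testc_si128 (a, b))
    return 1;                       /* every set bit of b is also set in a.  */
  return _mm_testnzc_si128 (a, b) ? 2 : 3;  /* mixed; 3 is unreachable here.  */
}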
@@ -17989,14 +17989,6 @@ static const struct builtin_description bdesc_comi[] =
   { OPTION_MASK_ISA_SSE2, CODE_FOR_sse2_ucomi, "__builtin_ia32_ucomisdneq", IX86_BUILTIN_UCOMINEQSD, LTGT, 0 },
 };
 
-static const struct builtin_description bdesc_ptest[] =
-{
-  /* SSE4.1 */
-  { OPTION_MASK_ISA_ROUND, CODE_FOR_sse4_1_ptest, "__builtin_ia32_ptestz128", IX86_BUILTIN_PTESTZ, EQ, 0 },
-  { OPTION_MASK_ISA_ROUND, CODE_FOR_sse4_1_ptest, "__builtin_ia32_ptestc128", IX86_BUILTIN_PTESTC, LTU, 0 },
-  { OPTION_MASK_ISA_ROUND, CODE_FOR_sse4_1_ptest, "__builtin_ia32_ptestnzc128", IX86_BUILTIN_PTESTNZC, GTU, 0 },
-};
-
 static const struct builtin_description bdesc_pcmpestr[] =
 {
   /* SSE4.2 */
@@ -18048,6 +18040,7 @@ enum ix86_builtin_type
   FLOAT128_FTYPE_FLOAT128,
   FLOAT_FTYPE_FLOAT,
   FLOAT128_FTYPE_FLOAT128_FLOAT128,
+  INT_FTYPE_V2DI_V2DI_PTEST,
   INT64_FTYPE_V4SF,
   INT64_FTYPE_V2DF,
   INT_FTYPE_V16QI,
@@ -18655,6 +18648,10 @@ static const struct builtin_description bdesc_args[] =
   { OPTION_MASK_ISA_ROUND, CODE_FOR_sse4_1_roundsd, "__builtin_ia32_roundsd", IX86_BUILTIN_ROUNDSD, UNKNOWN, (int) V2DF_FTYPE_V2DF_V2DF_INT },
   { OPTION_MASK_ISA_ROUND, CODE_FOR_sse4_1_roundss, "__builtin_ia32_roundss", IX86_BUILTIN_ROUNDSS, UNKNOWN, (int) V4SF_FTYPE_V4SF_V4SF_INT },
+
+  { OPTION_MASK_ISA_ROUND, CODE_FOR_sse4_1_ptest, "__builtin_ia32_ptestz128", IX86_BUILTIN_PTESTZ, EQ, (int) INT_FTYPE_V2DI_V2DI_PTEST },
+  { OPTION_MASK_ISA_ROUND, CODE_FOR_sse4_1_ptest, "__builtin_ia32_ptestc128", IX86_BUILTIN_PTESTC, LTU, (int) INT_FTYPE_V2DI_V2DI_PTEST },
+  { OPTION_MASK_ISA_ROUND, CODE_FOR_sse4_1_ptest, "__builtin_ia32_ptestnzc128", IX86_BUILTIN_PTESTNZC, GTU, (int) INT_FTYPE_V2DI_V2DI_PTEST },
 
   /* SSE4.2 */
   { OPTION_MASK_ISA_SSE4_2, CODE_FOR_sse4_2_gtv2di3, "__builtin_ia32_pcmpgtq", IX86_BUILTIN_PCMPGTQ, UNKNOWN, (int) V2DI_FTYPE_V2DI_V2DI },
   { OPTION_MASK_ISA_SSE4_2, CODE_FOR_sse4_2_crc32qi, "__builtin_ia32_crc32qi", IX86_BUILTIN_CRC32QI, UNKNOWN, (int) UINT_FTYPE_UINT_UCHAR },
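Each row above follows the builtin_description layout used throughout i386.c; the point of this patch is that the last field, which was a bare 0 in the old bdesc_ptest entries, now carries the ix86_builtin_type tag so the generic expander can recognize ptest. A simplified sketch of the row shape (field names follow the initializers above; the real declaration lives earlier in i386.c):

struct builtin_description
{
  const unsigned int mask;         /* OPTION_MASK_ISA_* the builtin requires.  */
  const enum insn_code icode;      /* insn pattern, e.g. CODE_FOR_sse4_1_ptest.  */
  const char *const name;          /* e.g. "__builtin_ia32_ptestz128".  */
  const enum ix86_builtins code;   /* e.g. IX86_BUILTIN_PTESTZ.  */
  const enum rtx_code comparison;  /* EQ/LTU/GTU: which flag bit to read.  */
  const int flag;                  /* for bdesc_args: the ix86_builtin_type.  */
};

EQ, LTU and GTU correspond to sete, setb and seta on the flags PTEST sets, which is how the three variants read ZF, CF, and "both clear" respectively.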
@@ -19597,6 +19594,9 @@ ix86_init_mmx_sse_builtins (void)
     case FLOAT_FTYPE_FLOAT:
       type = float_ftype_float;
       break;
+    case INT_FTYPE_V2DI_V2DI_PTEST:
+      type = int_ftype_v2di_v2di;
+      break;
     case INT64_FTYPE_V4SF:
       type = int64_ftype_v4sf;
       break;
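The int_ftype_v2di_v2di tree type reused here is the same one the deleted bdesc_ptest loop passed to def_builtin_const; it is constructed earlier in ix86_init_mmx_sse_builtins, roughly as follows (a from-memory sketch, not part of this hunk):

tree int_ftype_v2di_v2di
  = build_function_type_list (integer_type_node,
                              V2DI_type_node, V2DI_type_node,
                              NULL_TREE);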
@@ -19935,10 +19935,6 @@ ix86_init_mmx_sse_builtins (void)
   else
     def_builtin_const (d->mask, d->name, int_ftype_v4sf_v4sf, d->code);
 
-  /* ptest insns. */
-  for (i = 0, d = bdesc_ptest; i < ARRAY_SIZE (bdesc_ptest); i++, d++)
-    def_builtin_const (d->mask, d->name, int_ftype_v2di_v2di, d->code);
-
   /* SSE */
   def_builtin (OPTION_MASK_ISA_SSE, "__builtin_ia32_ldmxcsr", void_ftype_unsigned, IX86_BUILTIN_LDMXCSR);
   def_builtin (OPTION_MASK_ISA_SSE, "__builtin_ia32_stmxcsr", unsigned_ftype_void, IX86_BUILTIN_STMXCSR);
@@ -20451,462 +20447,64 @@ ix86_expand_sse_compare (const struct builtin_description *d,
return target;
}
/* Subroutine of ix86_expand_builtin to take care of insns with
variable number of operands. */
/* Subroutine of ix86_expand_builtin to take care of comi insns. */
static rtx
ix86_expand_args_builtin (const struct builtin_description *d,
tree exp, rtx target)
ix86_expand_sse_comi (const struct builtin_description *d, tree exp,
rtx target)
{
rtx pat, real_target;
unsigned int i, nargs;
unsigned int nargs_constant = 0;
int num_memory = 0;
struct
{
rtx op;
enum machine_mode mode;
} args[4];
bool last_arg_count = false;
enum insn_code icode = d->icode;
const struct insn_data *insn_p = &insn_data[icode];
enum machine_mode tmode = insn_p->operand[0].mode;
enum machine_mode rmode = VOIDmode;
bool swap = false;
rtx pat;
tree arg0 = CALL_EXPR_ARG (exp, 0);
tree arg1 = CALL_EXPR_ARG (exp, 1);
rtx op0 = expand_normal (arg0);
rtx op1 = expand_normal (arg1);
enum machine_mode mode0 = insn_data[d->icode].operand[0].mode;
enum machine_mode mode1 = insn_data[d->icode].operand[1].mode;
enum rtx_code comparison = d->comparison;
switch ((enum ix86_builtin_type) d->flag)
if (VECTOR_MODE_P (mode0))
op0 = safe_vector_operand (op0, mode0);
if (VECTOR_MODE_P (mode1))
op1 = safe_vector_operand (op1, mode1);
/* Swap operands if we have a comparison that isn't available in
hardware. */
if (d->flag & BUILTIN_DESC_SWAP_OPERANDS)
{
case FLOAT128_FTYPE_FLOAT128:
case FLOAT_FTYPE_FLOAT:
case INT64_FTYPE_V4SF:
case INT64_FTYPE_V2DF:
case INT_FTYPE_V16QI:
case INT_FTYPE_V8QI:
case INT_FTYPE_V4SF:
case INT_FTYPE_V2DF:
case V16QI_FTYPE_V16QI:
case V8HI_FTYPE_V8HI:
case V8HI_FTYPE_V16QI:
case V8QI_FTYPE_V8QI:
case V4SI_FTYPE_V4SI:
case V4SI_FTYPE_V16QI:
case V4SI_FTYPE_V4SF:
case V4SI_FTYPE_V8HI:
case V4SI_FTYPE_V2DF:
case V4HI_FTYPE_V4HI:
case V4SF_FTYPE_V4SF:
case V4SF_FTYPE_V4SI:
case V4SF_FTYPE_V2DF:
case V2DI_FTYPE_V2DI:
case V2DI_FTYPE_V16QI:
case V2DI_FTYPE_V8HI:
case V2DI_FTYPE_V4SI:
case V2DF_FTYPE_V2DF:
case V2DF_FTYPE_V4SI:
case V2DF_FTYPE_V4SF:
case V2DF_FTYPE_V2SI:
case V2SI_FTYPE_V2SI:
case V2SI_FTYPE_V4SF:
case V2SI_FTYPE_V2SF:
case V2SI_FTYPE_V2DF:
case V2SF_FTYPE_V2SF:
case V2SF_FTYPE_V2SI:
nargs = 1;
break;
case V4SF_FTYPE_V4SF_VEC_MERGE:
case V2DF_FTYPE_V2DF_VEC_MERGE:
return ix86_expand_unop_vec_merge_builtin (icode, exp, target);
case FLOAT128_FTYPE_FLOAT128_FLOAT128:
case V16QI_FTYPE_V16QI_V16QI:
case V16QI_FTYPE_V8HI_V8HI:
case V8QI_FTYPE_V8QI_V8QI:
case V8QI_FTYPE_V4HI_V4HI:
case V8HI_FTYPE_V8HI_V8HI:
case V8HI_FTYPE_V16QI_V16QI:
case V8HI_FTYPE_V4SI_V4SI:
case V4SI_FTYPE_V4SI_V4SI:
case V4SI_FTYPE_V8HI_V8HI:
case V4SI_FTYPE_V4SF_V4SF:
case V4SI_FTYPE_V2DF_V2DF:
case V4HI_FTYPE_V4HI_V4HI:
case V4HI_FTYPE_V8QI_V8QI:
case V4HI_FTYPE_V2SI_V2SI:
case V4SF_FTYPE_V4SF_V4SF:
case V4SF_FTYPE_V4SF_V2SI:
case V4SF_FTYPE_V4SF_V2DF:
case V4SF_FTYPE_V4SF_DI:
case V4SF_FTYPE_V4SF_SI:
case V2DI_FTYPE_V2DI_V2DI:
case V2DI_FTYPE_V16QI_V16QI:
case V2DI_FTYPE_V4SI_V4SI:
case V2DI_FTYPE_V2DI_V16QI:
case V2DI_FTYPE_V2DF_V2DF:
case V2SI_FTYPE_V2SI_V2SI:
case V2SI_FTYPE_V4HI_V4HI:
case V2SI_FTYPE_V2SF_V2SF:
case V2DF_FTYPE_V2DF_V2DF:
case V2DF_FTYPE_V2DF_V4SF:
case V2DF_FTYPE_V2DF_DI:
case V2DF_FTYPE_V2DF_SI:
case V2SF_FTYPE_V2SF_V2SF:
case V1DI_FTYPE_V1DI_V1DI:
case V1DI_FTYPE_V8QI_V8QI:
case V1DI_FTYPE_V2SI_V2SI:
if (comparison == UNKNOWN)
return ix86_expand_binop_builtin (icode, exp, target);
nargs = 2;
break;
case V4SF_FTYPE_V4SF_V4SF_SWAP:
case V2DF_FTYPE_V2DF_V2DF_SWAP:
gcc_assert (comparison != UNKNOWN);
nargs = 2;
swap = true;
break;
case V8HI_FTYPE_V8HI_V8HI_COUNT:
case V8HI_FTYPE_V8HI_SI_COUNT:
case V4SI_FTYPE_V4SI_V4SI_COUNT:
case V4SI_FTYPE_V4SI_SI_COUNT:
case V4HI_FTYPE_V4HI_V4HI_COUNT:
case V4HI_FTYPE_V4HI_SI_COUNT:
case V2DI_FTYPE_V2DI_V2DI_COUNT:
case V2DI_FTYPE_V2DI_SI_COUNT:
case V2SI_FTYPE_V2SI_V2SI_COUNT:
case V2SI_FTYPE_V2SI_SI_COUNT:
case V1DI_FTYPE_V1DI_V1DI_COUNT:
case V1DI_FTYPE_V1DI_SI_COUNT:
nargs = 2;
last_arg_count = true;
break;
case UINT64_FTYPE_UINT64_UINT64:
case UINT_FTYPE_UINT_UINT:
case UINT_FTYPE_UINT_USHORT:
case UINT_FTYPE_UINT_UCHAR:
nargs = 2;
break;
case V2DI2TI_FTYPE_V2DI_INT:
nargs = 2;
rmode = V2DImode;
nargs_constant = 1;
break;
case V8HI_FTYPE_V8HI_INT:
case V4SI_FTYPE_V4SI_INT:
case V4HI_FTYPE_V4HI_INT:
case V4SF_FTYPE_V4SF_INT:
case V2DI_FTYPE_V2DI_INT:
case V2DF_FTYPE_V2DF_INT:
nargs = 2;
nargs_constant = 1;
break;
case V16QI_FTYPE_V16QI_V16QI_V16QI:
case V4SF_FTYPE_V4SF_V4SF_V4SF:
case V2DF_FTYPE_V2DF_V2DF_V2DF:
nargs = 3;
break;
case V16QI_FTYPE_V16QI_V16QI_INT:
case V8HI_FTYPE_V8HI_V8HI_INT:
case V4SI_FTYPE_V4SI_V4SI_INT:
case V4SF_FTYPE_V4SF_V4SF_INT:
case V2DI_FTYPE_V2DI_V2DI_INT:
case V2DF_FTYPE_V2DF_V2DF_INT:
nargs = 3;
nargs_constant = 1;
break;
case V2DI2TI_FTYPE_V2DI_V2DI_INT:
nargs = 3;
rmode = V2DImode;
nargs_constant = 1;
break;
case V1DI2DI_FTYPE_V1DI_V1DI_INT:
nargs = 3;
rmode = DImode;
nargs_constant = 1;
break;
case V2DI_FTYPE_V2DI_UINT_UINT:
nargs = 3;
nargs_constant = 2;
break;
case V2DI_FTYPE_V2DI_V2DI_UINT_UINT:
nargs = 4;
nargs_constant = 2;
break;
default:
gcc_unreachable ();
rtx tmp = op1;
op1 = op0;
op0 = tmp;
}
gcc_assert (nargs <= ARRAY_SIZE (args));
target = gen_reg_rtx (SImode);
emit_move_insn (target, const0_rtx);
target = gen_rtx_SUBREG (QImode, target, 0);
if (comparison != UNKNOWN)
{
gcc_assert (nargs == 2);
return ix86_expand_sse_compare (d, exp, target, swap);
}
if ((optimize && !register_operand (op0, mode0))
|| !(*insn_data[d->icode].operand[0].predicate) (op0, mode0))
op0 = copy_to_mode_reg (mode0, op0);
if ((optimize && !register_operand (op1, mode1))
|| !(*insn_data[d->icode].operand[1].predicate) (op1, mode1))
op1 = copy_to_mode_reg (mode1, op1);
if (rmode == VOIDmode || rmode == tmode)
{
if (optimize
|| target == 0
|| GET_MODE (target) != tmode
|| ! (*insn_p->operand[0].predicate) (target, tmode))
target = gen_reg_rtx (tmode);
real_target = target;
}
else
{
target = gen_reg_rtx (rmode);
real_target = simplify_gen_subreg (tmode, target, rmode, 0);
}
pat = GEN_FCN (d->icode) (op0, op1);
if (! pat)
return 0;
emit_insn (pat);
emit_insn (gen_rtx_SET (VOIDmode,
gen_rtx_STRICT_LOW_PART (VOIDmode, target),
gen_rtx_fmt_ee (comparison, QImode,
SET_DEST (pat),
const0_rtx)));
for (i = 0; i < nargs; i++)
{
tree arg = CALL_EXPR_ARG (exp, i);
rtx op = expand_normal (arg);
enum machine_mode mode = insn_p->operand[i + 1].mode;
bool match = (*insn_p->operand[i + 1].predicate) (op, mode);
return SUBREG_REG (target);
}
if (last_arg_count && (i + 1) == nargs)
{
/* SIMD shift insns take either an 8-bit immediate or
register as count. But builtin functions take int as
count. If count doesn't match, we put it in register. */
if (!match)
{
op = simplify_gen_subreg (SImode, op, GET_MODE (op), 0);
if (!(*insn_p->operand[i + 1].predicate) (op, mode))
op = copy_to_reg (op);
}
}
else if ((nargs - i) <= nargs_constant)
{
if (!match)
switch (icode)
{
case CODE_FOR_sse4_1_roundpd:
case CODE_FOR_sse4_1_roundps:
case CODE_FOR_sse4_1_roundsd:
case CODE_FOR_sse4_1_roundss:
case CODE_FOR_sse4_1_blendps:
error ("the last argument must be a 4-bit immediate");
return const0_rtx;
case CODE_FOR_sse4_1_blendpd:
error ("the last argument must be a 2-bit immediate");
return const0_rtx;
default:
switch (nargs_constant)
{
case 2:
if ((nargs - i) == nargs_constant)
{
error ("the next to last argument must be an 8-bit immediate");
break;
}
case 1:
error ("the last argument must be an 8-bit immediate");
break;
default:
gcc_unreachable ();
}
return const0_rtx;
}
}
else
{
if (VECTOR_MODE_P (mode))
op = safe_vector_operand (op, mode);
/* If we aren't optimizing, only allow one memory operand to
be generated. */
if (memory_operand (op, mode))
num_memory++;
if (GET_MODE (op) == mode || GET_MODE (op) == VOIDmode)
{
if (optimize || !match || num_memory > 1)
op = copy_to_mode_reg (mode, op);
}
else
{
op = copy_to_reg (op);
op = simplify_gen_subreg (mode, op, GET_MODE (op), 0);
}
}
args[i].op = op;
args[i].mode = mode;
}
switch (nargs)
{
case 1:
pat = GEN_FCN (icode) (real_target, args[0].op);
break;
case 2:
pat = GEN_FCN (icode) (real_target, args[0].op, args[1].op);
break;
case 3:
pat = GEN_FCN (icode) (real_target, args[0].op, args[1].op,
args[2].op);
break;
case 4:
pat = GEN_FCN (icode) (real_target, args[0].op, args[1].op,
args[2].op, args[3].op);
break;
default:
gcc_unreachable ();
}
if (! pat)
return 0;
emit_insn (pat);
return target;
}
/* Subroutine of ix86_expand_builtin to take care of special insns
with variable number of operands. */
static rtx
ix86_expand_special_args_builtin (const struct builtin_description *d,
tree exp, rtx target)
{
tree arg;
rtx pat, op;
unsigned int i, nargs, arg_adjust, memory;
struct
{
rtx op;
enum machine_mode mode;
} args[2];
enum insn_code icode = d->icode;
bool last_arg_constant = false;
const struct insn_data *insn_p = &insn_data[icode];
enum machine_mode tmode = insn_p->operand[0].mode;
enum { load, store } class;
switch ((enum ix86_special_builtin_type) d->flag)
{
case VOID_FTYPE_VOID:
emit_insn (GEN_FCN (icode) (target));
return 0;
case V2DI_FTYPE_PV2DI:
case V16QI_FTYPE_PCCHAR:
case V4SF_FTYPE_PCFLOAT:
case V2DF_FTYPE_PCDOUBLE:
nargs = 1;
class = load;
memory = 0;
break;
case VOID_FTYPE_PV2SF_V4SF:
case VOID_FTYPE_PV2DI_V2DI:
case VOID_FTYPE_PCHAR_V16QI:
case VOID_FTYPE_PFLOAT_V4SF:
case VOID_FTYPE_PDOUBLE_V2DF:
case VOID_FTYPE_PDI_DI:
case VOID_FTYPE_PINT_INT:
nargs = 1;
class = store;
/* Reserve memory operand for target. */
memory = ARRAY_SIZE (args);
break;
case V4SF_FTYPE_V4SF_PCV2SF:
case V2DF_FTYPE_V2DF_PCDOUBLE:
nargs = 2;
class = load;
memory = 1;
break;
default:
gcc_unreachable ();
}
gcc_assert (nargs <= ARRAY_SIZE (args));
if (class == store)
{
arg = CALL_EXPR_ARG (exp, 0);
op = expand_normal (arg);
gcc_assert (target == 0);
target = gen_rtx_MEM (tmode, copy_to_mode_reg (Pmode, op));
arg_adjust = 1;
}
else
{
arg_adjust = 0;
if (optimize
|| target == 0
|| GET_MODE (target) != tmode
|| ! (*insn_p->operand[0].predicate) (target, tmode))
target = gen_reg_rtx (tmode);
}
for (i = 0; i < nargs; i++)
{
enum machine_mode mode = insn_p->operand[i + 1].mode;
bool match;
arg = CALL_EXPR_ARG (exp, i + arg_adjust);
op = expand_normal (arg);
match = (*insn_p->operand[i + 1].predicate) (op, mode);
if (last_arg_constant && (i + 1) == nargs)
{
if (!match)
switch (icode)
{
default:
error ("the last argument must be an 8-bit immediate");
return const0_rtx;
}
}
else
{
if (i == memory)
{
/* This must be the memory operand. */
op = gen_rtx_MEM (mode, copy_to_mode_reg (Pmode, op));
gcc_assert (GET_MODE (op) == mode
|| GET_MODE (op) == VOIDmode);
}
else
{
/* This must be register. */
if (VECTOR_MODE_P (mode))
op = safe_vector_operand (op, mode);
gcc_assert (GET_MODE (op) == mode
|| GET_MODE (op) == VOIDmode);
op = copy_to_mode_reg (mode, op);
}
}
args[i].op = op;
args[i].mode = mode;
}
switch (nargs)
{
case 1:
pat = GEN_FCN (icode) (target, args[0].op);
break;
case 2:
pat = GEN_FCN (icode) (target, args[0].op, args[1].op);
break;
default:
gcc_unreachable ();
}
if (! pat)
return 0;
emit_insn (pat);
return class == store ? 0 : target;
}
/* Subroutine of ix86_expand_builtin to take care of comi insns. */
/* Subroutine of ix86_expand_builtin to take care of ptest insns. */
static rtx
ix86_expand_sse_comi (const struct builtin_description *d, tree exp,
rtx target)
ix86_expand_sse_ptest (const struct builtin_description *d, tree exp,
rtx target)
{
rtx pat;
tree arg0 = CALL_EXPR_ARG (exp, 0);
@@ -20922,15 +20520,6 @@ ix86_expand_sse_comi (const struct builtin_description *d, tree exp,
   if (VECTOR_MODE_P (mode1))
     op1 = safe_vector_operand (op1, mode1);
 
-  /* Swap operands if we have a comparison that isn't available in
-     hardware. */
-  if (d->flag & BUILTIN_DESC_SWAP_OPERANDS)
-    {
-      rtx tmp = op1;
-      op1 = op0;
-      op0 = tmp;
-    }
-
   target = gen_reg_rtx (SImode);
   emit_move_insn (target, const0_rtx);
   target = gen_rtx_SUBREG (QImode, target, 0);
@@ -20955,63 +20544,19 @@ ix86_expand_sse_comi (const struct builtin_description *d, tree exp,
return SUBREG_REG (target);
}
/* Subroutine of ix86_expand_builtin to take care of ptest insns. */
/* Subroutine of ix86_expand_builtin to take care of pcmpestr[im] insns. */
static rtx
ix86_expand_sse_ptest (const struct builtin_description *d, tree exp,
rtx target)
ix86_expand_sse_pcmpestr (const struct builtin_description *d,
tree exp, rtx target)
{
rtx pat;
tree arg0 = CALL_EXPR_ARG (exp, 0);
tree arg1 = CALL_EXPR_ARG (exp, 1);
rtx op0 = expand_normal (arg0);
rtx op1 = expand_normal (arg1);
enum machine_mode mode0 = insn_data[d->icode].operand[0].mode;
enum machine_mode mode1 = insn_data[d->icode].operand[1].mode;
enum rtx_code comparison = d->comparison;
if (VECTOR_MODE_P (mode0))
op0 = safe_vector_operand (op0, mode0);
if (VECTOR_MODE_P (mode1))
op1 = safe_vector_operand (op1, mode1);
target = gen_reg_rtx (SImode);
emit_move_insn (target, const0_rtx);
target = gen_rtx_SUBREG (QImode, target, 0);
if ((optimize && !register_operand (op0, mode0))
|| !(*insn_data[d->icode].operand[0].predicate) (op0, mode0))
op0 = copy_to_mode_reg (mode0, op0);
if ((optimize && !register_operand (op1, mode1))
|| !(*insn_data[d->icode].operand[1].predicate) (op1, mode1))
op1 = copy_to_mode_reg (mode1, op1);
pat = GEN_FCN (d->icode) (op0, op1);
if (! pat)
return 0;
emit_insn (pat);
emit_insn (gen_rtx_SET (VOIDmode,
gen_rtx_STRICT_LOW_PART (VOIDmode, target),
gen_rtx_fmt_ee (comparison, QImode,
SET_DEST (pat),
const0_rtx)));
return SUBREG_REG (target);
}
/* Subroutine of ix86_expand_builtin to take care of pcmpestr[im] insns. */
static rtx
ix86_expand_sse_pcmpestr (const struct builtin_description *d,
tree exp, rtx target)
{
rtx pat;
tree arg0 = CALL_EXPR_ARG (exp, 0);
tree arg1 = CALL_EXPR_ARG (exp, 1);
tree arg2 = CALL_EXPR_ARG (exp, 2);
tree arg3 = CALL_EXPR_ARG (exp, 3);
tree arg4 = CALL_EXPR_ARG (exp, 4);
rtx scratch0, scratch1;
rtx op0 = expand_normal (arg0);
rtx op1 = expand_normal (arg1);
rtx op2 = expand_normal (arg2);
@@ -21066,136 +20611,589 @@ ix86_expand_sse_pcmpestr (const struct builtin_description *d,
|| ! (*insn_data[d->icode].operand[1].predicate) (target, tmode1))
target = gen_reg_rtx (tmode1);
scratch0 = gen_reg_rtx (tmode0);
pat = GEN_FCN (d->icode) (scratch0, target, op0, op1, op2, op3, op4);
}
else
{
gcc_assert (d->flag);
scratch0 = gen_reg_rtx (tmode0);
scratch1 = gen_reg_rtx (tmode1);
pat = GEN_FCN (d->icode) (scratch0, scratch1, op0, op1, op2, op3, op4);
}
if (! pat)
return 0;
emit_insn (pat);
if (d->flag)
{
target = gen_reg_rtx (SImode);
emit_move_insn (target, const0_rtx);
target = gen_rtx_SUBREG (QImode, target, 0);
emit_insn
(gen_rtx_SET (VOIDmode, gen_rtx_STRICT_LOW_PART (VOIDmode, target),
gen_rtx_fmt_ee (EQ, QImode,
gen_rtx_REG ((enum machine_mode) d->flag,
FLAGS_REG),
const0_rtx)));
return SUBREG_REG (target);
}
else
return target;
}
/* Subroutine of ix86_expand_builtin to take care of pcmpistr[im] insns. */
static rtx
ix86_expand_sse_pcmpistr (const struct builtin_description *d,
tree exp, rtx target)
{
rtx pat;
tree arg0 = CALL_EXPR_ARG (exp, 0);
tree arg1 = CALL_EXPR_ARG (exp, 1);
tree arg2 = CALL_EXPR_ARG (exp, 2);
rtx scratch0, scratch1;
rtx op0 = expand_normal (arg0);
rtx op1 = expand_normal (arg1);
rtx op2 = expand_normal (arg2);
enum machine_mode tmode0, tmode1, modev2, modev3, modeimm;
tmode0 = insn_data[d->icode].operand[0].mode;
tmode1 = insn_data[d->icode].operand[1].mode;
modev2 = insn_data[d->icode].operand[2].mode;
modev3 = insn_data[d->icode].operand[3].mode;
modeimm = insn_data[d->icode].operand[4].mode;
if (VECTOR_MODE_P (modev2))
op0 = safe_vector_operand (op0, modev2);
if (VECTOR_MODE_P (modev3))
op1 = safe_vector_operand (op1, modev3);
if (! (*insn_data[d->icode].operand[2].predicate) (op0, modev2))
op0 = copy_to_mode_reg (modev2, op0);
if ((optimize && !register_operand (op1, modev3))
|| !(*insn_data[d->icode].operand[3].predicate) (op1, modev3))
op1 = copy_to_mode_reg (modev3, op1);
if (! (*insn_data[d->icode].operand[4].predicate) (op2, modeimm))
{
error ("the third argument must be a 8-bit immediate");
return const0_rtx;
}
if (d->code == IX86_BUILTIN_PCMPISTRI128)
{
if (optimize || !target
|| GET_MODE (target) != tmode0
|| ! (*insn_data[d->icode].operand[0].predicate) (target, tmode0))
target = gen_reg_rtx (tmode0);
scratch1 = gen_reg_rtx (tmode1);
pat = GEN_FCN (d->icode) (target, scratch1, op0, op1, op2);
}
else if (d->code == IX86_BUILTIN_PCMPISTRM128)
{
if (optimize || !target
|| GET_MODE (target) != tmode1
|| ! (*insn_data[d->icode].operand[1].predicate) (target, tmode1))
target = gen_reg_rtx (tmode1);
scratch0 = gen_reg_rtx (tmode0);
pat = GEN_FCN (d->icode) (scratch0, target, op0, op1, op2);
}
else
{
gcc_assert (d->flag);
scratch0 = gen_reg_rtx (tmode0);
scratch1 = gen_reg_rtx (tmode1);
pat = GEN_FCN (d->icode) (scratch0, scratch1, op0, op1, op2);
}
if (! pat)
return 0;
emit_insn (pat);
if (d->flag)
{
target = gen_reg_rtx (SImode);
emit_move_insn (target, const0_rtx);
target = gen_rtx_SUBREG (QImode, target, 0);
emit_insn
(gen_rtx_SET (VOIDmode, gen_rtx_STRICT_LOW_PART (VOIDmode, target),
gen_rtx_fmt_ee (EQ, QImode,
gen_rtx_REG ((enum machine_mode) d->flag,
FLAGS_REG),
const0_rtx)));
return SUBREG_REG (target);
}
else
return target;
}
/* Subroutine of ix86_expand_builtin to take care of insns with
variable number of operands. */
static rtx
ix86_expand_args_builtin (const struct builtin_description *d,
tree exp, rtx target)
{
rtx pat, real_target;
unsigned int i, nargs;
unsigned int nargs_constant = 0;
int num_memory = 0;
struct
{
rtx op;
enum machine_mode mode;
} args[4];
bool last_arg_count = false;
enum insn_code icode = d->icode;
const struct insn_data *insn_p = &insn_data[icode];
enum machine_mode tmode = insn_p->operand[0].mode;
enum machine_mode rmode = VOIDmode;
bool swap = false;
enum rtx_code comparison = d->comparison;
switch ((enum ix86_builtin_type) d->flag)
{
case INT_FTYPE_V2DI_V2DI_PTEST:
return ix86_expand_sse_ptest (d, exp, target);
case FLOAT128_FTYPE_FLOAT128:
case FLOAT_FTYPE_FLOAT:
case INT64_FTYPE_V4SF:
case INT64_FTYPE_V2DF:
case INT_FTYPE_V16QI:
case INT_FTYPE_V8QI:
case INT_FTYPE_V4SF:
case INT_FTYPE_V2DF:
case V16QI_FTYPE_V16QI:
case V8HI_FTYPE_V8HI:
case V8HI_FTYPE_V16QI:
case V8QI_FTYPE_V8QI:
case V4SI_FTYPE_V4SI:
case V4SI_FTYPE_V16QI:
case V4SI_FTYPE_V4SF:
case V4SI_FTYPE_V8HI:
case V4SI_FTYPE_V2DF:
case V4HI_FTYPE_V4HI:
case V4SF_FTYPE_V4SF:
case V4SF_FTYPE_V4SI:
case V4SF_FTYPE_V2DF:
case V2DI_FTYPE_V2DI:
case V2DI_FTYPE_V16QI:
case V2DI_FTYPE_V8HI:
case V2DI_FTYPE_V4SI:
case V2DF_FTYPE_V2DF:
case V2DF_FTYPE_V4SI:
case V2DF_FTYPE_V4SF:
case V2DF_FTYPE_V2SI:
case V2SI_FTYPE_V2SI:
case V2SI_FTYPE_V4SF:
case V2SI_FTYPE_V2SF:
case V2SI_FTYPE_V2DF:
case V2SF_FTYPE_V2SF:
case V2SF_FTYPE_V2SI:
nargs = 1;
break;
case V4SF_FTYPE_V4SF_VEC_MERGE:
case V2DF_FTYPE_V2DF_VEC_MERGE:
return ix86_expand_unop_vec_merge_builtin (icode, exp, target);
case FLOAT128_FTYPE_FLOAT128_FLOAT128:
case V16QI_FTYPE_V16QI_V16QI:
case V16QI_FTYPE_V8HI_V8HI:
case V8QI_FTYPE_V8QI_V8QI:
case V8QI_FTYPE_V4HI_V4HI:
case V8HI_FTYPE_V8HI_V8HI:
case V8HI_FTYPE_V16QI_V16QI:
case V8HI_FTYPE_V4SI_V4SI:
case V4SI_FTYPE_V4SI_V4SI:
case V4SI_FTYPE_V8HI_V8HI:
case V4SI_FTYPE_V4SF_V4SF:
case V4SI_FTYPE_V2DF_V2DF:
case V4HI_FTYPE_V4HI_V4HI:
case V4HI_FTYPE_V8QI_V8QI:
case V4HI_FTYPE_V2SI_V2SI:
case V4SF_FTYPE_V4SF_V4SF:
case V4SF_FTYPE_V4SF_V2SI:
case V4SF_FTYPE_V4SF_V2DF:
case V4SF_FTYPE_V4SF_DI:
case V4SF_FTYPE_V4SF_SI:
case V2DI_FTYPE_V2DI_V2DI:
case V2DI_FTYPE_V16QI_V16QI:
case V2DI_FTYPE_V4SI_V4SI:
case V2DI_FTYPE_V2DI_V16QI:
case V2DI_FTYPE_V2DF_V2DF:
case V2SI_FTYPE_V2SI_V2SI:
case V2SI_FTYPE_V4HI_V4HI:
case V2SI_FTYPE_V2SF_V2SF:
case V2DF_FTYPE_V2DF_V2DF:
case V2DF_FTYPE_V2DF_V4SF:
case V2DF_FTYPE_V2DF_DI:
case V2DF_FTYPE_V2DF_SI:
case V2SF_FTYPE_V2SF_V2SF:
case V1DI_FTYPE_V1DI_V1DI:
case V1DI_FTYPE_V8QI_V8QI:
case V1DI_FTYPE_V2SI_V2SI:
if (comparison == UNKNOWN)
return ix86_expand_binop_builtin (icode, exp, target);
nargs = 2;
break;
case V4SF_FTYPE_V4SF_V4SF_SWAP:
case V2DF_FTYPE_V2DF_V2DF_SWAP:
gcc_assert (comparison != UNKNOWN);
nargs = 2;
swap = true;
break;
case V8HI_FTYPE_V8HI_V8HI_COUNT:
case V8HI_FTYPE_V8HI_SI_COUNT:
case V4SI_FTYPE_V4SI_V4SI_COUNT:
case V4SI_FTYPE_V4SI_SI_COUNT:
case V4HI_FTYPE_V4HI_V4HI_COUNT:
case V4HI_FTYPE_V4HI_SI_COUNT:
case V2DI_FTYPE_V2DI_V2DI_COUNT:
case V2DI_FTYPE_V2DI_SI_COUNT:
case V2SI_FTYPE_V2SI_V2SI_COUNT:
case V2SI_FTYPE_V2SI_SI_COUNT:
case V1DI_FTYPE_V1DI_V1DI_COUNT:
case V1DI_FTYPE_V1DI_SI_COUNT:
nargs = 2;
last_arg_count = true;
break;
case UINT64_FTYPE_UINT64_UINT64:
case UINT_FTYPE_UINT_UINT:
case UINT_FTYPE_UINT_USHORT:
case UINT_FTYPE_UINT_UCHAR:
nargs = 2;
break;
case V2DI2TI_FTYPE_V2DI_INT:
nargs = 2;
rmode = V2DImode;
nargs_constant = 1;
break;
case V8HI_FTYPE_V8HI_INT:
case V4SI_FTYPE_V4SI_INT:
case V4HI_FTYPE_V4HI_INT:
case V4SF_FTYPE_V4SF_INT:
case V2DI_FTYPE_V2DI_INT:
case V2DF_FTYPE_V2DF_INT:
nargs = 2;
nargs_constant = 1;
break;
case V16QI_FTYPE_V16QI_V16QI_V16QI:
case V4SF_FTYPE_V4SF_V4SF_V4SF:
case V2DF_FTYPE_V2DF_V2DF_V2DF:
nargs = 3;
break;
case V16QI_FTYPE_V16QI_V16QI_INT:
case V8HI_FTYPE_V8HI_V8HI_INT:
case V4SI_FTYPE_V4SI_V4SI_INT:
case V4SF_FTYPE_V4SF_V4SF_INT:
case V2DI_FTYPE_V2DI_V2DI_INT:
case V2DF_FTYPE_V2DF_V2DF_INT:
nargs = 3;
nargs_constant = 1;
break;
case V2DI2TI_FTYPE_V2DI_V2DI_INT:
nargs = 3;
rmode = V2DImode;
nargs_constant = 1;
break;
case V1DI2DI_FTYPE_V1DI_V1DI_INT:
nargs = 3;
rmode = DImode;
nargs_constant = 1;
break;
case V2DI_FTYPE_V2DI_UINT_UINT:
nargs = 3;
nargs_constant = 2;
break;
case V2DI_FTYPE_V2DI_V2DI_UINT_UINT:
nargs = 4;
nargs_constant = 2;
break;
default:
gcc_unreachable ();
}
gcc_assert (nargs <= ARRAY_SIZE (args));
if (comparison != UNKNOWN)
{
gcc_assert (nargs == 2);
return ix86_expand_sse_compare (d, exp, target, swap);
}
if (rmode == VOIDmode || rmode == tmode)
{
if (optimize
|| target == 0
|| GET_MODE (target) != tmode
|| ! (*insn_p->operand[0].predicate) (target, tmode))
target = gen_reg_rtx (tmode);
real_target = target;
}
else
{
target = gen_reg_rtx (rmode);
real_target = simplify_gen_subreg (tmode, target, rmode, 0);
}
for (i = 0; i < nargs; i++)
{
tree arg = CALL_EXPR_ARG (exp, i);
rtx op = expand_normal (arg);
enum machine_mode mode = insn_p->operand[i + 1].mode;
bool match = (*insn_p->operand[i + 1].predicate) (op, mode);
if (last_arg_count && (i + 1) == nargs)
{
/* SIMD shift insns take either an 8-bit immediate or
register as count. But builtin functions take int as
count. If count doesn't match, we put it in register. */
if (!match)
{
op = simplify_gen_subreg (SImode, op, GET_MODE (op), 0);
if (!(*insn_p->operand[i + 1].predicate) (op, mode))
op = copy_to_reg (op);
}
}
else if ((nargs - i) <= nargs_constant)
{
if (!match)
switch (icode)
{
case CODE_FOR_sse4_1_roundpd:
case CODE_FOR_sse4_1_roundps:
case CODE_FOR_sse4_1_roundsd:
case CODE_FOR_sse4_1_roundss:
case CODE_FOR_sse4_1_blendps:
error ("the last argument must be a 4-bit immediate");
return const0_rtx;
case CODE_FOR_sse4_1_blendpd:
error ("the last argument must be a 2-bit immediate");
return const0_rtx;
default:
switch (nargs_constant)
{
case 2:
if ((nargs - i) == nargs_constant)
{
error ("the next to last argument must be an 8-bit immediate");
break;
}
case 1:
error ("the last argument must be an 8-bit immediate");
break;
default:
gcc_unreachable ();
}
return const0_rtx;
}
}
else
{
if (VECTOR_MODE_P (mode))
op = safe_vector_operand (op, mode);
/* If we aren't optimizing, only allow one memory operand to
be generated. */
if (memory_operand (op, mode))
num_memory++;
pat = GEN_FCN (d->icode) (scratch0, target, op0, op1, op2, op3, op4);
}
else
{
gcc_assert (d->flag);
if (GET_MODE (op) == mode || GET_MODE (op) == VOIDmode)
{
if (optimize || !match || num_memory > 1)
op = copy_to_mode_reg (mode, op);
}
else
{
op = copy_to_reg (op);
op = simplify_gen_subreg (mode, op, GET_MODE (op), 0);
}
}
scratch0 = gen_reg_rtx (tmode0);
scratch1 = gen_reg_rtx (tmode1);
args[i].op = op;
args[i].mode = mode;
}
pat = GEN_FCN (d->icode) (scratch0, scratch1, op0, op1, op2, op3, op4);
switch (nargs)
{
case 1:
pat = GEN_FCN (icode) (real_target, args[0].op);
break;
case 2:
pat = GEN_FCN (icode) (real_target, args[0].op, args[1].op);
break;
case 3:
pat = GEN_FCN (icode) (real_target, args[0].op, args[1].op,
args[2].op);
break;
case 4:
pat = GEN_FCN (icode) (real_target, args[0].op, args[1].op,
args[2].op, args[3].op);
break;
default:
gcc_unreachable ();
}
if (! pat)
return 0;
emit_insn (pat);
if (d->flag)
{
target = gen_reg_rtx (SImode);
emit_move_insn (target, const0_rtx);
target = gen_rtx_SUBREG (QImode, target, 0);
emit_insn
(gen_rtx_SET (VOIDmode, gen_rtx_STRICT_LOW_PART (VOIDmode, target),
gen_rtx_fmt_ee (EQ, QImode,
gen_rtx_REG ((enum machine_mode) d->flag,
FLAGS_REG),
const0_rtx)));
return SUBREG_REG (target);
}
else
return target;
}
/* Subroutine of ix86_expand_builtin to take care of pcmpistr[im] insns. */
/* Subroutine of ix86_expand_builtin to take care of special insns
with variable number of operands. */
static rtx
ix86_expand_sse_pcmpistr (const struct builtin_description *d,
tree exp, rtx target)
ix86_expand_special_args_builtin (const struct builtin_description *d,
tree exp, rtx target)
{
rtx pat;
tree arg0 = CALL_EXPR_ARG (exp, 0);
tree arg1 = CALL_EXPR_ARG (exp, 1);
tree arg2 = CALL_EXPR_ARG (exp, 2);
rtx scratch0, scratch1;
rtx op0 = expand_normal (arg0);
rtx op1 = expand_normal (arg1);
rtx op2 = expand_normal (arg2);
enum machine_mode tmode0, tmode1, modev2, modev3, modeimm;
tmode0 = insn_data[d->icode].operand[0].mode;
tmode1 = insn_data[d->icode].operand[1].mode;
modev2 = insn_data[d->icode].operand[2].mode;
modev3 = insn_data[d->icode].operand[3].mode;
modeimm = insn_data[d->icode].operand[4].mode;
tree arg;
rtx pat, op;
unsigned int i, nargs, arg_adjust, memory;
struct
{
rtx op;
enum machine_mode mode;
} args[2];
enum insn_code icode = d->icode;
bool last_arg_constant = false;
const struct insn_data *insn_p = &insn_data[icode];
enum machine_mode tmode = insn_p->operand[0].mode;
enum { load, store } class;
if (VECTOR_MODE_P (modev2))
op0 = safe_vector_operand (op0, modev2);
if (VECTOR_MODE_P (modev3))
op1 = safe_vector_operand (op1, modev3);
switch ((enum ix86_special_builtin_type) d->flag)
{
case VOID_FTYPE_VOID:
emit_insn (GEN_FCN (icode) (target));
return 0;
case V2DI_FTYPE_PV2DI:
case V16QI_FTYPE_PCCHAR:
case V4SF_FTYPE_PCFLOAT:
case V2DF_FTYPE_PCDOUBLE:
nargs = 1;
class = load;
memory = 0;
break;
case VOID_FTYPE_PV2SF_V4SF:
case VOID_FTYPE_PV2DI_V2DI:
case VOID_FTYPE_PCHAR_V16QI:
case VOID_FTYPE_PFLOAT_V4SF:
case VOID_FTYPE_PDOUBLE_V2DF:
case VOID_FTYPE_PDI_DI:
case VOID_FTYPE_PINT_INT:
nargs = 1;
class = store;
/* Reserve memory operand for target. */
memory = ARRAY_SIZE (args);
break;
case V4SF_FTYPE_V4SF_PCV2SF:
case V2DF_FTYPE_V2DF_PCDOUBLE:
nargs = 2;
class = load;
memory = 1;
break;
default:
gcc_unreachable ();
}
if (! (*insn_data[d->icode].operand[2].predicate) (op0, modev2))
op0 = copy_to_mode_reg (modev2, op0);
if ((optimize && !register_operand (op1, modev3))
|| !(*insn_data[d->icode].operand[3].predicate) (op1, modev3))
op1 = copy_to_mode_reg (modev3, op1);
gcc_assert (nargs <= ARRAY_SIZE (args));
if (! (*insn_data[d->icode].operand[4].predicate) (op2, modeimm))
if (class == store)
{
error ("the third argument must be a 8-bit immediate");
return const0_rtx;
arg = CALL_EXPR_ARG (exp, 0);
op = expand_normal (arg);
gcc_assert (target == 0);
target = gen_rtx_MEM (tmode, copy_to_mode_reg (Pmode, op));
arg_adjust = 1;
}
else
{
arg_adjust = 0;
if (optimize
|| target == 0
|| GET_MODE (target) != tmode
|| ! (*insn_p->operand[0].predicate) (target, tmode))
target = gen_reg_rtx (tmode);
}
if (d->code == IX86_BUILTIN_PCMPISTRI128)
for (i = 0; i < nargs; i++)
{
if (optimize || !target
|| GET_MODE (target) != tmode0
|| ! (*insn_data[d->icode].operand[0].predicate) (target, tmode0))
target = gen_reg_rtx (tmode0);
enum machine_mode mode = insn_p->operand[i + 1].mode;
bool match;
scratch1 = gen_reg_rtx (tmode1);
arg = CALL_EXPR_ARG (exp, i + arg_adjust);
op = expand_normal (arg);
match = (*insn_p->operand[i + 1].predicate) (op, mode);
pat = GEN_FCN (d->icode) (target, scratch1, op0, op1, op2);
}
else if (d->code == IX86_BUILTIN_PCMPISTRM128)
{
if (optimize || !target
|| GET_MODE (target) != tmode1
|| ! (*insn_data[d->icode].operand[1].predicate) (target, tmode1))
target = gen_reg_rtx (tmode1);
if (last_arg_constant && (i + 1) == nargs)
{
if (!match)
switch (icode)
{
default:
error ("the last argument must be an 8-bit immediate");
return const0_rtx;
}
}
else
{
if (i == memory)
{
/* This must be the memory operand. */
op = gen_rtx_MEM (mode, copy_to_mode_reg (Pmode, op));
gcc_assert (GET_MODE (op) == mode
|| GET_MODE (op) == VOIDmode);
}
else
{
/* This must be register. */
if (VECTOR_MODE_P (mode))
op = safe_vector_operand (op, mode);
scratch0 = gen_reg_rtx (tmode0);
gcc_assert (GET_MODE (op) == mode
|| GET_MODE (op) == VOIDmode);
op = copy_to_mode_reg (mode, op);
}
}
pat = GEN_FCN (d->icode) (scratch0, target, op0, op1, op2);
args[i].op = op;
args[i].mode = mode;
}
else
{
gcc_assert (d->flag);
scratch0 = gen_reg_rtx (tmode0);
scratch1 = gen_reg_rtx (tmode1);
pat = GEN_FCN (d->icode) (scratch0, scratch1, op0, op1, op2);
switch (nargs)
{
case 1:
pat = GEN_FCN (icode) (target, args[0].op);
break;
case 2:
pat = GEN_FCN (icode) (target, args[0].op, args[1].op);
break;
default:
gcc_unreachable ();
}
if (! pat)
return 0;
emit_insn (pat);
if (d->flag)
{
target = gen_reg_rtx (SImode);
emit_move_insn (target, const0_rtx);
target = gen_rtx_SUBREG (QImode, target, 0);
emit_insn
(gen_rtx_SET (VOIDmode, gen_rtx_STRICT_LOW_PART (VOIDmode, target),
gen_rtx_fmt_ee (EQ, QImode,
gen_rtx_REG ((enum machine_mode) d->flag,
FLAGS_REG),
const0_rtx)));
return SUBREG_REG (target);
}
else
return target;
return class == store ? 0 : target;
}
/* Return the integer constant in ARG. Constrain it to be in the range
@@ -21486,10 +21484,6 @@ ix86_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
     if (d->code == fcode)
       return ix86_expand_sse_comi (d, exp, target);
 
-  for (i = 0, d = bdesc_ptest; i < ARRAY_SIZE (bdesc_ptest); i++, d++)
-    if (d->code == fcode)
-      return ix86_expand_sse_ptest (d, exp, target);
-
   for (i = 0, d = bdesc_pcmpestr;
        i < ARRAY_SIZE (bdesc_pcmpestr);
        i++, d++)
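The net effect of the dispatch change: ix86_expand_builtin no longer walks a separate bdesc_ptest table; the ptest builtins are found during the normal bdesc_args walk, and ix86_expand_args_builtin forwards them to ix86_expand_sse_ptest on the new INT_FTYPE_V2DI_V2DI_PTEST tag. A standalone toy model of that flow (ordinary C, not GCC internals; every name below is illustrative only):

#include <stdio.h>

enum type_tag { INT_FTYPE_V2DI_V2DI_PTEST, V2DI_FTYPE_V2DI_V2DI };

struct desc
{
  const char *name;
  enum type_tag flag;
};

static const struct desc args_table[] = {
  { "__builtin_ia32_ptestz128", INT_FTYPE_V2DI_V2DI_PTEST },
  { "__builtin_ia32_pcmpgtq",   V2DI_FTYPE_V2DI_V2DI },
};

/* Models ix86_expand_sse_ptest: ptest expansion keeps its dedicated path.  */
static void
expand_ptest (const struct desc *d)
{
  printf ("%s: ptest path\n", d->name);
}

/* Models the early return added to ix86_expand_args_builtin's switch.  */
static void
expand_args (const struct desc *d)
{
  if (d->flag == INT_FTYPE_V2DI_V2DI_PTEST)
    {
      expand_ptest (d);
      return;
    }
  printf ("%s: generic path\n", d->name);
}

int
main (void)
{
  unsigned int i;

  /* Models the bdesc_args walk in ix86_expand_builtin, which now
     also reaches the ptest builtins.  */
  for (i = 0; i < sizeof args_table / sizeof args_table[0]; i++)
    expand_args (&args_table[i]);
  return 0;
}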