Commit f8e1995a authored by Uros Bizjak, committed by Uros Bizjak

i386.c: Use short syntax for function calls through function pointers.

	* config/i386/i386.c: Use short syntax for function calls
	through function pointers.
	* config/i386/i386.md: Ditto.

From-SVN: r161962
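For readers unfamiliar with the two spellings, here is a minimal, self-contained C sketch of what the patch changes. It is not taken from i386.c; the names add3 and gen_add3 are hypothetical stand-ins that merely echo generator hooks such as ix86_gen_add3. C allows a call through a function pointer to be written either with an explicit dereference, (*fp) (args), or in the short form fp (args); the two are equivalent, and the commit simply switches the i386 back end to the shorter spelling.

#include <stdio.h>

/* Hypothetical stand-in for a generator function; not part of GCC.  */
static int
add3 (int a, int b, int c)
{
  return a + b + c;
}

int
main (void)
{
  /* A function pointer, analogous to the ix86_gen_* hooks in i386.c.  */
  int (*gen_add3) (int, int, int) = add3;

  /* Long form: explicit dereference of the pointer (the style removed).  */
  int x = (*gen_add3) (1, 2, 3);

  /* Short form: call through the pointer directly (the style adopted).  */
  int y = gen_add3 (1, 2, 3);

  printf ("%d %d\n", x, y);	/* Prints "6 6"; both forms behave identically.  */
  return 0;
}

Both forms compile to the same code, so the hunks below change style only, never behavior.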
parent a80903ff

gcc/ChangeLog
2010-07-08 Uros Bizjak <ubizjak@gmail.com>
* config/i386/i386.c: Use short syntax for function calls
through function pointers.
* config/i386/i386.md: Ditto.
2010-07-08 Eric Botcazou <ebotcazou@adacore.com>
* emit-rtl.c (set_mem_attributes_minus_bitpos): Fix formatting issues.
@@ -441,7 +447,7 @@
2010-07-06 Uros Bizjak <ubizjak@gmail.com>
* config/i386/i386.md (immediate_operand): New mode attribute.
(pro_epilogue_adjust_stack_<mode>_1): Macroize insn from
pro_epilogue_adjust_stack and pro_epilogue_adjust_stack_rex64
using P mode iterator.
config/i386/i386.c
@@ -6937,7 +6937,7 @@ ix86_build_builtin_va_list_abi (enum calling_abi abi)
if (!TARGET_64BIT || abi == MS_ABI)
return build_pointer_type (char_type_node);
record = (*lang_hooks.types.make_type) (RECORD_TYPE);
record = lang_hooks.types.make_type (RECORD_TYPE);
type_decl = build_decl (BUILTINS_LOCATION,
TYPE_DECL, get_identifier ("__va_list_tag"), record);
@@ -7926,10 +7926,10 @@ ix86_code_end (void)
{
DECL_COMDAT_GROUP (decl) = DECL_ASSEMBLER_NAME (decl);
(*targetm.asm_out.unique_section) (decl, 0);
targetm.asm_out.unique_section (decl, 0);
switch_to_section (get_named_section (decl, NULL, 0));
(*targetm.asm_out.globalize_label) (asm_out_file, name);
targetm.asm_out.globalize_label (asm_out_file, name);
fputs ("\t.hidden\t", asm_out_file);
assemble_name (asm_out_file, name);
putc ('\n', asm_out_file);
@@ -8020,8 +8020,8 @@ output_set_got (rtx dest, rtx label ATTRIBUTE_UNUSED)
ASM_OUTPUT_LABEL (asm_out_file, MACHOPIC_FUNCTION_BASE_NAME);
#endif
(*targetm.asm_out.internal_label) (asm_out_file, "L",
CODE_LABEL_NUMBER (XEXP (xops[2], 0)));
targetm.asm_out.internal_label (asm_out_file, "L",
CODE_LABEL_NUMBER (XEXP (xops[2], 0)));
if (flag_pic)
{
@@ -9191,7 +9191,7 @@ ix86_expand_prologue (void)
the base pointer again, align the stack, and later regenerate
the frame pointer setup. The frame pointer generated by the
hook prologue is not aligned, so it can't be used. */
insn = emit_insn ((*ix86_gen_pop1) (hard_frame_pointer_rtx));
insn = emit_insn (ix86_gen_pop1 (hard_frame_pointer_rtx));
}
/* The first insn of a function that accepts its static chain on the
@@ -9246,9 +9246,9 @@ ix86_expand_prologue (void)
ix86_cfa_state->reg = crtl->drap_reg;
/* Align the stack. */
insn = emit_insn ((*ix86_gen_andsp) (stack_pointer_rtx,
stack_pointer_rtx,
GEN_INT (-align_bytes)));
insn = emit_insn (ix86_gen_andsp (stack_pointer_rtx,
stack_pointer_rtx,
GEN_INT (-align_bytes)));
RTX_FRAME_RELATED_P (insn) = 1;
/* Replicate the return address on the stack so that return
@@ -9283,9 +9283,9 @@ ix86_expand_prologue (void)
gcc_assert (align_bytes > MIN_STACK_BOUNDARY / BITS_PER_UNIT);
/* Align the stack. */
insn = emit_insn ((*ix86_gen_andsp) (stack_pointer_rtx,
stack_pointer_rtx,
GEN_INT (-align_bytes)));
insn = emit_insn (ix86_gen_andsp (stack_pointer_rtx,
stack_pointer_rtx,
GEN_INT (-align_bytes)));
RTX_FRAME_RELATED_P (insn) = 1;
}
@@ -9360,7 +9360,7 @@ ix86_expand_prologue (void)
emit_move_insn (eax, GEN_INT (allocate));
insn = emit_insn ((*ix86_gen_allocate_stack_worker) (eax, eax));
insn = emit_insn (ix86_gen_allocate_stack_worker (eax, eax));
if (ix86_cfa_state->reg == stack_pointer_rtx)
{
@@ -19785,7 +19785,7 @@ ix86_expand_strlensi_unroll_1 (rtx out, rtx src, rtx align_rtx)
QImode, 1, end_0_label);
/* Increment the address. */
emit_insn ((*ix86_gen_add3) (out, out, const1_rtx));
emit_insn (ix86_gen_add3 (out, out, const1_rtx));
/* Not needed with an alignment of 2 */
if (align != 2)
@@ -19795,7 +19795,7 @@ ix86_expand_strlensi_unroll_1 (rtx out, rtx src, rtx align_rtx)
emit_cmp_and_jump_insns (mem, const0_rtx, EQ, NULL, QImode, 1,
end_0_label);
emit_insn ((*ix86_gen_add3) (out, out, const1_rtx));
emit_insn (ix86_gen_add3 (out, out, const1_rtx));
emit_label (align_3_label);
}
@@ -19803,7 +19803,7 @@ ix86_expand_strlensi_unroll_1 (rtx out, rtx src, rtx align_rtx)
emit_cmp_and_jump_insns (mem, const0_rtx, EQ, NULL, QImode, 1,
end_0_label);
emit_insn ((*ix86_gen_add3) (out, out, const1_rtx));
emit_insn (ix86_gen_add3 (out, out, const1_rtx));
}
/* Generate loop to check 4 bytes at a time. It is not a good idea to
@@ -19813,7 +19813,7 @@ ix86_expand_strlensi_unroll_1 (rtx out, rtx src, rtx align_rtx)
mem = change_address (src, SImode, out);
emit_move_insn (scratch, mem);
emit_insn ((*ix86_gen_add3) (out, out, GEN_INT (4)));
emit_insn (ix86_gen_add3 (out, out, GEN_INT (4)));
/* This formula yields a nonzero result iff one of the bytes is zero.
This saves three branches inside loop and many cycles. */
@@ -19868,7 +19868,7 @@ ix86_expand_strlensi_unroll_1 (rtx out, rtx src, rtx align_rtx)
/* Not in the first two. Move two bytes forward. */
emit_insn (gen_lshrsi3 (tmpreg, tmpreg, GEN_INT (16)));
emit_insn ((*ix86_gen_add3) (out, out, const2_rtx));
emit_insn (ix86_gen_add3 (out, out, const2_rtx));
emit_label (end_2_label);
@@ -19879,7 +19879,7 @@ ix86_expand_strlensi_unroll_1 (rtx out, rtx src, rtx align_rtx)
emit_insn (gen_addqi3_cc (tmpreg, tmpreg, tmpreg));
tmp = gen_rtx_REG (CCmode, FLAGS_REG);
cmp = gen_rtx_LTU (VOIDmode, tmp, const0_rtx);
emit_insn ((*ix86_gen_sub3_carry) (out, out, GEN_INT (3), tmp, cmp));
emit_insn (ix86_gen_sub3_carry (out, out, GEN_INT (3), tmp, cmp));
emit_label (end_0_label);
}
@@ -19921,7 +19921,7 @@ ix86_expand_strlen (rtx out, rtx src, rtx eoschar, rtx align)
/* strlensi_unroll_1 returns the address of the zero at the end of
the string, like memchr(), so compute the length by subtracting
the start address. */
emit_insn ((*ix86_gen_sub3) (out, out, addr));
emit_insn (ix86_gen_sub3 (out, out, addr));
}
else
{
@@ -19944,8 +19944,8 @@ ix86_expand_strlen (rtx out, rtx src, rtx eoschar, rtx align)
unspec = gen_rtx_UNSPEC (Pmode, gen_rtvec (4, src, eoschar, align,
scratch4), UNSPEC_SCAS);
emit_insn (gen_strlenqi_1 (scratch1, scratch3, unspec));
emit_insn ((*ix86_gen_one_cmpl2) (scratch2, scratch1));
emit_insn ((*ix86_gen_add3) (out, scratch2, constm1_rtx));
emit_insn (ix86_gen_one_cmpl2 (scratch2, scratch1));
emit_insn (ix86_gen_add3 (out, scratch2, constm1_rtx));
}
return 1;
}
@@ -23557,13 +23557,13 @@ ix86_init_builtin_types (void)
TYPE_PRECISION (float80_type_node) = 80;
layout_type (float80_type_node);
}
(*lang_hooks.types.register_builtin_type) (float80_type_node, "__float80");
lang_hooks.types.register_builtin_type (float80_type_node, "__float80");
/* The __float128 type. */
float128_type_node = make_node (REAL_TYPE);
TYPE_PRECISION (float128_type_node) = 128;
layout_type (float128_type_node);
(*lang_hooks.types.register_builtin_type) (float128_type_node, "__float128");
lang_hooks.types.register_builtin_type (float128_type_node, "__float128");
/* This macro is built by i386-builtin-types.awk. */
DEFINE_BUILTIN_PRIMITIVE_TYPES;
@@ -23645,7 +23645,7 @@ ix86_expand_binop_builtin (enum insn_code icode, tree exp, rtx target)
if (optimize || !target
|| GET_MODE (target) != tmode
|| ! (*insn_data[icode].operand[0].predicate) (target, tmode))
|| !insn_data[icode].operand[0].predicate (target, tmode))
target = gen_reg_rtx (tmode);
if (GET_MODE (op1) == SImode && mode1 == TImode)
@@ -23655,9 +23655,9 @@ ix86_expand_binop_builtin (enum insn_code icode, tree exp, rtx target)
op1 = gen_lowpart (TImode, x);
}
if (!(*insn_data[icode].operand[1].predicate) (op0, mode0))
if (!insn_data[icode].operand[1].predicate (op0, mode0))
op0 = copy_to_mode_reg (mode0, op0);
if (!(*insn_data[icode].operand[2].predicate) (op1, mode1))
if (!insn_data[icode].operand[2].predicate (op1, mode1))
op1 = copy_to_mode_reg (mode1, op1);
pat = GEN_FCN (icode) (target, op0, op1);
@@ -23775,7 +23775,7 @@ ix86_expand_multi_arg_builtin (enum insn_code icode, tree exp, rtx target,
if (optimize || !target
|| GET_MODE (target) != tmode
|| ! (*insn_data[icode].operand[0].predicate) (target, tmode))
|| !insn_data[icode].operand[0].predicate (target, tmode))
target = gen_reg_rtx (tmode);
gcc_assert (nargs <= 4);
@@ -23808,7 +23808,7 @@ ix86_expand_multi_arg_builtin (enum insn_code icode, tree exp, rtx target,
gcc_assert (GET_MODE (op) == mode || GET_MODE (op) == VOIDmode);
if (optimize
|| ! (*insn_data[icode].operand[i+adjust+1].predicate) (op, mode)
|| !insn_data[icode].operand[i+adjust+1].predicate (op, mode)
|| num_memory > 1)
op = force_reg (mode, op);
}
@@ -23873,18 +23873,18 @@ ix86_expand_unop_vec_merge_builtin (enum insn_code icode, tree exp,
if (optimize || !target
|| GET_MODE (target) != tmode
|| ! (*insn_data[icode].operand[0].predicate) (target, tmode))
|| !insn_data[icode].operand[0].predicate (target, tmode))
target = gen_reg_rtx (tmode);
if (VECTOR_MODE_P (mode0))
op0 = safe_vector_operand (op0, mode0);
if ((optimize && !register_operand (op0, mode0))
|| ! (*insn_data[icode].operand[1].predicate) (op0, mode0))
|| !insn_data[icode].operand[1].predicate (op0, mode0))
op0 = copy_to_mode_reg (mode0, op0);
op1 = op0;
if (! (*insn_data[icode].operand[2].predicate) (op1, mode0))
if (!insn_data[icode].operand[2].predicate (op1, mode0))
op1 = copy_to_mode_reg (mode0, op1);
pat = GEN_FCN (icode) (target, op0, op1);
@@ -23928,14 +23928,14 @@ ix86_expand_sse_compare (const struct builtin_description *d,
if (optimize || !target
|| GET_MODE (target) != tmode
|| ! (*insn_data[d->icode].operand[0].predicate) (target, tmode))
|| !insn_data[d->icode].operand[0].predicate (target, tmode))
target = gen_reg_rtx (tmode);
if ((optimize && !register_operand (op0, mode0))
|| ! (*insn_data[d->icode].operand[1].predicate) (op0, mode0))
|| !insn_data[d->icode].operand[1].predicate (op0, mode0))
op0 = copy_to_mode_reg (mode0, op0);
if ((optimize && !register_operand (op1, mode1))
|| ! (*insn_data[d->icode].operand[2].predicate) (op1, mode1))
|| !insn_data[d->icode].operand[2].predicate (op1, mode1))
op1 = copy_to_mode_reg (mode1, op1);
op2 = gen_rtx_fmt_ee (comparison, mode0, op0, op1);
@@ -23980,10 +23980,10 @@ ix86_expand_sse_comi (const struct builtin_description *d, tree exp,
target = gen_rtx_SUBREG (QImode, target, 0);
if ((optimize && !register_operand (op0, mode0))
|| !(*insn_data[d->icode].operand[0].predicate) (op0, mode0))
|| !insn_data[d->icode].operand[0].predicate (op0, mode0))
op0 = copy_to_mode_reg (mode0, op0);
if ((optimize && !register_operand (op1, mode1))
|| !(*insn_data[d->icode].operand[1].predicate) (op1, mode1))
|| !insn_data[d->icode].operand[1].predicate (op1, mode1))
op1 = copy_to_mode_reg (mode1, op1);
pat = GEN_FCN (d->icode) (op0, op1);
@@ -24024,10 +24024,10 @@ ix86_expand_sse_ptest (const struct builtin_description *d, tree exp,
target = gen_rtx_SUBREG (QImode, target, 0);
if ((optimize && !register_operand (op0, mode0))
|| !(*insn_data[d->icode].operand[0].predicate) (op0, mode0))
|| !insn_data[d->icode].operand[0].predicate (op0, mode0))
op0 = copy_to_mode_reg (mode0, op0);
if ((optimize && !register_operand (op1, mode1))
|| !(*insn_data[d->icode].operand[1].predicate) (op1, mode1))
|| !insn_data[d->icode].operand[1].predicate (op1, mode1))
op1 = copy_to_mode_reg (mode1, op1);
pat = GEN_FCN (d->icode) (op0, op1);
@@ -24076,17 +24076,17 @@ ix86_expand_sse_pcmpestr (const struct builtin_description *d,
if (VECTOR_MODE_P (modev4))
op2 = safe_vector_operand (op2, modev4);
if (! (*insn_data[d->icode].operand[2].predicate) (op0, modev2))
if (!insn_data[d->icode].operand[2].predicate (op0, modev2))
op0 = copy_to_mode_reg (modev2, op0);
if (! (*insn_data[d->icode].operand[3].predicate) (op1, modei3))
if (!insn_data[d->icode].operand[3].predicate (op1, modei3))
op1 = copy_to_mode_reg (modei3, op1);
if ((optimize && !register_operand (op2, modev4))
|| !(*insn_data[d->icode].operand[4].predicate) (op2, modev4))
|| !insn_data[d->icode].operand[4].predicate (op2, modev4))
op2 = copy_to_mode_reg (modev4, op2);
if (! (*insn_data[d->icode].operand[5].predicate) (op3, modei5))
if (!insn_data[d->icode].operand[5].predicate (op3, modei5))
op3 = copy_to_mode_reg (modei5, op3);
if (! (*insn_data[d->icode].operand[6].predicate) (op4, modeimm))
if (!insn_data[d->icode].operand[6].predicate (op4, modeimm))
{
error ("the fifth argument must be a 8-bit immediate");
return const0_rtx;
@@ -24096,7 +24096,7 @@ ix86_expand_sse_pcmpestr (const struct builtin_description *d,
{
if (optimize || !target
|| GET_MODE (target) != tmode0
|| ! (*insn_data[d->icode].operand[0].predicate) (target, tmode0))
|| !insn_data[d->icode].operand[0].predicate (target, tmode0))
target = gen_reg_rtx (tmode0);
scratch1 = gen_reg_rtx (tmode1);
@@ -24107,7 +24107,7 @@ ix86_expand_sse_pcmpestr (const struct builtin_description *d,
{
if (optimize || !target
|| GET_MODE (target) != tmode1
|| ! (*insn_data[d->icode].operand[1].predicate) (target, tmode1))
|| !insn_data[d->icode].operand[1].predicate (target, tmode1))
target = gen_reg_rtx (tmode1);
scratch0 = gen_reg_rtx (tmode0);
@@ -24175,13 +24175,13 @@ ix86_expand_sse_pcmpistr (const struct builtin_description *d,
if (VECTOR_MODE_P (modev3))
op1 = safe_vector_operand (op1, modev3);
if (! (*insn_data[d->icode].operand[2].predicate) (op0, modev2))
if (!insn_data[d->icode].operand[2].predicate (op0, modev2))
op0 = copy_to_mode_reg (modev2, op0);
if ((optimize && !register_operand (op1, modev3))
|| !(*insn_data[d->icode].operand[3].predicate) (op1, modev3))
|| !insn_data[d->icode].operand[3].predicate (op1, modev3))
op1 = copy_to_mode_reg (modev3, op1);
if (! (*insn_data[d->icode].operand[4].predicate) (op2, modeimm))
if (!insn_data[d->icode].operand[4].predicate (op2, modeimm))
{
error ("the third argument must be a 8-bit immediate");
return const0_rtx;
@@ -24191,7 +24191,7 @@ ix86_expand_sse_pcmpistr (const struct builtin_description *d,
{
if (optimize || !target
|| GET_MODE (target) != tmode0
|| ! (*insn_data[d->icode].operand[0].predicate) (target, tmode0))
|| !insn_data[d->icode].operand[0].predicate (target, tmode0))
target = gen_reg_rtx (tmode0);
scratch1 = gen_reg_rtx (tmode1);
@@ -24202,7 +24202,7 @@ ix86_expand_sse_pcmpistr (const struct builtin_description *d,
{
if (optimize || !target
|| GET_MODE (target) != tmode1
|| ! (*insn_data[d->icode].operand[1].predicate) (target, tmode1))
|| !insn_data[d->icode].operand[1].predicate (target, tmode1))
target = gen_reg_rtx (tmode1);
scratch0 = gen_reg_rtx (tmode0);
@@ -24497,7 +24497,7 @@ ix86_expand_args_builtin (const struct builtin_description *d,
if (optimize
|| target == 0
|| GET_MODE (target) != tmode
|| ! (*insn_p->operand[0].predicate) (target, tmode))
|| !insn_p->operand[0].predicate (target, tmode))
target = gen_reg_rtx (tmode);
real_target = target;
}
@@ -24512,7 +24512,7 @@ ix86_expand_args_builtin (const struct builtin_description *d,
tree arg = CALL_EXPR_ARG (exp, i);
rtx op = expand_normal (arg);
enum machine_mode mode = insn_p->operand[i + 1].mode;
bool match = (*insn_p->operand[i + 1].predicate) (op, mode);
bool match = insn_p->operand[i + 1].predicate (op, mode);
if (last_arg_count && (i + 1) == nargs)
{
@@ -24522,7 +24522,7 @@ ix86_expand_args_builtin (const struct builtin_description *d,
if (!match)
{
op = simplify_gen_subreg (SImode, op, GET_MODE (op), 0);
if (!(*insn_p->operand[i + 1].predicate) (op, mode))
if (!insn_p->operand[i + 1].predicate (op, mode))
op = copy_to_reg (op);
}
}
@@ -24767,7 +24767,7 @@ ix86_expand_special_args_builtin (const struct builtin_description *d,
if (optimize
|| target == 0
|| GET_MODE (target) != tmode
|| ! (*insn_p->operand[0].predicate) (target, tmode))
|| !insn_p->operand[0].predicate (target, tmode))
target = gen_reg_rtx (tmode);
}
@@ -24778,7 +24778,7 @@ ix86_expand_special_args_builtin (const struct builtin_description *d,
arg = CALL_EXPR_ARG (exp, i + arg_adjust);
op = expand_normal (arg);
match = (*insn_p->operand[i + 1].predicate) (op, mode);
match = insn_p->operand[i + 1].predicate (op, mode);
if (last_arg_constant && (i + 1) == nargs)
{
@@ -25028,11 +25028,11 @@ ix86_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
op0 = force_reg (Pmode, op0);
op0 = gen_rtx_MEM (mode1, op0);
if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
if (!insn_data[icode].operand[0].predicate (op0, mode0))
op0 = copy_to_mode_reg (mode0, op0);
if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
if (!insn_data[icode].operand[1].predicate (op1, mode1))
op1 = copy_to_mode_reg (mode1, op1);
if (! (*insn_data[icode].operand[2].predicate) (op2, mode2))
if (!insn_data[icode].operand[2].predicate (op2, mode2))
op2 = copy_to_mode_reg (mode2, op2);
pat = GEN_FCN (icode) (op0, op1, op2);
if (! pat)
@@ -25056,7 +25056,7 @@ ix86_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
arg0 = CALL_EXPR_ARG (exp, 0);
op0 = expand_normal (arg0);
icode = CODE_FOR_sse2_clflush;
if (! (*insn_data[icode].operand[0].predicate) (op0, Pmode))
if (!insn_data[icode].operand[0].predicate (op0, Pmode))
op0 = copy_to_mode_reg (Pmode, op0);
emit_insn (gen_sse2_clflush (op0));
@@ -25075,7 +25075,7 @@ ix86_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
op1 = copy_to_mode_reg (SImode, op1);
if (!REG_P (op2))
op2 = copy_to_mode_reg (SImode, op2);
emit_insn ((*ix86_gen_monitor) (op0, op1, op2));
emit_insn (ix86_gen_monitor (op0, op1, op2));
return 0;
case IX86_BUILTIN_MWAIT:
@@ -25149,7 +25149,7 @@ ix86_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
arg0 = CALL_EXPR_ARG (exp, 0);
op0 = expand_normal (arg0);
icode = CODE_FOR_lwp_llwpcb;
if (! (*insn_data[icode].operand[0].predicate) (op0, Pmode))
if (!insn_data[icode].operand[0].predicate (op0, Pmode))
op0 = copy_to_mode_reg (Pmode, op0);
emit_insn (gen_lwp_llwpcb (op0));
return 0;
@@ -25157,7 +25157,7 @@ ix86_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
case IX86_BUILTIN_SLWPCB:
icode = CODE_FOR_lwp_slwpcb;
if (!target
|| ! (*insn_data[icode].operand[0].predicate) (target, Pmode))
|| !insn_data[icode].operand[0].predicate (target, Pmode))
target = gen_reg_rtx (Pmode);
emit_insn (gen_lwp_slwpcb (target));
return target;
@@ -25278,8 +25278,8 @@ ix86_builtin_vectorized_function (tree fndecl, tree type_out,
/* Dispatch to a handler for a vectorization library. */
if (ix86_veclib_handler)
return (*ix86_veclib_handler) ((enum built_in_function) fn, type_out,
type_in);
return ix86_veclib_handler ((enum built_in_function) fn, type_out,
type_in);
return NULL_TREE;
}
@@ -26786,7 +26786,7 @@ machopic_output_stub (FILE *file, const char *symb, const char *stub)
gcc_assert (!TARGET_64BIT);
/* Lose our funky encoding stuff so it doesn't contaminate the stub. */
symb = (*targetm.strip_name_encoding) (symb);
symb = targetm.strip_name_encoding (symb);
length = strlen (stub);
binder_name = XALLOCAVEC (char, length + 32);
@@ -27071,7 +27071,7 @@ x86_can_output_mi_thunk (const_tree thunk ATTRIBUTE_UNUSED,
return false;
/* Need a free register for GOT references. */
if (flag_pic && !(*targetm.binds_local_p) (function))
if (flag_pic && !targetm.binds_local_p (function))
return false;
/* Otherwise ok. */
@@ -27181,7 +27181,7 @@ x86_output_mi_thunk (FILE *file,
xops[0] = XEXP (DECL_RTL (function), 0);
if (TARGET_64BIT)
{
if (!flag_pic || (*targetm.binds_local_p) (function))
if (!flag_pic || targetm.binds_local_p (function))
output_asm_insn ("jmp\t%P0", xops);
/* All thunks should be in the same object as their target,
and thus binds_local_p should be true. */
@@ -27198,7 +27198,7 @@ x86_output_mi_thunk (FILE *file,
}
else
{
if (!flag_pic || (*targetm.binds_local_p) (function))
if (!flag_pic || targetm.binds_local_p (function))
output_asm_insn ("jmp\t%P0", xops);
else
#if TARGET_MACHO
@@ -28252,10 +28252,10 @@ ix86_expand_vector_init_interleave (enum machine_mode mode,
emit_move_insn (op0, gen_lowpart (mode, op1));
/* Load even elements into the second positon. */
emit_insn ((*gen_load_even) (op0,
force_reg (inner_mode,
ops [i + i + 1]),
const1_rtx));
emit_insn (gen_load_even (op0,
force_reg (inner_mode,
ops [i + i + 1]),
const1_rtx));
/* Cast vector to FIRST_IMODE vector. */
ops[i] = gen_reg_rtx (first_imode);
@@ -28266,7 +28266,7 @@ ix86_expand_vector_init_interleave (enum machine_mode mode,
for (i = j = 0; i < n; i += 2, j++)
{
op0 = gen_reg_rtx (first_imode);
emit_insn ((*gen_interleave_first_low) (op0, ops[i], ops[i + 1]));
emit_insn (gen_interleave_first_low (op0, ops[i], ops[i + 1]));
/* Cast FIRST_IMODE vector to SECOND_IMODE vector. */
ops[j] = gen_reg_rtx (second_imode);
@@ -28280,8 +28280,8 @@ ix86_expand_vector_init_interleave (enum machine_mode mode,
for (i = j = 0; i < n / 2; i += 2, j++)
{
op0 = gen_reg_rtx (second_imode);
emit_insn ((*gen_interleave_second_low) (op0, ops[i],
ops[i + 1]));
emit_insn (gen_interleave_second_low (op0, ops[i],
ops[i + 1]));
/* Cast the SECOND_IMODE vector to the THIRD_IMODE
vector. */
@@ -28294,8 +28294,8 @@ ix86_expand_vector_init_interleave (enum machine_mode mode,
case V2DImode:
op0 = gen_reg_rtx (second_imode);
emit_insn ((*gen_interleave_second_low) (op0, ops[0],
ops[1]));
emit_insn (gen_interleave_second_low (op0, ops[0],
ops[1]));
/* Cast the SECOND_IMODE vector back to a vector on original
mode. */
@@ -28726,13 +28726,13 @@ half:
/* Extract the half. */
tmp = gen_reg_rtx (half_mode);
emit_insn ((*gen_extract[j][i]) (tmp, target));
emit_insn (gen_extract[j][i] (tmp, target));
/* Put val in tmp at elt. */
ix86_expand_vector_set (false, tmp, val, elt);
/* Put it back. */
emit_insn ((*gen_insert[j][i]) (target, target, tmp));
emit_insn (gen_insert[j][i] (target, target, tmp));
return;
default:
config/i386/i386.md
@@ -17981,8 +17981,8 @@
else
output_asm_insn ("%vmovaps\t{%5, %4|%4, %5}", operands);
}
(*targetm.asm_out.internal_label) (asm_out_file, "L",
CODE_LABEL_NUMBER (operands[3]));
targetm.asm_out.internal_label (asm_out_file, "L",
CODE_LABEL_NUMBER (operands[3]));
return "";
}
[(set_attr "type" "other")