Commit 780a5b71 by Uros Bizjak <ubizjak@gmail.com>, 2013-04-18

i386.c (x86_64_ms_sysv_extra_clobbered_registers): New array.

	* config/i386/i386.c (x86_64_ms_sysv_extra_clobbered_registers):
	New array.
	(ix86_expand_call): Remove clobbered_registers array and use
	x86_64_ms_sysv_extra_clobbered_registers instead.
	* config/i386/i386.h (x86_64_ms_sysv_extra_clobbered_registers):
	Declare here.
	* config/i386/predicates.md (call_rex64_ms_sysv_operation): New
	predicate.
	* config/i386/i386.md (*call_rex64_ms_sysv): Use
	call_rex64_ms_sysv_operation predicate.  Remove explicit clobbers.
	(*call_value_rex64_ms_sysv): Ditto.

From-SVN: r198069
parent 36abe895
--- a/gcc/config/i386/i386.c
+++ b/gcc/config/i386/i386.c
@@ -2216,6 +2216,16 @@ static int const x86_64_int_return_registers[4] =
   AX_REG, DX_REG, DI_REG, SI_REG
 };
 
+/* Additional registers that are clobbered by SYSV calls.  */
+int const x86_64_ms_sysv_extra_clobbered_registers[12] =
+{
+  SI_REG, DI_REG,
+  XMM6_REG, XMM7_REG,
+  XMM8_REG, XMM9_REG, XMM10_REG, XMM11_REG,
+  XMM12_REG, XMM13_REG, XMM14_REG, XMM15_REG
+};
+
 /* Define the structure for the machine field in struct function.  */
 
 struct GTY(()) stack_local_entry {
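The hunk above introduces a single shared table of the registers that an MS ABI function must additionally treat as clobbered across a call into a SysV ABI function; the rest of the commit derives every loop bound from this one table instead of from a second, hand-maintained list. A minimal standalone C sketch of that pattern, not GCC code, with invented register numbers standing in for SI_REG, DI_REG and XMM6_REG through XMM15_REG:

#include <stdio.h>

/* ARRAY_SIZE as used throughout GCC: element count of a static array.  */
#define ARRAY_SIZE(a) (sizeof (a) / sizeof ((a)[0]))

/* Invented register numbers; the real values live in i386.h.  */
static const int extra_clobbered_regs[] = {
  4, 5,                       /* rsi, rdi */
  26, 27, 28, 29, 30, 31,     /* xmm6..xmm11 */
  32, 33, 34, 35              /* xmm12..xmm15 */
};

int
main (void)
{
  size_t i;

  /* Every consumer iterates over the one table, so adding or removing
     a register cannot leave a second list out of date.  */
  for (i = 0; i < ARRAY_SIZE (extra_clobbered_regs); i++)
    printf ("extra clobber: reg %d\n", extra_clobbered_regs[i]);
  return 0;
}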
@@ -23704,17 +23714,10 @@ ix86_expand_call (rtx retval, rtx fnaddr, rtx callarg1,
                   rtx callarg2,
                   rtx pop, bool sibcall)
 {
-  /* We need to represent that SI and DI registers are clobbered
-     by SYSV calls.  */
-  static int clobbered_registers[] = {
-    XMM6_REG, XMM7_REG, XMM8_REG,
-    XMM9_REG, XMM10_REG, XMM11_REG,
-    XMM12_REG, XMM13_REG, XMM14_REG,
-    XMM15_REG, SI_REG, DI_REG
-  };
-  rtx vec[ARRAY_SIZE (clobbered_registers) + 3];
+  int const cregs_size = ARRAY_SIZE (x86_64_ms_sysv_extra_clobbered_registers);
+  rtx vec[3 + cregs_size];
   rtx use = NULL, call;
-  unsigned int vec_len;
+  unsigned int vec_len = 0;
 
   if (pop == const0_rtx)
     pop = NULL;
@@ -23730,8 +23733,10 @@ ix86_expand_call (rtx retval, rtx fnaddr, rtx callarg1,
   else
     {
       /* Static functions and indirect calls don't need the pic register.  */
-      if (flag_pic && (!TARGET_64BIT
-                       || (ix86_cmodel == CM_LARGE_PIC && DEFAULT_ABI != MS_ABI))
+      if (flag_pic
+          && (!TARGET_64BIT
+              || (ix86_cmodel == CM_LARGE_PIC
+                  && DEFAULT_ABI != MS_ABI))
           && GET_CODE (XEXP (fnaddr, 0)) == SYMBOL_REF
           && ! SYMBOL_REF_LOCAL_P (XEXP (fnaddr, 0)))
         use_reg (&use, pic_offset_table_rtx);
@@ -23758,7 +23763,6 @@ ix86_expand_call (rtx retval, rtx fnaddr, rtx callarg1,
         fnaddr = gen_rtx_MEM (QImode, copy_to_mode_reg (word_mode, fnaddr));
     }
 
-  vec_len = 0;
   call = gen_rtx_CALL (VOIDmode, fnaddr, callarg1);
   if (retval)
     call = gen_rtx_SET (VOIDmode, retval, call);
@@ -23779,12 +23783,14 @@ ix86_expand_call (rtx retval, rtx fnaddr, rtx callarg1,
       vec[vec_len++] = gen_rtx_UNSPEC (VOIDmode, gen_rtvec (1, const0_rtx),
                                        UNSPEC_MS_TO_SYSV_CALL);
 
-      for (i = 0; i < ARRAY_SIZE (clobbered_registers); i++)
-        vec[vec_len++]
-          = gen_rtx_CLOBBER (VOIDmode,
-                             gen_rtx_REG (SSE_REGNO_P (clobbered_registers[i])
-                                          ? TImode : DImode,
-                                          clobbered_registers[i]));
+      for (i = 0; i < cregs_size; i++)
+        {
+          int regno = x86_64_ms_sysv_extra_clobbered_registers[i];
+          enum machine_mode mode = SSE_REGNO_P (regno) ? TImode : DImode;
+
+          vec[vec_len++]
+            = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno));
+        }
     }
 
   if (vec_len > 1)
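In the rewritten loop each clobber's mode is derived from the register itself: SSE registers are clobbered as 16-byte TImode values, rsi and rdi as 8-byte DImode values. A standalone analogue of that per-register width selection, not GCC code, with an invented range check standing in for SSE_REGNO_P:

#include <stdio.h>

#define ARRAY_SIZE(a) (sizeof (a) / sizeof ((a)[0]))

/* Invented numbering: registers below 16 are general purpose, 16 and
   up are SSE.  */
static int is_sse_reg (int regno) { return regno >= 16; }

static const int extra_clobbered_regs[] = {
  4, 5, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35
};

/* Stand-in for the CLOBBER rtx the real loop appends to vec[].  */
struct clobber { int regno; int width; };

int
main (void)
{
  struct clobber vec[ARRAY_SIZE (extra_clobbered_regs)];
  size_t i, vec_len = 0;

  for (i = 0; i < ARRAY_SIZE (extra_clobbered_regs); i++)
    {
      int regno = extra_clobbered_regs[i];
      /* Mirrors "SSE_REGNO_P (regno) ? TImode : DImode".  */
      int width = is_sse_reg (regno) ? 16 : 8;

      vec[vec_len].regno = regno;
      vec[vec_len].width = width;
      vec_len++;
    }

  for (i = 0; i < vec_len; i++)
    printf ("clobber reg %d (%d bytes)\n", vec[i].regno, vec[i].width);
  return 0;
}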
--- a/gcc/config/i386/i386.h
+++ b/gcc/config/i386/i386.h
@@ -1963,6 +1963,8 @@ extern int const dbx_register_map[FIRST_PSEUDO_REGISTER];
 extern int const dbx64_register_map[FIRST_PSEUDO_REGISTER];
 extern int const svr4_dbx_register_map[FIRST_PSEUDO_REGISTER];
 
+extern int const x86_64_ms_sysv_extra_clobbered_registers[12];
+
 /* Before the prologue, RA is at 0(%esp).  */
 #define INCOMING_RETURN_ADDR_RTX \
   gen_rtx_MEM (VOIDmode, gen_rtx_REG (VOIDmode, STACK_POINTER_REGNUM))
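The array is defined once in i386.c; this extern declaration lets every other part of the backend, including the C fragments generated from predicates.md, refer to that single definition. A minimal sketch of the same arrangement with invented file and symbol names, concatenated so it compiles as one translation unit:

/* clobbers.h (sketch): declaration only, shared by every consumer.
   The explicit bound means a definition of a different size is
   rejected in any file that includes the header.  */
extern const int extra_clobbered_regs[12];

/* clobbers.c (sketch): the one and only definition.  */
const int extra_clobbered_regs[12] = {
  4, 5, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35
};

/* Any other file just includes the header and indexes the array.  */
#include <stdio.h>

int
main (void)
{
  printf ("%d extra clobbered registers\n",
          (int) (sizeof extra_clobbered_regs / sizeof extra_clobbered_regs[0]));
  return 0;
}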
--- a/gcc/config/i386/i386.md
+++ b/gcc/config/i386/i386.md
@@ -10903,21 +10903,10 @@
   [(set_attr "type" "call")])
 
 (define_insn "*call_rex64_ms_sysv"
-  [(call (mem:QI (match_operand:DI 0 "call_insn_operand" "rzw"))
-         (match_operand 1))
-   (unspec [(const_int 0)] UNSPEC_MS_TO_SYSV_CALL)
-   (clobber (reg:TI XMM6_REG))
-   (clobber (reg:TI XMM7_REG))
-   (clobber (reg:TI XMM8_REG))
-   (clobber (reg:TI XMM9_REG))
-   (clobber (reg:TI XMM10_REG))
-   (clobber (reg:TI XMM11_REG))
-   (clobber (reg:TI XMM12_REG))
-   (clobber (reg:TI XMM13_REG))
-   (clobber (reg:TI XMM14_REG))
-   (clobber (reg:TI XMM15_REG))
-   (clobber (reg:DI SI_REG))
-   (clobber (reg:DI DI_REG))]
+  [(match_parallel 2 "call_rex64_ms_sysv_operation"
+    [(call (mem:QI (match_operand:DI 0 "call_insn_operand" "rzw"))
+           (match_operand 1))
+     (unspec [(const_int 0)] UNSPEC_MS_TO_SYSV_CALL)])]
   "TARGET_64BIT && !SIBLING_CALL_P (insn)"
   "* return ix86_output_call_insn (insn, operands[0]);"
   [(set_attr "type" "call")])
@@ -11005,23 +10994,12 @@
   [(set_attr "type" "callv")])
 
 (define_insn "*call_value_rex64_ms_sysv"
-  [(set (match_operand 0)
-        (call (mem:QI (match_operand:DI 1 "call_insn_operand" "rzw"))
-              (match_operand 2)))
-   (unspec [(const_int 0)] UNSPEC_MS_TO_SYSV_CALL)
-   (clobber (reg:TI XMM6_REG))
-   (clobber (reg:TI XMM7_REG))
-   (clobber (reg:TI XMM8_REG))
-   (clobber (reg:TI XMM9_REG))
-   (clobber (reg:TI XMM10_REG))
-   (clobber (reg:TI XMM11_REG))
-   (clobber (reg:TI XMM12_REG))
-   (clobber (reg:TI XMM13_REG))
-   (clobber (reg:TI XMM14_REG))
-   (clobber (reg:TI XMM15_REG))
-   (clobber (reg:DI SI_REG))
-   (clobber (reg:DI DI_REG))]
-  "TARGET_64BIT && !SIBLING_CALL_P (insn)"
+  [(match_parallel 3 "call_rex64_ms_sysv_operation"
+    [(set (match_operand 0)
+          (call (mem:QI (match_operand:DI 1 "call_insn_operand" "rzw"))
+                (match_operand 2)))
+     (unspec [(const_int 0)] UNSPEC_MS_TO_SYSV_CALL)])]
+  "TARGET_64BIT && !SIBLING_CALL_P (insn)"
   "* return ix86_output_call_insn (insn, operands[1]);"
   [(set_attr "type" "callv")])
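Both patterns describe a call made while the current function uses the Microsoft x86-64 ABI and the callee uses the System V ABI; around such a call rsi, rdi and xmm6 through xmm15 must be assumed clobbered, because the MS ABI treats them as callee-saved while the SysV ABI does not. For reference, a small user-level test case that produces exactly this kind of call, assuming an x86-64 GCC (the function names are invented):

#include <stdio.h>

static int __attribute__ ((sysv_abi))
sysv_callee (int x)
{
  return x + 1;
}

static int __attribute__ ((ms_abi))
ms_caller (int x)
{
  /* An MS ABI function calling a SysV ABI function: this call is the
     case the *call_value_rex64_ms_sysv pattern covers.  */
  return sysv_callee (x);
}

int
main (void)
{
  printf ("%d\n", ms_caller (41));
  return 0;
}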
--- a/gcc/config/i386/predicates.md
+++ b/gcc/config/i386/predicates.md
@@ -573,6 +573,36 @@
                      (op, mode == VOIDmode ? mode : Pmode)")
        (match_operand 0 "register_no_elim_operand")))
 
+;; Return true if OP is a call from MS ABI to SYSV ABI function.
(define_predicate "call_rex64_ms_sysv_operation"
(match_code "parallel")
{
unsigned creg_size = ARRAY_SIZE (x86_64_ms_sysv_extra_clobbered_registers);
unsigned i;
if ((unsigned) XVECLEN (op, 0) != creg_size + 2)
return false;
for (i = 0; i < creg_size; i++)
{
rtx elt = XVECEXP (op, 0, i+2);
enum machine_mode mode;
unsigned regno;
if (GET_CODE (elt) != CLOBBER
|| GET_CODE (SET_DEST (elt)) != REG)
return false;
regno = x86_64_ms_sysv_extra_clobbered_registers[i];
mode = SSE_REGNO_P (regno) ? TImode : DImode;
if (GET_MODE (SET_DEST (elt)) != mode
|| REGNO (SET_DEST (elt)) != regno)
return false;
}
return true;
})
;; Match exactly zero.
(define_predicate "const0_operand"
(match_code "const_int,const_double,const_vector")
......
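The new predicate checks, element by element, that the tail of the PARALLEL is precisely the clobber list implied by the shared table: same length, same order, and the expected mode for each register. A standalone sketch of that table-driven validation, with invented types in place of GCC's rtx API:

#include <stdbool.h>
#include <stdio.h>

#define ARRAY_SIZE(a) (sizeof (a) / sizeof ((a)[0]))

/* Invented stand-in for a CLOBBER element of the PARALLEL.  */
struct clobber { int regno; int width; };

static const int extra_clobbered_regs[] = {
  4, 5, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35
};

static int is_sse_reg (int regno) { return regno >= 16; }

/* Accept only a clobber list that matches the table exactly.  */
static bool
valid_clobber_list (const struct clobber *list, size_t n)
{
  size_t i;

  if (n != ARRAY_SIZE (extra_clobbered_regs))
    return false;

  for (i = 0; i < n; i++)
    {
      int regno = extra_clobbered_regs[i];
      int width = is_sse_reg (regno) ? 16 : 8;

      if (list[i].regno != regno || list[i].width != width)
        return false;
    }
  return true;
}

int
main (void)
{
  struct clobber good[12], bad[12];
  size_t i;

  for (i = 0; i < 12; i++)
    {
      good[i].regno = extra_clobbered_regs[i];
      good[i].width = is_sse_reg (good[i].regno) ? 16 : 8;
      bad[i] = good[i];
    }
  bad[3].width = 8;   /* wrong width for an SSE register */

  printf ("good list valid: %d, bad list valid: %d\n",
          valid_clobber_list (good, 12), valid_clobber_list (bad, 12));
  return 0;
}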