Commit 6394830f authored by Richard Henderson, committed by Richard Henderson

re PR bootstrap/49964 (Bootstrap failed with AVX turned on)

PR target/49964
        * config/i386/i386.c (ix86_expand_call): Don't create nested
        PARALLELs for TARGET_VZEROUPPER.
        (ix86_split_call_vzeroupper): Fix extraction of the original call.
        * config/i386/i386.md (*call_rex64_ms_sysv_vzeroupper): Don't
        recognize nested PARALLELs.
        (*call_pop_vzeroupper, *sibcall_pop_vzeroupper,
        *call_value_rex64_ms_sysv_vzeroupper, *call_value_pop_vzeroupper,
        *sibcall_value_pop_vzeroupper): Likewise.

From-SVN: r177408
parent 276e7ed0
2011-08-04 Richard Henderson <rth@redhat.com>
PR target/49964
* config/i386/i386.c (ix86_expand_call): Don't create nested
PARALLELs for TARGET_VZEROUPPER.
(ix86_split_call_vzeroupper): Fix extraction of the original call.
* config/i386/i386.md (*call_rex64_ms_sysv_vzeroupper): Don't
recognize nested PARALLELs.
(*call_pop_vzeroupper, *sibcall_pop_vzeroupper,
*call_value_rex64_ms_sysv_vzeroupper, *call_value_pop_vzeroupper,
*sibcall_value_pop_vzeroupper): Likewise.
2011-08-04 Richard Henderson <rth@redhat.com>
PR middle-end/49968
* calls.c (expand_call): Use fixup_args_size_notes for
emit_stack_restore.
......
...@@ -21501,7 +21501,17 @@ ix86_expand_call (rtx retval, rtx fnaddr, rtx callarg1, ...@@ -21501,7 +21501,17 @@ ix86_expand_call (rtx retval, rtx fnaddr, rtx callarg1,
rtx callarg2, rtx callarg2,
rtx pop, bool sibcall) rtx pop, bool sibcall)
{ {
/* We need to represent that SI and DI registers are clobbered
by SYSV calls. */
static int clobbered_registers[] = {
XMM6_REG, XMM7_REG, XMM8_REG,
XMM9_REG, XMM10_REG, XMM11_REG,
XMM12_REG, XMM13_REG, XMM14_REG,
XMM15_REG, SI_REG, DI_REG
};
rtx vec[ARRAY_SIZE (clobbered_registers) + 3];
rtx use = NULL, call; rtx use = NULL, call;
unsigned int vec_len;
if (pop == const0_rtx) if (pop == const0_rtx)
pop = NULL; pop = NULL;
...@@ -21545,52 +21555,40 @@ ix86_expand_call (rtx retval, rtx fnaddr, rtx callarg1, ...@@ -21545,52 +21555,40 @@ ix86_expand_call (rtx retval, rtx fnaddr, rtx callarg1,
fnaddr = gen_rtx_MEM (QImode, copy_to_mode_reg (Pmode, fnaddr)); fnaddr = gen_rtx_MEM (QImode, copy_to_mode_reg (Pmode, fnaddr));
} }
vec_len = 0;
call = gen_rtx_CALL (VOIDmode, fnaddr, callarg1); call = gen_rtx_CALL (VOIDmode, fnaddr, callarg1);
if (retval) if (retval)
call = gen_rtx_SET (VOIDmode, retval, call); call = gen_rtx_SET (VOIDmode, retval, call);
vec[vec_len++] = call;
if (pop) if (pop)
{ {
pop = gen_rtx_PLUS (Pmode, stack_pointer_rtx, pop); pop = gen_rtx_PLUS (Pmode, stack_pointer_rtx, pop);
pop = gen_rtx_SET (VOIDmode, stack_pointer_rtx, pop); pop = gen_rtx_SET (VOIDmode, stack_pointer_rtx, pop);
call = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, call, pop)); vec[vec_len++] = pop;
} }
if (TARGET_64BIT_MS_ABI if (TARGET_64BIT_MS_ABI
&& (!callarg2 || INTVAL (callarg2) != -2)) && (!callarg2 || INTVAL (callarg2) != -2))
{ {
/* We need to represent that SI and DI registers are clobbered unsigned i;
by SYSV calls. */
static int clobbered_registers[] = { vec[vec_len++] = gen_rtx_UNSPEC (VOIDmode, gen_rtvec (1, const0_rtx),
XMM6_REG, XMM7_REG, XMM8_REG,
XMM9_REG, XMM10_REG, XMM11_REG,
XMM12_REG, XMM13_REG, XMM14_REG,
XMM15_REG, SI_REG, DI_REG
};
unsigned int i;
rtx vec[ARRAY_SIZE (clobbered_registers) + 2];
rtx unspec = gen_rtx_UNSPEC (VOIDmode, gen_rtvec (1, const0_rtx),
UNSPEC_MS_TO_SYSV_CALL); UNSPEC_MS_TO_SYSV_CALL);
vec[0] = call;
vec[1] = unspec;
for (i = 0; i < ARRAY_SIZE (clobbered_registers); i++) for (i = 0; i < ARRAY_SIZE (clobbered_registers); i++)
vec[i + 2] = gen_rtx_CLOBBER (SSE_REGNO_P (clobbered_registers[i]) vec[vec_len++]
= gen_rtx_CLOBBER (SSE_REGNO_P (clobbered_registers[i])
? TImode : DImode, ? TImode : DImode,
gen_rtx_REG gen_rtx_REG (SSE_REGNO_P (clobbered_registers[i])
(SSE_REGNO_P (clobbered_registers[i])
? TImode : DImode, ? TImode : DImode,
clobbered_registers[i])); clobbered_registers[i]));
call = gen_rtx_PARALLEL (VOIDmode,
gen_rtvec_v (ARRAY_SIZE (clobbered_registers)
+ 2, vec));
} }
/* Add UNSPEC_CALL_NEEDS_VZEROUPPER decoration. */ /* Add UNSPEC_CALL_NEEDS_VZEROUPPER decoration. */
if (TARGET_VZEROUPPER) if (TARGET_VZEROUPPER)
{ {
rtx unspec;
int avx256; int avx256;
if (cfun->machine->callee_pass_avx256_p) if (cfun->machine->callee_pass_avx256_p)
{ {
if (cfun->machine->callee_return_avx256_p) if (cfun->machine->callee_return_avx256_p)
...@@ -21606,15 +21604,13 @@ ix86_expand_call (rtx retval, rtx fnaddr, rtx callarg1, ...@@ -21606,15 +21604,13 @@ ix86_expand_call (rtx retval, rtx fnaddr, rtx callarg1,
if (reload_completed) if (reload_completed)
emit_insn (gen_avx_vzeroupper (GEN_INT (avx256))); emit_insn (gen_avx_vzeroupper (GEN_INT (avx256)));
else else
{ vec[vec_len++] = gen_rtx_UNSPEC (VOIDmode,
unspec = gen_rtx_UNSPEC (VOIDmode,
gen_rtvec (1, GEN_INT (avx256)), gen_rtvec (1, GEN_INT (avx256)),
UNSPEC_CALL_NEEDS_VZEROUPPER); UNSPEC_CALL_NEEDS_VZEROUPPER);
call = gen_rtx_PARALLEL (VOIDmode,
gen_rtvec (2, call, unspec));
}
} }
if (vec_len > 1)
call = gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (vec_len, vec));
call = emit_call_insn (call); call = emit_call_insn (call);
if (use) if (use)
CALL_INSN_FUNCTION_USAGE (call) = use; CALL_INSN_FUNCTION_USAGE (call) = use;
...@@ -21625,9 +21621,20 @@ ix86_expand_call (rtx retval, rtx fnaddr, rtx callarg1, ...@@ -21625,9 +21621,20 @@ ix86_expand_call (rtx retval, rtx fnaddr, rtx callarg1,
void void
ix86_split_call_vzeroupper (rtx insn, rtx vzeroupper) ix86_split_call_vzeroupper (rtx insn, rtx vzeroupper)
{ {
rtx call = XVECEXP (PATTERN (insn), 0, 0); rtx pat = PATTERN (insn);
rtvec vec = XVEC (pat, 0);
int len = GET_NUM_ELEM (vec) - 1;
/* Strip off the last entry of the parallel. */
gcc_assert (GET_CODE (RTVEC_ELT (vec, len)) == UNSPEC);
gcc_assert (XINT (RTVEC_ELT (vec, len), 1) == UNSPEC_CALL_NEEDS_VZEROUPPER);
if (len == 1)
pat = RTVEC_ELT (vec, 0);
else
pat = gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (len, &RTVEC_ELT (vec, 0)));
emit_insn (gen_avx_vzeroupper (vzeroupper)); emit_insn (gen_avx_vzeroupper (vzeroupper));
emit_call_insn (call); emit_call_insn (pat);
} }
/* Output the assembly for a call instruction. */ /* Output the assembly for a call instruction. */
...@@ -11050,7 +11050,6 @@ ...@@ -11050,7 +11050,6 @@
[(set_attr "type" "call")]) [(set_attr "type" "call")])
(define_insn_and_split "*call_rex64_ms_sysv_vzeroupper" (define_insn_and_split "*call_rex64_ms_sysv_vzeroupper"
[(parallel
[(call (mem:QI (match_operand:DI 0 "call_insn_operand" "rzw")) [(call (mem:QI (match_operand:DI 0 "call_insn_operand" "rzw"))
(match_operand 1 "" "")) (match_operand 1 "" ""))
(unspec [(const_int 0)] UNSPEC_MS_TO_SYSV_CALL) (unspec [(const_int 0)] UNSPEC_MS_TO_SYSV_CALL)
...@@ -11065,7 +11064,7 @@ ...@@ -11065,7 +11064,7 @@
(clobber (reg:TI XMM14_REG)) (clobber (reg:TI XMM14_REG))
(clobber (reg:TI XMM15_REG)) (clobber (reg:TI XMM15_REG))
(clobber (reg:DI SI_REG)) (clobber (reg:DI SI_REG))
(clobber (reg:DI DI_REG))]) (clobber (reg:DI DI_REG))
(unspec [(match_operand 2 "const_int_operand" "")] (unspec [(match_operand 2 "const_int_operand" "")]
UNSPEC_CALL_NEEDS_VZEROUPPER)] UNSPEC_CALL_NEEDS_VZEROUPPER)]
"TARGET_VZEROUPPER && TARGET_64BIT && !SIBLING_CALL_P (insn)" "TARGET_VZEROUPPER && TARGET_64BIT && !SIBLING_CALL_P (insn)"
...@@ -11128,12 +11127,11 @@ ...@@ -11128,12 +11127,11 @@
}) })
(define_insn_and_split "*call_pop_vzeroupper" (define_insn_and_split "*call_pop_vzeroupper"
[(parallel
[(call (mem:QI (match_operand:SI 0 "call_insn_operand" "lzm")) [(call (mem:QI (match_operand:SI 0 "call_insn_operand" "lzm"))
(match_operand:SI 1 "" "")) (match_operand:SI 1 "" ""))
(set (reg:SI SP_REG) (set (reg:SI SP_REG)
(plus:SI (reg:SI SP_REG) (plus:SI (reg:SI SP_REG)
(match_operand:SI 2 "immediate_operand" "i")))]) (match_operand:SI 2 "immediate_operand" "i")))
(unspec [(match_operand 3 "const_int_operand" "")] (unspec [(match_operand 3 "const_int_operand" "")]
UNSPEC_CALL_NEEDS_VZEROUPPER)] UNSPEC_CALL_NEEDS_VZEROUPPER)]
"TARGET_VZEROUPPER && !TARGET_64BIT && !SIBLING_CALL_P (insn)" "TARGET_VZEROUPPER && !TARGET_64BIT && !SIBLING_CALL_P (insn)"
...@@ -11154,12 +11152,11 @@ ...@@ -11154,12 +11152,11 @@
[(set_attr "type" "call")]) [(set_attr "type" "call")])
(define_insn_and_split "*sibcall_pop_vzeroupper" (define_insn_and_split "*sibcall_pop_vzeroupper"
[(parallel
[(call (mem:QI (match_operand:SI 0 "sibcall_insn_operand" "Uz")) [(call (mem:QI (match_operand:SI 0 "sibcall_insn_operand" "Uz"))
(match_operand 1 "" "")) (match_operand 1 "" ""))
(set (reg:SI SP_REG) (set (reg:SI SP_REG)
(plus:SI (reg:SI SP_REG) (plus:SI (reg:SI SP_REG)
(match_operand:SI 2 "immediate_operand" "i")))]) (match_operand:SI 2 "immediate_operand" "i")))
(unspec [(match_operand 3 "const_int_operand" "")] (unspec [(match_operand 3 "const_int_operand" "")]
UNSPEC_CALL_NEEDS_VZEROUPPER)] UNSPEC_CALL_NEEDS_VZEROUPPER)]
"TARGET_VZEROUPPER && !TARGET_64BIT && SIBLING_CALL_P (insn)" "TARGET_VZEROUPPER && !TARGET_64BIT && SIBLING_CALL_P (insn)"
...@@ -11248,7 +11245,6 @@ ...@@ -11248,7 +11245,6 @@
[(set_attr "type" "callv")]) [(set_attr "type" "callv")])
(define_insn_and_split "*call_value_rex64_ms_sysv_vzeroupper" (define_insn_and_split "*call_value_rex64_ms_sysv_vzeroupper"
[(parallel
[(set (match_operand 0 "" "") [(set (match_operand 0 "" "")
(call (mem:QI (match_operand:DI 1 "call_insn_operand" "rzw")) (call (mem:QI (match_operand:DI 1 "call_insn_operand" "rzw"))
(match_operand 2 "" ""))) (match_operand 2 "" "")))
...@@ -11264,7 +11260,7 @@ ...@@ -11264,7 +11260,7 @@
(clobber (reg:TI XMM14_REG)) (clobber (reg:TI XMM14_REG))
(clobber (reg:TI XMM15_REG)) (clobber (reg:TI XMM15_REG))
(clobber (reg:DI SI_REG)) (clobber (reg:DI SI_REG))
(clobber (reg:DI DI_REG))]) (clobber (reg:DI DI_REG))
(unspec [(match_operand 3 "const_int_operand" "")] (unspec [(match_operand 3 "const_int_operand" "")]
UNSPEC_CALL_NEEDS_VZEROUPPER)] UNSPEC_CALL_NEEDS_VZEROUPPER)]
"TARGET_VZEROUPPER && TARGET_64BIT && !SIBLING_CALL_P (insn)" "TARGET_VZEROUPPER && TARGET_64BIT && !SIBLING_CALL_P (insn)"
...@@ -11310,13 +11306,12 @@ ...@@ -11310,13 +11306,12 @@
}) })
(define_insn_and_split "*call_value_pop_vzeroupper" (define_insn_and_split "*call_value_pop_vzeroupper"
[(parallel
[(set (match_operand 0 "" "") [(set (match_operand 0 "" "")
(call (mem:QI (match_operand:SI 1 "call_insn_operand" "lzm")) (call (mem:QI (match_operand:SI 1 "call_insn_operand" "lzm"))
(match_operand 2 "" ""))) (match_operand 2 "" "")))
(set (reg:SI SP_REG) (set (reg:SI SP_REG)
(plus:SI (reg:SI SP_REG) (plus:SI (reg:SI SP_REG)
(match_operand:SI 3 "immediate_operand" "i")))]) (match_operand:SI 3 "immediate_operand" "i")))
(unspec [(match_operand 4 "const_int_operand" "")] (unspec [(match_operand 4 "const_int_operand" "")]
UNSPEC_CALL_NEEDS_VZEROUPPER)] UNSPEC_CALL_NEEDS_VZEROUPPER)]
"TARGET_VZEROUPPER && !TARGET_64BIT && !SIBLING_CALL_P (insn)" "TARGET_VZEROUPPER && !TARGET_64BIT && !SIBLING_CALL_P (insn)"
...@@ -11338,13 +11333,12 @@ ...@@ -11338,13 +11333,12 @@
[(set_attr "type" "callv")]) [(set_attr "type" "callv")])
(define_insn_and_split "*sibcall_value_pop_vzeroupper" (define_insn_and_split "*sibcall_value_pop_vzeroupper"
[(parallel
[(set (match_operand 0 "" "") [(set (match_operand 0 "" "")
(call (mem:QI (match_operand:SI 1 "sibcall_insn_operand" "Uz")) (call (mem:QI (match_operand:SI 1 "sibcall_insn_operand" "Uz"))
(match_operand 2 "" ""))) (match_operand 2 "" "")))
(set (reg:SI SP_REG) (set (reg:SI SP_REG)
(plus:SI (reg:SI SP_REG) (plus:SI (reg:SI SP_REG)
(match_operand:SI 3 "immediate_operand" "i")))]) (match_operand:SI 3 "immediate_operand" "i")))
(unspec [(match_operand 4 "const_int_operand" "")] (unspec [(match_operand 4 "const_int_operand" "")]
UNSPEC_CALL_NEEDS_VZEROUPPER)] UNSPEC_CALL_NEEDS_VZEROUPPER)]
"TARGET_VZEROUPPER && !TARGET_64BIT && SIBLING_CALL_P (insn)" "TARGET_VZEROUPPER && !TARGET_64BIT && SIBLING_CALL_P (insn)"
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment