Commit 2371d1a0 authored and committed by Richard Henderson

alpha: Convert to atomic optabs.

From-SVN: r181395
parent 6dc88283
2011-11-15 Richard Henderson <rth@redhat.com>
* config/alpha/alpha.c (alpha_pre_atomic_barrier): New.
(alpha_post_atomic_barrier): New.
(alpha_split_atomic_op): New memmodel argument; honor it.
(alpha_split_compare_and_swap): Take array of operands. Honor
memmodel; always set bool output.
(alpha_expand_compare_and_swap_12): Similarly.
(alpha_split_compare_and_swap_12): Similarly.
(alpha_split_atomic_exchange): Similarly. Rename from
alpha_split_lock_test_and_set.
(alpha_expand_atomic_exchange_12): Similarly. Rename from
alpha_expand_lock_test_and_set_12.
(alpha_split_atomic_exchange_12): Similarly. Rename from
alpha_split_lock_test_and_set_12.
* config/alpha/alpha-protos.h: Update.
* config/alpha/alpha.md (UNSPECV_CMPXCHG): New.
* config/alpha/constraints.md ("w"): New.
* config/alpha/predicates.md (mem_noofs_operand): New.
* config/alpha/sync.md (atomic_compare_and_swap<mode>): Rename from
sync_compare_and_swap<mode>; add the new parameters.
(atomic_exchange<mode>): Update from sync_test_and_set<mode>.
(atomic_fetch_<op><mode>): Update from sync_old_<op><mode>.
(atomic_<op>_fetch<mode>): Update from sync_new_<op><mode>.
(atomic_<op><mode>): Update from sync_<op><mode>.
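
Background note (not part of the ChangeLog): the named patterns introduced below are the expanders GCC's middle end uses for the C11-style __atomic built-ins, so after this conversion code like the following, with explicit memory-model arguments, flows through the new alpha routines. The function and variable names in this sketch are illustrative only and do not appear in the commit.

#include <stdint.h>

int64_t counter;

int64_t
bump (void)
{
  /* Expands via atomic_fetch_<op><mode> with a memory-model operand.  */
  return __atomic_fetch_add (&counter, 1, __ATOMIC_SEQ_CST);
}

int
try_claim (int64_t *lock, int64_t expected, int64_t desired)
{
  /* Expands via atomic_compare_and_swap<mode>: bool result plus the
     loaded value, with separate success and failure memory models.  */
  return __atomic_compare_exchange_n (lock, &expected, desired,
                                      /*weak=*/0,
                                      __ATOMIC_ACQ_REL, __ATOMIC_RELAXED);
}

int16_t
swap16 (int16_t *p, int16_t v)
{
  /* HImode exchange: on non-BWX Alpha this goes through the new
     atomic_exchange<mode>_1 pattern and the mem_noofs_operand / "w"
     constraint added below.  */
  return __atomic_exchange_n (p, v, __ATOMIC_ACQUIRE);
}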
gcc/config/alpha/alpha-protos.h
@@ -88,15 +88,14 @@ extern bool alpha_emit_setcc (rtx[], enum machine_mode);
 extern int alpha_split_conditional_move (enum rtx_code, rtx, rtx, rtx, rtx);
 extern void alpha_emit_xfloating_arith (enum rtx_code, rtx[]);
 extern void alpha_emit_xfloating_cvt (enum rtx_code, rtx[]);
-extern void alpha_split_atomic_op (enum rtx_code, rtx, rtx, rtx, rtx, rtx);
-extern void alpha_split_compare_and_swap (rtx, rtx, rtx, rtx, rtx);
-extern void alpha_expand_compare_and_swap_12 (rtx, rtx, rtx, rtx);
-extern void alpha_split_compare_and_swap_12 (enum machine_mode, rtx, rtx,
-                                             rtx, rtx, rtx, rtx, rtx);
-extern void alpha_split_lock_test_and_set (rtx, rtx, rtx, rtx);
-extern void alpha_expand_lock_test_and_set_12 (rtx, rtx, rtx);
-extern void alpha_split_lock_test_and_set_12 (enum machine_mode, rtx, rtx,
-                                              rtx, rtx, rtx);
+extern void alpha_split_atomic_op (enum rtx_code, rtx, rtx, rtx, rtx, rtx,
+                                   enum memmodel);
+extern void alpha_split_compare_and_swap (rtx op[]);
+extern void alpha_expand_compare_and_swap_12 (rtx op[]);
+extern void alpha_split_compare_and_swap_12 (rtx op[]);
+extern void alpha_split_atomic_exchange (rtx op[]);
+extern void alpha_expand_atomic_exchange_12 (rtx op[]);
+extern void alpha_split_atomic_exchange_12 (rtx op[]);
 #endif

 extern rtx alpha_use_linkage (rtx, bool, bool);
gcc/config/alpha/alpha.c
@@ -4196,6 +4196,47 @@ emit_store_conditional (enum machine_mode mode, rtx res, rtx mem, rtx val)
   emit_insn (fn (res, mem, val));
 }

+/* Subroutines of the atomic operation splitters.  Emit barriers
+   as needed for the memory MODEL.  */
+
+static void
+alpha_pre_atomic_barrier (enum memmodel model)
+{
+  switch (model)
+    {
+    case MEMMODEL_RELAXED:
+    case MEMMODEL_CONSUME:
+    case MEMMODEL_ACQUIRE:
+      break;
+    case MEMMODEL_RELEASE:
+    case MEMMODEL_ACQ_REL:
+    case MEMMODEL_SEQ_CST:
+      emit_insn (gen_memory_barrier ());
+      break;
+    default:
+      gcc_unreachable ();
+    }
+}
+
+static void
+alpha_post_atomic_barrier (enum memmodel model)
+{
+  switch (model)
+    {
+    case MEMMODEL_RELAXED:
+    case MEMMODEL_CONSUME:
+    case MEMMODEL_RELEASE:
+      break;
+    case MEMMODEL_ACQUIRE:
+    case MEMMODEL_ACQ_REL:
+    case MEMMODEL_SEQ_CST:
+      emit_insn (gen_memory_barrier ());
+      break;
+    default:
+      gcc_unreachable ();
+    }
+}
+
 /* A subroutine of the atomic operation splitters.  Emit an insxl
    instruction in MODE.  */
@@ -4236,13 +4277,13 @@ emit_insxl (enum machine_mode mode, rtx op1, rtx op2)
    a scratch register.  */

 void
-alpha_split_atomic_op (enum rtx_code code, rtx mem, rtx val,
-                       rtx before, rtx after, rtx scratch)
+alpha_split_atomic_op (enum rtx_code code, rtx mem, rtx val, rtx before,
+                       rtx after, rtx scratch, enum memmodel model)
 {
   enum machine_mode mode = GET_MODE (mem);
   rtx label, x, cond = gen_rtx_REG (DImode, REGNO (scratch));

-  emit_insn (gen_memory_barrier ());
+  alpha_pre_atomic_barrier (model);

   label = gen_label_rtx ();
   emit_label (label);
@@ -4270,29 +4311,48 @@ alpha_split_atomic_op (enum rtx_code code, rtx mem, rtx val,
   x = gen_rtx_EQ (DImode, cond, const0_rtx);
   emit_unlikely_jump (x, label);

-  emit_insn (gen_memory_barrier ());
+  alpha_post_atomic_barrier (model);
 }

 /* Expand a compare and swap operation.  */

 void
-alpha_split_compare_and_swap (rtx retval, rtx mem, rtx oldval, rtx newval,
-                              rtx scratch)
+alpha_split_compare_and_swap (rtx operands[])
 {
-  enum machine_mode mode = GET_MODE (mem);
-  rtx label1, label2, x, cond = gen_lowpart (DImode, scratch);
+  rtx cond, retval, mem, oldval, newval;
+  bool is_weak;
+  enum memmodel mod_s, mod_f;
+  enum machine_mode mode;
+  rtx label1, label2, x;
+
+  cond = operands[0];
+  retval = operands[1];
+  mem = operands[2];
+  oldval = operands[3];
+  newval = operands[4];
+  is_weak = (operands[5] != const0_rtx);
+  mod_s = (enum memmodel) INTVAL (operands[6]);
+  mod_f = (enum memmodel) INTVAL (operands[7]);
+  mode = GET_MODE (mem);

-  emit_insn (gen_memory_barrier ());
+  alpha_pre_atomic_barrier (mod_s);

-  label1 = gen_rtx_LABEL_REF (DImode, gen_label_rtx ());
+  label1 = NULL_RTX;
+  if (!is_weak)
+    {
+      label1 = gen_rtx_LABEL_REF (DImode, gen_label_rtx ());
+      emit_label (XEXP (label1, 0));
+    }
   label2 = gen_rtx_LABEL_REF (DImode, gen_label_rtx ());
-  emit_label (XEXP (label1, 0));

   emit_load_locked (mode, retval, mem);

   x = gen_lowpart (DImode, retval);
   if (oldval == const0_rtx)
-    x = gen_rtx_NE (DImode, x, const0_rtx);
+    {
+      emit_move_insn (cond, const0_rtx);
+      x = gen_rtx_NE (DImode, x, const0_rtx);
+    }
   else
     {
       x = gen_rtx_EQ (DImode, x, oldval);
@@ -4301,54 +4361,99 @@ alpha_split_compare_and_swap (rtx retval, rtx mem, rtx oldval, rtx newval,
     }
   emit_unlikely_jump (x, label2);

-  emit_move_insn (scratch, newval);
-  emit_store_conditional (mode, cond, mem, scratch);
+  emit_move_insn (cond, newval);
+  emit_store_conditional (mode, cond, mem, gen_lowpart (mode, cond));

-  x = gen_rtx_EQ (DImode, cond, const0_rtx);
-  emit_unlikely_jump (x, label1);
+  if (!is_weak)
+    {
+      x = gen_rtx_EQ (DImode, cond, const0_rtx);
+      emit_unlikely_jump (x, label1);
+    }
+
+  if (mod_f != MEMMODEL_RELAXED)
+    emit_label (XEXP (label2, 0));

-  emit_insn (gen_memory_barrier ());
-  emit_label (XEXP (label2, 0));
+  alpha_post_atomic_barrier (mod_s);
+
+  if (mod_f == MEMMODEL_RELAXED)
+    emit_label (XEXP (label2, 0));
 }

 void
-alpha_expand_compare_and_swap_12 (rtx dst, rtx mem, rtx oldval, rtx newval)
+alpha_expand_compare_and_swap_12 (rtx operands[])
 {
-  enum machine_mode mode = GET_MODE (mem);
+  rtx cond, dst, mem, oldval, newval, is_weak, mod_s, mod_f;
+  enum machine_mode mode;
   rtx addr, align, wdst;
-  rtx (*fn5) (rtx, rtx, rtx, rtx, rtx);
+  rtx (*gen) (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
+
+  cond = operands[0];
+  dst = operands[1];
+  mem = operands[2];
+  oldval = operands[3];
+  newval = operands[4];
+  is_weak = operands[5];
+  mod_s = operands[6];
+  mod_f = operands[7];
+  mode = GET_MODE (mem);

-  addr = force_reg (DImode, XEXP (mem, 0));
+  /* We forced the address into a register via mem_noofs_operand.  */
+  addr = XEXP (mem, 0);
+  gcc_assert (register_operand (addr, DImode));

   align = expand_simple_binop (Pmode, AND, addr, GEN_INT (-8),
                                NULL_RTX, 1, OPTAB_DIRECT);

   oldval = convert_modes (DImode, mode, oldval, 1);
-  newval = emit_insxl (mode, newval, addr);
+
+  if (newval != const0_rtx)
+    newval = emit_insxl (mode, newval, addr);

   wdst = gen_reg_rtx (DImode);
   if (mode == QImode)
-    fn5 = gen_sync_compare_and_swapqi_1;
+    gen = gen_atomic_compare_and_swapqi_1;
   else
-    fn5 = gen_sync_compare_and_swaphi_1;
-  emit_insn (fn5 (wdst, addr, oldval, newval, align));
+    gen = gen_atomic_compare_and_swaphi_1;
+  emit_insn (gen (cond, wdst, mem, oldval, newval, align,
+                  is_weak, mod_s, mod_f));

   emit_move_insn (dst, gen_lowpart (mode, wdst));
 }

 void
-alpha_split_compare_and_swap_12 (enum machine_mode mode, rtx dest, rtx addr,
-                                 rtx oldval, rtx newval, rtx align,
-                                 rtx scratch, rtx cond)
+alpha_split_compare_and_swap_12 (rtx operands[])
 {
-  rtx label1, label2, mem, width, mask, x;
+  rtx cond, dest, orig_mem, oldval, newval, align, scratch;
+  enum machine_mode mode;
+  bool is_weak;
+  enum memmodel mod_s, mod_f;
+  rtx label1, label2, mem, addr, width, mask, x;
+
+  cond = operands[0];
+  dest = operands[1];
+  orig_mem = operands[2];
+  oldval = operands[3];
+  newval = operands[4];
+  align = operands[5];
+  is_weak = (operands[6] != const0_rtx);
+  mod_s = (enum memmodel) INTVAL (operands[7]);
+  mod_f = (enum memmodel) INTVAL (operands[8]);
+  scratch = operands[9];
+  mode = GET_MODE (orig_mem);
+  addr = XEXP (orig_mem, 0);

   mem = gen_rtx_MEM (DImode, align);
-  MEM_VOLATILE_P (mem) = 1;
+  MEM_VOLATILE_P (mem) = MEM_VOLATILE_P (orig_mem);
+
+  alpha_pre_atomic_barrier (mod_s);

-  emit_insn (gen_memory_barrier ());
-  label1 = gen_rtx_LABEL_REF (DImode, gen_label_rtx ());
+  label1 = NULL_RTX;
+  if (!is_weak)
+    {
+      label1 = gen_rtx_LABEL_REF (DImode, gen_label_rtx ());
+      emit_label (XEXP (label1, 0));
+    }
   label2 = gen_rtx_LABEL_REF (DImode, gen_label_rtx ());
-  emit_label (XEXP (label1, 0));

   emit_load_locked (DImode, scratch, mem);
@@ -4357,7 +4462,10 @@ alpha_split_compare_and_swap_12 (enum machine_mode mode, rtx dest, rtx addr,
   emit_insn (gen_extxl (dest, scratch, width, addr));

   if (oldval == const0_rtx)
-    x = gen_rtx_NE (DImode, dest, const0_rtx);
+    {
+      emit_move_insn (cond, const0_rtx);
+      x = gen_rtx_NE (DImode, dest, const0_rtx);
+    }
   else
     {
       x = gen_rtx_EQ (DImode, dest, oldval);
@@ -4366,25 +4474,47 @@ alpha_split_compare_and_swap_12 (enum machine_mode mode, rtx dest, rtx addr,
     }
   emit_unlikely_jump (x, label2);

-  emit_insn (gen_mskxl (scratch, scratch, mask, addr));
-  emit_insn (gen_iordi3 (scratch, scratch, newval));
+  emit_insn (gen_mskxl (cond, scratch, mask, addr));

-  emit_store_conditional (DImode, scratch, mem, scratch);
+  if (newval != const0_rtx)
+    emit_insn (gen_iordi3 (cond, cond, newval));

-  x = gen_rtx_EQ (DImode, scratch, const0_rtx);
-  emit_unlikely_jump (x, label1);
+  emit_store_conditional (DImode, cond, mem, cond);

-  emit_insn (gen_memory_barrier ());
-  emit_label (XEXP (label2, 0));
+  if (!is_weak)
+    {
+      x = gen_rtx_EQ (DImode, cond, const0_rtx);
+      emit_unlikely_jump (x, label1);
+    }
+
+  if (mod_f != MEMMODEL_RELAXED)
+    emit_label (XEXP (label2, 0));
+
+  alpha_post_atomic_barrier (mod_s);
+
+  if (mod_f == MEMMODEL_RELAXED)
+    emit_label (XEXP (label2, 0));
 }

 /* Expand an atomic exchange operation.  */

 void
-alpha_split_lock_test_and_set (rtx retval, rtx mem, rtx val, rtx scratch)
+alpha_split_atomic_exchange (rtx operands[])
 {
-  enum machine_mode mode = GET_MODE (mem);
-  rtx label, x, cond = gen_lowpart (DImode, scratch);
+  rtx retval, mem, val, scratch;
+  enum memmodel model;
+  enum machine_mode mode;
+  rtx label, x, cond;
+
+  retval = operands[0];
+  mem = operands[1];
+  val = operands[2];
+  model = (enum memmodel) INTVAL (operands[3]);
+  scratch = operands[4];
+  mode = GET_MODE (mem);
+  cond = gen_lowpart (DImode, scratch);
+
+  alpha_pre_atomic_barrier (model);

   label = gen_rtx_LABEL_REF (DImode, gen_label_rtx ());
   emit_label (XEXP (label, 0));
@@ -4396,44 +4526,65 @@ alpha_split_lock_test_and_set (rtx retval, rtx mem, rtx val, rtx scratch)
   x = gen_rtx_EQ (DImode, cond, const0_rtx);
   emit_unlikely_jump (x, label);

-  emit_insn (gen_memory_barrier ());
+  alpha_post_atomic_barrier (model);
 }

 void
-alpha_expand_lock_test_and_set_12 (rtx dst, rtx mem, rtx val)
+alpha_expand_atomic_exchange_12 (rtx operands[])
 {
-  enum machine_mode mode = GET_MODE (mem);
+  rtx dst, mem, val, model;
+  enum machine_mode mode;
   rtx addr, align, wdst;
-  rtx (*fn4) (rtx, rtx, rtx, rtx);
+  rtx (*gen) (rtx, rtx, rtx, rtx, rtx);
+
+  dst = operands[0];
+  mem = operands[1];
+  val = operands[2];
+  model = operands[3];
+  mode = GET_MODE (mem);

-  /* Force the address into a register.  */
-  addr = force_reg (DImode, XEXP (mem, 0));
+  /* We forced the address into a register via mem_noofs_operand.  */
+  addr = XEXP (mem, 0);
+  gcc_assert (register_operand (addr, DImode));

-  /* Align it to a multiple of 8.  */
   align = expand_simple_binop (Pmode, AND, addr, GEN_INT (-8),
                                NULL_RTX, 1, OPTAB_DIRECT);

   /* Insert val into the correct byte location within the word.  */
-  val = emit_insxl (mode, val, addr);
+  if (val != const0_rtx)
+    val = emit_insxl (mode, val, addr);

   wdst = gen_reg_rtx (DImode);
   if (mode == QImode)
-    fn4 = gen_sync_lock_test_and_setqi_1;
+    gen = gen_atomic_exchangeqi_1;
   else
-    fn4 = gen_sync_lock_test_and_sethi_1;
-  emit_insn (fn4 (wdst, addr, val, align));
+    gen = gen_atomic_exchangehi_1;
+  emit_insn (gen (wdst, mem, val, align, model));

   emit_move_insn (dst, gen_lowpart (mode, wdst));
 }

 void
-alpha_split_lock_test_and_set_12 (enum machine_mode mode, rtx dest, rtx addr,
-                                  rtx val, rtx align, rtx scratch)
+alpha_split_atomic_exchange_12 (rtx operands[])
 {
+  rtx dest, orig_mem, addr, val, align, scratch;
   rtx label, mem, width, mask, x;
+  enum machine_mode mode;
+  enum memmodel model;
+
+  dest = operands[0];
+  orig_mem = operands[1];
+  val = operands[2];
+  align = operands[3];
+  model = (enum memmodel) INTVAL (operands[4]);
+  scratch = operands[5];
+  mode = GET_MODE (orig_mem);
+  addr = XEXP (orig_mem, 0);

   mem = gen_rtx_MEM (DImode, align);
-  MEM_VOLATILE_P (mem) = 1;
+  MEM_VOLATILE_P (mem) = MEM_VOLATILE_P (orig_mem);
+
+  alpha_pre_atomic_barrier (model);

   label = gen_rtx_LABEL_REF (DImode, gen_label_rtx ());
   emit_label (XEXP (label, 0));
@@ -4444,14 +4595,15 @@ alpha_split_lock_test_and_set_12 (enum machine_mode mode, rtx dest, rtx addr,
   mask = GEN_INT (mode == QImode ? 0xff : 0xffff);

   emit_insn (gen_extxl (dest, scratch, width, addr));
   emit_insn (gen_mskxl (scratch, scratch, mask, addr));
-  emit_insn (gen_iordi3 (scratch, scratch, val));
+  if (val != const0_rtx)
+    emit_insn (gen_iordi3 (scratch, scratch, val));

   emit_store_conditional (DImode, scratch, mem, scratch);

   x = gen_rtx_EQ (DImode, scratch, const0_rtx);
   emit_unlikely_jump (x, label);

-  emit_insn (gen_memory_barrier ());
+  alpha_post_atomic_barrier (model);
 }

 /* Adjust the cost of a scheduling dependency.  Return the new cost of
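
The two new barrier helpers above encode the only memory-model decision the splitters make: a release-class model needs an mb before the ll/sc loop, an acquire-class model needs an mb after it. A compact C model of that decision (illustrative only; the real code emits Alpha mb instructions via gen_memory_barrier ()):

/* Illustrative model of the barrier placement chosen above.  */
enum model { RELAXED, CONSUME, ACQUIRE, RELEASE, ACQ_REL, SEQ_CST };

static int
needs_pre_barrier (enum model m)   /* mirrors alpha_pre_atomic_barrier */
{
  return m == RELEASE || m == ACQ_REL || m == SEQ_CST;
}

static int
needs_post_barrier (enum model m)  /* mirrors alpha_post_atomic_barrier */
{
  return m == ACQUIRE || m == ACQ_REL || m == SEQ_CST;
}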
gcc/config/alpha/alpha.md
@@ -81,6 +81,7 @@
    UNSPECV_SETJMPR_ER   ; builtin_setjmp_receiver fragment
    UNSPECV_LL           ; load-locked
    UNSPECV_SC           ; store-conditional
+   UNSPECV_CMPXCHG
   ])

 ;; On non-BWX targets, CQImode must be handled the similarly to HImode
gcc/config/alpha/constraints.md
@@ -19,7 +19,7 @@
 ;;; Unused letters:
 ;;;    ABCDEF               V  YZ
-;;;    de ghijklmnopq stu  wxyz
+;;;    de ghijkl   pq  tu  wxyz

 ;; Integer register constraints.
@@ -38,6 +38,10 @@
 (define_register_constraint "v" "R0_REG"
  "General register 0, function value return address")

+(define_memory_constraint "w"
+ "A memory whose address is only a register"
+ (match_operand 0 "mem_noofs_operand"))
+
 ;; Integer constant constraints.
 (define_constraint "I"
  "An unsigned 8 bit constant"
gcc/config/alpha/predicates.md
@@ -623,3 +623,8 @@
   (ior (match_operand 0 "register_operand")
        (and (match_test "TARGET_BWX")
             (match_operand 0 "memory_operand"))))
+
+;; Accept a memory whose address is only a register.
+(define_predicate "mem_noofs_operand"
+  (and (match_code "mem")
+       (match_code "reg" "0")))
gcc/config/alpha/sync.md
 ;; GCC machine description for Alpha synchronization instructions.
-;; Copyright (C) 2005, 2007, 2008, 2009 Free Software Foundation, Inc.
+;; Copyright (C) 2005, 2007, 2008, 2009, 2011 Free Software Foundation, Inc.
 ;;
 ;; This file is part of GCC.
 ;;
@@ -62,95 +62,160 @@
   [(set_attr "type" "st_c")])

 ;; The Alpha Architecture Handbook says that it is UNPREDICTABLE whether
-;; the lock is cleared by a TAKEN branch.  This means that we can not
-;; expand a ll/sc sequence until after the final basic-block reordering pass.
+;; the lock is cleared by a normal load or store.  This means we cannot
+;; expand a ll/sc sequence before reload, lest a register spill is
+;; inserted inside the sequence.  It is also UNPREDICTABLE whether the
+;; lock is cleared by a TAKEN branch.  This means that we can not expand
+;; a ll/sc sequence containing a branch (i.e. compare-and-swap) until after
+;; the final basic-block reordering pass.

-(define_insn_and_split "sync_<fetchop_name><mode>"
-  [(set (match_operand:I48MODE 0 "memory_operand" "+m")
-        (unspec:I48MODE
-          [(FETCHOP:I48MODE (match_dup 0)
-             (match_operand:I48MODE 1 "<fetchop_pred>" "<fetchop_constr>"))]
-         UNSPEC_ATOMIC))
-   (clobber (match_scratch:I48MODE 2 "=&r"))]
+(define_expand "atomic_compare_and_swap<mode>"
+  [(parallel
+     [(set (match_operand:DI 0 "register_operand" "")           ;; bool out
+           (unspec_volatile:DI [(const_int 0)] UNSPECV_CMPXCHG))
+      (set (match_operand:I48MODE 1 "register_operand" "")      ;; val out
+           (unspec_volatile:I48MODE [(const_int 0)] UNSPECV_CMPXCHG))
+      (set (match_operand:I48MODE 2 "memory_operand" "")        ;; memory
+           (unspec_volatile:I48MODE
+             [(match_dup 2)
+              (match_operand:I48MODE 3 "reg_or_8bit_operand" "") ;; expected
+              (match_operand:I48MODE 4 "add_operand" "")         ;; desired
+              (match_operand:SI 5 "const_int_operand" "")        ;; is_weak
+              (match_operand:SI 6 "const_int_operand" "")        ;; succ model
+              (match_operand:SI 7 "const_int_operand" "")]       ;; fail model
+            UNSPECV_CMPXCHG))])]
+  ""
+{
+  if (<MODE>mode == SImode)
+    {
+      operands[3] = convert_modes (DImode, SImode, operands[3], 0);
+      operands[4] = convert_modes (DImode, SImode, operands[4], 0);
+    }
+})
+
+(define_insn_and_split "*atomic_compare_and_swap<mode>"
+  [(set (match_operand:DI 0 "register_operand" "=&r")           ;; bool out
+        (unspec_volatile:DI [(const_int 0)] UNSPECV_CMPXCHG))
+   (set (match_operand:I48MODE 1 "register_operand" "=&r")      ;; val out
+        (unspec_volatile:I48MODE [(const_int 0)] UNSPECV_CMPXCHG))
+   (set (match_operand:I48MODE 2 "memory_operand" "+m")         ;; memory
+        (unspec_volatile:I48MODE
+          [(match_dup 2)
+           (match_operand:DI 3 "reg_or_8bit_operand" "rI")      ;; expected
+           (match_operand:DI 4 "add_operand" "rKL")             ;; desired
+           (match_operand:SI 5 "const_int_operand" "")          ;; is_weak
+           (match_operand:SI 6 "const_int_operand" "")          ;; succ model
+           (match_operand:SI 7 "const_int_operand" "")]         ;; fail model
+          UNSPECV_CMPXCHG))]
   ""
   "#"
   "epilogue_completed"
   [(const_int 0)]
 {
-  alpha_split_atomic_op (<CODE>, operands[0], operands[1],
-                         NULL, NULL, operands[2]);
+  alpha_split_compare_and_swap (operands);
   DONE;
 }
   [(set_attr "type" "multi")])

-(define_insn_and_split "sync_nand<mode>"
-  [(set (match_operand:I48MODE 0 "memory_operand" "+m")
-        (unspec:I48MODE
-          [(not:I48MODE
-             (and:I48MODE (match_dup 0)
-                          (match_operand:I48MODE 1 "register_operand" "r")))]
-         UNSPEC_ATOMIC))
-   (clobber (match_scratch:I48MODE 2 "=&r"))]
+(define_expand "atomic_compare_and_swap<mode>"
+  [(match_operand:DI 0 "register_operand" "")                   ;; bool out
+   (match_operand:I12MODE 1 "register_operand" "")              ;; val out
+   (match_operand:I12MODE 2 "mem_noofs_operand" "")             ;; memory
+   (match_operand:I12MODE 3 "register_operand" "")              ;; expected
+   (match_operand:I12MODE 4 "add_operand" "")                   ;; desired
+   (match_operand:SI 5 "const_int_operand" "")                  ;; is_weak
+   (match_operand:SI 6 "const_int_operand" "")                  ;; succ model
+   (match_operand:SI 7 "const_int_operand" "")]                 ;; fail model
+  ""
+{
+  alpha_expand_compare_and_swap_12 (operands);
+  DONE;
+})
+
+(define_insn_and_split "atomic_compare_and_swap<mode>_1"
+  [(set (match_operand:DI 0 "register_operand" "=&r")           ;; bool out
+        (unspec_volatile:DI [(const_int 0)] UNSPECV_CMPXCHG))
+   (set (match_operand:DI 1 "register_operand" "=&r")           ;; val out
+        (zero_extend:DI
+          (unspec_volatile:I12MODE [(const_int 0)] UNSPECV_CMPXCHG)))
+   (set (match_operand:I12MODE 2 "mem_noofs_operand" "+w")      ;; memory
+        (unspec_volatile:I12MODE
+          [(match_dup 2)
+           (match_operand:DI 3 "reg_or_8bit_operand" "rI")      ;; expected
+           (match_operand:DI 4 "reg_or_0_operand" "rJ")         ;; desired
+           (match_operand:DI 5 "register_operand" "r")          ;; align
+           (match_operand:SI 6 "const_int_operand" "")          ;; is_weak
+           (match_operand:SI 7 "const_int_operand" "")          ;; succ model
+           (match_operand:SI 8 "const_int_operand" "")]         ;; fail model
+          UNSPECV_CMPXCHG))
+   (clobber (match_scratch:DI 9 "=&r"))]
   ""
   "#"
   "epilogue_completed"
   [(const_int 0)]
 {
-  alpha_split_atomic_op (NOT, operands[0], operands[1],
-                         NULL, NULL, operands[2]);
+  alpha_split_compare_and_swap_12 (operands);
   DONE;
 }
   [(set_attr "type" "multi")])

-(define_insn_and_split "sync_old_<fetchop_name><mode>"
-  [(set (match_operand:I48MODE 0 "register_operand" "=&r")
-        (match_operand:I48MODE 1 "memory_operand" "+m"))
+(define_insn_and_split "atomic_exchange<mode>"
+  [(set (match_operand:I48MODE 0 "register_operand" "=&r")      ;; output
+        (match_operand:I48MODE 1 "memory_operand" "+m"))        ;; memory
    (set (match_dup 1)
        (unspec:I48MODE
-          [(FETCHOP:I48MODE (match_dup 1)
-             (match_operand:I48MODE 2 "<fetchop_pred>" "<fetchop_constr>"))]
-         UNSPEC_ATOMIC))
-   (clobber (match_scratch:I48MODE 3 "=&r"))]
+          [(match_operand:I48MODE 2 "add_operand" "rKL")        ;; input
+           (match_operand:SI 3 "const_int_operand" "")]         ;; model
+         UNSPEC_XCHG))
+   (clobber (match_scratch:I48MODE 4 "=&r"))]
   ""
   "#"
   "epilogue_completed"
   [(const_int 0)]
 {
-  alpha_split_atomic_op (<CODE>, operands[1], operands[2],
-                         operands[0], NULL, operands[3]);
+  alpha_split_atomic_exchange (operands);
   DONE;
 }
   [(set_attr "type" "multi")])

-(define_insn_and_split "sync_old_nand<mode>"
-  [(set (match_operand:I48MODE 0 "register_operand" "=&r")
-        (match_operand:I48MODE 1 "memory_operand" "+m"))
+(define_expand "atomic_exchange<mode>"
+  [(match_operand:I12MODE 0 "register_operand" "")              ;; output
+   (match_operand:I12MODE 1 "mem_noofs_operand" "")             ;; memory
+   (match_operand:I12MODE 2 "reg_or_0_operand" "")              ;; input
+   (match_operand:SI 3 "const_int_operand" "")]                 ;; model
+  ""
+{
+  alpha_expand_atomic_exchange_12 (operands);
+  DONE;
+})
+
+(define_insn_and_split "atomic_exchange<mode>_1"
+  [(set (match_operand:DI 0 "register_operand" "=&r")           ;; output
+        (zero_extend:DI
+          (match_operand:I12MODE 1 "mem_noofs_operand" "+w")))  ;; memory
    (set (match_dup 1)
-        (unspec:I48MODE
-          [(not:I48MODE
-             (and:I48MODE (match_dup 1)
-                          (match_operand:I48MODE 2 "register_operand" "r")))]
-         UNSPEC_ATOMIC))
-   (clobber (match_scratch:I48MODE 3 "=&r"))]
+        (unspec:I12MODE
+          [(match_operand:DI 2 "reg_or_8bit_operand" "rI")      ;; input
+           (match_operand:DI 3 "register_operand" "r")          ;; align
+           (match_operand:SI 4 "const_int_operand" "")]         ;; model
+         UNSPEC_XCHG))
+   (clobber (match_scratch:DI 5 "=&r"))]
   ""
   "#"
   "epilogue_completed"
   [(const_int 0)]
 {
-  alpha_split_atomic_op (NOT, operands[1], operands[2],
-                         operands[0], NULL, operands[3]);
+  alpha_split_atomic_exchange_12 (operands);
   DONE;
 }
   [(set_attr "type" "multi")])

-(define_insn_and_split "sync_new_<fetchop_name><mode>"
-  [(set (match_operand:I48MODE 0 "register_operand" "=&r")
-        (FETCHOP:I48MODE
-          (match_operand:I48MODE 1 "memory_operand" "+m")
-          (match_operand:I48MODE 2 "<fetchop_pred>" "<fetchop_constr>")))
-   (set (match_dup 1)
+(define_insn_and_split "atomic_<fetchop_name><mode>"
+  [(set (match_operand:I48MODE 0 "memory_operand" "+m")
        (unspec:I48MODE
-          [(FETCHOP:I48MODE (match_dup 1) (match_dup 2))]
+          [(FETCHOP:I48MODE (match_dup 0)
+             (match_operand:I48MODE 1 "<fetchop_pred>" "<fetchop_constr>"))
+           (match_operand:SI 2 "const_int_operand" "")]
         UNSPEC_ATOMIC))
   (clobber (match_scratch:I48MODE 3 "=&r"))]
  ""
@@ -158,20 +223,20 @@
  "epilogue_completed"
  [(const_int 0)]
 {
-  alpha_split_atomic_op (<CODE>, operands[1], operands[2],
-                         NULL, operands[0], operands[3]);
+  alpha_split_atomic_op (<CODE>, operands[0], operands[1],
+                         NULL, NULL, operands[3],
+                         (enum memmodel) INTVAL (operands[2]));
  DONE;
 }
  [(set_attr "type" "multi")])

-(define_insn_and_split "sync_new_nand<mode>"
-  [(set (match_operand:I48MODE 0 "register_operand" "=&r")
-        (not:I48MODE
-          (and:I48MODE (match_operand:I48MODE 1 "memory_operand" "+m")
-                       (match_operand:I48MODE 2 "register_operand" "r"))))
-   (set (match_dup 1)
+(define_insn_and_split "atomic_nand<mode>"
+  [(set (match_operand:I48MODE 0 "memory_operand" "+m")
        (unspec:I48MODE
-          [(not:I48MODE (and:I48MODE (match_dup 1) (match_dup 2)))]
+          [(not:I48MODE
+             (and:I48MODE (match_dup 0)
+                          (match_operand:I48MODE 1 "register_operand" "r")))
+           (match_operand:SI 2 "const_int_operand" "")]
         UNSPEC_ATOMIC))
   (clobber (match_scratch:I48MODE 3 "=&r"))]
  ""
@@ -179,130 +244,100 @@
  "epilogue_completed"
  [(const_int 0)]
 {
-  alpha_split_atomic_op (NOT, operands[1], operands[2],
-                         NULL, operands[0], operands[3]);
+  alpha_split_atomic_op (NOT, operands[0], operands[1],
+                         NULL, NULL, operands[3],
+                         (enum memmodel) INTVAL (operands[2]));
  DONE;
 }
  [(set_attr "type" "multi")])

-(define_expand "sync_compare_and_swap<mode>"
-  [(match_operand:I12MODE 0 "register_operand" "")
-   (match_operand:I12MODE 1 "memory_operand" "")
-   (match_operand:I12MODE 2 "register_operand" "")
-   (match_operand:I12MODE 3 "add_operand" "")]
-  ""
-{
-  alpha_expand_compare_and_swap_12 (operands[0], operands[1],
-                                    operands[2], operands[3]);
-  DONE;
-})
-
-(define_insn_and_split "sync_compare_and_swap<mode>_1"
-  [(set (match_operand:DI 0 "register_operand" "=&r,&r")
-        (zero_extend:DI
-          (mem:I12MODE (match_operand:DI 1 "register_operand" "r,r"))))
-   (set (mem:I12MODE (match_dup 1))
-        (unspec:I12MODE
-          [(match_operand:DI 2 "reg_or_8bit_operand" "J,rI")
-           (match_operand:DI 3 "register_operand" "r,r")
-           (match_operand:DI 4 "register_operand" "r,r")]
-         UNSPEC_CMPXCHG))
-   (clobber (match_scratch:DI 5 "=&r,&r"))
-   (clobber (match_scratch:DI 6 "=X,&r"))]
+(define_insn_and_split "atomic_fetch_<fetchop_name><mode>"
+  [(set (match_operand:I48MODE 0 "register_operand" "=&r")
+        (match_operand:I48MODE 1 "memory_operand" "+m"))
+   (set (match_dup 1)
+        (unspec:I48MODE
+          [(FETCHOP:I48MODE (match_dup 1)
+             (match_operand:I48MODE 2 "<fetchop_pred>" "<fetchop_constr>"))
+           (match_operand:SI 3 "const_int_operand" "")]
+         UNSPEC_ATOMIC))
+   (clobber (match_scratch:I48MODE 4 "=&r"))]
  ""
  "#"
  "epilogue_completed"
  [(const_int 0)]
 {
-  alpha_split_compare_and_swap_12 (<MODE>mode, operands[0], operands[1],
-                                   operands[2], operands[3], operands[4],
-                                   operands[5], operands[6]);
+  alpha_split_atomic_op (<CODE>, operands[1], operands[2],
+                         operands[0], NULL, operands[4],
+                         (enum memmodel) INTVAL (operands[3]));
  DONE;
 }
  [(set_attr "type" "multi")])

-(define_expand "sync_compare_and_swap<mode>"
-  [(parallel
-     [(set (match_operand:I48MODE 0 "register_operand" "")
-           (match_operand:I48MODE 1 "memory_operand" ""))
-      (set (match_dup 1)
-           (unspec:I48MODE
-             [(match_operand:I48MODE 2 "reg_or_8bit_operand" "")
-              (match_operand:I48MODE 3 "add_operand" "rKL")]
-            UNSPEC_CMPXCHG))
-      (clobber (match_scratch:I48MODE 4 "=&r"))])]
-  ""
-{
-  if (<MODE>mode == SImode)
-    operands[2] = convert_modes (DImode, SImode, operands[2], 0);
-})
-
-(define_insn_and_split "*sync_compare_and_swap<mode>"
+(define_insn_and_split "atomic_fetch_nand<mode>"
   [(set (match_operand:I48MODE 0 "register_operand" "=&r")
        (match_operand:I48MODE 1 "memory_operand" "+m"))
   (set (match_dup 1)
        (unspec:I48MODE
-          [(match_operand:DI 2 "reg_or_8bit_operand" "rI")
-           (match_operand:I48MODE 3 "add_operand" "rKL")]
-         UNSPEC_CMPXCHG))
+          [(not:I48MODE
+             (and:I48MODE (match_dup 1)
+                          (match_operand:I48MODE 2 "register_operand" "r")))
+           (match_operand:SI 3 "const_int_operand" "")]
+         UNSPEC_ATOMIC))
   (clobber (match_scratch:I48MODE 4 "=&r"))]
  ""
  "#"
  "epilogue_completed"
  [(const_int 0)]
 {
-  alpha_split_compare_and_swap (operands[0], operands[1], operands[2],
-                                operands[3], operands[4]);
+  alpha_split_atomic_op (NOT, operands[1], operands[2],
+                         operands[0], NULL, operands[4],
+                         (enum memmodel) INTVAL (operands[3]));
  DONE;
 }
  [(set_attr "type" "multi")])

-(define_expand "sync_lock_test_and_set<mode>"
-  [(match_operand:I12MODE 0 "register_operand" "")
-   (match_operand:I12MODE 1 "memory_operand" "")
-   (match_operand:I12MODE 2 "register_operand" "")]
-  ""
-{
-  alpha_expand_lock_test_and_set_12 (operands[0], operands[1], operands[2]);
-  DONE;
-})
-
-(define_insn_and_split "sync_lock_test_and_set<mode>_1"
-  [(set (match_operand:DI 0 "register_operand" "=&r")
-        (zero_extend:DI
-          (mem:I12MODE (match_operand:DI 1 "register_operand" "r"))))
-   (set (mem:I12MODE (match_dup 1))
-        (unspec:I12MODE
-          [(match_operand:DI 2 "reg_or_8bit_operand" "rI")
-           (match_operand:DI 3 "register_operand" "r")]
-         UNSPEC_XCHG))
-   (clobber (match_scratch:DI 4 "=&r"))]
+(define_insn_and_split "atomic_<fetchop_name>_fetch<mode>"
+  [(set (match_operand:I48MODE 0 "register_operand" "=&r")
+        (FETCHOP:I48MODE
+          (match_operand:I48MODE 1 "memory_operand" "+m")
+          (match_operand:I48MODE 2 "<fetchop_pred>" "<fetchop_constr>")))
+   (set (match_dup 1)
+        (unspec:I48MODE
+          [(FETCHOP:I48MODE (match_dup 1) (match_dup 2))
+           (match_operand:SI 3 "const_int_operand" "")]
+         UNSPEC_ATOMIC))
+   (clobber (match_scratch:I48MODE 4 "=&r"))]
  ""
  "#"
  "epilogue_completed"
  [(const_int 0)]
 {
-  alpha_split_lock_test_and_set_12 (<MODE>mode, operands[0], operands[1],
-                                    operands[2], operands[3], operands[4]);
+  alpha_split_atomic_op (<CODE>, operands[1], operands[2],
+                         NULL, operands[0], operands[4],
+                         (enum memmodel) INTVAL (operands[3]));
  DONE;
 }
  [(set_attr "type" "multi")])

-(define_insn_and_split "sync_lock_test_and_set<mode>"
-  [(set (match_operand:I48MODE 0 "register_operand" "=&r")
-        (match_operand:I48MODE 1 "memory_operand" "+m"))
+(define_insn_and_split "atomic_nand_fetch<mode>"
+  [(set (match_operand:I48MODE 0 "register_operand" "=&r")
+        (not:I48MODE
+          (and:I48MODE (match_operand:I48MODE 1 "memory_operand" "+m")
+                       (match_operand:I48MODE 2 "register_operand" "r"))))
   (set (match_dup 1)
        (unspec:I48MODE
-          [(match_operand:I48MODE 2 "add_operand" "rKL")]
-         UNSPEC_XCHG))
-   (clobber (match_scratch:I48MODE 3 "=&r"))]
+          [(not:I48MODE (and:I48MODE (match_dup 1) (match_dup 2)))
+           (match_operand:SI 3 "const_int_operand" "")]
+         UNSPEC_ATOMIC))
+   (clobber (match_scratch:I48MODE 4 "=&r"))]
  ""
  "#"
  "epilogue_completed"
  [(const_int 0)]
 {
-  alpha_split_lock_test_and_set (operands[0], operands[1],
-                                 operands[2], operands[3]);
+  alpha_split_atomic_op (NOT, operands[1], operands[2],
+                         NULL, operands[0], operands[4],
+                         (enum memmodel) INTVAL (operands[3]));
  DONE;
 }
  [(set_attr "type" "multi")])
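
For the QImode/HImode paths above, the "w" constraint and mem_noofs_operand guarantee an offset-free address, so the splitters can operate on the containing aligned quadword with extxl/insxl/mskxl inside an ldq_l/stq_c retry loop. Below is a hedged, portable C model of the same computation; a compare-and-swap loop on the containing 64-bit word stands in for the ll/sc loop, and it is not the code GCC emits.

#include <stdint.h>

static uint8_t
model_exchange_u8 (uint8_t *p, uint8_t val)
{
  uintptr_t addr = (uintptr_t) p;
  uint64_t *word = (uint64_t *) (addr & ~(uintptr_t) 7); /* align = addr & -8 */
  unsigned shift = (unsigned) (addr & 7) * 8;            /* byte position (little endian) */
  uint64_t mask = (uint64_t) 0xff << shift;              /* mskxl-style mask */
  uint64_t ins = (uint64_t) val << shift;                /* insxl-style insert */
  uint64_t old = __atomic_load_n (word, __ATOMIC_RELAXED);

  /* Retry until the containing quadword is updated atomically, the role
     played by the ldq_l/stq_c loop in the real expansion.  */
  while (!__atomic_compare_exchange_n (word, &old, (old & ~mask) | ins,
                                       /*weak=*/1,
                                       __ATOMIC_SEQ_CST, __ATOMIC_RELAXED))
    ;

  return (uint8_t) (old >> shift);                       /* extxl-style extract */
}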