alpha: Convert to atomic optabs.

From-SVN: r181395
Authored by Richard Henderson on 2011-11-15 15:43:08 -08:00; committed by Richard Henderson
parent 6dc88283f6
commit 2371d1a0a1
7 changed files with 492 additions and 270 deletions

ChangeLog

@@ -1,3 +1,29 @@
2011-11-15  Richard Henderson  <rth@redhat.com>

	* config/alpha/alpha.c (alpha_pre_atomic_barrier): New.
	(alpha_post_atomic_barrier): New.
	(alpha_split_atomic_op): New memmodel argument; honor it.
	(alpha_split_compare_and_swap): Take array of operands.  Honor
	memmodel; always set the bool output.
	(alpha_expand_compare_and_swap_12): Similarly.
	(alpha_split_compare_and_swap_12): Similarly.
	(alpha_split_atomic_exchange): Similarly.  Rename from
	alpha_split_lock_test_and_set.
	(alpha_expand_atomic_exchange_12): Similarly.  Rename from
	alpha_expand_lock_test_and_set_12.
	(alpha_split_atomic_exchange_12): Similarly.  Rename from
	alpha_split_lock_test_and_set_12.
	* config/alpha/alpha-protos.h: Update.
	* config/alpha/alpha.md (UNSPECV_CMPXCHG): New.
	* config/alpha/constraints.md ("w"): New.
	* config/alpha/predicates.md (mem_noofs_operand): New.
	* config/alpha/sync.md (atomic_compare_and_swap<mode>): Rename
	from sync_compare_and_swap<mode>; add the new parameters.
	(atomic_exchange<mode>): Update from sync_lock_test_and_set<mode>.
	(atomic_fetch_<op><mode>): Update from sync_old_<op><mode>.
	(atomic_<op>_fetch<mode>): Update from sync_new_<op><mode>.
	(atomic_<op><mode>): Update from sync_<op><mode>.

2011-11-16  Tom de Vries  <tom@codesourcery.com>

	* tree-ssa-tail-merge.c (replace_block_by): Add frequency of bb2
	to bb1.

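Editorial note (not part of the commit): the new atomic_* named patterns back GCC's __atomic builtins, which carry an explicit memory-model argument, whereas the retired sync_* patterns served the older __sync builtins. A hedged, source-level sketch of the kind of code that now routes through these expanders (function names here are made up; the builtins and __ATOMIC_* macros are GCC's documented interface):

/* Illustrative only.  */
#include <stdbool.h>

int
fetch_add_relaxed (int *p, int v)
{
  /* Expands through atomic_fetch_add<mode>; a relaxed model emits no mb.  */
  return __atomic_fetch_add (p, v, __ATOMIC_RELAXED);
}

bool
cas_strong (long *p, long *expected, long desired)
{
  /* Expands through atomic_compare_and_swap<mode>; the weak flag and the
     success/failure models become operands 5-7 of the expander.  */
  return __atomic_compare_exchange_n (p, expected, desired,
                                      /*weak=*/false,
                                      __ATOMIC_SEQ_CST, __ATOMIC_RELAXED);
}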
config/alpha/alpha-protos.h

@@ -88,15 +88,14 @@ extern bool alpha_emit_setcc (rtx[], enum machine_mode);
extern int alpha_split_conditional_move (enum rtx_code, rtx, rtx, rtx, rtx);
extern void alpha_emit_xfloating_arith (enum rtx_code, rtx[]);
extern void alpha_emit_xfloating_cvt (enum rtx_code, rtx[]);
extern void alpha_split_atomic_op (enum rtx_code, rtx, rtx, rtx, rtx, rtx);
extern void alpha_split_compare_and_swap (rtx, rtx, rtx, rtx, rtx);
extern void alpha_expand_compare_and_swap_12 (rtx, rtx, rtx, rtx);
extern void alpha_split_compare_and_swap_12 (enum machine_mode, rtx, rtx,
rtx, rtx, rtx, rtx, rtx);
extern void alpha_split_lock_test_and_set (rtx, rtx, rtx, rtx);
extern void alpha_expand_lock_test_and_set_12 (rtx, rtx, rtx);
extern void alpha_split_lock_test_and_set_12 (enum machine_mode, rtx, rtx,
rtx, rtx, rtx);
extern void alpha_split_atomic_op (enum rtx_code, rtx, rtx, rtx, rtx, rtx,
enum memmodel);
extern void alpha_split_compare_and_swap (rtx op[]);
extern void alpha_expand_compare_and_swap_12 (rtx op[]);
extern void alpha_split_compare_and_swap_12 (rtx op[]);
extern void alpha_split_atomic_exchange (rtx op[]);
extern void alpha_expand_atomic_exchange_12 (rtx op[]);
extern void alpha_split_atomic_exchange_12 (rtx op[]);
#endif
extern rtx alpha_use_linkage (rtx, bool, bool);

config/alpha/alpha.c

@@ -4196,6 +4196,47 @@ emit_store_conditional (enum machine_mode mode, rtx res, rtx mem, rtx val)
emit_insn (fn (res, mem, val));
}
/* Subroutines of the atomic operation splitters. Emit barriers
as needed for the memory MODEL. */
static void
alpha_pre_atomic_barrier (enum memmodel model)
{
switch (model)
{
case MEMMODEL_RELAXED:
case MEMMODEL_CONSUME:
case MEMMODEL_ACQUIRE:
break;
case MEMMODEL_RELEASE:
case MEMMODEL_ACQ_REL:
case MEMMODEL_SEQ_CST:
emit_insn (gen_memory_barrier ());
break;
default:
gcc_unreachable ();
}
}
static void
alpha_post_atomic_barrier (enum memmodel model)
{
switch (model)
{
case MEMMODEL_RELAXED:
case MEMMODEL_CONSUME:
case MEMMODEL_RELEASE:
break;
case MEMMODEL_ACQUIRE:
case MEMMODEL_ACQ_REL:
case MEMMODEL_SEQ_CST:
emit_insn (gen_memory_barrier ());
break;
default:
gcc_unreachable ();
}
}
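Editorial aside: taken together, the two helpers above encode one rule, a leading mb for release-or-stronger models and a trailing mb for acquire-or-stronger ones, with MEMMODEL_RELAXED and MEMMODEL_CONSUME emitting nothing. A minimal host-side restatement, assuming the C11 memory_order names as stand-ins for GCC's enum memmodel:

/* Sketch only; mirrors the switches in alpha_pre/post_atomic_barrier.  */
#include <stdatomic.h>
#include <stdbool.h>

bool
wants_pre_barrier (memory_order m)
{
  return m == memory_order_release
         || m == memory_order_acq_rel
         || m == memory_order_seq_cst;
}

bool
wants_post_barrier (memory_order m)
{
  return m == memory_order_acquire
         || m == memory_order_acq_rel
         || m == memory_order_seq_cst;
}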
/* A subroutine of the atomic operation splitters. Emit an insxl
instruction in MODE. */
@@ -4236,13 +4277,13 @@ emit_insxl (enum machine_mode mode, rtx op1, rtx op2)
a scratch register. */
void
alpha_split_atomic_op (enum rtx_code code, rtx mem, rtx val,
rtx before, rtx after, rtx scratch)
alpha_split_atomic_op (enum rtx_code code, rtx mem, rtx val, rtx before,
rtx after, rtx scratch, enum memmodel model)
{
enum machine_mode mode = GET_MODE (mem);
rtx label, x, cond = gen_rtx_REG (DImode, REGNO (scratch));
emit_insn (gen_memory_barrier ());
alpha_pre_atomic_barrier (model);
label = gen_label_rtx ();
emit_label (label);
@@ -4270,29 +4311,48 @@ alpha_split_atomic_op (enum rtx_code code, rtx mem, rtx val,
x = gen_rtx_EQ (DImode, cond, const0_rtx);
emit_unlikely_jump (x, label);
emit_insn (gen_memory_barrier ());
alpha_post_atomic_barrier (model);
}
/* Expand a compare and swap operation. */
void
alpha_split_compare_and_swap (rtx retval, rtx mem, rtx oldval, rtx newval,
rtx scratch)
alpha_split_compare_and_swap (rtx operands[])
{
enum machine_mode mode = GET_MODE (mem);
rtx label1, label2, x, cond = gen_lowpart (DImode, scratch);
rtx cond, retval, mem, oldval, newval;
bool is_weak;
enum memmodel mod_s, mod_f;
enum machine_mode mode;
rtx label1, label2, x;
emit_insn (gen_memory_barrier ());
cond = operands[0];
retval = operands[1];
mem = operands[2];
oldval = operands[3];
newval = operands[4];
is_weak = (operands[5] != const0_rtx);
mod_s = (enum memmodel) INTVAL (operands[6]);
mod_f = (enum memmodel) INTVAL (operands[7]);
mode = GET_MODE (mem);
label1 = gen_rtx_LABEL_REF (DImode, gen_label_rtx ());
alpha_pre_atomic_barrier (mod_s);
label1 = NULL_RTX;
if (!is_weak)
{
label1 = gen_rtx_LABEL_REF (DImode, gen_label_rtx ());
emit_label (XEXP (label1, 0));
}
label2 = gen_rtx_LABEL_REF (DImode, gen_label_rtx ());
emit_label (XEXP (label1, 0));
emit_load_locked (mode, retval, mem);
x = gen_lowpart (DImode, retval);
if (oldval == const0_rtx)
x = gen_rtx_NE (DImode, x, const0_rtx);
{
emit_move_insn (cond, const0_rtx);
x = gen_rtx_NE (DImode, x, const0_rtx);
}
else
{
x = gen_rtx_EQ (DImode, x, oldval);
@@ -4301,54 +4361,99 @@ alpha_split_compare_and_swap (rtx retval, rtx mem, rtx oldval, rtx newval,
}
emit_unlikely_jump (x, label2);
emit_move_insn (scratch, newval);
emit_store_conditional (mode, cond, mem, scratch);
emit_move_insn (cond, newval);
emit_store_conditional (mode, cond, mem, gen_lowpart (mode, cond));
x = gen_rtx_EQ (DImode, cond, const0_rtx);
emit_unlikely_jump (x, label1);
if (!is_weak)
{
x = gen_rtx_EQ (DImode, cond, const0_rtx);
emit_unlikely_jump (x, label1);
}
emit_insn (gen_memory_barrier ());
emit_label (XEXP (label2, 0));
if (mod_f != MEMMODEL_RELAXED)
emit_label (XEXP (label2, 0));
alpha_post_atomic_barrier (mod_s);
if (mod_f == MEMMODEL_RELAXED)
emit_label (XEXP (label2, 0));
}
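Editorial aside on the new is_weak handling above: only a strong compare-and-swap loops back to label1 when the store-conditional fails, because a weak compare-exchange is allowed to fail spuriously and its caller is expected to supply the retry loop. A hedged user-level illustration (hypothetical function, real builtins):

#include <stdbool.h>

long
atomic_fetch_max (long *p, long v)
{
  long old = __atomic_load_n (p, __ATOMIC_RELAXED);
  while (old < v
         && !__atomic_compare_exchange_n (p, &old, v, /*weak=*/true,
                                          __ATOMIC_SEQ_CST,
                                          __ATOMIC_RELAXED))
    ;  /* On failure, spurious or real, old was refreshed; try again.  */
  return old;
}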
void
alpha_expand_compare_and_swap_12 (rtx dst, rtx mem, rtx oldval, rtx newval)
alpha_expand_compare_and_swap_12 (rtx operands[])
{
enum machine_mode mode = GET_MODE (mem);
rtx cond, dst, mem, oldval, newval, is_weak, mod_s, mod_f;
enum machine_mode mode;
rtx addr, align, wdst;
rtx (*fn5) (rtx, rtx, rtx, rtx, rtx);
rtx (*gen) (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
cond = operands[0];
dst = operands[1];
mem = operands[2];
oldval = operands[3];
newval = operands[4];
is_weak = operands[5];
mod_s = operands[6];
mod_f = operands[7];
mode = GET_MODE (mem);
/* We forced the address into a register via mem_noofs_operand. */
addr = XEXP (mem, 0);
gcc_assert (register_operand (addr, DImode));
addr = force_reg (DImode, XEXP (mem, 0));
align = expand_simple_binop (Pmode, AND, addr, GEN_INT (-8),
NULL_RTX, 1, OPTAB_DIRECT);
oldval = convert_modes (DImode, mode, oldval, 1);
newval = emit_insxl (mode, newval, addr);
if (newval != const0_rtx)
newval = emit_insxl (mode, newval, addr);
wdst = gen_reg_rtx (DImode);
if (mode == QImode)
fn5 = gen_sync_compare_and_swapqi_1;
gen = gen_atomic_compare_and_swapqi_1;
else
fn5 = gen_sync_compare_and_swaphi_1;
emit_insn (fn5 (wdst, addr, oldval, newval, align));
gen = gen_atomic_compare_and_swaphi_1;
emit_insn (gen (cond, wdst, mem, oldval, newval, align,
is_weak, mod_s, mod_f));
emit_move_insn (dst, gen_lowpart (mode, wdst));
}
void
alpha_split_compare_and_swap_12 (enum machine_mode mode, rtx dest, rtx addr,
rtx oldval, rtx newval, rtx align,
rtx scratch, rtx cond)
alpha_split_compare_and_swap_12 (rtx operands[])
{
rtx label1, label2, mem, width, mask, x;
rtx cond, dest, orig_mem, oldval, newval, align, scratch;
enum machine_mode mode;
bool is_weak;
enum memmodel mod_s, mod_f;
rtx label1, label2, mem, addr, width, mask, x;
cond = operands[0];
dest = operands[1];
orig_mem = operands[2];
oldval = operands[3];
newval = operands[4];
align = operands[5];
is_weak = (operands[6] != const0_rtx);
mod_s = (enum memmodel) INTVAL (operands[7]);
mod_f = (enum memmodel) INTVAL (operands[8]);
scratch = operands[9];
mode = GET_MODE (orig_mem);
addr = XEXP (orig_mem, 0);
mem = gen_rtx_MEM (DImode, align);
MEM_VOLATILE_P (mem) = 1;
MEM_VOLATILE_P (mem) = MEM_VOLATILE_P (orig_mem);
emit_insn (gen_memory_barrier ());
label1 = gen_rtx_LABEL_REF (DImode, gen_label_rtx ());
alpha_pre_atomic_barrier (mod_s);
label1 = NULL_RTX;
if (!is_weak)
{
label1 = gen_rtx_LABEL_REF (DImode, gen_label_rtx ());
emit_label (XEXP (label1, 0));
}
label2 = gen_rtx_LABEL_REF (DImode, gen_label_rtx ());
emit_label (XEXP (label1, 0));
emit_load_locked (DImode, scratch, mem);
@@ -4357,7 +4462,10 @@ alpha_split_compare_and_swap_12 (enum machine_mode mode, rtx dest, rtx addr,
emit_insn (gen_extxl (dest, scratch, width, addr));
if (oldval == const0_rtx)
x = gen_rtx_NE (DImode, dest, const0_rtx);
{
emit_move_insn (cond, const0_rtx);
x = gen_rtx_NE (DImode, dest, const0_rtx);
}
else
{
x = gen_rtx_EQ (DImode, dest, oldval);
@@ -4366,25 +4474,47 @@ alpha_split_compare_and_swap_12 (enum machine_mode mode, rtx dest, rtx addr,
}
emit_unlikely_jump (x, label2);
emit_insn (gen_mskxl (scratch, scratch, mask, addr));
emit_insn (gen_iordi3 (scratch, scratch, newval));
emit_insn (gen_mskxl (cond, scratch, mask, addr));
emit_store_conditional (DImode, scratch, mem, scratch);
if (newval != const0_rtx)
emit_insn (gen_iordi3 (cond, cond, newval));
x = gen_rtx_EQ (DImode, scratch, const0_rtx);
emit_unlikely_jump (x, label1);
emit_store_conditional (DImode, cond, mem, cond);
emit_insn (gen_memory_barrier ());
emit_label (XEXP (label2, 0));
if (!is_weak)
{
x = gen_rtx_EQ (DImode, cond, const0_rtx);
emit_unlikely_jump (x, label1);
}
if (mod_f != MEMMODEL_RELAXED)
emit_label (XEXP (label2, 0));
alpha_post_atomic_barrier (mod_s);
if (mod_f == MEMMODEL_RELAXED)
emit_label (XEXP (label2, 0));
}
/* Expand an atomic exchange operation. */
void
alpha_split_lock_test_and_set (rtx retval, rtx mem, rtx val, rtx scratch)
alpha_split_atomic_exchange (rtx operands[])
{
enum machine_mode mode = GET_MODE (mem);
rtx label, x, cond = gen_lowpart (DImode, scratch);
rtx retval, mem, val, scratch;
enum memmodel model;
enum machine_mode mode;
rtx label, x, cond;
retval = operands[0];
mem = operands[1];
val = operands[2];
model = (enum memmodel) INTVAL (operands[3]);
scratch = operands[4];
mode = GET_MODE (mem);
cond = gen_lowpart (DImode, scratch);
alpha_pre_atomic_barrier (model);
label = gen_rtx_LABEL_REF (DImode, gen_label_rtx ());
emit_label (XEXP (label, 0));
@@ -4396,44 +4526,65 @@ alpha_split_lock_test_and_set (rtx retval, rtx mem, rtx val, rtx scratch)
x = gen_rtx_EQ (DImode, cond, const0_rtx);
emit_unlikely_jump (x, label);
emit_insn (gen_memory_barrier ());
alpha_post_atomic_barrier (model);
}
void
alpha_expand_lock_test_and_set_12 (rtx dst, rtx mem, rtx val)
alpha_expand_atomic_exchange_12 (rtx operands[])
{
enum machine_mode mode = GET_MODE (mem);
rtx dst, mem, val, model;
enum machine_mode mode;
rtx addr, align, wdst;
rtx (*fn4) (rtx, rtx, rtx, rtx);
rtx (*gen) (rtx, rtx, rtx, rtx, rtx);
/* Force the address into a register. */
addr = force_reg (DImode, XEXP (mem, 0));
dst = operands[0];
mem = operands[1];
val = operands[2];
model = operands[3];
mode = GET_MODE (mem);
/* We forced the address into a register via mem_noofs_operand. */
addr = XEXP (mem, 0);
gcc_assert (register_operand (addr, DImode));
/* Align it to a multiple of 8. */
align = expand_simple_binop (Pmode, AND, addr, GEN_INT (-8),
NULL_RTX, 1, OPTAB_DIRECT);
/* Insert val into the correct byte location within the word. */
val = emit_insxl (mode, val, addr);
if (val != const0_rtx)
val = emit_insxl (mode, val, addr);
wdst = gen_reg_rtx (DImode);
if (mode == QImode)
fn4 = gen_sync_lock_test_and_setqi_1;
gen = gen_atomic_exchangeqi_1;
else
fn4 = gen_sync_lock_test_and_sethi_1;
emit_insn (fn4 (wdst, addr, val, align));
gen = gen_atomic_exchangehi_1;
emit_insn (gen (wdst, mem, val, align, model));
emit_move_insn (dst, gen_lowpart (mode, wdst));
}
void
alpha_split_lock_test_and_set_12 (enum machine_mode mode, rtx dest, rtx addr,
rtx val, rtx align, rtx scratch)
alpha_split_atomic_exchange_12 (rtx operands[])
{
rtx dest, orig_mem, addr, val, align, scratch;
rtx label, mem, width, mask, x;
enum machine_mode mode;
enum memmodel model;
dest = operands[0];
orig_mem = operands[1];
val = operands[2];
align = operands[3];
model = (enum memmodel) INTVAL (operands[4]);
scratch = operands[5];
mode = GET_MODE (orig_mem);
addr = XEXP (orig_mem, 0);
mem = gen_rtx_MEM (DImode, align);
MEM_VOLATILE_P (mem) = 1;
MEM_VOLATILE_P (mem) = MEM_VOLATILE_P (orig_mem);
alpha_pre_atomic_barrier (model);
label = gen_rtx_LABEL_REF (DImode, gen_label_rtx ());
emit_label (XEXP (label, 0));
@@ -4444,14 +4595,15 @@ alpha_split_lock_test_and_set_12 (enum machine_mode mode, rtx dest, rtx addr,
mask = GEN_INT (mode == QImode ? 0xff : 0xffff);
emit_insn (gen_extxl (dest, scratch, width, addr));
emit_insn (gen_mskxl (scratch, scratch, mask, addr));
emit_insn (gen_iordi3 (scratch, scratch, val));
if (val != const0_rtx)
emit_insn (gen_iordi3 (scratch, scratch, val));
emit_store_conditional (DImode, scratch, mem, scratch);
x = gen_rtx_EQ (DImode, scratch, const0_rtx);
emit_unlikely_jump (x, label);
emit_insn (gen_memory_barrier ());
alpha_post_atomic_barrier (model);
}
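Editorial aside: the _12 variants above exist because Alpha's load-locked/store-conditional instructions only come in longword and quadword forms, so a QImode or HImode exchange is carried out on the containing aligned quadword with ext/ins/msk around an ldq_l/stq_c loop. At the source level the operation is simply, e.g. (hypothetical function name, real builtin):

unsigned char
exchange_byte (unsigned char *p, unsigned char v)
{
  /* Expands through atomic_exchange<mode> for QImode, i.e. the
     alpha_expand/split_atomic_exchange_12 path above.  */
  return __atomic_exchange_n (p, v, __ATOMIC_ACQ_REL);
}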
/* Adjust the cost of a scheduling dependency. Return the new cost of

config/alpha/alpha.md

@@ -81,6 +81,7 @@
UNSPECV_SETJMPR_ER ; builtin_setjmp_receiver fragment
UNSPECV_LL ; load-locked
UNSPECV_SC ; store-conditional
UNSPECV_CMPXCHG
])
;; On non-BWX targets, CQImode must be handled similarly to HImode

config/alpha/constraints.md

@@ -19,7 +19,7 @@
;;; Unused letters:
;;; ABCDEF V YZ
;;; de ghijklmnopq stu wxyz
;;; de ghijkl pq tu wxyz
;; Integer register constraints.
@@ -38,6 +38,10 @@
(define_register_constraint "v" "R0_REG"
"General register 0, function value return address")
(define_memory_constraint "w"
"A memory whose address is only a register"
(match_operand 0 "mem_noofs_operand"))
;; Integer constant constraints.
(define_constraint "I"
"An unsigned 8 bit constant"

config/alpha/predicates.md

@@ -623,3 +623,8 @@
(ior (match_operand 0 "register_operand")
(and (match_test "TARGET_BWX")
(match_operand 0 "memory_operand"))))
;; Accept a memory whose address is only a register.
(define_predicate "mem_noofs_operand"
(and (match_code "mem")
(match_code "reg" "0")))

config/alpha/sync.md

@@ -1,5 +1,5 @@
;; GCC machine description for Alpha synchronization instructions.
;; Copyright (C) 2005, 2007, 2008, 2009 Free Software Foundation, Inc.
;; Copyright (C) 2005, 2007, 2008, 2009, 2011 Free Software Foundation, Inc.
;;
;; This file is part of GCC.
;;
@@ -62,247 +62,282 @@
[(set_attr "type" "st_c")])
;; The Alpha Architecture Handbook says that it is UNPREDICTABLE whether
;; the lock is cleared by a TAKEN branch. This means that we can not
;; expand a ll/sc sequence until after the final basic-block reordering pass.
;; the lock is cleared by a normal load or store. This means we cannot
;; expand a ll/sc sequence before reload, lest a register spill is
;; inserted inside the sequence. It is also UNPREDICTABLE whether the
;; lock is cleared by a TAKEN branch. This means that we can not expand
;; a ll/sc sequence containing a branch (i.e. compare-and-swap) until after
;; the final basic-block reordering pass.
(define_insn_and_split "sync_<fetchop_name><mode>"
[(set (match_operand:I48MODE 0 "memory_operand" "+m")
(unspec:I48MODE
[(FETCHOP:I48MODE (match_dup 0)
(match_operand:I48MODE 1 "<fetchop_pred>" "<fetchop_constr>"))]
UNSPEC_ATOMIC))
(clobber (match_scratch:I48MODE 2 "=&r"))]
""
"#"
"epilogue_completed"
[(const_int 0)]
{
alpha_split_atomic_op (<CODE>, operands[0], operands[1],
NULL, NULL, operands[2]);
DONE;
}
[(set_attr "type" "multi")])
(define_insn_and_split "sync_nand<mode>"
[(set (match_operand:I48MODE 0 "memory_operand" "+m")
(unspec:I48MODE
[(not:I48MODE
(and:I48MODE (match_dup 0)
(match_operand:I48MODE 1 "register_operand" "r")))]
UNSPEC_ATOMIC))
(clobber (match_scratch:I48MODE 2 "=&r"))]
""
"#"
"epilogue_completed"
[(const_int 0)]
{
alpha_split_atomic_op (NOT, operands[0], operands[1],
NULL, NULL, operands[2]);
DONE;
}
[(set_attr "type" "multi")])
(define_insn_and_split "sync_old_<fetchop_name><mode>"
[(set (match_operand:I48MODE 0 "register_operand" "=&r")
(match_operand:I48MODE 1 "memory_operand" "+m"))
(set (match_dup 1)
(unspec:I48MODE
[(FETCHOP:I48MODE (match_dup 1)
(match_operand:I48MODE 2 "<fetchop_pred>" "<fetchop_constr>"))]
UNSPEC_ATOMIC))
(clobber (match_scratch:I48MODE 3 "=&r"))]
""
"#"
"epilogue_completed"
[(const_int 0)]
{
alpha_split_atomic_op (<CODE>, operands[1], operands[2],
operands[0], NULL, operands[3]);
DONE;
}
[(set_attr "type" "multi")])
(define_insn_and_split "sync_old_nand<mode>"
[(set (match_operand:I48MODE 0 "register_operand" "=&r")
(match_operand:I48MODE 1 "memory_operand" "+m"))
(set (match_dup 1)
(unspec:I48MODE
[(not:I48MODE
(and:I48MODE (match_dup 1)
(match_operand:I48MODE 2 "register_operand" "r")))]
UNSPEC_ATOMIC))
(clobber (match_scratch:I48MODE 3 "=&r"))]
""
"#"
"epilogue_completed"
[(const_int 0)]
{
alpha_split_atomic_op (NOT, operands[1], operands[2],
operands[0], NULL, operands[3]);
DONE;
}
[(set_attr "type" "multi")])
(define_insn_and_split "sync_new_<fetchop_name><mode>"
[(set (match_operand:I48MODE 0 "register_operand" "=&r")
(FETCHOP:I48MODE
(match_operand:I48MODE 1 "memory_operand" "+m")
(match_operand:I48MODE 2 "<fetchop_pred>" "<fetchop_constr>")))
(set (match_dup 1)
(unspec:I48MODE
[(FETCHOP:I48MODE (match_dup 1) (match_dup 2))]
UNSPEC_ATOMIC))
(clobber (match_scratch:I48MODE 3 "=&r"))]
""
"#"
"epilogue_completed"
[(const_int 0)]
{
alpha_split_atomic_op (<CODE>, operands[1], operands[2],
NULL, operands[0], operands[3]);
DONE;
}
[(set_attr "type" "multi")])
(define_insn_and_split "sync_new_nand<mode>"
[(set (match_operand:I48MODE 0 "register_operand" "=&r")
(not:I48MODE
(and:I48MODE (match_operand:I48MODE 1 "memory_operand" "+m")
(match_operand:I48MODE 2 "register_operand" "r"))))
(set (match_dup 1)
(unspec:I48MODE
[(not:I48MODE (and:I48MODE (match_dup 1) (match_dup 2)))]
UNSPEC_ATOMIC))
(clobber (match_scratch:I48MODE 3 "=&r"))]
""
"#"
"epilogue_completed"
[(const_int 0)]
{
alpha_split_atomic_op (NOT, operands[1], operands[2],
NULL, operands[0], operands[3]);
DONE;
}
[(set_attr "type" "multi")])
(define_expand "sync_compare_and_swap<mode>"
[(match_operand:I12MODE 0 "register_operand" "")
(match_operand:I12MODE 1 "memory_operand" "")
(match_operand:I12MODE 2 "register_operand" "")
(match_operand:I12MODE 3 "add_operand" "")]
""
{
alpha_expand_compare_and_swap_12 (operands[0], operands[1],
operands[2], operands[3]);
DONE;
})
(define_insn_and_split "sync_compare_and_swap<mode>_1"
[(set (match_operand:DI 0 "register_operand" "=&r,&r")
(zero_extend:DI
(mem:I12MODE (match_operand:DI 1 "register_operand" "r,r"))))
(set (mem:I12MODE (match_dup 1))
(unspec:I12MODE
[(match_operand:DI 2 "reg_or_8bit_operand" "J,rI")
(match_operand:DI 3 "register_operand" "r,r")
(match_operand:DI 4 "register_operand" "r,r")]
UNSPEC_CMPXCHG))
(clobber (match_scratch:DI 5 "=&r,&r"))
(clobber (match_scratch:DI 6 "=X,&r"))]
""
"#"
"epilogue_completed"
[(const_int 0)]
{
alpha_split_compare_and_swap_12 (<MODE>mode, operands[0], operands[1],
operands[2], operands[3], operands[4],
operands[5], operands[6]);
DONE;
}
[(set_attr "type" "multi")])
(define_expand "sync_compare_and_swap<mode>"
(define_expand "atomic_compare_and_swap<mode>"
[(parallel
[(set (match_operand:I48MODE 0 "register_operand" "")
(match_operand:I48MODE 1 "memory_operand" ""))
(set (match_dup 1)
(unspec:I48MODE
[(match_operand:I48MODE 2 "reg_or_8bit_operand" "")
(match_operand:I48MODE 3 "add_operand" "rKL")]
UNSPEC_CMPXCHG))
(clobber (match_scratch:I48MODE 4 "=&r"))])]
[(set (match_operand:DI 0 "register_operand" "") ;; bool out
(unspec_volatile:DI [(const_int 0)] UNSPECV_CMPXCHG))
(set (match_operand:I48MODE 1 "register_operand" "") ;; val out
(unspec_volatile:I48MODE [(const_int 0)] UNSPECV_CMPXCHG))
(set (match_operand:I48MODE 2 "memory_operand" "") ;; memory
(unspec_volatile:I48MODE
[(match_dup 2)
(match_operand:I48MODE 3 "reg_or_8bit_operand" "") ;; expected
(match_operand:I48MODE 4 "add_operand" "") ;; desired
(match_operand:SI 5 "const_int_operand" "") ;; is_weak
(match_operand:SI 6 "const_int_operand" "") ;; succ model
(match_operand:SI 7 "const_int_operand" "")] ;; fail model
UNSPECV_CMPXCHG))])]
""
{
if (<MODE>mode == SImode)
operands[2] = convert_modes (DImode, SImode, operands[2], 0);
{
operands[3] = convert_modes (DImode, SImode, operands[3], 0);
operands[4] = convert_modes (DImode, SImode, operands[4], 0);
}
})
(define_insn_and_split "*sync_compare_and_swap<mode>"
[(set (match_operand:I48MODE 0 "register_operand" "=&r")
(match_operand:I48MODE 1 "memory_operand" "+m"))
(define_insn_and_split "*atomic_compare_and_swap<mode>"
[(set (match_operand:DI 0 "register_operand" "=&r") ;; bool out
(unspec_volatile:DI [(const_int 0)] UNSPECV_CMPXCHG))
(set (match_operand:I48MODE 1 "register_operand" "=&r") ;; val out
(unspec_volatile:I48MODE [(const_int 0)] UNSPECV_CMPXCHG))
(set (match_operand:I48MODE 2 "memory_operand" "+m") ;; memory
(unspec_volatile:I48MODE
[(match_dup 2)
(match_operand:DI 3 "reg_or_8bit_operand" "rI") ;; expected
(match_operand:DI 4 "add_operand" "rKL") ;; desired
(match_operand:SI 5 "const_int_operand" "") ;; is_weak
(match_operand:SI 6 "const_int_operand" "") ;; succ model
(match_operand:SI 7 "const_int_operand" "")] ;; fail model
UNSPECV_CMPXCHG))]
""
"#"
"epilogue_completed"
[(const_int 0)]
{
alpha_split_compare_and_swap (operands);
DONE;
}
[(set_attr "type" "multi")])
(define_expand "atomic_compare_and_swap<mode>"
[(match_operand:DI 0 "register_operand" "") ;; bool out
(match_operand:I12MODE 1 "register_operand" "") ;; val out
(match_operand:I12MODE 2 "mem_noofs_operand" "") ;; memory
(match_operand:I12MODE 3 "register_operand" "") ;; expected
(match_operand:I12MODE 4 "add_operand" "") ;; desired
(match_operand:SI 5 "const_int_operand" "") ;; is_weak
(match_operand:SI 6 "const_int_operand" "") ;; succ model
(match_operand:SI 7 "const_int_operand" "")] ;; fail model
""
{
alpha_expand_compare_and_swap_12 (operands);
DONE;
})
(define_insn_and_split "atomic_compare_and_swap<mode>_1"
[(set (match_operand:DI 0 "register_operand" "=&r") ;; bool out
(unspec_volatile:DI [(const_int 0)] UNSPECV_CMPXCHG))
(set (match_operand:DI 1 "register_operand" "=&r") ;; val out
(zero_extend:DI
(unspec_volatile:I12MODE [(const_int 0)] UNSPECV_CMPXCHG)))
(set (match_operand:I12MODE 2 "mem_noofs_operand" "+w") ;; memory
(unspec_volatile:I12MODE
[(match_dup 2)
(match_operand:DI 3 "reg_or_8bit_operand" "rI") ;; expected
(match_operand:DI 4 "reg_or_0_operand" "rJ") ;; desired
(match_operand:DI 5 "register_operand" "r") ;; align
(match_operand:SI 6 "const_int_operand" "") ;; is_weak
(match_operand:SI 7 "const_int_operand" "") ;; succ model
(match_operand:SI 8 "const_int_operand" "")] ;; fail model
UNSPECV_CMPXCHG))
(clobber (match_scratch:DI 9 "=&r"))]
""
"#"
"epilogue_completed"
[(const_int 0)]
{
alpha_split_compare_and_swap_12 (operands);
DONE;
}
[(set_attr "type" "multi")])
(define_insn_and_split "atomic_exchange<mode>"
[(set (match_operand:I48MODE 0 "register_operand" "=&r") ;; output
(match_operand:I48MODE 1 "memory_operand" "+m")) ;; memory
(set (match_dup 1)
(unspec:I48MODE
[(match_operand:DI 2 "reg_or_8bit_operand" "rI")
(match_operand:I48MODE 3 "add_operand" "rKL")]
UNSPEC_CMPXCHG))
[(match_operand:I48MODE 2 "add_operand" "rKL") ;; input
(match_operand:SI 3 "const_int_operand" "")] ;; model
UNSPEC_XCHG))
(clobber (match_scratch:I48MODE 4 "=&r"))]
""
"#"
"epilogue_completed"
[(const_int 0)]
{
alpha_split_compare_and_swap (operands[0], operands[1], operands[2],
operands[3], operands[4]);
alpha_split_atomic_exchange (operands);
DONE;
}
[(set_attr "type" "multi")])
(define_expand "sync_lock_test_and_set<mode>"
[(match_operand:I12MODE 0 "register_operand" "")
(match_operand:I12MODE 1 "memory_operand" "")
(match_operand:I12MODE 2 "register_operand" "")]
(define_expand "atomic_exchange<mode>"
[(match_operand:I12MODE 0 "register_operand" "") ;; output
(match_operand:I12MODE 1 "mem_noofs_operand" "") ;; memory
(match_operand:I12MODE 2 "reg_or_0_operand" "") ;; input
(match_operand:SI 3 "const_int_operand" "")] ;; model
""
{
alpha_expand_lock_test_and_set_12 (operands[0], operands[1], operands[2]);
alpha_expand_atomic_exchange_12 (operands);
DONE;
})
(define_insn_and_split "sync_lock_test_and_set<mode>_1"
[(set (match_operand:DI 0 "register_operand" "=&r")
(define_insn_and_split "atomic_exchange<mode>_1"
[(set (match_operand:DI 0 "register_operand" "=&r") ;; output
(zero_extend:DI
(mem:I12MODE (match_operand:DI 1 "register_operand" "r"))))
(set (mem:I12MODE (match_dup 1))
(match_operand:I12MODE 1 "mem_noofs_operand" "+w"))) ;; memory
(set (match_dup 1)
(unspec:I12MODE
[(match_operand:DI 2 "reg_or_8bit_operand" "rI")
(match_operand:DI 3 "register_operand" "r")]
[(match_operand:DI 2 "reg_or_8bit_operand" "rI") ;; input
(match_operand:DI 3 "register_operand" "r") ;; align
(match_operand:SI 4 "const_int_operand" "")] ;; model
UNSPEC_XCHG))
(clobber (match_scratch:DI 4 "=&r"))]
(clobber (match_scratch:DI 5 "=&r"))]
""
"#"
"epilogue_completed"
[(const_int 0)]
{
alpha_split_lock_test_and_set_12 (<MODE>mode, operands[0], operands[1],
operands[2], operands[3], operands[4]);
alpha_split_atomic_exchange_12 (operands);
DONE;
}
[(set_attr "type" "multi")])
(define_insn_and_split "sync_lock_test_and_set<mode>"
[(set (match_operand:I48MODE 0 "register_operand" "=&r")
(match_operand:I48MODE 1 "memory_operand" "+m"))
(set (match_dup 1)
(define_insn_and_split "atomic_<fetchop_name><mode>"
[(set (match_operand:I48MODE 0 "memory_operand" "+m")
(unspec:I48MODE
[(match_operand:I48MODE 2 "add_operand" "rKL")]
UNSPEC_XCHG))
[(FETCHOP:I48MODE (match_dup 0)
(match_operand:I48MODE 1 "<fetchop_pred>" "<fetchop_constr>"))
(match_operand:SI 2 "const_int_operand" "")]
UNSPEC_ATOMIC))
(clobber (match_scratch:I48MODE 3 "=&r"))]
""
"#"
"epilogue_completed"
[(const_int 0)]
{
alpha_split_lock_test_and_set (operands[0], operands[1],
operands[2], operands[3]);
alpha_split_atomic_op (<CODE>, operands[0], operands[1],
NULL, NULL, operands[3],
(enum memmodel) INTVAL (operands[2]));
DONE;
}
[(set_attr "type" "multi")])
(define_insn_and_split "atomic_nand<mode>"
[(set (match_operand:I48MODE 0 "memory_operand" "+m")
(unspec:I48MODE
[(not:I48MODE
(and:I48MODE (match_dup 0)
(match_operand:I48MODE 1 "register_operand" "r")))
(match_operand:SI 2 "const_int_operand" "")]
UNSPEC_ATOMIC))
(clobber (match_scratch:I48MODE 3 "=&r"))]
""
"#"
"epilogue_completed"
[(const_int 0)]
{
alpha_split_atomic_op (NOT, operands[0], operands[1],
NULL, NULL, operands[3],
(enum memmodel) INTVAL (operands[2]));
DONE;
}
[(set_attr "type" "multi")])
(define_insn_and_split "atomic_fetch_<fetchop_name><mode>"
[(set (match_operand:I48MODE 0 "register_operand" "=&r")
(match_operand:I48MODE 1 "memory_operand" "+m"))
(set (match_dup 1)
(unspec:I48MODE
[(FETCHOP:I48MODE (match_dup 1)
(match_operand:I48MODE 2 "<fetchop_pred>" "<fetchop_constr>"))
(match_operand:SI 3 "const_int_operand" "")]
UNSPEC_ATOMIC))
(clobber (match_scratch:I48MODE 4 "=&r"))]
""
"#"
"epilogue_completed"
[(const_int 0)]
{
alpha_split_atomic_op (<CODE>, operands[1], operands[2],
operands[0], NULL, operands[4],
(enum memmodel) INTVAL (operands[3]));
DONE;
}
[(set_attr "type" "multi")])
(define_insn_and_split "atomic_fetch_nand<mode>"
[(set (match_operand:I48MODE 0 "register_operand" "=&r")
(match_operand:I48MODE 1 "memory_operand" "+m"))
(set (match_dup 1)
(unspec:I48MODE
[(not:I48MODE
(and:I48MODE (match_dup 1)
(match_operand:I48MODE 2 "register_operand" "r")))
(match_operand:SI 3 "const_int_operand" "")]
UNSPEC_ATOMIC))
(clobber (match_scratch:I48MODE 4 "=&r"))]
""
"#"
"epilogue_completed"
[(const_int 0)]
{
alpha_split_atomic_op (NOT, operands[1], operands[2],
operands[0], NULL, operands[4],
(enum memmodel) INTVAL (operands[3]));
DONE;
}
[(set_attr "type" "multi")])
(define_insn_and_split "atomic_<fetchop_name>_fetch<mode>"
[(set (match_operand:I48MODE 0 "register_operand" "=&r")
(FETCHOP:I48MODE
(match_operand:I48MODE 1 "memory_operand" "+m")
(match_operand:I48MODE 2 "<fetchop_pred>" "<fetchop_constr>")))
(set (match_dup 1)
(unspec:I48MODE
[(FETCHOP:I48MODE (match_dup 1) (match_dup 2))
(match_operand:SI 3 "const_int_operand" "")]
UNSPEC_ATOMIC))
(clobber (match_scratch:I48MODE 4 "=&r"))]
""
"#"
"epilogue_completed"
[(const_int 0)]
{
alpha_split_atomic_op (<CODE>, operands[1], operands[2],
NULL, operands[0], operands[4],
(enum memmodel) INTVAL (operands[3]));
DONE;
}
[(set_attr "type" "multi")])
(define_insn_and_split "atomic_nand_fetch<mode>"
[(set (match_operand:I48MODE 0 "register_operand" "=&r")
(not:I48MODE
(and:I48MODE (match_operand:I48MODE 1 "memory_operand" "+m")
(match_operand:I48MODE 2 "register_operand" "r"))))
(set (match_dup 1)
(unspec:I48MODE
[(not:I48MODE (and:I48MODE (match_dup 1) (match_dup 2)))
(match_operand:SI 3 "const_int_operand" "")]
UNSPEC_ATOMIC))
(clobber (match_scratch:I48MODE 4 "=&r"))]
""
"#"
"epilogue_completed"
[(const_int 0)]
{
alpha_split_atomic_op (NOT, operands[1], operands[2],
NULL, operands[0], operands[4],
(enum memmodel) INTVAL (operands[3]));
DONE;
}
[(set_attr "type" "multi")])
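Editorial note on the naming used above: atomic_fetch_<op><mode> (from sync_old_<op><mode>) yields the value the memory held before the operation, while atomic_<op>_fetch<mode> (from sync_new_<op><mode>) yields the value after it; the nand forms compute ~(mem & operand). A hedged source-level illustration (hypothetical function, real builtins):

int
fetch_conventions (int *p, int v)
{
  int before = __atomic_fetch_and (p, v, __ATOMIC_SEQ_CST);   /* atomic_fetch_and<mode> */
  int after  = __atomic_nand_fetch (p, v, __ATOMIC_SEQ_CST);  /* atomic_nand_fetch<mode> */
  return before ^ after;
}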