{
rtx bval, rval, mem, oldval, newval, is_weak, mod_s, mod_f, x;
machine_mode mode, cmp_mode;
- rtx (*gen) (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
+ typedef rtx (*gen_cas_fn) (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
+ int idx;
+ gen_cas_fn gen;
+ const gen_cas_fn split_cas[] =
+ {
+ gen_aarch64_compare_and_swapqi,
+ gen_aarch64_compare_and_swaphi,
+ gen_aarch64_compare_and_swapsi,
+ gen_aarch64_compare_and_swapdi
+ };
+ const gen_cas_fn atomic_cas[] =
+ {
+ gen_aarch64_compare_and_swapqi_lse,
+ gen_aarch64_compare_and_swaphi_lse,
+ gen_aarch64_compare_and_swapsi_lse,
+ gen_aarch64_compare_and_swapdi_lse
+ };
bval = operands[0];
rval = operands[1];
switch (mode)
{
- case QImode: gen = gen_atomic_compare_and_swapqi_1; break;
- case HImode: gen = gen_atomic_compare_and_swaphi_1; break;
- case SImode: gen = gen_atomic_compare_and_swapsi_1; break;
- case DImode: gen = gen_atomic_compare_and_swapdi_1; break;
+ case QImode: idx = 0; break;
+ case HImode: idx = 1; break;
+ case SImode: idx = 2; break;
+ case DImode: idx = 3; break;
default:
gcc_unreachable ();
}
+ if (TARGET_LSE)
+ gen = atomic_cas[idx];
+ else
+ gen = split_cas[idx];
emit_insn (gen (rval, mem, oldval, newval, is_weak, mod_s, mod_f));
}
}
+/* Emit an atomic compare-and-swap operation.  RVAL is the destination
+   register for the data read back from memory.  MEM is the memory location.
+   EXPECTED is the value expected to be in memory.  DESIRED is the value to
+   store to memory.  MODEL is the memory ordering to use.  On return, the CC
+   register compares EQ iff the value loaded by the CAS equals EXPECTED,
+   i.e. iff the swap was made.  */
+
+void
+aarch64_gen_atomic_cas (rtx rval, rtx mem,
+ rtx expected, rtx desired,
+ rtx model)
+{
+ rtx (*gen) (rtx, rtx, rtx, rtx);
+ machine_mode mode;
+
+ mode = GET_MODE (mem);
+
+ /* Pick the mode-specific CAS generator; only QI/HI/SI/DI are valid here.  */
+ switch (mode)
+ {
+ case QImode: gen = gen_aarch64_atomic_casqi; break;
+ case HImode: gen = gen_aarch64_atomic_cashi; break;
+ case SImode: gen = gen_aarch64_atomic_cassi; break;
+ case DImode: gen = gen_aarch64_atomic_casdi; break;
+ default:
+ gcc_unreachable ();
+ }
+
+ /* Move the expected value into the CAS destination register.  The CAS
+    pattern uses operand 0 both as the comparison input (via match_dup) and
+    as the loaded-value output, so RVAL must hold EXPECTED beforehand.  */
+ emit_insn (gen_rtx_SET (rval, expected));
+
+ /* Emit the CAS.  */
+ emit_insn (gen (rval, mem, desired, model));
+
+ /* Compare the expected value with the value loaded by the CAS, to establish
+    whether the swap was made.  */
+ aarch64_gen_compare_reg (EQ, rval, expected);
+}
+
/* Split a compare and swap pattern. */
void
UNSPECV_STL ; Represent an atomic store or store-release.
UNSPECV_ATOMIC_CMPSW ; Represent an atomic compare swap.
UNSPECV_ATOMIC_EXCHG ; Represent an atomic exchange.
+ UNSPECV_ATOMIC_CAS ; Represent an atomic CAS.
UNSPECV_ATOMIC_OP ; Represent an atomic operation.
])
}
)
-(define_insn_and_split "atomic_compare_and_swap<mode>_1"
+(define_insn_and_split "aarch64_compare_and_swap<mode>"
[(set (reg:CC CC_REGNUM) ;; bool out
(unspec_volatile:CC [(const_int 0)] UNSPECV_ATOMIC_CMPSW))
- (set (match_operand:SI 0 "register_operand" "=&r") ;; val out
+ (set (match_operand:SI 0 "register_operand" "=&r") ;; val out
(zero_extend:SI
(match_operand:SHORT 1 "aarch64_sync_memory_operand" "+Q"))) ;; memory
(set (match_dup 1)
(match_operand:SHORT 3 "register_operand" "r") ;; desired
(match_operand:SI 4 "const_int_operand") ;; is_weak
(match_operand:SI 5 "const_int_operand") ;; mod_s
- (match_operand:SI 6 "const_int_operand")] ;; mod_f
+ (match_operand:SI 6 "const_int_operand")] ;; mod_f
UNSPECV_ATOMIC_CMPSW))
(clobber (match_scratch:SI 7 "=&r"))]
""
}
)
-(define_insn_and_split "atomic_compare_and_swap<mode>_1"
+(define_insn_and_split "aarch64_compare_and_swap<mode>"
[(set (reg:CC CC_REGNUM) ;; bool out
(unspec_volatile:CC [(const_int 0)] UNSPECV_ATOMIC_CMPSW))
(set (match_operand:GPI 0 "register_operand" "=&r") ;; val out
- (match_operand:GPI 1 "aarch64_sync_memory_operand" "+Q")) ;; memory
+ (match_operand:GPI 1 "aarch64_sync_memory_operand" "+Q")) ;; memory
(set (match_dup 1)
(unspec_volatile:GPI
[(match_operand:GPI 2 "aarch64_plus_operand" "rI") ;; expect
(match_operand:GPI 3 "register_operand" "r") ;; desired
- (match_operand:SI 4 "const_int_operand") ;; is_weak
- (match_operand:SI 5 "const_int_operand") ;; mod_s
+ (match_operand:SI 4 "const_int_operand") ;; is_weak
+ (match_operand:SI 5 "const_int_operand") ;; mod_s
(match_operand:SI 6 "const_int_operand")] ;; mod_f
UNSPECV_ATOMIC_CMPSW))
(clobber (match_scratch:SI 7 "=&r"))]
}
)
+;; Compare-and-swap for QImode/HImode when LSE is available.  The result is
+;; zero-extended into an SI register.  Emitted as "#" and split after reload
+;; into a single CAS instruction via aarch64_gen_atomic_cas.  Only operands
+;; 0-3 and 5 (mod_s) are consumed by the split; is_weak (operand 4) and
+;; mod_f (operand 6) are ignored here.
+(define_insn_and_split "aarch64_compare_and_swap<mode>_lse"
+ [(set (reg:CC CC_REGNUM) ;; bool out
+ (unspec_volatile:CC [(const_int 0)] UNSPECV_ATOMIC_CMPSW))
+ (set (match_operand:SI 0 "register_operand" "=&r") ;; val out
+ (zero_extend:SI
+ (match_operand:SHORT 1 "aarch64_sync_memory_operand" "+Q"))) ;; memory
+ (set (match_dup 1)
+ (unspec_volatile:SHORT
+ [(match_operand:SI 2 "aarch64_plus_operand" "rI") ;; expected
+ (match_operand:SHORT 3 "register_operand" "r") ;; desired
+ (match_operand:SI 4 "const_int_operand") ;; is_weak
+ (match_operand:SI 5 "const_int_operand") ;; mod_s
+ (match_operand:SI 6 "const_int_operand")] ;; mod_f
+ UNSPECV_ATOMIC_CMPSW))]
+ "TARGET_LSE"
+ "#"
+ "&& reload_completed"
+ [(const_int 0)]
+ {
+ aarch64_gen_atomic_cas (operands[0], operands[1],
+ operands[2], operands[3],
+ operands[5]);
+ DONE;
+ }
+)
+
+(define_insn_and_split "aarch64_compare_and_swap<mode>_lse"
+ [(set (reg:CC CC_REGNUM) ;; bool out
+ (unspec_volatile:CC [(const_int 0)] UNSPECV_ATOMIC_CMPSW))
+ (set (match_operand:GPI 0 "register_operand" "=&r") ;; val out
+ (match_operand:GPI 1 "aarch64_sync_memory_operand" "+Q")) ;; memory
+ (set (match_dup 1)
+ (unspec_volatile:GPI
+ [(match_operand:GPI 2 "aarch64_plus_operand" "rI") ;; expect
+ (match_operand:GPI 3 "register_operand" "r") ;; desired
+ (match_operand:SI 4 "const_int_operand") ;; is_weak
+ (match_operand:SI 5 "const_int_operand") ;; mod_s
+ (match_operand:SI 6 "const_int_operand")] ;; mod_f
+ UNSPECV_ATOMIC_CMPSW))]
+ "TARGET_LSE "
+ "#"
+ "&& reload_completed"
+ [(const_int 0)]
+ {
+ aarch64_gen_atomic_cas (operands[0], operands[1],
+ operands[2], operands[3],
+ operands[5]);
+ DONE;
+ }
+)
+
(define_insn_and_split "atomic_exchange<mode>"
[(set (match_operand:ALLI 0 "register_operand" "=&r") ;; output
(match_operand:ALLI 1 "aarch64_sync_memory_operand" "+Q")) ;; memory
return "dmb\\tish";
}
)
+
+;; ARMv8.1 LSE instructions.
+
+;; Atomic compare-and-swap: HI and smaller modes.
+
+;; Single CAS instruction for QI/HI.  Operand 0 doubles as the comparison
+;; input (via match_dup inside the unspec) and as the zero-extended
+;; loaded-value output, hence the "+&r" constraint.
+;; NOTE(review): the condition requires reload_completed — presumably this
+;; insn is only created post-reload by aarch64_gen_atomic_cas; confirm.
+(define_insn "aarch64_atomic_cas<mode>"
+ [(set (match_operand:SI 0 "register_operand" "+&r") ;; out
+ (zero_extend:SI
+ (match_operand:SHORT 1 "aarch64_sync_memory_operand" "+Q"))) ;; memory.
+ (set (match_dup 1)
+ (unspec_volatile:SHORT
+ [(match_dup 0)
+ (match_operand:SHORT 2 "register_operand" "r") ;; value.
+ (match_operand:SI 3 "const_int_operand" "")] ;; model.
+ UNSPECV_ATOMIC_CAS))]
+ "TARGET_LSE && reload_completed"
+{
+ /* Pick the CAS variant from the memory model: relaxed -> CAS,
+    acquire/consume -> CASA, release -> CASL, anything stronger
+    (acq_rel/seq_cst) -> CASAL.  */
+ enum memmodel model = memmodel_from_int (INTVAL (operands[3]));
+ if (is_mm_relaxed (model))
+ return "cas<atomic_sfx>\t%<w>0, %<w>2, %1";
+ else if (is_mm_acquire (model) || is_mm_consume (model))
+ return "casa<atomic_sfx>\t%<w>0, %<w>2, %1";
+ else if (is_mm_release (model))
+ return "casl<atomic_sfx>\t%<w>0, %<w>2, %1";
+ else
+ return "casal<atomic_sfx>\t%<w>0, %<w>2, %1";
+})
+
+;; Atomic compare-and-swap: SI and larger modes.
+
+;; Single CAS instruction for SI/DI.  Operand 0 doubles as the comparison
+;; input (via match_dup inside the unspec) and as the loaded-value output,
+;; hence the "+&r" constraint.
+;; NOTE(review): the condition requires reload_completed — presumably this
+;; insn is only created post-reload by aarch64_gen_atomic_cas; confirm.
+(define_insn "aarch64_atomic_cas<mode>"
+ [(set (match_operand:GPI 0 "register_operand" "+&r") ;; out
+ (match_operand:GPI 1 "aarch64_sync_memory_operand" "+Q")) ;; memory.
+ (set (match_dup 1)
+ (unspec_volatile:GPI
+ [(match_dup 0)
+ (match_operand:GPI 2 "register_operand" "r") ;; value.
+ (match_operand:SI 3 "const_int_operand" "")] ;; model.
+ UNSPECV_ATOMIC_CAS))]
+ "TARGET_LSE && reload_completed"
+{
+ /* Pick the CAS variant from the memory model: relaxed -> CAS,
+    acquire/consume -> CASA, release -> CASL, anything stronger
+    (acq_rel/seq_cst) -> CASAL.  */
+ enum memmodel model = memmodel_from_int (INTVAL (operands[3]));
+ if (is_mm_relaxed (model))
+ return "cas<atomic_sfx>\t%<w>0, %<w>2, %1";
+ else if (is_mm_acquire (model) || is_mm_consume (model))
+ return "casa<atomic_sfx>\t%<w>0, %<w>2, %1";
+ else if (is_mm_release (model))
+ return "casl<atomic_sfx>\t%<w>0, %<w>2, %1";
+ else
+ return "casal<atomic_sfx>\t%<w>0, %<w>2, %1";
+})