  mode = GET_MODE (mem);
  model = memmodel_from_int (INTVAL (model_rtx));

+  /* When OLDVAL is zero and we want the strong version we can emit a tighter
+    loop:
+    .label1:
+	LD[A]XR	rval, [mem]
+	CBNZ	rval, .label2
+	ST[L]XR	scratch, newval, [mem]
+	CBNZ	scratch, .label1
+    .label2:
+	CMP	rval, 0.  */
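+  /* (For comparison, the general path below emits a CMP of RVAL against
+     OLDVAL and a B.NE to .label2; that CMP is also what provides the
+     condition flags that may be read after .label2, which is why the CBNZ
+     form needs the explicit CMP emitted there.)  */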
+  bool strong_zero_p = !is_weak && oldval == const0_rtx;
+
  label1 = NULL;
  if (!is_weak)
    {
      label1 = gen_label_rtx ();
      emit_label (label1);
    }
  label2 = gen_label_rtx ();

  /* The initial load can be relaxed for a __sync operation since a final
     barrier will be emitted to stop code hoisting or sinking past the
     fence.  */
  if (is_mm_sync (model))
    aarch64_emit_load_exclusive (mode, rval, mem,
				 GEN_INT (MEMMODEL_RELAXED));
  else
    aarch64_emit_load_exclusive (mode, rval, mem, model_rtx);

-  cond = aarch64_gen_compare_reg (NE, rval, oldval);
-  x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
-  x = gen_rtx_IF_THEN_ELSE (VOIDmode, x,
-			    gen_rtx_LABEL_REF (Pmode, label2), pc_rtx);
-  aarch64_emit_unlikely_jump (gen_rtx_SET (pc_rtx, x));
+  if (strong_zero_p)
+    {
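+      /* Branch on RVAL != 0 directly; this is matched by the AArch64
+	 compare-and-branch (CBNZ) pattern and leaves the condition flags
+	 untouched inside the loop.  */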
+      x = gen_rtx_NE (VOIDmode, rval, const0_rtx);
+      x = gen_rtx_IF_THEN_ELSE (VOIDmode, x,
+				gen_rtx_LABEL_REF (Pmode, label2), pc_rtx);
+      aarch64_emit_unlikely_jump (gen_rtx_SET (pc_rtx, x));
+    }
+  else
+    {
+      cond = aarch64_gen_compare_reg (NE, rval, oldval);
+      x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
+      x = gen_rtx_IF_THEN_ELSE (VOIDmode, x,
+				gen_rtx_LABEL_REF (Pmode, label2), pc_rtx);
+      aarch64_emit_unlikely_jump (gen_rtx_SET (pc_rtx, x));
+    }
  aarch64_emit_store_exclusive (mode, scratch, mem, newval, model_rtx);

  if (!is_weak)
    {
      x = gen_rtx_NE (VOIDmode, scratch, const0_rtx);
      x = gen_rtx_IF_THEN_ELSE (VOIDmode, x,
				gen_rtx_LABEL_REF (Pmode, label1), pc_rtx);
      aarch64_emit_unlikely_jump (gen_rtx_SET (pc_rtx, x));
    }
  else
    {
      cond = gen_rtx_REG (CCmode, CC_REGNUM);
      x = gen_rtx_COMPARE (CCmode, scratch, const0_rtx);
      emit_insn (gen_rtx_SET (cond, x));
    }

  emit_label (label2);
-
+  /* If we used a CBNZ in the exchange loop emit an explicit compare with RVAL
+     to set the condition flags.  If this is not used it will be removed by
+     later passes.  */
+  if (strong_zero_p)
+    {
+      cond = gen_rtx_REG (CCmode, CC_REGNUM);
+      x = gen_rtx_COMPARE (CCmode, rval, const0_rtx);
+      emit_insn (gen_rtx_SET (cond, x));
+    }
  /* Emit any final barrier needed for a __sync operation.  */
  if (is_mm_sync (model))
    aarch64_emit_post_barrier (model);
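
For illustration (not part of the patch): the shape that takes the strong_zero_p
path is a strong __atomic_compare_exchange_n whose expected value is the
constant zero, compiled with optimization for a target that uses the LL/SC
expansion above (i.e. no LSE atomics).  A minimal sketch; the names and the
lock-acquire framing are made up for the example:

/* Strong compare-and-swap against zero, e.g. a simple lock acquire.  */
static long lock;

_Bool
try_acquire (void)
{
  /* With optimization the expected value is seen as const0_rtx, and the
     weak argument is 0, so strong_zero_p holds in the split above.  */
  long expected = 0;
  return __atomic_compare_exchange_n (&lock, &expected, 1, /* weak */ 0,
				      __ATOMIC_ACQUIRE, __ATOMIC_RELAXED);
}

For this the exchange loop becomes LD[A]XR/CBNZ/ST[L]XR/CBNZ instead of
LD[A]XR/CMP/B.NE/ST[L]XR/CBNZ, and a CMP of RVAL against zero is emitted once
after the loop, to be deleted by later passes if its flags go unused.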