+2015-06-01  Matthew Wahab  <matthew.wahab@arm.com>
+
+	PR target/65697
+	* config/aarch64/aarch64.c (aarch64_emit_post_barrier): New.
+	(aarch64_split_atomic_op): Check for __sync memory models, emit
+	appropriate initial loads and final barriers.
+
2015-06-01  Vidya Praveen  <vidya.praveen@atmel.com>

	* Makefile.in: Fix gcov dependencies that should
emit_insn (gen_rtx_SET (bval, x));
}
+/* Emit a barrier appropriate for memory model MODEL at the end of a
+   sequence implementing an atomic operation.  */
+
+static void
+aarch64_emit_post_barrier (enum memmodel model)
+{
+  const enum memmodel base_model = memmodel_base (model);
+
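+  /* Only the __sync variants with acquire or stronger semantics need the
+     trailing barrier; emit it as a full SEQ_CST fence.  */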
+  if (is_mm_sync (model)
+      && (base_model == MEMMODEL_ACQUIRE
+          || base_model == MEMMODEL_ACQ_REL
+          || base_model == MEMMODEL_SEQ_CST))
+    {
+      emit_insn (gen_mem_thread_fence (GEN_INT (MEMMODEL_SEQ_CST)));
+    }
+}
+
/* Split a compare and swap pattern. */
void
{
machine_mode mode = GET_MODE (mem);
machine_mode wmode = (mode == DImode ? DImode : SImode);
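+  /* __sync operations need extra handling below: a relaxed initial load
+     and a final barrier.  */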
+  const enum memmodel model = memmodel_from_int (INTVAL (model_rtx));
+  const bool is_sync = is_mm_sync (model);
rtx_code_label *label;
rtx x;
old_out = new_out;
value = simplify_gen_subreg (wmode, value, mode, 0);
-  aarch64_emit_load_exclusive (mode, old_out, mem, model_rtx);
+  /* The initial load can be relaxed for a __sync operation since a final
+     barrier will be emitted to stop code hoisting.  */
+  if (is_sync)
+    aarch64_emit_load_exclusive (mode, old_out, mem,
+                                 GEN_INT (MEMMODEL_RELAXED));
+  else
+    aarch64_emit_load_exclusive (mode, old_out, mem, model_rtx);
switch (code)
{
x = gen_rtx_IF_THEN_ELSE (VOIDmode, x,
gen_rtx_LABEL_REF (Pmode, label), pc_rtx);
aarch64_emit_unlikely_jump (gen_rtx_SET (pc_rtx, x));
+
+  /* Emit any final barrier needed for a __sync operation.  */
+  if (is_sync)
+    aarch64_emit_post_barrier (model);
}
static void