From 97269a2340c812667bc083a1927f646d76d90b37 Mon Sep 17 00:00:00 2001
From: Eric Botcazou
Date: Tue, 28 Jun 2011 22:03:44 +0000
Subject: [PATCH] sync.md (*stbar): Delete.

	* config/sparc/sync.md (*stbar): Delete.
	(*membar_v8): New insn to implement UNSPEC_MEMBAR in SPARC-V8.

From-SVN: r175604
---
 gcc/ChangeLog            |  5 +++++
 gcc/config/sparc/sync.md | 13 +++++++++----
 2 files changed, 14 insertions(+), 4 deletions(-)

diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index 588a0b586eb..c5759af0d48 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,3 +1,8 @@
+2011-06-28  Eric Botcazou
+
+	* config/sparc/sync.md (*stbar): Delete.
+	(*membar_v8): New insn to implement UNSPEC_MEMBAR in SPARC-V8.
+
 2011-06-28  Eric Botcazou
 
 	* tree-ssa-dom.c (initialize_hash_element): Fix oversight.
diff --git a/gcc/config/sparc/sync.md b/gcc/config/sparc/sync.md
index 5dd37d09475..a7380ab3c70 100644
--- a/gcc/config/sparc/sync.md
+++ b/gcc/config/sparc/sync.md
@@ -30,15 +30,20 @@
 {
   operands[0] = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (Pmode));
   MEM_VOLATILE_P (operands[0]) = 1;
-
 })
 
-(define_insn "*stbar"
+;; In V8, loads are blocking and ordered wrt earlier loads, i.e. every load
+;; is virtually followed by a load barrier (membar #LoadStore | #LoadLoad).
+;; In PSO, stbar orders the stores (membar #StoreStore).
+;; In TSO, ldstub orders the stores wrt subsequent loads (membar #StoreLoad).
+;; The combination of the three yields a full memory barrier in all cases.
+(define_insn "*membar_v8"
   [(set (match_operand:BLK 0 "" "")
 	(unspec:BLK [(match_dup 0)] UNSPEC_MEMBAR))]
   "TARGET_V8"
-  "stbar"
-  [(set_attr "type" "multi")])
+  "stbar\n\tldstub\t[%%sp-1], %%g0"
+  [(set_attr "type" "multi")
+   (set_attr "length" "2")])
 
 ;; membar #StoreStore | #LoadStore | #StoreLoad | #LoadLoad
 (define_insn "*membar"
-- 
2.30.2
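
For context, a minimal C usage sketch (not part of the patch): on a SPARC-V8
target, GCC's __sync_synchronize built-in expands the memory_barrier pattern,
so with this change it should emit the stbar/ldstub pair from the new
*membar_v8 insn. The file name, target triplet and flags below are
illustrative assumptions, not taken from the patch.

  /* barrier.c -- illustrative only; compile with something like:
       sparc-elf-gcc -O2 -mcpu=v8 -S barrier.c
     (triplet and flags are assumptions).  */

  int data;
  volatile int flag;

  void
  publish (int value)
  {
    data = value;
    /* Full memory barrier.  Per this patch, on a V8 target this should
       expand via the memory_barrier pattern to the *membar_v8 insn,
       i.e. "stbar" followed by "ldstub [%sp-1], %g0".  */
    __sync_synchronize ();
    flag = 1;
  }

The ldstub added after stbar is what upgrades the old store barrier to a full
barrier under TSO, as explained in the comment block of the new insn.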