diff options
 gcc/ChangeLog            |  5 +++++
 gcc/config/sparc/sync.md | 13 +++++++++----
 2 files changed, 14 insertions(+), 4 deletions(-)
diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index 588a0b5..c5759af 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,5 +1,10 @@
 2011-06-28  Eric Botcazou  <ebotcazou@adacore.com>
 
+	* config/sparc/sync.md (*stbar): Delete.
+	(*membar_v8): New insn to implement UNSPEC_MEMBAR in SPARC-V8.
+
+2011-06-28  Eric Botcazou  <ebotcazou@adacore.com>
+
 	* tree-ssa-dom.c (initialize_hash_element): Fix oversight.
 
 2011-06-28  Rainer Orth  <ro@CeBiTec.Uni-Bielefeld.DE>
diff --git a/gcc/config/sparc/sync.md b/gcc/config/sparc/sync.md
index 5dd37d0..a7380ab 100644
--- a/gcc/config/sparc/sync.md
+++ b/gcc/config/sparc/sync.md
@@ -30,15 +30,20 @@
 {
   operands[0] = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (Pmode));
   MEM_VOLATILE_P (operands[0]) = 1;
-
 })
 
-(define_insn "*stbar"
+;; In V8, loads are blocking and ordered wrt earlier loads, i.e. every load
+;; is virtually followed by a load barrier (membar #LoadStore | #LoadLoad).
+;; In PSO, stbar orders the stores (membar #StoreStore).
+;; In TSO, ldstub orders the stores wrt subsequent loads (membar #StoreLoad).
+;; The combination of the three yields a full memory barrier in all cases.
+(define_insn "*membar_v8"
   [(set (match_operand:BLK 0 "" "")
 	(unspec:BLK [(match_dup 0)] UNSPEC_MEMBAR))]
   "TARGET_V8"
-  "stbar"
-  [(set_attr "type" "multi")])
+  "stbar\n\tldstub\t[%%sp-1], %%g0"
+  [(set_attr "type" "multi")
+   (set_attr "length" "2")])
 
 ;; membar #StoreStore | #LoadStore | #StoreLoad | #LoadLoad
 (define_insn "*membar"
