author     Ian Lance Taylor <ian@gcc.gnu.org>   2016-09-10 12:21:59 +0000
committer  Ian Lance Taylor <ian@gcc.gnu.org>   2016-09-10 12:21:59 +0000
commit     337fa50b7b0d2964b6ebe2373224b5c1bbb61efb (patch)
tree       b4415ddf1a7fca30e8f28898fc5f1a50f344315c /libgo/go
parent     ec1ea9cfbcd4a1285b4dfa81cb1680aed3ecd7e5 (diff)
runtime/internal/atomic: new package, API copied from Go 1.7
Copy over the Go 1.7 runtime/internal/atomic package, but implement the
functions in C using __atomic functions rather than using the
processor-specific assembler code.

Reviewed-on: https://go-review.googlesource.com/29010

From-SVN: r240070
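The pattern the new C file uses throughout is sketched below: each function carries an __asm__ attribute giving it the mangled Go symbol name (GOSYM_PREFIX plus the package path), so the bodyless Go declarations in gccgo.go and stubs.go bind to it, and is marked no_split_stack so it can run without a Go stack check; the body is then a single GCC __atomic builtin. This is a minimal sketch using a hypothetical Loadint32 helper that is not part of the commit; the real functions follow in the diff.

    #include <stdint.h>

    #include "runtime.h"  /* libgo header; provides GOSYM_PREFIX */

    /* Hypothetical illustration of the pattern used by the commit. */
    int32_t Loadint32 (int32_t *ptr)
      __asm__ (GOSYM_PREFIX "runtime_internal_atomic.Loadint32")
      __attribute__ ((no_split_stack));

    int32_t
    Loadint32 (int32_t *ptr)
    {
      /* Acquire load, the same memory order the Load* functions below use. */
      return __atomic_load_n (ptr, __ATOMIC_ACQUIRE);
    }

Because the __atomic builtins expand to whatever the target provides (native instructions or libatomic calls), one portable C file can stand in for the per-architecture assembler files used by the upstream Go runtime.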
Diffstat (limited to 'libgo/go')
-rw-r--r--  libgo/go/runtime/internal/atomic/atomic.c        237
-rw-r--r--  libgo/go/runtime/internal/atomic/atomic_test.go    67
-rw-r--r--  libgo/go/runtime/internal/atomic/gccgo.go          59
-rw-r--r--  libgo/go/runtime/internal/atomic/stubs.go          33
4 files changed, 396 insertions, 0 deletions
diff --git a/libgo/go/runtime/internal/atomic/atomic.c b/libgo/go/runtime/internal/atomic/atomic.c
new file mode 100644
index 0000000..3393fbe
--- /dev/null
+++ b/libgo/go/runtime/internal/atomic/atomic.c
@@ -0,0 +1,237 @@
+// Copyright 2016 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+#include <stdint.h>
+
+#include "runtime.h"
+
+uint32_t Load (uint32_t *ptr)
+ __asm__ (GOSYM_PREFIX "runtime_internal_atomic.Load")
+ __attribute__ ((no_split_stack));
+
+uint32_t
+Load (uint32_t *ptr)
+{
+ return __atomic_load_n (ptr, __ATOMIC_ACQUIRE);
+}
+
+void *Loadp (void *ptr)
+ __asm__ (GOSYM_PREFIX "runtime_internal_atomic.Loadp")
+ __attribute__ ((no_split_stack));
+
+void *
+Loadp (void *ptr)
+{
+ return __atomic_load_n ((void **) ptr, __ATOMIC_ACQUIRE);
+}
+
+uint64_t Load64 (uint64_t *ptr)
+ __asm__ (GOSYM_PREFIX "runtime_internal_atomic.Load64")
+ __attribute__ ((no_split_stack));
+
+uint64_t
+Load64 (uint64_t *ptr)
+{
+ return __atomic_load_n (ptr, __ATOMIC_ACQUIRE);
+}
+
+uintptr_t Loaduintptr (uintptr_t *ptr)
+ __asm__ (GOSYM_PREFIX "runtime_internal_atomic.Loaduintptr")
+ __attribute__ ((no_split_stack));
+
+uintptr_t
+Loaduintptr (uintptr_t *ptr)
+{
+ return __atomic_load_n (ptr, __ATOMIC_ACQUIRE);
+}
+
+uintgo Loaduint (uintgo *ptr)
+ __asm__ (GOSYM_PREFIX "runtime_internal_atomic.Loaduint")
+ __attribute__ ((no_split_stack));
+
+uintgo
+Loaduint (uintgo *ptr)
+{
+ return __atomic_load_n (ptr, __ATOMIC_ACQUIRE);
+}
+
+int64_t Loadint64 (int64_t *ptr)
+ __asm__ (GOSYM_PREFIX "runtime_internal_atomic.Loadint64")
+ __attribute__ ((no_split_stack));
+
+int64_t
+Loadint64 (int64_t *ptr)
+{
+ return __atomic_load_n (ptr, __ATOMIC_ACQUIRE);
+}
+
+uint32_t Xadd (uint32_t *ptr, int32_t delta)
+ __asm__ (GOSYM_PREFIX "runtime_internal_atomic.Xadd")
+ __attribute__ ((no_split_stack));
+
+uint32_t
+Xadd (uint32_t *ptr, int32_t delta)
+{
+ return __atomic_add_fetch (ptr, (uint32_t) delta, __ATOMIC_SEQ_CST);
+}
+
+uint64_t Xadd64 (uint64_t *ptr, int64_t delta)
+ __asm__ (GOSYM_PREFIX "runtime_internal_atomic.Xadd64")
+ __attribute__ ((no_split_stack));
+
+uint64_t
+Xadd64 (uint64_t *ptr, int64_t delta)
+{
+ return __atomic_add_fetch (ptr, (uint64_t) delta, __ATOMIC_SEQ_CST);
+}
+
+uintptr_t Xadduintptr (uintptr_t *ptr, uintptr_t delta)
+ __asm__ (GOSYM_PREFIX "runtime_internal_atomic.Xadduintptr")
+ __attribute__ ((no_split_stack));
+
+uintptr_t
+Xadduintptr (uintptr_t *ptr, uintptr_t delta)
+{
+ return __atomic_add_fetch (ptr, delta, __ATOMIC_SEQ_CST);
+}
+
+int64_t Xaddint64 (int64_t *ptr, int64_t delta)
+ __asm__ (GOSYM_PREFIX "runtime_internal_atomic.Xaddint64")
+ __attribute__ ((no_split_stack));
+
+int64_t
+Xaddint64 (int64_t *ptr, int64_t delta)
+{
+ return __atomic_add_fetch (ptr, delta, __ATOMIC_SEQ_CST);
+}
+
+uint32_t Xchg (uint32_t *ptr, uint32_t new)
+ __asm__ (GOSYM_PREFIX "runtime_internal_atomic.Xchg")
+ __attribute__ ((no_split_stack));
+
+uint32_t
+Xchg (uint32_t *ptr, uint32_t new)
+{
+ return __atomic_exchange_n (ptr, new, __ATOMIC_SEQ_CST);
+}
+
+uint64_t Xchg64 (uint64_t *ptr, uint64_t new)
+ __asm__ (GOSYM_PREFIX "runtime_internal_atomic.Xchg64")
+ __attribute__ ((no_split_stack));
+
+uint64_t
+Xchg64 (uint64_t *ptr, uint64_t new)
+{
+ return __atomic_exchange_n (ptr, new, __ATOMIC_SEQ_CST);
+}
+
+uintptr_t Xchguintptr (uintptr_t *ptr, uintptr_t new)
+ __asm__ (GOSYM_PREFIX "runtime_internal_atomic.Xchguintptr")
+ __attribute__ ((no_split_stack));
+
+uintptr_t
+Xchguintptr (uintptr_t *ptr, uintptr_t new)
+{
+ return __atomic_exchange_n (ptr, new, __ATOMIC_SEQ_CST);
+}
+
+void And8 (uint8_t *ptr, uint8_t val)
+ __asm__ (GOSYM_PREFIX "runtime_internal_atomic.And8")
+ __attribute__ ((no_split_stack));
+
+void
+And8 (uint8_t *ptr, uint8_t val)
+{
+ __atomic_and_fetch (ptr, val, __ATOMIC_SEQ_CST);
+}
+
+void Or8 (uint8_t *ptr, uint8_t val)
+ __asm__ (GOSYM_PREFIX "runtime_internal_atomic.Or8")
+ __attribute__ ((no_split_stack));
+
+void
+Or8 (uint8_t *ptr, uint8_t val)
+{
+ __atomic_or_fetch (ptr, val, __ATOMIC_SEQ_CST);
+}
+
+_Bool Cas (uint32_t *ptr, uint32_t old, uint32_t new)
+ __asm__ (GOSYM_PREFIX "runtime_internal_atomic.Cas")
+ __attribute__ ((no_split_stack));
+
+_Bool
+Cas (uint32_t *ptr, uint32_t old, uint32_t new)
+{
+ return __atomic_compare_exchange_n (ptr, &old, new, false, __ATOMIC_SEQ_CST, __ATOMIC_RELAXED);
+}
+
+_Bool Cas64 (uint64_t *ptr, uint64_t old, uint64_t new)
+ __asm__ (GOSYM_PREFIX "runtime_internal_atomic.Cas64")
+ __attribute__ ((no_split_stack));
+
+_Bool
+Cas64 (uint64_t *ptr, uint64_t old, uint64_t new)
+{
+ return __atomic_compare_exchange_n (ptr, &old, new, false, __ATOMIC_SEQ_CST, __ATOMIC_RELAXED);
+}
+
+_Bool Casp1 (void **ptr, void *old, void *new)
+ __asm__ (GOSYM_PREFIX "runtime_internal_atomic.Casp1")
+ __attribute__ ((no_split_stack));
+
+_Bool
+Casp1 (void **ptr, void *old, void *new)
+{
+ return __atomic_compare_exchange_n (ptr, &old, new, false, __ATOMIC_SEQ_CST, __ATOMIC_RELAXED);
+}
+
+_Bool Casuintptr (uintptr_t *ptr, uintptr_t old, uintptr_t new)
+ __asm__ (GOSYM_PREFIX "runtime_internal_atomic.Casuintptr")
+ __attribute__ ((no_split_stack));
+
+_Bool
+Casuintptr (uintptr_t *ptr, uintptr_t old, uintptr_t new)
+{
+ return __atomic_compare_exchange_n (ptr, &old, new, false, __ATOMIC_SEQ_CST, __ATOMIC_RELAXED);
+}
+
+void Store (uint32_t *ptr, uint32_t val)
+ __asm__ (GOSYM_PREFIX "runtime_internal_atomic.Store")
+ __attribute__ ((no_split_stack));
+
+void
+Store (uint32_t *ptr, uint32_t val)
+{
+ __atomic_store_n (ptr, val, __ATOMIC_SEQ_CST);
+}
+
+void Store64 (uint64_t *ptr, uint64_t val)
+ __asm__ (GOSYM_PREFIX "runtime_internal_atomic.Store64")
+ __attribute__ ((no_split_stack));
+
+void
+Store64 (uint64_t *ptr, uint64_t val)
+{
+ __atomic_store_n (ptr, val, __ATOMIC_SEQ_CST);
+}
+
+void Storeuintptr (uintptr_t *ptr, uintptr_t val)
+ __asm__ (GOSYM_PREFIX "runtime_internal_atomic.Storeuintptr")
+ __attribute__ ((no_split_stack));
+
+void
+Storeuintptr (uintptr_t *ptr, uintptr_t val)
+{
+ __atomic_store_n (ptr, val, __ATOMIC_SEQ_CST);
+}
+
+void StorepNoWB (void *ptr, void *val)
+ __asm__ (GOSYM_PREFIX "runtime_internal_atomic.StorepNoWB")
+ __attribute__ ((no_split_stack));
+
+void
+StorepNoWB (void *ptr, void *val)
+{
+ __atomic_store_n ((void**) ptr, val, __ATOMIC_SEQ_CST);
+}
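One detail of the Cas, Cas64, Casp1 and Casuintptr wrappers above: __atomic_compare_exchange_n writes the value it actually observed back into its second argument when the exchange fails, but since the wrappers pass the address of their local old parameter, Go callers see only the boolean result and must re-read before retrying. The false argument selects the strong form, which does not fail spuriously. As a hedged sketch of how a retry loop sits on top of the same builtin (illustrative only, not part of the commit), an atomic maximum could look like this:

    #include <stdbool.h>
    #include <stdint.h>

    /* Raise *ptr to at least val, retrying while other writers interfere. */
    static void
    atomic_max (uint32_t *ptr, uint32_t val)
    {
      uint32_t old = __atomic_load_n (ptr, __ATOMIC_RELAXED);
      while (old < val)
        {
          /* On failure, old is refreshed with the value currently in *ptr,
             so the loop re-checks the condition against fresh data.  */
          if (__atomic_compare_exchange_n (ptr, &old, val, false,
                                           __ATOMIC_SEQ_CST, __ATOMIC_RELAXED))
            break;
        }
    }

Go-level callers that need a read-modify-write typically follow the same shape: load, compute, Cas, and retry on failure.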
diff --git a/libgo/go/runtime/internal/atomic/atomic_test.go b/libgo/go/runtime/internal/atomic/atomic_test.go
new file mode 100644
index 0000000..d5dc552
--- /dev/null
+++ b/libgo/go/runtime/internal/atomic/atomic_test.go
@@ -0,0 +1,67 @@
+// Copyright 2015 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package atomic_test
+
+import (
+ "runtime"
+ "runtime/internal/atomic"
+ "testing"
+ "unsafe"
+)
+
+func runParallel(N, iter int, f func()) {
+ defer runtime.GOMAXPROCS(runtime.GOMAXPROCS(int(N)))
+ done := make(chan bool)
+ for i := 0; i < N; i++ {
+ go func() {
+ for j := 0; j < iter; j++ {
+ f()
+ }
+ done <- true
+ }()
+ }
+ for i := 0; i < N; i++ {
+ <-done
+ }
+}
+
+func TestXadduintptr(t *testing.T) {
+ const N = 20
+ const iter = 100000
+ inc := uintptr(100)
+ total := uintptr(0)
+ runParallel(N, iter, func() {
+ atomic.Xadduintptr(&total, inc)
+ })
+ if want := uintptr(N * iter * inc); want != total {
+ t.Fatalf("xadduintpr error, want %d, got %d", want, total)
+ }
+ total = 0
+ runParallel(N, iter, func() {
+ atomic.Xadduintptr(&total, inc)
+ atomic.Xadduintptr(&total, uintptr(-int64(inc)))
+ })
+ if total != 0 {
+ t.Fatalf("xadduintpr total error, want %d, got %d", 0, total)
+ }
+}
+
+// Tests that xadduintptr correctly updates 64-bit values. The place where
+// we actually do so is mstats.go, functions mSysStat{Inc,Dec}.
+func TestXadduintptrOnUint64(t *testing.T) {
+ /* if runtime.BigEndian != 0 {
+ // On big endian architectures, we never use xadduintptr to update
+ // 64-bit values and hence we skip the test. (Note that functions
+ // mSysStat{Inc,Dec} in mstats.go have explicit checks for
+ // big-endianness.)
+ return
+ }*/
+ const inc = 100
+ val := uint64(0)
+ atomic.Xadduintptr((*uintptr)(unsafe.Pointer(&val)), inc)
+ if inc != val {
+ t.Fatalf("xadduintptr should increase lower-order bits, want %d, got %d", inc, val)
+ }
+}
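The commented-out big-endian check above is why the test only exercises the low-order bits: the cast in TestXadduintptrOnUint64 assumes that a uintptr overlaps the low-order bytes of a uint64, which holds on any 64-bit target and on 32-bit little-endian targets, but not on 32-bit big-endian ones. A hedged C illustration of that layout assumption (not part of the commit):

    #include <stdint.h>
    #include <stdio.h>

    int
    main (void)
    {
      uint64_t val = 0;
      /* Mirrors the unsafe.Pointer cast in the test: treat the uint64 as a
         pointer-sized word and add through it atomically.  */
      uintptr_t *p = (uintptr_t *) &val;
      __atomic_add_fetch (p, (uintptr_t) 100, __ATOMIC_SEQ_CST);
      /* Prints 100 on 64-bit and on 32-bit little-endian targets; on 32-bit
         big-endian the addition would land in the high-order word.  */
      printf ("val = %llu\n", (unsigned long long) val);
      return 0;
    }

mstats.go relies on the same property, which is why its mSysStat helpers carry explicit big-endian checks, as the test comment notes.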
diff --git a/libgo/go/runtime/internal/atomic/gccgo.go b/libgo/go/runtime/internal/atomic/gccgo.go
new file mode 100644
index 0000000..6967364
--- /dev/null
+++ b/libgo/go/runtime/internal/atomic/gccgo.go
@@ -0,0 +1,59 @@
+// Copyright 2016 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package atomic
+
+// Stubs for atomic functions that in gccgo are implemented in C.
+
+import "unsafe"
+
+//go:noescape
+func Load(ptr *uint32) uint32
+
+//go:noescape
+func Loadp(ptr unsafe.Pointer) unsafe.Pointer
+
+//go:noescape
+func Load64(ptr *uint64) uint64
+
+//go:noescape
+func Xadd(ptr *uint32, delta int32) uint32
+
+//go:noescape
+func Xadd64(ptr *uint64, delta int64) uint64
+
+//go:noescape
+func Xadduintptr(ptr *uintptr, delta uintptr) uintptr
+
+//go:noescape
+func Xchg(ptr *uint32, new uint32) uint32
+
+//go:noescape
+func Xchg64(ptr *uint64, new uint64) uint64
+
+//go:noescape
+func Xchguintptr(ptr *uintptr, new uintptr) uintptr
+
+//go:noescape
+func And8(ptr *uint8, val uint8)
+
+//go:noescape
+func Or8(ptr *uint8, val uint8)
+
+// NOTE: Do not add atomicxor8 (XOR is not idempotent).
+
+//go:noescape
+func Cas64(ptr *uint64, old, new uint64) bool
+
+//go:noescape
+func Store(ptr *uint32, val uint32)
+
+//go:noescape
+func Store64(ptr *uint64, val uint64)
+
+// StorepNoWB performs *ptr = val atomically and without a write
+// barrier.
+//
+// NO go:noescape annotation; see atomic_pointer.go.
+func StorepNoWB(ptr unsafe.Pointer, val unsafe.Pointer)
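The "XOR is not idempotent" note in gccgo.go above is, at the level of bit arithmetic, the observation that re-applying the same Or or And mask leaves the result unchanged, while re-applying an Xor mask undoes it, so an operation that cannot safely be applied twice is a different kind of primitive. A small illustration of that property (not part of the commit):

    #include <assert.h>
    #include <stdint.h>

    int
    main (void)
    {
      uint8_t m = 0x0f;  /* mask */
      uint8_t x = 0x5a;  /* arbitrary starting value */

      /* Or8/And8-style operations are idempotent: a second application of
         the same mask changes nothing.  */
      assert (((x | m) | m) == (x | m));
      assert (((x & m) & m) == (x & m));

      /* Xor is not: the second application cancels the first.  */
      assert (((x ^ m) ^ m) == x);
      return 0;
    }

Whatever the runtime-level motivation, the gccgo side simply mirrors the upstream Go 1.7 package here and leaves an Xor8 out.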
diff --git a/libgo/go/runtime/internal/atomic/stubs.go b/libgo/go/runtime/internal/atomic/stubs.go
new file mode 100644
index 0000000..497b980
--- /dev/null
+++ b/libgo/go/runtime/internal/atomic/stubs.go
@@ -0,0 +1,33 @@
+// Copyright 2015 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package atomic
+
+import "unsafe"
+
+//go:noescape
+func Cas(ptr *uint32, old, new uint32) bool
+
+// NO go:noescape annotation; see atomic_pointer.go.
+func Casp1(ptr *unsafe.Pointer, old, new unsafe.Pointer) bool
+
+//go:noescape
+func Casuintptr(ptr *uintptr, old, new uintptr) bool
+
+//go:noescape
+func Storeuintptr(ptr *uintptr, new uintptr)
+
+//go:noescape
+func Loaduintptr(ptr *uintptr) uintptr
+
+//go:noescape
+func Loaduint(ptr *uint) uint
+
+// TODO(matloob): Should these functions have the go:noescape annotation?
+
+//go:noescape
+func Loadint64(ptr *int64) int64
+
+//go:noescape
+func Xaddint64(ptr *int64, delta int64) int64