aboutsummaryrefslogtreecommitdiff
path: root/libgo/go/sync/atomic/atomic_test.go
diff options
context:
space:
mode:
Diffstat (limited to 'libgo/go/sync/atomic/atomic_test.go')
-rw-r--r--  libgo/go/sync/atomic/atomic_test.go  14
1 file changed, 5 insertions(+), 9 deletions(-)
diff --git a/libgo/go/sync/atomic/atomic_test.go b/libgo/go/sync/atomic/atomic_test.go
index 39c40c6..286eadc 100644
--- a/libgo/go/sync/atomic/atomic_test.go
+++ b/libgo/go/sync/atomic/atomic_test.go
@@ -1140,6 +1140,9 @@ func hammerStoreLoadUintptr(t *testing.T, paddr unsafe.Pointer) {
StoreUintptr(addr, new)
}
+//go:nocheckptr
+// This code is just testing that LoadPointer/StorePointer operate
+// atomically; it's not actually calculating pointers.
func hammerStoreLoadPointer(t *testing.T, paddr unsafe.Pointer) {
addr := (*unsafe.Pointer)(paddr)
v := uintptr(LoadPointer(addr))
@@ -1391,15 +1394,8 @@ func TestUnaligned64(t *testing.T) {
// Unaligned 64-bit atomics on 32-bit systems are
// a continual source of pain. Test that on 32-bit systems they crash
// instead of failing silently.
-
- switch runtime.GOARCH {
- default:
- if !arch32 {
- t.Skip("test only runs on 32-bit systems")
- }
- case "amd64p32":
- // amd64p32 can handle unaligned atomics.
- t.Skipf("test not needed on %v", runtime.GOARCH)
+ if !arch32 {
+ t.Skip("test only runs on 32-bit systems")
}
x := make([]uint32, 4)