author	Ian Lance Taylor <iant@golang.org>	2020-10-24 14:47:44 -0700
committer	Ian Lance Taylor <iant@golang.org>	2020-10-27 13:58:02 -0700
commit	668894d7b584b40ddb46e9e2e2ffa637f4d732e5 (patch)
tree	6c97d325215c8462f375f142d1e91099cc4edb68 /libgo/go/runtime/export_test.go
parent	2b3e722a3ca1b9dcfff1c016e651d0d681de1af0 (diff)
libgo: update to Go 1.15.3 release
Reviewed-on: https://go-review.googlesource.com/c/gofrontend/+/265717
Diffstat (limited to 'libgo/go/runtime/export_test.go')
-rw-r--r--	libgo/go/runtime/export_test.go	24
1 file changed, 13 insertions(+), 11 deletions(-)
diff --git a/libgo/go/runtime/export_test.go b/libgo/go/runtime/export_test.go
index 482d014..369230a 100644
--- a/libgo/go/runtime/export_test.go
+++ b/libgo/go/runtime/export_test.go
@@ -355,7 +355,11 @@ func ReadMemStatsSlow() (base, slow MemStats) {
 		}
 		for i := mheap_.pages.start; i < mheap_.pages.end; i++ {
-			pg := mheap_.pages.chunkOf(i).scavenged.popcntRange(0, pallocChunkPages)
+			chunk := mheap_.pages.tryChunkOf(i)
+			if chunk == nil {
+				continue
+			}
+			pg := chunk.scavenged.popcntRange(0, pallocChunkPages)
 			slow.HeapReleased += uint64(pg) * pageSize
 		}
 		for _, p := range allp {
@@ -752,11 +756,7 @@ func (p *PageAlloc) InUse() []AddrRange {
 // Returns nil if the PallocData's L2 is missing.
 func (p *PageAlloc) PallocData(i ChunkIdx) *PallocData {
 	ci := chunkIdx(i)
-	l2 := (*pageAlloc)(p).chunks[ci.l1()]
-	if l2 == nil {
-		return nil
-	}
-	return (*PallocData)(&l2[ci.l2()])
+	return (*PallocData)((*pageAlloc)(p).tryChunkOf(ci))
 }
 
 // AddrRange represents a range over addresses.
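The tryChunkOf helper that these hunks switch to is defined elsewhere in the runtime and is not part of this diff. Judging from the inline L1/L2 lookup removed from PallocData above, a minimal sketch of its behavior (names follow this file; the exact definition is an assumption) would be:

// Assumed shape of the helper: like chunkOf, but returns nil instead of
// faulting when the chunk's L2 slice has not been mapped yet.
func (p *pageAlloc) tryChunkOf(ci chunkIdx) *pallocData {
	l2 := p.chunks[ci.l1()]
	if l2 == nil {
		return nil
	}
	return &l2[ci.l2()]
}

With a nil-returning lookup, callers such as ReadMemStatsSlow and CheckScavengedBitsCleared below can simply skip unmapped chunks instead of crashing on them.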
@@ -896,7 +896,10 @@ func CheckScavengedBitsCleared(mismatches []BitsMismatch) (n int, ok bool) {
 		lock(&mheap_.lock)
 	chunkLoop:
 		for i := mheap_.pages.start; i < mheap_.pages.end; i++ {
-			chunk := mheap_.pages.chunkOf(i)
+			chunk := mheap_.pages.tryChunkOf(i)
+			if chunk == nil {
+				continue
+			}
 			for j := 0; j < pallocChunkPages/64; j++ {
 				// Run over each 64-bit bitmap section and ensure
 				// scavenged is being cleared properly on allocation.
@@ -977,10 +980,9 @@ func MapHashCheck(m interface{}, k interface{}) (uintptr, uintptr) {
 }
 
 func MSpanCountAlloc(bits []byte) int {
-	s := mspan{
-		nelems:     uintptr(len(bits) * 8),
-		gcmarkBits: (*gcBits)(unsafe.Pointer(&bits[0])),
-	}
+	s := (*mspan)(mheap_.spanalloc.alloc())
+	s.nelems = uintptr(len(bits) * 8)
+	s.gcmarkBits = (*gcBits)(unsafe.Pointer(&bits[0]))
 	return s.countAlloc()
 }
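MSpanCountAlloc treats the caller-supplied byte slice as a span's gcmarkBits and counts the marked objects. A minimal sketch of how a runtime test might drive it, assuming the usual export_test pattern (the test name and bitmap values here are illustrative, not taken from this commit):

package runtime_test

import (
	"runtime"
	"testing"
)

// Hypothetical test: 3 of the first 8 object slots are marked as allocated.
func TestMSpanCountAllocSketch(t *testing.T) {
	bits := make([]byte, 8) // mark bitmap covering 64 object slots
	bits[0] = 0x07          // objects 0, 1, and 2 are marked
	if n := runtime.MSpanCountAlloc(bits); n != 3 {
		t.Fatalf("got %d marked objects, want 3", n)
	}
}

countAlloc popcounts gcmarkBits over nelems = len(bits)*8 object slots, so three set bits yield a count of 3.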