about summary refs log tree commit diff
path: root/libgo/go/runtime
diff options
context:
space:
mode:
authorIan Lance Taylor <ian@gcc.gnu.org>2017-06-08 19:02:12 +0000
committerIan Lance Taylor <ian@gcc.gnu.org>2017-06-08 19:02:12 +0000
commit20e96b489ea501861c0bb6e509888275d682ed2f (patch)
treef064a2348260e9821e3f50ff2fc8a0247c19be56 /libgo/go/runtime
parent34361776fb238310222e0b24d4f0b51a3717a785 (diff)
downloadgcc-20e96b489ea501861c0bb6e509888275d682ed2f.zip
gcc-20e96b489ea501861c0bb6e509888275d682ed2f.tar.gz
gcc-20e96b489ea501861c0bb6e509888275d682ed2f.tar.bz2
libgo: update to 1.8.3 release
Reviewed-on: https://go-review.googlesource.com/45150
From-SVN: r249033
Diffstat (limited to 'libgo/go/runtime')
-rw-r--r--libgo/go/runtime/malloc.go14
-rw-r--r--libgo/go/runtime/mbitmap.go1
-rw-r--r--libgo/go/runtime/mgc.go2
3 files changed, 15 insertions, 2 deletions
diff --git a/libgo/go/runtime/malloc.go b/libgo/go/runtime/malloc.go
index ed25782..05a69c9 100644
--- a/libgo/go/runtime/malloc.go
+++ b/libgo/go/runtime/malloc.go
@@ -412,10 +412,12 @@ func (h *mheap) sysAlloc(n uintptr) unsafe.Pointer {
if p == 0 {
return nil
}
+ // p can be just about anywhere in the address
+ // space, including before arena_end.
if p == h.arena_end {
h.arena_end = new_end
h.arena_reserved = reserved
- } else if h.arena_start <= p && p+p_size-h.arena_start-1 <= _MaxArena32 {
+ } else if h.arena_end < p && p+p_size-h.arena_start-1 <= _MaxArena32 {
// Keep everything page-aligned.
// Our pages are bigger than hardware pages.
h.arena_end = p + p_size
@@ -425,6 +427,16 @@ func (h *mheap) sysAlloc(n uintptr) unsafe.Pointer {
h.arena_used = used
h.arena_reserved = reserved
} else {
+ // We got a mapping, but it's not
+ // linear with our current arena, so
+ // we can't use it.
+ //
+ // TODO: Make it possible to allocate
+ // from this. We can't decrease
+ // arena_used, but we could introduce
+ // a new variable for the current
+ // allocation position.
+
// We haven't added this allocation to
// the stats, so subtract it from a
// fake stat (but avoid underflow).
diff --git a/libgo/go/runtime/mbitmap.go b/libgo/go/runtime/mbitmap.go
index 2b00493..a7ccc65 100644
--- a/libgo/go/runtime/mbitmap.go
+++ b/libgo/go/runtime/mbitmap.go
@@ -374,6 +374,7 @@ func heapBitsForAddr(addr uintptr) heapBits {
// heapBitsForSpan returns the heapBits for the span base address base.
func heapBitsForSpan(base uintptr) (hbits heapBits) {
if base < mheap_.arena_start || base >= mheap_.arena_used {
+ print("runtime: base ", hex(base), " not in range [", hex(mheap_.arena_start), ",", hex(mheap_.arena_used), ")\n")
throw("heapBitsForSpan: base out of range")
}
return heapBitsForAddr(base)
diff --git a/libgo/go/runtime/mgc.go b/libgo/go/runtime/mgc.go
index f828e7c..5cee12d 100644
--- a/libgo/go/runtime/mgc.go
+++ b/libgo/go/runtime/mgc.go
@@ -1908,7 +1908,7 @@ func gchelper() {
traceGCScanDone()
}
- nproc := work.nproc // work.nproc can change right after we increment work.ndone
+ nproc := atomic.Load(&work.nproc) // work.nproc can change right after we increment work.ndone
if atomic.Xadd(&work.ndone, +1) == nproc-1 {
notewakeup(&work.alldone)
}