Diffstat (limited to 'libgo/go/reflect')
-rw-r--r--  libgo/go/reflect/all_test.go            | 755
-rw-r--r--  libgo/go/reflect/deepequal.go           |  37
-rw-r--r--  libgo/go/reflect/example_test.go        |  32
-rw-r--r--  libgo/go/reflect/export_test.go         |  11
-rw-r--r--  libgo/go/reflect/makefunc.go            |  11
-rw-r--r--  libgo/go/reflect/makefunc_ffi.go        |   3
-rw-r--r--  libgo/go/reflect/set_test.go            |  24
-rw-r--r--  libgo/go/reflect/swapper.go             |   5
-rw-r--r--  libgo/go/reflect/tostring_test.go       |   2
-rw-r--r--  libgo/go/reflect/type.go                | 115
-rw-r--r--  libgo/go/reflect/value.go               | 455
-rw-r--r--  libgo/go/reflect/visiblefields.go       |   2
-rw-r--r--  libgo/go/reflect/visiblefields_test.go  |  23
13 files changed, 1168 insertions, 307 deletions
diff --git a/libgo/go/reflect/all_test.go b/libgo/go/reflect/all_test.go
index 7fa815f..f9aaa31 100644
--- a/libgo/go/reflect/all_test.go
+++ b/libgo/go/reflect/all_test.go
@@ -10,6 +10,7 @@ import (
"flag"
"fmt"
"go/token"
+ "internal/goarch"
"io"
"math"
"math/rand"
@@ -28,7 +29,7 @@ import (
"unsafe"
)
-var sink interface{}
+var sink any
func TestBool(t *testing.T) {
v := ValueOf(true)
@@ -46,7 +47,7 @@ type T struct {
}
type pair struct {
- i interface{}
+ i any
s string
}
@@ -335,6 +336,115 @@ func TestSetValue(t *testing.T) {
}
}
+func TestMapIterSet(t *testing.T) {
+ m := make(map[string]any, len(valueTests))
+ for _, tt := range valueTests {
+ m[tt.s] = tt.i
+ }
+ v := ValueOf(m)
+
+ k := New(v.Type().Key()).Elem()
+ e := New(v.Type().Elem()).Elem()
+
+ iter := v.MapRange()
+ for iter.Next() {
+ k.SetIterKey(iter)
+ e.SetIterValue(iter)
+ want := m[k.String()]
+ got := e.Interface()
+ if got != want {
+ t.Errorf("%q: want (%T) %v, got (%T) %v", k.String(), want, want, got, got)
+ }
+ if setkey, key := valueToString(k), valueToString(iter.Key()); setkey != key {
+ t.Errorf("MapIter.Key() = %q, MapIter.SetKey() = %q", key, setkey)
+ }
+ if setval, val := valueToString(e), valueToString(iter.Value()); setval != val {
+ t.Errorf("MapIter.Value() = %q, MapIter.SetValue() = %q", val, setval)
+ }
+ }
+
+ got := int(testing.AllocsPerRun(10, func() {
+ iter := v.MapRange()
+ for iter.Next() {
+ k.SetIterKey(iter)
+ e.SetIterValue(iter)
+ }
+ }))
+ // Making a *MapIter allocates. This should be the only allocation.
+ if got != 1 {
+ t.Errorf("wanted 1 alloc, got %d", got)
+ }
+}
+
+func TestCanIntUintFloatComplex(t *testing.T) {
+ type integer int
+ type uinteger uint
+ type float float64
+ type complex complex128
+
+ var ops = [...]string{"CanInt", "CanUint", "CanFloat", "CanComplex"}
+
+ var testCases = []struct {
+ i any
+ want [4]bool
+ }{
+ // signed integer
+ {132, [...]bool{true, false, false, false}},
+ {int8(8), [...]bool{true, false, false, false}},
+ {int16(16), [...]bool{true, false, false, false}},
+ {int32(32), [...]bool{true, false, false, false}},
+ {int64(64), [...]bool{true, false, false, false}},
+ // unsigned integer
+ {uint(132), [...]bool{false, true, false, false}},
+ {uint8(8), [...]bool{false, true, false, false}},
+ {uint16(16), [...]bool{false, true, false, false}},
+ {uint32(32), [...]bool{false, true, false, false}},
+ {uint64(64), [...]bool{false, true, false, false}},
+ {uintptr(0xABCD), [...]bool{false, true, false, false}},
+ // floating-point
+ {float32(256.25), [...]bool{false, false, true, false}},
+ {float64(512.125), [...]bool{false, false, true, false}},
+ // complex
+ {complex64(532.125 + 10i), [...]bool{false, false, false, true}},
+ {complex128(564.25 + 1i), [...]bool{false, false, false, true}},
+ // underlying
+ {integer(-132), [...]bool{true, false, false, false}},
+ {uinteger(132), [...]bool{false, true, false, false}},
+ {float(256.25), [...]bool{false, false, true, false}},
+ {complex(532.125 + 10i), [...]bool{false, false, false, true}},
+ // not-acceptable
+ {"hello world", [...]bool{false, false, false, false}},
+ {new(int), [...]bool{false, false, false, false}},
+ {new(uint), [...]bool{false, false, false, false}},
+ {new(float64), [...]bool{false, false, false, false}},
+ {new(complex64), [...]bool{false, false, false, false}},
+ {new([5]int), [...]bool{false, false, false, false}},
+ {new(integer), [...]bool{false, false, false, false}},
+ {new(map[int]int), [...]bool{false, false, false, false}},
+ {new(chan<- int), [...]bool{false, false, false, false}},
+ {new(func(a int8)), [...]bool{false, false, false, false}},
+ {new(struct{ i int }), [...]bool{false, false, false, false}},
+ }
+
+ for i, tc := range testCases {
+ v := ValueOf(tc.i)
+ got := [...]bool{v.CanInt(), v.CanUint(), v.CanFloat(), v.CanComplex()}
+
+ for j := range tc.want {
+ if got[j] != tc.want[j] {
+ t.Errorf(
+ "#%d: v.%s() returned %t for type %T, want %t",
+ i,
+ ops[j],
+ got[j],
+ tc.i,
+ tc.want[j],
+ )
+ }
+ }
+ }
+}
+
func TestCanSetField(t *testing.T) {
type embed struct{ x, X int }
type Embed struct{ x, X int }
@@ -436,7 +546,7 @@ func TestCanSetField(t *testing.T) {
for _, tc := range tt.cases {
f := tt.val
for _, i := range tc.index {
- if f.Kind() == Ptr {
+ if f.Kind() == Pointer {
f = f.Elem()
}
if i == -1 {
@@ -581,7 +691,7 @@ func TestAll(t *testing.T) {
func TestInterfaceGet(t *testing.T) {
var inter struct {
- E interface{}
+ E any
}
inter.E = 123.456
v1 := ValueOf(&inter)
@@ -594,7 +704,7 @@ func TestInterfaceGet(t *testing.T) {
func TestInterfaceValue(t *testing.T) {
var inter struct {
- E interface{}
+ E any
}
inter.E = 123.456
v1 := ValueOf(&inter)
@@ -610,7 +720,7 @@ func TestInterfaceValue(t *testing.T) {
}
func TestFunctionValue(t *testing.T) {
- var x interface{} = func() {}
+ var x any = func() {}
v := ValueOf(x)
if fmt.Sprint(v.Interface()) != fmt.Sprint(x) {
t.Fatalf("TestFunction returned wrong pointer")
@@ -810,7 +920,7 @@ type Basic struct {
type NotBasic Basic
type DeepEqualTest struct {
- a, b interface{}
+ a, b any
eq bool
}
@@ -824,11 +934,11 @@ var (
type self struct{}
type Loop *Loop
-type Loopy interface{}
+type Loopy any
var loop1, loop2 Loop
var loopy1, loopy2 Loopy
-var cycleMap1, cycleMap2, cycleMap3 map[string]interface{}
+var cycleMap1, cycleMap2, cycleMap3 map[string]any
type structWithSelfPtr struct {
p *structWithSelfPtr
@@ -842,11 +952,11 @@ func init() {
loopy1 = &loopy2
loopy2 = &loopy1
- cycleMap1 = map[string]interface{}{}
+ cycleMap1 = map[string]any{}
cycleMap1["cycle"] = cycleMap1
- cycleMap2 = map[string]interface{}{}
+ cycleMap2 = map[string]any{}
cycleMap2["cycle"] = cycleMap2
- cycleMap3 = map[string]interface{}{}
+ cycleMap3 = map[string]any{}
cycleMap3["different"] = cycleMap3
}
@@ -864,6 +974,9 @@ var deepEqualTests = []DeepEqualTest{
{error(nil), error(nil), true},
{map[int]string{1: "one", 2: "two"}, map[int]string{2: "two", 1: "one"}, true},
{fn1, fn2, true},
+ {[]byte{1, 2, 3}, []byte{1, 2, 3}, true},
+ {[]MyByte{1, 2, 3}, []MyByte{1, 2, 3}, true},
+ {MyBytes{1, 2, 3}, MyBytes{1, 2, 3}, true},
// Inequalities
{1, 2, false},
@@ -884,6 +997,9 @@ var deepEqualTests = []DeepEqualTest{
{fn1, fn3, false},
{fn3, fn3, false},
{[][]int{{1}}, [][]int{{2}}, false},
+ {&structWithSelfPtr{p: &structWithSelfPtr{s: "a"}}, &structWithSelfPtr{p: &structWithSelfPtr{s: "b"}}, false},
+
+ // Fun with floating point.
{math.NaN(), math.NaN(), false},
{&[1]float64{math.NaN()}, &[1]float64{math.NaN()}, false},
{&[1]float64{math.NaN()}, self{}, true},
@@ -891,7 +1007,6 @@ var deepEqualTests = []DeepEqualTest{
{[]float64{math.NaN()}, self{}, true},
{map[float64]float64{math.NaN(): 1}, map[float64]float64{1: 2}, false},
{map[float64]float64{math.NaN(): 1}, self{}, true},
- {&structWithSelfPtr{p: &structWithSelfPtr{s: "a"}}, &structWithSelfPtr{p: &structWithSelfPtr{s: "b"}}, false},
// Nil vs empty: not the same.
{[]int{}, []int(nil), false},
@@ -906,9 +1021,12 @@ var deepEqualTests = []DeepEqualTest{
{int32(1), int64(1), false},
{0.5, "hello", false},
{[]int{1, 2, 3}, [3]int{1, 2, 3}, false},
- {&[3]interface{}{1, 2, 4}, &[3]interface{}{1, 2, "s"}, false},
+ {&[3]any{1, 2, 4}, &[3]any{1, 2, "s"}, false},
{Basic{1, 0.5}, NotBasic{1, 0.5}, false},
{map[uint]string{1: "one", 2: "two"}, map[int]string{2: "two", 1: "one"}, false},
+ {[]byte{1, 2, 3}, []MyByte{1, 2, 3}, false},
+ {[]MyByte{1, 2, 3}, MyBytes{1, 2, 3}, false},
+ {[]byte{1, 2, 3}, MyBytes{1, 2, 3}, false},
// Possible loops.
{&loop1, &loop1, true},
@@ -1008,7 +1126,86 @@ func TestDeepEqualUnexportedMap(t *testing.T) {
}
}
-func check2ndField(x interface{}, offs uintptr, t *testing.T) {
+var deepEqualPerfTests = []struct {
+ x, y any
+}{
+ {x: int8(99), y: int8(99)},
+ {x: []int8{99}, y: []int8{99}},
+ {x: int16(99), y: int16(99)},
+ {x: []int16{99}, y: []int16{99}},
+ {x: int32(99), y: int32(99)},
+ {x: []int32{99}, y: []int32{99}},
+ {x: int64(99), y: int64(99)},
+ {x: []int64{99}, y: []int64{99}},
+ {x: int(999999), y: int(999999)},
+ {x: []int{999999}, y: []int{999999}},
+
+ {x: uint8(99), y: uint8(99)},
+ {x: []uint8{99}, y: []uint8{99}},
+ {x: uint16(99), y: uint16(99)},
+ {x: []uint16{99}, y: []uint16{99}},
+ {x: uint32(99), y: uint32(99)},
+ {x: []uint32{99}, y: []uint32{99}},
+ {x: uint64(99), y: uint64(99)},
+ {x: []uint64{99}, y: []uint64{99}},
+ {x: uint(999999), y: uint(999999)},
+ {x: []uint{999999}, y: []uint{999999}},
+ {x: uintptr(999999), y: uintptr(999999)},
+ {x: []uintptr{999999}, y: []uintptr{999999}},
+
+ {x: float32(1.414), y: float32(1.414)},
+ {x: []float32{1.414}, y: []float32{1.414}},
+ {x: float64(1.414), y: float64(1.414)},
+ {x: []float64{1.414}, y: []float64{1.414}},
+
+ {x: complex64(1.414), y: complex64(1.414)},
+ {x: []complex64{1.414}, y: []complex64{1.414}},
+ {x: complex128(1.414), y: complex128(1.414)},
+ {x: []complex128{1.414}, y: []complex128{1.414}},
+
+ {x: true, y: true},
+ {x: []bool{true}, y: []bool{true}},
+
+ {x: "abcdef", y: "abcdef"},
+ {x: []string{"abcdef"}, y: []string{"abcdef"}},
+
+ {x: []byte("abcdef"), y: []byte("abcdef")},
+ {x: [][]byte{[]byte("abcdef")}, y: [][]byte{[]byte("abcdef")}},
+
+ {x: [6]byte{'a', 'b', 'c', 'a', 'b', 'c'}, y: [6]byte{'a', 'b', 'c', 'a', 'b', 'c'}},
+ {x: [][6]byte{[6]byte{'a', 'b', 'c', 'a', 'b', 'c'}}, y: [][6]byte{[6]byte{'a', 'b', 'c', 'a', 'b', 'c'}}},
+}
+
+func TestDeepEqualAllocs(t *testing.T) {
+ if runtime.Compiler == "gccgo" {
+ t.Skip("conservative GC")
+ }
+ for _, tt := range deepEqualPerfTests {
+ t.Run(ValueOf(tt.x).Type().String(), func(t *testing.T) {
+ got := testing.AllocsPerRun(100, func() {
+ if !DeepEqual(tt.x, tt.y) {
+ t.Errorf("DeepEqual(%v, %v)=false", tt.x, tt.y)
+ }
+ })
+ if int(got) != 0 {
+ t.Errorf("DeepEqual(%v, %v) allocated %d times", tt.x, tt.y, int(got))
+ }
+ })
+ }
+}
+
+func BenchmarkDeepEqual(b *testing.B) {
+ for _, bb := range deepEqualPerfTests {
+ b.Run(ValueOf(bb.x).Type().String(), func(b *testing.B) {
+ b.ReportAllocs()
+ for i := 0; i < b.N; i++ {
+ sink = DeepEqual(bb.x, bb.y)
+ }
+ })
+ }
+}
+
+func check2ndField(x any, offs uintptr, t *testing.T) {
s := ValueOf(x)
f := s.Type().Field(1)
if f.Offset != offs {
@@ -1041,14 +1238,14 @@ func TestAlignment(t *testing.T) {
check2ndField(x1, uintptr(unsafe.Pointer(&x1.f))-uintptr(unsafe.Pointer(&x1)), t)
}
-func Nil(a interface{}, t *testing.T) {
+func Nil(a any, t *testing.T) {
n := ValueOf(a).Field(0)
if !n.IsNil() {
t.Errorf("%v should be nil", a)
}
}
-func NotNil(a interface{}, t *testing.T) {
+func NotNil(a any, t *testing.T) {
n := ValueOf(a).Field(0)
if n.IsNil() {
t.Errorf("value of type %v should not be nil", ValueOf(a).Type().String())
@@ -1058,9 +1255,9 @@ func NotNil(a interface{}, t *testing.T) {
func TestIsNil(t *testing.T) {
// These implement IsNil.
// Wrap in extra struct to hide interface type.
- doNil := []interface{}{
+ doNil := []any{
struct{ x *int }{},
- struct{ x interface{} }{},
+ struct{ x any }{},
struct{ x map[string]int }{},
struct{ x func() bool }{},
struct{ x chan int }{},
@@ -1103,7 +1300,7 @@ func TestIsNil(t *testing.T) {
NotNil(mi, t)
var ii struct {
- x interface{}
+ x any
}
Nil(ii, t)
ii.x = 2
@@ -1119,7 +1316,7 @@ func TestIsNil(t *testing.T) {
func TestIsZero(t *testing.T) {
for i, tt := range []struct {
- x interface{}
+ x any
want bool
}{
// Booleans
@@ -1179,7 +1376,7 @@ func TestIsZero(t *testing.T) {
{(map[string]string)(nil), true},
{map[string]string{}, false},
{make(map[string]string), false},
- // Ptr
+ // Pointer
{(*func())(nil), true},
{(*int)(nil), true},
{new(int), false},
@@ -1231,7 +1428,7 @@ func TestInterfaceExtraction(t *testing.T) {
s.W = os.Stdout
v := Indirect(ValueOf(&s)).Field(0).Interface()
- if v != s.W.(interface{}) {
+ if v != s.W.(any) {
t.Error("Interface() on interface: ", v, s.W)
}
}
@@ -1780,7 +1977,7 @@ func selectWatcher() {
// runSelect runs a single select test.
// It returns the values returned by Select but also returns
// a panic value if the Select panics.
-func runSelect(cases []SelectCase, info []caseInfo) (chosen int, recv Value, recvOK bool, panicErr interface{}) {
+func runSelect(cases []SelectCase, info []caseInfo) (chosen int, recv Value, recvOK bool, panicErr any) {
defer func() {
panicErr = recover()
@@ -2320,6 +2517,11 @@ func TestMethodValue(t *testing.T) {
p := Point{3, 4}
var i int64
+ // Check that method value have the same underlying code pointers.
+ if p1, p2 := ValueOf(Point{1, 1}).Method(1), ValueOf(Point{2, 2}).Method(1); p1.Pointer() != p2.Pointer() {
+ t.Errorf("methodValueCall mismatched: %v - %v", p1, p2)
+ }
+
// Curried method of value.
tfunc := TypeOf((func(int) int)(nil))
v := ValueOf(p).Method(1)
@@ -2569,7 +2771,7 @@ func TestMethod5(t *testing.T) {
var TinterType = TypeOf(new(Tinter)).Elem()
- CheckI := func(name string, i interface{}, inc int) {
+ CheckI := func(name string, i any, inc int) {
v := ValueOf(i)
CheckV(name, v, inc)
CheckV("(i="+name+")", v.Convert(TinterType), inc)
@@ -2618,7 +2820,7 @@ func TestInterfaceSet(t *testing.T) {
p := &Point{3, 4}
var s struct {
- I interface{}
+ I any
P interface {
Dist(int) int
}
@@ -2660,7 +2862,7 @@ func TestAnonymousFields(t *testing.T) {
}
type FTest struct {
- s interface{}
+ s any
name string
index []int
value int
@@ -2891,7 +3093,7 @@ func TestImportPath(t *testing.T) {
{TypeOf([]byte(nil)), ""},
{TypeOf([]rune(nil)), ""},
{TypeOf(string("")), ""},
- {TypeOf((*interface{})(nil)).Elem(), ""},
+ {TypeOf((*any)(nil)).Elem(), ""},
{TypeOf((*byte)(nil)), ""},
{TypeOf((*rune)(nil)), ""},
{TypeOf((*int64)(nil)), ""},
@@ -3028,11 +3230,11 @@ func (*outer) M() {}
func TestNestedMethods(t *testing.T) {
t.Skip("fails on gccgo due to function wrappers")
typ := TypeOf((*outer)(nil))
- if typ.NumMethod() != 1 || typ.Method(0).Func.Pointer() != ValueOf((*outer).M).Pointer() {
+ if typ.NumMethod() != 1 || typ.Method(0).Func.UnsafePointer() != ValueOf((*outer).M).UnsafePointer() {
t.Errorf("Wrong method table for outer: (M=%p)", (*outer).M)
for i := 0; i < typ.NumMethod(); i++ {
m := typ.Method(i)
- t.Errorf("\t%d: %s %#x\n", i, m.Name, m.Func.Pointer())
+ t.Errorf("\t%d: %s %p\n", i, m.Name, m.Func.UnsafePointer())
}
}
}
@@ -3072,11 +3274,11 @@ func (i *InnerInt) M() int {
func TestEmbeddedMethods(t *testing.T) {
/* This part of the test fails on gccgo due to function wrappers.
typ := TypeOf((*OuterInt)(nil))
- if typ.NumMethod() != 1 || typ.Method(0).Func.Pointer() != ValueOf((*OuterInt).M).Pointer() {
+ if typ.NumMethod() != 1 || typ.Method(0).Func.UnsafePointer() != ValueOf((*OuterInt).M).UnsafePointer() {
t.Errorf("Wrong method table for OuterInt: (m=%p)", (*OuterInt).M)
for i := 0; i < typ.NumMethod(); i++ {
m := typ.Method(i)
- t.Errorf("\t%d: %s %#x\n", i, m.Name, m.Func.Pointer())
+ t.Errorf("\t%d: %s %p\n", i, m.Name, m.Func.UnsafePointer())
}
}
*/
@@ -3097,7 +3299,7 @@ func TestEmbeddedMethods(t *testing.T) {
}
}
-type FuncDDD func(...interface{}) error
+type FuncDDD func(...any) error
func (f FuncDDD) M() {}
@@ -3120,22 +3322,22 @@ func TestPtrTo(t *testing.T) {
typ := TypeOf(z)
for i = 0; i < 100; i++ {
- typ = PtrTo(typ)
+ typ = PointerTo(typ)
}
for i = 0; i < 100; i++ {
typ = typ.Elem()
}
if typ != TypeOf(z) {
- t.Errorf("after 100 PtrTo and Elem, have %s, want %s", typ, TypeOf(z))
+ t.Errorf("after 100 PointerTo and Elem, have %s, want %s", typ, TypeOf(z))
}
}
func TestPtrToGC(t *testing.T) {
type T *uintptr
tt := TypeOf(T(nil))
- pt := PtrTo(tt)
+ pt := PointerTo(tt)
const n = 100
- var x []interface{}
+ var x []any
for i := 0; i < n; i++ {
v := New(pt)
p := new(*uintptr)
@@ -3167,11 +3369,11 @@ func BenchmarkPtrTo(b *testing.B) {
}
b.ResetTimer()
- // Now benchmark calling PtrTo on it: we'll have to hit the ptrMap cache on
+ // Now benchmark calling PointerTo on it: we'll have to hit the ptrMap cache on
// every call.
b.RunParallel(func(pb *testing.PB) {
for pb.Next() {
- PtrTo(t)
+ PointerTo(t)
}
})
}
@@ -3259,7 +3461,7 @@ func noAlloc(t *testing.T, n int, f func(int)) {
func TestAllocations(t *testing.T) {
noAlloc(t, 100, func(j int) {
- var i interface{}
+ var i any
var v Value
// We can uncomment this when compiler escape analysis
@@ -3339,11 +3541,11 @@ func TestSlice(t *testing.T) {
rv := ValueOf(&xs).Elem()
rv = rv.Slice(3, 4)
- ptr2 := rv.Pointer()
+ ptr2 := rv.UnsafePointer()
rv = rv.Slice(5, 5)
- ptr3 := rv.Pointer()
+ ptr3 := rv.UnsafePointer()
if ptr3 != ptr2 {
- t.Errorf("xs.Slice(3,4).Slice3(5,5).Pointer() = %#x, want %#x", ptr3, ptr2)
+ t.Errorf("xs.Slice(3,4).Slice3(5,5).UnsafePointer() = %p, want %p", ptr3, ptr2)
}
}
@@ -3386,11 +3588,11 @@ func TestSlice3(t *testing.T) {
rv = ValueOf(&xs).Elem()
rv = rv.Slice3(3, 5, 7)
- ptr2 := rv.Pointer()
+ ptr2 := rv.UnsafePointer()
rv = rv.Slice3(4, 4, 4)
- ptr3 := rv.Pointer()
+ ptr3 := rv.UnsafePointer()
if ptr3 != ptr2 {
- t.Errorf("xs.Slice3(3,5,7).Slice3(4,4,4).Pointer() = %#x, want %#x", ptr3, ptr2)
+ t.Errorf("xs.Slice3(3,5,7).Slice3(4,4,4).UnsafePointer() = %p, want %p", ptr3, ptr2)
}
}
@@ -3435,7 +3637,7 @@ func TestVariadic(t *testing.T) {
}
b.Reset()
- V(fmt.Fprintf).CallSlice([]Value{V(&b), V("%s, %d world"), V([]interface{}{"hello", 42})})
+ V(fmt.Fprintf).CallSlice([]Value{V(&b), V("%s, %d world"), V([]any{"hello", 42})})
if b.String() != "hello, 42 world" {
t.Errorf("after Fprintf CallSlice: %q != %q", b.String(), "hello 42 world")
}
@@ -3778,7 +3980,7 @@ func shouldPanic(expect string, f func()) {
f()
}
-func isNonNil(x interface{}) {
+func isNonNil(x any) {
if x == nil {
panic("nil interface")
}
@@ -3804,7 +4006,7 @@ func TestAlias(t *testing.T) {
var V = ValueOf
-func EmptyInterfaceV(x interface{}) Value {
+func EmptyInterfaceV(x any) Value {
return ValueOf(&x).Elem()
}
@@ -4245,7 +4447,7 @@ var convertTests = []struct {
{V((map[uint]bool)(nil)), V((map[uint]bool)(nil))},
{V([]uint(nil)), V([]uint(nil))},
{V([]int(nil)), V([]int(nil))},
- {V(new(interface{})), V(new(interface{}))},
+ {V(new(any)), V(new(any))},
{V(new(io.Reader)), V(new(io.Reader))},
{V(new(io.Writer)), V(new(io.Writer))},
@@ -4397,8 +4599,17 @@ func TestConvertPanic(t *testing.T) {
var gFloat32 float32
+const snan uint32 = 0x7f800001
+
func TestConvertNaNs(t *testing.T) {
- const snan uint32 = 0x7f800001
+ // Test to see if a store followed by a load of a signaling NaN
+ // maintains the signaling bit. (This used to fail on the 387 port.)
+ gFloat32 = math.Float32frombits(snan)
+ runtime.Gosched() // make sure we don't optimize the store/load away
+ if got := math.Float32bits(gFloat32); got != snan {
+ t.Errorf("store/load of sNaN not faithful, got %x want %x", got, snan)
+ }
+ // Test reflect's conversion between float32s. See issue 36400.
type myFloat32 float32
x := V(myFloat32(math.Float32frombits(snan)))
y := x.Convert(TypeOf(float32(0)))
@@ -4438,7 +4649,7 @@ var comparableTests = []struct {
{TypeOf(NonComparableStruct{}), false},
{TypeOf([10]map[string]int{}), false},
{TypeOf([10]string{}), true},
- {TypeOf(new(interface{})).Elem(), true},
+ {TypeOf(new(any)).Elem(), true},
}
func TestComparable(t *testing.T) {
@@ -4488,7 +4699,7 @@ func TestOverflow(t *testing.T) {
}
}
-func checkSameType(t *testing.T, x Type, y interface{}) {
+func checkSameType(t *testing.T, x Type, y any) {
if x != TypeOf(y) || TypeOf(Zero(x).Interface()) != TypeOf(y) {
t.Errorf("did not find preexisting type for %s (vs %s)", TypeOf(x), TypeOf(y))
}
@@ -4498,73 +4709,73 @@ func TestArrayOf(t *testing.T) {
// check construction and use of type not in binary
tests := []struct {
n int
- value func(i int) interface{}
+ value func(i int) any
comparable bool
want string
}{
{
n: 0,
- value: func(i int) interface{} { type Tint int; return Tint(i) },
+ value: func(i int) any { type Tint int; return Tint(i) },
comparable: true,
want: "[]",
},
{
n: 10,
- value: func(i int) interface{} { type Tint int; return Tint(i) },
+ value: func(i int) any { type Tint int; return Tint(i) },
comparable: true,
want: "[0 1 2 3 4 5 6 7 8 9]",
},
{
n: 10,
- value: func(i int) interface{} { type Tfloat float64; return Tfloat(i) },
+ value: func(i int) any { type Tfloat float64; return Tfloat(i) },
comparable: true,
want: "[0 1 2 3 4 5 6 7 8 9]",
},
{
n: 10,
- value: func(i int) interface{} { type Tstring string; return Tstring(strconv.Itoa(i)) },
+ value: func(i int) any { type Tstring string; return Tstring(strconv.Itoa(i)) },
comparable: true,
want: "[0 1 2 3 4 5 6 7 8 9]",
},
{
n: 10,
- value: func(i int) interface{} { type Tstruct struct{ V int }; return Tstruct{i} },
+ value: func(i int) any { type Tstruct struct{ V int }; return Tstruct{i} },
comparable: true,
want: "[{0} {1} {2} {3} {4} {5} {6} {7} {8} {9}]",
},
{
n: 10,
- value: func(i int) interface{} { type Tint int; return []Tint{Tint(i)} },
+ value: func(i int) any { type Tint int; return []Tint{Tint(i)} },
comparable: false,
want: "[[0] [1] [2] [3] [4] [5] [6] [7] [8] [9]]",
},
{
n: 10,
- value: func(i int) interface{} { type Tint int; return [1]Tint{Tint(i)} },
+ value: func(i int) any { type Tint int; return [1]Tint{Tint(i)} },
comparable: true,
want: "[[0] [1] [2] [3] [4] [5] [6] [7] [8] [9]]",
},
{
n: 10,
- value: func(i int) interface{} { type Tstruct struct{ V [1]int }; return Tstruct{[1]int{i}} },
+ value: func(i int) any { type Tstruct struct{ V [1]int }; return Tstruct{[1]int{i}} },
comparable: true,
want: "[{[0]} {[1]} {[2]} {[3]} {[4]} {[5]} {[6]} {[7]} {[8]} {[9]}]",
},
{
n: 10,
- value: func(i int) interface{} { type Tstruct struct{ V []int }; return Tstruct{[]int{i}} },
+ value: func(i int) any { type Tstruct struct{ V []int }; return Tstruct{[]int{i}} },
comparable: false,
want: "[{[0]} {[1]} {[2]} {[3]} {[4]} {[5]} {[6]} {[7]} {[8]} {[9]}]",
},
{
n: 10,
- value: func(i int) interface{} { type TstructUV struct{ U, V int }; return TstructUV{i, i} },
+ value: func(i int) any { type TstructUV struct{ U, V int }; return TstructUV{i, i} },
comparable: true,
want: "[{0 0} {1 1} {2 2} {3 3} {4 4} {5 5} {6 6} {7 7} {8 8} {9 9}]",
},
{
n: 10,
- value: func(i int) interface{} {
+ value: func(i int) any {
type TstructUV struct {
U int
V float64
@@ -4625,7 +4836,7 @@ func TestArrayOfGC(t *testing.T) {
type T *uintptr
tt := TypeOf(T(nil))
const n = 100
- var x []interface{}
+ var x []any
for i := 0; i < n; i++ {
v := New(ArrayOf(n, tt)).Elem()
for j := 0; j < v.Len(); j++ {
@@ -4701,7 +4912,7 @@ func TestArrayOfDirectIface(t *testing.T) {
v1 := ValueOf(&i1).Elem()
p1 := v1.InterfaceData()[1]
- i2 := Zero(ArrayOf(1, PtrTo(TypeOf(int8(0))))).Interface()
+ i2 := Zero(ArrayOf(1, PointerTo(TypeOf(int8(0))))).Interface()
v2 := ValueOf(&i2).Elem()
p2 := v2.InterfaceData()[1]
@@ -4719,7 +4930,7 @@ func TestArrayOfDirectIface(t *testing.T) {
v1 := ValueOf(&i1).Elem()
p1 := v1.InterfaceData()[1]
- i2 := Zero(ArrayOf(0, PtrTo(TypeOf(int8(0))))).Interface()
+ i2 := Zero(ArrayOf(0, PointerTo(TypeOf(int8(0))))).Interface()
v2 := ValueOf(&i2).Elem()
p2 := v2.InterfaceData()[1]
@@ -4789,7 +5000,7 @@ func TestSliceOfGC(t *testing.T) {
tt := TypeOf(T(nil))
st := SliceOf(tt)
const n = 100
- var x []interface{}
+ var x []any
for i := 0; i < n; i++ {
v := MakeSlice(st, n, n)
for j := 0; j < v.Len(); j++ {
@@ -4980,7 +5191,7 @@ func TestStructOf(t *testing.T) {
checkSameType(t, StructOf(fields[2:3]), struct{ Y uint64 }{})
// gccgo used to fail this test.
- type structFieldType interface{}
+ type structFieldType any
checkSameType(t,
StructOf([]StructField{
{
@@ -5156,7 +5367,7 @@ func TestStructOfGC(t *testing.T) {
st := StructOf(fields)
const n = 10000
- var x []interface{}
+ var x []any
for i := 0; i < n; i++ {
v := New(st).Elem()
for j := 0; j < v.NumField(); j++ {
@@ -5424,8 +5635,8 @@ func TestStructOfWithInterface(t *testing.T) {
},
{
name: "StructI",
- typ: PtrTo(TypeOf(StructI(want))),
- val: ValueOf(func() interface{} {
+ typ: PointerTo(TypeOf(StructI(want))),
+ val: ValueOf(func() any {
v := StructI(want)
return &v
}()),
@@ -5433,8 +5644,8 @@ func TestStructOfWithInterface(t *testing.T) {
},
{
name: "StructIPtr",
- typ: PtrTo(TypeOf(StructIPtr(want))),
- val: ValueOf(func() interface{} {
+ typ: PointerTo(TypeOf(StructIPtr(want))),
+ val: ValueOf(func() any {
v := StructIPtr(want)
return &v
}()),
@@ -5522,7 +5733,7 @@ func TestStructOfWithInterface(t *testing.T) {
fields := []StructField{{
Name: "StructIPtr",
Anonymous: true,
- Type: PtrTo(TypeOf(StructIPtr(want))),
+ Type: PointerTo(TypeOf(StructIPtr(want))),
}}
rt := StructOf(fields)
rv := New(rt).Elem()
@@ -5536,7 +5747,7 @@ func TestStructOfWithInterface(t *testing.T) {
fields = []StructField{{
Name: "SettableStruct",
Anonymous: true,
- Type: PtrTo(TypeOf(SettableStruct{})),
+ Type: PointerTo(TypeOf(SettableStruct{})),
}}
rt = StructOf(fields)
rv = New(rt).Elem()
@@ -5552,7 +5763,7 @@ func TestStructOfWithInterface(t *testing.T) {
{
Name: "SettableStruct",
Anonymous: true,
- Type: PtrTo(TypeOf(SettableStruct{})),
+ Type: PointerTo(TypeOf(SettableStruct{})),
},
{
Name: "EmptyStruct",
@@ -5701,7 +5912,7 @@ func TestChanOfGC(t *testing.T) {
// so we have to save pointers to channels in x; the pointer code will
// use the gc info in the newly constructed chan type.
const n = 100
- var x []interface{}
+ var x []any
for i := 0; i < n; i++ {
v := MakeChan(ct, n)
for j := 0; j < n; j++ {
@@ -5759,7 +5970,7 @@ func TestMapOfGCKeys(t *testing.T) {
// so we have to save pointers to maps in x; the pointer code will
// use the gc info in the newly constructed map type.
const n = 100
- var x []interface{}
+ var x []any
for i := 0; i < n; i++ {
v := MakeMap(mt)
for j := 0; j < n; j++ {
@@ -5797,7 +6008,7 @@ func TestMapOfGCValues(t *testing.T) {
// so we have to save pointers to maps in x; the pointer code will
// use the gc info in the newly constructed map type.
const n = 100
- var x []interface{}
+ var x []any
for i := 0; i < n; i++ {
v := MakeMap(mt)
for j := 0; j < n; j++ {
@@ -5865,7 +6076,7 @@ func TestFuncOf(t *testing.T) {
testCases := []struct {
in, out []Type
variadic bool
- want interface{}
+ want any
}{
{in: []Type{TypeOf(T1(0))}, want: (func(T1))(nil)},
{in: []Type{TypeOf(int(0))}, want: (func(int))(nil)},
@@ -6292,6 +6503,29 @@ func TestCallMethodJump(t *testing.T) {
*CallGC = false
}
+func TestCallArgLive(t *testing.T) {
+ type T struct{ X, Y *string } // pointerful aggregate
+
+ F := func(t T) { *t.X = "ok" }
+
+ // In reflect.Value.Call, trigger a garbage collection in reflect.call
+ // between marshaling argument and the actual call.
+ *CallGC = true
+
+ x := new(string)
+ runtime.SetFinalizer(x, func(p *string) {
+ if *p != "ok" {
+ t.Errorf("x dead prematurely")
+ }
+ })
+ v := T{x, nil}
+
+ ValueOf(F).Call([]Value{ValueOf(v)})
+
+ // Stop garbage collecting during reflect.call.
+ *CallGC = false
+}
+
func TestMakeFuncStackCopy(t *testing.T) {
target := func(in []Value) []Value {
runtime.GC()
@@ -6335,7 +6569,7 @@ func TestValueString(t *testing.T) {
func TestInvalid(t *testing.T) {
// Used to have inconsistency between IsValid() and Kind() != Invalid.
- type T struct{ v interface{} }
+ type T struct{ v any }
v := ValueOf(T{}).Field(0)
if v.IsValid() != true || v.Kind() != Interface {
@@ -6353,7 +6587,7 @@ func TestLargeGCProg(t *testing.T) {
fv.Call([]Value{ValueOf([256]*byte{})})
}
-func fieldIndexRecover(t Type, i int) (recovered interface{}) {
+func fieldIndexRecover(t Type, i int) (recovered any) {
defer func() {
recovered = recover()
}()
@@ -6490,10 +6724,10 @@ func clobber() {
func TestFuncLayout(t *testing.T) {
align := func(x uintptr) uintptr {
- return (x + PtrSize - 1) &^ (PtrSize - 1)
+ return (x + goarch.PtrSize - 1) &^ (goarch.PtrSize - 1)
}
var r []byte
- if PtrSize == 4 {
+ if goarch.PtrSize == 4 {
r = []byte{0, 0, 0, 1}
} else {
r = []byte{0, 0, 1}
@@ -6514,56 +6748,56 @@ func TestFuncLayout(t *testing.T) {
tests := []test{
{
typ: ValueOf(func(a, b string) string { return "" }).Type(),
- size: 6 * PtrSize,
- argsize: 4 * PtrSize,
- retOffset: 4 * PtrSize,
+ size: 6 * goarch.PtrSize,
+ argsize: 4 * goarch.PtrSize,
+ retOffset: 4 * goarch.PtrSize,
stack: []byte{1, 0, 1, 0, 1},
gc: []byte{1, 0, 1, 0, 1},
},
{
typ: ValueOf(func(a, b, c uint32, p *byte, d uint16) {}).Type(),
- size: align(align(3*4) + PtrSize + 2),
- argsize: align(3*4) + PtrSize + 2,
- retOffset: align(align(3*4) + PtrSize + 2),
+ size: align(align(3*4) + goarch.PtrSize + 2),
+ argsize: align(3*4) + goarch.PtrSize + 2,
+ retOffset: align(align(3*4) + goarch.PtrSize + 2),
stack: r,
gc: r,
},
{
- typ: ValueOf(func(a map[int]int, b uintptr, c interface{}) {}).Type(),
- size: 4 * PtrSize,
- argsize: 4 * PtrSize,
- retOffset: 4 * PtrSize,
+ typ: ValueOf(func(a map[int]int, b uintptr, c any) {}).Type(),
+ size: 4 * goarch.PtrSize,
+ argsize: 4 * goarch.PtrSize,
+ retOffset: 4 * goarch.PtrSize,
stack: []byte{1, 0, 1, 1},
gc: []byte{1, 0, 1, 1},
},
{
typ: ValueOf(func(a S) {}).Type(),
- size: 4 * PtrSize,
- argsize: 4 * PtrSize,
- retOffset: 4 * PtrSize,
+ size: 4 * goarch.PtrSize,
+ argsize: 4 * goarch.PtrSize,
+ retOffset: 4 * goarch.PtrSize,
stack: []byte{0, 0, 1, 1},
gc: []byte{0, 0, 1, 1},
},
{
rcvr: ValueOf((*byte)(nil)).Type(),
typ: ValueOf(func(a uintptr, b *int) {}).Type(),
- size: 3 * PtrSize,
- argsize: 3 * PtrSize,
- retOffset: 3 * PtrSize,
+ size: 3 * goarch.PtrSize,
+ argsize: 3 * goarch.PtrSize,
+ retOffset: 3 * goarch.PtrSize,
stack: []byte{1, 0, 1},
gc: []byte{1, 0, 1},
},
{
typ: ValueOf(func(a uintptr) {}).Type(),
- size: PtrSize,
- argsize: PtrSize,
- retOffset: PtrSize,
+ size: goarch.PtrSize,
+ argsize: goarch.PtrSize,
+ retOffset: goarch.PtrSize,
stack: []byte{},
gc: []byte{},
},
{
typ: ValueOf(func() uintptr { return 0 }).Type(),
- size: PtrSize,
+ size: goarch.PtrSize,
argsize: 0,
retOffset: 0,
stack: []byte{},
@@ -6572,9 +6806,9 @@ func TestFuncLayout(t *testing.T) {
{
rcvr: ValueOf(uintptr(0)).Type(),
typ: ValueOf(func(a uintptr) {}).Type(),
- size: 2 * PtrSize,
- argsize: 2 * PtrSize,
- retOffset: 2 * PtrSize,
+ size: 2 * goarch.PtrSize,
+ argsize: 2 * goarch.PtrSize,
+ retOffset: 2 * goarch.PtrSize,
stack: []byte{1},
gc: []byte{1},
// Note: this one is tricky, as the receiver is not a pointer. But we
@@ -6636,7 +6870,7 @@ func verifyGCBitsSlice(t *testing.T, typ Type, cap int, bits []byte) {
// repeat a bitmap for a small array or executing a repeat in
// a GC program.
val := MakeSlice(typ, 0, cap)
- data := NewAt(ArrayOf(cap, typ), unsafe.Pointer(val.Pointer()))
+ data := NewAt(ArrayOf(cap, typ), val.UnsafePointer())
heapBits := GCBits(data.Interface())
// Repeat the bitmap for the slice size, trimming scalars in
// the last element.
@@ -6779,14 +7013,14 @@ func TestGCBits(t *testing.T) {
verifyGCBits(t, MapOf(ArrayOf(10000, Tscalarptr), Tscalar), lit(1))
verifyGCBits(t, TypeOf((*[10000]Xscalar)(nil)), lit(1))
- verifyGCBits(t, PtrTo(ArrayOf(10000, Tscalar)), lit(1))
+ verifyGCBits(t, PointerTo(ArrayOf(10000, Tscalar)), lit(1))
verifyGCBits(t, TypeOf(([][10000]Xscalar)(nil)), lit(1))
verifyGCBits(t, SliceOf(ArrayOf(10000, Tscalar)), lit(1))
- hdr := make([]byte, 8/PtrSize)
+ hdr := make([]byte, 8/goarch.PtrSize)
- verifyMapBucket := func(t *testing.T, k, e Type, m interface{}, want []byte) {
+ verifyMapBucket := func(t *testing.T, k, e Type, m any, want []byte) {
verifyGCBits(t, MapBucketOf(k, e), want)
verifyGCBits(t, CachedBucketOf(TypeOf(m)), want)
}
@@ -6800,7 +7034,7 @@ func TestGCBits(t *testing.T) {
join(hdr, rep(8, lit(0, 1)), rep(8, lit(1)), lit(1)))
verifyMapBucket(t, Tint64, Tptr,
map[int64]Xptr(nil),
- join(hdr, rep(8, rep(8/PtrSize, lit(0))), rep(8, lit(1)), lit(1)))
+ join(hdr, rep(8, rep(8/goarch.PtrSize, lit(0))), rep(8, lit(1)), lit(1)))
verifyMapBucket(t,
Tscalar, Tscalar,
map[Xscalar]Xscalar(nil),
@@ -6810,20 +7044,20 @@ func TestGCBits(t *testing.T) {
map[[2]Xscalarptr][3]Xptrscalar(nil),
join(hdr, rep(8*2, lit(0, 1)), rep(8*3, lit(1, 0)), lit(1)))
verifyMapBucket(t,
- ArrayOf(64/PtrSize, Tscalarptr), ArrayOf(64/PtrSize, Tptrscalar),
- map[[64 / PtrSize]Xscalarptr][64 / PtrSize]Xptrscalar(nil),
- join(hdr, rep(8*64/PtrSize, lit(0, 1)), rep(8*64/PtrSize, lit(1, 0)), lit(1)))
+ ArrayOf(64/goarch.PtrSize, Tscalarptr), ArrayOf(64/goarch.PtrSize, Tptrscalar),
+ map[[64 / goarch.PtrSize]Xscalarptr][64 / goarch.PtrSize]Xptrscalar(nil),
+ join(hdr, rep(8*64/goarch.PtrSize, lit(0, 1)), rep(8*64/goarch.PtrSize, lit(1, 0)), lit(1)))
verifyMapBucket(t,
- ArrayOf(64/PtrSize+1, Tscalarptr), ArrayOf(64/PtrSize, Tptrscalar),
- map[[64/PtrSize + 1]Xscalarptr][64 / PtrSize]Xptrscalar(nil),
- join(hdr, rep(8, lit(1)), rep(8*64/PtrSize, lit(1, 0)), lit(1)))
+ ArrayOf(64/goarch.PtrSize+1, Tscalarptr), ArrayOf(64/goarch.PtrSize, Tptrscalar),
+ map[[64/goarch.PtrSize + 1]Xscalarptr][64 / goarch.PtrSize]Xptrscalar(nil),
+ join(hdr, rep(8, lit(1)), rep(8*64/goarch.PtrSize, lit(1, 0)), lit(1)))
verifyMapBucket(t,
- ArrayOf(64/PtrSize, Tscalarptr), ArrayOf(64/PtrSize+1, Tptrscalar),
- map[[64 / PtrSize]Xscalarptr][64/PtrSize + 1]Xptrscalar(nil),
- join(hdr, rep(8*64/PtrSize, lit(0, 1)), rep(8, lit(1)), lit(1)))
+ ArrayOf(64/goarch.PtrSize, Tscalarptr), ArrayOf(64/goarch.PtrSize+1, Tptrscalar),
+ map[[64 / goarch.PtrSize]Xscalarptr][64/goarch.PtrSize + 1]Xptrscalar(nil),
+ join(hdr, rep(8*64/goarch.PtrSize, lit(0, 1)), rep(8, lit(1)), lit(1)))
verifyMapBucket(t,
- ArrayOf(64/PtrSize+1, Tscalarptr), ArrayOf(64/PtrSize+1, Tptrscalar),
- map[[64/PtrSize + 1]Xscalarptr][64/PtrSize + 1]Xptrscalar(nil),
+ ArrayOf(64/goarch.PtrSize+1, Tscalarptr), ArrayOf(64/goarch.PtrSize+1, Tptrscalar),
+ map[[64/goarch.PtrSize + 1]Xscalarptr][64/goarch.PtrSize + 1]Xptrscalar(nil),
join(hdr, rep(8, lit(1)), rep(8, lit(1)), lit(1)))
}
@@ -6848,7 +7082,7 @@ func TestTypeOfTypeOf(t *testing.T) {
check("ChanOf", ChanOf(BothDir, TypeOf(T{})))
check("FuncOf", FuncOf([]Type{TypeOf(T{})}, nil, false))
check("MapOf", MapOf(TypeOf(T{}), TypeOf(T{})))
- check("PtrTo", PtrTo(TypeOf(T{})))
+ check("PtrTo", PointerTo(TypeOf(T{})))
check("SliceOf", SliceOf(TypeOf(T{})))
}
@@ -6915,7 +7149,7 @@ func TestChanAlloc(t *testing.T) {
type TheNameOfThisTypeIsExactly255BytesLongSoWhenTheCompilerPrependsTheReflectTestPackageNameAndExtraStarTheLinkerRuntimeAndReflectPackagesWillHaveToCorrectlyDecodeTheSecondLengthByte0123456789_0123456789_0123456789_0123456789_0123456789_012345678 int
type nameTest struct {
- v interface{}
+ v any
want string
}
@@ -6927,7 +7161,7 @@ var nameTests = []nameTest{
{(*func() D1)(nil), ""},
{(*<-chan D1)(nil), ""},
{(*chan<- D1)(nil), ""},
- {(*interface{})(nil), ""},
+ {(*any)(nil), ""},
{(*interface {
F()
})(nil), ""},
@@ -6957,7 +7191,7 @@ func TestExported(t *testing.T) {
type p3 p
type exportTest struct {
- v interface{}
+ v any
want bool
}
exportTests := []exportTest{
@@ -7037,6 +7271,53 @@ func BenchmarkNew(b *testing.B) {
})
}
+func BenchmarkMap(b *testing.B) {
+ type V *int
+ value := ValueOf((V)(nil))
+ stringKeys := []string{}
+ mapOfStrings := map[string]V{}
+ uint64Keys := []uint64{}
+ mapOfUint64s := map[uint64]V{}
+ for i := 0; i < 100; i++ {
+ stringKey := fmt.Sprintf("key%d", i)
+ stringKeys = append(stringKeys, stringKey)
+ mapOfStrings[stringKey] = nil
+
+ uint64Key := uint64(i)
+ uint64Keys = append(uint64Keys, uint64Key)
+ mapOfUint64s[uint64Key] = nil
+ }
+
+ tests := []struct {
+ label string
+ m, keys, value Value
+ }{
+ {"StringKeys", ValueOf(mapOfStrings), ValueOf(stringKeys), value},
+ {"Uint64Keys", ValueOf(mapOfUint64s), ValueOf(uint64Keys), value},
+ }
+
+ for _, tt := range tests {
+ b.Run(tt.label, func(b *testing.B) {
+ b.Run("MapIndex", func(b *testing.B) {
+ b.ReportAllocs()
+ for i := 0; i < b.N; i++ {
+ for j := tt.keys.Len() - 1; j >= 0; j-- {
+ tt.m.MapIndex(tt.keys.Index(j))
+ }
+ }
+ })
+ b.Run("SetMapIndex", func(b *testing.B) {
+ b.ReportAllocs()
+ for i := 0; i < b.N; i++ {
+ for j := tt.keys.Len() - 1; j >= 0; j-- {
+ tt.m.SetMapIndex(tt.keys.Index(j), tt.value)
+ }
+ }
+ })
+ })
+ }
+}
+
func TestSwapper(t *testing.T) {
type I int
var a, b, c I
@@ -7050,9 +7331,9 @@ func TestSwapper(t *testing.T) {
type S string
tests := []struct {
- in interface{}
+ in any
i, j int
- want interface{}
+ want any
}{
{
in: []int{1, 20, 300},
@@ -7219,6 +7500,72 @@ func TestMapIterNilMap(t *testing.T) {
}
}
+func TestMapIterReset(t *testing.T) {
+ iter := new(MapIter)
+
+ // Use of zero iterator should panic.
+ func() {
+ defer func() { recover() }()
+ iter.Next()
+ t.Error("Next did not panic")
+ }()
+
+ // Reset to new Map should work.
+ m := map[string]int{"one": 1, "two": 2, "three": 3}
+ iter.Reset(ValueOf(m))
+ if got, want := iterateToString(iter), `[one: 1, three: 3, two: 2]`; got != want {
+ t.Errorf("iterator returned %s (after sorting), want %s", got, want)
+ }
+
+ // Reset to Zero value should work, but iterating over it should panic.
+ iter.Reset(Value{})
+ func() {
+ defer func() { recover() }()
+ iter.Next()
+ t.Error("Next did not panic")
+ }()
+
+ // Reset to a different Map with different types should work.
+ m2 := map[int]string{1: "one", 2: "two", 3: "three"}
+ iter.Reset(ValueOf(m2))
+ if got, want := iterateToString(iter), `[1: one, 2: two, 3: three]`; got != want {
+ t.Errorf("iterator returned %s (after sorting), want %s", got, want)
+ }
+
+ // Check that Reset, Next, and SetKey/SetValue play nicely together.
+ m3 := map[uint64]uint64{
+ 1 << 0: 1 << 1,
+ 1 << 1: 1 << 2,
+ 1 << 2: 1 << 3,
+ }
+ kv := New(TypeOf(uint64(0))).Elem()
+ for i := 0; i < 5; i++ {
+ var seenk, seenv uint64
+ iter.Reset(ValueOf(m3))
+ for iter.Next() {
+ kv.SetIterKey(iter)
+ seenk ^= kv.Uint()
+ kv.SetIterValue(iter)
+ seenv ^= kv.Uint()
+ }
+ if seenk != 0b111 {
+ t.Errorf("iteration yielded keys %b, want %b", seenk, 0b111)
+ }
+ if seenv != 0b1110 {
+ t.Errorf("iteration yielded values %b, want %b", seenv, 0b1110)
+ }
+ }
+
+ // Reset should not allocate.
+ n := int(testing.AllocsPerRun(10, func() {
+ iter.Reset(ValueOf(m2))
+ iter.Reset(Value{})
+ }))
+ if n > 0 {
+ t.Errorf("MapIter.Reset allocated %d times", n)
+ }
+}
+
func TestMapIterSafety(t *testing.T) {
// Using a zero MapIter causes a panic, but not a crash.
func() {
@@ -7284,6 +7631,16 @@ func TestMapIterNext(t *testing.T) {
}
}
+func BenchmarkMapIterNext(b *testing.B) {
+ m := ValueOf(map[string]int{"a": 0, "b": 1, "c": 2, "d": 3})
+ it := m.MapRange()
+ for i := 0; i < b.N; i++ {
+ for it.Next() {
+ }
+ it.Reset(m)
+ }
+}
+
func TestMapIterDelete0(t *testing.T) {
// Delete all elements before first iteration.
m := map[string]int{"one": 1, "two": 2, "three": 3}
@@ -7332,4 +7689,140 @@ func TestConvertibleTo(t *testing.T) {
if t1.ConvertibleTo(t2) {
t.Fatalf("(%s).ConvertibleTo(%s) = true, want false", t1, t2)
}
+
+ t3 := ValueOf([]example1.MyStruct{}).Type()
+ t4 := ValueOf([]example2.MyStruct{}).Type()
+
+ if t3.ConvertibleTo(t4) {
+ t.Fatalf("(%s).ConvertibleTo(%s) = true, want false", t3, t4)
+ }
+}
+
+func TestSetIter(t *testing.T) {
+ data := map[string]int{
+ "foo": 1,
+ "bar": 2,
+ "baz": 3,
+ }
+
+ m := ValueOf(data)
+ i := m.MapRange()
+ k := New(TypeOf("")).Elem()
+ v := New(TypeOf(0)).Elem()
+ shouldPanic("Value.SetIterKey called before Next", func() {
+ k.SetIterKey(i)
+ })
+ shouldPanic("Value.SetIterValue called before Next", func() {
+ v.SetIterValue(i)
+ })
+ data2 := map[string]int{}
+ for i.Next() {
+ k.SetIterKey(i)
+ v.SetIterValue(i)
+ data2[k.Interface().(string)] = v.Interface().(int)
+ }
+ if !DeepEqual(data, data2) {
+ t.Errorf("maps not equal, got %v want %v", data2, data)
+ }
+ shouldPanic("Value.SetIterKey called on exhausted iterator", func() {
+ k.SetIterKey(i)
+ })
+ shouldPanic("Value.SetIterValue called on exhausted iterator", func() {
+ v.SetIterValue(i)
+ })
+
+ i.Reset(m)
+ i.Next()
+ shouldPanic("Value.SetIterKey using unaddressable value", func() {
+ ValueOf("").SetIterKey(i)
+ })
+ shouldPanic("Value.SetIterValue using unaddressable value", func() {
+ ValueOf(0).SetIterValue(i)
+ })
+ shouldPanic("value of type string is not assignable to type int", func() {
+ New(TypeOf(0)).Elem().SetIterKey(i)
+ })
+ shouldPanic("value of type int is not assignable to type string", func() {
+ New(TypeOf("")).Elem().SetIterValue(i)
+ })
+
+ // Make sure assignment conversion works.
+ var x any
+ y := ValueOf(&x).Elem()
+ y.SetIterKey(i)
+ if _, ok := data[x.(string)]; !ok {
+ t.Errorf("got key %s which is not in map", x)
+ }
+ y.SetIterValue(i)
+ if x.(int) < 1 || x.(int) > 3 {
+ t.Errorf("got value %d which is not in map", x)
+ }
+
+ // Try some key/value types which are direct interfaces.
+ a := 88
+ b := 99
+ pp := map[*int]*int{
+ &a: &b,
+ }
+ i = ValueOf(pp).MapRange()
+ i.Next()
+ y.SetIterKey(i)
+ if got := *y.Interface().(*int); got != a {
+ t.Errorf("pointer incorrect: got %d want %d", got, a)
+ }
+ y.SetIterValue(i)
+ if got := *y.Interface().(*int); got != b {
+ t.Errorf("pointer incorrect: got %d want %d", got, b)
+ }
}
+
+//go:notinheap
+type nih struct{ x int }
+
+var global_nih = nih{x: 7}
+
+func TestNotInHeapDeref(t *testing.T) {
+ // See issue 48399.
+ v := ValueOf((*nih)(nil))
+ v.Elem()
+ shouldPanic("reflect: call of reflect.Value.Field on zero Value", func() { v.Elem().Field(0) })
+
+ v = ValueOf(&global_nih)
+ if got := v.Elem().Field(0).Int(); got != 7 {
+ t.Fatalf("got %d, want 7", got)
+ }
+
+ v = ValueOf((*nih)(unsafe.Pointer(new(int))))
+ shouldPanic("reflect: reflect.Value.Elem on an invalid notinheap pointer", func() { v.Elem() })
+ shouldPanic("reflect: reflect.Value.Pointer on an invalid notinheap pointer", func() { v.Pointer() })
+ shouldPanic("reflect: reflect.Value.UnsafePointer on an invalid notinheap pointer", func() { v.UnsafePointer() })
+}
+
+func TestMethodCallValueCodePtr(t *testing.T) {
+ m := ValueOf(Point{}).Method(1)
+ want := MethodValueCallCodePtr()
+ if got := uintptr(m.UnsafePointer()); got != want {
+ t.Errorf("methodValueCall code pointer mismatched, want: %v, got: %v", want, got)
+ }
+ if got := m.Pointer(); got != want {
+ t.Errorf("methodValueCall code pointer mismatched, want: %v, got: %v", want, got)
+ }
+}
+
+/* FIXME: comment out for generics
+
+type A struct{}
+type B[T any] struct{}
+
+func TestIssue50208(t *testing.T) {
+ want1 := "B[reflect_test.A]"
+ if got := TypeOf(new(B[A])).Elem().Name(); got != want1 {
+ t.Errorf("name of type parameter mismatched, want:%s, got:%s", want1, got)
+ }
+ want2 := "B[reflect_test.B[reflect_test.A]]"
+ if got := TypeOf(new(B[B[A]])).Elem().Name(); got != want2 {
+ t.Errorf("name of type parameter mismatched, want:%s, got:%s", want2, got)
+ }
+}
+
+*/
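
The tests added above lean on several reflect APIs introduced in this revision: MapIter.Reset, Value.SetIterKey/SetIterValue, the Value.CanInt/CanUint/CanFloat/CanComplex predicates, and PointerTo/UnsafePointer as the preferred spellings of PtrTo/Pointer. A minimal, self-contained sketch of how they read in ordinary user code (not part of the patch; assumes a toolchain that ships these APIs):

package main

import (
	"fmt"
	"reflect"
)

func main() {
	m := map[string]int{"one": 1, "two": 2}
	v := reflect.ValueOf(m)

	// Reusable, addressable key/value holders: SetIterKey/SetIterValue copy
	// the current entry into them, avoiding the allocation that iter.Key()
	// and iter.Value() would incur on each element.
	k := reflect.New(v.Type().Key()).Elem()
	e := reflect.New(v.Type().Elem()).Elem()

	iter := v.MapRange()
	for iter.Next() {
		k.SetIterKey(iter)
		e.SetIterValue(iter)
		fmt.Println(k.String(), e.Int())
	}

	// Reset points the same MapIter at a map again (or at a different map).
	iter.Reset(v)

	// The Can* predicates report whether the matching getter may be called.
	x := reflect.ValueOf(2.5)
	fmt.Println(x.CanInt(), x.CanFloat()) // false true

	// PointerTo is the new name for PtrTo.
	fmt.Println(reflect.PointerTo(reflect.TypeOf(0))) // *int
}
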
diff --git a/libgo/go/reflect/deepequal.go b/libgo/go/reflect/deepequal.go
index d951d8d..eaab101 100644
--- a/libgo/go/reflect/deepequal.go
+++ b/libgo/go/reflect/deepequal.go
@@ -6,7 +6,10 @@
package reflect
-import "unsafe"
+import (
+ "internal/bytealg"
+ "unsafe"
+)
// During deepValueEqual, must keep track of checks that are
// in progress. The comparison algorithm assumes that all
@@ -35,7 +38,7 @@ func deepValueEqual(v1, v2 Value, visited map[visit]bool) bool {
// and it's safe and valid to get Value's internal pointer.
hard := func(v1, v2 Value) bool {
switch v1.Kind() {
- case Ptr:
+ case Pointer:
if v1.typ.ptrdata == 0 {
// go:notinheap pointers can't be cyclic.
// At least, all of our current uses of go:notinheap have
@@ -53,13 +56,13 @@ func deepValueEqual(v1, v2 Value, visited map[visit]bool) bool {
}
if hard(v1, v2) {
- // For a Ptr or Map value, we need to check flagIndir,
+ // For a Pointer or Map value, we need to check flagIndir,
// which we do by calling the pointer method.
// For Slice or Interface, flagIndir is always set,
// and using v.ptr suffices.
ptrval := func(v Value) unsafe.Pointer {
switch v.Kind() {
- case Ptr, Map:
+ case Pointer, Map:
return v.pointer()
default:
return v.ptr
@@ -99,9 +102,13 @@ func deepValueEqual(v1, v2 Value, visited map[visit]bool) bool {
if v1.Len() != v2.Len() {
return false
}
- if v1.Pointer() == v2.Pointer() {
+ if v1.UnsafePointer() == v2.UnsafePointer() {
return true
}
+ // Special case for []byte, which is common.
+ if v1.Type().Elem().Kind() == Uint8 {
+ return bytealg.Equal(v1.Bytes(), v2.Bytes())
+ }
for i := 0; i < v1.Len(); i++ {
if !deepValueEqual(v1.Index(i), v2.Index(i), visited) {
return false
@@ -113,8 +120,8 @@ func deepValueEqual(v1, v2 Value, visited map[visit]bool) bool {
return v1.IsNil() == v2.IsNil()
}
return deepValueEqual(v1.Elem(), v2.Elem(), visited)
- case Ptr:
- if v1.Pointer() == v2.Pointer() {
+ case Pointer:
+ if v1.UnsafePointer() == v2.UnsafePointer() {
return true
}
return deepValueEqual(v1.Elem(), v2.Elem(), visited)
@@ -132,7 +139,7 @@ func deepValueEqual(v1, v2 Value, visited map[visit]bool) bool {
if v1.Len() != v2.Len() {
return false
}
- if v1.Pointer() == v2.Pointer() {
+ if v1.UnsafePointer() == v2.UnsafePointer() {
return true
}
for _, k := range v1.MapKeys() {
@@ -149,6 +156,18 @@ func deepValueEqual(v1, v2 Value, visited map[visit]bool) bool {
}
// Can't do better than this:
return false
+ case Int, Int8, Int16, Int32, Int64:
+ return v1.Int() == v2.Int()
+ case Uint, Uint8, Uint16, Uint32, Uint64, Uintptr:
+ return v1.Uint() == v2.Uint()
+ case String:
+ return v1.String() == v2.String()
+ case Bool:
+ return v1.Bool() == v2.Bool()
+ case Float32, Float64:
+ return v1.Float() == v2.Float()
+ case Complex64, Complex128:
+ return v1.Complex() == v2.Complex()
default:
// Normal equality suffices
return valueInterface(v1, false) == valueInterface(v2, false)
@@ -206,7 +225,7 @@ func deepValueEqual(v1, v2 Value, visited map[visit]bool) bool {
// values that have been compared before, it treats the values as
// equal rather than examining the values to which they point.
// This ensures that DeepEqual terminates.
-func DeepEqual(x, y interface{}) bool {
+func DeepEqual(x, y any) bool {
if x == nil || y == nil {
return x == y
}
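
The bytealg.Equal shortcut above is purely an optimization for byte slices; it does not loosen DeepEqual's type rules, which is what the new []byte/MyByte/MyBytes table entries in all_test.go pin down. A small illustrative sketch (MyByte and MyBytes here mirror the named types used by the tests):

package main

import (
	"fmt"
	"reflect"
)

type MyByte byte
type MyBytes []byte

func main() {
	fmt.Println(reflect.DeepEqual([]byte{1, 2, 3}, []byte{1, 2, 3}))   // true: byte-slice fast path
	fmt.Println(reflect.DeepEqual([]byte{1, 2, 3}, MyBytes{1, 2, 3}))  // false: distinct types
	fmt.Println(reflect.DeepEqual([]MyByte{1, 2, 3}, []byte{1, 2, 3})) // false: distinct element types
}
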
diff --git a/libgo/go/reflect/example_test.go b/libgo/go/reflect/example_test.go
index 23c08e4..3db971c 100644
--- a/libgo/go/reflect/example_test.go
+++ b/libgo/go/reflect/example_test.go
@@ -14,7 +14,7 @@ import (
)
func ExampleKind() {
- for _, v := range []interface{}{"hi", 42, func() {}} {
+ for _, v := range []any{"hi", 42, func() {}} {
switch v := reflect.ValueOf(v); v.Kind() {
case reflect.String:
fmt.Println(v.String())
@@ -45,7 +45,7 @@ func ExampleMakeFunc() {
// When the function is invoked, reflect turns the arguments
// into Values, calls swap, and then turns swap's result slice
// into the values returned by the new function.
- makeSwap := func(fptr interface{}) {
+ makeSwap := func(fptr any) {
// fptr is a pointer to a function.
// Obtain the function value itself (likely nil) as a reflect.Value
// so that we can query its type and then set the value.
@@ -166,3 +166,31 @@ func ExampleStructOf() {
// json: {"height":0.4,"age":2}
// value: &{Height:1.5 Age:10}
}
+
+func ExampleValue_FieldByIndex() {
+ // This example shows a case in which the name of a promoted field
+ // is hidden by another field: FieldByName will not work, so
+ // FieldByIndex must be used instead.
+ type user struct {
+ firstName string
+ lastName string
+ }
+
+ type data struct {
+ user
+ firstName string
+ lastName string
+ }
+
+ u := data{
+ user: user{"Embedded John", "Embedded Doe"},
+ firstName: "John",
+ lastName: "Doe",
+ }
+
+ s := reflect.ValueOf(u).FieldByIndex([]int{0, 1})
+ fmt.Println("embedded last name:", s)
+
+ // Output:
+ // embedded last name: Embedded Doe
+}
diff --git a/libgo/go/reflect/export_test.go b/libgo/go/reflect/export_test.go
index 203a307..57042d2 100644
--- a/libgo/go/reflect/export_test.go
+++ b/libgo/go/reflect/export_test.go
@@ -17,9 +17,12 @@ func IsRO(v Value) bool {
var CallGC = &callGC
-const PtrSize = ptrSize
-
-func FuncLayout(t Type, rcvr Type) (frametype Type, argSize, retOffset uintptr, stack []byte, gc []byte, ptrs bool) {
+// FuncLayout calls funcLayout and returns a subset of the results for testing.
+//
+// Bitmaps like stack, gc, inReg, and outReg are expanded such that each bit
+// takes up one byte, so that writing out test cases is a little clearer.
+// If ptrs is false, gc will be nil.
+func FuncLayout(t Type, rcvr Type) (frametype Type, argSize, retOffset uintptr, stack, gc, inReg, outReg []byte, ptrs bool) {
return
}
@@ -87,3 +90,5 @@ func ResolveReflectName(s string) {
type Buffer struct {
buf []byte
}
+
+var MethodValueCallCodePtr = methodValueCallCodePtr
diff --git a/libgo/go/reflect/makefunc.go b/libgo/go/reflect/makefunc.go
index 91df328..5f8a53e 100644
--- a/libgo/go/reflect/makefunc.go
+++ b/libgo/go/reflect/makefunc.go
@@ -7,6 +7,7 @@
package reflect
import (
+ "internal/abi"
"unsafe"
)
@@ -133,6 +134,16 @@ func makeValueMethod(v Value) Value {
return Value{t, unsafe.Pointer(&impl), v.flag&flagRO | flag(Func) | flagIndir}
}
+// methodValueCallCodePtr and methodValueCall are only here to provide
+// something to return for the UnsafePointer method of a method value.
+func methodValueCallCodePtr() uintptr {
+ return abi.FuncPCABI0(methodValueCall)
+}
+
+func methodValueCall() {
+ panic("methodValueCall")
+}
+
// Call the function represented by a makeFuncImpl.
func (c *makeFuncImpl) call(in []Value) []Value {
if c.method == -1 {
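
methodValueCallCodePtr exists so that Pointer/UnsafePointer on a method value have a stable code pointer to report, which is what the TestMethodValue and TestMethodCallValueCodePtr additions check. A sketch of the observable behaviour from user code, using a hypothetical Point type:

package main

import (
	"fmt"
	"reflect"
)

type Point struct{ X, Y int }

func (p Point) Dist(d int) int { return p.X*p.X + p.Y*p.Y + d }

func main() {
	m1 := reflect.ValueOf(Point{1, 1}).Method(0)
	m2 := reflect.ValueOf(Point{2, 2}).Method(0)

	// Every method value is backed by the same call trampoline, so their
	// code pointers compare equal even though the receivers differ.
	fmt.Println(m1.Pointer() == m2.Pointer())             // true
	fmt.Println(m1.UnsafePointer() == m2.UnsafePointer()) // true
}
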
diff --git a/libgo/go/reflect/makefunc_ffi.go b/libgo/go/reflect/makefunc_ffi.go
index 05c5bc0..b982c65 100644
--- a/libgo/go/reflect/makefunc_ffi.go
+++ b/libgo/go/reflect/makefunc_ffi.go
@@ -5,6 +5,7 @@
package reflect
import (
+ "internal/goarch"
"unsafe"
)
@@ -37,7 +38,7 @@ func ffiCallbackGo(results unsafe.Pointer, params unsafe.Pointer, impl *makeFunc
typedmemmove(rt, p, *(*unsafe.Pointer)(ap))
v := Value{rt, p, flag(rt.Kind()) | flagIndir}
in = append(in, v)
- ap = (unsafe.Pointer)(uintptr(ap) + ptrSize)
+ ap = (unsafe.Pointer)(uintptr(ap) + goarch.PtrSize)
}
out := impl.call(in)
diff --git a/libgo/go/reflect/set_test.go b/libgo/go/reflect/set_test.go
index a633e6e..9ce0e09 100644
--- a/libgo/go/reflect/set_test.go
+++ b/libgo/go/reflect/set_test.go
@@ -31,7 +31,7 @@ func TestImplicitMapConversion(t *testing.T) {
}
{
// convert interface key
- m := make(map[interface{}]int)
+ m := make(map[any]int)
mv := ValueOf(m)
mv.SetMapIndex(ValueOf(1), ValueOf(2))
x, ok := m[1]
@@ -44,7 +44,7 @@ func TestImplicitMapConversion(t *testing.T) {
}
{
// convert interface value
- m := make(map[int]interface{})
+ m := make(map[int]any)
mv := ValueOf(m)
mv.SetMapIndex(ValueOf(1), ValueOf(2))
x, ok := m[1]
@@ -57,7 +57,7 @@ func TestImplicitMapConversion(t *testing.T) {
}
{
// convert both interface key and interface value
- m := make(map[interface{}]interface{})
+ m := make(map[any]any)
mv := ValueOf(m)
mv.SetMapIndex(ValueOf(1), ValueOf(2))
x, ok := m[1]
@@ -79,7 +79,7 @@ func TestImplicitMapConversion(t *testing.T) {
if x != b2 {
t.Errorf("#5 after SetMapIndex(b1, b2): %p (!= %p), %t (map=%v)", x, b2, ok, m)
}
- if p := mv.MapIndex(ValueOf(b1)).Elem().Pointer(); p != uintptr(unsafe.Pointer(b2)) {
+ if p := mv.MapIndex(ValueOf(b1)).Elem().UnsafePointer(); p != unsafe.Pointer(b2) {
t.Errorf("#5 MapIndex(b1) = %#x want %p", p, b2)
}
}
@@ -94,7 +94,7 @@ func TestImplicitMapConversion(t *testing.T) {
if x != c2 {
t.Errorf("#6 after SetMapIndex(c1, c2): %p (!= %p), %t (map=%v)", x, c2, ok, m)
}
- if p := mv.MapIndex(ValueOf(c1)).Pointer(); p != ValueOf(c2).Pointer() {
+ if p := mv.MapIndex(ValueOf(c1)).UnsafePointer(); p != ValueOf(c2).UnsafePointer() {
t.Errorf("#6 MapIndex(c1) = %#x want %p", p, c2)
}
}
@@ -110,7 +110,7 @@ func TestImplicitMapConversion(t *testing.T) {
if x != b2 {
t.Errorf("#7 after SetMapIndex(b1, b2): %p (!= %p), %t (map=%v)", x, b2, ok, m)
}
- if p := mv.MapIndex(ValueOf(b1)).Pointer(); p != uintptr(unsafe.Pointer(b2)) {
+ if p := mv.MapIndex(ValueOf(b1)).UnsafePointer(); p != unsafe.Pointer(b2) {
t.Errorf("#7 MapIndex(b1) = %#x want %p", p, b2)
}
}
@@ -160,8 +160,8 @@ func TestImplicitAppendConversion(t *testing.T) {
}
var implementsTests = []struct {
- x interface{}
- t interface{}
+ x any
+ t any
b bool
}{
{new(*bytes.Buffer), new(io.Reader), true},
@@ -198,8 +198,8 @@ func TestImplements(t *testing.T) {
}
var assignableTests = []struct {
- x interface{}
- t interface{}
+ x any
+ t any
b bool
}{
{new(chan int), new(<-chan int), true},
@@ -207,13 +207,13 @@ var assignableTests = []struct {
{new(*int), new(IntPtr), true},
{new(IntPtr), new(*int), true},
{new(IntPtr), new(IntPtr1), false},
- {new(Ch), new(<-chan interface{}), true},
+ {new(Ch), new(<-chan any), true},
// test runs implementsTests too
}
type IntPtr *int
type IntPtr1 *int
-type Ch <-chan interface{}
+type Ch <-chan any
func TestAssignableTo(t *testing.T) {
for _, tt := range append(assignableTests, implementsTests...) {
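
The implicit-conversion tests above only change the spelling interface{} -> any; the behaviour they exercise — SetMapIndex converting a concrete key or value to an interface-typed map entry — looks like this in ordinary code (a sketch):

package main

import (
	"fmt"
	"reflect"
)

func main() {
	m := make(map[any]any)
	mv := reflect.ValueOf(m)

	// Both the key and the value are implicitly converted to the map's
	// interface key/element types, as in a plain m[1] = 2 assignment.
	mv.SetMapIndex(reflect.ValueOf(1), reflect.ValueOf(2))
	fmt.Println(m[1]) // 2
}
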
diff --git a/libgo/go/reflect/swapper.go b/libgo/go/reflect/swapper.go
index 0cf4066..745c7b9 100644
--- a/libgo/go/reflect/swapper.go
+++ b/libgo/go/reflect/swapper.go
@@ -5,6 +5,7 @@
package reflect
import (
+ "internal/goarch"
"internal/unsafeheader"
"unsafe"
)
@@ -13,7 +14,7 @@ import (
// slice.
//
// Swapper panics if the provided interface is not a slice.
-func Swapper(slice interface{}) func(i, j int) {
+func Swapper(slice any) func(i, j int) {
v := ValueOf(slice)
if v.Kind() != Slice {
panic(&ValueError{Method: "Swapper", Kind: v.Kind()})
@@ -36,7 +37,7 @@ func Swapper(slice interface{}) func(i, j int) {
// Some common & small cases, without using memmove:
if hasPtr {
- if size == ptrSize {
+ if size == goarch.PtrSize {
ps := *(*[]unsafe.Pointer)(v.ptr)
return func(i, j int) { ps[i], ps[j] = ps[j], ps[i] }
}
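
Swapper itself is unchanged apart from the interface{} -> any spelling and the move to goarch.PtrSize; a brief usage sketch:

package main

import (
	"fmt"
	"reflect"
)

func main() {
	s := []int{1, 20, 300}
	swap := reflect.Swapper(s) // panics if the argument is not a slice
	swap(0, 2)
	fmt.Println(s) // [300 20 1]
}
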
diff --git a/libgo/go/reflect/tostring_test.go b/libgo/go/reflect/tostring_test.go
index e416fd8..193484a 100644
--- a/libgo/go/reflect/tostring_test.go
+++ b/libgo/go/reflect/tostring_test.go
@@ -39,7 +39,7 @@ func valueToString(val Value) string {
} else {
return "false"
}
- case Ptr:
+ case Pointer:
v := val
str = typ.String() + "("
if v.IsNil() {
diff --git a/libgo/go/reflect/type.go b/libgo/go/reflect/type.go
index 0c394a2..82edcf8 100644
--- a/libgo/go/reflect/type.go
+++ b/libgo/go/reflect/type.go
@@ -16,6 +16,7 @@
package reflect
import (
+ "internal/goarch"
"strconv"
"sync"
"unicode"
@@ -127,7 +128,7 @@ type Type interface {
// Chan: ChanDir, Elem
// Func: In, NumIn, Out, NumOut, IsVariadic.
// Map: Key, Elem
- // Ptr: Elem
+ // Pointer: Elem
// Slice: Elem
// Struct: Field, FieldByIndex, FieldByName, FieldByNameFunc, NumField
@@ -155,7 +156,7 @@ type Type interface {
IsVariadic() bool
// Elem returns a type's element type.
- // It panics if the type's Kind is not Array, Chan, Map, Ptr, or Slice.
+ // It panics if the type's Kind is not Array, Chan, Map, Pointer, or Slice.
Elem() Type
// Field returns a struct type's i'th field.
@@ -230,7 +231,7 @@ type Type interface {
// See https://golang.org/issue/4876 for more details.
/*
- * These data structures are known to the compiler (../../cmd/internal/reflectdata/reflect.go).
+ * These data structures are known to the compiler (../cmd/compile/internal/reflectdata/reflect.go).
* A few are known to ../runtime/type.go to convey to debuggers.
* They are also known to ../runtime/type.go.
*/
@@ -262,13 +263,16 @@ const (
Func
Interface
Map
- Ptr
+ Pointer
Slice
String
Struct
UnsafePointer
)
+// Ptr is the old name for the Pointer kind.
+const Ptr = Pointer
+
// tflag is used by an rtype to signal what extra type information is
// available in the memory directly following the rtype value.
//
@@ -481,7 +485,7 @@ var kindNames = []string{
Func: "func",
Interface: "interface",
Map: "map",
- Ptr: "ptr",
+ Pointer: "ptr",
Slice: "slice",
String: "string",
Struct: "struct",
@@ -682,7 +686,7 @@ func (t *rtype) Elem() Type {
case Map:
tt := (*mapType)(unsafe.Pointer(t))
return toType(tt.elem)
- case Ptr:
+ case Pointer:
tt := (*ptrType)(unsafe.Pointer(t))
return toType(tt.elem)
case Slice:
@@ -975,7 +979,7 @@ func (t *structType) FieldByIndex(index []int) (f StructField) {
for i, x := range index {
if i > 0 {
ft := f.Type
- if ft.Kind() == Ptr && ft.Elem().Kind() == Struct {
+ if ft.Kind() == Pointer && ft.Elem().Kind() == Struct {
ft = ft.Elem()
}
f.Type = ft
@@ -1046,7 +1050,7 @@ func (t *structType) FieldByNameFunc(match func(string) bool) (result StructFiel
if f.embedded() {
// Embedded field of type T or *T.
ntyp = f.typ
- if ntyp.Kind() == Ptr {
+ if ntyp.Kind() == Pointer {
ntyp = ntyp.Elem().common()
}
}
@@ -1122,17 +1126,24 @@ func (t *structType) FieldByName(name string) (f StructField, present bool) {
// TypeOf returns the reflection Type that represents the dynamic type of i.
// If i is a nil interface value, TypeOf returns nil.
-func TypeOf(i interface{}) Type {
+func TypeOf(i any) Type {
eface := *(*emptyInterface)(unsafe.Pointer(&i))
return toType(eface.typ)
}
-// ptrMap is the cache for PtrTo.
+// ptrMap is the cache for PointerTo.
var ptrMap sync.Map // map[*rtype]*ptrType
// PtrTo returns the pointer type with element t.
// For example, if t represents type Foo, PtrTo(t) represents *Foo.
-func PtrTo(t Type) Type {
+//
+// PtrTo is the old spelling of PointerTo.
+// The two functions behave identically.
+func PtrTo(t Type) Type { return PointerTo(t) }
+
+// PointerTo returns the pointer type with element t.
+// For example, if t represents type Foo, PointerTo(t) represents *Foo.
+func PointerTo(t Type) Type {
return t.(*rtype).ptrTo()
}
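
PtrTo and the new PointerTo return the same type; a short sketch (illustration only):

    package main

    import (
        "fmt"
        "reflect"
    )

    func main() {
        t := reflect.TypeOf(int64(0))
        fmt.Println(reflect.PointerTo(t)) // *int64
        fmt.Println(reflect.PtrTo(t))     // *int64, via the old spelling
    }
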
@@ -1158,7 +1169,7 @@ func (t *rtype) ptrTo() *rtype {
// Create a new ptrType starting with the description
// of an *unsafe.Pointer.
- var iptr interface{} = (*unsafe.Pointer)(nil)
+ var iptr any = (*unsafe.Pointer)(nil)
prototype := *(**ptrType)(unsafe.Pointer(&iptr))
pp := *prototype
@@ -1380,7 +1391,7 @@ func haveIdenticalUnderlyingType(T, V *rtype, cmpTags bool) bool {
case Map:
return haveIdenticalType(T.Key(), V.Key(), cmpTags) && haveIdenticalType(T.Elem(), V.Elem(), cmpTags)
- case Ptr, Slice:
+ case Pointer, Slice:
return haveIdenticalType(T.Elem(), V.Elem(), cmpTags)
case Struct:
@@ -1487,7 +1498,7 @@ func ChanOf(dir ChanDir, t Type) Type {
}
// Make a channel type.
- var ichan interface{} = (chan unsafe.Pointer)(nil)
+ var ichan any = (chan unsafe.Pointer)(nil)
prototype := *(**chanType)(unsafe.Pointer(&ichan))
ch := *prototype
ch.tflag = tflagRegularMemory
@@ -1548,7 +1559,7 @@ func MapOf(key, elem Type) Type {
// Make a map type.
// Note: flag values must match those used in the TMAP case
// in ../cmd/compile/internal/reflectdata/reflect.go:writeType.
- var imap interface{} = (map[unsafe.Pointer]unsafe.Pointer)(nil)
+ var imap any = (map[unsafe.Pointer]unsafe.Pointer)(nil)
mt := **(**mapType)(unsafe.Pointer(&imap))
mt.string = &s
@@ -1567,13 +1578,13 @@ func MapOf(key, elem Type) Type {
}
mt.flags = 0
if ktyp.size > maxKeySize {
- mt.keysize = uint8(ptrSize)
+ mt.keysize = uint8(goarch.PtrSize)
mt.flags |= 1 // indirect key
} else {
mt.keysize = uint8(ktyp.size)
}
if etyp.size > maxValSize {
- mt.valuesize = uint8(ptrSize)
+ mt.valuesize = uint8(goarch.PtrSize)
mt.flags |= 2 // indirect value
} else {
mt.valuesize = uint8(etyp.size)
@@ -1606,7 +1617,7 @@ func FuncOf(in, out []Type, variadic bool) Type {
}
// Make a func type.
- var ifunc interface{} = (func())(nil)
+ var ifunc any = (func())(nil)
prototype := *(**funcType)(unsafe.Pointer(&ifunc))
ft := new(funcType)
*ft = *prototype
@@ -1718,7 +1729,7 @@ func funcStr(ft *funcType) string {
// That is, x == x for all values x of type t.
func isReflexive(t *rtype) bool {
switch t.Kind() {
- case Bool, Int, Int8, Int16, Int32, Int64, Uint, Uint8, Uint16, Uint32, Uint64, Uintptr, Chan, Ptr, String, UnsafePointer:
+ case Bool, Int, Int8, Int16, Int32, Int64, Uint, Uint8, Uint16, Uint32, Uint64, Uintptr, Chan, Pointer, String, UnsafePointer:
return true
case Float32, Float64, Complex64, Complex128, Interface:
return false
@@ -1742,7 +1753,7 @@ func isReflexive(t *rtype) bool {
// needKeyUpdate reports whether map overwrites require the key to be copied.
func needKeyUpdate(t *rtype) bool {
switch t.Kind() {
- case Bool, Int, Int8, Int16, Int32, Int64, Uint, Uint8, Uint16, Uint32, Uint64, Uintptr, Chan, Ptr, UnsafePointer:
+ case Bool, Int, Int8, Int16, Int32, Int64, Uint, Uint8, Uint16, Uint32, Uint64, Uintptr, Chan, Pointer, UnsafePointer:
return false
case Float32, Float64, Complex64, Complex128, Interface, String:
// Float keys can be updated from +0 to -0.
@@ -1799,14 +1810,14 @@ const (
func bucketOf(ktyp, etyp *rtype) *rtype {
if ktyp.size > maxKeySize {
- ktyp = PtrTo(ktyp).(*rtype)
+ ktyp = PointerTo(ktyp).(*rtype)
}
if etyp.size > maxValSize {
- etyp = PtrTo(etyp).(*rtype)
+ etyp = PointerTo(etyp).(*rtype)
}
// Prepare GC data if any.
- // A bucket is at most bucketSize*(1+maxKeySize+maxValSize)+2*ptrSize bytes,
+ // A bucket is at most bucketSize*(1+maxKeySize+maxValSize)+2*goarch.PtrSize bytes,
// or 2072 bytes, or 259 pointer-size words, or 33 bytes of pointer bitmap.
// Note that since the key and value are known to be <= 128 bytes,
// they're guaranteed to have bitmaps instead of GC programs.
@@ -1823,39 +1834,39 @@ func bucketOf(ktyp, etyp *rtype) *rtype {
if maxAlign < uintptr(etyp.fieldAlign) {
maxAlign = uintptr(etyp.fieldAlign)
}
- if maxAlign > ptrSize {
+ if maxAlign > goarch.PtrSize {
size = align(size, maxAlign)
- size += align(ptrSize, maxAlign) - ptrSize
- } else if maxAlign < ptrSize {
- size = align(size, ptrSize)
- maxAlign = ptrSize
+ size += align(goarch.PtrSize, maxAlign) - goarch.PtrSize
+ } else if maxAlign < goarch.PtrSize {
+ size = align(size, goarch.PtrSize)
+ maxAlign = goarch.PtrSize
}
ovoff := size
- size += ptrSize
+ size += goarch.PtrSize
if ktyp.ptrdata != 0 || etyp.ptrdata != 0 {
- nptr := size / ptrSize
+ nptr := size / goarch.PtrSize
mask := make([]byte, (nptr+7)/8)
psize := bucketSize
psize = align(psize, uintptr(ktyp.fieldAlign))
- base := psize / ptrSize
+ base := psize / goarch.PtrSize
if ktyp.ptrdata != 0 {
emitGCMask(mask, base, ktyp, bucketSize)
}
psize += bucketSize * ktyp.size
psize = align(psize, uintptr(etyp.fieldAlign))
- base = psize / ptrSize
+ base = psize / goarch.PtrSize
if etyp.ptrdata != 0 {
emitGCMask(mask, base, etyp, bucketSize)
}
- word := ovoff / ptrSize
+ word := ovoff / goarch.PtrSize
mask[word/8] |= 1 << (word % 8)
gcdata = &mask[0]
- ptrdata = (word + 1) * ptrSize
+ ptrdata = (word + 1) * goarch.PtrSize
// overflow word must be last
if ptrdata != size {
@@ -1886,8 +1897,8 @@ func emitGCMask(out []byte, base uintptr, typ *rtype, n uintptr) {
if typ.kind&kindGCProg != 0 {
panic("reflect: unexpected GC program")
}
- ptrs := typ.ptrdata / ptrSize
- words := typ.size / ptrSize
+ ptrs := typ.ptrdata / goarch.PtrSize
+ words := typ.size / goarch.PtrSize
mask := typ.gcSlice(0, (ptrs+7)/8)
for j := uintptr(0); j < ptrs; j++ {
if (mask[j/8]>>(j%8))&1 != 0 {
@@ -1910,7 +1921,7 @@ func appendGCProg(dst []byte, typ *rtype) []byte {
}
// Element is small with pointer mask; use as literal bits.
- ptrs := typ.ptrdata / ptrSize
+ ptrs := typ.ptrdata / goarch.PtrSize
mask := typ.gcSlice(0, (ptrs+7)/8)
// Emit 120-bit chunks of full bytes (max is 127 but we avoid using partial bytes).
@@ -1947,7 +1958,7 @@ func SliceOf(t Type) Type {
}
// Make a slice type.
- var islice interface{} = ([]unsafe.Pointer)(nil)
+ var islice any = ([]unsafe.Pointer)(nil)
prototype := *(**sliceType)(unsafe.Pointer(&islice))
slice := *prototype
slice.string = &s
@@ -2055,10 +2066,10 @@ func StructOf(fields []StructField) Type {
} else {
// Embedded field
repr = append(repr, " ?"...)
- if f.typ.Kind() == Ptr {
+ if f.typ.Kind() == Pointer {
// Embedded ** and *interface{} are illegal
elem := ft.Elem()
- if k := elem.Kind(); k == Ptr || k == Interface {
+ if k := elem.Kind(); k == Pointer || k == Interface {
panic("reflect.StructOf: illegal embedded field type " + ft.String())
}
name = elem.String()
@@ -2072,7 +2083,7 @@ func StructOf(fields []StructField) Type {
if len(ift.methods) > 0 {
panic("reflect.StructOf: embedded field with methods not implemented")
}
- case Ptr:
+ case Pointer:
ptr := (*ptrType)(unsafe.Pointer(ft))
if unt := ptr.uncommon(); unt != nil {
if len(unt.methods) > 0 {
@@ -2092,7 +2103,7 @@ func StructOf(fields []StructField) Type {
}
}
}
- if _, dup := fset[name]; dup {
+ if _, dup := fset[name]; dup && name != "_" {
panic("reflect.StructOf: duplicate field " + name)
}
fset[name] = struct{}{}
@@ -2141,7 +2152,7 @@ func StructOf(fields []StructField) Type {
size = align(size, uintptr(typalign))
// Make the struct type.
- var istruct interface{} = struct{}{}
+ var istruct any = struct{}{}
prototype := *(**structType)(unsafe.Pointer(&istruct))
typ := new(structType)
*typ = *prototype
@@ -2214,7 +2225,7 @@ func StructOf(fields []StructField) Type {
}
// Pad to start of this field with zeros.
if ft.offset() > off {
- n := (ft.offset() - off) / ptrSize
+ n := (ft.offset() - off) / goarch.PtrSize
prog = append(prog, 0x01, 0x00) // emit a 0 bit
if n > 1 {
prog = append(prog, 0x81) // repeat previous bit
@@ -2371,7 +2382,7 @@ func ArrayOf(length int, elem Type) Type {
}
// Make an array type.
- var iarray interface{} = [1]unsafe.Pointer{}
+ var iarray any = [1]unsafe.Pointer{}
prototype := *(**arrayType)(unsafe.Pointer(&iarray))
array := *prototype
array.tflag = typ.tflag & tflagRegularMemory
@@ -2414,11 +2425,11 @@ func ArrayOf(length int, elem Type) Type {
array.gcdata = typ.gcdata
array.ptrdata = typ.ptrdata
- case typ.kind&kindGCProg == 0 && array.size <= maxPtrmaskBytes*8*ptrSize:
+ case typ.kind&kindGCProg == 0 && array.size <= maxPtrmaskBytes*8*goarch.PtrSize:
// Element is small with pointer mask; array is still small.
// Create direct pointer mask by turning each 1 bit in elem
// into length 1 bits in larger mask.
- mask := make([]byte, (array.ptrdata/ptrSize+7)/8)
+ mask := make([]byte, (array.ptrdata/goarch.PtrSize+7)/8)
emitGCMask(mask, 0, typ, array.len)
array.gcdata = &mask[0]
@@ -2428,8 +2439,8 @@ func ArrayOf(length int, elem Type) Type {
prog := []byte{0, 0, 0, 0} // will be length of prog
prog = appendGCProg(prog, typ)
// Pad from ptrdata to size.
- elemPtrs := typ.ptrdata / ptrSize
- elemWords := typ.size / ptrSize
+ elemPtrs := typ.ptrdata / goarch.PtrSize
+ elemWords := typ.size / goarch.PtrSize
if elemPtrs < elemWords {
// Emit literal 0 bit, then repeat as needed.
prog = append(prog, 0x01, 0x00)
@@ -2520,16 +2531,16 @@ func addTypeBits(bv *bitVector, offset uintptr, t *rtype) {
}
switch Kind(t.kind & kindMask) {
- case Chan, Func, Map, Ptr, Slice, String, UnsafePointer:
+ case Chan, Func, Map, Pointer, Slice, String, UnsafePointer:
// 1 pointer at start of representation
- for bv.n < uint32(offset/uintptr(ptrSize)) {
+ for bv.n < uint32(offset/uintptr(goarch.PtrSize)) {
bv.append(0)
}
bv.append(1)
case Interface:
// 2 pointers
- for bv.n < uint32(offset/uintptr(ptrSize)) {
+ for bv.n < uint32(offset/uintptr(goarch.PtrSize)) {
bv.append(0)
}
bv.append(1)
diff --git a/libgo/go/reflect/value.go b/libgo/go/reflect/value.go
index 300ef1a..54ebd6f 100644
--- a/libgo/go/reflect/value.go
+++ b/libgo/go/reflect/value.go
@@ -5,6 +5,8 @@
package reflect
import (
+ "errors"
+ "internal/goarch"
"internal/itoa"
"internal/unsafeheader"
"math"
@@ -12,8 +14,6 @@ import (
"unsafe"
)
-const ptrSize = 4 << (^uintptr(0) >> 63) // unsafe.Sizeof(uintptr(0)) but an ideal const
-
// Value is the reflection interface to a Go value.
//
// Not all methods apply to all kinds of values. Restrictions,
@@ -91,10 +91,10 @@ func (f flag) ro() flag {
}
// pointer returns the underlying pointer represented by v.
-// v.Kind() must be Ptr, Map, Chan, Func, or UnsafePointer
-// if v.Kind() == Ptr, the base type must not be go:notinheap.
+// v.Kind() must be Pointer, Map, Chan, Func, or UnsafePointer
+// if v.Kind() == Pointer, the base type must not be go:notinheap.
func (v Value) pointer() unsafe.Pointer {
- if v.typ.size != ptrSize || !v.typ.pointers() {
+ if v.typ.size != goarch.PtrSize || !v.typ.pointers() {
panic("can't call pointer on a non-pointer Value")
}
if v.flag&flagIndir != 0 {
@@ -104,9 +104,9 @@ func (v Value) pointer() unsafe.Pointer {
}
// packEface converts v to the empty interface.
-func packEface(v Value) interface{} {
+func packEface(v Value) any {
t := v.typ
- var i interface{}
+ var i any
e := (*emptyInterface)(unsafe.Pointer(&i))
// First, fill in the data portion of the interface.
switch {
@@ -141,7 +141,7 @@ func packEface(v Value) interface{} {
}
// unpackEface converts the empty interface i to a Value.
-func unpackEface(i interface{}) Value {
+func unpackEface(i any) Value {
e := (*emptyInterface)(unsafe.Pointer(&i))
// NOTE: don't read e.word until we know whether it is really a pointer or not.
t := e.typ
@@ -272,7 +272,7 @@ func (v Value) Addr() Value {
// Preserve flagRO instead of using v.flag.ro() so that
// v.Addr().Elem() is equivalent to v (#32772)
fl := v.flag & flagRO
- return Value{v.typ.ptrTo(), v.ptr, fl | flag(Ptr)}
+ return Value{v.typ.ptrTo(), v.ptr, fl | flag(Pointer)}
}
// Bool returns v's underlying value.
@@ -349,7 +349,7 @@ func (v Value) CallSlice(in []Value) []Value {
return v.call("CallSlice", in)
}
-var callGC bool // for testing; see TestCallMethodJump
+var callGC bool // for testing; see TestCallMethodJump and TestCallArgLive
const debugReflectCall = false
@@ -596,6 +596,16 @@ func (v Value) Close() {
chanclose(v.pointer())
}
+// CanComplex reports whether Complex can be used without panicking.
+func (v Value) CanComplex() bool {
+ switch v.kind() {
+ case Complex64, Complex128:
+ return true
+ default:
+ return false
+ }
+}
+
// Complex returns v's underlying value, as a complex128.
// It panics if v's Kind is not Complex64 or Complex128
func (v Value) Complex() complex128 {
@@ -611,17 +621,17 @@ func (v Value) Complex() complex128 {
// Elem returns the value that the interface v contains
// or that the pointer v points to.
-// It panics if v's Kind is not Interface or Ptr.
+// It panics if v's Kind is not Interface or Pointer.
// It returns the zero Value if v is nil.
func (v Value) Elem() Value {
k := v.kind()
switch k {
case Interface:
- var eface interface{}
+ var eface any
if v.typ.NumMethod() == 0 {
- eface = *(*interface{})(v.ptr)
+ eface = *(*any)(v.ptr)
} else {
- eface = (interface{})(*(*interface {
+ eface = (any)(*(*interface {
M()
})(v.ptr))
}
@@ -630,9 +640,24 @@ func (v Value) Elem() Value {
x.flag |= v.flag.ro()
}
return x
- case Ptr:
+ case Pointer:
ptr := v.ptr
if v.flag&flagIndir != 0 {
+ if ifaceIndir(v.typ) {
+ // This is a pointer to a not-in-heap object. ptr points to a uintptr
+ // in the heap. That uintptr is the address of a not-in-heap object.
+ // In general, pointers to not-in-heap objects can be total junk.
+ // But Elem() is asking to dereference it, so the user has asserted
+ // that at least it is a valid pointer (not just an integer stored in
+ // a pointer slot). So let's check, to make sure that it isn't a pointer
+ // that the runtime will crash on if it sees it during GC or write barriers.
+ // Since it is a not-in-heap pointer, all pointers to the heap are
+ // forbidden! That makes the test pretty easy.
+ // See issue 48399.
+ if !verifyNotInHeapPtr(*(*uintptr)(ptr)) {
+ panic("reflect: reflect.Value.Elem on an invalid notinheap pointer")
+ }
+ }
ptr = *(*unsafe.Pointer)(ptr)
}
// The returned value's address is v's value.
@@ -681,7 +706,8 @@ func (v Value) Field(i int) Value {
}
// FieldByIndex returns the nested field corresponding to index.
-// It panics if v's Kind is not struct.
+// It panics if evaluation requires stepping through a nil
+// pointer or a field that is not a struct.
func (v Value) FieldByIndex(index []int) Value {
if len(index) == 1 {
return v.Field(index[0])
@@ -689,7 +715,7 @@ func (v Value) FieldByIndex(index []int) Value {
v.mustBe(Struct)
for i, x := range index {
if i > 0 {
- if v.Kind() == Ptr && v.typ.Elem().Kind() == Struct {
+ if v.Kind() == Pointer && v.typ.Elem().Kind() == Struct {
if v.IsNil() {
panic("reflect: indirection through nil pointer to embedded struct")
}
@@ -701,6 +727,29 @@ func (v Value) FieldByIndex(index []int) Value {
return v
}
+// FieldByIndexErr returns the nested field corresponding to index.
+// It returns an error if evaluation requires stepping through a nil
+// pointer, but panics if it must step through a field that
+// is not a struct.
+func (v Value) FieldByIndexErr(index []int) (Value, error) {
+ if len(index) == 1 {
+ return v.Field(index[0]), nil
+ }
+ v.mustBe(Struct)
+ for i, x := range index {
+ if i > 0 {
+ if v.Kind() == Pointer && v.typ.Elem().Kind() == Struct {
+ if v.IsNil() {
+ return Value{}, errors.New("reflect: indirection through nil pointer to embedded struct field " + v.typ.Elem().Name())
+ }
+ v = v.Elem()
+ }
+ }
+ v = v.Field(x)
+ }
+ return v, nil
+}
+
// FieldByName returns the struct field with the given name.
// It returns the zero Value if no field was found.
// It panics if v's Kind is not struct.
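
A hedged sketch (illustration only) of the FieldByIndexErr method added above, which reports the nil embedded pointer instead of panicking:

    package main

    import (
        "fmt"
        "reflect"
    )

    type Inner struct{ S string }
    type Outer struct{ *Inner }

    func main() {
        v := reflect.ValueOf(Outer{})
        // FieldByIndex([]int{0, 0}) would panic on the nil *Inner;
        // FieldByIndexErr returns an error instead.
        _, err := v.FieldByIndexErr([]int{0, 0})
        fmt.Println(err) // reflect: indirection through nil pointer to embedded struct field Inner
    }
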
@@ -723,6 +772,16 @@ func (v Value) FieldByNameFunc(match func(string) bool) Value {
return Value{}
}
+// CanFloat reports whether Float can be used without panicking.
+func (v Value) CanFloat() bool {
+ switch v.kind() {
+ case Float32, Float64:
+ return true
+ default:
+ return false
+ }
+}
+
// Float returns v's underlying value, as a float64.
// It panics if v's Kind is not Float32 or Float64
func (v Value) Float() float64 {
@@ -760,7 +819,7 @@ func (v Value) Index(i int) Value {
return Value{typ, val, fl}
case Slice:
- // Element flag same as Elem of Ptr.
+ // Element flag same as Elem of Pointer.
// Addressable, indirect, possibly read-only.
s := (*unsafeheader.Slice)(v.ptr)
if uint(i) >= uint(s.Len) {
@@ -784,6 +843,16 @@ func (v Value) Index(i int) Value {
panic(&ValueError{"reflect.Value.Index", v.kind()})
}
+// CanInt reports whether Int can be used without panicking.
+func (v Value) CanInt() bool {
+ switch v.kind() {
+ case Int, Int8, Int16, Int32, Int64:
+ return true
+ default:
+ return false
+ }
+}
+
// Int returns v's underlying value, as an int64.
// It panics if v's Kind is not Int, Int8, Int16, Int32, or Int64.
func (v Value) Int() int64 {
@@ -817,11 +886,11 @@ func (v Value) CanInterface() bool {
// var i interface{} = (v's underlying value)
// It panics if the Value was obtained by accessing
// unexported struct fields.
-func (v Value) Interface() (i interface{}) {
+func (v Value) Interface() (i any) {
return valueInterface(v, true)
}
-func valueInterface(v Value, safe bool) interface{} {
+func valueInterface(v Value, safe bool) any {
if v.flag == 0 {
panic(&ValueError{"reflect.Value.Interface", Invalid})
}
@@ -850,7 +919,7 @@ func valueInterface(v Value, safe bool) interface{} {
// Empty interface has one layout, all interfaces with
// methods have a second layout.
if v.NumMethod() == 0 {
- return *(*interface{})(v.ptr)
+ return *(*any)(v.ptr)
}
return *(*interface {
M()
@@ -890,7 +959,7 @@ func (v Value) InterfaceData() [2]uintptr {
func (v Value) IsNil() bool {
k := v.kind()
switch k {
- case Chan, Func, Map, Ptr, UnsafePointer:
+ case Chan, Func, Map, Pointer, UnsafePointer:
if v.flag&flagMethod != 0 {
return false
}
@@ -938,7 +1007,7 @@ func (v Value) IsZero() bool {
}
}
return true
- case Chan, Func, Interface, Map, Ptr, Slice, UnsafePointer:
+ case Chan, Func, Interface, Map, Pointer, Slice, UnsafePointer:
return v.IsNil()
case String:
return v.Len() == 0
@@ -999,15 +1068,21 @@ func (v Value) MapIndex(key Value) Value {
// considered unexported. This is consistent with the
// behavior for structs, which allow read but not write
// of unexported fields.
- key = key.assignTo("reflect.Value.MapIndex", tt.key, nil)
- var k unsafe.Pointer
- if key.flag&flagIndir != 0 {
- k = key.ptr
+ var e unsafe.Pointer
+ if key.kind() == String && tt.key.Kind() == String && tt.elem.size <= maxValSize {
+ k := *(*string)(key.ptr)
+ e = mapaccess_faststr(v.typ, v.pointer(), k)
} else {
- k = unsafe.Pointer(&key.ptr)
+ key = key.assignTo("reflect.Value.MapIndex", tt.key, nil)
+ var k unsafe.Pointer
+ if key.flag&flagIndir != 0 {
+ k = key.ptr
+ } else {
+ k = unsafe.Pointer(&key.ptr)
+ }
+ e = mapaccess(v.typ, v.pointer(), k)
}
- e := mapaccess(v.typ, v.pointer(), k)
if e == nil {
return Value{}
}
@@ -1033,11 +1108,12 @@ func (v Value) MapKeys() []Value {
if m != nil {
mlen = maplen(m)
}
- it := mapiterinit(v.typ, m)
+ var it hiter
+ mapiterinit(v.typ, m, &it)
a := make([]Value, mlen)
var i int
for i = 0; i < len(a); i++ {
- key := mapiterkey(it)
+ key := mapiterkey(&it)
if key == nil {
// Someone deleted an entry from the map since we
// called maplen above. It's a data race, but nothing
@@ -1045,59 +1121,154 @@ func (v Value) MapKeys() []Value {
break
}
a[i] = copyVal(keyType, fl, key)
- mapiternext(it)
+ mapiternext(&it)
}
return a[:i]
}
+// hiter's structure matches runtime.hiter's structure.
+// Having a clone here allows us to embed a map iterator
+// inside type MapIter so that MapIters can be re-used
+// without doing any allocations.
+type hiter struct {
+ key unsafe.Pointer
+ elem unsafe.Pointer
+ t unsafe.Pointer
+ h unsafe.Pointer
+ buckets unsafe.Pointer
+ bptr unsafe.Pointer
+ overflow *[]unsafe.Pointer
+ oldoverflow *[]unsafe.Pointer
+ startBucket uintptr
+ offset uint8
+ wrapped bool
+ B uint8
+ i uint8
+ bucket uintptr
+ checkBucket uintptr
+}
+
+func (h *hiter) initialized() bool {
+ return h.t != nil
+}
+
// A MapIter is an iterator for ranging over a map.
// See Value.MapRange.
type MapIter struct {
- m Value
- it unsafe.Pointer
+ m Value
+ hiter hiter
}
-// Key returns the key of the iterator's current map entry.
-func (it *MapIter) Key() Value {
- if it.it == nil {
+// Key returns the key of iter's current map entry.
+func (iter *MapIter) Key() Value {
+ if !iter.hiter.initialized() {
panic("MapIter.Key called before Next")
}
- if mapiterkey(it.it) == nil {
+ iterkey := mapiterkey(&iter.hiter)
+ if iterkey == nil {
panic("MapIter.Key called on exhausted iterator")
}
- t := (*mapType)(unsafe.Pointer(it.m.typ))
+ t := (*mapType)(unsafe.Pointer(iter.m.typ))
ktype := t.key
- return copyVal(ktype, it.m.flag.ro()|flag(ktype.Kind()), mapiterkey(it.it))
+ return copyVal(ktype, iter.m.flag.ro()|flag(ktype.Kind()), iterkey)
}
-// Value returns the value of the iterator's current map entry.
-func (it *MapIter) Value() Value {
- if it.it == nil {
+// SetIterKey assigns to v the key of iter's current map entry.
+// It is equivalent to v.Set(iter.Key()), but it avoids allocating a new Value.
+// As in Go, the key must be assignable to v's type.
+func (v Value) SetIterKey(iter *MapIter) {
+ if !iter.hiter.initialized() {
+ panic("reflect: Value.SetIterKey called before Next")
+ }
+ iterkey := mapiterkey(&iter.hiter)
+ if iterkey == nil {
+ panic("reflect: Value.SetIterKey called on exhausted iterator")
+ }
+
+ v.mustBeAssignable()
+ var target unsafe.Pointer
+ if v.kind() == Interface {
+ target = v.ptr
+ }
+
+ t := (*mapType)(unsafe.Pointer(iter.m.typ))
+ ktype := t.key
+
+ key := Value{ktype, iterkey, iter.m.flag | flag(ktype.Kind()) | flagIndir}
+ key = key.assignTo("reflect.MapIter.SetKey", v.typ, target)
+ typedmemmove(v.typ, v.ptr, key.ptr)
+}
+
+// Value returns the value of iter's current map entry.
+func (iter *MapIter) Value() Value {
+ if !iter.hiter.initialized() {
panic("MapIter.Value called before Next")
}
- if mapiterkey(it.it) == nil {
+ iterelem := mapiterelem(&iter.hiter)
+ if iterelem == nil {
panic("MapIter.Value called on exhausted iterator")
}
- t := (*mapType)(unsafe.Pointer(it.m.typ))
+ t := (*mapType)(unsafe.Pointer(iter.m.typ))
vtype := t.elem
- return copyVal(vtype, it.m.flag.ro()|flag(vtype.Kind()), mapiterelem(it.it))
+ return copyVal(vtype, iter.m.flag.ro()|flag(vtype.Kind()), iterelem)
+}
+
+// SetIterValue assigns to v the value of iter's current map entry.
+// It is equivalent to v.Set(iter.Value()), but it avoids allocating a new Value.
+// As in Go, the value must be assignable to v's type.
+func (v Value) SetIterValue(iter *MapIter) {
+ if !iter.hiter.initialized() {
+ panic("reflect: Value.SetIterValue called before Next")
+ }
+ iterelem := mapiterelem(&iter.hiter)
+ if iterelem == nil {
+ panic("reflect: Value.SetIterValue called on exhausted iterator")
+ }
+
+ v.mustBeAssignable()
+ var target unsafe.Pointer
+ if v.kind() == Interface {
+ target = v.ptr
+ }
+
+ t := (*mapType)(unsafe.Pointer(iter.m.typ))
+ vtype := t.elem
+
+ elem := Value{vtype, iterelem, iter.m.flag | flag(vtype.Kind()) | flagIndir}
+ elem = elem.assignTo("reflect.MapIter.SetValue", v.typ, target)
+ typedmemmove(v.typ, v.ptr, elem.ptr)
}
// Next advances the map iterator and reports whether there is another
-// entry. It returns false when the iterator is exhausted; subsequent
+// entry. It returns false when iter is exhausted; subsequent
// calls to Key, Value, or Next will panic.
-func (it *MapIter) Next() bool {
- if it.it == nil {
- it.it = mapiterinit(it.m.typ, it.m.pointer())
+func (iter *MapIter) Next() bool {
+ if !iter.m.IsValid() {
+ panic("MapIter.Next called on an iterator that does not have an associated map Value")
+ }
+ if !iter.hiter.initialized() {
+ mapiterinit(iter.m.typ, iter.m.pointer(), &iter.hiter)
} else {
- if mapiterkey(it.it) == nil {
+ if mapiterkey(&iter.hiter) == nil {
panic("MapIter.Next called on exhausted iterator")
}
- mapiternext(it.it)
+ mapiternext(&iter.hiter)
}
- return mapiterkey(it.it) != nil
+ return mapiterkey(&iter.hiter) != nil
+}
+
+// Reset modifies iter to iterate over v.
+// It panics if v's Kind is not Map and v is not the zero Value.
+// Reset(Value{}) causes iter not to refer to any map,
+// which may allow the previously iterated-over map to be garbage collected.
+func (iter *MapIter) Reset(v Value) {
+ if v.IsValid() {
+ v.mustBe(Map)
+ }
+ iter.m = v
+ iter.hiter = hiter{}
}
// MapRange returns a range iterator for a map.
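
Taken together, the embedded hiter, SetIterKey/SetIterValue and Reset let a map be walked without allocating a fresh Value per entry; a sketch (illustration only):

    package main

    import (
        "fmt"
        "reflect"
    )

    func main() {
        m := map[string]int{"a": 1, "b": 2}
        v := reflect.ValueOf(m)

        // Pre-allocated, addressable key/elem Values; SetIterKey/SetIterValue copy into them.
        k := reflect.New(v.Type().Key()).Elem()
        e := reflect.New(v.Type().Elem()).Elem()

        iter := v.MapRange()
        for iter.Next() {
            k.SetIterKey(iter)
            e.SetIterValue(iter)
            fmt.Println(k.Interface(), e.Interface())
        }

        // Reset re-points the same iterator at another map without reallocating its hiter.
        iter.Reset(reflect.ValueOf(map[string]int{"c": 3}))
        for iter.Next() {
            fmt.Println(iter.Key(), iter.Value())
        }
    }
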
@@ -1260,7 +1431,7 @@ func (v Value) OverflowUint(x uint64) bool {
// It returns uintptr instead of unsafe.Pointer so that
// code using reflect cannot obtain unsafe.Pointers
// without importing the unsafe package explicitly.
-// It panics if v's Kind is not Chan, Func, Map, Ptr, Slice, or UnsafePointer.
+// It panics if v's Kind is not Chan, Func, Map, Pointer, Slice, or UnsafePointer.
//
// If v's Kind is Func, the returned pointer is an underlying
// code pointer, but not necessarily enough to identify a
@@ -1270,22 +1441,34 @@ func (v Value) OverflowUint(x uint64) bool {
// If v's Kind is Slice, the returned pointer is to the first
// element of the slice. If the slice is nil the returned value
// is 0. If the slice is empty but non-nil the return value is non-zero.
+//
+// It's preferred to use uintptr(Value.UnsafePointer()) to get the equivalent result.
func (v Value) Pointer() uintptr {
- // TODO: deprecate
k := v.kind()
switch k {
- case Ptr:
+ case Pointer:
if v.typ.ptrdata == 0 {
- // Handle pointers to go:notinheap types directly,
- // so we never materialize such pointers as an
- // unsafe.Pointer. (Such pointers are always indirect.)
- // See issue 42076.
- return *(*uintptr)(v.ptr)
+ val := *(*uintptr)(v.ptr)
+ // Since it is a not-in-heap pointer, all pointers to the heap are
+ // forbidden! See comment in Value.Elem and issue #48399.
+ if !verifyNotInHeapPtr(val) {
+ panic("reflect: reflect.Value.Pointer on an invalid notinheap pointer")
+ }
+ return val
}
fallthrough
case Chan, Map, UnsafePointer:
return uintptr(v.pointer())
case Func:
+ if v.flag&flagMethod != 0 {
+ // As the doc comment says, the returned pointer is an
+ // underlying code pointer but not necessarily enough to
+ // identify a single function uniquely. All method expressions
+ // created via reflect have the same underlying code pointer,
+ // so their Pointers are equal. The function used here must
+ // match the one used in makeMethodValue.
+ return methodValueCallCodePtr()
+ }
p := v.pointer()
// Non-nil func value points at data block.
// First word of data block is actual code.
@@ -1499,6 +1682,25 @@ func (v Value) SetMapIndex(key, elem Value) {
v.mustBeExported()
key.mustBeExported()
tt := (*mapType)(unsafe.Pointer(v.typ))
+
+ if key.kind() == String && tt.key.Kind() == String && tt.elem.size <= maxValSize {
+ k := *(*string)(key.ptr)
+ if elem.typ == nil {
+ mapdelete_faststr(v.typ, v.pointer(), k)
+ return
+ }
+ elem.mustBeExported()
+ elem = elem.assignTo("reflect.Value.SetMapIndex", tt.elem, nil)
+ var e unsafe.Pointer
+ if elem.flag&flagIndir != 0 {
+ e = elem.ptr
+ } else {
+ e = unsafe.Pointer(&elem.ptr)
+ }
+ mapassign_faststr(v.typ, v.pointer(), k, e)
+ return
+ }
+
key = key.assignTo("reflect.Value.SetMapIndex", tt.key, nil)
var k unsafe.Pointer
if key.flag&flagIndir != 0 {
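
The new _faststr routes are transparent to callers; the case they cover is a map with string keys and small elements, e.g. (illustration only):

    package main

    import (
        "fmt"
        "reflect"
    )

    func main() {
        m := map[string]int{}
        v := reflect.ValueOf(m)

        // String keys with small elements now go through mapassign_faststr,
        // mapaccess_faststr and mapdelete_faststr instead of the generic key-copy path.
        v.SetMapIndex(reflect.ValueOf("answer"), reflect.ValueOf(42))
        fmt.Println(v.MapIndex(reflect.ValueOf("answer")))        // 42
        v.SetMapIndex(reflect.ValueOf("answer"), reflect.Value{}) // zero Value deletes the key
        fmt.Println(m)                                            // map[]
    }
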
@@ -1744,6 +1946,16 @@ func (v Value) Type() Type {
return toType(m.mtyp)
}
+// CanUint reports whether Uint can be used without panicking.
+func (v Value) CanUint() bool {
+ switch v.kind() {
+ case Uint, Uint8, Uint16, Uint32, Uint64, Uintptr:
+ return true
+ default:
+ return false
+ }
+}
+
// Uint returns v's underlying value, as a uint64.
// It panics if v's Kind is not Uint, Uintptr, Uint8, Uint16, Uint32, or Uint64.
func (v Value) Uint() uint64 {
@@ -1771,11 +1983,12 @@ func (v Value) Uint() uint64 {
// which ensures cmd/compile can recognize unsafe.Pointer(v.UnsafeAddr())
// and make an exception.
-// UnsafeAddr returns a pointer to v's data.
+// UnsafeAddr returns a pointer to v's data, as a uintptr.
// It is for advanced clients that also import the "unsafe" package.
// It panics if v is not addressable.
+//
+// It's preferred to use uintptr(Value.Addr().UnsafePointer()) to get the equivalent result.
func (v Value) UnsafeAddr() uintptr {
- // TODO: deprecate
if v.typ == nil {
panic(&ValueError{"reflect.Value.UnsafeAddr", Invalid})
}
@@ -1785,6 +1998,57 @@ func (v Value) UnsafeAddr() uintptr {
return uintptr(v.ptr)
}
+// UnsafePointer returns v's value as an unsafe.Pointer.
+// It panics if v's Kind is not Chan, Func, Map, Pointer, Slice, or UnsafePointer.
+//
+// If v's Kind is Func, the returned pointer is an underlying
+// code pointer, but not necessarily enough to identify a
+// single function uniquely. The only guarantee is that the
+// result is zero if and only if v is a nil func Value.
+//
+// If v's Kind is Slice, the returned pointer is to the first
+// element of the slice. If the slice is nil the returned value
+// is nil. If the slice is empty but non-nil the return value is non-nil.
+func (v Value) UnsafePointer() unsafe.Pointer {
+ k := v.kind()
+ switch k {
+ case Pointer:
+ if v.typ.ptrdata == 0 {
+ // Since it is a not-in-heap pointer, all pointers to the heap are
+ // forbidden! See comment in Value.Elem and issue #48399.
+ if !verifyNotInHeapPtr(*(*uintptr)(v.ptr)) {
+ panic("reflect: reflect.Value.UnsafePointer on an invalid notinheap pointer")
+ }
+ return *(*unsafe.Pointer)(v.ptr)
+ }
+ fallthrough
+ case Chan, Map, UnsafePointer:
+ return v.pointer()
+ case Func:
+ if v.flag&flagMethod != 0 {
+ // As the doc comment says, the returned pointer is an
+ // underlying code pointer but not necessarily enough to
+ // identify a single function uniquely. All method expressions
+ // created via reflect have the same underlying code pointer,
+ // so their Pointers are equal. The function used here must
+ // match the one used in makeMethodValue.
+ code := methodValueCallCodePtr()
+ return *(*unsafe.Pointer)(unsafe.Pointer(&code))
+ }
+ p := v.pointer()
+ // Non-nil func value points at data block.
+ // First word of data block is actual code.
+ if p != nil {
+ p = *(*unsafe.Pointer)(p)
+ }
+ return p
+
+ case Slice:
+ return (*unsafeheader.Slice)(v.ptr).Data
+ }
+ panic(&ValueError{"reflect.Value.UnsafePointer", v.kind()})
+}
+
// StringHeader is the runtime representation of a string.
// It cannot be used safely or portably and its representation may
// change in a later release.
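
A sketch (illustration only) of the UnsafePointer method added above, which the updated Pointer and UnsafeAddr docs now point to as the preferred form:

    package main

    import (
        "fmt"
        "reflect"
        "unsafe"
    )

    func main() {
        s := []byte("hello")
        v := reflect.ValueOf(s)

        // For a slice, UnsafePointer returns a pointer to the first element,
        // already typed as unsafe.Pointer rather than a bare uintptr.
        var p unsafe.Pointer = v.UnsafePointer()
        fmt.Println(*(*byte)(p))               // 104, i.e. 'h'
        fmt.Println(v.Pointer() == uintptr(p)) // true
    }
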
@@ -1840,11 +2104,12 @@ func grow(s Value, extra int) (Value, int, int) {
if m == 0 {
m = extra
} else {
+ const threshold = 256
for m < i1 {
- if i0 < 1024 {
+ if i0 < threshold {
m += m
} else {
- m += m / 4
+ m += (m + 3*threshold) / 4
}
}
}
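
The growth rule now switches over at 256 elements and eases from doubling toward +25% instead of jumping at 1024; a rough stand-alone sketch of the shape of the new rule (not the library code itself, which loops on the pre-append length i0):

    package main

    import "fmt"

    func next(m int) int {
        const threshold = 256
        if m < threshold {
            return m * 2 // small slices still double
        }
        return m + (m+3*threshold)/4 // 2x at the threshold, tending to 1.25x for large m
    }

    func main() {
        for c := 1; c < 5000; c = next(c) {
            fmt.Println(c)
        }
    }
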
@@ -2151,7 +2416,7 @@ func MakeMapWithSize(typ Type, n int) Value {
// If v is a nil pointer, Indirect returns a zero Value.
// If v is not a pointer, Indirect returns v.
func Indirect(v Value) Value {
- if v.Kind() != Ptr {
+ if v.Kind() != Pointer {
return v
}
return v.Elem()
@@ -2159,7 +2424,7 @@ func Indirect(v Value) Value {
// ValueOf returns a new Value initialized to the concrete value
// stored in the interface i. ValueOf(nil) returns the zero Value.
-func ValueOf(i interface{}) Value {
+func ValueOf(i any) Value {
if i == nil {
return Value{}
}
@@ -2204,7 +2469,7 @@ const maxZero = 1024
var zeroVal [maxZero]byte
// New returns a Value representing a pointer to a new zero value
-// for the specified type. That is, the returned Value's Type is PtrTo(typ).
+// for the specified type. That is, the returned Value's Type is PointerTo(typ).
func New(typ Type) Value {
if typ == nil {
panic("reflect: New(nil)")
@@ -2216,14 +2481,14 @@ func New(typ Type) Value {
panic("reflect: New of type that may not be allocated in heap (possibly undefined cgo C type)")
}
ptr := unsafe_New(t)
- fl := flag(Ptr)
+ fl := flag(Pointer)
return Value{pt, ptr, fl}
}
// NewAt returns a Value representing a pointer to a value of the
// specified type, using p as that pointer.
func NewAt(typ Type, p unsafe.Pointer) Value {
- fl := flag(Ptr)
+ fl := flag(Pointer)
t := typ.(*rtype)
return Value{t.ptrTo(), p, fl}
}
@@ -2257,7 +2522,7 @@ func (v Value) assignTo(context string, dst *rtype, target unsafe.Pointer) Value
}
x := valueInterface(v, false)
if dst.NumMethod() == 0 {
- *(*interface{})(target) = x
+ *(*any)(target) = x
} else {
ifaceE2I(dst, x, target)
}
@@ -2292,10 +2557,9 @@ func (v Value) CanConvert(t Type) bool {
// Currently the only conversion that is OK in terms of type
// but that can panic depending on the value is converting
// from slice to pointer-to-array.
- if vt.Kind() == Slice && t.Kind() == Ptr && t.Elem().Kind() == Array {
+ if vt.Kind() == Slice && t.Kind() == Pointer && t.Elem().Kind() == Array {
n := t.Elem().Len()
- h := (*unsafeheader.Slice)(v.ptr)
- if n > h.Len {
+ if n > v.Len() {
return false
}
}
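
Behaviour is unchanged by the switch to v.Len(); for reference, the slice-to-array-pointer case CanConvert is checking (illustration only):

    package main

    import (
        "fmt"
        "reflect"
    )

    func main() {
        v := reflect.ValueOf([]int{1, 2, 3})

        // The types always allow []int -> *[N]int, but the conversion panics if the
        // slice is shorter than N, so CanConvert also checks the length.
        fmt.Println(v.CanConvert(reflect.TypeOf((*[2]int)(nil)))) // true
        fmt.Println(v.CanConvert(reflect.TypeOf((*[4]int)(nil)))) // false: slice too short
    }
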
@@ -2363,7 +2627,7 @@ func convertOp(dst, src *rtype) func(Value, Type) Value {
}
// "x is a slice, T is a pointer-to-array type,
// and the slice and array types have identical element types."
- if dst.Kind() == Ptr && dst.Elem().Kind() == Array && src.Elem() == dst.Elem().Elem() {
+ if dst.Kind() == Pointer && dst.Elem().Kind() == Array && src.Elem() == dst.Elem().Elem() {
return cvtSliceArrayPtr
}
@@ -2379,8 +2643,8 @@ func convertOp(dst, src *rtype) func(Value, Type) Value {
}
// dst and src are non-defined pointer types with same underlying base type.
- if dst.Kind() == Ptr && dst.Name() == "" &&
- src.Kind() == Ptr && src.Name() == "" &&
+ if dst.Kind() == Pointer && dst.Name() == "" &&
+ src.Kind() == Pointer && src.Name() == "" &&
haveIdenticalUnderlyingType(dst.Elem().common(), src.Elem().common(), false) {
return cvtDirect
}
@@ -2562,11 +2826,11 @@ func cvtStringRunes(v Value, t Type) Value {
// convertOp: []T -> *[N]T
func cvtSliceArrayPtr(v Value, t Type) Value {
n := t.Elem().Len()
- h := (*unsafeheader.Slice)(v.ptr)
- if n > h.Len {
- panic("reflect: cannot convert slice with length " + itoa.Itoa(h.Len) + " to pointer to array with length " + itoa.Itoa(n))
+ if n > v.Len() {
+ panic("reflect: cannot convert slice with length " + itoa.Itoa(v.Len()) + " to pointer to array with length " + itoa.Itoa(n))
}
- return Value{t.common(), h.Data, v.flag&^(flagIndir|flagAddr|flagKindMask) | flag(Ptr)}
+ h := (*unsafeheader.Slice)(v.ptr)
+ return Value{t.common(), h.Data, v.flag&^(flagIndir|flagAddr|flagKindMask) | flag(Pointer)}
}
// convertOp: direct copy
@@ -2589,7 +2853,7 @@ func cvtT2I(v Value, typ Type) Value {
target := unsafe_New(typ.common())
x := valueInterface(v, false)
if typ.NumMethod() == 0 {
- *(*interface{})(target) = x
+ *(*any)(target) = x
} else {
ifaceE2I(typ.(*rtype), x, target)
}
@@ -2632,24 +2896,31 @@ func makemap(t *rtype, cap int) (m unsafe.Pointer)
func mapaccess(t *rtype, m unsafe.Pointer, key unsafe.Pointer) (val unsafe.Pointer)
//go:noescape
+func mapaccess_faststr(t *rtype, m unsafe.Pointer, key string) (val unsafe.Pointer)
+
+//go:noescape
func mapassign(t *rtype, m unsafe.Pointer, key, val unsafe.Pointer)
//go:noescape
+func mapassign_faststr(t *rtype, m unsafe.Pointer, key string, val unsafe.Pointer)
+
+//go:noescape
func mapdelete(t *rtype, m unsafe.Pointer, key unsafe.Pointer)
-// m escapes into the return value, but the caller of mapiterinit
-// doesn't let the return value escape.
//go:noescape
-func mapiterinit(t *rtype, m unsafe.Pointer) unsafe.Pointer
+func mapdelete_faststr(t *rtype, m unsafe.Pointer, key string)
+
+//go:noescape
+func mapiterinit(t *rtype, m unsafe.Pointer, it *hiter)
//go:noescape
-func mapiterkey(it unsafe.Pointer) (key unsafe.Pointer)
+func mapiterkey(it *hiter) (key unsafe.Pointer)
//go:noescape
-func mapiterelem(it unsafe.Pointer) (elem unsafe.Pointer)
+func mapiterelem(it *hiter) (elem unsafe.Pointer)
//go:noescape
-func mapiternext(it unsafe.Pointer)
+func mapiternext(it *hiter)
//go:noescape
func maplen(m unsafe.Pointer) int
@@ -2657,7 +2928,7 @@ func maplen(m unsafe.Pointer) int
//go:linkname call runtime.reflectcall
func call(typ *funcType, fnaddr unsafe.Pointer, isInterface bool, isMethod bool, params *unsafe.Pointer, results *unsafe.Pointer)
-func ifaceE2I(t *rtype, src interface{}, dst unsafe.Pointer)
+func ifaceE2I(t *rtype, src any, dst unsafe.Pointer)
// memmove copies size bytes to dst from src. No write barriers are used.
//go:noescape
@@ -2679,10 +2950,12 @@ func typedslicecopy(elemType *rtype, dst, src unsafeheader.Slice) int
//go:noescape
func typehash(t *rtype, p unsafe.Pointer, h uintptr) uintptr
+func verifyNotInHeapPtr(p uintptr) bool
+
// Dummy annotation marking that the value x escapes,
// for use in cases where the reflect code is so clever that
// the compiler cannot follow.
-func escapes(x interface{}) {
+func escapes(x any) {
if dummy.b {
dummy.x = x
}
@@ -2690,5 +2963,5 @@ func escapes(x interface{}) {
var dummy struct {
b bool
- x interface{}
+ x any
}
diff --git a/libgo/go/reflect/visiblefields.go b/libgo/go/reflect/visiblefields.go
index 1a2b535..9375faa 100644
--- a/libgo/go/reflect/visiblefields.go
+++ b/libgo/go/reflect/visiblefields.go
@@ -92,7 +92,7 @@ func (w *visibleFieldsWalker) walk(t Type) {
w.fields = append(w.fields, f)
}
if f.Anonymous {
- if f.Type.Kind() == Ptr {
+ if f.Type.Kind() == Pointer {
f.Type = f.Type.Elem()
}
if f.Type.Kind() == Struct {
diff --git a/libgo/go/reflect/visiblefields_test.go b/libgo/go/reflect/visiblefields_test.go
index 915bbee..fdedc21 100644
--- a/libgo/go/reflect/visiblefields_test.go
+++ b/libgo/go/reflect/visiblefields_test.go
@@ -6,6 +6,7 @@ package reflect_test
import (
. "reflect"
+ "strings"
"testing"
)
@@ -16,7 +17,7 @@ type structField struct {
var fieldsTests = []struct {
testName string
- val interface{}
+ val any
expect []structField
}{{
testName: "SimpleStruct",
@@ -278,7 +279,7 @@ type RS3 struct {
RS1
}
-type M map[string]interface{}
+type M map[string]any
type Rec1 struct {
*Rec2
@@ -328,3 +329,21 @@ func TestFields(t *testing.T) {
})
}
}
+
+// Must not panic with nil embedded pointer.
+func TestFieldByIndexErr(t *testing.T) {
+ type A struct {
+ S string
+ }
+ type B struct {
+ *A
+ }
+ v := ValueOf(B{})
+ _, err := v.FieldByIndexErr([]int{0, 0})
+ if err == nil {
+ t.Fatal("expected error")
+ }
+ if !strings.Contains(err.Error(), "embedded struct field A") {
+ t.Fatal(err)
+ }
+}