"flag"
"fmt"
"go/token"
+ "internal/abi"
"internal/goarch"
"internal/testenv"
"io"
"math"
"math/rand"
+ "net"
"os"
. "reflect"
"reflect/internal/example1"
"unsafe"
)
+const bucketCount = abi.MapBucketCount
+
var sink any
func TestBool(t *testing.T) {
assert(t, v.Type().String(), "func()")
}
+// TestGrow exercises Value.Grow: the panic on unaddressable values, nil
+// preservation for zero growth, no-op growth when capacity already
+// suffices, append emulation, growth rate, and zeroing of spare capacity.
+func TestGrow(t *testing.T) {
+ v := ValueOf([]int(nil))
+ shouldPanic("reflect.Value.Grow using unaddressable value", func() { v.Grow(0) })
+ v = ValueOf(new([]int)).Elem()
+ // Growing a nil slice by zero must leave it nil.
+ v.Grow(0)
+ if !v.IsNil() {
+ t.Errorf("v.Grow(0) should still be nil")
+ }
+ v.Grow(1)
+ if v.Cap() == 0 {
+ t.Errorf("v.Cap = %v, want non-zero", v.Cap())
+ }
+ // Growing within existing capacity must not reallocate.
+ want := v.UnsafePointer()
+ v.Grow(1)
+ got := v.UnsafePointer()
+ if got != want {
+ t.Errorf("noop v.Grow should not change pointers")
+ }
+
+ t.Run("Append", func(t *testing.T) {
+ // Grow+SetLen+Set emulates the built-in append; the result must
+ // match a plain append of the same values.
+ var got, want []T
+ v := ValueOf(&got).Elem()
+ appendValue := func(vt T) {
+ v.Grow(1)
+ v.SetLen(v.Len() + 1)
+ v.Index(v.Len() - 1).Set(ValueOf(vt))
+ }
+ for i := 0; i < 10; i++ {
+ vt := T{i, float64(i), strconv.Itoa(i), &i}
+ appendValue(vt)
+ want = append(want, vt)
+ }
+ if !DeepEqual(got, want) {
+ t.Errorf("value mismatch:\ngot %v\nwant %v", got, want)
+ }
+ })
+
+ t.Run("Rate", func(t *testing.T) {
+ // Grow's capacity growth must track the built-in append's rate.
+ var b []byte
+ v := ValueOf(new([]byte)).Elem()
+ for i := 0; i < 10; i++ {
+ b = append(b[:cap(b)], make([]byte, 1)...)
+ v.SetLen(v.Cap())
+ v.Grow(1)
+ if v.Cap() != cap(b) {
+ t.Errorf("v.Cap = %v, want %v", v.Cap(), cap(b))
+ }
+ }
+ })
+
+ t.Run("ZeroCapacity", func(t *testing.T) {
+ // Spare capacity handed out by Grow must be zeroed, even after a
+ // previous iteration filled its buffer with 0xff and a GC ran.
+ for i := 0; i < 10; i++ {
+ v := ValueOf(new([]byte)).Elem()
+ v.Grow(61)
+ b := v.Bytes()
+ b = b[:cap(b)]
+ for i, c := range b {
+ if c != 0 {
+ t.Fatalf("Value.Bytes[%d] = 0x%02x, want 0x00", i, c)
+ }
+ b[i] = 0xff
+ }
+ runtime.GC()
+ }
+ })
+}
+
var appendTests = []struct {
orig, extra []int
}{
+ {nil, nil},
+ {[]int{}, nil},
+ {nil, []int{}},
+ {[]int{}, []int{}},
+ {nil, []int{22}},
+ {[]int{}, []int{22}},
+ {make([]int, 2, 4), nil},
+ {make([]int, 2, 4), []int{}},
{make([]int, 2, 4), []int{22}},
{make([]int, 2, 4), []int{22, 33, 44}},
}
-func sameInts(x, y []int) bool {
- if len(x) != len(y) {
- return false
- }
- for i, xx := range x {
- if xx != y[i] {
- return false
- }
- }
- return true
-}
-
func TestAppend(t *testing.T) {
for i, test := range appendTests {
origLen, extraLen := len(test.orig), len(test.extra)
}
// Convert extra from []int to *SliceValue.
e1 := ValueOf(test.extra)
+
// Test Append.
- a0 := ValueOf(test.orig)
- have0 := Append(a0, e0...).Interface().([]int)
- if !sameInts(have0, want) {
- t.Errorf("Append #%d: have %v, want %v (%p %p)", i, have0, want, test.orig, have0)
+ a0 := ValueOf(&test.orig).Elem()
+ have0 := Append(a0, e0...)
+ if have0.CanAddr() {
+ t.Errorf("Append #%d: have slice should not be addressable", i)
+ }
+ if !DeepEqual(have0.Interface(), want) {
+ t.Errorf("Append #%d: have %v, want %v (%p %p)", i, have0, want, test.orig, have0.Interface())
}
// Check that the orig and extra slices were not modified.
+ if a0.Len() != len(test.orig) {
+ t.Errorf("Append #%d: a0.Len: have %d, want %d", i, a0.Len(), origLen)
+ }
if len(test.orig) != origLen {
t.Errorf("Append #%d origLen: have %v, want %v", i, len(test.orig), origLen)
}
if len(test.extra) != extraLen {
t.Errorf("Append #%d extraLen: have %v, want %v", i, len(test.extra), extraLen)
}
+
// Test AppendSlice.
- a1 := ValueOf(test.orig)
- have1 := AppendSlice(a1, e1).Interface().([]int)
- if !sameInts(have1, want) {
+ a1 := ValueOf(&test.orig).Elem()
+ have1 := AppendSlice(a1, e1)
+ if have1.CanAddr() {
+ t.Errorf("AppendSlice #%d: have slice should not be addressable", i)
+ }
+ if !DeepEqual(have1.Interface(), want) {
t.Errorf("AppendSlice #%d: have %v, want %v", i, have1, want)
}
// Check that the orig and extra slices were not modified.
+ // Check a1's length via the Value API as well; the message must report
+ // a1.Len (not a0.Len, which belongs to the Append branch above).
+ if a1.Len() != len(test.orig) {
+ t.Errorf("AppendSlice #%d: a1.Len: have %d, want %d", i, a1.Len(), origLen)
+ }
if len(test.orig) != origLen {
t.Errorf("AppendSlice #%d origLen: have %v, want %v", i, len(test.orig), origLen)
}
if len(test.extra) != extraLen {
t.Errorf("AppendSlice #%d extraLen: have %v, want %v", i, len(test.extra), extraLen)
}
+
+ // Test Append and AppendSlice with unexported value.
+ ax := ValueOf(struct{ x []int }{test.orig}).Field(0)
+ shouldPanic("using unexported field", func() { Append(ax, e0...) })
+ shouldPanic("using unexported field", func() { AppendSlice(ax, e1) })
}
}
}
}
-func BenchmarkDeepEqual(b *testing.B) {
- for _, bb := range deepEqualPerfTests {
- b.Run(ValueOf(bb.x).Type().String(), func(b *testing.B) {
- b.ReportAllocs()
- for i := 0; i < b.N; i++ {
- sink = DeepEqual(bb.x, bb.y)
- }
- })
- }
-}
-
func check2ndField(x any, offs uintptr, t *testing.T) {
s := ValueOf(x)
f := s.Type().Field(1)
NotNil(fi, t)
}
+// setField returns a copy of in with the V-typed value at the given byte
+// offset overwritten by value, via unsafe pointer arithmetic. It is a test
+// helper for producing struct values whose blank (_) fields are non-zero.
+func setField[S, V any](in S, offset uintptr, value V) (out S) {
+ *(*V)(unsafe.Add(unsafe.Pointer(&in), offset)) = value
+ return in
+}
+
func TestIsZero(t *testing.T) {
for i, tt := range []struct {
x any
{float32(1.2), false},
{float64(0), true},
{float64(1.2), false},
- {math.Copysign(0, -1), false},
+ {math.Copysign(0, -1), true},
{complex64(0), true},
{complex64(1.2), false},
{complex128(0), true},
{complex128(1.2), false},
- {complex(math.Copysign(0, -1), 0), false},
- {complex(0, math.Copysign(0, -1)), false},
- {complex(math.Copysign(0, -1), math.Copysign(0, -1)), false},
+ {complex(math.Copysign(0, -1), 0), true},
+ {complex(0, math.Copysign(0, -1)), true},
+ {complex(math.Copysign(0, -1), math.Copysign(0, -1)), true},
{uintptr(0), true},
{uintptr(128), false},
// Array
{[3][]int{{1}}, false}, // incomparable array
{[1 << 12]byte{}, true},
{[1 << 12]byte{1}, false},
+ {[1]struct{ p *int }{}, true},
+ {[1]struct{ p *int }{{new(int)}}, false},
{[3]Value{}, true},
{[3]Value{{}, ValueOf(0), {}}, false},
// Chan
{struct{ s []int }{[]int{1}}, false}, // incomparable struct
{struct{ Value }{}, true},
{struct{ Value }{ValueOf(0)}, false},
+ {struct{ _, a, _ uintptr }{}, true}, // comparable struct with blank fields
+ {setField(struct{ _, a, _ uintptr }{}, 0*unsafe.Sizeof(uintptr(0)), 1), true},
+ {setField(struct{ _, a, _ uintptr }{}, 1*unsafe.Sizeof(uintptr(0)), 1), false},
+ {setField(struct{ _, a, _ uintptr }{}, 2*unsafe.Sizeof(uintptr(0)), 1), true},
+ {struct{ _, a, _ func() }{}, true}, // incomparable struct with blank fields
+ {setField(struct{ _, a, _ func() }{}, 0*unsafe.Sizeof((func())(nil)), func() {}), true},
+ {setField(struct{ _, a, _ func() }{}, 1*unsafe.Sizeof((func())(nil)), func() {}), false},
+ {setField(struct{ _, a, _ func() }{}, 2*unsafe.Sizeof((func())(nil)), func() {}), true},
// UnsafePointer
{(unsafe.Pointer)(nil), true},
{(unsafe.Pointer)(new(int)), false},
}()
}
-func BenchmarkIsZero(b *testing.B) {
- source := ValueOf(struct {
- ArrayComparable [4]T
- ArrayIncomparable [4]_Complex
- StructComparable T
- StructIncomparable _Complex
- }{})
-
- for i := 0; i < source.NumField(); i++ {
- name := source.Type().Field(i).Name
- value := source.Field(i)
- b.Run(name, func(b *testing.B) {
- for i := 0; i < b.N; i++ {
- sink = value.IsZero()
- }
- })
- }
-}
-
-func BenchmarkSetZero(b *testing.B) {
- source := ValueOf(new(struct {
- Bool bool
- Int int64
- Uint uint64
- Float float64
- Complex complex128
- Array [4]Value
- Chan chan Value
- Func func() Value
- Interface interface{ String() string }
- Map map[string]Value
- Pointer *Value
- Slice []Value
- String string
- Struct Value
- })).Elem()
-
- for i := 0; i < source.NumField(); i++ {
- name := source.Type().Field(i).Name
- value := source.Field(i)
- zero := Zero(value.Type())
- b.Run(name+"/Direct", func(b *testing.B) {
- for i := 0; i < b.N; i++ {
- value.SetZero()
- }
- })
- b.Run(name+"/CachedZero", func(b *testing.B) {
- for i := 0; i < b.N; i++ {
- value.Set(zero)
- }
- })
- b.Run(name+"/NewZero", func(b *testing.B) {
- for i := 0; i < b.N; i++ {
- value.Set(Zero(value.Type()))
- }
- })
+// TestInternalIsZero calls the internal zero-check helper over a range of
+// lengths (256+7 up to 512) and small alignment offsets (0-7) to exercise
+// its different chunked code paths on an all-zero buffer.
+func TestInternalIsZero(t *testing.T) {
+ b := make([]byte, 512)
+ for a := 0; a < 8; a++ {
+ for i := 256 + 7; i <= 512-a; i++ {
+ InternalIsZero(b[a : a+i])
+ }
+ }
}
if i, ok := cv.Recv(); i.Int() != 0 || ok {
t.Errorf("after close Recv %d, %t", i.Int(), ok)
}
+ // Closing a read-only channel
+ shouldPanic("", func() {
+ c := make(<-chan int, 1)
+ cv := ValueOf(c)
+ cv.Close()
+ })
}
// check creation of unbuffered channel
}
}
-func BenchmarkSelect(b *testing.B) {
- channel := make(chan int)
- close(channel)
- var cases []SelectCase
- for i := 0; i < 8; i++ {
- cases = append(cases, SelectCase{
- Dir: SelectRecv,
- Chan: ValueOf(channel),
- })
- }
- for _, numCases := range []int{1, 4, 8} {
- b.Run(strconv.Itoa(numCases), func(b *testing.B) {
- b.ReportAllocs()
- for i := 0; i < b.N; i++ {
- _, _, _ = Select(cases[:numCases])
- }
- })
- }
-}
-
// selectWatch and the selectWatcher are a watchdog mechanism for running Select.
// If the selectWatcher notices that the select has been blocked for >1 second, it prints
// an error describing the select and panics the entire test binary.
// fmtSelect formats the information about a single select test.
func fmtSelect(info []caseInfo) string {
- var buf bytes.Buffer
+ var buf strings.Builder
fmt.Fprintf(&buf, "\nselect {\n")
for i, cas := range info {
fmt.Fprintf(&buf, "%d: %s", i, cas.desc)
runtime.KeepAlive(v)
}
-func BenchmarkCall(b *testing.B) {
- fv := ValueOf(func(a, b string) {})
- b.ReportAllocs()
- b.RunParallel(func(pb *testing.PB) {
- args := []Value{ValueOf("a"), ValueOf("b")}
- for pb.Next() {
- fv.Call(args)
- }
- })
-}
-
-type myint int64
-
-func (i *myint) inc() {
- *i = *i + 1
-}
-
-func BenchmarkCallMethod(b *testing.B) {
- b.ReportAllocs()
- z := new(myint)
-
- v := ValueOf(z.inc)
- for i := 0; i < b.N; i++ {
- v.Call(nil)
- }
-}
-
-func BenchmarkCallArgCopy(b *testing.B) {
- byteArray := func(n int) Value {
- return Zero(ArrayOf(n, TypeOf(byte(0))))
- }
- sizes := [...]struct {
- fv Value
- arg Value
- }{
- {ValueOf(func(a [128]byte) {}), byteArray(128)},
- {ValueOf(func(a [256]byte) {}), byteArray(256)},
- {ValueOf(func(a [1024]byte) {}), byteArray(1024)},
- {ValueOf(func(a [4096]byte) {}), byteArray(4096)},
- {ValueOf(func(a [65536]byte) {}), byteArray(65536)},
- }
- for _, size := range sizes {
- bench := func(b *testing.B) {
- args := []Value{size.arg}
- b.SetBytes(int64(size.arg.Len()))
- b.ResetTimer()
- b.RunParallel(func(pb *testing.PB) {
- for pb.Next() {
- size.fv.Call(args)
- }
- })
- }
- name := fmt.Sprintf("size=%v", size.arg.Len())
- b.Run(name, bench)
- }
-}
-
func TestMakeFunc(t *testing.T) {
f := dummy
fv := MakeFunc(TypeOf(f), func(in []Value) []Value { return in })
t.Errorf("NoArgs returned %d values; want 0", n)
}
+ _, ok = TypeOf(&p).MethodByName("AA")
+ if ok {
+ t.Errorf(`MethodByName("AA") should have failed`)
+ }
+
+ _, ok = TypeOf(&p).MethodByName("ZZ")
+ if ok {
+ t.Errorf(`MethodByName("ZZ") should have failed`)
+ }
+
// Curried method of value.
tfunc := TypeOf((func(int) int)(nil))
v := ValueOf(p).Method(1)
f() (int32, int8)
}
-var unexpi unexpI = new(unexp)
-
+// TestUnexportedMethods verifies that unexported methods are excluded from
+// a concrete type's method set, while an interface type still reports its
+// declared method.
func TestUnexportedMethods(t *testing.T) {
- typ := TypeOf(unexpi)
-
+ typ := TypeOf(new(unexp))
if got := typ.NumMethod(); got != 0 {
t.Errorf("NumMethod=%d, want 0 satisfied methods", got)
}
+
+ // The interface type itself still counts its one declared method.
+ typ = TypeOf((*unexpI)(nil))
+ if got := typ.Elem().NumMethod(); got != 1 {
+ t.Errorf("NumMethod=%d, want 1 satisfied methods", got)
+ }
}
type InnerInt struct {
}
}
-func BenchmarkPtrTo(b *testing.B) {
- // Construct a type with a zero ptrToThis.
- type T struct{ int }
- t := SliceOf(TypeOf(T{}))
- ptrToThis := ValueOf(t).Elem().FieldByName("ptrToThis")
- if !ptrToThis.IsValid() {
- b.Fatalf("%v has no ptrToThis field; was it removed from rtype?", t)
- }
- if ptrToThis.Int() != 0 {
- b.Fatalf("%v.ptrToThis unexpectedly nonzero", t)
- }
- b.ResetTimer()
-
- // Now benchmark calling PointerTo on it: we'll have to hit the ptrMap cache on
- // every call.
- b.RunParallel(func(pb *testing.PB) {
- for pb.Next() {
- PointerTo(t)
- }
- })
-}
-
func TestAddr(t *testing.T) {
var p struct {
X, Y int
}
func TestVariadic(t *testing.T) {
- var b bytes.Buffer
+ var b strings.Builder
V := ValueOf
b.Reset()
{V(MyString("runes♝")), V(MyRunes("runes♝"))},
{V(MyRunes("runes♕")), V(MyString("runes♕"))},
+ // slice to array
+ {V([]byte(nil)), V([0]byte{})},
+ {V([]byte{}), V([0]byte{})},
+ {V([]byte{1}), V([1]byte{1})},
+ {V([]byte{1, 2}), V([2]byte{1, 2})},
+ {V([]byte{1, 2, 3}), V([3]byte{1, 2, 3})},
+ {V(MyBytes([]byte(nil))), V([0]byte{})},
+ {V(MyBytes{}), V([0]byte{})},
+ {V(MyBytes{1}), V([1]byte{1})},
+ {V(MyBytes{1, 2}), V([2]byte{1, 2})},
+ {V(MyBytes{1, 2, 3}), V([3]byte{1, 2, 3})},
+ {V([]byte(nil)), V(MyBytesArray0{})},
+ {V([]byte{}), V(MyBytesArray0([0]byte{}))},
+ {V([]byte{1, 2, 3, 4}), V(MyBytesArray([4]byte{1, 2, 3, 4}))},
+ {V(MyBytes{}), V(MyBytesArray0([0]byte{}))},
+ {V(MyBytes{5, 6, 7, 8}), V(MyBytesArray([4]byte{5, 6, 7, 8}))},
+ {V([]MyByte{}), V([0]MyByte{})},
+ {V([]MyByte{1, 2}), V([2]MyByte{1, 2})},
+
// slice to array pointer
{V([]byte(nil)), V((*[0]byte)(nil))},
{V([]byte{}), V(new([0]byte))},
// cannot convert mismatched array sizes
{V([2]byte{}), V([2]byte{})},
{V([3]byte{}), V([3]byte{})},
+ {V(MyBytesArray0{}), V([0]byte{})},
+ {V([0]byte{}), V(MyBytesArray0{})},
// cannot convert other instances
{V((**byte)(nil)), V((**byte)(nil))},
shouldPanic("reflect: cannot convert slice with length 4 to pointer to array with length 8", func() {
_ = v.Convert(pt)
})
+
+ if v.CanConvert(pt.Elem()) {
+ t.Errorf("slice with length 4 should not be convertible to [8]byte")
+ }
+ shouldPanic("reflect: cannot convert slice with length 4 to array with length 8", func() {
+ _ = v.Convert(pt.Elem())
+ })
+}
+
+// TestConvertSlice2Array checks that converting a slice to an array type
+// produces a non-addressable copy that does not alias the slice's backing
+// memory: later writes through the slice must not be visible in the array.
+func TestConvertSlice2Array(t *testing.T) {
+ s := make([]int, 4)
+ p := [4]int{}
+ pt := TypeOf(p)
+ ov := ValueOf(s)
+ v := ov.Convert(pt)
+ // Converting a slice to non-empty array needs to return
+ // a non-addressable copy of the original memory.
+ if v.CanAddr() {
+ t.Fatalf("convert slice to non-empty array returned an addressable copy")
+ }
+ // Mutate the source slice; the converted array must be unaffected.
+ for i := range s {
+ ov.Index(i).Set(ValueOf(i + 1))
+ }
+ for i := range s {
+ if v.Index(i).Int() != 0 {
+ t.Fatalf("slice (%v) mutation visible in converted result (%v)", ov, v)
+ }
+ }
+}
var gFloat32 float32
FuncOf(in, nil, false)
}
-type B1 struct {
- X int
- Y int
- Z int
-}
-
-func BenchmarkFieldByName1(b *testing.B) {
- t := TypeOf(B1{})
- b.RunParallel(func(pb *testing.PB) {
- for pb.Next() {
- t.FieldByName("Z")
- }
- })
-}
-
-func BenchmarkFieldByName2(b *testing.B) {
- t := TypeOf(S3{})
- b.RunParallel(func(pb *testing.PB) {
- for pb.Next() {
- t.FieldByName("B")
- }
- })
-}
-
type R0 struct {
*R1
*R2
}
}
-func BenchmarkFieldByName3(b *testing.B) {
- t := TypeOf(R0{})
- b.RunParallel(func(pb *testing.PB) {
- for pb.Next() {
- t.FieldByName("X")
- }
- })
-}
-
-type S struct {
- i1 int64
- i2 int64
-}
-
-func BenchmarkInterfaceBig(b *testing.B) {
- v := ValueOf(S{})
- b.RunParallel(func(pb *testing.PB) {
- for pb.Next() {
- v.Interface()
- }
- })
- b.StopTimer()
-}
-
func TestAllocsInterfaceBig(t *testing.T) {
if testing.Short() {
t.Skip("skipping malloc count in short mode")
}
}
-func BenchmarkInterfaceSmall(b *testing.B) {
- v := ValueOf(int64(0))
- b.RunParallel(func(pb *testing.PB) {
- for pb.Next() {
- v.Interface()
- }
- })
-}
-
func TestAllocsInterfaceSmall(t *testing.T) {
if testing.Short() {
t.Skip("skipping malloc count in short mode")
// e.g. with rep(2, lit(1, 0)).
bits = trimBitmap(bits)
- if !bytes.Equal(heapBits, bits) {
- _, _, line, _ := runtime.Caller(1)
- t.Errorf("line %d: heapBits incorrect for %v\nhave %v\nwant %v", line, typ, heapBits, bits)
+ if bytes.HasPrefix(heapBits, bits) {
+ // Just the prefix matching is OK.
+ //
+ // The Go runtime's pointer/scalar iterator generates pointers beyond
+ // the size of the type, up to the size of the size class. This space
+ // is safe for the GC to scan since it's zero, and GCBits checks to
+ // make sure that's true. But we need to handle the fact that the bitmap
+ // may be larger than we expect.
+ return
}
+ _, _, line, _ := runtime.Caller(1)
+ t.Errorf("line %d: heapBits incorrect for %v\nhave %v\nwant %v", line, typ, heapBits, bits)
}
func verifyGCBitsSlice(t *testing.T, typ Type, cap int, bits []byte) {
// repeat a bitmap for a small array or executing a repeat in
// a GC program.
val := MakeSlice(typ, 0, cap)
- data := NewAt(ArrayOf(cap, typ), val.UnsafePointer())
+ data := NewAt(typ.Elem(), val.UnsafePointer())
heapBits := GCBits(data.Interface())
// Repeat the bitmap for the slice size, trimming scalars in
// the last element.
bits = trimBitmap(rep(cap, bits))
- if !bytes.Equal(heapBits, bits) {
- _, _, line, _ := runtime.Caller(1)
- t.Errorf("line %d: heapBits incorrect for make(%v, 0, %v)\nhave %v\nwant %v", line, typ, cap, heapBits, bits)
+ if bytes.Equal(heapBits, bits) {
+ return
+ }
+ if len(heapBits) > len(bits) && bytes.Equal(heapBits[:len(bits)], bits) {
+ // Just the prefix matching is OK.
+ return
}
+ _, _, line, _ := runtime.Caller(1)
+ t.Errorf("line %d: heapBits incorrect for make(%v, 0, %v)\nhave %v\nwant %v", line, typ, cap, heapBits, bits)
}
func TestGCBits(t *testing.T) {
verifyGCBits(t, TypeOf(([][10000]Xscalar)(nil)), lit(1))
verifyGCBits(t, SliceOf(ArrayOf(10000, Tscalar)), lit(1))
- hdr := make([]byte, 8/goarch.PtrSize)
+ hdr := make([]byte, bucketCount/goarch.PtrSize)
verifyMapBucket := func(t *testing.T, k, e Type, m any, want []byte) {
verifyGCBits(t, MapBucketOf(k, e), want)
verifyMapBucket(t,
Tscalar, Tptr,
map[Xscalar]Xptr(nil),
- join(hdr, rep(8, lit(0)), rep(8, lit(1)), lit(1)))
+ join(hdr, rep(bucketCount, lit(0)), rep(bucketCount, lit(1)), lit(1)))
verifyMapBucket(t,
Tscalarptr, Tptr,
map[Xscalarptr]Xptr(nil),
- join(hdr, rep(8, lit(0, 1)), rep(8, lit(1)), lit(1)))
+ join(hdr, rep(bucketCount, lit(0, 1)), rep(bucketCount, lit(1)), lit(1)))
verifyMapBucket(t, Tint64, Tptr,
map[int64]Xptr(nil),
- join(hdr, rep(8, rep(8/goarch.PtrSize, lit(0))), rep(8, lit(1)), lit(1)))
+ join(hdr, rep(bucketCount, rep(8/goarch.PtrSize, lit(0))), rep(bucketCount, lit(1)), lit(1)))
verifyMapBucket(t,
Tscalar, Tscalar,
map[Xscalar]Xscalar(nil),
verifyMapBucket(t,
ArrayOf(2, Tscalarptr), ArrayOf(3, Tptrscalar),
map[[2]Xscalarptr][3]Xptrscalar(nil),
- join(hdr, rep(8*2, lit(0, 1)), rep(8*3, lit(1, 0)), lit(1)))
+ join(hdr, rep(bucketCount*2, lit(0, 1)), rep(bucketCount*3, lit(1, 0)), lit(1)))
verifyMapBucket(t,
ArrayOf(64/goarch.PtrSize, Tscalarptr), ArrayOf(64/goarch.PtrSize, Tptrscalar),
map[[64 / goarch.PtrSize]Xscalarptr][64 / goarch.PtrSize]Xptrscalar(nil),
- join(hdr, rep(8*64/goarch.PtrSize, lit(0, 1)), rep(8*64/goarch.PtrSize, lit(1, 0)), lit(1)))
+ join(hdr, rep(bucketCount*64/goarch.PtrSize, lit(0, 1)), rep(bucketCount*64/goarch.PtrSize, lit(1, 0)), lit(1)))
verifyMapBucket(t,
ArrayOf(64/goarch.PtrSize+1, Tscalarptr), ArrayOf(64/goarch.PtrSize, Tptrscalar),
map[[64/goarch.PtrSize + 1]Xscalarptr][64 / goarch.PtrSize]Xptrscalar(nil),
- join(hdr, rep(8, lit(1)), rep(8*64/goarch.PtrSize, lit(1, 0)), lit(1)))
+ join(hdr, rep(bucketCount, lit(1)), rep(bucketCount*64/goarch.PtrSize, lit(1, 0)), lit(1)))
verifyMapBucket(t,
ArrayOf(64/goarch.PtrSize, Tscalarptr), ArrayOf(64/goarch.PtrSize+1, Tptrscalar),
map[[64 / goarch.PtrSize]Xscalarptr][64/goarch.PtrSize + 1]Xptrscalar(nil),
- join(hdr, rep(8*64/goarch.PtrSize, lit(0, 1)), rep(8, lit(1)), lit(1)))
+ join(hdr, rep(bucketCount*64/goarch.PtrSize, lit(0, 1)), rep(bucketCount, lit(1)), lit(1)))
verifyMapBucket(t,
ArrayOf(64/goarch.PtrSize+1, Tscalarptr), ArrayOf(64/goarch.PtrSize+1, Tptrscalar),
map[[64/goarch.PtrSize + 1]Xscalarptr][64/goarch.PtrSize + 1]Xptrscalar(nil),
- join(hdr, rep(8, lit(1)), rep(8, lit(1)), lit(1)))
+ join(hdr, rep(bucketCount, lit(1)), rep(bucketCount, lit(1)), lit(1)))
}
func rep(n int, b []byte) []byte { return bytes.Repeat(b, n) }
wg.Wait()
}
-func BenchmarkNew(b *testing.B) {
- v := TypeOf(XM{})
- b.RunParallel(func(pb *testing.PB) {
- for pb.Next() {
- New(v)
- }
- })
-}
-
-func BenchmarkMap(b *testing.B) {
- type V *int
- type S string
- value := ValueOf((V)(nil))
- stringKeys := []string{}
- mapOfStrings := map[string]V{}
- uint64Keys := []uint64{}
- mapOfUint64s := map[uint64]V{}
- userStringKeys := []S{}
- mapOfUserStrings := map[S]V{}
- for i := 0; i < 100; i++ {
- stringKey := fmt.Sprintf("key%d", i)
- stringKeys = append(stringKeys, stringKey)
- mapOfStrings[stringKey] = nil
-
- uint64Key := uint64(i)
- uint64Keys = append(uint64Keys, uint64Key)
- mapOfUint64s[uint64Key] = nil
-
- userStringKey := S(fmt.Sprintf("key%d", i))
- userStringKeys = append(userStringKeys, userStringKey)
- mapOfUserStrings[userStringKey] = nil
- }
-
- tests := []struct {
- label string
- m, keys, value Value
- }{
- {"StringKeys", ValueOf(mapOfStrings), ValueOf(stringKeys), value},
- {"Uint64Keys", ValueOf(mapOfUint64s), ValueOf(uint64Keys), value},
- {"UserStringKeys", ValueOf(mapOfUserStrings), ValueOf(userStringKeys), value},
- }
-
- for _, tt := range tests {
- b.Run(tt.label, func(b *testing.B) {
- b.Run("MapIndex", func(b *testing.B) {
- b.ReportAllocs()
- for i := 0; i < b.N; i++ {
- for j := tt.keys.Len() - 1; j >= 0; j-- {
- tt.m.MapIndex(tt.keys.Index(j))
- }
- }
- })
- b.Run("SetMapIndex", func(b *testing.B) {
- b.ReportAllocs()
- for i := 0; i < b.N; i++ {
- for j := tt.keys.Len() - 1; j >= 0; j-- {
- tt.m.SetMapIndex(tt.keys.Index(j), tt.value)
- }
- }
- })
- })
- }
-}
-
func TestSwapper(t *testing.T) {
type I int
var a, b, c I
}
}
-func BenchmarkMapIterNext(b *testing.B) {
- m := ValueOf(map[string]int{"a": 0, "b": 1, "c": 2, "d": 3})
- it := m.MapRange()
- for i := 0; i < b.N; i++ {
- for it.Next() {
- }
- it.Reset(m)
- }
-}
-
func TestMapIterDelete0(t *testing.T) {
// Delete all elements before first iteration.
m := map[string]int{"one": 1, "two": 2, "three": 3}
if got := *y.Interface().(*int); got != b {
t.Errorf("pointer incorrect: got %d want %d", got, b)
}
-}
-
-//go:notinheap
-type nih struct{ x int }
-
-var global_nih = nih{x: 7}
-
-func TestNotInHeapDeref(t *testing.T) {
- // See issue 48399.
- v := ValueOf((*nih)(nil))
- v.Elem()
- shouldPanic("reflect: call of reflect.Value.Field on zero Value", func() { v.Elem().Field(0) })
- v = ValueOf(&global_nih)
- if got := v.Elem().Field(0).Int(); got != 7 {
- t.Fatalf("got %d, want 7", got)
+ // Make sure we panic assigning from an unexported field.
+ m = ValueOf(struct{ m map[string]int }{data}).Field(0)
+ for iter := m.MapRange(); iter.Next(); {
+ shouldPanic("using value obtained using unexported field", func() {
+ k.SetIterKey(iter)
+ })
+ shouldPanic("using value obtained using unexported field", func() {
+ v.SetIterValue(iter)
+ })
}
-
- v = ValueOf((*nih)(unsafe.Pointer(new(int))))
- shouldPanic("reflect: reflect.Value.Elem on an invalid notinheap pointer", func() { v.Elem() })
- shouldPanic("reflect: reflect.Value.Pointer on an invalid notinheap pointer", func() { v.Pointer() })
- shouldPanic("reflect: reflect.Value.UnsafePointer on an invalid notinheap pointer", func() { v.UnsafePointer() })
}
func TestMethodCallValueCodePtr(t *testing.T) {
namedBytes []byte
)
-var sourceAll = struct {
- Bool Value
- String Value
- Bytes Value
- NamedBytes Value
- BytesArray Value
- SliceAny Value
- MapStringAny Value
-}{
- Bool: ValueOf(new(bool)).Elem(),
- String: ValueOf(new(string)).Elem(),
- Bytes: ValueOf(new([]byte)).Elem(),
- NamedBytes: ValueOf(new(namedBytes)).Elem(),
- BytesArray: ValueOf(new([32]byte)).Elem(),
- SliceAny: ValueOf(new([]any)).Elem(),
- MapStringAny: ValueOf(new(map[string]any)).Elem(),
-}
-
-var sinkAll struct {
- RawBool bool
- RawString string
- RawBytes []byte
- RawInt int
-}
-
-func BenchmarkBool(b *testing.B) {
- for i := 0; i < b.N; i++ {
- sinkAll.RawBool = sourceAll.Bool.Bool()
- }
-}
-
-func BenchmarkString(b *testing.B) {
- for i := 0; i < b.N; i++ {
- sinkAll.RawString = sourceAll.String.String()
- }
-}
-
-func BenchmarkBytes(b *testing.B) {
- for i := 0; i < b.N; i++ {
- sinkAll.RawBytes = sourceAll.Bytes.Bytes()
- }
-}
-
-func BenchmarkNamedBytes(b *testing.B) {
- for i := 0; i < b.N; i++ {
- sinkAll.RawBytes = sourceAll.NamedBytes.Bytes()
- }
-}
-
-func BenchmarkBytesArray(b *testing.B) {
- for i := 0; i < b.N; i++ {
- sinkAll.RawBytes = sourceAll.BytesArray.Bytes()
- }
-}
-
-func BenchmarkSliceLen(b *testing.B) {
- for i := 0; i < b.N; i++ {
- sinkAll.RawInt = sourceAll.SliceAny.Len()
- }
-}
-
-func BenchmarkMapLen(b *testing.B) {
- for i := 0; i < b.N; i++ {
- sinkAll.RawInt = sourceAll.MapStringAny.Len()
- }
-}
-
-func BenchmarkStringLen(b *testing.B) {
- for i := 0; i < b.N; i++ {
- sinkAll.RawInt = sourceAll.String.Len()
- }
-}
-
-func BenchmarkArrayLen(b *testing.B) {
- for i := 0; i < b.N; i++ {
- sinkAll.RawInt = sourceAll.BytesArray.Len()
- }
-}
-
-func BenchmarkSliceCap(b *testing.B) {
- for i := 0; i < b.N; i++ {
- sinkAll.RawInt = sourceAll.SliceAny.Cap()
- }
-}
-
func TestValue_Cap(t *testing.T) {
a := &[3]int{1, 2, 3}
v := ValueOf(a)
true,
true, false,
},
- {
- &equalSlice, []int{1},
- false,
- true, false,
- },
- {
- map[int]int{}, map[int]int{},
- false,
- false, false,
- },
{
(chan int)(nil), nil,
false,
true,
false, false,
},
- {
- &mapInterface, &mapInterface,
- false,
- true, true,
- },
}
func TestValue_Equal(t *testing.T) {
}
}
}
+
+// TestValue_EqualNonComparable verifies Value.Equal's behavior for
+// non-comparable values: comparing two valid non-comparable values panics,
+// while comparing one against the zero (invalid) Value reports false.
+func TestValue_EqualNonComparable(t *testing.T) {
+ var invalid = Value{} // ValueOf(nil)
+ var values = []Value{
+ // Value of slice is non-comparable.
+ ValueOf([]int(nil)),
+ ValueOf(([]int{})),
+
+ // Value of map is non-comparable.
+ ValueOf(map[int]int(nil)),
+ ValueOf((map[int]int{})),
+
+ // Value of func is non-comparable.
+ ValueOf(((func())(nil))),
+ ValueOf(func() {}),
+
+ // Value of struct is non-comparable because of non-comparable elements.
+ ValueOf((NonComparableStruct{})),
+
+ // Value of array is non-comparable because of non-comparable elements.
+ ValueOf([0]map[int]int{}),
+ ValueOf([0]func(){}),
+ ValueOf(([1]struct{ I interface{} }{{[]int{}}})),
+ ValueOf(([1]interface{}{[1]interface{}{map[int]int{}}})),
+ }
+ for _, value := range values {
+ // Panic when reflect.Value.Equal using two valid non-comparable values.
+ shouldPanic("are not comparable", func() { value.Equal(value) })
+
+ // If one is non-comparable and the other is invalid, the expected result is always false.
+ if r := value.Equal(invalid); r != false {
+ t.Errorf("%s == invalid got %t, want false", value.Type(), r)
+ }
+ }
+}
+
+// TestInitFuncTypes calls Type.Method concurrently from many goroutines on
+// the same type (net.IP) so that, under -race, unsynchronized lazy method
+// initialization would be detected.
+func TestInitFuncTypes(t *testing.T) {
+ n := 100
+ var wg sync.WaitGroup
+
+ wg.Add(n)
+ for i := 0; i < n; i++ {
+ go func() {
+ defer wg.Done()
+ ipT := TypeOf(net.IP{})
+ for i := 0; i < ipT.NumMethod(); i++ {
+ _ = ipT.Method(i)
+ }
+ }()
+ }
+ wg.Wait()
+}
+
+// TestClear verifies Value.Clear: it deletes all entries of a map, zeroes
+// every element of a slice (preserving length), and panics on other kinds.
+func TestClear(t *testing.T) {
+ m := make(map[string]any, len(valueTests))
+ for _, tt := range valueTests {
+ m[tt.s] = tt.i
+ }
+ // A cleared map must be empty.
+ mapTestFn := func(v Value) bool { v.Clear(); return v.Len() == 0 }
+
+ s := make([]*pair, len(valueTests))
+ for i := range s {
+ s[i] = &valueTests[i]
+ }
+ // A cleared slice keeps its length but every element must be zero.
+ sliceTestFn := func(v Value) bool {
+ v.Clear()
+ for i := 0; i < v.Len(); i++ {
+ if !v.Index(i).IsZero() {
+ return false
+ }
+ }
+ return true
+ }
+
+ // Clear on any non-map, non-slice value must panic.
+ panicTestFn := func(v Value) bool { shouldPanic("reflect.Value.Clear", func() { v.Clear() }); return true }
+
+ tests := []struct {
+ name string
+ value Value
+ testFunc func(v Value) bool
+ }{
+ {"map", ValueOf(m), mapTestFn},
+ {"slice no pointer", ValueOf([]int{1, 2, 3, 4, 5}), sliceTestFn},
+ {"slice has pointer", ValueOf(s), sliceTestFn},
+ {"non-map/slice", ValueOf(1), panicTestFn},
+ }
+
+ for _, tc := range tests {
+ tc := tc // capture range variable for the parallel subtest
+ t.Run(tc.name, func(t *testing.T) {
+ t.Parallel()
+ if !tc.testFunc(tc.value) {
+ t.Errorf("unexpected result for value.Clear(): %v", tc.value)
+ }
+ })
+ }
+}