{[3][]int{{1}}, false}, // incomparable array
{[1 << 12]byte{}, true},
{[1 << 12]byte{1}, false},
+ {[1]struct{ p *int }{}, true},
+ {[1]struct{ p *int }{{new(int)}}, false},
{[3]Value{}, true},
{[3]Value{{}, ValueOf(0), {}}, false},
// Chan
}()
}
+func TestInternalIsZero(t *testing.T) {
+ b := make([]byte, 512)
+ for a := 0; a < 8; a++ {
+ for i := 256 + 7; i <= 512-a; i++ {
+ InternalIsZero(b[a : a+i])
+ }
+ }
+}
+
func TestInterfaceExtraction(t *testing.T) {
var s struct {
W io.Writer
}
func BenchmarkIsZero(b *testing.B) {
- source := ValueOf(struct {
- ArrayComparable [4]T
- ArrayIncomparable [4]_Complex
- StructComparable T
- StructIncomparable _Complex
- }{})
+ type Int4 struct {
+ a, b, c, d int
+ }
+ type Int1024 struct {
+ a [1024]int
+ }
+ s := struct {
+ ArrayComparable [4]T
+ ArrayIncomparable [4]_Complex
+ StructComparable T
+ StructIncomparable _Complex
+ ArrayInt_4 [4]int
+ ArrayInt_1024 [1024]int
+ ArrayInt_1024_NoZero [1024]int
+ Struct4Int Int4
+ ArrayStruct4Int_1024 [256]Int4
+ ArrayChanInt_1024 [1024]chan int
+ }{}
+ s.ArrayInt_1024_NoZero[512] = 1
+ source := ValueOf(s)
for i := 0; i < source.NumField(); i++ {
name := source.Type().Field(i).Name
case Complex64, Complex128:
return v.Complex() == 0
case Array:
+ array := (*abi.ArrayType)(unsafe.Pointer(v.typ()))
+ // Avoid performance degradation of small benchmarks.
// If the type is comparable, then compare directly with zero.
- if v.typ().Equal != nil && v.typ().Size() <= maxZero {
+ if array.Equal != nil && array.Size() <= maxZero {
if v.flag&flagIndir == 0 {
return v.ptr == nil
}
// v.ptr doesn't escape, as Equal functions are compiler generated
// and never escape. The escape analysis doesn't know, as it is a
// function pointer call.
- return v.typ().Equal(noescape(v.ptr), unsafe.Pointer(&zeroVal[0]))
+ return array.Equal(noescape(v.ptr), unsafe.Pointer(&zeroVal[0]))
}
-
- n := v.Len()
+ if array.TFlag&abi.TFlagRegularMemory != 0 {
+ // If the zero value of this type is represented by all-zero bits
+ // (regular memory layout), scan the raw bytes directly instead of
+ // recursing per element.
+ return isZero(unsafe.Slice(((*byte)(v.ptr)), array.Size()))
+ }
+ n := int(array.Len)
for i := 0; i < n; i++ {
if !v.Index(i).IsZero() {
return false
}
}
// isZero reports whether b contains only zero bytes.
//
// It accepts slices of any length, including empty, so callers need not
// guarantee a minimum size. The bulk of the work compares 8-byte words,
// 32 at a time, after aligning the start to an 8-byte boundary.
// For all-zero input this is not as fast as
// bytealg.Count(b, byte(0)) == len(b), but bytealg is not importable here.
func isZero(b []byte) bool {
	if len(b) == 0 {
		return true
	}
	const n = 32 // words compared per unrolled iteration
	// Advance to an 8-byte boundary, checking each skipped byte.
	for uintptr(unsafe.Pointer(&b[0]))%8 != 0 {
		if b[0] != 0 {
			return false
		}
		b = b[1:]
		if len(b) == 0 {
			return true
		}
	}
	// Trim the tail so the remaining length is a whole number of words.
	for len(b)%8 != 0 {
		if b[len(b)-1] != 0 {
			return false
		}
		b = b[:len(b)-1]
	}
	if len(b) == 0 {
		return true
	}
	// The remainder is 8-byte aligned with a multiple-of-8 length, so it
	// is safe to view it as a []uint64.
	w := unsafe.Slice((*uint64)(unsafe.Pointer(&b[0])), len(b)/8)
	// Drain leftover words one at a time until a multiple of n remains.
	for len(w)%n != 0 {
		if w[0] != 0 {
			return false
		}
		w = w[1:]
	}
	// Unrolled comparison of n words per iteration.
	for len(w) >= n {
		if w[0] != 0 || w[1] != 0 || w[2] != 0 || w[3] != 0 || w[4] != 0 || w[5] != 0 || w[6] != 0 || w[7] != 0 || w[8] != 0 || w[9] != 0 || w[10] != 0 || w[11] != 0 || w[12] != 0 || w[13] != 0 || w[14] != 0 || w[15] != 0 || w[16] != 0 || w[17] != 0 || w[18] != 0 || w[19] != 0 || w[20] != 0 || w[21] != 0 || w[22] != 0 || w[23] != 0 || w[24] != 0 || w[25] != 0 || w[26] != 0 || w[27] != 0 || w[28] != 0 || w[29] != 0 || w[30] != 0 || w[31] != 0 {
			return false
		}
		w = w[n:]
	}
	return true
}
+
// SetZero sets v to be the zero value of v's type.
// It panics if [Value.CanSet] returns false.
func (v Value) SetZero() {