if raceenabled {
raceReadObjectPC(t.elem, ep, callerpc, funcPC(chansend))
}
+ if msanenabled {
+ msanread(ep, t.elem.size)
+ }
if c == nil {
if !block {
racereadpc(unsafe.Pointer(h), callerpc, pc)
raceReadObjectPC(t.key, key, callerpc, pc)
}
+ if msanenabled && h != nil {
+ msanread(key, t.key.size)
+ }
if h == nil || h.count == 0 {
return atomicloadp(unsafe.Pointer(&zeroptr))
}
racereadpc(unsafe.Pointer(h), callerpc, pc)
raceReadObjectPC(t.key, key, callerpc, pc)
}
+ if msanenabled && h != nil {
+ msanread(key, t.key.size)
+ }
if h == nil || h.count == 0 {
return atomicloadp(unsafe.Pointer(&zeroptr)), false
}
raceReadObjectPC(t.key, key, callerpc, pc)
raceReadObjectPC(t.elem, val, callerpc, pc)
}
+ if msanenabled {
+ msanread(key, t.key.size)
+ msanread(val, t.elem.size)
+ }
alg := t.key.alg
hash := alg.hash(key, uintptr(h.hash0))
racewritepc(unsafe.Pointer(h), callerpc, pc)
raceReadObjectPC(t.key, key, callerpc, pc)
}
+ if msanenabled && h != nil {
+ msanread(key, t.key.size)
+ }
if h == nil || h.count == 0 {
return
}
if raceenabled {
raceReadObjectPC(t, elem, getcallerpc(unsafe.Pointer(&t)), funcPC(convT2E))
}
+ if msanenabled {
+ msanread(elem, t.size)
+ }
ep := (*eface)(unsafe.Pointer(&e))
if isDirectIface(t) {
ep._type = t
if raceenabled {
raceReadObjectPC(t, elem, getcallerpc(unsafe.Pointer(&t)), funcPC(convT2I))
}
+ if msanenabled {
+ msanread(elem, t.size)
+ }
tab := (*itab)(atomicloadp(unsafe.Pointer(cache)))
if tab == nil {
tab = getitab(inter, t, false)
if raceenabled {
racemalloc(x, size)
}
+ if msanenabled {
+ msanmalloc(x, size)
+ }
mp.mallocing = 0
releasem(mp)
racewriterangepc(dstp, uintptr(n)*typ.size, callerpc, pc)
racereadrangepc(srcp, uintptr(n)*typ.size, callerpc, pc)
}
+ if msanenabled {
+ msanwrite(dstp, uintptr(n)*typ.size)
+ msanread(srcp, uintptr(n)*typ.size)
+ }
// Note: No point in checking typ.kind&kindNoPointers here:
// compiler only emits calls to typedslicecopy for types with pointers,
if debug.allocfreetrace != 0 {
tracefree(unsafe.Pointer(p), size)
}
+ if msanenabled {
+ msanfree(unsafe.Pointer(p), size)
+ }
// Reset to allocated+noscan.
if cl == 0 {
if raceenabled {
racemalloc(unsafe.Pointer(gp.stack.lo), gp.stackAlloc)
}
+ if msanenabled {
+ msanmalloc(unsafe.Pointer(gp.stack.lo), gp.stackAlloc)
+ }
}
}
return gp
raceReadObjectPC(c.elemtype, cas.elem, cas.pc, chansendpc)
}
}
+ if msanenabled {
+ if cas.kind == caseRecv && cas.elem != nil {
+ msanwrite(cas.elem, c.elemtype.size)
+ } else if cas.kind == caseSend {
+ msanread(cas.elem, c.elemtype.size)
+ }
+ }
selunlock(sel)
goto retc
raceacquire(chanbuf(c, c.recvx))
racerelease(chanbuf(c, c.recvx))
}
+ if msanenabled && cas.elem != nil {
+ msanwrite(cas.elem, c.elemtype.size)
+ }
if cas.receivedp != nil {
*cas.receivedp = true
}
racerelease(chanbuf(c, c.sendx))
raceReadObjectPC(c.elemtype, cas.elem, cas.pc, chansendpc)
}
+ if msanenabled {
+ msanread(cas.elem, c.elemtype.size)
+ }
typedmemmove(c.elemtype, chanbuf(c, c.sendx), cas.elem)
c.sendx++
if c.sendx == c.dataqsiz {
}
racesync(c, sg)
}
+ if msanenabled && cas.elem != nil {
+ msanwrite(cas.elem, c.elemtype.size)
+ }
selunlock(sel)
if debugSelect {
print("syncrecv: sel=", sel, " c=", c, "\n")
raceReadObjectPC(c.elemtype, cas.elem, cas.pc, chansendpc)
racesync(c, sg)
}
+ if msanenabled {
+ msanread(cas.elem, c.elemtype.size)
+ }
selunlock(sel)
if debugSelect {
print("syncsend: sel=", sel, " c=", c, "\n")
callerpc := getcallerpc(unsafe.Pointer(&t))
racereadrangepc(old.array, uintptr(old.len*int(t.elem.size)), callerpc, funcPC(growslice))
}
+ if msanenabled {
+ msanread(old.array, uintptr(old.len*int(t.elem.size)))
+ }
et := t.elem
if et.size == 0 {
racewriterangepc(to.array, uintptr(n*int(width)), callerpc, pc)
racereadrangepc(fm.array, uintptr(n*int(width)), callerpc, pc)
}
+ if msanenabled {
+ msanwrite(to.array, uintptr(n*int(width)))
+ msanread(fm.array, uintptr(n*int(width)))
+ }
size := uintptr(n) * width
if size == 1 { // common case worth about 2x to do here
pc := funcPC(slicestringcopy)
racewriterangepc(unsafe.Pointer(&to[0]), uintptr(n), callerpc, pc)
}
+ if msanenabled {
+ msanwrite(unsafe.Pointer(&to[0]), uintptr(n))
+ }
memmove(unsafe.Pointer(&to[0]), unsafe.Pointer(stringStructOf(&fm).str), uintptr(n))
return n
if raceenabled {
racemalloc(v, uintptr(n))
}
+ if msanenabled {
+ msanmalloc(v, uintptr(n))
+ }
if stackDebug >= 1 {
print(" allocated ", v, "\n")
}
}
return
}
+ if msanenabled {
+ msanfree(v, n)
+ }
if stackCache != 0 && n < _FixedStack<<_NumStackOrders && n < _StackCacheSize {
order := uint8(0)
n2 := n
getcallerpc(unsafe.Pointer(&b)),
funcPC(slicebytetostring))
}
+ if msanenabled && l > 0 {
+ msanread(unsafe.Pointer(&b[0]), uintptr(l))
+ }
s, c := rawstringtmp(buf, l)
copy(c, b)
return s
getcallerpc(unsafe.Pointer(&b)),
funcPC(slicebytetostringtmp))
}
+ if msanenabled && len(b) > 0 {
+ msanread(unsafe.Pointer(&b[0]), uintptr(len(b)))
+ }
return *(*string)(unsafe.Pointer(&b))
}
getcallerpc(unsafe.Pointer(&a)),
funcPC(slicerunetostring))
}
+ if msanenabled && len(a) > 0 {
+ msanread(unsafe.Pointer(&a[0]), uintptr(len(a))*unsafe.Sizeof(a[0]))
+ }
var dum [4]byte
size1 := 0
for _, r := range a {
--- /dev/null
+// Copyright 2015 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// +build msan
+
+package syscall
+
+import (
+	"runtime"
+	"unsafe"
+)
+
+// msanenabled reports whether msan instrumentation is active.
+// This variant of the file is selected by the "msan" build tag.
+const msanenabled = true
+
+// msanRead informs the memory sanitizer, via the runtime, that the
+// len bytes at addr are being read (e.g. a buffer handed to a
+// write-style system call).
+func msanRead(addr unsafe.Pointer, len int) {
+	runtime.MSanRead(addr, len)
+}
+
+// msanWrite informs the memory sanitizer, via the runtime, that the
+// len bytes at addr are being written (e.g. a buffer filled by a
+// read-style system call).
+func msanWrite(addr unsafe.Pointer, len int) {
+	runtime.MSanWrite(addr, len)
+}
--- /dev/null
+// Copyright 2015 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// +build !msan
+
+package syscall
+
+import (
+	"unsafe"
+)
+
+// msanenabled reports whether msan instrumentation is active.
+// This stub variant is selected when the "msan" build tag is absent,
+// so the checks guarded by msanenabled compile away.
+const msanenabled = false
+
+// msanRead is a no-op stand-in for the msan build's msanRead.
+func msanRead(addr unsafe.Pointer, len int) {
+}
+
+// msanWrite is a no-op stand-in for the msan build's msanWrite.
+func msanWrite(addr unsafe.Pointer, len int) {
+}
raceAcquire(unsafe.Pointer(&ioSync))
}
}
+ if msanenabled && n > 0 {
+ msanWrite(unsafe.Pointer(&p[0]), n)
+ }
return
}
if raceenabled && n > 0 {
raceReadRange(unsafe.Pointer(&p[0]), n)
}
+ if msanenabled && n > 0 {
+ msanRead(unsafe.Pointer(&p[0]), n)
+ }
return
}
}
raceAcquire(unsafe.Pointer(&ioSync))
}
+ if msanenabled && done > 0 {
+ msanWrite(unsafe.Pointer(&p[0]), int(done))
+ }
return int(done), nil
}
if raceenabled && done > 0 {
raceReadRange(unsafe.Pointer(&p[0]), int(done))
}
+ if msanenabled && done > 0 {
+ msanRead(unsafe.Pointer(&p[0]), int(done))
+ }
return int(done), nil
}