1 // Copyright 2009 The Go Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style
3 // license that can be found in the LICENSE file.
15 offsetARMHasV7Atomics = unsafe.Offsetof(cpu.ARM.HasV7Atomics)
18 // Export some functions via linkname to assembly in sync/atomic.
21 //go:linkname Xchguintptr
// spinlock is a minimal busy-wait mutex built on the package's 32-bit
// atomics; the zero value is unlocked. v is 0 when free and 1 when held
// (see lock and unlock).
type spinlock struct {
	v uint32
}
28 func (l *spinlock) lock() {
37 func (l *spinlock) unlock() {
41 var locktab [57]struct {
43 pad [cpu.CacheLinePadSize - unsafe.Sizeof(spinlock{})]byte
46 func addrLock(addr *uint64) *spinlock {
47 return &locktab[(uintptr(unsafe.Pointer(addr))>>3)%uintptr(len(locktab))].l
50 // Atomic add and return new value.
53 func Xadd(val *uint32, delta int32) uint32 {
56 nval := oval + uint32(delta)
57 if Cas(val, oval, nval) {
// Xadduintptr has no Go body; it is implemented in assembly.
// By analogy with Xadd it should add delta to *ptr and return the new
// value — confirm against the assembly implementation.
func Xadduintptr(ptr *uintptr, delta uintptr) uintptr
67 func Xchg(addr *uint32, v uint32) uint32 {
70 if Cas(addr, old, v) {
77 func Xchguintptr(addr *uintptr, v uintptr) uintptr {
78 return uintptr(Xchg((*uint32)(unsafe.Pointer(addr)), uint32(v)))
// StorepNoWB has no Go body; it is implemented in assembly.
// The NoWB suffix suggests the pointer store skips the write barrier —
// confirm against the assembly.
//
// Not noescape -- it installs a pointer to addr.
func StorepNoWB(addr unsafe.Pointer, v unsafe.Pointer)
// Store has no Go body; it is implemented in assembly.
func Store(addr *uint32, v uint32)

// StoreRel has no Go body; it is implemented in assembly.
// (The Rel suffix suggests release ordering — confirm against the assembly.)
func StoreRel(addr *uint32, v uint32)

// StoreReluintptr has no Go body; it is implemented in assembly.
func StoreReluintptr(addr *uintptr, v uintptr)
94 func goCas64(addr *uint64, old, new uint64) bool {
95 if uintptr(unsafe.Pointer(addr))&7 != 0 {
96 *(*int)(nil) = 0 // crash on unaligned uint64
98 _ = *addr // if nil, fault before taking the lock
100 addrLock(addr).lock()
105 addrLock(addr).unlock()
110 func goXadd64(addr *uint64, delta int64) uint64 {
111 if uintptr(unsafe.Pointer(addr))&7 != 0 {
112 *(*int)(nil) = 0 // crash on unaligned uint64
114 _ = *addr // if nil, fault before taking the lock
116 addrLock(addr).lock()
117 r = *addr + uint64(delta)
119 addrLock(addr).unlock()
124 func goXchg64(addr *uint64, v uint64) uint64 {
125 if uintptr(unsafe.Pointer(addr))&7 != 0 {
126 *(*int)(nil) = 0 // crash on unaligned uint64
128 _ = *addr // if nil, fault before taking the lock
130 addrLock(addr).lock()
133 addrLock(addr).unlock()
138 func goLoad64(addr *uint64) uint64 {
139 if uintptr(unsafe.Pointer(addr))&7 != 0 {
140 *(*int)(nil) = 0 // crash on unaligned uint64
142 _ = *addr // if nil, fault before taking the lock
144 addrLock(addr).lock()
146 addrLock(addr).unlock()
151 func goStore64(addr *uint64, v uint64) {
152 if uintptr(unsafe.Pointer(addr))&7 != 0 {
153 *(*int)(nil) = 0 // crash on unaligned uint64
155 _ = *addr // if nil, fault before taking the lock
156 addrLock(addr).lock()
158 addrLock(addr).unlock()
162 func Or8(addr *uint8, v uint8) {
163 // Align down to 4 bytes and use 32-bit CAS.
164 uaddr := uintptr(unsafe.Pointer(addr))
165 addr32 := (*uint32)(unsafe.Pointer(uaddr &^ 3))
166 word := uint32(v) << ((uaddr & 3) * 8) // little endian
169 if Cas(addr32, old, old|word) {
176 func And8(addr *uint8, v uint8) {
177 // Align down to 4 bytes and use 32-bit CAS.
178 uaddr := uintptr(unsafe.Pointer(addr))
179 addr32 := (*uint32)(unsafe.Pointer(uaddr &^ 3))
180 word := uint32(v) << ((uaddr & 3) * 8) // little endian
181 mask := uint32(0xFF) << ((uaddr & 3) * 8) // little endian
185 if Cas(addr32, old, old&word) {
192 func Or(addr *uint32, v uint32) {
195 if Cas(addr, old, old|v) {
202 func And(addr *uint32, v uint32) {
205 if Cas(addr, old, old&v) {
// armcas has no Go body; it is implemented in assembly. The signature
// matches a compare-and-swap on *ptr — confirm semantics against the
// assembly.
func armcas(ptr *uint32, old, new uint32) bool

// Load has no Go body; it is implemented in assembly.
func Load(addr *uint32) uint32
// Loadp has no Go body; it is implemented in assembly.
//
// NO go:noescape annotation; *addr escapes if result escapes (#31525)
func Loadp(addr unsafe.Pointer) unsafe.Pointer
// The declarations below have no Go bodies; each is implemented in
// assembly. Names follow the package's conventions (Load*, Cas*, Xadd*,
// Xchg*, Store*, with Acq/Rel suffixes suggesting acquire/release
// ordering) — confirm exact semantics against the assembly.

func Load8(addr *uint8) uint8

func LoadAcq(addr *uint32) uint32

func LoadAcquintptr(ptr *uintptr) uintptr

func Cas64(addr *uint64, old, new uint64) bool

func CasRel(addr *uint32, old, new uint32) bool

func Xadd64(addr *uint64, delta int64) uint64

func Xchg64(addr *uint64, v uint64) uint64

func Load64(addr *uint64) uint64

func Store8(addr *uint8, v uint8)

func Store64(addr *uint64, v uint64)