1 // Copyright 2014 The Go Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style
3 // license that can be found in the LICENSE file.
// Typed convenience wrappers over the core untyped atomic primitives
// defined later in this file. The TEXT frame spec encodes locals and
// argument sizes: e.g. $0-17 means 0 bytes of locals and 17 bytes of
// arguments+results (two 8-byte words plus a 1-byte bool), $0-25 adds
// a third 8-byte word.
// NOTE(review): the wrapper bodies (presumably single tail-branches,
// B ·Xxx(SB), to the untyped implementations) are elided in this
// view — confirm against the full file.
8 TEXT ·Casint32(SB), NOSPLIT, $0-17
11 TEXT ·Casint64(SB), NOSPLIT, $0-25
14 TEXT ·Casuintptr(SB), NOSPLIT, $0-25
17 TEXT ·CasRel(SB), NOSPLIT, $0-17
20 TEXT ·Loadint32(SB), NOSPLIT, $0-12
23 TEXT ·Loadint64(SB), NOSPLIT, $0-16
26 TEXT ·Loaduintptr(SB), NOSPLIT, $0-16
29 TEXT ·Loaduint(SB), NOSPLIT, $0-16
32 TEXT ·Storeint32(SB), NOSPLIT, $0-12
35 TEXT ·Storeint64(SB), NOSPLIT, $0-16
38 TEXT ·Storeuintptr(SB), NOSPLIT, $0-16
41 TEXT ·Xaddint32(SB), NOSPLIT, $0-20
44 TEXT ·Xaddint64(SB), NOSPLIT, $0-24
47 TEXT ·Xadduintptr(SB), NOSPLIT, $0-24
50 TEXT ·Casp1(SB), NOSPLIT, $0-25
// Load family: sequentially-consistent and acquire-ordered atomic
// loads of various widths. Bodies (presumably LDARx/LDAPRx loads and
// result stores to the Go frame) are elided in this view.
53 // uint32 ·Load(uint32 volatile* addr)
54 TEXT ·Load(SB),NOSPLIT,$0-12
60 // uint8 ·Load8(uint8 volatile* addr)
61 TEXT ·Load8(SB),NOSPLIT,$0-9
67 // uint64 ·Load64(uint64 volatile* addr)
68 TEXT ·Load64(SB),NOSPLIT,$0-16
74 // void *·Loadp(void *volatile *addr)
75 TEXT ·Loadp(SB),NOSPLIT,$0-16
81 // uint32 ·LoadAcq(uint32 volatile* addr)
82 TEXT ·LoadAcq(SB),NOSPLIT,$0-12
85 // uint64 ·LoadAcq64(uint64 volatile* addr)
86 TEXT ·LoadAcq64(SB),NOSPLIT,$0-16
89 // uintptr ·LoadAcquintptr(uintptr volatile* addr)
90 TEXT ·LoadAcquintptr(SB),NOSPLIT,$0-16
// Store family. StorepNoWB stores a pointer without invoking the
// write barrier (caller must guarantee GC safety); StoreRel* are
// release-ordered stores; Store/Store8/Store64 are the
// sequentially-consistent stores (presumably STLRx — bodies elided
// in this view).
93 TEXT ·StorepNoWB(SB), NOSPLIT, $0-16
96 TEXT ·StoreRel(SB), NOSPLIT, $0-12
99 TEXT ·StoreRel64(SB), NOSPLIT, $0-16
102 TEXT ·StoreReluintptr(SB), NOSPLIT, $0-16
105 TEXT ·Store(SB), NOSPLIT, $0-12
111 TEXT ·Store8(SB), NOSPLIT, $0-9
117 TEXT ·Store64(SB), NOSPLIT, $0-16
123 // uint32 Xchg(ptr *uint32, new uint32)
128 TEXT ·Xchg(SB), NOSPLIT, $0-20
// Dispatch on ARMv8.1 LSE support: if the CPU-feature byte is zero,
// fall back to a load/store-exclusive loop (fast-path SWPALW and the
// loop body are elided in this view).
131 MOVBU internal∕cpu·ARM64+const_offsetARM64HasATOMICS(SB), R4
132 CBZ R4, load_store_loop
// Retry while the store-exclusive status flag (R3, 0 = success) is set.
139 CBNZ R3, load_store_loop
143 // uint64 Xchg64(ptr *uint64, new uint64)
148 TEXT ·Xchg64(SB), NOSPLIT, $0-24
// Same LSE-vs-LL/SC dispatch as Xchg, for the 64-bit width.
151 MOVBU internal∕cpu·ARM64+const_offsetARM64HasATOMICS(SB), R4
152 CBZ R4, load_store_loop
159 CBNZ R3, load_store_loop
163 // bool Cas(uint32 *ptr, uint32 old, uint32 new)
170 TEXT ·Cas(SB), NOSPLIT, $0-17
// LSE fast path (presumably CASALW, elided here) when the feature
// byte is nonzero; otherwise a load/store-exclusive compare loop.
174 MOVBU internal∕cpu·ARM64+const_offsetARM64HasATOMICS(SB), R4
175 CBZ R4, load_store_loop
// Retry while the store-exclusive status flag (R3, 0 = success) is set.
187 CBNZ R3, load_store_loop
193 // bool ·Cas64(uint64 *ptr, uint64 old, uint64 new)
201 TEXT ·Cas64(SB), NOSPLIT, $0-25
// Same dispatch as Cas, for the 64-bit width.
205 MOVBU internal∕cpu·ARM64+const_offsetARM64HasATOMICS(SB), R4
206 CBZ R4, load_store_loop
218 CBNZ R3, load_store_loop
224 // uint32 xadd(uint32 volatile *ptr, int32 delta)
228 TEXT ·Xadd(SB), NOSPLIT, $0-20
// LSE fast path: LDADDALW atomically adds R1 (delta) to *(R0) with
// acquire+release ordering, placing the OLD value in R2. Without LSE,
// fall back to the load/store-exclusive loop.
231 MOVBU internal∕cpu·ARM64+const_offsetARM64HasATOMICS(SB), R4
232 CBZ R4, load_store_loop
233 LDADDALW R1, (R0), R2
// Retry while the store-exclusive status flag (R3, 0 = success) is set.
241 CBNZ R3, load_store_loop
245 // uint64 Xadd64(uint64 volatile *ptr, int64 delta)
249 TEXT ·Xadd64(SB), NOSPLIT, $0-24
// 64-bit variant: LDADDALD is the doubleword form of the atomic add.
252 MOVBU internal∕cpu·ARM64+const_offsetARM64HasATOMICS(SB), R4
253 CBZ R4, load_store_loop
254 LDADDALD R1, (R0), R2
262 CBNZ R3, load_store_loop
// Typed wrappers over Xchg/Xchg64; bodies (presumably tail-branches)
// are elided in this view.
266 TEXT ·Xchgint32(SB), NOSPLIT, $0-20
269 TEXT ·Xchgint64(SB), NOSPLIT, $0-24
272 TEXT ·Xchguintptr(SB), NOSPLIT, $0-24
// void-returning atomic bit operations.
275 TEXT ·And8(SB), NOSPLIT, $0-9
// LSE fast path: AND is implemented with LDCLRALB, which atomically
// clears in *(R0) the bits set in R2 (old value -> R3). R2 presumably
// holds the complement (MVN) of the mask, computed in elided lines —
// confirm against the full file.
278 MOVBU internal∕cpu·ARM64+const_offsetARM64HasATOMICS(SB), R4
279 CBZ R4, load_store_loop
281 LDCLRALB R2, (R0), R3
// Retry while the store-exclusive status flag (R3, 0 = success) is set.
287 CBNZ R3, load_store_loop
290 TEXT ·Or8(SB), NOSPLIT, $0-9
// OR counterpart of And8 (fast path, presumably LDORALB, elided).
293 MOVBU internal∕cpu·ARM64+const_offsetARM64HasATOMICS(SB), R4
294 CBZ R4, load_store_loop
301 CBNZ R3, load_store_loop
304 // func And(addr *uint32, v uint32)
305 TEXT ·And(SB), NOSPLIT, $0-12
// 32-bit AND via LDCLRALW on the complemented mask (see And8).
308 MOVBU internal∕cpu·ARM64+const_offsetARM64HasATOMICS(SB), R4
309 CBZ R4, load_store_loop
311 LDCLRALW R2, (R0), R3
317 CBNZ R3, load_store_loop
320 // func Or(addr *uint32, v uint32)
321 TEXT ·Or(SB), NOSPLIT, $0-12
// 32-bit OR counterpart (fast path, presumably LDORALW, elided).
324 MOVBU internal∕cpu·ARM64+const_offsetARM64HasATOMICS(SB), R4
325 CBZ R4, load_store_loop
332 CBNZ R3, load_store_loop
// Value-returning atomic bit operations: each returns the OLD value.
// Note the LL/SC fallback loops here use R4 for the store-exclusive
// status (CBNZ R4), since R3 holds the computed/old value.
335 // func Or32(addr *uint32, v uint32) old uint32
336 TEXT ·Or32(SB), NOSPLIT, $0-20
339 MOVBU internal∕cpu·ARM64+const_offsetARM64HasATOMICS(SB), R4
340 CBZ R4, load_store_loop
// Retry while the store-exclusive status flag (R4, 0 = success) is set.
348 CBNZ R4, load_store_loop
352 // func And32(addr *uint32, v uint32) old uint32
353 TEXT ·And32(SB), NOSPLIT, $0-20
// LSE fast path: LDCLRALW clears in *(R0) the bits set in R2
// (presumably the complemented mask); old value lands in R3.
356 MOVBU internal∕cpu·ARM64+const_offsetARM64HasATOMICS(SB), R4
357 CBZ R4, load_store_loop
359 LDCLRALW R2, (R0), R3
366 CBNZ R4, load_store_loop
370 // func Or64(addr *uint64, v uint64) old uint64
371 TEXT ·Or64(SB), NOSPLIT, $0-24
374 MOVBU internal∕cpu·ARM64+const_offsetARM64HasATOMICS(SB), R4
375 CBZ R4, load_store_loop
383 CBNZ R4, load_store_loop
387 // func And64(addr *uint64, v uint64) old uint64
388 TEXT ·And64(SB), NOSPLIT, $0-24
// Doubleword variant of And32 (LDCLRALD).
391 MOVBU internal∕cpu·ARM64+const_offsetARM64HasATOMICS(SB), R4
392 CBZ R4, load_store_loop
394 LDCLRALD R2, (R0), R3
401 CBNZ R4, load_store_loop
// uintptr wrappers over And64/Or64; bodies (presumably tail-branches)
// are elided in this view.
405 // func Anduintptr(addr *uintptr, v uintptr) old uintptr
406 TEXT ·Anduintptr(SB), NOSPLIT, $0-24
409 // func Oruintptr(addr *uintptr, v uintptr) old uintptr
410 TEXT ·Oruintptr(SB), NOSPLIT, $0-24