1 // Copyright 2009 The Go Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style
3 // license that can be found in the LICENSE file.
13 "cmd/compile/internal/base"
14 "cmd/compile/internal/escape"
15 "cmd/compile/internal/ir"
16 "cmd/compile/internal/reflectdata"
17 "cmd/compile/internal/typecheck"
18 "cmd/compile/internal/types"
21 // Rewrite append(src, x, y, z) so that any side effects in
22 // x, y, z (including runtime panics) are evaluated in
23 // initialization statements before the append.
24 // For normal code generation, stop there and leave the
27 // For race detector, expand append(src, a [, b]* ) to
31 // const argc = len(args) - 1
32 // newLen := s.len + argc
33 // if uint(newLen) <= uint(s.cap) {
36 // s = growslice(s.ptr, newLen, s.cap, argc, elemType)
38 // s[s.len - argc] = a
39 // s[s.len - argc + 1] = b
// walkAppend walks an OAPPEND call n. dst is the destination expression
// the result will be assigned to; it is used to decide whether Args[0]
// must first be rewritten into a safe (single-evaluation) form.
// NOTE(review): this chunk has elided interior lines (the embedded line
// numbers jump), so the visible statements are not contiguous.
43 func walkAppend(n *ir.CallExpr, init *ir.Nodes, dst ir.Node) ir.Node {
// If dst and the append target are not the same safe expression,
// force the target into a safe form and walk it once up front.
44 if !ir.SameSafeExpr(dst, n.Args[0]) {
45 n.Args[0] = safeExpr(n.Args[0], init)
46 n.Args[0] = walkExpr(n.Args[0], init)
48 walkExprListSafe(n.Args[1:], init)
52 // walkExprListSafe will leave OINDEX (s[n]) alone if both s
53 // and n are name or literal, but those may index the slice we're
54 // modifying here. Fix explicitly.
55 // Using cheapExpr also makes sure that the evaluation
56 // of all arguments (and especially any panics) happen
57 // before we begin to modify the slice in a visible way.
59 for i, n := range ls {
60 n = cheapExpr(n, init)
// Convert each appended value to the slice element type where needed.
61 if !types.Identical(n.Type(), nsrc.Type().Elem()) {
62 n = typecheck.AssignConv(n, nsrc.Type().Elem(), "append")
// argc is the number of values being appended (all args except the slice).
68 argc := len(n.Args) - 1
73 // General case, with no function calls left as arguments.
74 // Leave for ssagen, except that instrumentation requires the old form.
75 if !base.Flag.Cfg.Instrumenting || base.Flag.CompilingRuntime {
81 // s = slice to append to
82 s := typecheck.TempAt(base.Pos, ir.CurFunc, nsrc.Type())
83 l = append(l, ir.NewAssignStmt(base.Pos, s, nsrc))
85 // num = number of things to append
86 num := ir.NewInt(base.Pos, int64(argc))
88 // newLen := s.len + num
89 newLen := typecheck.TempAt(base.Pos, ir.CurFunc, types.Types[types.TINT])
90 l = append(l, ir.NewAssignStmt(base.Pos, newLen, ir.NewBinaryExpr(base.Pos, ir.OADD, ir.NewUnaryExpr(base.Pos, ir.OLEN, s), num)))
92 // if uint(newLen) <= uint(s.cap)
93 nif := ir.NewIfStmt(base.Pos, nil, nil, nil)
94 nif.Cond = ir.NewBinaryExpr(base.Pos, ir.OLE, typecheck.Conv(newLen, types.Types[types.TUINT]), typecheck.Conv(ir.NewUnaryExpr(base.Pos, ir.OCAP, s), types.Types[types.TUINT]))
// Fast path: capacity suffices, just reslice. SetBounded(true) tells
// later passes the slice bounds check can be elided.
98 slice := ir.NewSliceExpr(base.Pos, ir.OSLICE, s, nil, newLen, nil)
99 slice.SetBounded(true)
100 nif.Body = []ir.Node{
101 ir.NewAssignStmt(base.Pos, s, slice),
104 fn := typecheck.LookupRuntime("growslice") // growslice(ptr *T, newLen, oldCap, num int, <type>) (ret []T)
105 fn = typecheck.SubstArgTypes(fn, s.Type().Elem(), s.Type().Elem())
107 // else { s = growslice(s.ptr, n, s.cap, a, T) }
108 nif.Else = []ir.Node{
109 ir.NewAssignStmt(base.Pos, s, mkcall1(fn, s.Type(), nif.PtrInit(),
110 ir.NewUnaryExpr(base.Pos, ir.OSPTR, s),
112 ir.NewUnaryExpr(base.Pos, ir.OCAP, s),
114 reflectdata.TypePtrAt(base.Pos, s.Type().Elem()))),
// Store each appended value into its slot: s[newLen-argc+i] = arg.
120 for i, n := range ls {
121 // s[s.len-argc+i] = arg
122 ix := ir.NewIndexExpr(base.Pos, s, ir.NewBinaryExpr(base.Pos, ir.OSUB, newLen, ir.NewInt(base.Pos, int64(argc-i))))
124 l = append(l, ir.NewAssignStmt(base.Pos, ix, n))
133 // walkClear walks an OCLEAR node.
// clear(x) on an array/slice lowers via arrayClear; clear(m) on a map
// lowers to mapClear with the map's runtime type pointer.
134 func walkClear(n *ir.UnaryExpr) ir.Node {
138 if n := arrayClear(n.X.Pos(), n.X, nil); n != nil {
141 // If n == nil, we are clearing an array which takes zero memory, do nothing.
// An empty block statement is the cheapest no-op node to return here.
142 return ir.NewBlockStmt(n.Pos(), nil)
144 return mapClear(n.X, reflectdata.TypePtrAt(n.X.Pos(), n.X.Type()))
149 // walkClose walks an OCLOSE node.
// close(ch) lowers to a call to runtime.closechan(ch).
150 func walkClose(n *ir.UnaryExpr, init *ir.Nodes) ir.Node {
151 // cannot use chanfn - closechan takes any, not chan any
152 fn := typecheck.LookupRuntime("closechan")
// Substitute the concrete channel type for closechan's generic parameter.
153 fn = typecheck.SubstArgTypes(fn, n.X.Type())
154 return mkcall1(fn, nil, init, n.X)
157 // Lower copy(a, b) to a memmove call or a runtime call.
161 // if n > len(b) { n = len(b) }
162 // if a.ptr != b.ptr { memmove(a.ptr, b.ptr, n*sizeof(elem(a))) }
166 // Also works if b is a string.
// walkCopy walks an OCOPY node. Three lowerings, chosen in order:
//  1. element type has pointers -> runtime.typedslicecopy (write barriers);
//  2. runtimecall (instrumentation etc.) -> runtime.slicecopy;
//  3. otherwise open-coded length clamp + pointer compare + memmove.
167 func walkCopy(n *ir.BinaryExpr, init *ir.Nodes, runtimecall bool) ir.Node {
168 if n.X.Type().Elem().HasPointers() {
// Record the write-barrier position for diagnostics/debug info.
169 ir.CurFunc.SetWBPos(n.Pos())
170 fn := writebarrierfn("typedslicecopy", n.X.Type().Elem(), n.Y.Type().Elem())
171 n.X = cheapExpr(n.X, init)
172 ptrL, lenL := backingArrayPtrLen(n.X)
173 n.Y = cheapExpr(n.Y, init)
174 ptrR, lenR := backingArrayPtrLen(n.Y)
175 return mkcall1(fn, n.Type(), init, reflectdata.CopyElemRType(base.Pos, n), ptrL, lenL, ptrR, lenR)
179 // rely on runtime to instrument:
180 // copy(n.Left, n.Right)
181 // n.Right can be a slice or string.
183 n.X = cheapExpr(n.X, init)
184 ptrL, lenL := backingArrayPtrLen(n.X)
185 n.Y = cheapExpr(n.Y, init)
186 ptrR, lenR := backingArrayPtrLen(n.Y)
188 fn := typecheck.LookupRuntime("slicecopy")
189 fn = typecheck.SubstArgTypes(fn, ptrL.Type().Elem(), ptrR.Type().Elem())
// slicecopy also takes the element width so it can compute byte counts.
191 return mkcall1(fn, n.Type(), init, ptrL, lenL, ptrR, lenR, ir.NewInt(base.Pos, n.X.Type().Elem().Size()))
// Open-coded path: evaluate both operands into temps first.
194 n.X = walkExpr(n.X, init)
195 n.Y = walkExpr(n.Y, init)
196 nl := typecheck.TempAt(base.Pos, ir.CurFunc, n.X.Type())
197 nr := typecheck.TempAt(base.Pos, ir.CurFunc, n.Y.Type())
199 l = append(l, ir.NewAssignStmt(base.Pos, nl, n.X))
200 l = append(l, ir.NewAssignStmt(base.Pos, nr, n.Y))
// nfrm/nto are the raw backing-array pointers of source and destination.
202 nfrm := ir.NewUnaryExpr(base.Pos, ir.OSPTR, nr)
203 nto := ir.NewUnaryExpr(base.Pos, ir.OSPTR, nl)
205 nlen := typecheck.TempAt(base.Pos, ir.CurFunc, types.Types[types.TINT])
// nlen starts as len(dst); clamped to len(src) below.
208 l = append(l, ir.NewAssignStmt(base.Pos, nlen, ir.NewUnaryExpr(base.Pos, ir.OLEN, nl)))
210 // if n > len(frm) { n = len(frm) }
211 nif := ir.NewIfStmt(base.Pos, nil, nil, nil)
213 nif.Cond = ir.NewBinaryExpr(base.Pos, ir.OGT, nlen, ir.NewUnaryExpr(base.Pos, ir.OLEN, nr))
214 nif.Body.Append(ir.NewAssignStmt(base.Pos, nlen, ir.NewUnaryExpr(base.Pos, ir.OLEN, nr)))
217 // if to.ptr != frm.ptr { memmove( ... ) }
218 ne := ir.NewIfStmt(base.Pos, ir.NewBinaryExpr(base.Pos, ir.ONE, nto, nfrm), nil, nil)
222 fn := typecheck.LookupRuntime("memmove")
223 fn = typecheck.SubstArgTypes(fn, nl.Type().Elem(), nl.Type().Elem())
// nwid = uintptr(nlen) * elem size, computed inside the != branch.
224 nwid := ir.Node(typecheck.TempAt(base.Pos, ir.CurFunc, types.Types[types.TUINTPTR]))
225 setwid := ir.NewAssignStmt(base.Pos, nwid, typecheck.Conv(nlen, types.Types[types.TUINTPTR]))
226 ne.Body.Append(setwid)
227 nwid = ir.NewBinaryExpr(base.Pos, ir.OMUL, nwid, ir.NewInt(base.Pos, nl.Type().Elem().Size()))
228 call := mkcall1(fn, nil, init, nto, nfrm, nwid)
237 // walkDelete walks an ODELETE node.
// delete(m, k) lowers to the map-shape-specific runtime mapdelete variant.
238 func walkDelete(init *ir.Nodes, n *ir.CallExpr) ir.Node {
239 init.Append(ir.TakeInit(n)...)
242 map_ = walkExpr(map_, init)
243 key = walkExpr(key, init)
// fast selects a specialized key calling convention (e.g. fast32/fast64/faststr);
// mapKeyArg adapts the key expression accordingly.
247 key = mapKeyArg(fast, n, key, false)
248 return mkcall1(mapfndel(mapdelete[fast], t), nil, init, reflectdata.DeleteMapRType(base.Pos, n), map_, key)
251 // walkLenCap walks an OLEN or OCAP node.
// Special cases visible here: len([]rune(s)) -> runtime.countrunes(s),
// len(string(b)) via the backing-array length, and len of a fixed-size
// array folded to its constant element count.
252 func walkLenCap(n *ir.UnaryExpr, init *ir.Nodes) ir.Node {
254 // Replace len([]rune(string)) with runtime.countrunes(string).
255 return mkcall("countrunes", n.Type(), init, typecheck.Conv(n.X.(*ir.ConvExpr).X, types.Types[types.TSTRING]))
// len(string([]byte)) case: take the length of the byte slice directly,
// avoiding the string conversion; cheapExpr keeps evaluation single.
258 conv := n.X.(*ir.ConvExpr)
259 walkStmtList(conv.Init())
260 init.Append(ir.TakeInit(conv)...)
261 _, len := backingArrayPtrLen(cheapExpr(conv.X, init))
265 n.X = walkExpr(n.X, init)
267 // replace len(*[10]int) with 10.
268 // delayed until now to preserve side effects.
// OrigInt keeps the original expression for position/diagnostics while
// substituting the constant element count.
276 con := typecheck.OrigInt(n, t.NumElem())
283 // walkMakeChan walks an OMAKECHAN node.
284 func walkMakeChan(n *ir.MakeExpr, init *ir.Nodes) ir.Node {
285 // When size fits into int, use makechan instead of
286 // makechan64, which is faster and shorter on 32 bit platforms.
// Default to the 64-bit entry point; narrowed to "makechan" below when safe.
288 fnname := "makechan64"
289 argtype := types.Types[types.TINT64]
291 // Type checking guarantees that TIDEAL size is positive and fits in an int.
292 // The case of size overflow when converting TUINT or TUINTPTR to TINT
293 // will be handled by the negative range checks in makechan during runtime.
294 if size.Type().IsKind(types.TIDEAL) || size.Type().Size() <= types.Types[types.TUINT].Size() {
296 argtype = types.Types[types.TINT]
299 return mkcall1(chanfn(fnname, 1, n.Type()), n.Type(), init, reflectdata.MakeChanRType(base.Pos, n), typecheck.Conv(size, argtype))
302 // walkMakeMap walks an OMAKEMAP node.
// Chooses between stack-allocating the hmap (EscNone), the small-map
// fast paths (makemap_small / stack hmap+bucket), and a full call to
// runtime.makemap / makemap64 for variable or large hints.
303 func walkMakeMap(n *ir.MakeExpr, init *ir.Nodes) ir.Node {
305 hmapType := reflectdata.MapType()
310 if n.Esc() == ir.EscNone {
311 // Allocate hmap on stack.
315 h = stackTempAddr(init, hmapType)
317 // Allocate one bucket pointed to by hmap.buckets on stack if hint
318 // is not larger than BUCKETSIZE. In case hint is larger than
319 // BUCKETSIZE runtime.makemap will allocate the buckets on the heap.
320 // Maximum key and elem size is 128 bytes, larger objects
321 // are stored with an indirection. So max bucket size is 2048+eps.
322 if !ir.IsConst(hint, constant.Int) ||
323 constant.Compare(hint.Val(), token.LEQ, constant.MakeInt64(reflectdata.BUCKETSIZE)) {
325 // In case hint is larger than BUCKETSIZE runtime.makemap
326 // will allocate the buckets on the heap, see #20184
328 // if hint <= BUCKETSIZE {
// Guard the stack-bucket assignment behind a runtime hint check when
// the hint is not a compile-time constant.
334 nif := ir.NewIfStmt(base.Pos, ir.NewBinaryExpr(base.Pos, ir.OLE, hint, ir.NewInt(base.Pos, reflectdata.BUCKETSIZE)), nil, nil)
339 b := stackTempAddr(&nif.Body, reflectdata.MapBucketType(t))
// Field index 5 is hmap.buckets; the layout is mirrored from
// reflectdata's hmap definition (see the trailing comment).
342 bsym := hmapType.Field(5).Sym // hmap.buckets see reflect.go:hmap
343 na := ir.NewAssignStmt(base.Pos, ir.NewSelectorExpr(base.Pos, ir.ODOT, h, bsym), typecheck.ConvNop(b, types.Types[types.TUNSAFEPTR]))
345 appendWalkStmt(init, nif)
349 if ir.IsConst(hint, constant.Int) && constant.Compare(hint.Val(), token.LEQ, constant.MakeInt64(reflectdata.BUCKETSIZE)) {
350 // Handling make(map[any]any) and
351 // make(map[any]any, hint) where hint <= BUCKETSIZE
352 // special allows for faster map initialization and
353 // improves binary size by using calls with fewer arguments.
354 // For hint <= BUCKETSIZE overLoadFactor(hint, 0) is false
355 // and no buckets will be allocated by makemap. Therefore,
356 // no buckets need to be allocated in this code path.
357 if n.Esc() == ir.EscNone {
358 // Only need to initialize h.hash0 since
359 // hmap h has been allocated on the stack already.
360 // h.hash0 = fastrand()
361 rand := mkcall("fastrand", types.Types[types.TUINT32], init)
362 hashsym := hmapType.Field(4).Sym // hmap.hash0 see reflect.go:hmap
363 appendWalkStmt(init, ir.NewAssignStmt(base.Pos, ir.NewSelectorExpr(base.Pos, ir.ODOT, h, hashsym), rand))
364 return typecheck.ConvNop(h, t)
366 // Call runtime.makehmap to allocate an
367 // hmap on the heap and initialize hmap's hash0 field.
368 fn := typecheck.LookupRuntime("makemap_small")
369 fn = typecheck.SubstArgTypes(fn, t.Key(), t.Elem())
370 return mkcall1(fn, n.Type(), init)
// Escaping map: pass nil for h and let makemap allocate the hmap.
373 if n.Esc() != ir.EscNone {
374 h = typecheck.NodNil()
376 // Map initialization with a variable or large hint is
377 // more complicated. We therefore generate a call to
378 // runtime.makemap to initialize hmap and allocate the
381 // When hint fits into int, use makemap instead of
382 // makemap64, which is faster and shorter on 32 bit platforms.
383 fnname := "makemap64"
384 argtype := types.Types[types.TINT64]
386 // Type checking guarantees that TIDEAL hint is positive and fits in an int.
387 // See checkmake call in TMAP case of OMAKE case in OpSwitch in typecheck1 function.
388 // The case of hint overflow when converting TUINT or TUINTPTR to TINT
389 // will be handled by the negative range checks in makemap during runtime.
390 if hint.Type().IsKind(types.TIDEAL) || hint.Type().Size() <= types.Types[types.TUINT].Size() {
392 argtype = types.Types[types.TINT]
395 fn := typecheck.LookupRuntime(fnname)
396 fn = typecheck.SubstArgTypes(fn, hmapType, t.Key(), t.Elem())
397 return mkcall1(fn, n.Type(), init, reflectdata.MakeMapRType(base.Pos, n), typecheck.Conv(hint, argtype), h)
400 // walkMakeSlice walks an OMAKESLICE node.
// Non-escaping slices with a constant cap become a stack array plus a
// reslice; escaping slices become a runtime.makeslice/makeslice64 call.
401 func walkMakeSlice(n *ir.MakeExpr, init *ir.Nodes) ir.Node {
405 r = safeExpr(l, init)
// Types marked NotInHeap (e.g. runtime-internal types) cannot be
// allocated from Go code; report an error rather than ICE.
409 if t.Elem().NotInHeap() {
410 base.Errorf("%v can't be allocated in Go; it is incomplete (or unallocatable)", t.Elem())
412 if n.Esc() == ir.EscNone {
413 if why := escape.HeapAllocReason(n); why != "" {
414 base.Fatalf("%v has EscNone, but %v", n, why)
// Stack path requires the cap to be an index constant.
418 i := typecheck.IndexConst(r)
420 base.Fatalf("walkExpr: invalid index %v", r)
423 // cap is constrained to [0,2^31) or [0,2^63) depending on whether
424 // we're in 32-bit or 64-bit systems. So it's safe to do:
426 // if uint64(len) > cap {
427 // if len < 0 { panicmakeslicelen() }
428 // panicmakeslicecap()
430 nif := ir.NewIfStmt(base.Pos, ir.NewBinaryExpr(base.Pos, ir.OGT, typecheck.Conv(l, types.Types[types.TUINT64]), ir.NewInt(base.Pos, i)), nil, nil)
431 niflen := ir.NewIfStmt(base.Pos, ir.NewBinaryExpr(base.Pos, ir.OLT, l, ir.NewInt(base.Pos, 0)), nil, nil)
432 niflen.Body = []ir.Node{mkcall("panicmakeslicelen", nil, init)}
433 nif.Body.Append(niflen, mkcall("panicmakeslicecap", nil, init))
434 init.Append(typecheck.Stmt(nif))
436 t = types.NewArray(t.Elem(), i) // [r]T
437 var_ := typecheck.TempAt(base.Pos, ir.CurFunc, t)
438 appendWalkStmt(init, ir.NewAssignStmt(base.Pos, var_, nil)) // zero temp
439 r := ir.NewSliceExpr(base.Pos, ir.OSLICE, var_, nil, l, nil) // arr[:l]
440 // The conv is necessary in case n.Type is named.
441 return walkExpr(typecheck.Expr(typecheck.Conv(r, n.Type())), init)
444 // n escapes; set up a call to makeslice.
445 // When len and cap can fit into int, use makeslice instead of
446 // makeslice64, which is faster and shorter on 32 bit platforms.
450 fnname := "makeslice64"
451 argtype := types.Types[types.TINT64]
453 // Type checking guarantees that TIDEAL len/cap are positive and fit in an int.
454 // The case of len or cap overflow when converting TUINT or TUINTPTR to TINT
455 // will be handled by the negative range checks in makeslice during runtime.
456 if (len.Type().IsKind(types.TIDEAL) || len.Type().Size() <= types.Types[types.TUINT].Size()) &&
457 (cap.Type().IsKind(types.TIDEAL) || cap.Type().Size() <= types.Types[types.TUINT].Size()) {
459 argtype = types.Types[types.TINT]
461 fn := typecheck.LookupRuntime(fnname)
// makeslice returns only the data pointer; the slice header is built below.
462 ptr := mkcall1(fn, types.Types[types.TUNSAFEPTR], init, reflectdata.MakeSliceElemRType(base.Pos, n), typecheck.Conv(len, argtype), typecheck.Conv(cap, argtype))
464 len = typecheck.Conv(len, types.Types[types.TINT])
465 cap = typecheck.Conv(cap, types.Types[types.TINT])
466 sh := ir.NewSliceHeaderExpr(base.Pos, t, ptr, len, cap)
467 return walkExpr(typecheck.Expr(sh), init)
470 // walkMakeSliceCopy walks an OMAKESLICECOPY node.
// Lowers the fused make+copy pattern either to mallocgc+memmove (pointer-free
// elements with proven equal lengths) or to runtime.makeslicecopy.
471 func walkMakeSliceCopy(n *ir.MakeExpr, init *ir.Nodes) ir.Node {
// OMAKESLICECOPY is only produced for escaping slices; EscNone is a bug.
472 if n.Esc() == ir.EscNone {
473 base.Fatalf("OMAKESLICECOPY with EscNone: %v", n)
477 if t.Elem().NotInHeap() {
478 base.Errorf("%v can't be allocated in Go; it is incomplete (or unallocatable)", t.Elem())
481 length := typecheck.Conv(n.Len, types.Types[types.TINT])
// n.Cap holds the copy source expression for this op; take its len/ptr.
482 copylen := ir.NewUnaryExpr(base.Pos, ir.OLEN, n.Cap)
483 copyptr := ir.NewUnaryExpr(base.Pos, ir.OSPTR, n.Cap)
485 if !t.Elem().HasPointers() && n.Bounded() {
486 // When len(to)==len(from) and elements have no pointers:
487 // replace make+copy with runtime.mallocgc+runtime.memmove.
489 // We do not check for overflow of len(to)*elem.Width here
490 // since len(from) is an existing checked slice capacity
491 // with same elem.Width for the from slice.
492 size := ir.NewBinaryExpr(base.Pos, ir.OMUL, typecheck.Conv(length, types.Types[types.TUINTPTR]), typecheck.Conv(ir.NewInt(base.Pos, t.Elem().Size()), types.Types[types.TUINTPTR]))
494 // instantiate mallocgc(size uintptr, typ *byte, needszero bool) unsafe.Pointer
495 fn := typecheck.LookupRuntime("mallocgc")
// needszero=false: memmove below overwrites the whole allocation.
496 ptr := mkcall1(fn, types.Types[types.TUNSAFEPTR], init, size, typecheck.NodNil(), ir.NewBool(base.Pos, false))
498 sh := ir.NewSliceHeaderExpr(base.Pos, t, ptr, length, length)
500 s := typecheck.TempAt(base.Pos, ir.CurFunc, t)
501 r := typecheck.Stmt(ir.NewAssignStmt(base.Pos, s, sh))
502 r = walkExpr(r, init)
505 // instantiate memmove(to *any, frm *any, size uintptr)
506 fn = typecheck.LookupRuntime("memmove")
507 fn = typecheck.SubstArgTypes(fn, t.Elem(), t.Elem())
508 ncopy := mkcall1(fn, nil, init, ir.NewUnaryExpr(base.Pos, ir.OSPTR, s), copyptr, size)
509 init.Append(walkExpr(typecheck.Stmt(ncopy), init))
513 // Replace make+copy with runtime.makeslicecopy.
514 // instantiate makeslicecopy(typ *byte, tolen int, fromlen int, from unsafe.Pointer) unsafe.Pointer
515 fn := typecheck.LookupRuntime("makeslicecopy")
516 ptr := mkcall1(fn, types.Types[types.TUNSAFEPTR], init, reflectdata.MakeSliceElemRType(base.Pos, n), length, copylen, typecheck.Conv(copyptr, types.Types[types.TUNSAFEPTR]))
518 sh := ir.NewSliceHeaderExpr(base.Pos, t, ptr, length, length)
519 return walkExpr(typecheck.Expr(sh), init)
522 // walkNew walks an ONEW node.
// Non-escaping new(T) of stack-allocatable size becomes the address of a
// zeroed stack temporary.
523 func walkNew(n *ir.UnaryExpr, init *ir.Nodes) ir.Node {
526 base.Errorf("%v can't be allocated in Go; it is incomplete (or unallocatable)", n.Type().Elem())
528 if n.Esc() == ir.EscNone {
// Escape analysis should never mark an over-limit allocation EscNone.
529 if t.Size() > ir.MaxImplicitStackVarSize {
530 base.Fatalf("large ONEW with EscNone: %v", n)
532 return stackTempAddr(init, t)
// walkMinMax walks an OMIN/OMAX call: hoist pending init statements and
// walk the arguments. (Remainder of the lowering is elided from this chunk.)
539 func walkMinMax(n *ir.CallExpr, init *ir.Nodes) ir.Node {
540 init.Append(ir.TakeInit(n)...)
541 walkExprList(n.Args, init)
545 // generate code for print.
// walkPrint lowers OPRINT/OPRINTN into a printlock/printunlock-bracketed
// sequence of runtime print* calls, one per (possibly merged) argument.
546 func walkPrint(nn *ir.CallExpr, init *ir.Nodes) ir.Node {
547 // Hoist all the argument evaluation up before the lock.
548 walkExprListCheap(nn.Args, init)
550 // For println, add " " between elements and "\n" at the end.
551 if nn.Op() == ir.OPRINTN {
553 t := make([]ir.Node, 0, len(s)*2)
554 for i, n := range s {
556 t = append(t, ir.NewString(base.Pos, " "))
560 t = append(t, ir.NewString(base.Pos, "\n"))
564 // Collapse runs of constant strings.
566 t := make([]ir.Node, 0, len(s))
567 for i := 0; i < len(s); {
569 for i < len(s) && ir.IsConst(s[i], constant.String) {
570 strs = append(strs, ir.StringVal(s[i]))
// Adjacent constant strings are joined into one runtime call.
574 t = append(t, ir.NewString(base.Pos, strings.Join(strs, "")))
// Bracket all output in printlock/printunlock so concurrent prints
// do not interleave.
583 calls := []ir.Node{mkcall("printlock", nil, init)}
584 for i, n := range nn.Args {
// Give untyped constants a concrete default type before dispatch.
585 if n.Op() == ir.OLITERAL {
586 if n.Type() == types.UntypedRune {
587 n = typecheck.DefaultLit(n, types.RuneType)
590 switch n.Val().Kind() {
592 n = typecheck.DefaultLit(n, types.Types[types.TINT64])
595 n = typecheck.DefaultLit(n, types.Types[types.TFLOAT64])
599 if n.Op() != ir.OLITERAL && n.Type() != nil && n.Type().Kind() == types.TIDEAL {
600 n = typecheck.DefaultLit(n, types.Types[types.TINT64])
602 n = typecheck.DefaultLit(n, nil)
604 if n.Type() == nil || n.Type().Kind() == types.TFORW {
// Dispatch on the argument's kind to pick the runtime print helper.
609 switch n.Type().Kind() {
611 if n.Type().IsEmptyInterface() {
612 on = typecheck.LookupRuntime("printeface")
614 on = typecheck.LookupRuntime("printiface")
616 on = typecheck.SubstArgTypes(on, n.Type()) // any-1
// Pointers to not-in-heap types can't be passed as unsafe.Pointer
// arguments; print them as uintptr instead (double conversion below
// launders the pointer through unsafe.Pointer).
618 if n.Type().Elem().NotInHeap() {
619 on = typecheck.LookupRuntime("printuintptr")
620 n = ir.NewConvExpr(base.Pos, ir.OCONV, nil, n)
621 n.SetType(types.Types[types.TUNSAFEPTR])
622 n = ir.NewConvExpr(base.Pos, ir.OCONV, nil, n)
623 n.SetType(types.Types[types.TUINTPTR])
627 case types.TCHAN, types.TMAP, types.TFUNC, types.TUNSAFEPTR:
628 on = typecheck.LookupRuntime("printpointer")
629 on = typecheck.SubstArgTypes(on, n.Type()) // any-1
631 on = typecheck.LookupRuntime("printslice")
632 on = typecheck.SubstArgTypes(on, n.Type()) // any-1
633 case types.TUINT, types.TUINT8, types.TUINT16, types.TUINT32, types.TUINT64, types.TUINTPTR:
// runtime.hex is a special named uint type printed in hexadecimal.
634 if types.IsRuntimePkg(n.Type().Sym().Pkg) && n.Type().Sym().Name == "hex" {
635 on = typecheck.LookupRuntime("printhex")
637 on = typecheck.LookupRuntime("printuint")
639 case types.TINT, types.TINT8, types.TINT16, types.TINT32, types.TINT64:
640 on = typecheck.LookupRuntime("printint")
641 case types.TFLOAT32, types.TFLOAT64:
642 on = typecheck.LookupRuntime("printfloat")
643 case types.TCOMPLEX64, types.TCOMPLEX128:
644 on = typecheck.LookupRuntime("printcomplex")
646 on = typecheck.LookupRuntime("printbool")
649 if ir.IsConst(n, constant.String) {
// Constant " " and "\n" separators use the dedicated helpers.
654 on = typecheck.LookupRuntime("printsp")
656 on = typecheck.LookupRuntime("printnl")
658 on = typecheck.LookupRuntime("printstring")
661 badtype(ir.OPRINT, n.Type(), nil)
665 r := ir.NewCallExpr(base.Pos, ir.OCALL, on, nil)
// Convert the argument to the helper's parameter type when it has one.
666 if params := on.Type().Params(); len(params) > 0 {
668 n = typecheck.Conv(n, t)
671 calls = append(calls, r)
674 calls = append(calls, mkcall("printunlock", nil, init))
676 typecheck.Stmts(calls)
677 walkExprList(calls, init)
// The whole sequence is returned as one block statement.
679 r := ir.NewBlockStmt(base.Pos, nil)
681 return walkStmt(typecheck.Stmt(r))
684 // walkRecoverFP walks an ORECOVERFP node.
// Lowers to runtime.gorecover(fp), where Args[0] is the caller frame pointer.
685 func walkRecoverFP(nn *ir.CallExpr, init *ir.Nodes) ir.Node {
686 return mkcall("gorecover", nn.Type(), init, walkExpr(nn.Args[0], init))
689 // walkUnsafeData walks an OUNSAFESLICEDATA or OUNSAFESTRINGDATA expression.
// Both reduce to taking the operand's backing pointer (OSPTR) and retyping
// it to the expression's result type.
690 func walkUnsafeData(n *ir.UnaryExpr, init *ir.Nodes) ir.Node {
691 slice := walkExpr(n.X, init)
692 res := typecheck.Expr(ir.NewUnaryExpr(n.Pos(), ir.OSPTR, slice))
693 res.SetType(n.Type())
694 return walkExpr(res, init)
// walkUnsafeSlice walks an OUNSAFESLICE (unsafe.Slice(ptr, len)) node.
// Under checkptr it calls runtime.unsafeslicecheckptr; otherwise the
// checks are open-coded, with a zero-size-element fast path, before the
// slice header is assembled.
697 func walkUnsafeSlice(n *ir.BinaryExpr, init *ir.Nodes) ir.Node {
698 ptr := safeExpr(n.X, init)
699 len := safeExpr(n.Y, init)
700 sliceType := n.Type()
702 lenType := types.Types[types.TINT64]
703 unsafePtr := typecheck.Conv(ptr, types.Types[types.TUNSAFEPTR])
705 // If checkptr enabled, call runtime.unsafeslicecheckptr to check ptr and len.
706 // for simplicity, unsafeslicecheckptr always uses int64.
707 // Type checking guarantees that TIDEAL len/cap are positive and fit in an int.
708 // The case of len or cap overflow when converting TUINT or TUINTPTR to TINT
709 // will be handled by the negative range checks in unsafeslice during runtime.
710 if ir.ShouldCheckPtr(ir.CurFunc, 1) {
711 fnname := "unsafeslicecheckptr"
712 fn := typecheck.LookupRuntime(fnname)
713 init.Append(mkcall1(fn, nil, init, reflectdata.UnsafeSliceElemRType(base.Pos, n), unsafePtr, typecheck.Conv(len, lenType)))
715 // Otherwise, open code unsafe.Slice to prevent runtime call overhead.
716 // Keep this code in sync with runtime.unsafeslice{,64}
717 if len.Type().IsKind(types.TIDEAL) || len.Type().Size() <= types.Types[types.TUINT].Size() {
718 lenType = types.Types[types.TINT]
720 // len64 := int64(len)
721 // if int64(int(len64)) != len64 {
722 // panicunsafeslicelen()
// Detects a 64-bit length that does not round-trip through int (32-bit
// platforms only; on 64-bit this check is trivially false).
724 len64 := typecheck.Conv(len, lenType)
725 nif := ir.NewIfStmt(base.Pos, nil, nil, nil)
726 nif.Cond = ir.NewBinaryExpr(base.Pos, ir.ONE, typecheck.Conv(typecheck.Conv(len64, types.Types[types.TINT]), lenType), len64)
727 nif.Body.Append(mkcall("panicunsafeslicelen", nil, &nif.Body))
728 appendWalkStmt(init, nif)
731 // if len < 0 { panicunsafeslicelen() }
732 nif := ir.NewIfStmt(base.Pos, nil, nil, nil)
733 nif.Cond = ir.NewBinaryExpr(base.Pos, ir.OLT, typecheck.Conv(len, lenType), ir.NewInt(base.Pos, 0))
734 nif.Body.Append(mkcall("panicunsafeslicelen", nil, &nif.Body))
735 appendWalkStmt(init, nif)
// Zero-size element fast path: no size computation or overflow check
// is needed, only the nil-pointer-with-positive-length check.
737 if sliceType.Elem().Size() == 0 {
738 // if ptr == nil && len > 0 {
739 // panicunsafesliceptrnil()
741 nifPtr := ir.NewIfStmt(base.Pos, nil, nil, nil)
742 isNil := ir.NewBinaryExpr(base.Pos, ir.OEQ, unsafePtr, typecheck.NodNil())
743 gtZero := ir.NewBinaryExpr(base.Pos, ir.OGT, typecheck.Conv(len, lenType), ir.NewInt(base.Pos, 0))
745 ir.NewLogicalExpr(base.Pos, ir.OANDAND, isNil, gtZero)
746 nifPtr.Body.Append(mkcall("panicunsafeslicenilptr", nil, &nifPtr.Body))
747 appendWalkStmt(init, nifPtr)
749 h := ir.NewSliceHeaderExpr(n.Pos(), sliceType,
750 typecheck.Conv(ptr, types.Types[types.TUNSAFEPTR]),
751 typecheck.Conv(len, types.Types[types.TINT]),
752 typecheck.Conv(len, types.Types[types.TINT]))
753 return walkExpr(typecheck.Expr(h), init)
756 // mem, overflow := runtime.mulUintptr(et.size, len)
757 mem := typecheck.TempAt(base.Pos, ir.CurFunc, types.Types[types.TUINTPTR])
758 overflow := typecheck.TempAt(base.Pos, ir.CurFunc, types.Types[types.TBOOL])
759 fn := typecheck.LookupRuntime("mulUintptr")
760 call := mkcall1(fn, fn.Type().ResultsTuple(), init, ir.NewInt(base.Pos, sliceType.Elem().Size()), typecheck.Conv(typecheck.Conv(len, lenType), types.Types[types.TUINTPTR]))
761 appendWalkStmt(init, ir.NewAssignListStmt(base.Pos, ir.OAS2, []ir.Node{mem, overflow}, []ir.Node{call}))
763 // if overflow || mem > -uintptr(ptr) {
765 // panicunsafesliceptrnil()
767 // panicunsafeslicelen()
// mem > -uintptr(ptr) detects a slice that would wrap the address space;
// the nested if distinguishes the nil-pointer panic from the length panic.
769 nif = ir.NewIfStmt(base.Pos, nil, nil, nil)
770 memCond := ir.NewBinaryExpr(base.Pos, ir.OGT, mem, ir.NewUnaryExpr(base.Pos, ir.ONEG, typecheck.Conv(unsafePtr, types.Types[types.TUINTPTR])))
771 nif.Cond = ir.NewLogicalExpr(base.Pos, ir.OOROR, overflow, memCond)
772 nifPtr := ir.NewIfStmt(base.Pos, nil, nil, nil)
773 nifPtr.Cond = ir.NewBinaryExpr(base.Pos, ir.OEQ, unsafePtr, typecheck.NodNil())
774 nifPtr.Body.Append(mkcall("panicunsafeslicenilptr", nil, &nifPtr.Body))
775 nif.Body.Append(nifPtr, mkcall("panicunsafeslicelen", nil, &nif.Body))
776 appendWalkStmt(init, nif)
779 h := ir.NewSliceHeaderExpr(n.Pos(), sliceType,
780 typecheck.Conv(ptr, types.Types[types.TUNSAFEPTR]),
781 typecheck.Conv(len, types.Types[types.TINT]),
782 typecheck.Conv(len, types.Types[types.TINT]))
783 return walkExpr(typecheck.Expr(h), init)
// walkUnsafeString walks an OUNSAFESTR (unsafe.String(ptr, len)) node.
// Mirrors walkUnsafeSlice: checkptr call, or open-coded length/pointer
// checks, then a string header built from ptr and len.
786 func walkUnsafeString(n *ir.BinaryExpr, init *ir.Nodes) ir.Node {
787 ptr := safeExpr(n.X, init)
788 len := safeExpr(n.Y, init)
790 lenType := types.Types[types.TINT64]
791 unsafePtr := typecheck.Conv(ptr, types.Types[types.TUNSAFEPTR])
793 // If checkptr enabled, call runtime.unsafestringcheckptr to check ptr and len.
794 // for simplicity, unsafestringcheckptr always uses int64.
795 // Type checking guarantees that TIDEAL len are positive and fit in an int.
796 if ir.ShouldCheckPtr(ir.CurFunc, 1) {
797 fnname := "unsafestringcheckptr"
798 fn := typecheck.LookupRuntime(fnname)
799 init.Append(mkcall1(fn, nil, init, unsafePtr, typecheck.Conv(len, lenType)))
801 // Otherwise, open code unsafe.String to prevent runtime call overhead.
802 // Keep this code in sync with runtime.unsafestring{,64}
803 if len.Type().IsKind(types.TIDEAL) || len.Type().Size() <= types.Types[types.TUINT].Size() {
804 lenType = types.Types[types.TINT]
806 // len64 := int64(len)
807 // if int64(int(len64)) != len64 {
808 // panicunsafestringlen()
// Round-trip check: catches lengths that don't fit in int (32-bit only).
810 len64 := typecheck.Conv(len, lenType)
811 nif := ir.NewIfStmt(base.Pos, nil, nil, nil)
812 nif.Cond = ir.NewBinaryExpr(base.Pos, ir.ONE, typecheck.Conv(typecheck.Conv(len64, types.Types[types.TINT]), lenType), len64)
813 nif.Body.Append(mkcall("panicunsafestringlen", nil, &nif.Body))
814 appendWalkStmt(init, nif)
817 // if len < 0 { panicunsafestringlen() }
818 nif := ir.NewIfStmt(base.Pos, nil, nil, nil)
819 nif.Cond = ir.NewBinaryExpr(base.Pos, ir.OLT, typecheck.Conv(len, lenType), ir.NewInt(base.Pos, 0))
820 nif.Body.Append(mkcall("panicunsafestringlen", nil, &nif.Body))
821 appendWalkStmt(init, nif)
823 // if uintpr(len) > -uintptr(ptr) {
825 // panicunsafestringnilptr()
827 // panicunsafeslicelen()
// Address-space wrap check; inner if picks the nil-pointer panic when
// ptr is nil, the length panic otherwise.
829 nifLen := ir.NewIfStmt(base.Pos, nil, nil, nil)
830 nifLen.Cond = ir.NewBinaryExpr(base.Pos, ir.OGT, typecheck.Conv(len, types.Types[types.TUINTPTR]), ir.NewUnaryExpr(base.Pos, ir.ONEG, typecheck.Conv(unsafePtr, types.Types[types.TUINTPTR])))
831 nifPtr := ir.NewIfStmt(base.Pos, nil, nil, nil)
832 nifPtr.Cond = ir.NewBinaryExpr(base.Pos, ir.OEQ, unsafePtr, typecheck.NodNil())
833 nifPtr.Body.Append(mkcall("panicunsafestringnilptr", nil, &nifPtr.Body))
834 nifLen.Body.Append(nifPtr, mkcall("panicunsafestringlen", nil, &nifLen.Body))
835 appendWalkStmt(init, nifLen)
837 h := ir.NewStringHeaderExpr(n.Pos(),
838 typecheck.Conv(ptr, types.Types[types.TUNSAFEPTR]),
839 typecheck.Conv(len, types.Types[types.TINT]),
841 return walkExpr(typecheck.Expr(h), init)
// badtype reports an "illegal types for operand" error for op applied to
// operand types tl and/or tr (either may be nil), appending a hint for
// the common *struct-vs-*interface confusion.
844 func badtype(op ir.Op, tl, tr *types.Type) {
847 s += fmt.Sprintf("\n\t%v", tl)
850 s += fmt.Sprintf("\n\t%v", tr)
853 // common mistake: *struct and *interface.
854 if tl != nil && tr != nil && tl.IsPtr() && tr.IsPtr() {
855 if tl.Elem().IsStruct() && tr.Elem().IsInterface() {
856 s += "\n\t(*struct vs *interface)"
857 } else if tl.Elem().IsInterface() && tr.Elem().IsStruct() {
858 s += "\n\t(*interface vs *struct)"
862 base.Errorf("illegal types for operand: %v%s", op, s)
// writebarrierfn looks up the runtime function name and substitutes the
// concrete types l and r for its two generic ("any") parameters.
865 func writebarrierfn(name string, l *types.Type, r *types.Type) ir.Node {
866 fn := typecheck.LookupRuntime(name)
867 fn = typecheck.SubstArgTypes(fn, l, r)
871 // isRuneCount reports whether n is of the form len([]rune(string)).
872 // These are optimized into a call to runtime.countrunes.
// The optimization is disabled under -N (no optimization) and when
// instrumenting (race/msan), matching isByteCount below.
873 func isRuneCount(n ir.Node) bool {
874 return base.Flag.N == 0 && !base.Flag.Cfg.Instrumenting && n.Op() == ir.OLEN && n.(*ir.UnaryExpr).X.Op() == ir.OSTR2RUNES
877 // isByteCount reports whether n is of the form len(string([]byte)).
// Accepts both OBYTES2STR and the temporary-backed OBYTES2STRTMP form;
// disabled under -N and when instrumenting, matching isRuneCount above.
878 func isByteCount(n ir.Node) bool {
879 return base.Flag.N == 0 && !base.Flag.Cfg.Instrumenting && n.Op() == ir.OLEN &&
880 (n.(*ir.UnaryExpr).X.Op() == ir.OBYTES2STR || n.(*ir.UnaryExpr).X.Op() == ir.OBYTES2STRTMP)