1 // Copyright 2009 The Go Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style
3 // license that can be found in the LICENSE file.
13 "cmd/compile/internal/base"
14 "cmd/compile/internal/ir"
15 "cmd/compile/internal/objw"
16 "cmd/compile/internal/reflectdata"
17 "cmd/compile/internal/staticdata"
18 "cmd/compile/internal/typecheck"
19 "cmd/compile/internal/types"
24 // The result of walkExpr MUST be assigned back to n, e.g.
26 // n.Left = walkExpr(n.Left, init)
// walkExpr lowers one fully-typechecked expression for the back end:
// it hoists n's init list into init, dispatches to walkExpr1, and then
// performs per-node bookkeeping (size computation, string-symbol emission).
// NOTE(review): interior lines are elided in this excerpt.
27 func walkExpr(n ir.Node, init *ir.Nodes) ir.Node {
// Guard: walking with init aliased to n's own init list would lose the
// list when n is replaced below.
32 if n, ok := n.(ir.InitNode); ok && init == n.PtrInit() {
33 // not okay to use n->ninit when walking n,
34 // because we might replace n with some other node
35 // and would lose the init list.
36 base.Fatalf("walkExpr init == &n->ninit")
// Move n's pending init statements out into the caller-supplied list.
39 if len(n.Init()) != 0 {
40 walkStmtList(n.Init())
41 init.Append(ir.TakeInit(n)...)
// -W -W: dump the tree before walking.
46 if base.Flag.LowerW > 1 {
47 ir.Dump("before walk expr", n)
// Sanity checks: the node must be typechecked and have a concrete type.
50 if n.Typecheck() != 1 {
51 base.Fatalf("missed typecheck: %+v", n)
54 if n.Type().IsUntyped() {
55 base.Fatalf("expression has untyped type: %+v", n)
58 n = walkExpr1(n, init)
60 // Eagerly compute sizes of all expressions for the back end.
61 if typ := n.Type(); typ != nil && typ.Kind() != types.TBLANK && !typ.IsFuncArgStruct() {
64 if n, ok := n.(*ir.Name); ok && n.Heapaddr != nil {
65 types.CheckSize(n.Heapaddr.Type())
67 if ir.IsConst(n, constant.String) {
68 // Emit string symbol now to avoid emitting
69 // any concurrently during the backend.
70 _ = staticdata.StringSym(n.Pos(), constant.StringVal(n.Val()))
// -W: dump the tree after walking.
73 if base.Flag.LowerW != 0 && n != nil {
74 ir.Dump("after walk expr", n)
// walkExpr1 is the per-Op dispatch for walkExpr. Each case either walks
// the operands in place or delegates to a specialized walk* helper that
// returns the (possibly rewritten) node. Every case must return or call
// base.Fatalf — see the note at the end of the switch.
// NOTE(review): many case labels and closing braces are elided in this
// excerpt; the orphaned statement groups below belong to elided cases.
81 func walkExpr1(n ir.Node, init *ir.Nodes) ir.Node {
// Unknown opcodes are compiler bugs, not user errors.
85 base.Fatalf("walkExpr: switch 1 unknown op %+v", n.Op())
88 case ir.OGETG, ir.OGETCALLERPC, ir.OGETCALLERSP:
91 case ir.OTYPE, ir.ONAME, ir.OLITERAL, ir.ONIL, ir.OLINKSYMOFFSET:
92 // TODO(mdempsky): Just return n; see discussion on CL 38655.
93 // Perhaps refactor to use Node.mayBeShared for these instead.
94 // If these return early, make sure to still call
95 // StringSym for constant strings.
97 // TODO(mdempsky): Do this right after type checking.
99 n := n.(*ir.SelectorExpr)
103 case ir.OMIN, ir.OMAX:
104 n := n.(*ir.CallExpr)
105 return walkMinMax(n, init)
// Simple unary operators: walk the operand in place.
107 case ir.ONOT, ir.ONEG, ir.OPLUS, ir.OBITNOT, ir.OREAL, ir.OIMAG, ir.OSPTR, ir.OITAB, ir.OIDATA:
108 n := n.(*ir.UnaryExpr)
109 n.X = walkExpr(n.X, init)
112 case ir.ODOTMETH, ir.ODOTINTER:
113 n := n.(*ir.SelectorExpr)
114 n.X = walkExpr(n.X, init)
118 n := n.(*ir.AddrExpr)
119 n.X = walkExpr(n.X, init)
123 n := n.(*ir.StarExpr)
124 n.X = walkExpr(n.X, init)
// Simple binary operators: walk both operands in place.
127 case ir.OMAKEFACE, ir.OAND, ir.OANDNOT, ir.OSUB, ir.OMUL, ir.OADD, ir.OOR, ir.OXOR, ir.OLSH, ir.ORSH,
129 n := n.(*ir.BinaryExpr)
130 n.X = walkExpr(n.X, init)
131 n.Y = walkExpr(n.Y, init)
134 case ir.OUNSAFESLICE:
135 n := n.(*ir.BinaryExpr)
136 return walkUnsafeSlice(n, init)
138 case ir.OUNSAFESTRING:
139 n := n.(*ir.BinaryExpr)
140 return walkUnsafeString(n, init)
142 case ir.OUNSAFESTRINGDATA, ir.OUNSAFESLICEDATA:
143 n := n.(*ir.UnaryExpr)
144 return walkUnsafeData(n, init)
146 case ir.ODOT, ir.ODOTPTR:
147 n := n.(*ir.SelectorExpr)
148 return walkDot(n, init)
150 case ir.ODOTTYPE, ir.ODOTTYPE2:
151 n := n.(*ir.TypeAssertExpr)
152 return walkDotType(n, init)
154 case ir.ODYNAMICDOTTYPE, ir.ODYNAMICDOTTYPE2:
155 n := n.(*ir.DynamicTypeAssertExpr)
156 return walkDynamicDotType(n, init)
158 case ir.OLEN, ir.OCAP:
159 n := n.(*ir.UnaryExpr)
160 return walkLenCap(n, init)
163 n := n.(*ir.BinaryExpr)
164 n.X = walkExpr(n.X, init)
165 n.Y = walkExpr(n.Y, init)
168 case ir.OEQ, ir.ONE, ir.OLT, ir.OLE, ir.OGT, ir.OGE:
169 n := n.(*ir.BinaryExpr)
170 return walkCompare(n, init)
172 case ir.OANDAND, ir.OOROR:
173 n := n.(*ir.LogicalExpr)
174 return walkLogical(n, init)
176 case ir.OPRINT, ir.OPRINTLN:
177 return walkPrint(n.(*ir.CallExpr), init)
// panic lowers directly to a runtime.gopanic call.
180 n := n.(*ir.UnaryExpr)
181 return mkcall("gopanic", nil, init, n.X)
184 return walkRecoverFP(n.(*ir.CallExpr), init)
189 case ir.OCALLINTER, ir.OCALLFUNC:
190 n := n.(*ir.CallExpr)
191 return walkCall(n, init)
// Assignment forms delegate to the walkAssign* family (note the
// (init, n) argument order, opposite to most helpers here).
193 case ir.OAS, ir.OASOP:
194 return walkAssign(init, n)
197 n := n.(*ir.AssignListStmt)
198 return walkAssignList(init, n)
202 n := n.(*ir.AssignListStmt)
203 return walkAssignFunc(init, n)
206 // order.stmt made sure x is addressable or blank.
208 n := n.(*ir.AssignListStmt)
209 return walkAssignRecv(init, n)
213 n := n.(*ir.AssignListStmt)
214 return walkAssignMapRead(init, n)
217 n := n.(*ir.CallExpr)
218 return walkDelete(init, n)
221 n := n.(*ir.AssignListStmt)
222 return walkAssignDotType(n, init)
// Conversion forms.
225 n := n.(*ir.ConvExpr)
226 return walkConvInterface(n, init)
228 case ir.OCONV, ir.OCONVNOP:
229 n := n.(*ir.ConvExpr)
230 return walkConv(n, init)
233 n := n.(*ir.ConvExpr)
234 return walkSliceToArray(n, init)
236 case ir.OSLICE2ARRPTR:
237 n := n.(*ir.ConvExpr)
238 n.X = walkExpr(n.X, init)
241 case ir.ODIV, ir.OMOD:
242 n := n.(*ir.BinaryExpr)
243 return walkDivMod(n, init)
246 n := n.(*ir.IndexExpr)
247 return walkIndex(n, init)
250 n := n.(*ir.IndexExpr)
251 return walkIndexMap(n, init)
// Bare receives are rewritten by order into OAS forms before walk.
254 base.Fatalf("walkExpr ORECV") // should see inside OAS only
257 case ir.OSLICEHEADER:
258 n := n.(*ir.SliceHeaderExpr)
259 return walkSliceHeader(n, init)
261 case ir.OSTRINGHEADER:
262 n := n.(*ir.StringHeaderExpr)
263 return walkStringHeader(n, init)
265 case ir.OSLICE, ir.OSLICEARR, ir.OSLICESTR, ir.OSLICE3, ir.OSLICE3ARR:
266 n := n.(*ir.SliceExpr)
267 return walkSlice(n, init)
270 n := n.(*ir.UnaryExpr)
271 return walkNew(n, init)
274 return walkAddString(n.(*ir.AddStringExpr), init)
277 // order should make sure we only see OAS(node, OAPPEND), which we handle above.
278 base.Fatalf("append outside assignment")
282 return walkCopy(n.(*ir.BinaryExpr), init, base.Flag.Cfg.Instrumenting && !base.Flag.CompilingRuntime)
285 n := n.(*ir.UnaryExpr)
289 n := n.(*ir.UnaryExpr)
290 return walkClose(n, init)
// make(chan/map/[]T) variants.
293 n := n.(*ir.MakeExpr)
294 return walkMakeChan(n, init)
297 n := n.(*ir.MakeExpr)
298 return walkMakeMap(n, init)
301 n := n.(*ir.MakeExpr)
302 return walkMakeSlice(n, init)
304 case ir.OMAKESLICECOPY:
305 n := n.(*ir.MakeExpr)
306 return walkMakeSliceCopy(n, init)
// String <-> byte/rune conversions.
309 n := n.(*ir.ConvExpr)
310 return walkRuneToString(n, init)
312 case ir.OBYTES2STR, ir.ORUNES2STR:
313 n := n.(*ir.ConvExpr)
314 return walkBytesRunesToString(n, init)
316 case ir.OBYTES2STRTMP:
317 n := n.(*ir.ConvExpr)
318 return walkBytesToStringTemp(n, init)
321 n := n.(*ir.ConvExpr)
322 return walkStringToBytes(n, init)
324 case ir.OSTR2BYTESTMP:
325 n := n.(*ir.ConvExpr)
326 return walkStringToBytesTemp(n, init)
329 n := n.(*ir.ConvExpr)
330 return walkStringToRunes(n, init)
332 case ir.OARRAYLIT, ir.OSLICELIT, ir.OMAPLIT, ir.OSTRUCTLIT, ir.OPTRLIT:
333 return walkCompLit(n, init)
336 n := n.(*ir.SendStmt)
337 return walkSend(n, init)
340 return walkClosure(n.(*ir.ClosureExpr), init)
343 return walkMethodValue(n.(*ir.SelectorExpr), init)
346 // No return! Each case must return (or panic),
347 // to avoid confusion about what gets returned
348 // in the presence of type assertions.
351 // walk the whole tree of the body of an
352 // expression or simple statement.
353 // the types expressions are calculated.
354 // compile-time constants are evaluated.
355 // complex side effects like statements are appended to init.
// walkExprList walks every expression in s in place, assigning each
// result back into the slice (walkExpr may replace nodes).
356 func walkExprList(s []ir.Node, init *ir.Nodes) {
358 s[i] = walkExpr(s[i], init)
// walkExprListCheap makes each expression in s cheap (side effects moved
// into init via cheapExpr) and then walks it, storing results in place.
362 func walkExprListCheap(s []ir.Node, init *ir.Nodes) {
363 for i, n := range s {
364 s[i] = cheapExpr(n, init)
365 s[i] = walkExpr(s[i], init)
// walkExprListSafe makes each expression in s safe (side-effect free and
// still assignable, via safeExpr) and then walks it, in place.
369 func walkExprListSafe(s []ir.Node, init *ir.Nodes) {
370 for i, n := range s {
371 s[i] = safeExpr(n, init)
372 s[i] = walkExpr(s[i], init)
376 // return side-effect free and cheap n, appending side effects to init.
377 // result may not be assignable.
// cheapExpr: names and literals are already cheap (the early-return for
// that case is elided in this excerpt); everything else is evaluated
// into a temporary via copyExpr.
378 func cheapExpr(n ir.Node, init *ir.Nodes) ir.Node {
380 case ir.ONAME, ir.OLITERAL, ir.ONIL:
384 return copyExpr(n, n.Type(), init)
387 // return side effect-free n, appending side effects to init.
388 // result is assignable if n is.
// safeExpr recursively makes the addressable skeleton of n safe: for
// each lvalue shape (len/cap operand, field selector, deref, index) it
// safes the sub-expressions and, when any changed, rebuilds a copy of
// the node over the safe operands and re-typechecks/walks it.
// NOTE(review): the "unchanged" early returns and some assignments to
// the copies are elided in this excerpt.
389 func safeExpr(n ir.Node, init *ir.Nodes) ir.Node {
// Hoist any pending init statements first.
394 if len(n.Init()) != 0 {
395 walkStmtList(n.Init())
396 init.Append(ir.TakeInit(n)...)
400 case ir.ONAME, ir.OLITERAL, ir.ONIL, ir.OLINKSYMOFFSET:
403 case ir.OLEN, ir.OCAP:
404 n := n.(*ir.UnaryExpr)
405 l := safeExpr(n.X, init)
409 a := ir.Copy(n).(*ir.UnaryExpr)
411 return walkExpr(typecheck.Expr(a), init)
413 case ir.ODOT, ir.ODOTPTR:
414 n := n.(*ir.SelectorExpr)
415 l := safeExpr(n.X, init)
419 a := ir.Copy(n).(*ir.SelectorExpr)
421 return walkExpr(typecheck.Expr(a), init)
424 n := n.(*ir.StarExpr)
425 l := safeExpr(n.X, init)
429 a := ir.Copy(n).(*ir.StarExpr)
431 return walkExpr(typecheck.Expr(a), init)
433 case ir.OINDEX, ir.OINDEXMAP:
434 n := n.(*ir.IndexExpr)
435 l := safeExpr(n.X, init)
436 r := safeExpr(n.Index, init)
437 if l == n.X && r == n.Index {
440 a := ir.Copy(n).(*ir.IndexExpr)
443 return walkExpr(typecheck.Expr(a), init)
445 case ir.OSTRUCTLIT, ir.OARRAYLIT, ir.OSLICELIT:
446 n := n.(*ir.CompLitExpr)
447 if isStaticCompositeLiteral(n) {
452 // make a copy; must not be used as an lvalue
453 if ir.IsAddressable(n) {
454 base.Fatalf("missing lvalue case in safeExpr: %v", n)
456 return cheapExpr(n, init)
// copyExpr evaluates n into a fresh temporary of type t, appending the
// assignment to init, and returns the temporary (return elided here).
459 func copyExpr(n ir.Node, t *types.Type, init *ir.Nodes) ir.Node {
460 l := typecheck.TempAt(base.Pos, ir.CurFunc, t)
461 appendWalkStmt(init, ir.NewAssignStmt(base.Pos, l, n))
// walkAddString lowers string concatenation (OADDSTR) to a call to one
// of the runtime concatstring helpers. Non-escaping results whose
// literal parts fit in tmpstringbufsize get a stack scratch buffer.
// NOTE(review): interior lines (count check, fn selection branch,
// final return) are elided in this excerpt.
465 func walkAddString(n *ir.AddStringExpr, init *ir.Nodes) ir.Node {
469 base.Fatalf("walkAddString count %d too small", c)
// buf defaults to nil; a stack buffer is only substituted below.
472 buf := typecheck.NodNil()
473 if n.Esc() == ir.EscNone {
// Sum the sizes of the constant-string operands to bound the result.
475 for _, n1 := range n.List {
476 if n1.Op() == ir.OLITERAL {
477 sz += int64(len(ir.StringVal(n1)))
481 // Don't allocate the buffer if the result won't fit.
482 if sz < tmpstringbufsize {
483 // Create temporary buffer for result string on stack.
484 buf = stackBufAddr(tmpstringbufsize, types.Types[types.TUINT8])
488 // build list of string arguments
489 args := []ir.Node{buf}
490 for _, n2 := range n.List {
491 args = append(args, typecheck.Conv(n2, types.Types[types.TSTRING]))
496 // small numbers of strings use direct runtime helpers.
497 // note: order.expr knows this cutoff too.
498 fn = fmt.Sprintf("concatstring%d", c)
500 // large numbers of strings are passed to the runtime as a slice.
503 t := types.NewSlice(types.Types[types.TSTRING])
504 // args[1:] to skip buf arg
505 slice := ir.NewCompLitExpr(base.Pos, ir.OCOMPLIT, t, args[1:])
506 slice.Prealloc = n.Prealloc
507 args = []ir.Node{buf, slice}
508 slice.SetEsc(ir.EscNone)
// Build and walk the runtime call.
511 cat := typecheck.LookupRuntime(fn)
512 r := ir.NewCallExpr(base.Pos, ir.OCALL, cat, nil)
514 r1 := typecheck.Expr(r)
515 r1 = walkExpr(r1, init)
// hookInfo describes a libfuzzer interception hook for a library
// function: the parameter type to convert args to, the expected arg
// count, and the runtime hook to call (field lines elided here).
521 type hookInfo struct {
// hooks maps fully-qualified function names to their libfuzzer hook
// descriptors; consulted by walkCall1 when -d=libfuzzer is enabled.
527 var hooks = map[string]hookInfo{
528 "strings.EqualFold": {paramType: types.TSTRING, argsNum: 2, runtimeFunc: "libfuzzerHookEqualFold"},
531 // walkCall walks an OCALLFUNC or OCALLINTER node.
// It records reflect Method/MethodByName usage, marks used interface
// methods, rewrites internal/abi.FuncPCABIxxx intrinsics, and flags
// functions that call runtime.deferrangefunc.
// NOTE(review): interior lines (usemethod call, closure directcall
// rewrite, switch head, final walkCall1 + return) are elided here.
532 func walkCall(n *ir.CallExpr, init *ir.Nodes) ir.Node {
533 if n.Op() == ir.OCALLMETH {
534 base.FatalfAt(n.Pos(), "OCALLMETH missed by typecheck")
536 if n.Op() == ir.OCALLINTER || n.Fun.Op() == ir.OMETHEXPR {
537 // We expect both interface call reflect.Type.Method and concrete
538 // call reflect.(*rtype).Method.
541 if n.Op() == ir.OCALLINTER {
542 reflectdata.MarkUsedIfaceMethod(n)
545 if n.Op() == ir.OCALLFUNC && n.Fun.Op() == ir.OCLOSURE {
549 if ir.IsFuncPCIntrinsic(n) {
550 // For internal/abi.FuncPCABIxxx(fn), if fn is a defined function, rewrite
551 // it to the address of the function of the ABI fn is defined.
552 name := n.Fun.(*ir.Name).Sym().Name
558 case "FuncPCABIInternal":
559 wantABI = obj.ABIInternal
561 if isIfaceOfFunc(arg) {
562 fn := arg.(*ir.ConvExpr).X.(*ir.Name)
565 base.ErrorfAt(n.Pos(), 0, "internal/abi.%s expects an %v function, %s is defined as %v", name, wantABI, fn.Sym().Name, abi)
// Defined function: take the address of its linksym for the ABI.
567 var e ir.Node = ir.NewLinksymExpr(n.Pos(), fn.Sym().LinksymABI(abi), types.Types[types.TUINTPTR])
568 e = ir.NewAddrExpr(n.Pos(), e)
569 e.SetType(types.Types[types.TUINTPTR].PtrTo())
570 return typecheck.Expr(ir.NewConvExpr(n.Pos(), ir.OCONVNOP, n.Type(), e))
572 // fn is not a defined function. It must be ABIInternal.
573 // Read the address from func value, i.e. *(*uintptr)(idata(fn)).
574 if wantABI != obj.ABIInternal {
575 base.ErrorfAt(n.Pos(), 0, "internal/abi.%s does not accept func expression, which is ABIInternal", name)
577 arg = walkExpr(arg, init)
578 var e ir.Node = ir.NewUnaryExpr(n.Pos(), ir.OIDATA, arg)
579 e.SetType(n.Type().PtrTo())
581 e = ir.NewStarExpr(n.Pos(), e)
587 if name, ok := n.Fun.(*ir.Name); ok {
589 if sym.Pkg.Path == "go.runtime" && sym.Name == "deferrangefunc" {
590 // Call to runtime.deferrangefunc is being shared with a range-over-func
591 // body that might add defers to this frame, so we cannot use open-coded defers
592 // and we need to call deferreturn even if we don't see any other explicit defers.
593 ir.CurFunc.SetHasDefer(true)
594 ir.CurFunc.SetOpenCodedDeferDisallowed(true)
// walkCall1 does the actual argument walking for a call: it walks the
// callee and arguments, validates argument/parameter types, spills
// call-containing arguments to temporaries, and injects libfuzzer
// hooks when enabled. Idempotent: returns early if already walked.
// NOTE(review): interior lines (walked-flag set, param lookup, the
// mayCall condition, arg replacement) are elided in this excerpt.
602 func walkCall1(n *ir.CallExpr, init *ir.Nodes) {
604 return // already walked
608 if n.Op() == ir.OCALLMETH {
609 base.FatalfAt(n.Pos(), "OCALLMETH missed by typecheck")
613 params := n.Fun.Type().Params()
615 n.Fun = walkExpr(n.Fun, init)
616 walkExprList(args, init)
618 for i, arg := range args {
619 // Validate argument and parameter types match.
621 if !types.Identical(arg.Type(), param.Type) {
622 base.FatalfAt(n.Pos(), "assigning %L to parameter %v (type %v)", arg, param.Sym, param.Type)
625 // For any argument whose evaluation might require a function call,
626 // store that argument into a temporary variable,
627 // to prevent that calls from clobbering arguments already on the stack.
629 // assignment of arg to Temp
630 tmp := typecheck.TempAt(base.Pos, ir.CurFunc, param.Type)
631 init.Append(convas(typecheck.Stmt(ir.NewAssignStmt(base.Pos, tmp, arg)).(*ir.AssignStmt), init))
632 // replace arg with temp
// Libfuzzer instrumentation: call the registered hook with the call's
// arguments plus a fake PC.
637 funSym := n.Fun.Sym()
638 if base.Debug.Libfuzzer != 0 && funSym != nil {
639 if hook, found := hooks[funSym.Pkg.Path+"."+funSym.Name]; found {
640 if len(args) != hook.argsNum {
641 panic(fmt.Sprintf("%s.%s expects %d arguments, but received %d", funSym.Pkg.Path, funSym.Name, hook.argsNum, len(args)))
643 var hookArgs []ir.Node
644 for _, arg := range args {
645 hookArgs = append(hookArgs, tracecmpArg(arg, types.Types[hook.paramType], init))
647 hookArgs = append(hookArgs, fakePC(n))
648 init.Append(mkcall(hook.runtimeFunc, nil, init, hookArgs...))
653 // walkDivMod walks an ODIV or OMOD node.
// Complex division becomes a complex128div runtime call; float division
// is left alone; 64-bit div/mod on 32-bit targets becomes a runtime
// call unless the constant divisor is a power of two (or, per the
// comment, a small 16-bit constant — that branch is elided here).
654 func walkDivMod(n *ir.BinaryExpr, init *ir.Nodes) ir.Node {
655 n.X = walkExpr(n.X, init)
656 n.Y = walkExpr(n.Y, init)
658 // rewrite complex div into function call.
659 et := n.X.Type().Kind()
661 if types.IsComplex[et] && n.Op() == ir.ODIV {
663 call := mkcall("complex128div", types.Types[types.TCOMPLEX128], init, typecheck.Conv(n.X, types.Types[types.TCOMPLEX128]), typecheck.Conv(n.Y, types.Types[types.TCOMPLEX128]))
664 return typecheck.Conv(call, t)
667 // Nothing to do for float divisions.
668 if types.IsFloat[et] {
672 // rewrite 64-bit div and mod on 32-bit architectures.
673 // TODO: Remove this code once we can introduce
674 // runtime calls late in SSA processing.
675 if types.RegSize < 8 && (et == types.TINT64 || et == types.TUINT64) {
676 if n.Y.Op() == ir.OLITERAL {
677 // Leave div/mod by constant powers of 2 or small 16-bit constants.
678 // The SSA backend will handle those.
// Signed path: power-of-two check on the int64 value.
681 c := ir.Int64Val(n.Y)
685 if c != 0 && c&(c-1) == 0 {
// Unsigned path: same power-of-two check on the uint64 value.
689 c := ir.Uint64Val(n.Y)
693 if c != 0 && c&(c-1) == 0 {
// Pick the runtime helper (name construction elided) and call it.
699 if et == types.TINT64 {
704 if n.Op() == ir.ODIV {
709 return mkcall(fn, n.Type(), init, typecheck.Conv(n.X, types.Types[et]), typecheck.Conv(n.Y, types.Types[et]))
714 // walkDot walks an ODOT or ODOTPTR node.
// (The field-tracking hook and the return are elided in this excerpt.)
715 func walkDot(n *ir.SelectorExpr, init *ir.Nodes) ir.Node {
717 n.X = walkExpr(n.X, init)
721 // walkDotType walks an ODOTTYPE or ODOTTYPE2 node.
// For assertions to a concrete type it records the itab address; for
// interface-to-nonempty-interface assertions it emits a per-assertion
// internal/abi.TypeAssert descriptor symbol used by the runtime call.
722 func walkDotType(n *ir.TypeAssertExpr, init *ir.Nodes) ir.Node {
723 n.X = walkExpr(n.X, init)
724 // Set up interface type addresses for back end.
725 if !n.Type().IsInterface() && !n.X.Type().IsEmptyInterface() {
726 n.ITab = reflectdata.ITabAddrAt(base.Pos, n.Type(), n.X.Type())
728 if n.X.Type().IsInterface() && n.Type().IsInterface() && !n.Type().IsEmptyInterface() {
729 // Converting an interface to a non-empty interface. Needs a runtime call.
730 // Allocate an internal/abi.TypeAssert descriptor for that call.
// The symbol name includes a per-package counter (typeAssertGen,
// declared below; its increment is elided in this excerpt).
731 lsym := types.LocalPkg.Lookup(fmt.Sprintf(".typeAssert.%d", typeAssertGen)).LinksymABI(obj.ABI0)
734 off = objw.SymPtr(lsym, off, typecheck.LookupRuntimeVar("emptyTypeAssertCache"), 0)
735 off = objw.SymPtr(lsym, off, reflectdata.TypeSym(n.Type()).Linksym(), 0)
736 off = objw.Bool(lsym, off, n.Op() == ir.ODOTTYPE2) // CanFail
// Round the symbol size up to pointer alignment.
737 off += types.PtrSize - 1
738 objw.Global(lsym, int32(off), obj.LOCAL)
739 // Set the type to be just a single pointer, as the cache pointer is the
740 // only one that GC needs to see.
741 lsym.Gotype = reflectdata.TypeLinksym(types.Types[types.TUINT8].PtrTo())
// typeAssertGen numbers the .typeAssert.N descriptor symbols above.
748 var typeAssertGen int
750 // walkDynamicDotType walks an ODYNAMICDOTTYPE or ODYNAMICDOTTYPE2 node.
751 func walkDynamicDotType(n *ir.DynamicTypeAssertExpr, init *ir.Nodes) ir.Node {
752 n.X = walkExpr(n.X, init)
753 n.RType = walkExpr(n.RType, init)
754 n.ITab = walkExpr(n.ITab, init)
755 // Convert to non-dynamic if we can.
// If the runtime type operand is the address of a static symbol, the
// assertion target is known at compile time, so rebuild this as an
// ordinary ODOTTYPE(2) and walk that instead.
756 if n.RType != nil && n.RType.Op() == ir.OADDR {
757 addr := n.RType.(*ir.AddrExpr)
758 if addr.X.Op() == ir.OLINKSYMOFFSET {
759 r := ir.NewTypeAssertExpr(n.Pos(), n.X, n.Type())
760 if n.Op() == ir.ODYNAMICDOTTYPE2 {
761 r.SetOp(ir.ODOTTYPE2)
765 return walkExpr(r, init)
771 // walkIndex walks an OINDEX node.
// It walks the operands and, for arrays (and constant strings) with a
// statically known length, marks the index bounded when the pre-walk
// index expression r provably stays in range (see bounded below).
772 func walkIndex(n *ir.IndexExpr, init *ir.Nodes) ir.Node {
773 n.X = walkExpr(n.X, init)
775 // save the original node for bounds checking elision.
776 // If it was a ODIV/OMOD walk might rewrite it.
779 n.Index = walkExpr(n.Index, init)
781 // if range of type cannot exceed static array bound,
782 // disable bounds check.
// Pointer-to-array: look through the pointer (elided Elem() line).
787 if t != nil && t.IsPtr() {
791 n.SetBounded(bounded(r, t.NumElem()))
792 if base.Flag.LowerM != 0 && n.Bounded() && !ir.IsConst(n.Index, constant.Int) {
793 base.Warn("index bounds check elided")
795 } else if ir.IsConst(n.X, constant.String) {
796 n.SetBounded(bounded(r, int64(len(ir.StringVal(n.X)))))
797 if base.Flag.LowerM != 0 && n.Bounded() && !ir.IsConst(n.Index, constant.Int) {
798 base.Warn("index bounds check elided")
804 // mapKeyArg returns an expression for key that is suitable to be passed
805 // as the key argument for runtime map* functions.
806 // n is the map indexing or delete Node (to provide Pos).
// fast selects the runtime fast-path variant; assigned distinguishes
// mapassign (which cares about pointer vs. integer keys) from
// mapaccess/mapdelete (which do not). Switch heads are elided here.
807 func mapKeyArg(fast int, n, key ir.Node, assigned bool) ir.Node {
809 // standard version takes key by reference.
810 // orderState.expr made sure key is addressable.
811 return typecheck.NodAddr(key)
814 // mapassign does distinguish pointer vs. integer key.
817 // mapaccess and mapdelete don't distinguish pointer vs. integer key.
// Reinterpret the key as the matching unsigned integer width.
820 return ir.NewConvExpr(n.Pos(), ir.OCONVNOP, types.Types[types.TUINT32], key)
822 return ir.NewConvExpr(n.Pos(), ir.OCONVNOP, types.Types[types.TUINT64], key)
824 // fast version takes key by value.
829 // walkIndexMap walks an OINDEXMAP node.
830 // It replaces m[k] with *map{access1,assign}(maptype, m, &k)
// The runtime function is chosen by whether the index is an assignment
// target (mapassign), the element is large (mapaccess1_fat with a zero
// value pointer), or the common case (mapaccess1); fast-path variants
// are selected via mapKeyArg/fast. Switch heads are elided here.
831 func walkIndexMap(n *ir.IndexExpr, init *ir.Nodes) ir.Node {
832 n.X = walkExpr(n.X, init)
833 n.Index = walkExpr(n.Index, init)
837 key := mapKeyArg(fast, n, n.Index, n.Assigned)
838 args := []ir.Node{reflectdata.IndexMapRType(base.Pos, n), map_, key}
843 mapFn = mapfn(mapassign[fast], t, false)
844 case t.Elem().Size() > zeroValSize:
845 args = append(args, reflectdata.ZeroAddr(t.Elem().Size()))
846 mapFn = mapfn("mapaccess1_fat", t, true)
848 mapFn = mapfn(mapaccess1[fast], t, false)
// The runtime returns *elem; wrap it in a deref of that type.
850 call := mkcall1(mapFn, nil, init, args...)
851 call.SetType(types.NewPtr(t.Elem()))
852 call.MarkNonNil() // mapaccess1* and mapassign always return non-nil pointers.
853 star := ir.NewStarExpr(base.Pos, call)
854 star.SetType(t.Elem())
859 // walkLogical walks an OANDAND or OOROR node.
// The right operand is walked into a separate list ll, which is then
// attached to n.Y itself so its side effects run only if n.X's value
// requires evaluating n.Y (short-circuit semantics).
860 func walkLogical(n *ir.LogicalExpr, init *ir.Nodes) ir.Node {
861 n.X = walkExpr(n.X, init)
863 // cannot put side effects from n.Right on init,
864 // because they cannot run before n.Left is checked.
865 // save elsewhere and store on the eventual n.Right.
868 n.Y = walkExpr(n.Y, &ll)
869 n.Y = ir.InitExpr(ll, n.Y)
873 // walkSend walks an OSEND node.
// The sent value n1 is converted to the channel's element type, walked,
// and passed to runtime chansend1 by address.
874 func walkSend(n *ir.SendStmt, init *ir.Nodes) ir.Node {
876 n1 = typecheck.AssignConv(n1, n.Chan.Type().Elem(), "chan send")
877 n1 = walkExpr(n1, init)
878 n1 = typecheck.NodAddr(n1)
879 return mkcall1(chanfn("chansend1", 2, n.Chan.Type()), nil, init, n.Chan, n1)
882 // walkSlice walks an OSLICE, OSLICEARR, OSLICESTR, OSLICE3, or OSLICE3ARR node.
// After walking all index operands, trivial full-range slices of
// slices/strings (x[:] with no low/high) are reduced away entirely
// (the reduction itself is elided in this excerpt).
883 func walkSlice(n *ir.SliceExpr, init *ir.Nodes) ir.Node {
884 n.X = walkExpr(n.X, init)
885 n.Low = walkExpr(n.Low, init)
886 if n.Low != nil && ir.IsZero(n.Low) {
887 // Reduce x[0:j] to x[:j] and x[0:j:k] to x[:j:k].
890 n.High = walkExpr(n.High, init)
891 n.Max = walkExpr(n.Max, init)
893 if (n.Op() == ir.OSLICE || n.Op() == ir.OSLICESTR) && n.Low == nil && n.High == nil {
895 if base.Debug.Slice > 0 {
896 base.Warn("slice: omit slice operation")
903 // walkSliceHeader walks an OSLICEHEADER node.
// All three components are walked in place (return elided here).
904 func walkSliceHeader(n *ir.SliceHeaderExpr, init *ir.Nodes) ir.Node {
905 n.Ptr = walkExpr(n.Ptr, init)
906 n.Len = walkExpr(n.Len, init)
907 n.Cap = walkExpr(n.Cap, init)
911 // walkStringHeader walks an OSTRINGHEADER node.
// Pointer and length are walked in place (return elided here).
912 func walkStringHeader(n *ir.StringHeaderExpr, init *ir.Nodes) ir.Node {
913 n.Ptr = walkExpr(n.Ptr, init)
914 n.Len = walkExpr(n.Len, init)
918 // bounded reports whether integer expression n is provably in range [0, max).
// It recognizes small constants and conservative patterns — masking with
// AND/ANDNOT, unsigned MOD by a constant, unsigned right shift, and
// unsigned DIV — plus any unsigned type too narrow to reach max.
// NOTE(review): switch heads, several returns, and the shift-tracking
// arithmetic are elided in this excerpt.
919 func bounded(n ir.Node, max int64) bool {
920 if n.Type() == nil || !n.Type().IsInteger() {
924 sign := n.Type().IsSigned()
925 bits := int32(8 * n.Type().Size())
// Constant: check the value directly.
927 if ir.IsSmallIntConst(n) {
929 return 0 <= v && v < max
933 case ir.OAND, ir.OANDNOT:
934 n := n.(*ir.BinaryExpr)
// Pick whichever operand is the constant mask.
937 case ir.IsSmallIntConst(n.X):
939 case ir.IsSmallIntConst(n.Y):
941 if n.Op() == ir.OANDNOT {
// ANDNOT: invert the mask within the operand's width.
944 v &= 1<<uint(bits) - 1
948 if 0 <= v && v < max {
// Unsigned MOD by constant c bounds the result to [0, c).
953 n := n.(*ir.BinaryExpr)
954 if !sign && ir.IsSmallIntConst(n.Y) {
955 v := ir.Int64Val(n.Y)
956 if 0 <= v && v <= max {
// Unsigned right shift narrows the effective bit width.
962 n := n.(*ir.BinaryExpr)
963 if !sign && ir.IsSmallIntConst(n.Y) {
964 v := ir.Int64Val(n.Y)
965 for bits > 0 && v >= 2 {
// Unsigned DIV by a constant also narrows the range.
972 n := n.(*ir.BinaryExpr)
973 if !sign && ir.IsSmallIntConst(n.Y) {
974 v := ir.Int64Val(n.Y)
// Finally: an unsigned value of width bits cannot reach 2^bits.
982 if !sign && bits <= 62 && 1<<uint(bits) <= max {
989 // usemethod checks calls for uses of Method and MethodByName of reflect.Value,
990 // reflect.Type, reflect.(*rtype), and reflect.(*interfaceType).
// When such a call is found, the linker is informed so that method
// metadata cannot be dead-code eliminated: a named-method relocation
// when the name is a string constant, or the broad ReflectMethod
// attribute otherwise. NOTE(review): several early returns, switch
// heads, and the dot.Op() dispatch are elided in this excerpt.
991 func usemethod(n *ir.CallExpr) {
992 // Don't mark reflect.(*rtype).Method, etc. themselves in the reflect package.
993 // Those functions may be alive via the itab, which should not cause all methods
994 // alive. We only want to mark their callers.
995 if base.Ctxt.Pkgpath == "reflect" {
996 // TODO: is there a better way than hardcoding the names?
997 switch fn := ir.CurFunc.Nname.Sym().Name; {
998 case fn == "(*rtype).Method", fn == "(*rtype).MethodByName":
1000 case fn == "(*interfaceType).Method", fn == "(*interfaceType).MethodByName":
1002 case fn == "Value.Method", fn == "Value.MethodByName":
1007 dot, ok := n.Fun.(*ir.SelectorExpr)
1012 // looking for either direct method calls and interface method calls of:
1013 // reflect.Type.Method - func(int) reflect.Method
1014 // reflect.Type.MethodByName - func(string) (reflect.Method, bool)
1016 // reflect.Value.Method - func(int) reflect.Value
1017 // reflect.Value.MethodByName - func(string) reflect.Value
1018 methodName := dot.Sel.Name
1019 t := dot.Selection.Type
1021 // Check the number of arguments and return values.
1022 if t.NumParams() != 1 || (t.NumResults() != 1 && t.NumResults() != 2) {
1026 // Check the type of the argument.
1027 switch pKind := t.Param(0).Type.Kind(); {
1028 case methodName == "Method" && pKind == types.TINT,
1029 methodName == "MethodByName" && pKind == types.TSTRING:
1032 // not a call to Method or MethodByName of reflect.{Type,Value}.
1036 // Check that first result type is "reflect.Method" or "reflect.Value".
1037 // Note that we have to check sym name and sym package separately, as
1038 // we can't check for exact string "reflect.Method" reliably
1039 // (e.g., see #19028 and #38515).
1040 switch s := t.Result(0).Type.Sym(); {
1041 case s != nil && types.ReflectSymName(s) == "Method",
1042 s != nil && types.ReflectSymName(s) == "Value":
1045 // not a call to Method or MethodByName of reflect.{Type,Value}.
// Locate the method-name argument; its index depends on the call shape
// (the enclosing dispatch on dot.Op() is elided in this excerpt).
1049 var targetName ir.Node
1052 if methodName == "MethodByName" {
1053 targetName = n.Args[0]
1056 if methodName == "MethodByName" {
1057 targetName = n.Args[1]
1060 base.FatalfAt(dot.Pos(), "usemethod: unexpected dot.Op() %s", dot.Op())
// Constant name: emit a precise keep-this-method relocation.
1063 if ir.IsConst(targetName, constant.String) {
1064 name := constant.StringVal(targetName.Val())
1066 r := obj.Addrel(ir.CurFunc.LSym)
1067 r.Type = objabi.R_USENAMEDMETHOD
1068 r.Sym = staticdata.StringSymNoCommon(name)
// Non-constant name: conservatively keep all methods reachable here.
1070 ir.CurFunc.LSym.Set(obj.AttrReflectMethod, true)
// usefield records a use of a tracked struct field (go:"track" note)
// for the FieldTrack experiment, adding the field's tracking symbol to
// the current function's FieldTrack set. No-op when the experiment is
// off. NOTE(review): the switch head, the outer-type computation, and
// several early returns are elided in this excerpt.
1074 func usefield(n *ir.SelectorExpr) {
1075 if !buildcfg.Experiment.FieldTrack {
// Only ODOT/ODOTPTR selectors are valid here.
1081 base.Fatalf("usefield %v", n.Op())
1083 case ir.ODOT, ir.ODOTPTR:
1087 field := n.Selection
1089 base.Fatalf("usefield %v %v without paramfld", n.X.Type(), n.Sel)
1091 if field.Sym != n.Sel {
1092 base.Fatalf("field inconsistency: %v != %v", field.Sym, n.Sel)
// Untracked fields (no go:"track" note) are ignored.
1094 if !strings.Contains(field.Note, "go:\"track\"") {
// Look through a pointer to find the struct type owning the field.
1100 outer = outer.Elem()
1102 if outer.Sym() == nil {
1103 base.Errorf("tracked field must be in named struct type")
1106 sym := reflectdata.TrackSym(outer, field)
1107 if ir.CurFunc.FieldTrack == nil {
1108 ir.CurFunc.FieldTrack = make(map[*obj.LSym]struct{})
1110 ir.CurFunc.FieldTrack[sym] = struct{}{}