1 // Copyright 2009 The Go Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style
3 // license that can be found in the LICENSE file.
13 "cmd/compile/internal/base"
14 "cmd/compile/internal/ir"
15 "cmd/compile/internal/objw"
16 "cmd/compile/internal/reflectdata"
17 "cmd/compile/internal/rttype"
18 "cmd/compile/internal/staticdata"
19 "cmd/compile/internal/typecheck"
20 "cmd/compile/internal/types"
// NOTE(review): this listing is elided — the embedded original line numbers
// jump, so statements between them are not visible here. Code left untouched.
// walkExpr walks (lowers) a single typechecked expression for the back end.
25 // The result of walkExpr MUST be assigned back to n, e.g.
27 // n.Left = walkExpr(n.Left, init)
28 func walkExpr(n ir.Node, init *ir.Nodes) ir.Node {
33 if n, ok := n.(ir.InitNode); ok && init == n.PtrInit() {
34 // not okay to use n->ninit when walking n,
35 // because we might replace n with some other node
36 // and would lose the init list.
37 base.Fatalf("walkExpr init == &n->ninit")
// Hoist n's own init statements into the caller-supplied init list
// before walking n itself.
40 if len(n.Init()) != 0 {
41 walkStmtList(n.Init())
42 init.Append(ir.TakeInit(n)...)
47 if base.Flag.LowerW > 1 {
48 ir.Dump("before walk expr", n)
// Sanity checks: walk must only see fully typechecked, typed expressions.
51 if n.Typecheck() != 1 {
52 base.Fatalf("missed typecheck: %+v", n)
55 if n.Type().IsUntyped() {
56 base.Fatalf("expression has untyped type: %+v", n)
59 n = walkExpr1(n, init)
61 // Eagerly compute sizes of all expressions for the back end.
62 if typ := n.Type(); typ != nil && typ.Kind() != types.TBLANK && !typ.IsFuncArgStruct() {
65 if n, ok := n.(*ir.Name); ok && n.Heapaddr != nil {
66 types.CheckSize(n.Heapaddr.Type())
68 if ir.IsConst(n, constant.String) {
69 // Emit string symbol now to avoid emitting
70 // any concurrently during the backend.
71 _ = staticdata.StringSym(n.Pos(), constant.StringVal(n.Val()))
74 if base.Flag.LowerW != 0 && n != nil {
75 ir.Dump("after walk expr", n)
// walkExpr1 dispatches on n.Op() to per-op walk helpers. Many case labels
// and statements are elided from this listing (original line numbers jump);
// comments below describe only what is visible.
82 func walkExpr1(n ir.Node, init *ir.Nodes) ir.Node {
86 base.Fatalf("walkExpr: switch 1 unknown op %+v", n.Op())
89 case ir.OGETG, ir.OGETCALLERPC, ir.OGETCALLERSP:
92 case ir.OTYPE, ir.ONAME, ir.OLITERAL, ir.ONIL, ir.OLINKSYMOFFSET:
93 // TODO(mdempsky): Just return n; see discussion on CL 38655.
94 // Perhaps refactor to use Node.mayBeShared for these instead.
95 // If these return early, make sure to still call
96 // StringSym for constant strings.
100 // TODO(mdempsky): Do this right after type checking.
101 n := n.(*ir.SelectorExpr)
104 case ir.OMIN, ir.OMAX:
105 n := n.(*ir.CallExpr)
106 return walkMinMax(n, init)
108 case ir.ONOT, ir.ONEG, ir.OPLUS, ir.OBITNOT, ir.OREAL, ir.OIMAG, ir.OSPTR, ir.OITAB, ir.OIDATA:
109 n := n.(*ir.UnaryExpr)
110 n.X = walkExpr(n.X, init)
113 case ir.ODOTMETH, ir.ODOTINTER:
114 n := n.(*ir.SelectorExpr)
115 n.X = walkExpr(n.X, init)
119 n := n.(*ir.AddrExpr)
120 n.X = walkExpr(n.X, init)
124 n := n.(*ir.StarExpr)
125 n.X = walkExpr(n.X, init)
128 case ir.OMAKEFACE, ir.OAND, ir.OANDNOT, ir.OSUB, ir.OMUL, ir.OADD, ir.OOR, ir.OXOR, ir.OLSH, ir.ORSH,
130 n := n.(*ir.BinaryExpr)
131 n.X = walkExpr(n.X, init)
132 n.Y = walkExpr(n.Y, init)
135 case ir.OUNSAFESLICE:
136 n := n.(*ir.BinaryExpr)
137 return walkUnsafeSlice(n, init)
139 case ir.OUNSAFESTRING:
140 n := n.(*ir.BinaryExpr)
141 return walkUnsafeString(n, init)
143 case ir.OUNSAFESTRINGDATA, ir.OUNSAFESLICEDATA:
144 n := n.(*ir.UnaryExpr)
145 return walkUnsafeData(n, init)
147 case ir.ODOT, ir.ODOTPTR:
148 n := n.(*ir.SelectorExpr)
149 return walkDot(n, init)
151 case ir.ODOTTYPE, ir.ODOTTYPE2:
152 n := n.(*ir.TypeAssertExpr)
153 return walkDotType(n, init)
155 case ir.ODYNAMICDOTTYPE, ir.ODYNAMICDOTTYPE2:
156 n := n.(*ir.DynamicTypeAssertExpr)
157 return walkDynamicDotType(n, init)
159 case ir.OLEN, ir.OCAP:
160 n := n.(*ir.UnaryExpr)
161 return walkLenCap(n, init)
164 n := n.(*ir.BinaryExpr)
165 n.X = walkExpr(n.X, init)
166 n.Y = walkExpr(n.Y, init)
169 case ir.OEQ, ir.ONE, ir.OLT, ir.OLE, ir.OGT, ir.OGE:
170 n := n.(*ir.BinaryExpr)
171 return walkCompare(n, init)
173 case ir.OANDAND, ir.OOROR:
174 n := n.(*ir.LogicalExpr)
175 return walkLogical(n, init)
177 case ir.OPRINT, ir.OPRINTLN:
178 return walkPrint(n.(*ir.CallExpr), init)
181 n := n.(*ir.UnaryExpr)
182 return mkcall("gopanic", nil, init, n.X)
185 return walkRecoverFP(n.(*ir.CallExpr), init)
190 case ir.OCALLINTER, ir.OCALLFUNC:
191 n := n.(*ir.CallExpr)
192 return walkCall(n, init)
194 case ir.OAS, ir.OASOP:
195 return walkAssign(init, n)
198 n := n.(*ir.AssignListStmt)
199 return walkAssignList(init, n)
203 n := n.(*ir.AssignListStmt)
204 return walkAssignFunc(init, n)
207 // order.stmt made sure x is addressable or blank.
209 n := n.(*ir.AssignListStmt)
210 return walkAssignRecv(init, n)
214 n := n.(*ir.AssignListStmt)
215 return walkAssignMapRead(init, n)
218 n := n.(*ir.CallExpr)
219 return walkDelete(init, n)
222 n := n.(*ir.AssignListStmt)
223 return walkAssignDotType(n, init)
226 n := n.(*ir.ConvExpr)
227 return walkConvInterface(n, init)
229 case ir.OCONV, ir.OCONVNOP:
230 n := n.(*ir.ConvExpr)
231 return walkConv(n, init)
234 n := n.(*ir.ConvExpr)
235 return walkSliceToArray(n, init)
237 case ir.OSLICE2ARRPTR:
238 n := n.(*ir.ConvExpr)
239 n.X = walkExpr(n.X, init)
242 case ir.ODIV, ir.OMOD:
243 n := n.(*ir.BinaryExpr)
244 return walkDivMod(n, init)
247 n := n.(*ir.IndexExpr)
248 return walkIndex(n, init)
251 n := n.(*ir.IndexExpr)
252 return walkIndexMap(n, init)
255 base.Fatalf("walkExpr ORECV") // should see inside OAS only
258 case ir.OSLICEHEADER:
259 n := n.(*ir.SliceHeaderExpr)
260 return walkSliceHeader(n, init)
262 case ir.OSTRINGHEADER:
263 n := n.(*ir.StringHeaderExpr)
264 return walkStringHeader(n, init)
266 case ir.OSLICE, ir.OSLICEARR, ir.OSLICESTR, ir.OSLICE3, ir.OSLICE3ARR:
267 n := n.(*ir.SliceExpr)
268 return walkSlice(n, init)
271 n := n.(*ir.UnaryExpr)
272 return walkNew(n, init)
275 return walkAddString(n.(*ir.AddStringExpr), init)
278 // order should make sure we only see OAS(node, OAPPEND), which we handle above.
279 base.Fatalf("append outside assignment")
283 return walkCopy(n.(*ir.BinaryExpr), init, base.Flag.Cfg.Instrumenting && !base.Flag.CompilingRuntime)
286 n := n.(*ir.UnaryExpr)
290 n := n.(*ir.UnaryExpr)
291 return walkClose(n, init)
294 n := n.(*ir.MakeExpr)
295 return walkMakeChan(n, init)
298 n := n.(*ir.MakeExpr)
299 return walkMakeMap(n, init)
302 n := n.(*ir.MakeExpr)
303 return walkMakeSlice(n, init)
305 case ir.OMAKESLICECOPY:
306 n := n.(*ir.MakeExpr)
307 return walkMakeSliceCopy(n, init)
310 n := n.(*ir.ConvExpr)
311 return walkRuneToString(n, init)
313 case ir.OBYTES2STR, ir.ORUNES2STR:
314 n := n.(*ir.ConvExpr)
315 return walkBytesRunesToString(n, init)
317 case ir.OBYTES2STRTMP:
318 n := n.(*ir.ConvExpr)
319 return walkBytesToStringTemp(n, init)
322 n := n.(*ir.ConvExpr)
323 return walkStringToBytes(n, init)
325 case ir.OSTR2BYTESTMP:
326 n := n.(*ir.ConvExpr)
327 return walkStringToBytesTemp(n, init)
330 n := n.(*ir.ConvExpr)
331 return walkStringToRunes(n, init)
333 case ir.OARRAYLIT, ir.OSLICELIT, ir.OMAPLIT, ir.OSTRUCTLIT, ir.OPTRLIT:
334 return walkCompLit(n, init)
337 n := n.(*ir.SendStmt)
338 return walkSend(n, init)
341 return walkClosure(n.(*ir.ClosureExpr), init)
344 return walkMethodValue(n.(*ir.SelectorExpr), init)
347 // No return! Each case must return (or panic),
348 // to avoid confusion about what gets returned
349 // in the presence of type assertions.
// walkExprList walks each expression in s in place, appending side effects
// to init. (Loop header elided in this listing — original lines jump 357→359.)
352 // walk the whole tree of the body of an
353 // expression or simple statement.
354 // the types expressions are calculated.
355 // compile-time constants are evaluated.
356 // complex side effects like statements are appended to init.
357 func walkExprList(s []ir.Node, init *ir.Nodes) {
359 s[i] = walkExpr(s[i], init)
// walkExprListCheap replaces each expression with a cheap (side-effect-free)
// copy before walking it; side effects go to init. Closing brace elided here.
363 func walkExprListCheap(s []ir.Node, init *ir.Nodes) {
364 for i, n := range s {
365 s[i] = cheapExpr(n, init)
366 s[i] = walkExpr(s[i], init)
// walkExprListSafe is like walkExprListCheap but uses safeExpr, which keeps
// results assignable when the input was. Closing brace elided in this listing.
370 func walkExprListSafe(s []ir.Node, init *ir.Nodes) {
371 for i, n := range s {
372 s[i] = safeExpr(n, init)
373 s[i] = walkExpr(s[i], init)
377 // return side-effect free and cheap n, appending side effects to init.
378 // result may not be assignable.
379 func cheapExpr(n ir.Node, init *ir.Nodes) ir.Node {
// Names and constants are already cheap and can be reused directly;
// anything else (elided default branch) is copied into a temporary.
381 case ir.ONAME, ir.OLITERAL, ir.ONIL:
385 return copyExpr(n, n.Type(), init)
// NOTE(review): several case labels and return statements are elided from
// this listing (original line numbers jump); comments cover visible code only.
388 // return side effect-free n, appending side effects to init.
389 // result is assignable if n is.
390 func safeExpr(n ir.Node, init *ir.Nodes) ir.Node {
// Hoist n's init statements into the caller's init list first.
395 if len(n.Init()) != 0 {
396 walkStmtList(n.Init())
397 init.Append(ir.TakeInit(n)...)
401 case ir.ONAME, ir.OLITERAL, ir.ONIL, ir.OLINKSYMOFFSET:
404 case ir.OLEN, ir.OCAP:
405 n := n.(*ir.UnaryExpr)
406 l := safeExpr(n.X, init)
// Rebuild the expression on a shallow copy so the original is not mutated,
// then re-typecheck and walk the copy.
410 a := ir.Copy(n).(*ir.UnaryExpr)
412 return walkExpr(typecheck.Expr(a), init)
414 case ir.ODOT, ir.ODOTPTR:
415 n := n.(*ir.SelectorExpr)
416 l := safeExpr(n.X, init)
420 a := ir.Copy(n).(*ir.SelectorExpr)
422 return walkExpr(typecheck.Expr(a), init)
425 n := n.(*ir.StarExpr)
426 l := safeExpr(n.X, init)
430 a := ir.Copy(n).(*ir.StarExpr)
432 return walkExpr(typecheck.Expr(a), init)
434 case ir.OINDEX, ir.OINDEXMAP:
435 n := n.(*ir.IndexExpr)
436 l := safeExpr(n.X, init)
437 r := safeExpr(n.Index, init)
438 if l == n.X && r == n.Index {
441 a := ir.Copy(n).(*ir.IndexExpr)
444 return walkExpr(typecheck.Expr(a), init)
446 case ir.OSTRUCTLIT, ir.OARRAYLIT, ir.OSLICELIT:
447 n := n.(*ir.CompLitExpr)
448 if isStaticCompositeLiteral(n) {
453 // make a copy; must not be used as an lvalue
454 if ir.IsAddressable(n) {
455 base.Fatalf("missing lvalue case in safeExpr: %v", n)
457 return cheapExpr(n, init)
// copyExpr assigns n to a fresh temporary of type t via init and (in the
// elided remainder of this listing) returns the temporary.
460 func copyExpr(n ir.Node, t *types.Type, init *ir.Nodes) ir.Node {
461 l := typecheck.TempAt(base.Pos, ir.CurFunc, t)
462 appendWalkStmt(init, ir.NewAssignStmt(base.Pos, l, n))
// walkAddString lowers string concatenation (OADDSTR) into a call to a
// runtime concatstring helper, optionally giving it a stack buffer when the
// result does not escape. Several statements are elided from this listing.
466 func walkAddString(n *ir.AddStringExpr, init *ir.Nodes) ir.Node {
470 base.Fatalf("walkAddString count %d too small", c)
473 buf := typecheck.NodNil()
474 if n.Esc() == ir.EscNone {
// Sum the statically-known operand lengths to decide whether the result
// can fit in a fixed-size stack buffer.
476 for _, n1 := range n.List {
477 if n1.Op() == ir.OLITERAL {
478 sz += int64(len(ir.StringVal(n1)))
482 // Don't allocate the buffer if the result won't fit.
483 if sz < tmpstringbufsize {
484 // Create temporary buffer for result string on stack.
485 buf = stackBufAddr(tmpstringbufsize, types.Types[types.TUINT8])
489 // build list of string arguments
490 args := []ir.Node{buf}
491 for _, n2 := range n.List {
492 args = append(args, typecheck.Conv(n2, types.Types[types.TSTRING]))
497 // small numbers of strings use direct runtime helpers.
498 // note: order.expr knows this cutoff too.
499 fn = fmt.Sprintf("concatstring%d", c)
501 // large numbers of strings are passed to the runtime as a slice.
504 t := types.NewSlice(types.Types[types.TSTRING])
505 // args[1:] to skip buf arg
506 slice := ir.NewCompLitExpr(base.Pos, ir.OCOMPLIT, t, args[1:])
507 slice.Prealloc = n.Prealloc
508 args = []ir.Node{buf, slice}
509 slice.SetEsc(ir.EscNone)
512 cat := typecheck.LookupRuntime(fn)
513 r := ir.NewCallExpr(base.Pos, ir.OCALL, cat, nil)
515 r1 := typecheck.Expr(r)
516 r1 = walkExpr(r1, init)
// hookInfo describes a libfuzzer instrumentation hook for a specific
// function; field declarations are elided from this listing.
522 type hookInfo struct {
// hooks maps fully qualified function names to their libfuzzer hook info.
// Additional entries may be elided from this listing.
528 var hooks = map[string]hookInfo{
529 "strings.EqualFold": {paramType: types.TSTRING, argsNum: 2, runtimeFunc: "libfuzzerHookEqualFold"},
// NOTE(review): parts of this function (including the switch over FuncPC
// intrinsic names and the final return) are elided from this listing.
532 // walkCall walks an OCALLFUNC or OCALLINTER node.
533 func walkCall(n *ir.CallExpr, init *ir.Nodes) ir.Node {
534 if n.Op() == ir.OCALLMETH {
535 base.FatalfAt(n.Pos(), "OCALLMETH missed by typecheck")
537 if n.Op() == ir.OCALLINTER || n.Fun.Op() == ir.OMETHEXPR {
538 // We expect both interface call reflect.Type.Method and concrete
539 // call reflect.(*rtype).Method.
542 if n.Op() == ir.OCALLINTER {
543 reflectdata.MarkUsedIfaceMethod(n)
546 if n.Op() == ir.OCALLFUNC && n.Fun.Op() == ir.OCLOSURE {
550 if ir.IsFuncPCIntrinsic(n) {
551 // For internal/abi.FuncPCABIxxx(fn), if fn is a defined function, rewrite
552 // it to the address of the function of the ABI fn is defined.
553 name := n.Fun.(*ir.Name).Sym().Name
559 case "FuncPCABIInternal":
560 wantABI = obj.ABIInternal
562 if n.Type() != types.Types[types.TUINTPTR] {
563 base.FatalfAt(n.Pos(), "FuncPC intrinsic should return uintptr, got %v", n.Type()) // as expected by typecheck.FuncPC.
565 n := ir.FuncPC(n.Pos(), arg, wantABI)
566 return walkExpr(n, init)
569 if name, ok := n.Fun.(*ir.Name); ok {
571 if sym.Pkg.Path == "go.runtime" && sym.Name == "deferrangefunc" {
572 // Call to runtime.deferrangefunc is being shared with a range-over-func
573 // body that might add defers to this frame, so we cannot use open-coded defers
574 // and we need to call deferreturn even if we don't see any other explicit defers.
575 ir.CurFunc.SetHasDefer(true)
576 ir.CurFunc.SetOpenCodedDeferDisallowed(true)
// walkCall1 walks the function and argument expressions of a call, pins
// call-clobberable arguments into temporaries, and inserts libfuzzer hooks
// when enabled. Several statements are elided from this listing.
584 func walkCall1(n *ir.CallExpr, init *ir.Nodes) {
586 return // already walked
590 if n.Op() == ir.OCALLMETH {
591 base.FatalfAt(n.Pos(), "OCALLMETH missed by typecheck")
595 params := n.Fun.Type().Params()
597 n.Fun = walkExpr(n.Fun, init)
598 walkExprList(args, init)
600 for i, arg := range args {
601 // Validate argument and parameter types match.
603 if !types.Identical(arg.Type(), param.Type) {
604 base.FatalfAt(n.Pos(), "assigning %L to parameter %v (type %v)", arg, param.Sym, param.Type)
607 // For any argument whose evaluation might require a function call,
608 // store that argument into a temporary variable,
609 // to prevent that calls from clobbering arguments already on the stack.
611 // assignment of arg to Temp
612 tmp := typecheck.TempAt(base.Pos, ir.CurFunc, param.Type)
613 init.Append(convas(typecheck.Stmt(ir.NewAssignStmt(base.Pos, tmp, arg)).(*ir.AssignStmt), init))
614 // replace arg with temp
619 funSym := n.Fun.Sym()
620 if base.Debug.Libfuzzer != 0 && funSym != nil {
621 if hook, found := hooks[funSym.Pkg.Path+"."+funSym.Name]; found {
622 if len(args) != hook.argsNum {
623 panic(fmt.Sprintf("%s.%s expects %d arguments, but received %d", funSym.Pkg.Path, funSym.Name, hook.argsNum, len(args)))
625 var hookArgs []ir.Node
626 for _, arg := range args {
627 hookArgs = append(hookArgs, tracecmpArg(arg, types.Types[hook.paramType], init))
629 hookArgs = append(hookArgs, fakePC(n))
630 init.Append(mkcall(hook.runtimeFunc, nil, init, hookArgs...))
// NOTE(review): branches of this function (the constant power-of-two checks
// and the runtime function-name selection) are partially elided here.
635 // walkDivMod walks an ODIV or OMOD node.
636 func walkDivMod(n *ir.BinaryExpr, init *ir.Nodes) ir.Node {
637 n.X = walkExpr(n.X, init)
638 n.Y = walkExpr(n.Y, init)
640 // rewrite complex div into function call.
641 et := n.X.Type().Kind()
643 if types.IsComplex[et] && n.Op() == ir.ODIV {
645 call := mkcall("complex128div", types.Types[types.TCOMPLEX128], init, typecheck.Conv(n.X, types.Types[types.TCOMPLEX128]), typecheck.Conv(n.Y, types.Types[types.TCOMPLEX128]))
646 return typecheck.Conv(call, t)
649 // Nothing to do for float divisions.
650 if types.IsFloat[et] {
654 // rewrite 64-bit div and mod on 32-bit architectures.
655 // TODO: Remove this code once we can introduce
656 // runtime calls late in SSA processing.
657 if types.RegSize < 8 && (et == types.TINT64 || et == types.TUINT64) {
658 if n.Y.Op() == ir.OLITERAL {
659 // Leave div/mod by constant powers of 2 or small 16-bit constants.
660 // The SSA backend will handle those.
663 c := ir.Int64Val(n.Y)
667 if c != 0 && c&(c-1) == 0 {
671 c := ir.Uint64Val(n.Y)
675 if c != 0 && c&(c-1) == 0 {
681 if et == types.TINT64 {
686 if n.Op() == ir.ODIV {
691 return mkcall(fn, n.Type(), init, typecheck.Conv(n.X, types.Types[et]), typecheck.Conv(n.Y, types.Types[et]))
696 // walkDot walks an ODOT or ODOTPTR node.
// Only the receiver walk is visible here; the rest (including the return)
// is elided from this listing.
697 func walkDot(n *ir.SelectorExpr, init *ir.Nodes) ir.Node {
699 n.X = walkExpr(n.X, init)
703 // walkDotType walks an ODOTTYPE or ODOTTYPE2 node.
704 func walkDotType(n *ir.TypeAssertExpr, init *ir.Nodes) ir.Node {
705 n.X = walkExpr(n.X, init)
706 // Set up interface type addresses for back end.
// Concrete target type: record the itab address for the assertion.
707 if !n.Type().IsInterface() && !n.X.Type().IsEmptyInterface() {
708 n.ITab = reflectdata.ITabAddrAt(base.Pos, n.Type(), n.X.Type())
710 if n.X.Type().IsInterface() && n.Type().IsInterface() && !n.Type().IsEmptyInterface() {
711 // This kind of conversion needs a runtime call. Allocate
712 // a descriptor for that call.
713 n.Descriptor = makeTypeAssertDescriptor(n.Type(), n.Op() == ir.ODOTTYPE2)
// makeTypeAssertDescriptor builds a static internal/abi.TypeAssert descriptor
// symbol for an interface-to-nonempty-interface assertion; canFail marks the
// two-result (comma-ok) form. The return statement is elided in this listing.
718 func makeTypeAssertDescriptor(target *types.Type, canFail bool) *obj.LSym {
719 // When converting from an interface to a non-empty interface. Needs a runtime call.
720 // Allocate an internal/abi.TypeAssert descriptor for that call.
721 lsym := types.LocalPkg.Lookup(fmt.Sprintf(".typeAssert.%d", typeAssertGen)).LinksymABI(obj.ABI0)
723 c := rttype.NewCursor(lsym, 0, rttype.TypeAssert)
724 c.Field("Cache").WritePtr(typecheck.LookupRuntimeVar("emptyTypeAssertCache"))
725 c.Field("Inter").WritePtr(reflectdata.TypeSym(target).Linksym())
726 c.Field("CanFail").WriteBool(canFail)
727 objw.Global(lsym, int32(rttype.TypeAssert.Size()), obj.LOCAL)
728 lsym.Gotype = reflectdata.TypeLinksym(rttype.TypeAssert)
// typeAssertGen numbers the generated .typeAssert.N descriptor symbols.
732 var typeAssertGen int
734 // walkDynamicDotType walks an ODYNAMICDOTTYPE or ODYNAMICDOTTYPE2 node.
735 func walkDynamicDotType(n *ir.DynamicTypeAssertExpr, init *ir.Nodes) ir.Node {
736 n.X = walkExpr(n.X, init)
737 n.RType = walkExpr(n.RType, init)
738 n.ITab = walkExpr(n.ITab, init)
739 // Convert to non-dynamic if we can.
// If the runtime type operand is the address of a statically-known type
// symbol, this assertion can be rewritten as an ordinary ODOTTYPE(2).
740 if n.RType != nil && n.RType.Op() == ir.OADDR {
741 addr := n.RType.(*ir.AddrExpr)
742 if addr.X.Op() == ir.OLINKSYMOFFSET {
743 r := ir.NewTypeAssertExpr(n.Pos(), n.X, n.Type())
744 if n.Op() == ir.ODYNAMICDOTTYPE2 {
745 r.SetOp(ir.ODOTTYPE2)
749 return walkExpr(r, init)
// NOTE(review): several statements (including where r and t are bound and
// the final return) are elided from this listing.
755 // walkIndex walks an OINDEX node.
756 func walkIndex(n *ir.IndexExpr, init *ir.Nodes) ir.Node {
757 n.X = walkExpr(n.X, init)
759 // save the original node for bounds checking elision.
760 // If it was a ODIV/OMOD walk might rewrite it.
763 n.Index = walkExpr(n.Index, init)
765 // if range of type cannot exceed static array bound,
766 // disable bounds check.
771 if t != nil && t.IsPtr() {
775 n.SetBounded(bounded(r, t.NumElem()))
776 if base.Flag.LowerM != 0 && n.Bounded() && !ir.IsConst(n.Index, constant.Int) {
777 base.Warn("index bounds check elided")
// Constant-string indexing: bound the index by the string's length.
779 } else if ir.IsConst(n.X, constant.String) {
780 n.SetBounded(bounded(r, int64(len(ir.StringVal(n.X)))))
781 if base.Flag.LowerM != 0 && n.Bounded() && !ir.IsConst(n.Index, constant.Int) {
782 base.Warn("index bounds check elided")
788 // mapKeyArg returns an expression for key that is suitable to be passed
789 // as the key argument for runtime map* functions.
790 // n is the map indexing or delete Node (to provide Pos).
// assumes assigned selects the mapassign family (which, per the comment
// below, distinguishes pointer vs. integer keys) — surrounding switch elided.
791 func mapKeyArg(fast int, n, key ir.Node, assigned bool) ir.Node {
793 // standard version takes key by reference.
794 // orderState.expr made sure key is addressable.
795 return typecheck.NodAddr(key)
798 // mapassign does distinguish pointer vs. integer key.
801 // mapaccess and mapdelete don't distinguish pointer vs. integer key.
804 return ir.NewConvExpr(n.Pos(), ir.OCONVNOP, types.Types[types.TUINT32], key)
806 return ir.NewConvExpr(n.Pos(), ir.OCONVNOP, types.Types[types.TUINT64], key)
808 // fast version takes key by value.
// NOTE(review): the bindings of t, fast, and map_, and the switch header,
// are elided from this listing.
813 // walkIndexMap walks an OINDEXMAP node.
814 // It replaces m[k] with *map{access1,assign}(maptype, m, &k)
815 func walkIndexMap(n *ir.IndexExpr, init *ir.Nodes) ir.Node {
816 n.X = walkExpr(n.X, init)
817 n.Index = walkExpr(n.Index, init)
821 key := mapKeyArg(fast, n, n.Index, n.Assigned)
822 args := []ir.Node{reflectdata.IndexMapRType(base.Pos, n), map_, key}
827 mapFn = mapfn(mapassign[fast], t, false)
// Large values use the *_fat access variant, which takes a pointer to a
// shared zero value for the miss case.
828 case t.Elem().Size() > zeroValSize:
829 args = append(args, reflectdata.ZeroAddr(t.Elem().Size()))
830 mapFn = mapfn("mapaccess1_fat", t, true)
832 mapFn = mapfn(mapaccess1[fast], t, false)
834 call := mkcall1(mapFn, nil, init, args...)
835 call.SetType(types.NewPtr(t.Elem()))
836 call.MarkNonNil() // mapaccess1* and mapassign always return non-nil pointers.
837 star := ir.NewStarExpr(base.Pos, call)
838 star.SetType(t.Elem())
843 // walkLogical walks an OANDAND or OOROR node.
844 func walkLogical(n *ir.LogicalExpr, init *ir.Nodes) ir.Node {
845 n.X = walkExpr(n.X, init)
847 // cannot put side effects from n.Right on init,
848 // because they cannot run before n.Left is checked.
849 // save elsewhere and store on the eventual n.Right.
// ll (declared on an elided line) collects the right operand's side effects
// so they stay short-circuited behind the left operand's evaluation.
852 n.Y = walkExpr(n.Y, &ll)
853 n.Y = ir.InitExpr(ll, n.Y)
857 // walkSend walks an OSEND node.
// Lowers ch <- v to runtime chansend1(ch, &v); n1's binding to the sent
// value is on an elided line.
858 func walkSend(n *ir.SendStmt, init *ir.Nodes) ir.Node {
860 n1 = typecheck.AssignConv(n1, n.Chan.Type().Elem(), "chan send")
861 n1 = walkExpr(n1, init)
862 n1 = typecheck.NodAddr(n1)
863 return mkcall1(chanfn("chansend1", 2, n.Chan.Type()), nil, init, n.Chan, n1)
866 // walkSlice walks an OSLICE, OSLICEARR, OSLICESTR, OSLICE3, or OSLICE3ARR node.
867 func walkSlice(n *ir.SliceExpr, init *ir.Nodes) ir.Node {
868 n.X = walkExpr(n.X, init)
869 n.Low = walkExpr(n.Low, init)
870 if n.Low != nil && ir.IsZero(n.Low) {
871 // Reduce x[0:j] to x[:j] and x[0:j:k] to x[:j:k].
874 n.High = walkExpr(n.High, init)
875 n.Max = walkExpr(n.Max, init)
// A full-range two-index slice of a slice or string is a no-op; the
// elided body presumably returns the operand directly.
877 if (n.Op() == ir.OSLICE || n.Op() == ir.OSLICESTR) && n.Low == nil && n.High == nil {
879 if base.Debug.Slice > 0 {
880 base.Warn("slice: omit slice operation")
887 // walkSliceHeader walks an OSLICEHEADER node.
// Walks the pointer, length, and capacity operands in place; the return
// is elided from this listing.
888 func walkSliceHeader(n *ir.SliceHeaderExpr, init *ir.Nodes) ir.Node {
889 n.Ptr = walkExpr(n.Ptr, init)
890 n.Len = walkExpr(n.Len, init)
891 n.Cap = walkExpr(n.Cap, init)
895 // walkStringHeader walks an OSTRINGHEADER node.
// Walks the pointer and length operands in place; the return is elided
// from this listing.
896 func walkStringHeader(n *ir.StringHeaderExpr, init *ir.Nodes) ir.Node {
897 n.Ptr = walkExpr(n.Ptr, init)
898 n.Len = walkExpr(n.Len, init)
// NOTE(review): case labels for the shift/division branches and several
// returns are elided from this listing; comments cover visible code only.
902 // return 1 if integer n must be in range [0, max), 0 otherwise.
903 func bounded(n ir.Node, max int64) bool {
904 if n.Type() == nil || !n.Type().IsInteger() {
908 sign := n.Type().IsSigned()
909 bits := int32(8 * n.Type().Size())
911 if ir.IsSmallIntConst(n) {
913 return 0 <= v && v < max
917 case ir.OAND, ir.OANDNOT:
918 n := n.(*ir.BinaryExpr)
// Use whichever operand is a small constant to bound the mask result.
921 case ir.IsSmallIntConst(n.X):
923 case ir.IsSmallIntConst(n.Y):
925 if n.Op() == ir.OANDNOT {
928 v &= 1<<uint(bits) - 1
932 if 0 <= v && v < max {
937 n := n.(*ir.BinaryExpr)
938 if !sign && ir.IsSmallIntConst(n.Y) {
939 v := ir.Int64Val(n.Y)
940 if 0 <= v && v <= max {
946 n := n.(*ir.BinaryExpr)
947 if !sign && ir.IsSmallIntConst(n.Y) {
948 v := ir.Int64Val(n.Y)
// Narrow the known bit width by the constant divisor/shift amount.
949 for bits > 0 && v >= 2 {
956 n := n.(*ir.BinaryExpr)
957 if !sign && ir.IsSmallIntConst(n.Y) {
958 v := ir.Int64Val(n.Y)
966 if !sign && bits <= 62 && 1<<uint(bits) <= max {
// NOTE(review): several returns, the switch headers, and some branch bodies
// are elided from this listing; comments cover visible code only.
973 // usemethod checks calls for uses of Method and MethodByName of reflect.Value,
974 // reflect.Type, reflect.(*rtype), and reflect.(*interfaceType).
975 func usemethod(n *ir.CallExpr) {
976 // Don't mark reflect.(*rtype).Method, etc. themselves in the reflect package.
977 // Those functions may be alive via the itab, which should not cause all methods
978 // alive. We only want to mark their callers.
979 if base.Ctxt.Pkgpath == "reflect" {
980 // TODO: is there a better way than hardcoding the names?
981 switch fn := ir.CurFunc.Nname.Sym().Name; {
982 case fn == "(*rtype).Method", fn == "(*rtype).MethodByName":
984 case fn == "(*interfaceType).Method", fn == "(*interfaceType).MethodByName":
986 case fn == "Value.Method", fn == "Value.MethodByName":
991 dot, ok := n.Fun.(*ir.SelectorExpr)
996 // looking for either direct method calls and interface method calls of:
997 // reflect.Type.Method - func(int) reflect.Method
998 // reflect.Type.MethodByName - func(string) (reflect.Method, bool)
1000 // reflect.Value.Method - func(int) reflect.Value
1001 // reflect.Value.MethodByName - func(string) reflect.Value
1002 methodName := dot.Sel.Name
1003 t := dot.Selection.Type
1005 // Check the number of arguments and return values.
1006 if t.NumParams() != 1 || (t.NumResults() != 1 && t.NumResults() != 2) {
1010 // Check the type of the argument.
1011 switch pKind := t.Param(0).Type.Kind(); {
1012 case methodName == "Method" && pKind == types.TINT,
1013 methodName == "MethodByName" && pKind == types.TSTRING:
1016 // not a call to Method or MethodByName of reflect.{Type,Value}.
1020 // Check that first result type is "reflect.Method" or "reflect.Value".
1021 // Note that we have to check sym name and sym package separately, as
1022 // we can't check for exact string "reflect.Method" reliably
1023 // (e.g., see #19028 and #38515).
1024 switch s := t.Result(0).Type.Sym(); {
1025 case s != nil && types.ReflectSymName(s) == "Method",
1026 s != nil && types.ReflectSymName(s) == "Value":
1029 // not a call to Method or MethodByName of reflect.{Type,Value}.
1033 var targetName ir.Node
// Pick the argument carrying the method name; its position differs by
// call form (the enclosing switch over dot.Op() is elided here).
1036 if methodName == "MethodByName" {
1037 targetName = n.Args[0]
1040 if methodName == "MethodByName" {
1041 targetName = n.Args[1]
1044 base.FatalfAt(dot.Pos(), "usemethod: unexpected dot.Op() %s", dot.Op())
// A constant method name gets a precise R_USENAMEDMETHOD relocation;
// otherwise the whole function is marked as using reflect methods.
1047 if ir.IsConst(targetName, constant.String) {
1048 name := constant.StringVal(targetName.Val())
1050 r := obj.Addrel(ir.CurFunc.LSym)
1051 r.Type = objabi.R_USENAMEDMETHOD
1052 r.Sym = staticdata.StringSymNoCommon(name)
1054 ir.CurFunc.LSym.Set(obj.AttrReflectMethod, true)
// usefield records a use of a go:"track"-annotated struct field in the
// current function's FieldTrack set. Several statements (including the
// switch header and outer's binding) are elided from this listing.
1058 func usefield(n *ir.SelectorExpr) {
// Field tracking is gated behind the FieldTrack build experiment.
1059 if !buildcfg.Experiment.FieldTrack {
1065 base.Fatalf("usefield %v", n.Op())
1067 case ir.ODOT, ir.ODOTPTR:
1071 field := n.Selection
1073 base.Fatalf("usefield %v %v without paramfld", n.X.Type(), n.Sel)
1075 if field.Sym != n.Sel {
1076 base.Fatalf("field inconsistency: %v != %v", field.Sym, n.Sel)
// Only fields annotated with go:"track" in their tag note are recorded.
1078 if !strings.Contains(field.Note, "go:\"track\"") {
1084 outer = outer.Elem()
1086 if outer.Sym() == nil {
1087 base.Errorf("tracked field must be in named struct type")
1090 sym := reflectdata.TrackSym(outer, field)
1091 if ir.CurFunc.FieldTrack == nil {
1092 ir.CurFunc.FieldTrack = make(map[*obj.LSym]struct{})
1094 ir.CurFunc.FieldTrack[sym] = struct{}{}