1 // Copyright 2009 The Go Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style
3 // license that can be found in the LICENSE file.
13 "cmd/compile/internal/base"
14 "cmd/compile/internal/ir"
15 "cmd/compile/internal/reflectdata"
16 "cmd/compile/internal/staticdata"
17 "cmd/compile/internal/typecheck"
18 "cmd/compile/internal/types"
// walkExpr walks expression n, appending any side effects to init and
// returning the (possibly replaced) node.
// NOTE(review): interior lines of this function are elided in this view;
// the comments below describe only the visible code.
23 // The result of walkExpr MUST be assigned back to n, e.g.
25 // n.Left = walkExpr(n.Left, init)
26 func walkExpr(n ir.Node, init *ir.Nodes) ir.Node {
31 if n, ok := n.(ir.InitNode); ok && init == n.PtrInit() {
32 // not okay to use n->ninit when walking n,
33 // because we might replace n with some other node
34 // and would lose the init list.
35 base.Fatalf("walkExpr init == &n->ninit")
// Hoist n's own init statements onto the caller-supplied init list
// before walking n itself.
38 if len(n.Init()) != 0 {
39 walkStmtList(n.Init())
40 init.Append(ir.TakeInit(n)...)
// Debug dump before walking when -W is given more than once.
45 if base.Flag.LowerW > 1 {
46 ir.Dump("before walk expr", n)
// Internal consistency checks: walk runs only on fully typechecked nodes.
49 if n.Typecheck() != 1 {
50 base.Fatalf("missed typecheck: %+v", n)
53 if n.Type().IsUntyped() {
54 base.Fatalf("expression has untyped type: %+v", n)
// Dispatch to the per-op walk logic.
57 n = walkExpr1(n, init)
59 // Eagerly compute sizes of all expressions for the back end.
60 if typ := n.Type(); typ != nil && typ.Kind() != types.TBLANK && !typ.IsFuncArgStruct() {
63 if n, ok := n.(*ir.Name); ok && n.Heapaddr != nil {
64 types.CheckSize(n.Heapaddr.Type())
66 if ir.IsConst(n, constant.String) {
67 // Emit string symbol now to avoid emitting
68 // any concurrently during the backend.
69 _ = staticdata.StringSym(n.Pos(), constant.StringVal(n.Val()))
// Debug dump after walking when -W is given.
72 if base.Flag.LowerW != 0 && n != nil {
73 ir.Dump("after walk expr", n)
// walkExpr1 dispatches on n.Op() and rewrites each expression kind,
// usually by delegating to a dedicated walkXxx helper or by recursively
// walking operands in place.
// NOTE(review): many case labels and returns are elided in this view;
// comments describe only the visible lines.
80 func walkExpr1(n ir.Node, init *ir.Nodes) ir.Node {
// Unknown ops are compiler bugs, not user errors.
84 base.Fatalf("walkExpr: switch 1 unknown op %+v", n.Op())
87 case ir.OGETG, ir.OGETCALLERPC, ir.OGETCALLERSP:
90 case ir.OTYPE, ir.ONAME, ir.OLITERAL, ir.ONIL, ir.OLINKSYMOFFSET:
91 // TODO(mdempsky): Just return n; see discussion on CL 38655.
92 // Perhaps refactor to use Node.mayBeShared for these instead.
93 // If these return early, make sure to still call
94 // StringSym for constant strings.
98 // TODO(mdempsky): Do this right after type checking.
99 n := n.(*ir.SelectorExpr)
102 case ir.OMIN, ir.OMAX:
103 n := n.(*ir.CallExpr)
104 return walkMinMax(n, init)
// Unary operators: walk the single operand in place.
106 case ir.ONOT, ir.ONEG, ir.OPLUS, ir.OBITNOT, ir.OREAL, ir.OIMAG, ir.OSPTR, ir.OITAB, ir.OIDATA:
107 n := n.(*ir.UnaryExpr)
108 n.X = walkExpr(n.X, init)
111 case ir.ODOTMETH, ir.ODOTINTER:
112 n := n.(*ir.SelectorExpr)
113 n.X = walkExpr(n.X, init)
117 n := n.(*ir.AddrExpr)
118 n.X = walkExpr(n.X, init)
122 n := n.(*ir.StarExpr)
123 n.X = walkExpr(n.X, init)
// Binary operators: walk both operands in place.
126 case ir.OEFACE, ir.OAND, ir.OANDNOT, ir.OSUB, ir.OMUL, ir.OADD, ir.OOR, ir.OXOR, ir.OLSH, ir.ORSH,
128 n := n.(*ir.BinaryExpr)
129 n.X = walkExpr(n.X, init)
130 n.Y = walkExpr(n.Y, init)
133 case ir.OUNSAFESLICE:
134 n := n.(*ir.BinaryExpr)
135 return walkUnsafeSlice(n, init)
137 case ir.OUNSAFESTRING:
138 n := n.(*ir.BinaryExpr)
139 return walkUnsafeString(n, init)
141 case ir.OUNSAFESTRINGDATA, ir.OUNSAFESLICEDATA:
142 n := n.(*ir.UnaryExpr)
143 return walkUnsafeData(n, init)
145 case ir.ODOT, ir.ODOTPTR:
146 n := n.(*ir.SelectorExpr)
147 return walkDot(n, init)
149 case ir.ODOTTYPE, ir.ODOTTYPE2:
150 n := n.(*ir.TypeAssertExpr)
151 return walkDotType(n, init)
153 case ir.ODYNAMICDOTTYPE, ir.ODYNAMICDOTTYPE2:
154 n := n.(*ir.DynamicTypeAssertExpr)
155 return walkDynamicDotType(n, init)
157 case ir.OLEN, ir.OCAP:
158 n := n.(*ir.UnaryExpr)
159 return walkLenCap(n, init)
162 n := n.(*ir.BinaryExpr)
163 n.X = walkExpr(n.X, init)
164 n.Y = walkExpr(n.Y, init)
167 case ir.OEQ, ir.ONE, ir.OLT, ir.OLE, ir.OGT, ir.OGE:
168 n := n.(*ir.BinaryExpr)
169 return walkCompare(n, init)
171 case ir.OANDAND, ir.OOROR:
172 n := n.(*ir.LogicalExpr)
173 return walkLogical(n, init)
175 case ir.OPRINT, ir.OPRINTN:
176 return walkPrint(n.(*ir.CallExpr), init)
// panic(x) lowers to a runtime gopanic call.
179 n := n.(*ir.UnaryExpr)
180 return mkcall("gopanic", nil, init, n.X)
183 return walkRecoverFP(n.(*ir.CallExpr), init)
188 case ir.OCALLINTER, ir.OCALLFUNC:
189 n := n.(*ir.CallExpr)
190 return walkCall(n, init)
// Assignment forms delegate to the walkAssign* family.
192 case ir.OAS, ir.OASOP:
193 return walkAssign(init, n)
196 n := n.(*ir.AssignListStmt)
197 return walkAssignList(init, n)
201 n := n.(*ir.AssignListStmt)
202 return walkAssignFunc(init, n)
205 // order.stmt made sure x is addressable or blank.
207 n := n.(*ir.AssignListStmt)
208 return walkAssignRecv(init, n)
212 n := n.(*ir.AssignListStmt)
213 return walkAssignMapRead(init, n)
216 n := n.(*ir.CallExpr)
217 return walkDelete(init, n)
220 n := n.(*ir.AssignListStmt)
221 return walkAssignDotType(n, init)
// Conversion forms delegate to the walkConv* family.
224 n := n.(*ir.ConvExpr)
225 return walkConvInterface(n, init)
228 n := n.(*ir.ConvExpr)
229 return walkConvIData(n, init)
231 case ir.OCONV, ir.OCONVNOP:
232 n := n.(*ir.ConvExpr)
233 return walkConv(n, init)
236 n := n.(*ir.ConvExpr)
237 return walkSliceToArray(n, init)
239 case ir.OSLICE2ARRPTR:
240 n := n.(*ir.ConvExpr)
241 n.X = walkExpr(n.X, init)
244 case ir.ODIV, ir.OMOD:
245 n := n.(*ir.BinaryExpr)
246 return walkDivMod(n, init)
249 n := n.(*ir.IndexExpr)
250 return walkIndex(n, init)
253 n := n.(*ir.IndexExpr)
254 return walkIndexMap(n, init)
257 base.Fatalf("walkExpr ORECV") // should see inside OAS only
260 case ir.OSLICEHEADER:
261 n := n.(*ir.SliceHeaderExpr)
262 return walkSliceHeader(n, init)
264 case ir.OSTRINGHEADER:
265 n := n.(*ir.StringHeaderExpr)
266 return walkStringHeader(n, init)
268 case ir.OSLICE, ir.OSLICEARR, ir.OSLICESTR, ir.OSLICE3, ir.OSLICE3ARR:
269 n := n.(*ir.SliceExpr)
270 return walkSlice(n, init)
273 n := n.(*ir.UnaryExpr)
274 return walkNew(n, init)
277 return walkAddString(n.(*ir.AddStringExpr), init)
280 // order should make sure we only see OAS(node, OAPPEND), which we handle above.
281 base.Fatalf("append outside assignment")
285 return walkCopy(n.(*ir.BinaryExpr), init, base.Flag.Cfg.Instrumenting && !base.Flag.CompilingRuntime)
288 n := n.(*ir.UnaryExpr)
292 n := n.(*ir.UnaryExpr)
293 return walkClose(n, init)
// make(chan/map/[]T) forms delegate to the walkMake* family.
296 n := n.(*ir.MakeExpr)
297 return walkMakeChan(n, init)
300 n := n.(*ir.MakeExpr)
301 return walkMakeMap(n, init)
304 n := n.(*ir.MakeExpr)
305 return walkMakeSlice(n, init)
307 case ir.OMAKESLICECOPY:
308 n := n.(*ir.MakeExpr)
309 return walkMakeSliceCopy(n, init)
// String <-> byte/rune conversions.
312 n := n.(*ir.ConvExpr)
313 return walkRuneToString(n, init)
315 case ir.OBYTES2STR, ir.ORUNES2STR:
316 n := n.(*ir.ConvExpr)
317 return walkBytesRunesToString(n, init)
319 case ir.OBYTES2STRTMP:
320 n := n.(*ir.ConvExpr)
321 return walkBytesToStringTemp(n, init)
324 n := n.(*ir.ConvExpr)
325 return walkStringToBytes(n, init)
327 case ir.OSTR2BYTESTMP:
328 n := n.(*ir.ConvExpr)
329 return walkStringToBytesTemp(n, init)
332 n := n.(*ir.ConvExpr)
333 return walkStringToRunes(n, init)
335 case ir.OARRAYLIT, ir.OSLICELIT, ir.OMAPLIT, ir.OSTRUCTLIT, ir.OPTRLIT:
336 return walkCompLit(n, init)
339 n := n.(*ir.SendStmt)
340 return walkSend(n, init)
343 return walkClosure(n.(*ir.ClosureExpr), init)
346 return walkMethodValue(n.(*ir.SelectorExpr), init)
349 // No return! Each case must return (or panic),
350 // to avoid confusion about what gets returned
351 // in the presence of type assertions.
// walkExprList walks each expression in s, replacing each element with
// its walked form and appending side effects to init.
354 // walk the whole tree of the body of an
355 // expression or simple statement.
356 // the types expressions are calculated.
357 // compile-time constants are evaluated.
358 // complex side effects like statements are appended to init.
359 func walkExprList(s []ir.Node, init *ir.Nodes) {
361 s[i] = walkExpr(s[i], init)
// walkExprListCheap first makes each expression cheap (side effects
// moved to init) via cheapExpr, then walks the cheap form in place.
365 func walkExprListCheap(s []ir.Node, init *ir.Nodes) {
366 for i, n := range s {
367 s[i] = cheapExpr(n, init)
368 s[i] = walkExpr(s[i], init)
// walkExprListSafe makes each expression safe (side-effect free but still
// assignable, see safeExpr) before walking it in place.
372 func walkExprListSafe(s []ir.Node, init *ir.Nodes) {
373 for i, n := range s {
374 s[i] = safeExpr(n, init)
375 s[i] = walkExpr(s[i], init)
379 // return side-effect free and cheap n, appending side effects to init.
380 // result may not be assignable.
381 func cheapExpr(n ir.Node, init *ir.Nodes) ir.Node {
// Names and constants are already cheap; anything else is copied
// into a temporary (visible fall-through below).
383 case ir.ONAME, ir.OLITERAL, ir.ONIL:
387 return copyExpr(n, n.Type(), init)
// NOTE(review): interior lines (e.g. the copied nodes' field updates
// between ir.Copy and typecheck.Expr) are elided in this view.
390 // return side effect-free n, appending side effects to init.
391 // result is assignable if n is.
392 func safeExpr(n ir.Node, init *ir.Nodes) ir.Node {
// Hoist n's init statements to the caller's init list first.
397 if len(n.Init()) != 0 {
398 walkStmtList(n.Init())
399 init.Append(ir.TakeInit(n)...)
// Leaf nodes are already safe.
403 case ir.ONAME, ir.OLITERAL, ir.ONIL, ir.OLINKSYMOFFSET:
// For composite expressions, recursively make the operands safe;
// when an operand changed, rebuild the node as a copy over the
// safe operands and re-typecheck/walk it.
406 case ir.OLEN, ir.OCAP:
407 n := n.(*ir.UnaryExpr)
408 l := safeExpr(n.X, init)
412 a := ir.Copy(n).(*ir.UnaryExpr)
414 return walkExpr(typecheck.Expr(a), init)
416 case ir.ODOT, ir.ODOTPTR:
417 n := n.(*ir.SelectorExpr)
418 l := safeExpr(n.X, init)
422 a := ir.Copy(n).(*ir.SelectorExpr)
424 return walkExpr(typecheck.Expr(a), init)
427 n := n.(*ir.StarExpr)
428 l := safeExpr(n.X, init)
432 a := ir.Copy(n).(*ir.StarExpr)
434 return walkExpr(typecheck.Expr(a), init)
436 case ir.OINDEX, ir.OINDEXMAP:
437 n := n.(*ir.IndexExpr)
438 l := safeExpr(n.X, init)
439 r := safeExpr(n.Index, init)
440 if l == n.X && r == n.Index {
443 a := ir.Copy(n).(*ir.IndexExpr)
446 return walkExpr(typecheck.Expr(a), init)
448 case ir.OSTRUCTLIT, ir.OARRAYLIT, ir.OSLICELIT:
449 n := n.(*ir.CompLitExpr)
450 if isStaticCompositeLiteral(n) {
455 // make a copy; must not be used as an lvalue
456 if ir.IsAddressable(n) {
457 base.Fatalf("missing lvalue case in safeExpr: %v", n)
459 return cheapExpr(n, init)
// copyExpr evaluates n into a fresh temporary of type t (the assignment
// is appended to init) and returns the temporary.
462 func copyExpr(n ir.Node, t *types.Type, init *ir.Nodes) ir.Node {
463 l := typecheck.TempAt(base.Pos, ir.CurFunc, t)
464 appendWalkStmt(init, ir.NewAssignStmt(base.Pos, l, n))
// walkAddString lowers string concatenation (OADDSTR) to a runtime
// concatstring* call, optionally with a stack buffer when the result
// does not escape.
// NOTE(review): lines choosing c and the small-count threshold are
// elided in this view.
468 func walkAddString(n *ir.AddStringExpr, init *ir.Nodes) ir.Node {
472 base.Fatalf("walkAddString count %d too small", c)
// buf defaults to nil; a stack buffer is only used for non-escaping
// results whose constant parts already fit.
475 buf := typecheck.NodNil()
476 if n.Esc() == ir.EscNone {
478 for _, n1 := range n.List {
479 if n1.Op() == ir.OLITERAL {
480 sz += int64(len(ir.StringVal(n1)))
484 // Don't allocate the buffer if the result won't fit.
485 if sz < tmpstringbufsize {
486 // Create temporary buffer for result string on stack.
487 buf = stackBufAddr(tmpstringbufsize, types.Types[types.TUINT8])
491 // build list of string arguments
492 args := []ir.Node{buf}
493 for _, n2 := range n.List {
494 args = append(args, typecheck.Conv(n2, types.Types[types.TSTRING]))
499 // small numbers of strings use direct runtime helpers.
500 // note: order.expr knows this cutoff too.
501 fn = fmt.Sprintf("concatstring%d", c)
503 // large numbers of strings are passed to the runtime as a slice.
506 t := types.NewSlice(types.Types[types.TSTRING])
507 // args[1:] to skip buf arg
508 slice := ir.NewCompLitExpr(base.Pos, ir.OCOMPLIT, t, args[1:])
509 slice.Prealloc = n.Prealloc
510 args = []ir.Node{buf, slice}
511 slice.SetEsc(ir.EscNone)
// Build, typecheck, and walk the final runtime call.
514 cat := typecheck.LookupRuntime(fn)
515 r := ir.NewCallExpr(base.Pos, ir.OCALL, cat, nil)
517 r1 := typecheck.Expr(r)
518 r1 = walkExpr(r1, init)
// hookInfo describes a libfuzzer instrumentation hook for a specific
// library function; hooks maps "pkgpath.Name" to its hook.
// NOTE(review): struct fields and additional map entries are elided
// in this view.
524 type hookInfo struct {
530 var hooks = map[string]hookInfo{
531 "strings.EqualFold": {paramType: types.TSTRING, argsNum: 2, runtimeFunc: "libfuzzerHookEqualFold"},
// NOTE(review): interior lines (e.g. the wantABI switch arms other than
// FuncPCABIInternal, and the arg extraction) are elided in this view.
534 // walkCall walks an OCALLFUNC or OCALLINTER node.
535 func walkCall(n *ir.CallExpr, init *ir.Nodes) ir.Node {
// OCALLMETH should have been rewritten by typecheck before walk.
536 if n.Op() == ir.OCALLMETH {
537 base.FatalfAt(n.Pos(), "OCALLMETH missed by typecheck")
539 if n.Op() == ir.OCALLINTER || n.X.Op() == ir.OMETHEXPR {
540 // We expect both interface call reflect.Type.Method and concrete
541 // call reflect.(*rtype).Method.
544 if n.Op() == ir.OCALLINTER {
545 reflectdata.MarkUsedIfaceMethod(n)
548 if n.Op() == ir.OCALLFUNC && n.X.Op() == ir.OCLOSURE {
// Special-case internal/abi.FuncPCABIxxx intrinsics.
552 if ir.IsFuncPCIntrinsic(n) {
553 // For internal/abi.FuncPCABIxxx(fn), if fn is a defined function, rewrite
554 // it to the address of the function of the ABI fn is defined.
555 name := n.X.(*ir.Name).Sym().Name
561 case "FuncPCABIInternal":
562 wantABI = obj.ABIInternal
564 if isIfaceOfFunc(arg) {
565 fn := arg.(*ir.ConvExpr).X.(*ir.Name)
568 base.ErrorfAt(n.Pos(), 0, "internal/abi.%s expects an %v function, %s is defined as %v", name, wantABI, fn.Sym().Name, abi)
// Defined function: take the address of its linksym for the ABI.
570 var e ir.Node = ir.NewLinksymExpr(n.Pos(), fn.Sym().LinksymABI(abi), types.Types[types.TUINTPTR])
571 e = ir.NewAddrExpr(n.Pos(), e)
572 e.SetType(types.Types[types.TUINTPTR].PtrTo())
573 return typecheck.Expr(ir.NewConvExpr(n.Pos(), ir.OCONVNOP, n.Type(), e))
575 // fn is not a defined function. It must be ABIInternal.
576 // Read the address from func value, i.e. *(*uintptr)(idata(fn)).
577 if wantABI != obj.ABIInternal {
578 base.ErrorfAt(n.Pos(), 0, "internal/abi.%s does not accept func expression, which is ABIInternal", name)
580 arg = walkExpr(arg, init)
581 var e ir.Node = ir.NewUnaryExpr(n.Pos(), ir.OIDATA, arg)
582 e.SetType(n.Type().PtrTo())
584 e = ir.NewStarExpr(n.Pos(), e)
// walkCall1 walks a call's function and arguments in place, spills
// arguments whose evaluation may itself call into temporaries, and adds
// libfuzzer hook instrumentation when enabled.
// NOTE(review): interior lines (the "already walked" guard condition,
// args/param extraction, and the spill condition) are elided in this view.
594 func walkCall1(n *ir.CallExpr, init *ir.Nodes) {
596 return // already walked
600 if n.Op() == ir.OCALLMETH {
601 base.FatalfAt(n.Pos(), "OCALLMETH missed by typecheck")
605 params := n.X.Type().Params()
607 n.X = walkExpr(n.X, init)
608 walkExprList(args, init)
610 for i, arg := range args {
611 // Validate argument and parameter types match.
613 if !types.Identical(arg.Type(), param.Type) {
614 base.FatalfAt(n.Pos(), "assigning %L to parameter %v (type %v)", arg, param.Sym, param.Type)
617 // For any argument whose evaluation might require a function call,
618 // store that argument into a temporary variable,
619 // to prevent that calls from clobbering arguments already on the stack.
621 // assignment of arg to Temp
622 tmp := typecheck.TempAt(base.Pos, ir.CurFunc, param.Type)
623 init.Append(convas(typecheck.Stmt(ir.NewAssignStmt(base.Pos, tmp, arg)).(*ir.AssignStmt), init))
624 // replace arg with temp
// Libfuzzer instrumentation: call the registered runtime hook with
// the call's arguments plus a fake PC.
630 if base.Debug.Libfuzzer != 0 && funSym != nil {
631 if hook, found := hooks[funSym.Pkg.Path+"."+funSym.Name]; found {
632 if len(args) != hook.argsNum {
633 panic(fmt.Sprintf("%s.%s expects %d arguments, but received %d", funSym.Pkg.Path, funSym.Name, hook.argsNum, len(args)))
635 var hookArgs []ir.Node
636 for _, arg := range args {
637 hookArgs = append(hookArgs, tracecmpArg(arg, types.Types[hook.paramType], init))
639 hookArgs = append(hookArgs, fakePC(n))
640 init.Append(mkcall(hook.runtimeFunc, nil, init, hookArgs...))
// NOTE(review): interior lines (the power-of-two early returns and the
// runtime function-name selection) are elided in this view.
645 // walkDivMod walks an ODIV or OMOD node.
646 func walkDivMod(n *ir.BinaryExpr, init *ir.Nodes) ir.Node {
647 n.X = walkExpr(n.X, init)
648 n.Y = walkExpr(n.Y, init)
650 // rewrite complex div into function call.
651 et := n.X.Type().Kind()
653 if types.IsComplex[et] && n.Op() == ir.ODIV {
655 call := mkcall("complex128div", types.Types[types.TCOMPLEX128], init, typecheck.Conv(n.X, types.Types[types.TCOMPLEX128]), typecheck.Conv(n.Y, types.Types[types.TCOMPLEX128]))
656 return typecheck.Conv(call, t)
659 // Nothing to do for float divisions.
660 if types.IsFloat[et] {
664 // rewrite 64-bit div and mod on 32-bit architectures.
665 // TODO: Remove this code once we can introduce
666 // runtime calls late in SSA processing.
667 if types.RegSize < 8 && (et == types.TINT64 || et == types.TUINT64) {
668 if n.Y.Op() == ir.OLITERAL {
669 // Leave div/mod by constant powers of 2 or small 16-bit constants.
670 // The SSA backend will handle those.
673 c := ir.Int64Val(n.Y)
677 if c != 0 && c&(c-1) == 0 {
681 c := ir.Uint64Val(n.Y)
685 if c != 0 && c&(c-1) == 0 {
691 if et == types.TINT64 {
696 if n.Op() == ir.ODIV {
// Fall back to a runtime helper call for 64-bit div/mod on
// 32-bit targets.
701 return mkcall(fn, n.Type(), init, typecheck.Conv(n.X, types.Types[et]), typecheck.Conv(n.Y, types.Types[et]))
706 // walkDot walks an ODOT or ODOTPTR node.
707 func walkDot(n *ir.SelectorExpr, init *ir.Nodes) ir.Node {
// Walk the receiver expression in place.
709 n.X = walkExpr(n.X, init)
713 // walkDotType walks an ODOTTYPE or ODOTTYPE2 node.
714 func walkDotType(n *ir.TypeAssertExpr, init *ir.Nodes) ir.Node {
715 n.X = walkExpr(n.X, init)
716 // Set up interface type addresses for back end.
// Only needed for concrete-type assertions from non-empty interfaces.
717 if !n.Type().IsInterface() && !n.X.Type().IsEmptyInterface() {
718 n.ITab = reflectdata.ITabAddrAt(base.Pos, n.Type(), n.X.Type())
723 // walkDynamicDotType walks an ODYNAMICDOTTYPE or ODYNAMICDOTTYPE2 node.
724 func walkDynamicDotType(n *ir.DynamicTypeAssertExpr, init *ir.Nodes) ir.Node {
// Walk the operand plus the runtime-type and itab expressions in place.
725 n.X = walkExpr(n.X, init)
726 n.RType = walkExpr(n.RType, init)
727 n.ITab = walkExpr(n.ITab, init)
// NOTE(review): interior lines (saving the pre-walk index r and the
// array-type extraction) are elided in this view.
731 // walkIndex walks an OINDEX node.
732 func walkIndex(n *ir.IndexExpr, init *ir.Nodes) ir.Node {
733 n.X = walkExpr(n.X, init)
735 // save the original node for bounds checking elision.
736 // If it was a ODIV/OMOD walk might rewrite it.
739 n.Index = walkExpr(n.Index, init)
741 // if range of type cannot exceed static array bound,
742 // disable bounds check.
747 if t != nil && t.IsPtr() {
// Array indexing: elide the bounds check when the index is provably
// within the static array bound.
751 n.SetBounded(bounded(r, t.NumElem()))
752 if base.Flag.LowerM != 0 && n.Bounded() && !ir.IsConst(n.Index, constant.Int) {
753 base.Warn("index bounds check elided")
// Constant-string indexing: same elision against the string length.
755 } else if ir.IsConst(n.X, constant.String) {
756 n.SetBounded(bounded(r, int64(len(ir.StringVal(n.X)))))
757 if base.Flag.LowerM != 0 && n.Bounded() && !ir.IsConst(n.Index, constant.Int) {
758 base.Warn("index bounds check elided")
// NOTE(review): the fast-variant switch labels are elided in this view.
764 // mapKeyArg returns an expression for key that is suitable to be passed
765 // as the key argument for runtime map* functions.
766 // n is the map indexing or delete Node (to provide Pos).
767 func mapKeyArg(fast int, n, key ir.Node, assigned bool) ir.Node {
769 // standard version takes key by reference.
770 // orderState.expr made sure key is addressable.
771 return typecheck.NodAddr(key)
774 // mapassign does distinguish pointer vs. integer key.
777 // mapaccess and mapdelete don't distinguish pointer vs. integer key.
// Reinterpret the key as the matching-width unsigned integer so one
// runtime fast path serves both pointer and integer keys.
780 return ir.NewConvExpr(n.Pos(), ir.OCONVNOP, types.Types[types.TUINT32], key)
782 return ir.NewConvExpr(n.Pos(), ir.OCONVNOP, types.Types[types.TUINT64], key)
784 // fast version takes key by value.
// NOTE(review): interior lines (map_/t/fast extraction and the switch
// header) are elided in this view.
789 // walkIndexMap walks an OINDEXMAP node.
790 // It replaces m[k] with *map{access1,assign}(maptype, m, &k)
791 func walkIndexMap(n *ir.IndexExpr, init *ir.Nodes) ir.Node {
792 n.X = walkExpr(n.X, init)
793 n.Index = walkExpr(n.Index, init)
797 key := mapKeyArg(fast, n, n.Index, n.Assigned)
798 args := []ir.Node{reflectdata.IndexMapRType(base.Pos, n), map_, key}
// Select the runtime function: mapassign for assigned indexes,
// mapaccess1_fat for large elements (extra zero-value arg), else
// the plain/fast mapaccess1 variant.
803 mapFn = mapfn(mapassign[fast], t, false)
804 case t.Elem().Size() > zeroValSize:
805 args = append(args, reflectdata.ZeroAddr(t.Elem().Size()))
806 mapFn = mapfn("mapaccess1_fat", t, true)
808 mapFn = mapfn(mapaccess1[fast], t, false)
810 call := mkcall1(mapFn, nil, init, args...)
811 call.SetType(types.NewPtr(t.Elem()))
812 call.MarkNonNil() // mapaccess1* and mapassign always return non-nil pointers.
// Dereference the returned element pointer to yield the element value.
813 star := ir.NewStarExpr(base.Pos, call)
814 star.SetType(t.Elem())
819 // walkLogical walks an OANDAND or OOROR node.
820 func walkLogical(n *ir.LogicalExpr, init *ir.Nodes) ir.Node {
821 n.X = walkExpr(n.X, init)
823 // cannot put side effects from n.Right on init,
824 // because they cannot run before n.Left is checked.
825 // save elsewhere and store on the eventual n.Right.
// ll collects the right operand's side effects; they are re-attached
// to n.Y so they run only after short-circuit evaluation of n.X.
828 n.Y = walkExpr(n.Y, &ll)
829 n.Y = ir.InitExpr(ll, n.Y)
833 // walkSend walks an OSEND node, lowering it to a runtime chansend1 call
// that takes the channel and the address of the value to send.
834 func walkSend(n *ir.SendStmt, init *ir.Nodes) ir.Node {
836 n1 = typecheck.AssignConv(n1, n.Chan.Type().Elem(), "chan send")
837 n1 = walkExpr(n1, init)
838 n1 = typecheck.NodAddr(n1)
839 return mkcall1(chanfn("chansend1", 2, n.Chan.Type()), nil, init, n.Chan, n1)
842 // walkSlice walks an OSLICE, OSLICEARR, OSLICESTR, OSLICE3, or OSLICE3ARR node.
843 func walkSlice(n *ir.SliceExpr, init *ir.Nodes) ir.Node {
844 n.X = walkExpr(n.X, init)
845 n.Low = walkExpr(n.Low, init)
846 if n.Low != nil && ir.IsZero(n.Low) {
847 // Reduce x[0:j] to x[:j] and x[0:j:k] to x[:j:k].
850 n.High = walkExpr(n.High, init)
851 n.Max = walkExpr(n.Max, init)
// A full-range two-index slice of a slice/string is a no-op.
853 if (n.Op() == ir.OSLICE || n.Op() == ir.OSLICESTR) && n.Low == nil && n.High == nil {
855 if base.Debug.Slice > 0 {
856 base.Warn("slice: omit slice operation")
863 // walkSliceHeader walks an OSLICEHEADER node.
864 func walkSliceHeader(n *ir.SliceHeaderExpr, init *ir.Nodes) ir.Node {
// Walk the pointer, length, and capacity operands in place.
865 n.Ptr = walkExpr(n.Ptr, init)
866 n.Len = walkExpr(n.Len, init)
867 n.Cap = walkExpr(n.Cap, init)
871 // walkStringHeader walks an OSTRINGHEADER node.
872 func walkStringHeader(n *ir.StringHeaderExpr, init *ir.Nodes) ir.Node {
// Walk the pointer and length operands in place.
873 n.Ptr = walkExpr(n.Ptr, init)
874 n.Len = walkExpr(n.Len, init)
// NOTE(review): several case labels (the op switch header, ODIV/OMOD/ORSH
// arms) are elided in this view; the comment on 878 says "return 1/0" but
// the visible signature returns bool — presumably stale wording.
878 // return 1 if integer n must be in range [0, max), 0 otherwise.
879 func bounded(n ir.Node, max int64) bool {
880 if n.Type() == nil || !n.Type().IsInteger() {
884 sign := n.Type().IsSigned()
885 bits := int32(8 * n.Type().Size())
// Small constants can be checked directly against max.
887 if ir.IsSmallIntConst(n) {
889 return 0 <= v && v < max
// Masking with a constant bounds the result by the mask value.
893 case ir.OAND, ir.OANDNOT:
894 n := n.(*ir.BinaryExpr)
897 case ir.IsSmallIntConst(n.X):
899 case ir.IsSmallIntConst(n.Y):
901 if n.Op() == ir.OANDNOT {
904 v &= 1<<uint(bits) - 1
908 if 0 <= v && v < max {
// Unsigned modulo by a constant bounds the result below the divisor.
913 n := n.(*ir.BinaryExpr)
914 if !sign && ir.IsSmallIntConst(n.Y) {
915 v := ir.Int64Val(n.Y)
916 if 0 <= v && v <= max {
// Unsigned division by a constant shrinks the effective bit width.
922 n := n.(*ir.BinaryExpr)
923 if !sign && ir.IsSmallIntConst(n.Y) {
924 v := ir.Int64Val(n.Y)
925 for bits > 0 && v >= 2 {
// Unsigned right shift by a constant likewise shrinks the bit width.
932 n := n.(*ir.BinaryExpr)
933 if !sign && ir.IsSmallIntConst(n.Y) {
934 v := ir.Int64Val(n.Y)
// Finally, an unsigned value of few enough bits cannot reach max.
942 if !sign && bits <= 62 && 1<<uint(bits) <= max {
// NOTE(review): interior lines (early returns in the reflect-package
// switch, the dot.Op() switch header, and intervening returns) are
// elided in this view.
949 // usemethod checks calls for uses of Method and MethodByName of reflect.Value,
950 // reflect.Type, reflect.(*rtype), and reflect.(*interfaceType).
951 func usemethod(n *ir.CallExpr) {
952 // Don't mark reflect.(*rtype).Method, etc. themselves in the reflect package.
953 // Those functions may be alive via the itab, which should not cause all methods
954 // alive. We only want to mark their callers.
955 if base.Ctxt.Pkgpath == "reflect" {
956 // TODO: is there a better way than hardcoding the names?
957 switch fn := ir.CurFunc.Nname.Sym().Name; {
958 case fn == "(*rtype).Method", fn == "(*rtype).MethodByName":
960 case fn == "(*interfaceType).Method", fn == "(*interfaceType).MethodByName":
962 case fn == "Value.Method", fn == "Value.MethodByName":
964 // StructOf defines closures that look up methods. They only look up methods
965 // reachable via interfaces. The DCE does not remove such methods. It is ok
966 // to not flag closures in StructOf as ReflectMethods and let the DCE run
967 // even if StructOf is reachable.
969 // (*rtype).MethodByName calls into StructOf so flagging StructOf as
970 // ReflectMethod would disable the DCE even when the name of a method
971 // to look up is a compile-time constant.
972 case strings.HasPrefix(fn, "StructOf.func"):
// The callee must be a selector (method or interface call).
977 dot, ok := n.X.(*ir.SelectorExpr)
982 // looking for either direct method calls and interface method calls of:
983 // reflect.Type.Method - func(int) reflect.Method
984 // reflect.Type.MethodByName - func(string) (reflect.Method, bool)
986 // reflect.Value.Method - func(int) reflect.Value
987 // reflect.Value.MethodByName - func(string) reflect.Value
988 methodName := dot.Sel.Name
989 t := dot.Selection.Type
991 // Check the number of arguments and return values.
992 if t.NumParams() != 1 || (t.NumResults() != 1 && t.NumResults() != 2) {
996 // Check the type of the argument.
997 switch pKind := t.Param(0).Type.Kind(); {
998 case methodName == "Method" && pKind == types.TINT,
999 methodName == "MethodByName" && pKind == types.TSTRING:
1002 // not a call to Method or MethodByName of reflect.{Type,Value}.
1006 // Check that first result type is "reflect.Method" or "reflect.Value".
1007 // Note that we have to check sym name and sym package separately, as
1008 // we can't check for exact string "reflect.Method" reliably
1009 // (e.g., see #19028 and #38515).
1010 switch s := t.Result(0).Type.Sym(); {
1011 case s != nil && types.ReflectSymName(s) == "Method",
1012 s != nil && types.ReflectSymName(s) == "Value":
1015 // not a call to Method or MethodByName of reflect.{Type,Value}.
// Locate the name argument (its position depends on the call form,
// keyed by dot.Op() in the elided switch header).
1019 var targetName ir.Node
1022 if methodName == "MethodByName" {
1023 targetName = n.Args[0]
1026 if methodName == "MethodByName" {
1027 targetName = n.Args[1]
1030 base.FatalfAt(dot.Pos(), "usemethod: unexpected dot.Op() %s", dot.Op())
// A constant name allows precise DCE via a named-method relocation.
1033 if ir.IsConst(targetName, constant.String) {
1034 name := constant.StringVal(targetName.Val())
1036 r := obj.Addrel(ir.CurFunc.LSym)
1037 r.Type = objabi.R_USENAMEDMETHOD
1038 r.Sym = staticdata.StringSymNoCommon(name)
// Otherwise mark the whole function as using reflect methods,
// which disables method DCE conservatively.
1040 ir.CurFunc.SetReflectMethod(true)
1041 // The LSym is initialized at this point. We need to set the attribute on the LSym.
1042 ir.CurFunc.LSym.Set(obj.AttrReflectMethod, true)
// usefield records a use of a tracked struct field (go:"track" note)
// for field-tracking builds, registering its symbol on the current
// function's FieldTrack set.
// NOTE(review): interior lines (the op switch header, field nil check,
// and the outer-type extraction) are elided in this view.
1046 func usefield(n *ir.SelectorExpr) {
// Field tracking is gated behind the FieldTrack experiment.
1047 if !buildcfg.Experiment.FieldTrack {
1053 base.Fatalf("usefield %v", n.Op())
1055 case ir.ODOT, ir.ODOTPTR:
1059 field := n.Selection
1061 base.Fatalf("usefield %v %v without paramfld", n.X.Type(), n.Sel)
1063 if field.Sym != n.Sel {
1064 base.Fatalf("field inconsistency: %v != %v", field.Sym, n.Sel)
// Only fields annotated with go:"track" in their tag are recorded.
1066 if !strings.Contains(field.Note, "go:\"track\"") {
1072 outer = outer.Elem()
1074 if outer.Sym() == nil {
1075 base.Errorf("tracked field must be in named struct type")
1078 sym := reflectdata.TrackSym(outer, field)
1079 if ir.CurFunc.FieldTrack == nil {
1080 ir.CurFunc.FieldTrack = make(map[*obj.LSym]struct{})
1082 ir.CurFunc.FieldTrack[sym] = struct{}{}