1 // Copyright 2009 The Go Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style
3 // license that can be found in the LICENSE file.
12 "cmd/compile/internal/base"
13 "cmd/compile/internal/ir"
14 "cmd/compile/internal/reflectdata"
15 "cmd/compile/internal/staticdata"
16 "cmd/compile/internal/typecheck"
17 "cmd/compile/internal/types"
// walkExpr walks expression n, appending emitted statements to init and
// returning the (possibly replaced) node.
// NOTE(review): non-contiguous listing — left-column line numbers skip;
// intervening original lines (including closing braces) are elided here.
22 // The result of walkExpr MUST be assigned back to n, e.g.
23 // 	n.Left = walkExpr(n.Left, init)
24 func walkExpr(n ir.Node, init *ir.Nodes) ir.Node {
// Guard against walking a node using its own init list as the output list;
// n may be replaced below and the init list would be lost.
29 	if n, ok := n.(ir.InitNode); ok && init == n.PtrInit() {
30 		// not okay to use n->ninit when walking n,
31 		// because we might replace n with some other node
32 		// and would lose the init list.
33 		base.Fatalf("walkExpr init == &n->ninit")
// Hoist n's own init statements out into the caller-supplied init list.
36 	if len(n.Init()) != 0 {
37 		walkStmtList(n.Init())
38 		init.Append(ir.TakeInit(n)...)
43 	if base.Flag.LowerW > 1 {
44 		ir.Dump("before walk expr", n)
// Sanity checks: walking requires a fully typechecked, typed expression.
47 	if n.Typecheck() != 1 {
48 		base.Fatalf("missed typecheck: %+v", n)
51 	if n.Type().IsUntyped() {
52 		base.Fatalf("expression has untyped type: %+v", n)
55 	n = walkExpr1(n, init)
57 	// Eagerly compute sizes of all expressions for the back end.
58 	if typ := n.Type(); typ != nil && typ.Kind() != types.TBLANK && !typ.IsFuncArgStruct() {
61 	if n, ok := n.(*ir.Name); ok && n.Heapaddr != nil {
62 		types.CheckSize(n.Heapaddr.Type())
64 	if ir.IsConst(n, constant.String) {
65 		// Emit string symbol now to avoid emitting
66 		// any concurrently during the backend.
67 		_ = staticdata.StringSym(n.Pos(), constant.StringVal(n.Val()))
70 	if base.Flag.LowerW != 0 && n != nil {
71 		ir.Dump("after walk expr", n)
// walkExpr1 dispatches on n.Op() to a per-op walk helper. Every visible
// case either returns a node or panics; the trailing comment (lines
// 305-307) documents that there is deliberately no fallthrough return.
// NOTE(review): non-contiguous listing — many case labels and closing
// braces between the visible lines are elided from this view.
78 func walkExpr1(n ir.Node, init *ir.Nodes) ir.Node {
// Default arm: unrecognized op is a compiler bug.
82 		base.Fatalf("walkExpr: switch 1 unknown op %+v", n.Op())
85 	case ir.ONONAME, ir.OGETG:
88 	case ir.OTYPE, ir.ONAME, ir.OLITERAL, ir.ONIL, ir.OLINKSYMOFFSET:
89 		// TODO(mdempsky): Just return n; see discussion on CL 38655.
90 		// Perhaps refactor to use Node.mayBeShared for these instead.
91 		// If these return early, make sure to still call
92 		// StringSym for constant strings.
96 		// TODO(mdempsky): Do this right after type checking.
97 		n := n.(*ir.SelectorExpr)
// Simple unary ops: walk the operand in place.
100 	case ir.ONOT, ir.ONEG, ir.OPLUS, ir.OBITNOT, ir.OREAL, ir.OIMAG, ir.OSPTR, ir.OITAB, ir.OIDATA:
101 		n := n.(*ir.UnaryExpr)
102 		n.X = walkExpr(n.X, init)
105 	case ir.ODOTMETH, ir.ODOTINTER:
106 		n := n.(*ir.SelectorExpr)
107 		n.X = walkExpr(n.X, init)
111 		n := n.(*ir.AddrExpr)
112 		n.X = walkExpr(n.X, init)
116 		n := n.(*ir.StarExpr)
117 		n.X = walkExpr(n.X, init)
// Simple binary ops: walk both operands in place.
120 	case ir.OEFACE, ir.OAND, ir.OANDNOT, ir.OSUB, ir.OMUL, ir.OADD, ir.OOR, ir.OXOR, ir.OLSH, ir.ORSH:
121 		n := n.(*ir.BinaryExpr)
122 		n.X = walkExpr(n.X, init)
123 		n.Y = walkExpr(n.Y, init)
126 	case ir.ODOT, ir.ODOTPTR:
127 		n := n.(*ir.SelectorExpr)
128 		return walkDot(n, init)
130 	case ir.ODOTTYPE, ir.ODOTTYPE2:
131 		n := n.(*ir.TypeAssertExpr)
132 		return walkDotType(n, init)
134 	case ir.OLEN, ir.OCAP:
135 		n := n.(*ir.UnaryExpr)
136 		return walkLenCap(n, init)
139 		n := n.(*ir.BinaryExpr)
140 		n.X = walkExpr(n.X, init)
141 		n.Y = walkExpr(n.Y, init)
144 	case ir.OEQ, ir.ONE, ir.OLT, ir.OLE, ir.OGT, ir.OGE:
145 		n := n.(*ir.BinaryExpr)
146 		return walkCompare(n, init)
148 	case ir.OANDAND, ir.OOROR:
149 		n := n.(*ir.LogicalExpr)
150 		return walkLogical(n, init)
152 	case ir.OPRINT, ir.OPRINTN:
153 		return walkPrint(n.(*ir.CallExpr), init)
// panic(x) lowers to a runtime gopanic call.
156 		n := n.(*ir.UnaryExpr)
157 		return mkcall("gopanic", nil, init, n.X)
160 		return walkRecover(n.(*ir.CallExpr), init)
165 	case ir.OCALLINTER, ir.OCALLFUNC, ir.OCALLMETH:
166 		n := n.(*ir.CallExpr)
167 		return walkCall(n, init)
169 	case ir.OAS, ir.OASOP:
170 		return walkAssign(init, n)
173 		n := n.(*ir.AssignListStmt)
174 		return walkAssignList(init, n)
178 		n := n.(*ir.AssignListStmt)
179 		return walkAssignFunc(init, n)
182 		// order.stmt made sure x is addressable or blank.
184 		n := n.(*ir.AssignListStmt)
185 		return walkAssignRecv(init, n)
189 		n := n.(*ir.AssignListStmt)
190 		return walkAssignMapRead(init, n)
193 		n := n.(*ir.CallExpr)
194 		return walkDelete(init, n)
197 		n := n.(*ir.AssignListStmt)
198 		return walkAssignDotType(n, init)
201 		n := n.(*ir.ConvExpr)
202 		return walkConvInterface(n, init)
204 	case ir.OCONV, ir.OCONVNOP:
205 		n := n.(*ir.ConvExpr)
206 		return walkConv(n, init)
208 	case ir.ODIV, ir.OMOD:
209 		n := n.(*ir.BinaryExpr)
210 		return walkDivMod(n, init)
213 		n := n.(*ir.IndexExpr)
214 		return walkIndex(n, init)
217 		n := n.(*ir.IndexExpr)
218 		return walkIndexMap(n, init)
// Bare receives are rewritten by an earlier pass; seeing one here is a bug.
221 		base.Fatalf("walkExpr ORECV") // should see inside OAS only
224 	case ir.OSLICEHEADER:
225 		n := n.(*ir.SliceHeaderExpr)
226 		return walkSliceHeader(n, init)
228 	case ir.OSLICE, ir.OSLICEARR, ir.OSLICESTR, ir.OSLICE3, ir.OSLICE3ARR:
229 		n := n.(*ir.SliceExpr)
230 		return walkSlice(n, init)
233 		n := n.(*ir.UnaryExpr)
234 		return walkNew(n, init)
237 		return walkAddString(n.(*ir.AddStringExpr), init)
240 		// order should make sure we only see OAS(node, OAPPEND), which we handle above.
241 		base.Fatalf("append outside assignment")
245 		return walkCopy(n.(*ir.BinaryExpr), init, base.Flag.Cfg.Instrumenting && !base.Flag.CompilingRuntime)
248 		n := n.(*ir.UnaryExpr)
249 		return walkClose(n, init)
252 		n := n.(*ir.MakeExpr)
253 		return walkMakeChan(n, init)
256 		n := n.(*ir.MakeExpr)
257 		return walkMakeMap(n, init)
260 		n := n.(*ir.MakeExpr)
261 		return walkMakeSlice(n, init)
263 	case ir.OMAKESLICECOPY:
264 		n := n.(*ir.MakeExpr)
265 		return walkMakeSliceCopy(n, init)
268 		n := n.(*ir.ConvExpr)
269 		return walkRuneToString(n, init)
271 	case ir.OBYTES2STR, ir.ORUNES2STR:
272 		n := n.(*ir.ConvExpr)
273 		return walkBytesRunesToString(n, init)
275 	case ir.OBYTES2STRTMP:
276 		n := n.(*ir.ConvExpr)
277 		return walkBytesToStringTemp(n, init)
280 		n := n.(*ir.ConvExpr)
281 		return walkStringToBytes(n, init)
283 	case ir.OSTR2BYTESTMP:
284 		n := n.(*ir.ConvExpr)
285 		return walkStringToBytesTemp(n, init)
288 		n := n.(*ir.ConvExpr)
289 		return walkStringToRunes(n, init)
291 	case ir.OARRAYLIT, ir.OSLICELIT, ir.OMAPLIT, ir.OSTRUCTLIT, ir.OPTRLIT:
292 		return walkCompLit(n, init)
295 		n := n.(*ir.SendStmt)
296 		return walkSend(n, init)
299 		return walkClosure(n.(*ir.ClosureExpr), init)
302 		return walkCallPart(n.(*ir.SelectorExpr), init)
305 	// No return! Each case must return (or panic),
306 	// to avoid confusion about what gets returned
307 	// in the presence of type assertions.
// walkExprList walks each expression in s in place, appending side
// effects to init.
// NOTE(review): the loop header (original line 316) is elided from
// this listing; only the loop body is visible.
310 // walk the whole tree of the body of an
311 // expression or simple statement.
312 // the types expressions are calculated.
313 // compile-time constants are evaluated.
314 // complex side effects like statements are appended to init
315 func walkExprList(s []ir.Node, init *ir.Nodes) {
317 		s[i] = walkExpr(s[i], init)
// walkExprListCheap makes each expression in s cheap (side effects moved
// to init), then walks it in place.
321 func walkExprListCheap(s []ir.Node, init *ir.Nodes) {
322 	for i, n := range s {
323 		s[i] = cheapExpr(n, init)
324 		s[i] = walkExpr(s[i], init)
// walkExprListSafe makes each expression in s safe (side-effect free,
// assignability preserved — see safeExpr), then walks it in place.
328 func walkExprListSafe(s []ir.Node, init *ir.Nodes) {
329 	for i, n := range s {
330 		s[i] = safeExpr(n, init)
331 		s[i] = walkExpr(s[i], init)
// NOTE(review): non-contiguous listing — the switch header and case
// bodies between the visible lines are elided.
335 // return side-effect free and cheap n, appending side effects to init.
336 // result may not be assignable.
337 func cheapExpr(n ir.Node, init *ir.Nodes) ir.Node {
// Names and constants are already cheap; everything else (per the
// visible fallthrough) is copied into a temporary.
339 	case ir.ONAME, ir.OLITERAL, ir.ONIL:
343 	return copyExpr(n, n.Type(), init)
// NOTE(review): non-contiguous listing — several case labels, the early
// returns for unchanged operands, and closing braces are elided.
346 // return side effect-free n, appending side effects to init.
347 // result is assignable if n is.
348 func safeExpr(n ir.Node, init *ir.Nodes) ir.Node {
// Hoist n's own init statements into the caller's init list first.
353 	if len(n.Init()) != 0 {
354 		walkStmtList(n.Init())
355 		init.Append(ir.TakeInit(n)...)
// Leaf nodes are already safe.
359 	case ir.ONAME, ir.OLITERAL, ir.ONIL, ir.OLINKSYMOFFSET:
// For each composite case below: recursively make the operand(s) safe,
// then (when an operand changed) rebuild via ir.Copy + re-typecheck + walk.
362 	case ir.OLEN, ir.OCAP:
363 		n := n.(*ir.UnaryExpr)
364 		l := safeExpr(n.X, init)
368 		a := ir.Copy(n).(*ir.UnaryExpr)
370 		return walkExpr(typecheck.Expr(a), init)
372 	case ir.ODOT, ir.ODOTPTR:
373 		n := n.(*ir.SelectorExpr)
374 		l := safeExpr(n.X, init)
378 		a := ir.Copy(n).(*ir.SelectorExpr)
380 		return walkExpr(typecheck.Expr(a), init)
383 		n := n.(*ir.StarExpr)
384 		l := safeExpr(n.X, init)
388 		a := ir.Copy(n).(*ir.StarExpr)
390 		return walkExpr(typecheck.Expr(a), init)
392 	case ir.OINDEX, ir.OINDEXMAP:
393 		n := n.(*ir.IndexExpr)
394 		l := safeExpr(n.X, init)
395 		r := safeExpr(n.Index, init)
396 		if l == n.X && r == n.Index {
399 		a := ir.Copy(n).(*ir.IndexExpr)
402 		return walkExpr(typecheck.Expr(a), init)
404 	case ir.OSTRUCTLIT, ir.OARRAYLIT, ir.OSLICELIT:
405 		n := n.(*ir.CompLitExpr)
406 		if isStaticCompositeLiteral(n) {
411 	// make a copy; must not be used as an lvalue
412 	if ir.IsAddressable(n) {
413 		base.Fatalf("missing lvalue case in safeExpr: %v", n)
415 	return cheapExpr(n, init)
// copyExpr copies n into a fresh temporary of type t (assignment appended
// to init) and — presumably — returns the temporary; the return statement
// is elided from this listing.
418 func copyExpr(n ir.Node, t *types.Type, init *ir.Nodes) ir.Node {
419 	l := typecheck.Temp(t)
420 	appendWalkStmt(init, ir.NewAssignStmt(base.Pos, l, n))
// walkAddString lowers string concatenation (OADDSTR) to a runtime
// concatstring* call, optionally using a stack buffer for non-escaping
// results.
// NOTE(review): non-contiguous listing — the count computation, the
// fn selection branches, and the final return are elided.
424 func walkAddString(n *ir.AddStringExpr, init *ir.Nodes) ir.Node {
428 		base.Fatalf("walkAddString count %d too small", c)
431 	buf := typecheck.NodNil()
// Non-escaping result: sum the sizes of constant operands to decide
// whether a fixed-size stack buffer can hold the result.
432 	if n.Esc() == ir.EscNone {
434 		for _, n1 := range n.List {
435 			if n1.Op() == ir.OLITERAL {
436 				sz += int64(len(ir.StringVal(n1)))
440 		// Don't allocate the buffer if the result won't fit.
441 		if sz < tmpstringbufsize {
442 			// Create temporary buffer for result string on stack.
443 			buf = stackBufAddr(tmpstringbufsize, types.Types[types.TUINT8])
447 	// build list of string arguments
448 	args := []ir.Node{buf}
449 	for _, n2 := range n.List {
450 		args = append(args, typecheck.Conv(n2, types.Types[types.TSTRING]))
455 		// small numbers of strings use direct runtime helpers.
456 		// note: order.expr knows this cutoff too.
457 		fn = fmt.Sprintf("concatstring%d", c)
459 		// large numbers of strings are passed to the runtime as a slice.
462 		t := types.NewSlice(types.Types[types.TSTRING])
463 		// args[1:] to skip buf arg
464 		slice := ir.NewCompLitExpr(base.Pos, ir.OCOMPLIT, ir.TypeNode(t), args[1:])
465 		slice.Prealloc = n.Prealloc
466 		args = []ir.Node{buf, slice}
467 		slice.SetEsc(ir.EscNone)
470 	cat := typecheck.LookupRuntime(fn)
471 	r := ir.NewCallExpr(base.Pos, ir.OCALL, cat, nil)
473 	r1 := typecheck.Expr(r)
474 	r1 = walkExpr(r1, init)
// NOTE(review): non-contiguous listing — the usemethod call, the closure
// handling body, and the final walkCall1 dispatch are elided.
480 // walkCall walks an OCALLFUNC, OCALLINTER, or OCALLMETH node.
481 func walkCall(n *ir.CallExpr, init *ir.Nodes) ir.Node {
482 	if n.Op() == ir.OCALLINTER || n.Op() == ir.OCALLMETH {
483 		// We expect both interface call reflect.Type.Method and concrete
484 		// call reflect.(*rtype).Method.
// Interface calls record the used method for linker dead-code analysis.
487 	if n.Op() == ir.OCALLINTER {
488 		reflectdata.MarkUsedIfaceMethod(n)
491 	if n.Op() == ir.OCALLFUNC && n.X.Op() == ir.OCLOSURE {
// walkCall1 walks a call's callee and arguments, rewriting method calls
// to plain function calls and spilling call-containing arguments into
// temporaries.
// NOTE(review): non-contiguous listing — the walked-marker, the receiver
// insertion, and the temp-substitution tail are elided.
499 func walkCall1(n *ir.CallExpr, init *ir.Nodes) {
501 		return // already walked
505 	// If this is a method call t.M(...),
506 	// rewrite into a function call T.M(t, ...).
507 	// TODO(mdempsky): Do this right after type checking.
508 	if n.Op() == ir.OCALLMETH {
// Prepend the receiver to the argument list and retarget the call at
// the method expression T.M.
509 		withRecv := make([]ir.Node, len(n.Args)+1)
510 		dot := n.X.(*ir.SelectorExpr)
512 		copy(withRecv[1:], n.Args)
515 		dot = ir.NewSelectorExpr(dot.Pos(), ir.OXDOT, ir.TypeNode(dot.X.Type()), dot.Selection.Sym)
517 		n.SetOp(ir.OCALLFUNC)
518 		n.X = typecheck.Expr(dot)
522 	params := n.X.Type().Params()
524 	n.X = walkExpr(n.X, init)
525 	walkExprList(args, init)
527 	for i, arg := range args {
528 		// Validate argument and parameter types match.
529 		param := params.Field(i)
530 		if !types.Identical(arg.Type(), param.Type) {
531 			base.FatalfAt(n.Pos(), "assigning %L to parameter %v (type %v)", arg, param.Sym, param.Type)
534 		// For any argument whose evaluation might require a function call,
535 		// store that argument into a temporary variable,
536 		// to prevent that calls from clobbering arguments already on the stack.
538 		// assignment of arg to Temp
539 		tmp := typecheck.Temp(param.Type)
540 		init.Append(convas(typecheck.Stmt(ir.NewAssignStmt(base.Pos, tmp, arg)).(*ir.AssignStmt), init))
541 		// replace arg with temp
// NOTE(review): non-contiguous listing — the early returns, the signed /
// unsigned constant branches, and the runtime-function name selection
// are partially elided.
549 // walkDivMod walks an ODIV or OMOD node.
550 func walkDivMod(n *ir.BinaryExpr, init *ir.Nodes) ir.Node {
551 	n.X = walkExpr(n.X, init)
552 	n.Y = walkExpr(n.Y, init)
554 	// rewrite complex div into function call.
555 	et := n.X.Type().Kind()
557 	if types.IsComplex[et] && n.Op() == ir.ODIV {
// All complex divisions are widened to complex128 for the runtime call,
// then converted back to the original type t.
559 		call := mkcall("complex128div", types.Types[types.TCOMPLEX128], init, typecheck.Conv(n.X, types.Types[types.TCOMPLEX128]), typecheck.Conv(n.Y, types.Types[types.TCOMPLEX128]))
560 		return typecheck.Conv(call, t)
563 	// Nothing to do for float divisions.
564 	if types.IsFloat[et] {
568 	// rewrite 64-bit div and mod on 32-bit architectures.
569 	// TODO: Remove this code once we can introduce
570 	// runtime calls late in SSA processing.
571 	if types.RegSize < 8 && (et == types.TINT64 || et == types.TUINT64) {
572 		if n.Y.Op() == ir.OLITERAL {
573 			// Leave div/mod by constant powers of 2 or small 16-bit constants.
574 			// The SSA backend will handle those.
577 				c := ir.Int64Val(n.Y)
// c&(c-1) == 0 tests for a power of two.
581 				if c != 0 && c&(c-1) == 0 {
585 				c := ir.Uint64Val(n.Y)
589 				if c != 0 && c&(c-1) == 0 {
595 		if et == types.TINT64 {
600 		if n.Op() == ir.ODIV {
605 		return mkcall(fn, n.Type(), init, typecheck.Conv(n.X, types.Types[et]), typecheck.Conv(n.Y, types.Types[et]))
// NOTE(review): the usefield call and return (between the visible lines
// and the function end) are elided from this listing.
610 // walkDot walks an ODOT or ODOTPTR node.
611 func walkDot(n *ir.SelectorExpr, init *ir.Nodes) ir.Node {
613 	n.X = walkExpr(n.X, init)
617 // walkDotType walks an ODOTTYPE or ODOTTYPE2 node.
618 func walkDotType(n *ir.TypeAssertExpr, init *ir.Nodes) ir.Node {
619 	n.X = walkExpr(n.X, init)
620 	// Set up interface type addresses for back end.
// Only concrete-type assertions from non-empty interfaces need an itab.
621 	if !n.Type().IsInterface() && !n.X.Type().IsEmptyInterface() {
622 		n.Itab = reflectdata.ITabAddr(n.Type(), n.X.Type())
// NOTE(review): non-contiguous listing — the declarations of r and t and
// several branch/closing lines are elided; r is presumably the saved
// pre-walk index and t the indexed type — confirm against full source.
627 // walkIndex walks an OINDEX node.
628 func walkIndex(n *ir.IndexExpr, init *ir.Nodes) ir.Node {
629 	n.X = walkExpr(n.X, init)
631 	// save the original node for bounds checking elision.
632 	// If it was a ODIV/OMOD walk might rewrite it.
635 	n.Index = walkExpr(n.Index, init)
637 	// if range of type cannot exceed static array bound,
638 	// disable bounds check.
643 	if t != nil && t.IsPtr() {
647 		n.SetBounded(bounded(r, t.NumElem()))
648 		if base.Flag.LowerM != 0 && n.Bounded() && !ir.IsConst(n.Index, constant.Int) {
649 			base.Warn("index bounds check elided")
651 		if ir.IsSmallIntConst(n.Index) && !n.Bounded() {
652 			base.Errorf("index out of bounds")
// Constant-string indexing gets the same bounded/elision treatment,
// using the string length as the bound.
654 	} else if ir.IsConst(n.X, constant.String) {
655 		n.SetBounded(bounded(r, int64(len(ir.StringVal(n.X)))))
656 		if base.Flag.LowerM != 0 && n.Bounded() && !ir.IsConst(n.Index, constant.Int) {
657 			base.Warn("index bounds check elided")
659 		if ir.IsSmallIntConst(n.Index) && !n.Bounded() {
660 			base.Errorf("index out of bounds")
// Constant indices that are negative or overflow int are always errors.
664 	if ir.IsConst(n.Index, constant.Int) {
665 		if v := n.Index.Val(); constant.Sign(v) < 0 || ir.ConstOverflow(v, types.Types[types.TINT]) {
666 			base.Errorf("index out of bounds")
// NOTE(review): non-contiguous listing — the declarations of t, map_,
// key, and fast, plus several switch/brace lines, are elided.
672 // walkIndexMap walks an OINDEXMAP node.
673 func walkIndexMap(n *ir.IndexExpr, init *ir.Nodes) ir.Node {
674 	// Replace m[k] with *map{access1,assign}(maptype, m, &k)
675 	n.X = walkExpr(n.X, init)
676 	n.Index = walkExpr(n.Index, init)
680 	var call *ir.CallExpr
682 		// This m[k] expression is on the left-hand side of an assignment.
686 			// standard version takes key by reference.
687 			// order.expr made sure key is addressable.
688 			key = typecheck.NodAddr(key)
689 		case mapfast32ptr, mapfast64ptr:
690 			// pointer version takes pointer key.
691 			key = ir.NewConvExpr(n.Pos(), ir.OCONVNOP, types.Types[types.TUNSAFEPTR], key)
693 		call = mkcall1(mapfn(mapassign[fast], t, false), nil, init, reflectdata.TypePtr(t), map_, key)
695 		// m[k] is not the target of an assignment.
698 			// standard version takes key by reference.
699 			// order.expr made sure key is addressable.
700 			key = typecheck.NodAddr(key)
// Small elements use mapaccess1; larger ones use the "fat" variant,
// which takes a pointer to a shared zero value.
703 		if w := t.Elem().Width; w <= zeroValSize {
704 			call = mkcall1(mapfn(mapaccess1[fast], t, false), types.NewPtr(t.Elem()), init, reflectdata.TypePtr(t), map_, key)
706 			z := reflectdata.ZeroAddr(w)
707 			call = mkcall1(mapfn("mapaccess1_fat", t, true), types.NewPtr(t.Elem()), init, reflectdata.TypePtr(t), map_, key, z)
// The runtime returns *elem; dereference it to produce the element value.
710 	call.SetType(types.NewPtr(t.Elem()))
711 	call.MarkNonNil() // mapaccess1* and mapassign always return non-nil pointers.
712 	star := ir.NewStarExpr(base.Pos, call)
713 	star.SetType(t.Elem())
// NOTE(review): the declaration of ll (the side-effect list for the
// right operand) and the return are elided from this listing.
718 // walkLogical walks an OANDAND or OOROR node.
719 func walkLogical(n *ir.LogicalExpr, init *ir.Nodes) ir.Node {
720 	n.X = walkExpr(n.X, init)
722 	// cannot put side effects from n.Right on init,
723 	// because they cannot run before n.Left is checked.
724 	// save elsewhere and store on the eventual n.Right.
727 	n.Y = walkExpr(n.Y, &ll)
728 	n.Y = ir.InitExpr(ll, n.Y)
// NOTE(review): the line binding n1 (presumably n.Value) is elided.
732 // walkSend walks an OSEND node.
733 func walkSend(n *ir.SendStmt, init *ir.Nodes) ir.Node {
// Convert the value to the channel's element type, then pass its
// address to the runtime chansend1 helper.
735 	n1 = typecheck.AssignConv(n1, n.Chan.Type().Elem(), "chan send")
736 	n1 = walkExpr(n1, init)
737 	n1 = typecheck.NodAddr(n1)
738 	return mkcall1(chanfn("chansend1", 2, n.Chan.Type()), nil, init, n.Chan, n1)
// NOTE(review): non-contiguous listing — several if/else and brace lines
// between the visible statements are elided.
741 // walkSlice walks an OSLICE, OSLICEARR, OSLICESTR, OSLICE3, or OSLICE3ARR node.
742 func walkSlice(n *ir.SliceExpr, init *ir.Nodes) ir.Node {
// checkptr instrumentation applies to 3-index slices of unsafe.Pointer
// conversions.
744 	checkSlice := ir.ShouldCheckPtr(ir.CurFunc, 1) && n.Op() == ir.OSLICE3ARR && n.X.Op() == ir.OCONVNOP && n.X.(*ir.ConvExpr).X.Type().IsUnsafePtr()
746 		conv := n.X.(*ir.ConvExpr)
747 		conv.X = walkExpr(conv.X, init)
749 		n.X = walkExpr(n.X, init)
752 	n.Low = walkExpr(n.Low, init)
753 	if n.Low != nil && ir.IsZero(n.Low) {
754 		// Reduce x[0:j] to x[:j] and x[0:j:k] to x[:j:k].
757 	n.High = walkExpr(n.High, init)
758 	n.Max = walkExpr(n.Max, init)
760 		n.X = walkCheckPtrAlignment(n.X.(*ir.ConvExpr), init, n.Max)
763 	if n.Op().IsSlice3() {
764 		if n.Max != nil && n.Max.Op() == ir.OCAP && ir.SameSafeExpr(n.X, n.Max.(*ir.UnaryExpr).X) {
765 			// Reduce x[i:j:cap(x)] to x[i:j].
766 			if n.Op() == ir.OSLICE3 {
769 				n.SetOp(ir.OSLICEARR)
771 			return reduceSlice(n)
775 	return reduceSlice(n)
778 // walkSliceHeader walks an OSLICEHEADER node.
// All three components (pointer, length, capacity) are walked in place;
// the return statement is elided from this listing.
779 func walkSliceHeader(n *ir.SliceHeaderExpr, init *ir.Nodes) ir.Node {
780 	n.Ptr = walkExpr(n.Ptr, init)
781 	n.Len = walkExpr(n.Len, init)
782 	n.Cap = walkExpr(n.Cap, init)
// reduceSlice simplifies slice expressions with redundant bounds.
// NOTE(review): non-contiguous listing — the High-clearing assignment
// and both returns are elided.
786 // TODO(josharian): combine this with its caller and simplify
787 func reduceSlice(n *ir.SliceExpr) ir.Node {
788 	if n.High != nil && n.High.Op() == ir.OLEN && ir.SameSafeExpr(n.X, n.High.(*ir.UnaryExpr).X) {
789 		// Reduce x[i:len(x)] to x[i:].
// A full-range two-index slice of a slice or string is a no-op and can
// be replaced by the operand itself.
792 	if (n.Op() == ir.OSLICE || n.Op() == ir.OSLICESTR) && n.Low == nil && n.High == nil {
794 		if base.Debug.Slice > 0 {
795 			base.Warn("slice: omit slice operation")
// bounded reports whether integer expression n is provably in [0, max).
// NOTE(review): non-contiguous listing — the switch header, several
// returns, and the op labels for the %, /, >> cases are elided.
802 // return 1 if integer n must be in range [0, max), 0 otherwise
803 func bounded(n ir.Node, max int64) bool {
804 	if n.Type() == nil || !n.Type().IsInteger() {
808 	sign := n.Type().IsSigned()
809 	bits := int32(8 * n.Type().Width)
811 	if ir.IsSmallIntConst(n) {
813 		return 0 <= v && v < max
// Masking (AND / ANDNOT) with a constant bounds the result by the mask.
817 	case ir.OAND, ir.OANDNOT:
818 		n := n.(*ir.BinaryExpr)
821 		case ir.IsSmallIntConst(n.X):
823 		case ir.IsSmallIntConst(n.Y):
825 		if n.Op() == ir.OANDNOT {
828 			v &= 1<<uint(bits) - 1
832 		if 0 <= v && v < max {
// Unsigned x % c is bounded by c (note <= max here: the remainder is
// strictly less than c, so c itself may equal max).
837 		n := n.(*ir.BinaryExpr)
838 		if !sign && ir.IsSmallIntConst(n.Y) {
839 			v := ir.Int64Val(n.Y)
840 			if 0 <= v && v <= max {
// Unsigned division by a constant narrows the effective bit width.
846 		n := n.(*ir.BinaryExpr)
847 		if !sign && ir.IsSmallIntConst(n.Y) {
848 			v := ir.Int64Val(n.Y)
849 			for bits > 0 && v >= 2 {
// Right shift by a constant likewise narrows the effective bit width.
856 		n := n.(*ir.BinaryExpr)
857 		if !sign && ir.IsSmallIntConst(n.Y) {
858 			v := ir.Int64Val(n.Y)
866 	if !sign && bits <= 62 && 1<<uint(bits) <= max {
// NOTE(review): non-contiguous listing — the binding of t (presumably
// the callee's function type) and several early returns are elided.
873 // usemethod checks interface method calls for uses of reflect.Type.Method.
874 func usemethod(n *ir.CallExpr) {
877 	// Looking for either of:
878 	//	Method(int) reflect.Method
879 	//	MethodByName(string) (reflect.Method, bool)
881 	// TODO(crawshaw): improve precision of match by working out
882 	// how to check the method name.
// Shape filter: exactly one parameter, one or two results.
883 	if n := t.NumParams(); n != 1 {
886 	if n := t.NumResults(); n != 1 && n != 2 {
889 	p0 := t.Params().Field(0)
890 	res0 := t.Results().Field(0)
891 	var res1 *types.Field
892 	if t.NumResults() == 2 {
893 		res1 = t.Results().Field(1)
// One-result form must take an int (Method); two-result form must take
// a string and return (_, bool) (MethodByName).
897 		if p0.Type.Kind() != types.TINT {
901 		if !p0.Type.IsString() {
904 		if !res1.Type.IsBoolean() {
909 	// Don't mark reflect.(*rtype).Method, etc. themselves in the reflect package.
910 	// Those functions may be alive via the itab, which should not cause all methods
911 	// alive. We only want to mark their callers.
912 	if base.Ctxt.Pkgpath == "reflect" {
913 		switch ir.CurFunc.Nname.Sym().Name { // TODO: is there a better way than hardcoding the names?
914 		case "(*rtype).Method", "(*rtype).MethodByName", "(*interfaceType).Method", "(*interfaceType).MethodByName":
919 	// Note: Don't rely on res0.Type.String() since its formatting depends on multiple factors
920 	// (including global variables such as numImports - was issue #19028).
921 	// Also need to check for reflect package itself (see Issue #38515).
922 	if s := res0.Type.Sym(); s != nil && s.Name == "Method" && types.IsReflectPkg(s.Pkg) {
923 		ir.CurFunc.SetReflectMethod(true)
924 		// The LSym is initialized at this point. We need to set the attribute on the LSym.
925 		ir.CurFunc.LSym.Set(obj.AttrReflectMethod, true)
// usefield records a use of a go:"track"-annotated struct field in the
// current function's FieldTrack set (for linker field tracking).
// NOTE(review): non-contiguous listing — the bindings of field and
// outer, plus several returns and braces, are elided.
929 func usefield(n *ir.SelectorExpr) {
// Field tracking is gated behind the FieldTrack experiment.
930 	if !objabi.Experiment.FieldTrack {
936 		base.Fatalf("usefield %v", n.Op())
938 	case ir.ODOT, ir.ODOTPTR:
944 		base.Fatalf("usefield %v %v without paramfld", n.X.Type(), n.Sel)
946 	if field.Sym != n.Sel {
947 		base.Fatalf("field inconsistency: %v != %v", field.Sym, n.Sel)
// Only fields tagged go:"track" are recorded.
949 	if !strings.Contains(field.Note, "go:\"track\"") {
957 	if outer.Sym() == nil {
958 		base.Errorf("tracked field must be in named struct type")
960 	if !types.IsExported(field.Sym.Name) {
961 		base.Errorf("tracked field must be exported (upper case)")
964 	sym := reflectdata.TrackSym(outer, field)
965 	if ir.CurFunc.FieldTrack == nil {
966 		ir.CurFunc.FieldTrack = make(map[*obj.LSym]struct{})
968 	ir.CurFunc.FieldTrack[sym] = struct{}{}