1 // Copyright 2011 The Go Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style
3 // license that can be found in the LICENSE file.
5 // The inlining facility makes 2 passes: first caninl determines which
6 // functions are suitable for inlining, and for those that are it
7 // saves a copy of the body. Then InlineCalls walks each function body to
8 // expand calls to inlinable functions.
10 // The Debug.l flag controls the aggressiveness. Note that main() swaps level 0 and 1,
11 // making 1 the default and -l disable. Additional levels (beyond -l) may be buggy and
14 // 1: 80-nodes leaf functions, oneliners, panic, lazy typechecking (default)
17 // 4: allow non-leaf functions
19 // At some point this may get another default and become switch-offable with -N.
21 // The -d typecheckinl flag enables early typechecking of all imported bodies,
22 // which is useful to flush out bugs.
24 // The Debug.m flag enables diagnostic output. A single -m is useful for verifying
25 // which calls get inlined or not, more is for debugging, and may go away at any point.
34 "cmd/compile/internal/base"
35 "cmd/compile/internal/ir"
36 "cmd/compile/internal/logopt"
37 "cmd/compile/internal/typecheck"
38 "cmd/compile/internal/types"
43 // Inlining budget parameters, gathered in one place
// NOTE(review): this listing elides lines here (the const-block opener and the
// inlineMaxBudget declaration referenced below are not visible) — confirm against the full file.
46 inlineExtraAppendCost = 0
47 // default is to inline if there's at most one call. -l=4 overrides this by using 1 instead.
48 inlineExtraCallCost = 57 // 57 was benchmarked to provide most benefit with no bad surprises; see https://github.com/golang/go/issues/19348#issuecomment-439370742
49 inlineExtraPanicCost = 1 // do not penalize inlining panics.
50 inlineExtraThrowCost = inlineMaxBudget // with current (2018-05/1.11) code, inlining runtime.throw does not help.
52 inlineBigFunctionNodes = 5000 // Functions with this many nodes are considered "big".
53 inlineBigFunctionMaxCost = 20 // Max cost of inlinee when inlining into a "big" function.
56 // InlinePackage finds functions that can be inlined and clones them before walk expands them.
57 func InlinePackage() {
58 ir.VisitFuncsBottomUp(typecheck.Target.Decls, func(list []*ir.Func, recursive bool) {
59 numfns := numNonClosures(list)
60 for _, n := range list {
61 if !recursive || numfns > 1 {
62 // We allow inlining if there is no
63 // recursion, or the recursion cycle is
64 // across more than one function.
// NOTE(review): lines elided in this listing — presumably the CanInline(n) call
// and the else branch opener sit between here and the diagnostic below; verify in the full file.
67 if base.Flag.LowerM > 1 {
68 fmt.Printf("%v: cannot inline %v: recursive\n", ir.Line(n), n.Nname)
76 // CanInline determines whether fn is inlineable.
77 // If so, CanInline saves copies of fn.Body and fn.Dcl in fn.Inl.
78 // fn and fn.Body will already have been typechecked.
79 func CanInline(fn *ir.Func) {
// NOTE(review): interior lines are elided throughout this listing (guards,
// closing braces, and the deferred "reject" reporting hookup are not all visible).
81 base.Fatalf("CanInline no nname %+v", fn)
84 var reason string // reason, if any, that the function was not inlined
85 if base.Flag.LowerM > 1 || logopt.Enabled() {
88 if base.Flag.LowerM > 1 {
89 fmt.Printf("%v: cannot inline %v: %s\n", ir.Line(fn), fn.Nname, reason)
92 logopt.LogOpt(fn.Pos(), "cannotInlineFunction", "inline", ir.FuncName(fn), reason)
98 // If marked "go:noinline", don't inline
99 if fn.Pragma&ir.Noinline != 0 {
100 reason = "marked go:noinline"
104 // If marked "go:norace" and -race compilation, don't inline.
105 if base.Flag.Race && fn.Pragma&ir.Norace != 0 {
106 reason = "marked go:norace with -race compilation"
110 // If marked "go:nocheckptr" and -d checkptr compilation, don't inline.
111 if base.Debug.Checkptr != 0 && fn.Pragma&ir.NoCheckPtr != 0 {
112 reason = "marked go:nocheckptr"
116 // If marked "go:cgo_unsafe_args", don't inline, since the
117 // function makes assumptions about its argument frame layout.
118 if fn.Pragma&ir.CgoUnsafeArgs != 0 {
119 reason = "marked go:cgo_unsafe_args"
123 // If marked as "go:uintptrescapes", don't inline, since the
124 // escape information is lost during inlining.
125 if fn.Pragma&ir.UintptrEscapes != 0 {
126 reason = "marked as having an escaping uintptr argument"
130 // The nowritebarrierrec checker currently works at function
131 // granularity, so inlining yeswritebarrierrec functions can
132 // confuse it (#22342). As a workaround, disallow inlining
134 if fn.Pragma&ir.Yeswritebarrierrec != 0 {
135 reason = "marked go:yeswritebarrierrec"
139 // If fn has no body (is defined outside of Go), cannot inline it.
140 if len(fn.Body) == 0 {
141 reason = "no function body"
145 if fn.Typecheck() == 0 {
146 base.Fatalf("CanInline on non-typechecked function %v", fn)
150 if n.Func.InlinabilityChecked() {
153 defer n.Func.SetInlinabilityChecked(true)
155 cc := int32(inlineExtraCallCost)
156 if base.Flag.LowerL == 4 {
157 cc = 1 // this appears to yield better performance than 0.
160 // At this point in the game the function we're looking at may
161 // have "stale" autos, vars that still appear in the Dcl list, but
162 // which no longer have any uses in the function body (due to
163 // elimination by deadcode). We'd like to exclude these dead vars
164 // when creating the "Inline.Dcl" field below; to accomplish this,
165 // the hairyVisitor below builds up a map of used/referenced
166 // locals, and we use this map to produce a pruned Inline.Dcl
167 // list. See issue 25249 for more context.
169 visitor := hairyVisitor{
170 budget: inlineMaxBudget,
173 if visitor.tooHairy(fn) {
174 reason = visitor.reason
178 n.Func.Inl = &ir.Inline{
179 Cost: inlineMaxBudget - visitor.budget,
180 Dcl: pruneUnusedAutos(n.Defn.(*ir.Func).Dcl, &visitor),
181 Body: inlcopylist(fn.Body),
183 CanDelayResults: canDelayResults(fn),
186 if base.Flag.LowerM > 1 {
187 fmt.Printf("%v: can inline %v with cost %d as: %v { %v }\n", ir.Line(fn), n, inlineMaxBudget-visitor.budget, fn.Type(), ir.Nodes(n.Func.Inl.Body))
188 } else if base.Flag.LowerM != 0 {
189 fmt.Printf("%v: can inline %v\n", ir.Line(fn), n)
191 if logopt.Enabled() {
192 logopt.LogOpt(fn.Pos(), "canInlineFunction", "inline", ir.FuncName(fn), fmt.Sprintf("cost: %d", inlineMaxBudget-visitor.budget))
196 // canDelayResults reports whether inlined calls to fn can delay
197 // declaring the result parameter until the "return" statement.
198 func canDelayResults(fn *ir.Func) bool {
199 // We can delay declaring+initializing result parameters if:
200 // (1) there's exactly one "return" statement in the inlined function;
201 // (2) it's not an empty return statement (#44355); and
202 // (3) the result parameters aren't named.
// NOTE(review): the nreturns declaration/increment on non-empty returns and the
// count check scaffolding are elided in this listing — only fragments are visible here.
205 ir.VisitList(fn.Body, func(n ir.Node) {
206 if n, ok := n.(*ir.ReturnStmt); ok {
208 if len(n.Results) == 0 {
209 nreturns++ // empty return statement (case 2)
215 return false // not exactly one return statement (case 1)
218 // temporaries for return values.
219 for _, param := range fn.Type().Results().FieldSlice() {
220 if sym := types.OrigSym(param.Sym); sym != nil && !sym.IsBlank() {
221 return false // found a named result parameter (case 3)
228 // hairyVisitor visits a function body to determine its inlining
229 // hairiness and whether or not it can be inlined.
230 type hairyVisitor struct {
// NOTE(review): fields elided in this listing — methods below also reference
// v.budget, v.reason, and v.extraCallCost, which must be declared in the omitted lines.
234 usedLocals ir.NameSet
235 do func(ir.Node) bool
// tooHairy drives doNode over fn's body; a true result means fn must not be inlined,
// with the explanation left in v.reason.
238 func (v *hairyVisitor) tooHairy(fn *ir.Func) bool {
239 v.do = v.doNode // cache closure
240 if ir.DoChildren(fn, v.do) {
244 v.reason = fmt.Sprintf("function too complex: cost %d exceeds budget %d", inlineMaxBudget-v.budget, inlineMaxBudget)
// doNode charges n against the inlining budget and records in v.reason anything
// that disqualifies the function outright. Returning true aborts the walk.
// NOTE(review): this listing elides many interior lines (the top-level switch
// statement, several case labels, returns, and closing braces); read fragments below accordingly.
250 func (v *hairyVisitor) doNode(n ir.Node) bool {
255 // Call is okay if inlinable and we have the budget for the body.
257 n := n.(*ir.CallExpr)
258 // Functions that call runtime.getcaller{pc,sp} can not be inlined
259 // because getcaller{pc,sp} expect a pointer to the caller's first argument.
261 // runtime.throw is a "cheap call" like panic in normal code.
262 if n.X.Op() == ir.ONAME {
263 name := n.X.(*ir.Name)
264 if name.Class == ir.PFUNC && types.IsRuntimePkg(name.Sym().Pkg) {
265 fn := name.Sym().Name
266 if fn == "getcallerpc" || fn == "getcallersp" {
267 v.reason = "call to " + fn
271 v.budget -= inlineExtraThrowCost
276 if n.X.Op() == ir.OMETHEXPR {
277 if meth := ir.MethodExprName(n.X); meth != nil {
278 if fn := meth.Func; fn != nil {
281 if types.IsRuntimePkg(s.Pkg) && s.Name == "heapBits.nextArena" {
282 // Special case: explicitly allow mid-stack inlining of
283 // runtime.heapBits.next even though it calls slow-path
284 // runtime.heapBits.nextArena.
287 // Special case: on architectures that can do unaligned loads,
288 // explicitly mark encoding/binary methods as cheap,
289 // because in practice they are, even though our inlining
290 // budgeting system does not see that. See issue 42958.
291 if base.Ctxt.Arch.CanMergeLoads && s.Pkg.Path == "encoding/binary" {
293 case "littleEndian.Uint64", "littleEndian.Uint32", "littleEndian.Uint16",
294 "bigEndian.Uint64", "bigEndian.Uint32", "bigEndian.Uint16",
295 "littleEndian.PutUint64", "littleEndian.PutUint32", "littleEndian.PutUint16",
296 "bigEndian.PutUint64", "bigEndian.PutUint32", "bigEndian.PutUint16":
301 break // treat like any other node, that is, cost of 1
307 if ir.IsIntrinsicCall(n) {
308 // Treat like any other node.
312 if fn := inlCallee(n.X); fn != nil && typecheck.HaveInlineBody(fn) {
313 v.budget -= fn.Inl.Cost
317 // Call cost for non-leaf inlining.
318 v.budget -= v.extraCallCost
321 base.FatalfAt(n.Pos(), "OCALLMETH missed by typecheck")
323 // Things that are too hairy, irrespective of the budget
324 case ir.OCALL, ir.OCALLINTER:
325 // Call cost for non-leaf inlining.
326 v.budget -= v.extraCallCost
329 n := n.(*ir.UnaryExpr)
330 if n.X.Op() == ir.OCONVIFACE && n.X.(*ir.ConvExpr).Implicit() {
331 // Hack to keep reflect.flag.mustBe inlinable for TestIntendedInlining.
332 // Before CL 284412, these conversions were introduced later in the
333 // compiler, so they didn't count against inlining budget.
336 v.budget -= inlineExtraPanicCost
339 // recover matches the argument frame pointer to find
340 // the right panic value, so it needs an argument frame.
341 v.reason = "call to recover"
345 if base.Debug.InlFuncsWithClosures == 0 {
346 v.reason = "not inlining functions with closures"
350 // TODO(danscales): Maybe make budget proportional to number of closure
352 //v.budget -= int32(len(n.(*ir.ClosureExpr).Func.ClosureVars) * 3)
354 // Scan body of closure (which DoChildren doesn't automatically
355 // do) to check for disallowed ops in the body and include the
356 // body in the budget.
357 if doList(n.(*ir.ClosureExpr).Func.Body, v.do) {
363 ir.ODCLTYPE, // can't print yet
365 v.reason = "unhandled op " + n.Op().String()
369 v.budget -= inlineExtraAppendCost
372 // *(*X)(unsafe.Pointer(&x)) is low-cost
373 n := n.(*ir.StarExpr)
376 for ptr.Op() == ir.OCONVNOP {
377 ptr = ptr.(*ir.ConvExpr).X
379 if ptr.Op() == ir.OADDR {
380 v.budget += 1 // undo half of default cost of ir.ODEREF+ir.OADDR
384 // This doesn't produce code, but the children might.
385 v.budget++ // undo default cost
387 case ir.ODCLCONST, ir.OFALL:
388 // These nodes don't produce code; omit from inlining budget.
393 if ir.IsConst(n.Cond, constant.Bool) {
394 // This if and the condition cost nothing.
395 if doList(n.Init(), v.do) {
398 if ir.BoolVal(n.Cond) {
399 return doList(n.Body, v.do)
401 return doList(n.Else, v.do)
407 if n.Class == ir.PAUTO {
412 // The only OBLOCK we should see at this point is an empty one.
413 // In any event, let the visitList(n.List()) below take care of the statements,
414 // and don't charge for the OBLOCK itself. The ++ undoes the -- below.
417 case ir.OMETHVALUE, ir.OSLICELIT:
418 v.budget-- // Hack for toolstash -cmp.
421 v.budget++ // Hack for toolstash -cmp.
426 // When debugging, don't stop early, to get full cost of inlining this function
427 if v.budget < 0 && base.Flag.LowerM < 2 && !logopt.Enabled() {
428 v.reason = "too expensive"
432 return ir.DoChildren(n, v.do)
// isBigFunc reports whether fn has more than inlineBigFunctionNodes nodes
// (the predicate body that decrements/checks budget is elided in this listing).
435 func isBigFunc(fn *ir.Func) bool {
436 budget := inlineBigFunctionNodes
437 return ir.Any(fn, func(n ir.Node) bool {
443 // inlcopylist (together with inlcopy) recursively copies a list of nodes, except
444 // that it keeps the same ONAME, OTYPE, and OLITERAL nodes. It is used for copying
445 // the body and dcls of an inlineable function.
446 func inlcopylist(ll []ir.Node) []ir.Node {
447 s := make([]ir.Node, len(ll))
448 for i, n := range ll {
// NOTE(review): the per-element inlcopy assignment and the return of s are elided here.
454 // inlcopy is like DeepCopy(), but does extra work to copy closures.
// NOTE(review): the switch opener, the Copy call producing m, and several closing
// braces/returns are elided in this listing.
455 func inlcopy(n ir.Node) ir.Node {
456 var edit func(ir.Node) ir.Node
457 edit = func(x ir.Node) ir.Node {
459 case ir.ONAME, ir.OTYPE, ir.OLITERAL, ir.ONIL:
463 ir.EditChildren(m, edit)
464 if x.Op() == ir.OCLOSURE {
465 x := x.(*ir.ClosureExpr)
466 // Need to save/duplicate x.Func.Nname,
467 // x.Func.Nname.Ntype, x.Func.Dcl, x.Func.ClosureVars, and
468 // x.Func.Body for iexport and local inlining.
470 newfn := ir.NewFunc(oldfn.Pos())
471 m.(*ir.ClosureExpr).Func = newfn
472 newfn.Nname = ir.NewNameAt(oldfn.Nname.Pos(), oldfn.Nname.Sym())
473 // XXX OK to share fn.Type() ??
474 newfn.Nname.SetType(oldfn.Nname.Type())
475 // Ntype can be nil for -G=3 mode.
476 if oldfn.Nname.Ntype != nil {
477 newfn.Nname.Ntype = inlcopy(oldfn.Nname.Ntype).(ir.Ntype)
479 newfn.Body = inlcopylist(oldfn.Body)
480 // Make shallow copy of the Dcl and ClosureVar slices
481 newfn.Dcl = append([]*ir.Name(nil), oldfn.Dcl...)
482 newfn.ClosureVars = append([]*ir.Name(nil), oldfn.ClosureVars...)
489 // InlineCalls/inlnode walks fn's statements and expressions and substitutes any
490 // calls made to inlineable functions. This is the external entry point.
491 func InlineCalls(fn *ir.Func) {
// NOTE(review): the save/restore of ir.CurFunc and the isBigFunc(fn) guard around
// the maxCost override are elided in this listing.
494 maxCost := int32(inlineMaxBudget)
496 maxCost = inlineBigFunctionMaxCost
498 // Map to keep track of functions that have been inlined at a particular
499 // call site, in order to stop inlining when we reach the beginning of a
500 // recursion cycle again. We don't inline immediately recursive functions,
501 // but allow inlining if there is a recursion cycle of many functions.
502 // Most likely, the inlining will stop before we even hit the beginning of
503 // the cycle again, but the map catches the unusual case.
504 inlMap := make(map[*ir.Func]bool)
505 var edit func(ir.Node) ir.Node
506 edit = func(n ir.Node) ir.Node {
507 return inlnode(n, maxCost, inlMap, edit)
509 ir.EditChildren(fn, edit)
513 // inlnode recurses over the tree to find inlineable calls, which will
514 // be turned into OINLCALLs by mkinlcall. When the recursion comes
515 // back up will examine left, right, list, rlist, ninit, ntest, nincr,
516 // nbody and nelse and use one of the 4 inlconv/glue functions above
517 // to turn the OINLCALL into an expression, a statement, or patch it
518 // in to this nodes list or rlist as appropriate.
519 // NOTE it makes no sense to pass the glue functions down the
520 // recursion to the level where the OINLCALL gets created because they
521 // have to edit /this/ n, so you'd have to push that one down as well,
522 // but then you may as well do it here. so this is cleaner and
523 // shorter and less complicated.
524 // The result of inlnode MUST be assigned back to n, e.g.
526 // n.Left = inlnode(n.Left)
527 func inlnode(n ir.Node, maxCost int32, inlMap map[*ir.Func]bool, edit func(ir.Node) ir.Node) ir.Node {
// NOTE(review): both switch openers, several case labels, SetNoInline calls, and
// the final return are elided in this listing.
533 case ir.ODEFER, ir.OGO:
534 n := n.(*ir.GoDeferStmt)
535 switch call := n.Call; call.Op() {
537 base.FatalfAt(call.Pos(), "OCALLMETH missed by typecheck")
539 call := call.(*ir.CallExpr)
543 n := n.(*ir.TailCallStmt)
544 n.Call.NoInline = true // Not inline a tail call for now. Maybe we could inline it just like RETURN fn(arg)?
546 // TODO do them here (or earlier),
547 // so escape analysis can avoid more heapmoves.
551 base.FatalfAt(n.Pos(), "OCALLMETH missed by typecheck")
553 n := n.(*ir.CallExpr)
554 if n.X.Op() == ir.OMETHEXPR {
555 // Prevent inlining some reflect.Value methods when using checkptr,
556 // even when package reflect was compiled without it (#35073).
557 if meth := ir.MethodExprName(n.X); meth != nil {
559 if base.Debug.Checkptr != 0 && types.IsReflectPkg(s.Pkg) && (s.Name == "Value.UnsafeAddr" || s.Name == "Value.Pointer") {
568 ir.EditChildren(n, edit)
570 // with all the branches out of the way, it is now time to
571 // transmogrify this node itself unless inhibited by the
572 // switch at the top of this function.
575 base.FatalfAt(n.Pos(), "OCALLMETH missed by typecheck")
578 call := n.(*ir.CallExpr)
582 if base.Flag.LowerM > 3 {
583 fmt.Printf("%v:call to func %+v\n", ir.Line(n), call.X)
585 if ir.IsIntrinsicCall(call) {
588 if fn := inlCallee(call.X); fn != nil && typecheck.HaveInlineBody(fn) {
589 n = mkinlcall(call, fn, maxCost, inlMap, edit)
598 // inlCallee takes a function-typed expression and returns the underlying function ONAME
599 // that it refers to if statically known. Otherwise, it returns nil.
600 func inlCallee(fn ir.Node) *ir.Func {
601 fn = ir.StaticValue(fn)
// NOTE(review): the op switch and the returns for each case are elided in this listing.
604 fn := fn.(*ir.SelectorExpr)
605 n := ir.MethodExprName(fn)
606 // Check that receiver type matches fn.X.
607 // TODO(mdempsky): Handle implicit dereference
608 // of pointer receiver argument?
609 if n == nil || !types.Identical(n.Type().Recv().Type, fn.X.Type()) {
615 if fn.Class == ir.PFUNC {
619 fn := fn.(*ir.ClosureExpr)
// inlParam returns the substitute (inlvars-mapped) name for parameter t in the
// inlined call, wiring its declaration and Defn into the assignment as.
// NOTE(review): the blank-parameter fast path and inlvar lookup are elided in this listing.
627 func inlParam(t *types.Field, as ir.InitNode, inlvars map[*ir.Name]*ir.Name) ir.Node {
631 n := t.Nname.(*ir.Name)
637 base.Fatalf("missing inlvar for %v", n)
639 as.PtrInit().Append(ir.NewDecl(base.Pos, ir.ODCL, inlvar))
640 inlvar.Name().Defn = as
646 // SSADumpInline gives the SSA back end a chance to dump the function
647 // when producing output for debugging the compiler itself.
// The default is a no-op; the SSA package replaces it when dumping is requested.
648 var SSADumpInline = func(*ir.Func) {}
650 // NewInline allows the inliner implementation to be overridden.
651 // If it returns nil, the legacy inliner will handle this call
// (the default below always returns nil, i.e. always defers to the legacy path).
653 var NewInline = func(call *ir.CallExpr, fn *ir.Func, inlIndex int) *ir.InlinedCallExpr { return nil }
655 // If n is a OCALLFUNC node, and fn is an ONAME node for a
656 // function with an inlinable body, return an OINLCALL node that can replace n.
657 // The returned node's Ninit has the parameter assignments, the Nbody is the
658 // inlined function body, and (List, Rlist) contain the (input, output)
660 // The result of mkinlcall MUST be assigned back to n, e.g.
662 // n.Left = mkinlcall(n.Left, fn, isddd)
663 func mkinlcall(n *ir.CallExpr, fn *ir.Func, maxCost int32, inlMap map[*ir.Func]bool, edit func(ir.Node) ir.Node) ir.Node {
// NOTE(review): many guard conditions (fn.Inl == nil check, inlMap bookkeeping,
// sym derivation, early returns of n) are elided in this listing.
665 if logopt.Enabled() {
666 logopt.LogOpt(n.Pos(), "cannotInlineCall", "inline", ir.FuncName(ir.CurFunc),
667 fmt.Sprintf("%s cannot be inlined", ir.PkgFuncName(fn)))
671 if fn.Inl.Cost > maxCost {
672 // The inlined function body is too big. Typically we use this check to restrict
673 // inlining into very big functions. See issue 26546 and 17566.
674 if logopt.Enabled() {
675 logopt.LogOpt(n.Pos(), "cannotInlineCall", "inline", ir.FuncName(ir.CurFunc),
676 fmt.Sprintf("cost %d of %s exceeds max large caller cost %d", fn.Inl.Cost, ir.PkgFuncName(fn), maxCost))
681 if fn == ir.CurFunc {
682 // Can't recursively inline a function into itself.
683 if logopt.Enabled() {
684 logopt.LogOpt(n.Pos(), "cannotInlineCall", "inline", fmt.Sprintf("recursive call to %s", ir.FuncName(ir.CurFunc)))
689 // Don't inline a function fn that has no shape parameters, but is passed at
690 // least one shape arg. This means we must be inlining a non-generic function
691 // fn that was passed into a generic function, and can be called with a shape
692 // arg because it matches an appropriate type parameters. But fn may include
693 // an interface conversion (that may be applied to a shape arg) that was not
694 // apparent when we first created the instantiation of the generic function.
695 // We can't handle this if we actually do the inlining, since we want to know
696 // all interface conversions immediately after stenciling. So, we avoid
697 // inlining in this case. See #49309.
698 if !fn.Type().HasShape() {
699 for _, arg := range n.Args {
700 if arg.Type().HasShape() {
701 if logopt.Enabled() {
702 logopt.LogOpt(n.Pos(), "cannotInlineCall", "inline", ir.FuncName(ir.CurFunc),
703 fmt.Sprintf("inlining non-shape function %v with shape args", ir.FuncName(fn)))
710 if base.Flag.Cfg.Instrumenting && types.IsRuntimePkg(fn.Sym().Pkg) {
711 // Runtime package must not be instrumented.
712 // Instrument skips runtime package. However, some runtime code can be
713 // inlined into other packages and instrumented there. To avoid this,
714 // we disable inlining of runtime functions when instrumenting.
715 // The example that we observed is inlining of LockOSThread,
716 // which led to false race reports on m contents.
721 if base.Flag.LowerM > 1 {
722 fmt.Printf("%v: cannot inline %v into %v: repeated recursive cycle\n", ir.Line(n), fn, ir.FuncName(ir.CurFunc))
731 typecheck.FixVariadicCall(n)
733 parent := base.Ctxt.PosTable.Pos(n.Pos()).Base().InliningIndex()
736 inlIndex := base.Ctxt.InlTree.Add(parent, n.Pos(), sym)
738 if base.Flag.GenDwarfInl > 0 {
739 if !sym.WasInlined() {
740 base.Ctxt.DwFixups.SetPrecursorFunc(sym, fn)
741 sym.Set(obj.AttrWasInlined, true)
745 if base.Flag.LowerM != 0 {
746 fmt.Printf("%v: inlining call to %v\n", ir.Line(n), fn)
748 if base.Flag.LowerM > 2 {
749 fmt.Printf("%v: Before inlining: %+v\n", ir.Line(n), n)
752 res := NewInline(n, fn, inlIndex)
754 res = oldInline(n, fn, inlIndex)
757 // transitive inlining
758 // might be nice to do this before exporting the body,
759 // but can't emit the body with inlining expanded.
760 // instead we emit the things that the body needs
761 // and each use must redo the inlining.
762 // luckily these are small.
763 ir.EditChildren(res, edit)
765 if base.Flag.LowerM > 2 {
766 fmt.Printf("%v: After inlining %+v\n\n", ir.Line(res), res)
772 // CalleeEffects appends any side effects from evaluating callee to init.
// NOTE(review): the enclosing loop/switch over callee.Op() and its terminating
// returns are elided in this listing; only individual case bodies are visible.
773 func CalleeEffects(init *ir.Nodes, callee ir.Node) {
776 case ir.ONAME, ir.OCLOSURE, ir.OMETHEXPR:
780 conv := callee.(*ir.ConvExpr)
781 init.Append(ir.TakeInit(conv)...)
785 ic := callee.(*ir.InlinedCallExpr)
786 init.Append(ir.TakeInit(ic)...)
787 init.Append(ic.Body.Take()...)
788 callee = ic.SingleResult()
791 base.FatalfAt(callee.Pos(), "unexpected callee expression: %v", callee)
796 // oldInline creates an InlinedCallExpr to replace the given call
797 // expression. fn is the callee function to be inlined. inlIndex is
798 // the inlining tree position index, for use with src.NewInliningBase
799 // when rewriting positions.
800 func oldInline(call *ir.CallExpr, fn *ir.Func, inlIndex int) *ir.InlinedCallExpr {
// NOTE(review): substantial interior lines are elided in this listing (ninit
// declaration, retvar synthesis branch, the inlsubst literal's opener, goto/label
// wiring, and the final return of res).
801 if base.Debug.TypecheckInl == 0 {
802 typecheck.ImportedBody(fn)
809 // For normal function calls, the function callee expression
810 // may contain side effects. Make sure to preserve these,
811 // if necessary (#42703).
812 if call.Op() == ir.OCALLFUNC {
813 CalleeEffects(&ninit, call.X)
816 // Make temp names to use instead of the originals.
817 inlvars := make(map[*ir.Name]*ir.Name)
819 // record formals/locals for later post-processing
820 var inlfvars []*ir.Name
822 for _, ln := range fn.Inl.Dcl {
823 if ln.Op() != ir.ONAME {
826 if ln.Class == ir.PPARAMOUT { // return values handled below.
829 inlf := typecheck.Expr(inlvar(ln)).(*ir.Name)
831 if base.Flag.GenDwarfInl > 0 {
832 if ln.Class == ir.PPARAM {
833 inlf.Name().SetInlFormal(true)
835 inlf.Name().SetInlLocal(true)
837 inlf.SetPos(ln.Pos())
838 inlfvars = append(inlfvars, inlf)
842 // We can delay declaring+initializing result parameters if:
843 // temporaries for return values.
844 var retvars []ir.Node
845 for i, t := range fn.Type().Results().Fields().Slice() {
847 if nn := t.Nname; nn != nil && !ir.IsBlank(nn.(*ir.Name)) && !strings.HasPrefix(nn.Sym().Name, "~r") {
850 m = typecheck.Expr(m).(*ir.Name)
853 // anonymous return values, synthesize names for use in assignment that replaces return
857 if base.Flag.GenDwarfInl > 0 {
858 // Don't update the src.Pos on a return variable if it
859 // was manufactured by the inliner (e.g. "~R2"); such vars
860 // were not part of the original callee.
861 if !strings.HasPrefix(m.Sym().Name, "~R") {
862 m.Name().SetInlFormal(true)
864 inlfvars = append(inlfvars, m)
868 retvars = append(retvars, m)
871 // Assign arguments to the parameters' temp names.
872 as := ir.NewAssignListStmt(base.Pos, ir.OAS2, nil, nil)
874 if call.Op() == ir.OCALLMETH {
875 base.FatalfAt(call.Pos(), "OCALLMETH missed by typecheck")
877 as.Rhs.Append(call.Args...)
879 if recv := fn.Type().Recv(); recv != nil {
880 as.Lhs.Append(inlParam(recv, as, inlvars))
882 for _, param := range fn.Type().Params().Fields().Slice() {
883 as.Lhs.Append(inlParam(param, as, inlvars))
886 if len(as.Rhs) != 0 {
887 ninit.Append(typecheck.Stmt(as))
890 if !fn.Inl.CanDelayResults {
891 // Zero the return parameters.
892 for _, n := range retvars {
893 ninit.Append(ir.NewDecl(base.Pos, ir.ODCL, n.(*ir.Name)))
894 ras := ir.NewAssignStmt(base.Pos, n, nil)
895 ninit.Append(typecheck.Stmt(ras))
899 retlabel := typecheck.AutoLabel(".i")
903 // Add an inline mark just before the inlined body.
904 // This mark is inline in the code so that it's a reasonable spot
905 // to put a breakpoint. Not sure if that's really necessary or not
906 // (in which case it could go at the end of the function instead).
908 ninit.Append(ir.NewInlineMarkStmt(call.Pos().WithIsStmt(), int64(inlIndex)))
914 defnMarker: ir.NilExpr{},
915 bases: make(map[*src.PosBase]*src.PosBase),
916 newInlIndex: inlIndex,
919 subst.edit = subst.node
921 body := subst.list(ir.Nodes(fn.Inl.Body))
923 lab := ir.NewLabelStmt(base.Pos, retlabel)
924 body = append(body, lab)
926 if !typecheck.Go117ExportTypes {
927 typecheck.Stmts(body)
930 if base.Flag.GenDwarfInl > 0 {
931 for _, v := range inlfvars {
932 v.SetPos(subst.updatedPos(v.Pos()))
936 //dumplist("ninit post", ninit);
938 res := ir.NewInlinedCallExpr(base.Pos, body, retvars)
940 res.SetType(call.Type())
945 // Every time we expand a function we generate a new set of tmpnames,
946 // PAUTO's in the calling functions, and link them off of the
947 // PPARAM's, PAUTOS and PPARAMOUTs of the called function.
// inlvar synthesizes such a caller-local PAUTO mirroring var_ from the callee.
948 func inlvar(var_ *ir.Name) *ir.Name {
949 if base.Flag.LowerM > 3 {
950 fmt.Printf("inlvar %+v\n", var_)
953 n := typecheck.NewName(var_.Sym())
954 n.SetType(var_.Type())
958 n.SetAutoTemp(var_.AutoTemp())
959 n.Curfn = ir.CurFunc // the calling function, not the called one
960 n.SetAddrtaken(var_.Addrtaken())
962 ir.CurFunc.Dcl = append(ir.CurFunc.Dcl, n)
966 // Synthesize a variable to store the inlined function's results in.
// The name is "~R<i>"; lines setting its type/class are elided in this listing.
967 func retvar(t *types.Field, i int) *ir.Name {
968 n := typecheck.NewName(typecheck.LookupNum("~R", i))
973 n.Curfn = ir.CurFunc // the calling function, not the called one
974 ir.CurFunc.Dcl = append(ir.CurFunc.Dcl, n)
978 // The inlsubst type implements the actual inlining of a single
// function call (several field declarations are elided in this listing,
// e.g. the retlabel/retvars fields the comments below describe).
980 type inlsubst struct {
981 // Target of the goto substituted in place of a return.
984 // Temporary result variables.
987 inlvars map[*ir.Name]*ir.Name
988 // defnMarker is used to mark a Node for reassignment.
989 // inlsubst.clovar sets this when creating a new ONAME.
990 // inlsubst.node will set the correct Defn for inlvar.
991 defnMarker ir.NilExpr
993 // bases maps from original PosBase to PosBase with an extra
994 // inlined call frame.
995 bases map[*src.PosBase]*src.PosBase
997 // newInlIndex is the index of the inlined call frame to
998 // insert for inlined nodes.
1001 edit func(ir.Node) ir.Node // cached copy of subst.node method value closure
1003 // If non-nil, we are inside a closure inside the inlined function, and
1004 // newclofn is the Func of the new inlined closure.
1007 fn *ir.Func // For debug -- the func that is being inlined
1009 // If true, then don't update source positions during substitution
1010 // (retain old source positions).
1014 // list inlines a list of nodes.
// (Substitutes each element via subst.node; the return of s is elided in this listing.)
1015 func (subst *inlsubst) list(ll ir.Nodes) []ir.Node {
1016 s := make([]ir.Node, 0, len(ll))
1017 for _, n := range ll {
1018 s = append(s, subst.node(n))
1023 // fields returns a list of the fields of a struct type representing receiver,
1024 // params, or results, after duplicating the field nodes and substituting the
1025 // Nname nodes inside the field nodes.
1026 func (subst *inlsubst) fields(oldt *types.Type) []*types.Field {
1027 oldfields := oldt.FieldSlice()
1028 newfields := make([]*types.Field, len(oldfields))
1029 for i := range oldfields {
1030 newfields[i] = oldfields[i].Copy()
1031 if oldfields[i].Nname != nil {
1032 newfields[i].Nname = subst.node(oldfields[i].Nname.(*ir.Name))
1038 // clovar creates a new ONAME node for a local variable or param of a closure
1039 // inside a function being inlined.
// NOTE(review): interior lines are elided in this listing (m's class/type setup,
// the nil-Defn fast path, and several case labels of the Defn type switch).
1040 func (subst *inlsubst) clovar(n *ir.Name) *ir.Name {
1041 m := ir.NewNameAt(n.Pos(), n.Sym())
1045 if n.IsClosureVar() {
1046 m.SetIsClosureVar(true)
1049 m.SetAddrtaken(true)
1056 m.Curfn = subst.newclofn
1058 switch defn := n.Defn.(type) {
1062 if !n.IsClosureVar() {
1063 base.FatalfAt(n.Pos(), "want closure variable, got: %+v", n)
1065 if n.Sym().Pkg != types.LocalPkg {
1066 // If the closure came from inlining a function from
1067 // another package, must change package of captured
1068 // variable to localpkg, so that the fields of the closure
1069 // struct are local package and can be accessed even if
1070 // name is not exported. If you disable this code, you can
1071 // reproduce the problem by running 'go test
1072 // go/internal/srcimporter'. TODO(mdempsky) - maybe change
1073 // how we create closure structs?
1074 m.SetSym(types.LocalPkg.Lookup(n.Sym().Name))
1076 // Make sure any inlvar which is the Defn
1077 // of an ONAME closure var is rewritten
1078 // during inlining. Don't substitute
1079 // if Defn node is outside inlined function.
1080 if subst.inlvars[n.Defn.(*ir.Name)] != nil {
1081 m.Defn = subst.node(n.Defn)
1083 case *ir.AssignStmt, *ir.AssignListStmt:
1084 // Mark node for reassignment at the end of inlsubst.node.
1085 m.Defn = &subst.defnMarker
1086 case *ir.TypeSwitchGuard:
1087 // TODO(mdempsky): Set m.Defn properly. See discussion on #45743.
1089 // TODO: Set m.Defn properly if we support inlining range statement in the future.
1091 base.FatalfAt(n.Pos(), "unexpected Defn: %+v", defn)
1095 // Either the outer variable is defined in function being inlined,
1096 // and we will replace it with the substituted variable, or it is
1097 // defined outside the function being inlined, and we should just
1098 // skip the outer variable (the closure variable of the function
1100 s := subst.node(n.Outer).(*ir.Name)
1109 // closure does the necessary substitutions for a ClosureExpr n and returns the new
// closure node (the tail of this doc comment and some interior lines are elided in this listing).
1111 func (subst *inlsubst) closure(n *ir.ClosureExpr) ir.Node {
1112 // Prior to the subst edit, set a flag in the inlsubst to indicate
1113 // that we don't want to update the source positions in the new
1114 // closure function. If we do this, it will appear that the
1115 // closure itself has things inlined into it, which is not the
1116 // case. See issue #46234 for more details. At the same time, we
1117 // do want to update the position in the new ClosureExpr (which is
1118 // part of the function we're working on). See #49171 for an
1119 // example of what happens if we miss that update.
1120 newClosurePos := subst.updatedPos(n.Pos())
1121 defer func(prev bool) { subst.noPosUpdate = prev }(subst.noPosUpdate)
1122 subst.noPosUpdate = true
1124 //fmt.Printf("Inlining func %v with closure into %v\n", subst.fn, ir.FuncName(ir.CurFunc))
1127 newfn := ir.NewClosureFunc(oldfn.Pos(), true)
1129 // Ntype can be nil for -G=3 mode.
1130 if oldfn.Nname.Ntype != nil {
1131 newfn.Nname.Ntype = subst.node(oldfn.Nname.Ntype).(ir.Ntype)
1134 if subst.newclofn != nil {
1135 //fmt.Printf("Inlining a closure with a nested closure\n")
1137 prevxfunc := subst.newclofn
1139 // Mark that we are now substituting within a closure (within the
1140 // inlined function), and create new nodes for all the local
1141 // vars/params inside this closure.
1142 subst.newclofn = newfn
1144 newfn.ClosureVars = nil
1145 for _, oldv := range oldfn.Dcl {
1146 newv := subst.clovar(oldv)
1147 subst.inlvars[oldv] = newv
1148 newfn.Dcl = append(newfn.Dcl, newv)
1150 for _, oldv := range oldfn.ClosureVars {
1151 newv := subst.clovar(oldv)
1152 subst.inlvars[oldv] = newv
1153 newfn.ClosureVars = append(newfn.ClosureVars, newv)
1156 // Need to replace ONAME nodes in
1157 // newfn.Type().FuncType().Receiver/Params/Results.FieldSlice().Nname
1158 oldt := oldfn.Type()
1159 newrecvs := subst.fields(oldt.Recvs())
1160 var newrecv *types.Field
1161 if len(newrecvs) > 0 {
1162 newrecv = newrecvs[0]
1164 newt := types.NewSignature(oldt.Pkg(), newrecv,
1165 nil, subst.fields(oldt.Params()), subst.fields(oldt.Results()))
1167 newfn.Nname.SetType(newt)
1168 newfn.Body = subst.list(oldfn.Body)
1170 // Remove the nodes for the current closure from subst.inlvars
1171 for _, oldv := range oldfn.Dcl {
1172 delete(subst.inlvars, oldv)
1174 for _, oldv := range oldfn.ClosureVars {
1175 delete(subst.inlvars, oldv)
1177 // Go back to previous closure func
1178 subst.newclofn = prevxfunc
1180 // Actually create the named function for the closure, now that
1181 // the closure is inlined in a specific function.
1182 newclo := newfn.OClosure
1183 newclo.SetPos(newClosurePos)
1184 newclo.SetInit(subst.list(n.Init()))
1185 return typecheck.Expr(newclo)
1188 // node recursively copies a node from the saved pristine body of the
1189 // inlined function, substituting references to input/output
1190 // parameters with ones to the tmpnames, and substituting returns with
1191 // assignments to the output.
1192 func (subst *inlsubst) node(n ir.Node) ir.Node {
1201 // Handle captured variables when inlining closures.
1202 if n.IsClosureVar() && subst.newclofn == nil {
1205 // Deal with case where sequence of closures are inlined.
1206 // TODO(danscales) - write test case to see if we need to
1207 // go up multiple levels.
1208 if o.Curfn != ir.CurFunc {
1212 // make sure the outer param matches the inlining location
1213 if o == nil || o.Curfn != ir.CurFunc {
1214 base.Fatalf("%v: unresolvable capture %v\n", ir.Line(n), n)
1217 if base.Flag.LowerM > 2 {
1218 fmt.Printf("substituting captured name %+v -> %+v\n", n, o)
// Names with an entry in inlvars are the inlined function's
// locals/params; substitute the corresponding temporary.
1223 if inlvar := subst.inlvars[n]; inlvar != nil { // These will be set during inlnode
1224 if base.Flag.LowerM > 2 {
1225 fmt.Printf("substituting name %+v -> %+v\n", n, inlvar)
// Name defined outside the inlined function: leave it alone.
1230 if base.Flag.LowerM > 2 {
1231 fmt.Printf("not substituting name %+v\n", n)
1236 n := n.(*ir.SelectorExpr)
1239 case ir.OLITERAL, ir.ONIL, ir.OTYPE:
1240 // If n is a named constant or type, we can continue
1241 // using it in the inline copy. Otherwise, make a copy
1242 // so we can update the line number.
1248 if subst.newclofn != nil {
1249 // Don't do special substitutions if inside a closure
1252 // Because of the above test for subst.newclofn,
1253 // this return is guaranteed to belong to the current inlined function.
// A return in the inlined body becomes: assign results to the
// return temporaries (if any), then goto the shared return label.
1254 n := n.(*ir.ReturnStmt)
1255 init := subst.list(n.Init())
1256 if len(subst.retvars) != 0 && len(n.Results) != 0 {
1257 as := ir.NewAssignListStmt(base.Pos, ir.OAS2, nil, nil)
1259 // Make a shallow copy of retvars.
1260 // Otherwise OINLCALL.Rlist will be the same list,
1261 // and later walk and typecheck may clobber it.
1262 for _, n := range subst.retvars {
1265 as.Rhs = subst.list(n.Results)
// When results can be delayed, declare the result temporaries
// here (at the return) rather than at the call site.
1267 if subst.fn.Inl.CanDelayResults {
1268 for _, n := range as.Lhs {
1269 as.PtrInit().Append(ir.NewDecl(base.Pos, ir.ODCL, n.(*ir.Name)))
1274 init = append(init, typecheck.Stmt(as))
// Jump to the label placed after the inlined body.
1276 init = append(init, ir.NewBranchStmt(base.Pos, ir.OGOTO, subst.retlabel))
1277 typecheck.Stmts(init)
1278 return ir.NewBlockStmt(base.Pos, init)
1280 case ir.OGOTO, ir.OBREAK, ir.OCONTINUE:
1281 if subst.newclofn != nil {
1282 // Don't do special substitutions if inside a closure
1285 n := n.(*ir.BranchStmt)
1286 m := ir.Copy(n).(*ir.BranchStmt)
1287 m.SetPos(subst.updatedPos(m.Pos()))
// Rename the target label so multiple inlined copies don't clash.
1289 m.Label = translateLabel(n.Label)
1293 if subst.newclofn != nil {
1294 // Don't do special substitutions if inside a closure
1297 n := n.(*ir.LabelStmt)
1298 m := ir.Copy(n).(*ir.LabelStmt)
1299 m.SetPos(subst.updatedPos(m.Pos()))
1301 m.Label = translateLabel(n.Label)
// Closures get their own dedicated substitution path.
1305 return subst.closure(n.(*ir.ClosureExpr))
// Generic case: copy the node, fix its position, and recurse into
// its children via the edit hook.
1310 m.SetPos(subst.updatedPos(m.Pos()))
1311 ir.EditChildren(m, subst.edit)
1313 if subst.newclofn == nil {
1314 // Translate any label on FOR, RANGE loops, SWITCH or SELECT
1317 m := m.(*ir.ForStmt)
1318 m.Label = translateLabel(m.Label)
1322 m := m.(*ir.RangeStmt)
1323 m.Label = translateLabel(m.Label)
1327 m := m.(*ir.SwitchStmt)
1328 m.Label = translateLabel(m.Label)
1332 m := m.(*ir.SelectStmt)
1333 m.Label = translateLabel(m.Label)
// Fix up Defn pointers that were tagged with the sentinel
// defnMarker, so the copies point at the copied assignment.
1338 switch m := m.(type) {
1339 case *ir.AssignStmt:
1340 if lhs, ok := m.X.(*ir.Name); ok && lhs.Defn == &subst.defnMarker {
1343 case *ir.AssignListStmt:
1344 for _, lhs := range m.Lhs {
1345 if lhs, ok := lhs.(*ir.Name); ok && lhs.Defn == &subst.defnMarker {
1354 // translateLabel makes a label from an inlined function (if non-nil) be unique by
1355 // adding "·inlgen".
1356 func translateLabel(l *types.Sym) *types.Sym {
// Suffix the label with the current inline generation counter so that
// labels from separate inlined copies of the same body don't collide.
1360 p := fmt.Sprintf("%s·%d", l.Name, inlgen)
1361 return typecheck.Lookup(p)
// updatedPos returns xpos rewritten onto an inlining position base for
// this inline site, so tools can see the call chain. Bases are cached
// in subst.bases so each original base is translated only once.
1364 func (subst *inlsubst) updatedPos(xpos src.XPos) src.XPos {
// Position updates are suppressed while substituting inside a cloned
// closure body; see the comment in closure() (#46234).
1365 if subst.noPosUpdate {
1368 pos := base.Ctxt.PosTable.Pos(xpos)
1369 oldbase := pos.Base() // can be nil
1370 newbase := subst.bases[oldbase]
// First time we see this base: wrap it with the inline index and cache it.
1372 newbase = src.NewInliningBase(oldbase, subst.newInlIndex)
1373 subst.bases[oldbase] = newbase
1375 pos.SetBase(newbase)
1376 return base.Ctxt.PosTable.XPos(pos)
// pruneUnusedAutos filters ll, dropping PAUTO (function-local) names
// that the hairy visitor did not record as used; all other names are kept.
1379 func pruneUnusedAutos(ll []*ir.Name, vis *hairyVisitor) []*ir.Name {
1380 s := make([]*ir.Name, 0, len(ll))
1381 for _, n := range ll {
// Only local autos are candidates for pruning; params/results etc.
// must survive regardless of recorded use.
1382 if n.Class == ir.PAUTO {
1383 if !vis.usedLocals.Has(n) {
1392 // numNonClosures returns the number of functions in list which are not closures.
1393 func numNonClosures(list []*ir.Func) int {
1395 for _, fn := range list {
// A non-nil OClosure marks a closure function; count only the rest.
1396 if fn.OClosure == nil {
1403 func doList(list []ir.Node, do func(ir.Node) bool) bool {
1404 for _, x := range list {