// Copyright 2012 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
11 "cmd/compile/internal/base"
12 "cmd/compile/internal/ir"
13 "cmd/compile/internal/reflectdata"
14 "cmd/compile/internal/staticinit"
15 "cmd/compile/internal/typecheck"
16 "cmd/compile/internal/types"
// Rewrite tree to use separate statements to enforce
// order of evaluation. Makes walk easier, because it
// can (after this runs) reorder at will within an expression.
//
// Rewrite m[k] op= r into m[k] = m[k] op r if op is / or %.
//
// Introduce temporaries as needed by runtime routines.
// For example, the map runtime routines take the map key
// by reference, so make sure all map keys are addressable
// by copying them to temporaries as needed.
// The same is true for channel operations.
//
// Arrange that map index expressions only appear in direct
// assignments x = m[k] or m[k] = x, never in larger expressions.
//
// Arrange that receive expressions only appear in direct assignments
// x = <-c or as standalone statements <-c, never in larger expressions.
//
// TODO(rsc): The temporary introduction during multiple assignments
// should be moved into this file, so that the temporaries can be cleaned
// and so that conversions implicit in the OAS2FUNC and OAS2RECV
// nodes can be made explicit and then have their temporaries cleaned.
//
// TODO(rsc): Goto and multilevel break/continue can jump over
// inserted VARKILL annotations. Work out a way to handle these.
// The current implementation is safe, in that it will execute correctly.
// But it won't reuse temporaries as aggressively as it might, and
// it can result in unnecessary zeroing of those variables in the function
// entry.
50 // orderState holds state during the ordering process.
51 type orderState struct {
52 out []ir.Node // list of generated statements
53 temp []*ir.Name // stack of temporary variables
54 free map[string][]*ir.Name // free list of unused temporaries, by type.LongString().
55 edit func(ir.Node) ir.Node // cached closure of o.exprNoLHS
58 // Order rewrites fn.Nbody to apply the ordering constraints
59 // described in the comment at the top of the file.
60 func order(fn *ir.Func) {
62 s := fmt.Sprintf("\nbefore order %v", fn.Sym())
63 ir.DumpList(s, fn.Body)
66 orderBlock(&fn.Body, map[string][]*ir.Name{})
69 // append typechecks stmt and appends it to out.
70 func (o *orderState) append(stmt ir.Node) {
71 o.out = append(o.out, typecheck.Stmt(stmt))
74 // newTemp allocates a new temporary with the given type,
75 // pushes it onto the temp stack, and returns it.
76 // If clear is true, newTemp emits code to zero the temporary.
77 func (o *orderState) newTemp(t *types.Type, clear bool) *ir.Name {
79 // Note: LongString is close to the type equality we want,
80 // but not exactly. We still need to double-check with types.Identical.
84 if types.Identical(t, n.Type()) {
96 o.append(ir.NewAssignStmt(base.Pos, v, nil))
99 o.temp = append(o.temp, v)
103 // copyExpr behaves like newTemp but also emits
104 // code to initialize the temporary to the value n.
105 func (o *orderState) copyExpr(n ir.Node) *ir.Name {
106 return o.copyExpr1(n, false)
109 // copyExprClear is like copyExpr but clears the temp before assignment.
110 // It is provided for use when the evaluation of tmp = n turns into
111 // a function call that is passed a pointer to the temporary as the output space.
112 // If the call blocks before tmp has been written,
113 // the garbage collector will still treat the temporary as live,
114 // so we must zero it before entering that call.
115 // Today, this only happens for channel receive operations.
116 // (The other candidate would be map access, but map access
117 // returns a pointer to the result data instead of taking a pointer
119 func (o *orderState) copyExprClear(n ir.Node) *ir.Name {
120 return o.copyExpr1(n, true)
123 func (o *orderState) copyExpr1(n ir.Node, clear bool) *ir.Name {
125 v := o.newTemp(t, clear)
126 o.append(ir.NewAssignStmt(base.Pos, v, n))
130 // cheapExpr returns a cheap version of n.
131 // The definition of cheap is that n is a variable or constant.
132 // If not, cheapExpr allocates a new tmp, emits tmp = n,
133 // and then returns tmp.
134 func (o *orderState) cheapExpr(n ir.Node) ir.Node {
140 case ir.ONAME, ir.OLITERAL, ir.ONIL:
142 case ir.OLEN, ir.OCAP:
143 n := n.(*ir.UnaryExpr)
144 l := o.cheapExpr(n.X)
148 a := ir.SepCopy(n).(*ir.UnaryExpr)
150 return typecheck.Expr(a)
156 // safeExpr returns a safe version of n.
157 // The definition of safe is that n can appear multiple times
158 // without violating the semantics of the original program,
159 // and that assigning to the safe version has the same effect
160 // as assigning to the original n.
162 // The intended use is to apply to x when rewriting x += y into x = x + y.
163 func (o *orderState) safeExpr(n ir.Node) ir.Node {
165 case ir.ONAME, ir.OLITERAL, ir.ONIL:
168 case ir.OLEN, ir.OCAP:
169 n := n.(*ir.UnaryExpr)
174 a := ir.SepCopy(n).(*ir.UnaryExpr)
176 return typecheck.Expr(a)
179 n := n.(*ir.SelectorExpr)
184 a := ir.SepCopy(n).(*ir.SelectorExpr)
186 return typecheck.Expr(a)
189 n := n.(*ir.SelectorExpr)
190 l := o.cheapExpr(n.X)
194 a := ir.SepCopy(n).(*ir.SelectorExpr)
196 return typecheck.Expr(a)
199 n := n.(*ir.StarExpr)
200 l := o.cheapExpr(n.X)
204 a := ir.SepCopy(n).(*ir.StarExpr)
206 return typecheck.Expr(a)
208 case ir.OINDEX, ir.OINDEXMAP:
209 n := n.(*ir.IndexExpr)
211 if n.X.Type().IsArray() {
216 r := o.cheapExpr(n.Index)
217 if l == n.X && r == n.Index {
220 a := ir.SepCopy(n).(*ir.IndexExpr)
223 return typecheck.Expr(a)
226 base.Fatalf("order.safeExpr %v", n.Op())
227 return nil // not reached
231 // isaddrokay reports whether it is okay to pass n's address to runtime routines.
232 // Taking the address of a variable makes the liveness and optimization analyses
233 // lose track of where the variable's lifetime ends. To avoid hurting the analyses
234 // of ordinary stack variables, those are not 'isaddrokay'. Temporaries are okay,
235 // because we emit explicit VARKILL instructions marking the end of those
236 // temporaries' lifetimes.
237 func isaddrokay(n ir.Node) bool {
238 return ir.IsAddressable(n) && (n.Op() != ir.ONAME || n.(*ir.Name).Class == ir.PEXTERN || ir.IsAutoTmp(n))
241 // addrTemp ensures that n is okay to pass by address to runtime routines.
242 // If the original argument n is not okay, addrTemp creates a tmp, emits
243 // tmp = n, and then returns tmp.
244 // The result of addrTemp MUST be assigned back to n, e.g.
245 // n.Left = o.addrTemp(n.Left)
246 func (o *orderState) addrTemp(n ir.Node) ir.Node {
247 if n.Op() == ir.OLITERAL || n.Op() == ir.ONIL {
248 // TODO: expand this to all static composite literal nodes?
249 n = typecheck.DefaultLit(n, nil)
250 types.CalcSize(n.Type())
251 vstat := readonlystaticname(n.Type())
252 var s staticinit.Schedule
253 s.StaticAssign(vstat, 0, n, n.Type())
255 base.Fatalf("staticassign of const generated code: %+v", n)
257 vstat = typecheck.Expr(vstat).(*ir.Name)
266 // mapKeyTemp prepares n to be a key in a map runtime call and returns n.
267 // It should only be used for map runtime calls which have *_fast* versions.
268 func (o *orderState) mapKeyTemp(t *types.Type, n ir.Node) ir.Node {
269 // Most map calls need to take the address of the key.
270 // Exception: map*_fast* calls. See golang.org/issue/19015.
278 kt = types.Types[types.TUINT32]
280 kt = types.Types[types.TUINT64]
281 case mapfast32ptr, mapfast64ptr:
282 kt = types.Types[types.TUNSAFEPTR]
284 kt = types.Types[types.TSTRING]
290 case nt.Kind() == kt.Kind(), nt.IsPtrShaped() && kt.IsPtrShaped():
291 // can directly convert (e.g. named type to underlying type, or one pointer to another)
292 return typecheck.Expr(ir.NewConvExpr(n.Pos(), ir.OCONVNOP, kt, n))
293 case nt.IsInteger() && kt.IsInteger():
294 // can directly convert (e.g. int32 to uint32)
295 if n.Op() == ir.OLITERAL && nt.IsSigned() {
296 // avoid constant overflow error
297 n = ir.NewConstExpr(constant.MakeUint64(uint64(ir.Int64Val(n))), n)
301 return typecheck.Expr(ir.NewConvExpr(n.Pos(), ir.OCONV, kt, n))
303 // Unsafe cast through memory.
304 // We'll need to do a load with type kt. Create a temporary of type kt to
305 // ensure sufficient alignment. nt may be under-aligned.
306 if kt.Align < nt.Align {
307 base.Fatalf("mapKeyTemp: key type is not sufficiently aligned, kt=%v nt=%v", kt, nt)
309 tmp := o.newTemp(kt, true)
311 var e ir.Node = typecheck.NodAddr(tmp)
312 e = ir.NewConvExpr(n.Pos(), ir.OCONVNOP, nt.PtrTo(), e)
313 e = ir.NewStarExpr(n.Pos(), e)
314 o.append(ir.NewAssignStmt(base.Pos, e, n))
319 // mapKeyReplaceStrConv replaces OBYTES2STR by OBYTES2STRTMP
320 // in n to avoid string allocations for keys in map lookups.
321 // Returns a bool that signals if a modification was made.
325 // x = m[T1{... Tn{..., string(k), ...}]
326 // where k is []byte, T1 to Tn is a nesting of struct and array literals,
327 // the allocation of backing bytes for the string can be avoided
328 // by reusing the []byte backing array. These are special cases
329 // for avoiding allocations when converting byte slices to strings.
330 // It would be nice to handle these generally, but because
331 // []byte keys are not allowed in maps, the use of string(k)
332 // comes up in important cases in practice. See issue 3512.
333 func mapKeyReplaceStrConv(n ir.Node) bool {
337 n := n.(*ir.ConvExpr)
338 n.SetOp(ir.OBYTES2STRTMP)
341 n := n.(*ir.CompLitExpr)
342 for _, elem := range n.List {
343 elem := elem.(*ir.StructKeyExpr)
344 if mapKeyReplaceStrConv(elem.Value) {
349 n := n.(*ir.CompLitExpr)
350 for _, elem := range n.List {
351 if elem.Op() == ir.OKEY {
352 elem = elem.(*ir.KeyExpr).Value
354 if mapKeyReplaceStrConv(elem) {
364 // markTemp returns the top of the temporary variable stack.
365 func (o *orderState) markTemp() ordermarker {
366 return ordermarker(len(o.temp))
369 // popTemp pops temporaries off the stack until reaching the mark,
370 // which must have been returned by markTemp.
371 func (o *orderState) popTemp(mark ordermarker) {
372 for _, n := range o.temp[mark:] {
373 key := n.Type().LongString()
374 o.free[key] = append(o.free[key], n)
376 o.temp = o.temp[:mark]
379 // cleanTempNoPop emits VARKILL instructions to *out
380 // for each temporary above the mark on the temporary stack.
381 // It does not pop the temporaries from the stack.
382 func (o *orderState) cleanTempNoPop(mark ordermarker) []ir.Node {
384 for i := len(o.temp) - 1; i >= int(mark); i-- {
386 out = append(out, typecheck.Stmt(ir.NewUnaryExpr(base.Pos, ir.OVARKILL, n)))
391 // cleanTemp emits VARKILL instructions for each temporary above the
392 // mark on the temporary stack and removes them from the stack.
393 func (o *orderState) cleanTemp(top ordermarker) {
394 o.out = append(o.out, o.cleanTempNoPop(top)...)
398 // stmtList orders each of the statements in the list.
399 func (o *orderState) stmtList(l ir.Nodes) {
402 orderMakeSliceCopy(s[i:])
407 // orderMakeSliceCopy matches the pattern:
408 // m = OMAKESLICE([]T, x); OCOPY(m, s)
409 // and rewrites it to:
410 // m = OMAKESLICECOPY([]T, x, s); nil
411 func orderMakeSliceCopy(s []ir.Node) {
412 if base.Flag.N != 0 || base.Flag.Cfg.Instrumenting {
415 if len(s) < 2 || s[0] == nil || s[0].Op() != ir.OAS || s[1] == nil || s[1].Op() != ir.OCOPY {
419 as := s[0].(*ir.AssignStmt)
420 cp := s[1].(*ir.BinaryExpr)
421 if as.Y == nil || as.Y.Op() != ir.OMAKESLICE || ir.IsBlank(as.X) ||
422 as.X.Op() != ir.ONAME || cp.X.Op() != ir.ONAME || cp.Y.Op() != ir.ONAME ||
423 as.X.Name() != cp.X.Name() || cp.X.Name() == cp.Y.Name() {
424 // The line above this one is correct with the differing equality operators:
425 // we want as.X and cp.X to be the same name,
426 // but we want the initial data to be coming from a different name.
430 mk := as.Y.(*ir.MakeExpr)
431 if mk.Esc() == ir.EscNone || mk.Len == nil || mk.Cap != nil {
434 mk.SetOp(ir.OMAKESLICECOPY)
436 // Set bounded when m = OMAKESLICE([]T, len(s)); OCOPY(m, s)
437 mk.SetBounded(mk.Len.Op() == ir.OLEN && ir.SameSafeExpr(mk.Len.(*ir.UnaryExpr).X, cp.Y))
438 as.Y = typecheck.Expr(mk)
439 s[1] = nil // remove separate copy call
442 // edge inserts coverage instrumentation for libfuzzer.
443 func (o *orderState) edge() {
444 if base.Debug.Libfuzzer == 0 {
448 // Create a new uint8 counter to be allocated in section
449 // __libfuzzer_extra_counters.
450 counter := staticinit.StaticName(types.Types[types.TUINT8])
451 counter.SetLibfuzzerExtraCounter(true)
454 incr := ir.NewAssignOpStmt(base.Pos, ir.OADD, counter, ir.NewInt(1))
458 // orderBlock orders the block of statements in n into a new slice,
459 // and then replaces the old slice in n with the new slice.
460 // free is a map that can be used to obtain temporary variables by type.
461 func orderBlock(n *ir.Nodes, free map[string][]*ir.Name) {
464 mark := order.markTemp()
467 order.cleanTemp(mark)
471 // exprInPlace orders the side effects in *np and
472 // leaves them as the init list of the final *np.
473 // The result of exprInPlace MUST be assigned back to n, e.g.
474 // n.Left = o.exprInPlace(n.Left)
475 func (o *orderState) exprInPlace(n ir.Node) ir.Node {
478 n = order.expr(n, nil)
479 n = ir.InitExpr(order.out, n)
481 // insert new temporaries from order
482 // at head of outer list.
483 o.temp = append(o.temp, order.temp...)
487 // orderStmtInPlace orders the side effects of the single statement *np
488 // and replaces it with the resulting statement list.
489 // The result of orderStmtInPlace MUST be assigned back to n, e.g.
490 // n.Left = orderStmtInPlace(n.Left)
491 // free is a map that can be used to obtain temporary variables by type.
492 func orderStmtInPlace(n ir.Node, free map[string][]*ir.Name) ir.Node {
495 mark := order.markTemp()
497 order.cleanTemp(mark)
498 return ir.NewBlockStmt(src.NoXPos, order.out)
501 // init moves n's init list to o.out.
502 func (o *orderState) init(n ir.Node) {
503 if ir.MayBeShared(n) {
504 // For concurrency safety, don't mutate potentially shared nodes.
505 // First, ensure that no work is required here.
506 if len(n.Init()) > 0 {
507 base.Fatalf("order.init shared node with ninit")
511 o.stmtList(ir.TakeInit(n))
514 // call orders the call expression n.
515 // n.Op is OCALLMETH/OCALLFUNC/OCALLINTER or a builtin like OCOPY.
516 func (o *orderState) call(nn ir.Node) {
517 if len(nn.Init()) > 0 {
518 // Caller should have already called o.init(nn).
519 base.Fatalf("%v with unexpected ninit", nn.Op())
522 // Builtin functions.
523 if nn.Op() != ir.OCALLFUNC && nn.Op() != ir.OCALLMETH && nn.Op() != ir.OCALLINTER {
524 switch n := nn.(type) {
526 base.Fatalf("unexpected call: %+v", n)
528 n.X = o.expr(n.X, nil)
530 n.X = o.expr(n.X, nil)
532 n.X = o.expr(n.X, nil)
533 n.Y = o.expr(n.Y, nil)
535 n.Len = o.expr(n.Len, nil)
536 n.Cap = o.expr(n.Cap, nil)
543 n := nn.(*ir.CallExpr)
544 typecheck.FixVariadicCall(n)
546 if isFuncPCIntrinsic(n) && isIfaceOfFunc(n.Args[0]) {
547 // For internal/abi.FuncPCABIxxx(fn), if fn is a defined function,
548 // do not introduce temporaries here, so it is easier to rewrite it
549 // to symbol address reference later in walk.
553 n.X = o.expr(n.X, nil)
556 // Pick out the function callee, if statically known.
557 // TODO(mdempsky): De-duplicate with similar code in escape analysis.
561 if fn, ok := n.X.(*ir.Name); ok && fn.Op() == ir.ONAME && fn.Class == ir.PFUNC {
565 callee = ir.MethodExprName(n.X).Func
568 if callee == nil || callee.Pragma&ir.UintptrKeepAlive == 0 {
572 keepAlive := func(args []ir.Node) {
573 // If the argument is really a pointer being converted to uintptr,
574 // arrange for the pointer to be kept alive until the call returns,
575 // by copying it into a temp and marking that temp
576 // still alive when we pop the temp stack.
577 for _, arg := range args {
578 if arg.Op() == ir.OCONVNOP && arg.Type().IsUintptr() {
579 arg := arg.(*ir.ConvExpr)
580 if arg.X.Type().IsUnsafePtr() {
581 x := o.copyExpr(arg.X)
583 x.SetAddrtaken(true) // ensure SSA keeps the x variable
584 n.KeepAlive = append(n.KeepAlive, x)
590 last := len(n.Args) - 1
591 if n.IsDDD && n.Args[last].Op() == ir.OSLICELIT {
592 keepAlive(n.Args[:last])
593 keepAlive(n.Args[last].(*ir.CompLitExpr).List)
599 // mapAssign appends n to o.out.
600 func (o *orderState) mapAssign(n ir.Node) {
603 base.Fatalf("order.mapAssign %v", n.Op())
606 n := n.(*ir.AssignStmt)
607 if n.X.Op() == ir.OINDEXMAP {
608 n.Y = o.safeMapRHS(n.Y)
610 o.out = append(o.out, n)
612 n := n.(*ir.AssignOpStmt)
613 if n.X.Op() == ir.OINDEXMAP {
614 n.Y = o.safeMapRHS(n.Y)
616 o.out = append(o.out, n)
620 func (o *orderState) safeMapRHS(r ir.Node) ir.Node {
621 // Make sure we evaluate the RHS before starting the map insert.
622 // We need to make sure the RHS won't panic. See issue 22881.
623 if r.Op() == ir.OAPPEND {
624 r := r.(*ir.CallExpr)
626 for i, n := range s {
627 s[i] = o.cheapExpr(n)
631 return o.cheapExpr(r)
634 // stmt orders the statement n, appending to o.out.
635 // Temporaries created during the statement are cleaned
636 // up using VARKILL instructions as possible.
637 func (o *orderState) stmt(n ir.Node) {
647 base.Fatalf("order.stmt %v", n.Op())
649 case ir.OVARKILL, ir.OVARLIVE, ir.OINLMARK:
650 o.out = append(o.out, n)
653 n := n.(*ir.AssignStmt)
655 n.X = o.expr(n.X, nil)
656 n.Y = o.expr(n.Y, n.X)
661 n := n.(*ir.AssignOpStmt)
663 n.X = o.expr(n.X, nil)
664 n.Y = o.expr(n.Y, nil)
666 if base.Flag.Cfg.Instrumenting || n.X.Op() == ir.OINDEXMAP && (n.AsOp == ir.ODIV || n.AsOp == ir.OMOD) {
667 // Rewrite m[k] op= r into m[k] = m[k] op r so
668 // that we can ensure that if op panics
669 // because r is zero, the panic happens before
670 // the map assignment.
671 // DeepCopy is a big hammer here, but safeExpr
672 // makes sure there is nothing too deep being copied.
673 l1 := o.safeExpr(n.X)
674 l2 := ir.DeepCopy(src.NoXPos, l1)
675 if l2.Op() == ir.OINDEXMAP {
676 l2 := l2.(*ir.IndexExpr)
680 r := o.expr(typecheck.Expr(ir.NewBinaryExpr(n.Pos(), n.AsOp, l2, n.Y)), nil)
681 as := typecheck.Stmt(ir.NewAssignStmt(n.Pos(), l1, r))
691 n := n.(*ir.AssignListStmt)
695 o.out = append(o.out, n)
698 // Special: avoid copy of func call n.Right
700 n := n.(*ir.AssignListStmt)
708 // Special: use temporary variables to hold result,
709 // so that runtime can take address of temporary.
710 // No temporary for blank assignment.
712 // OAS2MAPR: make sure key is addressable if needed,
713 // and make sure OINDEXMAP is not copied out.
714 case ir.OAS2DOTTYPE, ir.OAS2RECV, ir.OAS2MAPR:
715 n := n.(*ir.AssignListStmt)
719 switch r := n.Rhs[0]; r.Op() {
721 r := r.(*ir.TypeAssertExpr)
722 r.X = o.expr(r.X, nil)
724 r := r.(*ir.UnaryExpr)
725 r.X = o.expr(r.X, nil)
727 r := r.(*ir.IndexExpr)
728 r.X = o.expr(r.X, nil)
729 r.Index = o.expr(r.Index, nil)
730 // See similar conversion for OINDEXMAP below.
731 _ = mapKeyReplaceStrConv(r.Index)
732 r.Index = o.mapKeyTemp(r.X.Type(), r.Index)
734 base.Fatalf("order.stmt: %v", r.Op())
740 // Special: does not save n onto out.
742 n := n.(*ir.BlockStmt)
745 // Special: n->left is not an expression; save as is.
755 o.out = append(o.out, n)
757 // Special: handle call arguments.
758 case ir.OCALLFUNC, ir.OCALLINTER, ir.OCALLMETH:
759 n := n.(*ir.CallExpr)
762 o.out = append(o.out, n)
765 case ir.OCHECKNIL, ir.OCLOSE, ir.OPANIC, ir.ORECV:
766 n := n.(*ir.UnaryExpr)
768 n.X = o.expr(n.X, nil)
769 o.out = append(o.out, n)
773 n := n.(*ir.BinaryExpr)
775 n.X = o.expr(n.X, nil)
776 n.Y = o.expr(n.Y, nil)
777 o.out = append(o.out, n)
780 case ir.OPRINT, ir.OPRINTN, ir.ORECOVER:
781 n := n.(*ir.CallExpr)
784 o.out = append(o.out, n)
787 // Special: order arguments to inner call but not call itself.
788 case ir.ODEFER, ir.OGO:
789 n := n.(*ir.GoDeferStmt)
793 if n.Call.Op() == ir.ORECOVER {
794 // Special handling of "defer recover()". We need to evaluate the FP
795 // argument before wrapping.
797 n.Call = walkRecover(n.Call.(*ir.CallExpr), &init)
801 o.out = append(o.out, n)
805 n := n.(*ir.CallExpr)
807 n.Args[0] = o.expr(n.Args[0], nil)
808 n.Args[1] = o.expr(n.Args[1], nil)
809 n.Args[1] = o.mapKeyTemp(n.Args[0].Type(), n.Args[1])
810 o.out = append(o.out, n)
813 // Clean temporaries from condition evaluation at
814 // beginning of loop body and after for statement.
818 n.Cond = o.exprInPlace(n.Cond)
819 n.Body.Prepend(o.cleanTempNoPop(t)...)
820 orderBlock(&n.Body, o.free)
821 n.Post = orderStmtInPlace(n.Post, o.free)
822 o.out = append(o.out, n)
825 // Clean temporaries from condition at
826 // beginning of both branches.
830 n.Cond = o.exprInPlace(n.Cond)
831 n.Body.Prepend(o.cleanTempNoPop(t)...)
832 n.Else.Prepend(o.cleanTempNoPop(t)...)
834 orderBlock(&n.Body, o.free)
835 orderBlock(&n.Else, o.free)
836 o.out = append(o.out, n)
839 // n.Right is the expression being ranged over.
840 // order it, and then make a copy if we need one.
841 // We almost always do, to ensure that we don't
842 // see any value changes made during the loop.
843 // Usually the copy is cheap (e.g., array pointer,
844 // chan, slice, string are all tiny).
845 // The exception is ranging over an array value
846 // (not a slice, not a pointer to array),
847 // which must make a copy to avoid seeing updates made during
848 // the range body. Ranging over an array value is uncommon though.
850 // Mark []byte(str) range expression to reuse string backing storage.
851 // It is safe because the storage cannot be mutated.
852 n := n.(*ir.RangeStmt)
853 if n.X.Op() == ir.OSTR2BYTES {
854 n.X.(*ir.ConvExpr).SetOp(ir.OSTR2BYTESTMP)
858 n.X = o.expr(n.X, nil)
861 xt := typecheck.RangeExprType(n.X.Type())
864 base.Fatalf("order.stmt range %v", n.Type())
866 case types.TARRAY, types.TSLICE:
867 if n.Value == nil || ir.IsBlank(n.Value) {
868 // for i := range x will only use x once, to compute len(x).
869 // No need to copy it.
874 case types.TCHAN, types.TSTRING:
875 // chan, string, slice, array ranges use value multiple times.
879 if r.Type().IsString() && r.Type() != types.Types[types.TSTRING] {
880 r = ir.NewConvExpr(base.Pos, ir.OCONV, nil, r)
881 r.SetType(types.Types[types.TSTRING])
882 r = typecheck.Expr(r)
889 // Preserve the body of the map clear pattern so it can
890 // be detected during walk. The loop body will not be used
891 // when optimizing away the range loop to a runtime call.
896 // copy the map value in case it is a map literal.
897 // TODO(rsc): Make tmp = literal expressions reuse tmp.
898 // For maps tmp is just one word so it hardly matters.
902 // n.Prealloc is the temp for the iterator.
903 // MapIterType contains pointers and needs to be zeroed.
904 n.Prealloc = o.newTemp(reflectdata.MapIterType(xt), true)
906 n.Key = o.exprInPlace(n.Key)
907 n.Value = o.exprInPlace(n.Value)
909 orderBlock(&n.Body, o.free)
911 o.out = append(o.out, n)
915 n := n.(*ir.ReturnStmt)
916 o.exprList(n.Results)
917 o.out = append(o.out, n)
919 // Special: clean case temporaries in each block entry.
920 // Select must enter one of its blocks, so there is no
921 // need for a cleaning at the end.
922 // Doubly special: evaluation order for select is stricter
923 // than ordinary expressions. Even something like p.c
924 // has to be hoisted into a temporary, so that it cannot be
925 // reordered after the channel evaluation for a different
926 // case (if p were nil, then the timing of the fault would
929 n := n.(*ir.SelectStmt)
931 for _, ncas := range n.Cases {
935 // Append any new body prologue to ninit.
936 // The next loop will insert ninit into nbody.
937 if len(ncas.Init()) != 0 {
938 base.Fatalf("order select ninit")
945 ir.Dump("select case", r)
946 base.Fatalf("unknown op in select %v", r.Op())
950 r := r.(*ir.AssignListStmt)
951 recv := r.Rhs[0].(*ir.UnaryExpr)
952 recv.X = o.expr(recv.X, nil)
953 if !ir.IsAutoTmp(recv.X) {
954 recv.X = o.copyExpr(recv.X)
956 init := ir.TakeInit(r)
959 do := func(i int, t *types.Type) {
964 // If this is case x := <-ch or case x, y := <-ch, the case has
965 // the ODCL nodes to declare x and y. We want to delay that
966 // declaration (and possible allocation) until inside the case body.
967 // Delete the ODCL nodes here and recreate them inside the body below.
969 if len(init) > 0 && init[0].Op() == ir.ODCL && init[0].(*ir.Decl).X == n {
972 dcl := typecheck.Stmt(ir.NewDecl(base.Pos, ir.ODCL, n.(*ir.Name)))
973 ncas.PtrInit().Append(dcl)
975 tmp := o.newTemp(t, t.HasPointers())
976 as := typecheck.Stmt(ir.NewAssignStmt(base.Pos, n, typecheck.Conv(tmp, n.Type())))
977 ncas.PtrInit().Append(as)
980 do(0, recv.X.Type().Elem())
981 do(1, types.Types[types.TBOOL])
983 ir.DumpList("ninit", r.Init())
984 base.Fatalf("ninit on select recv")
986 orderBlock(ncas.PtrInit(), o.free)
989 r := r.(*ir.SendStmt)
990 if len(r.Init()) != 0 {
991 ir.DumpList("ninit", r.Init())
992 base.Fatalf("ninit on select send")
996 // r->left is c, r->right is x, both are always evaluated.
997 r.Chan = o.expr(r.Chan, nil)
999 if !ir.IsAutoTmp(r.Chan) {
1000 r.Chan = o.copyExpr(r.Chan)
1002 r.Value = o.expr(r.Value, nil)
1003 if !ir.IsAutoTmp(r.Value) {
1004 r.Value = o.copyExpr(r.Value)
1008 // Now that we have accumulated all the temporaries, clean them.
1009 // Also insert any ninit queued during the previous loop.
1010 // (The temporary cleaning must follow that ninit work.)
1011 for _, cas := range n.Cases {
1012 orderBlock(&cas.Body, o.free)
1013 cas.Body.Prepend(o.cleanTempNoPop(t)...)
1015 // TODO(mdempsky): Is this actually necessary?
1016 // walkSelect appears to walk Ninit.
1017 cas.Body.Prepend(ir.TakeInit(cas)...)
1020 o.out = append(o.out, n)
1023 // Special: value being sent is passed as a pointer; make it addressable.
1025 n := n.(*ir.SendStmt)
1027 n.Chan = o.expr(n.Chan, nil)
1028 n.Value = o.expr(n.Value, nil)
1029 if base.Flag.Cfg.Instrumenting {
1030 // Force copying to the stack so that (chan T)(nil) <- x
1031 // is still instrumented as a read of x.
1032 n.Value = o.copyExpr(n.Value)
1034 n.Value = o.addrTemp(n.Value)
1036 o.out = append(o.out, n)
1039 // TODO(rsc): Clean temporaries more aggressively.
1040 // Note that because walkSwitch will rewrite some of the
1041 // switch into a binary search, this is not as easy as it looks.
1042 // (If we ran that code here we could invoke order.stmt on
1043 // the if-else chain instead.)
1044 // For now just clean all the temporaries at the end.
1045 // In practice that's fine.
1047 n := n.(*ir.SwitchStmt)
1048 if base.Debug.Libfuzzer != 0 && !hasDefaultCase(n) {
1049 // Add empty "default:" case for instrumentation.
1050 n.Cases = append(n.Cases, ir.NewCaseStmt(base.Pos, nil, nil))
1054 n.Tag = o.expr(n.Tag, nil)
1055 for _, ncas := range n.Cases {
1056 o.exprListInPlace(ncas.List)
1057 orderBlock(&ncas.Body, o.free)
1060 o.out = append(o.out, n)
1067 func hasDefaultCase(n *ir.SwitchStmt) bool {
1068 for _, ncas := range n.Cases {
1069 if len(ncas.List) == 0 {
1076 // exprList orders the expression list l into o.
1077 func (o *orderState) exprList(l ir.Nodes) {
1080 s[i] = o.expr(s[i], nil)
1084 // exprListInPlace orders the expression list l but saves
1085 // the side effects on the individual expression ninit lists.
1086 func (o *orderState) exprListInPlace(l ir.Nodes) {
1089 s[i] = o.exprInPlace(s[i])
1093 func (o *orderState) exprNoLHS(n ir.Node) ir.Node {
1094 return o.expr(n, nil)
1097 // expr orders a single expression, appending side
1098 // effects to o.out as needed.
1099 // If this is part of an assignment lhs = *np, lhs is given.
1100 // Otherwise lhs == nil. (When lhs != nil it may be possible
1101 // to avoid copying the result of the expression to a temporary.)
1102 // The result of expr MUST be assigned back to n, e.g.
1103 // n.Left = o.expr(n.Left, lhs)
1104 func (o *orderState) expr(n, lhs ir.Node) ir.Node {
1114 func (o *orderState) expr1(n, lhs ir.Node) ir.Node {
1120 o.edit = o.exprNoLHS // create closure once
1122 ir.EditChildren(n, o.edit)
1125 // Addition of strings turns into a function call.
1126 // Allocate a temporary to hold the strings.
1127 // Fewer than 5 strings use direct runtime helpers.
1129 n := n.(*ir.AddStringExpr)
1132 if len(n.List) > 5 {
1133 t := types.NewArray(types.Types[types.TSTRING], int64(len(n.List)))
1134 n.Prealloc = o.newTemp(t, false)
1137 // Mark string(byteSlice) arguments to reuse byteSlice backing
1138 // buffer during conversion. String concatenation does not
1139 // memorize the strings for later use, so it is safe.
1140 // However, we can do it only if there is at least one non-empty string literal.
1141 // Otherwise if all other arguments are empty strings,
1142 // concatstrings will return the reference to the temp string
1147 for _, n1 := range n.List {
1148 hasbyte = hasbyte || n1.Op() == ir.OBYTES2STR
1149 haslit = haslit || n1.Op() == ir.OLITERAL && len(ir.StringVal(n1)) != 0
1152 if haslit && hasbyte {
1153 for _, n2 := range n.List {
1154 if n2.Op() == ir.OBYTES2STR {
1155 n2 := n2.(*ir.ConvExpr)
1156 n2.SetOp(ir.OBYTES2STRTMP)
1163 n := n.(*ir.IndexExpr)
1164 n.X = o.expr(n.X, nil)
1165 n.Index = o.expr(n.Index, nil)
1169 // Enforce that any []byte slices we are not copying
1170 // can not be changed before the map index by forcing
1171 // the map index to happen immediately following the
1172 // conversions. See copyExpr a few lines below.
1173 needCopy = mapKeyReplaceStrConv(n.Index)
1175 if base.Flag.Cfg.Instrumenting {
1176 // Race detector needs the copy.
1181 // key must be addressable
1182 n.Index = o.mapKeyTemp(n.X.Type(), n.Index)
1184 return o.copyExpr(n)
1188 // concrete type (not interface) argument might need an addressable
1189 // temporary to pass to the runtime conversion routine.
1191 n := n.(*ir.ConvExpr)
1192 n.X = o.expr(n.X, nil)
1193 if n.X.Type().IsInterface() {
1196 if _, _, needsaddr := convFuncName(n.X.Type(), n.Type()); needsaddr || isStaticCompositeLiteral(n.X) {
1197 // Need a temp if we need to pass the address to the conversion function.
1198 // We also process static composite literal node here, making a named static global
1199 // whose address we can put directly in an interface (see OCONVIFACE case in walk).
1200 n.X = o.addrTemp(n.X)
1205 n := n.(*ir.ConvExpr)
1206 if n.Type().IsKind(types.TUNSAFEPTR) && n.X.Type().IsKind(types.TUINTPTR) && (n.X.Op() == ir.OCALLFUNC || n.X.Op() == ir.OCALLINTER || n.X.Op() == ir.OCALLMETH) {
1207 call := n.X.(*ir.CallExpr)
1208 // When reordering unsafe.Pointer(f()) into a separate
1209 // statement, the conversion and function call must stay
1210 // together. See golang.org/issue/15329.
1213 if lhs == nil || lhs.Op() != ir.ONAME || base.Flag.Cfg.Instrumenting {
1214 return o.copyExpr(n)
1217 n.X = o.expr(n.X, nil)
1221 case ir.OANDAND, ir.OOROR:
1226 // if r { // or !r, for OROR
1231 n := n.(*ir.LogicalExpr)
1232 r := o.newTemp(n.Type(), false)
1234 // Evaluate left-hand side.
1235 lhs := o.expr(n.X, nil)
1236 o.out = append(o.out, typecheck.Stmt(ir.NewAssignStmt(base.Pos, r, lhs)))
1238 // Evaluate right-hand side, save generated code.
1243 rhs := o.expr(n.Y, nil)
1244 o.out = append(o.out, typecheck.Stmt(ir.NewAssignStmt(base.Pos, r, rhs)))
1249 // If left-hand side doesn't cause a short-circuit, issue right-hand side.
1250 nif := ir.NewIfStmt(base.Pos, r, nil, nil)
1251 if n.Op() == ir.OANDAND {
1256 o.out = append(o.out, nif)
1279 // len([]rune(s)) is rewritten to runtime.countrunes(s) later.
1280 conv := n.(*ir.UnaryExpr).X.(*ir.ConvExpr)
1281 conv.X = o.expr(conv.X, nil)
1286 if lhs == nil || lhs.Op() != ir.ONAME || base.Flag.Cfg.Instrumenting {
1287 return o.copyExpr(n)
1292 // Check for append(x, make([]T, y)...) .
1293 n := n.(*ir.CallExpr)
1294 if isAppendOfMake(n) {
1295 n.Args[0] = o.expr(n.Args[0], nil) // order x
1296 mk := n.Args[1].(*ir.MakeExpr)
1297 mk.Len = o.expr(mk.Len, nil) // order y
1302 if lhs == nil || lhs.Op() != ir.ONAME && !ir.SameSafeExpr(lhs, n.Args[0]) {
1303 return o.copyExpr(n)
1307 case ir.OSLICE, ir.OSLICEARR, ir.OSLICESTR, ir.OSLICE3, ir.OSLICE3ARR:
1308 n := n.(*ir.SliceExpr)
1309 n.X = o.expr(n.X, nil)
1310 n.Low = o.cheapExpr(o.expr(n.Low, nil))
1311 n.High = o.cheapExpr(o.expr(n.High, nil))
1312 n.Max = o.cheapExpr(o.expr(n.Max, nil))
1313 if lhs == nil || lhs.Op() != ir.ONAME && !ir.SameSafeExpr(lhs, n.X) {
1314 return o.copyExpr(n)
1319 n := n.(*ir.ClosureExpr)
1320 if n.Transient() && len(n.Func.ClosureVars) > 0 {
1321 n.Prealloc = o.newTemp(typecheck.ClosureType(n), false)
1326 n := n.(*ir.SelectorExpr)
1327 n.X = o.expr(n.X, nil)
1329 t := typecheck.PartialCallType(n)
1330 n.Prealloc = o.newTemp(t, false)
1335 n := n.(*ir.CompLitExpr)
1338 t := types.NewArray(n.Type().Elem(), n.Len)
1339 n.Prealloc = o.newTemp(t, false)
1343 case ir.ODOTTYPE, ir.ODOTTYPE2:
1344 n := n.(*ir.TypeAssertExpr)
1345 n.X = o.expr(n.X, nil)
1346 if !types.IsDirectIface(n.Type()) || base.Flag.Cfg.Instrumenting {
1347 return o.copyExprClear(n)
1352 n := n.(*ir.UnaryExpr)
1353 n.X = o.expr(n.X, nil)
1354 return o.copyExprClear(n)
1356 case ir.OEQ, ir.ONE, ir.OLT, ir.OLE, ir.OGT, ir.OGE:
1357 n := n.(*ir.BinaryExpr)
1358 n.X = o.expr(n.X, nil)
1359 n.Y = o.expr(n.Y, nil)
1364 // Mark string(byteSlice) arguments to reuse byteSlice backing
1365 // buffer during conversion. String comparison does not
1366 // memorize the strings for later use, so it is safe.
1367 if n.X.Op() == ir.OBYTES2STR {
1368 n.X.(*ir.ConvExpr).SetOp(ir.OBYTES2STRTMP)
1370 if n.Y.Op() == ir.OBYTES2STR {
1371 n.Y.(*ir.ConvExpr).SetOp(ir.OBYTES2STRTMP)
1374 case t.IsStruct() || t.IsArray():
1375 // for complex comparisons, we need both args to be
1376 // addressable so we can pass them to the runtime.
1377 n.X = o.addrTemp(n.X)
1378 n.Y = o.addrTemp(n.Y)
1383 // Order map by converting:
1390 // m := map[int]int{}
1394 // Then order the result.
1395 // Without this special case, order would otherwise compute all
1396 // the keys and values before storing any of them to the map.
1398 n := n.(*ir.CompLitExpr)
1400 statics := entries[:0]
1401 var dynamics []*ir.KeyExpr
1402 for _, r := range entries {
1403 r := r.(*ir.KeyExpr)
1405 if !isStaticCompositeLiteral(r.Key) || !isStaticCompositeLiteral(r.Value) {
1406 dynamics = append(dynamics, r)
1410 // Recursively ordering some static entries can change them to dynamic;
1411 // e.g., OCONVIFACE nodes. See #31777.
1412 r = o.expr(r, nil).(*ir.KeyExpr)
1413 if !isStaticCompositeLiteral(r.Key) || !isStaticCompositeLiteral(r.Value) {
1414 dynamics = append(dynamics, r)
1418 statics = append(statics, r)
1422 if len(dynamics) == 0 {
1426 // Emit the creation of the map (with all its static entries).
1427 m := o.newTemp(n.Type(), false)
1428 as := ir.NewAssignStmt(base.Pos, m, n)
1432 // Emit eval+insert of dynamic entries, one at a time.
1433 for _, r := range dynamics {
1434 as := ir.NewAssignStmt(base.Pos, ir.NewIndexExpr(base.Pos, m, r.Key), r.Value)
1435 typecheck.Stmt(as) // Note: this converts the OINDEX to an OINDEXMAP
1441 // No return - type-assertions above. Each case must return for itself.
1444 // as2func orders OAS2FUNC nodes. It creates temporaries to ensure left-to-right assignment.
1445 // The caller should order the right-hand side of the assignment before calling order.as2func.
1449 // tmp1, tmp2, tmp3 = ...
1450 // a, b, a = tmp1, tmp2, tmp3
1451 // This is necessary to ensure left to right assignment order.
1452 func (o *orderState) as2func(n *ir.AssignListStmt) {
// The call's result tuple (Rhs[0] is the multi-result call) supplies the
// type of each temporary.
1453 results := n.Rhs[0].Type()
// "as" becomes the second assignment: real destinations = temporaries.
1454 as := ir.NewAssignListStmt(n.Pos(), ir.OAS2, nil, nil)
1455 for i, nl := range n.Lhs {
// Blank destinations need no temporary; they can absorb the call
// result directly without affecting evaluation order.
1456 if !ir.IsBlank(nl) {
1457 typ := results.Field(i).Type
// Pointer-carrying temps must be tracked for GC (second arg).
1458 tmp := o.newTemp(typ, typ.HasPointers())
1460 as.Lhs = append(as.Lhs, nl)
1461 as.Rhs = append(as.Rhs, tmp)
// Emit the (now temp-targeted) call assignment first, then the
// temp -> destination copies, which themselves get ordered.
1465 o.out = append(o.out, n)
1466 o.stmt(typecheck.Stmt(as))
1469 // as2ok orders OAS2XXX with ok.
1470 // Just like as2func, this also adds temporaries to ensure left-to-right assignment.
1471 func (o *orderState) as2ok(n *ir.AssignListStmt) {
// "as" is the follow-up assignment copying the temporaries into the
// real destinations after the OAS2XXX itself runs.
1472 as := ir.NewAssignListStmt(n.Pos(), ir.OAS2, nil, nil)
// do handles one destination (i = 0 for the value, i = 1 for ok),
// skipping blanks just like as2func.
1474 do := func(i int, typ *types.Type) {
1475 if nl := n.Lhs[i]; !ir.IsBlank(nl) {
1476 var tmp ir.Node = o.newTemp(typ, typ.HasPointers())
1478 as.Lhs = append(as.Lhs, nl)
1480 // The "ok" result is an untyped boolean according to the Go
1481 // spec. We need to explicitly convert it to the LHS type in
1482 // case the latter is a defined boolean type (#8475).
1483 tmp = typecheck.Conv(tmp, nl.Type())
1485 as.Rhs = append(as.Rhs, tmp)
// First result keeps the expression's type; the ok result is always
// a plain bool temporary.
1489 do(0, n.Rhs[0].Type())
1490 do(1, types.Types[types.TBOOL])
// Emit the temp-targeted OAS2XXX, then order the copies to the
// real destinations.
1492 o.out = append(o.out, n)
1493 o.stmt(typecheck.Stmt(as))
// NOTE(review): not referenced anywhere in the code visible here;
// presumably a generation counter for symbols created by wrapGoDefer —
// confirm its use elsewhere in the file before removing.
1496 var wrapGoDefer_prgen int
1498 // wrapGoDefer wraps the target of a "go" or "defer" statement with a
1499 // new "function with no arguments" closure. Specifically, it converts
1506 // defer func() { f(x1, y1) }()
1508 // This is primarily to enable a quicker bringup of defers under the
1509 // new register ABI; by doing this conversion, we can simplify the
1510 // code in the runtime that invokes defers on the panic path.
1511 func (o *orderState) wrapGoDefer(n *ir.GoDeferStmt) {
1514 var callX ir.Node // thing being called
1515 var callArgs []ir.Node // call arguments
1516 var keepAlive []*ir.Name // KeepAlive list from call, if present
1518 // A helper to recreate the call within the closure.
1519 var mkNewCall func(pos src.XPos, op ir.Op, fun ir.Node, args []ir.Node) ir.Node
1521 // Defer calls come in many shapes and sizes; not all of them
1522 // are ir.CallExpr's. Examine the type to see what we're dealing with.
1523 switch x := call.(type) {
// keepAlive carries names that must stay live across the call
// (uintptr(unsafe.Pointer(...)) arguments); see getUnsafeArg below.
1527 keepAlive = x.KeepAlive
// Preserve the ...-spread flag when the call is rebuilt inside the wrapper.
1528 mkNewCall = func(pos src.XPos, op ir.Op, fun ir.Node, args []ir.Node) ir.Node {
1529 newcall := ir.NewCallExpr(pos, op, fun, args)
1530 newcall.IsDDD = x.IsDDD
1531 return ir.Node(newcall)
1533 case *ir.UnaryExpr: // ex: OCLOSE
1534 callArgs = []ir.Node{x.X}
1535 mkNewCall = func(pos src.XPos, op ir.Op, fun ir.Node, args []ir.Node) ir.Node {
1537 panic("internal error, expecting single arg")
1539 return ir.Node(ir.NewUnaryExpr(pos, op, args[0]))
1541 case *ir.BinaryExpr: // ex: OCOPY
1542 callArgs = []ir.Node{x.X, x.Y}
1543 mkNewCall = func(pos src.XPos, op ir.Op, fun ir.Node, args []ir.Node) ir.Node {
1545 panic("internal error, expecting two args")
1547 return ir.Node(ir.NewBinaryExpr(pos, op, args[0], args[1]))
1550 panic("unhandled op")
1553 // No need to wrap if called func has no args, no receiver, and no results.
1554 // However in the case of "defer func() { ... }()" we need to
1555 // protect against the possibility of directClosureCall rewriting
1556 // things so that the call does have arguments.
1558 // Do wrap method calls (OCALLMETH, OCALLINTER), because it has
1561 // Also do wrap builtin functions, because they may be expanded to
1562 // calls with arguments (e.g. ORECOVER).
1564 // TODO: maybe not wrap if the called function has no arguments and
1565 // only in-register results?
1566 if len(callArgs) == 0 && call.Op() == ir.OCALLFUNC && callX.Type().NumResults() == 0 {
1567 if callX.Op() == ir.OCLOSURE {
1568 clo := callX.(*ir.ClosureExpr)
1569 clo.Func.SetClosureCalled(false)
1575 if c, ok := call.(*ir.CallExpr); ok {
1576 // To simplify things, turn f(a, b, []T{c, d, e}...) back
1577 // into f(a, b, c, d, e) -- when the final call is run through the
1578 // type checker below, it will rebuild the proper slice literal.
1584 // This is set to true if the closure we're generating escapes
1585 // (needs heap allocation).
1586 cloEscapes := func() bool {
1587 if n.Op() == ir.OGO {
1588 // For "go", assume that all closures escape.
1591 // For defer, just use whatever result escape analysis
1592 // has determined for the defer.
1593 return n.Esc() != ir.EscNever
1596 // A helper for making a copy of an argument. Note that it is
1597 // not safe to use o.copyExpr(arg) if we're putting a
1598 // reference to the temp into the closure (as opposed to
1599 // copying it in by value), since in the by-reference case we
1600 // need a temporary whose lifetime extends to the end of the
1601 // function (as opposed to being local to the current block or
1602 // statement being ordered).
1603 mkArgCopy := func(arg ir.Node) *ir.Name {
// byval: small (<= 128 byte) args are copied by value; an escaping
// closure also forces by-value capture.
1605 byval := t.Size() <= 128 || cloEscapes
1606 var argCopy *ir.Name
1608 argCopy = o.copyExpr(arg)
1610 argCopy = typecheck.Temp(t)
1611 o.append(ir.NewAssignStmt(base.Pos, argCopy, arg))
1613 // The value of 128 below is meant to be consistent with code
1614 // in escape analysis that picks byval/byaddr based on size.
1615 argCopy.SetByval(byval)
1619 // getUnsafeArg looks for an unsafe.Pointer arg that has been
1620 // previously captured into the call's keepalive list, returning
1621 // the name node for it if found.
1622 getUnsafeArg := func(arg ir.Node) *ir.Name {
1623 // Look for uintptr(unsafe.Pointer(name))
1624 if arg.Op() != ir.OCONVNOP {
1627 if !arg.Type().IsUintptr() {
1630 if !arg.(*ir.ConvExpr).X.Type().IsUnsafePtr() {
1633 arg = arg.(*ir.ConvExpr).X
1634 argname, ok := arg.(*ir.Name)
// Only names already recorded on the call's KeepAlive list qualify.
1638 for i := range keepAlive {
1639 if argname == keepAlive[i] {
1646 // Copy the arguments to the function into temps.
1648 // For calls with uintptr(unsafe.Pointer(...)) args that are being
1649 // kept alive (see code in (*orderState).call that does this), use
1650 // the existing arg copy instead of creating a new copy.
// unsafeArgs[i] is non-nil iff arg i reused a keepalive copy; it is
// consulted below when rebuilding the uintptr conversion inside the wrapper.
1651 unsafeArgs := make([]*ir.Name, len(callArgs))
1652 origArgs := callArgs
1653 var newNames []*ir.Name
1654 for i := range callArgs {
1656 var argname *ir.Name
1657 unsafeArgName := getUnsafeArg(arg)
1658 if unsafeArgName != nil {
1659 // arg has been copied already, use keepalive copy
1660 argname = unsafeArgName
1661 unsafeArgs[i] = unsafeArgName
1663 argname = mkArgCopy(arg)
1665 newNames = append(newNames, argname)
1668 // Deal with cases where the function expression (what we're
1669 // calling) is not a simple function symbol.
1671 var methSelectorExpr *ir.SelectorExpr
1674 case callX.Op() == ir.ODOTMETH || callX.Op() == ir.ODOTINTER:
1675 // Handle defer of a method call, e.g. "defer v.MyMethod(x, y)"
// Note: this "n" shadows the GoDeferStmt parameter; it is the
// method selector expression whose receiver gets copied.
1676 n := callX.(*ir.SelectorExpr)
1677 n.X = mkArgCopy(n.X)
1678 methSelectorExpr = n
1679 if callX.Op() == ir.ODOTINTER {
1680 // Currently for "defer i.M()" if i is nil it panics at the
1681 // point of defer statement, not when deferred function is called.
1682 // (I think there is an issue discussing what is the intended
1683 // behavior but I cannot find it.)
1684 // We need to do the nil check outside of the wrapper.
1685 tab := typecheck.Expr(ir.NewUnaryExpr(base.Pos, ir.OITAB, n.X))
1686 c := ir.NewUnaryExpr(n.Pos(), ir.OCHECKNIL, tab)
1690 case !(callX.Op() == ir.ONAME && callX.(*ir.Name).Class == ir.PFUNC):
1691 // Deal with "defer returnsafunc()(x, y)" (for
1692 // example) by copying the callee expression.
1693 fnExpr = mkArgCopy(callX)
1694 if callX.Op() == ir.OCLOSURE {
1695 // For "defer func(...)", in addition to copying the
1696 // closure into a temp, mark it as no longer directly
1698 callX.(*ir.ClosureExpr).Func.SetClosureCalled(false)
1703 // Create a new no-argument function that we'll hand off to defer.
// The wrapper signature is func() — no receiver, params, or results.
1704 fn := ir.NewClosureFunc(base.Pos, true)
1705 fn.Nname.SetType(types.NewSignature(types.LocalPkg, nil, nil, nil, nil))
1708 // helper for capturing reference to a var declared in an outer scope.
1709 capName := func(pos src.XPos, fn *ir.Func, n *ir.Name) *ir.Name {
1711 cv := ir.CaptureName(pos, fn, n)
1713 return typecheck.Expr(cv).(*ir.Name)
1716 // Call args (x1, y1) need to be captured as part of the newly
1718 newCallArgs := []ir.Node{}
1719 for i := range newNames {
1721 arg = capName(callArgs[i].Pos(), fn, newNames[i])
1722 if unsafeArgs[i] != nil {
// Reapply the original uintptr(...) conversion on top of the
// captured keepalive name.
1723 arg = ir.NewConvExpr(arg.Pos(), origArgs[i].Op(), origArgs[i].Type(), arg)
1725 newCallArgs = append(newCallArgs, arg)
1727 // Also capture the function or method expression (if needed) into
1730 callX = capName(callX.Pos(), fn, fnExpr)
1732 if methSelectorExpr != nil {
1733 methSelectorExpr.X = capName(callX.Pos(), fn, methSelectorExpr.X.(*ir.Name))
1736 // This flags a builtin as opposed to a regular call.
1737 irregular := (call.Op() != ir.OCALLFUNC &&
1738 call.Op() != ir.OCALLMETH &&
1739 call.Op() != ir.OCALLINTER)
1741 // Construct new function body: f(x1, y1)
1746 newcall := mkNewCall(call.Pos(), op, callX, newCallArgs)
1748 // Finalize body, register function on the main decls list.
1749 fn.Body = []ir.Node{newcall}
1750 ir.FinishCaptureNames(n.Pos(), ir.CurFunc, fn)
1752 // Create closure expr
1753 clo := typecheck.Expr(fn.OClosure).(*ir.ClosureExpr)
1755 // Set escape properties for closure.
1756 if n.Op() == ir.OGO {
1757 // For "go", assume that the closure is going to escape.
1758 clo.SetEsc(ir.EscHeap)
1761 // For defer, just use whatever result escape analysis
1762 // has determined for the defer.
1763 if n.Esc() == ir.EscNever {
1764 clo.SetTransient(true)
1765 clo.SetEsc(ir.EscNone)
1769 // Create new top level call to closure over argless function.
1770 topcall := ir.NewCallExpr(n.Pos(), ir.OCALL, clo, nil)
1771 typecheck.Call(topcall)
// The wrapper is invoked through the go/defer statement, not called
// directly at its definition site.
1773 fn.SetClosureCalled(false)
1775 // Finally, point the defer statement at the newly generated call.
1779 // isFuncPCIntrinsic returns whether n is a direct call of internal/abi.FuncPCABIxxx functions.
1780 func isFuncPCIntrinsic(n *ir.CallExpr) bool {
// Must be a direct call of a plain named function (not a method or
// an indirect call through a function value).
1781 if n.Op() != ir.OCALLFUNC || n.X.Op() != ir.ONAME {
1784 fn := n.X.(*ir.Name).Sym()
// Precedence note: && binds tighter than ||, so the second clause
// matches either an import of internal/abi or the internal/abi
// package compiling itself (its own symbols use types.LocalPkg).
1785 return (fn.Name == "FuncPCABI0" || fn.Name == "FuncPCABIInternal") &&
1786 (fn.Pkg.Path == "internal/abi" || fn.Pkg == types.LocalPkg && base.Ctxt.Pkgpath == "internal/abi")
1789 // isIfaceOfFunc returns whether n is an interface conversion from a direct reference of a func.
1790 func isIfaceOfFunc(n ir.Node) bool {
1791 return n.Op() == ir.OCONVIFACE && n.(*ir.ConvExpr).X.Op() == ir.ONAME && n.(*ir.ConvExpr).X.(*ir.Name).Class == ir.PFUNC