1 // Copyright 2009 The Go Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style
3 // license that can be found in the LICENSE file.
14 "cmd/compile/internal/base"
15 "cmd/compile/internal/ir"
16 "cmd/compile/internal/reflectdata"
17 "cmd/compile/internal/staticdata"
18 "cmd/compile/internal/typecheck"
19 "cmd/compile/internal/types"
// Entry fields (fragment: the enclosing "type Entry struct {" line and the
// Plan type are elided from this extract — the content numbering jumps).
// Xoffset is the byte offset of this entry within the enclosing struct or
// array value (see its use as loff+e.Xoffset below); Expr holds the
// initializer expression for that slot.
26 Xoffset int64 // struct, array only
27 Expr ir.Node // bytes of run-time computed expressions
34 // A Schedule is used to decompose assignment statements into
35 // static and dynamic initialization parts. Static initializations are
36 // handled by populating variables' linker symbol data, while dynamic
37 // initializations are accumulated to be executed in order.
38 type Schedule struct {
39 // Out is the ordered list of dynamic initialization
// statements (the continuation of this comment and the Out field
// declaration are elided from this extract).

// Plans caches the decomposition plan built for each composite-literal
// node (see initplan/addvalue below). Temps maps expression nodes to
// the static temporaries created to hold their data (used via
// s.Temps[r] in staticcopy).
43 Plans map[ir.Node]*Plan
44 Temps map[ir.Node]*ir.Name
// append queues the dynamic initialization statement n onto s.Out,
// preserving execution order.
47 func (s *Schedule) append(n ir.Node) {
48 s.Out = append(s.Out, n)
51 // StaticInit adds an initialization statement n to the schedule.
52 func (s *Schedule) StaticInit(n ir.Node) {
53 if !s.tryStaticInit(n) {
// Static execution failed; with the -% debug flag set, dump the
// statement for compiler debugging. (The fallback that schedules n
// for dynamic execution is elided from this extract.)
54 if base.Flag.Percent != 0 {
55 ir.Dump("StaticInit failed", n)
61 // varToMapInit holds book-keeping state for global map initialization;
62 // it records the init function created by the compiler to host the
63 // initialization code for the map in question.
64 var varToMapInit map[*ir.Name]*ir.Func
66 // MapInitToVar is the inverse of varToMapInit; it maintains a mapping
67 // from a compiler-generated init function to the map the function is
// initializing (the final line of this comment is elided in the extract).
69 var MapInitToVar map[*ir.Func]*ir.Name
71 // recordFuncForVar establishes a mapping between global map var "v" and
72 // outlined init function "fn" (and vice versa); so that we can use
73 // the mappings later on to update relocations.
74 func recordFuncForVar(v *ir.Name, fn *ir.Func) {
// Lazily allocate both direction maps on first use. (The actual
// insertions of v/fn into the maps are elided from this extract.)
75 if varToMapInit == nil {
76 varToMapInit = make(map[*ir.Name]*ir.Func)
77 MapInitToVar = make(map[*ir.Func]*ir.Name)
83 // tryStaticInit attempts to statically execute an initialization
84 // statement and reports whether it succeeded.
85 func (s *Schedule) tryStaticInit(nn ir.Node) bool {
86 // Only worry about simple "l = r" assignments. Multiple
87 // variable/expression OAS2 assignments have already been
88 // replaced by multiple simple OAS assignments, and the other
89 // OAS2* assignments mostly necessitate dynamic execution
// anyway (the end of this comment is elided in the extract).
91 if nn.Op() != ir.OAS {
94 n := nn.(*ir.AssignStmt)
// "_ = sideEffectFreeExpr" can be treated as already handled.
95 if ir.IsBlank(n.X) && !AnySideEffects(n.Y) {
// Restore base.Pos when done. (The save of the previous position
// into lno is elided from this extract.)
100 defer func() { base.Pos = lno }()
101 nam := n.X.(*ir.Name)
102 return s.StaticAssign(nam, 0, n.Y, nam.Type())
105 // like staticassign but we are copying an already
106 // initialized value r.
// staticcopy reports whether it was able to statically copy the
// already-initialized global rn into l at byte offset loff, with
// destination type typ.
// NOTE(review): this extract elides many lines of the body (the content
// numbering jumps); the surviving lines are annotated below.
107 func (s *Schedule) staticcopy(l *ir.Name, loff int64, rn *ir.Name, typ *types.Type) bool {
// Functions are represented by their linker symbols; write the
// function address directly into l's data.
108 if rn.Class == ir.PFUNC {
109 // TODO if roff != 0 { panic }
110 staticdata.InitAddr(l, loff, staticdata.FuncLinksym(rn))
// Only locally-defined package-level variables carry an initializer
// we can inspect here.
113 if rn.Class != ir.PEXTERN || rn.Sym().Pkg != types.LocalPkg {
117 // No explicit initialization value. Probably zeroed but perhaps
118 // supplied externally and of unknown value.
121 if rn.Defn.Op() != ir.OAS {
124 if rn.Type().IsString() { // perhaps overwritten by cmd/link -X (#34675)
131 r := rn.Defn.(*ir.AssignStmt).Y
133 // types2.InitOrder doesn't include default initializers.
134 base.Fatalf("unexpected initializer: %v", rn.Defn)
// Strip value-preserving conversions to expose the underlying
// expression, stopping once the type matches typ.
137 for r.Op() == ir.OCONVNOP && !types.Identical(r.Type(), typ) {
138 r = r.(*ir.ConvExpr).X
// A method expression reduces to its underlying function name.
143 r = r.(*ir.SelectorExpr).FuncName()
147 if s.staticcopy(l, loff, r, typ) {
150 // We may have skipped past one or more OCONVNOPs, so
151 // use conv to ensure r is assignable to l (#13263).
153 if loff != 0 || !types.Identical(typ, l.Type()) {
154 dst = ir.NewNameOffsetExpr(base.Pos, l, loff, typ)
156 s.append(ir.NewAssignStmt(base.Pos, dst, typecheck.Conv(r, typ)))
// Literal/nil values are emitted directly into the data symbol.
166 staticdata.InitConst(l, loff, r, int(typ.Size()))
170 r := r.(*ir.AddrExpr)
171 if a, ok := r.X.(*ir.Name); ok && a.Op() == ir.ONAME {
172 staticdata.InitAddr(l, loff, staticdata.GlobalLinksym(a))
177 r := r.(*ir.AddrExpr)
// Pointer to a composite literal: point at the static temporary
// previously recorded in s.Temps for that literal.
179 case ir.OARRAYLIT, ir.OSLICELIT, ir.OSTRUCTLIT, ir.OMAPLIT:
181 staticdata.InitAddr(l, loff, staticdata.GlobalLinksym(s.Temps[r]))
186 r := r.(*ir.CompLitExpr)
// Slice header: base pointer from the static temporary plus length.
188 staticdata.InitSlice(l, loff, staticdata.GlobalLinksym(s.Temps[r]), r.Len)
191 case ir.OARRAYLIT, ir.OSTRUCTLIT:
192 r := r.(*ir.CompLitExpr)
// Copy each planned entry: constants are emitted directly,
// named values are copied recursively via staticcopy, and anything
// else is turned into a plain assignment of the already-computed
// slot (see below).
197 if e.Expr.Op() == ir.OLITERAL || e.Expr.Op() == ir.ONIL {
198 staticdata.InitConst(l, loff+e.Xoffset, e.Expr, int(typ.Size()))
202 if x.Op() == ir.OMETHEXPR {
203 x = x.(*ir.SelectorExpr).FuncName()
205 if x.Op() == ir.ONAME && s.staticcopy(l, loff+e.Xoffset, x.(*ir.Name), typ) {
208 // Requires computation, but we're
209 // copying someone else's computation.
210 ll := ir.NewNameOffsetExpr(base.Pos, l, loff+e.Xoffset, typ)
211 rr := ir.NewNameOffsetExpr(base.Pos, orig, e.Xoffset, typ)
213 s.append(ir.NewAssignStmt(base.Pos, ll, rr))
// StaticAssign attempts to statically initialize l at byte offset loff
// with the value of expression r (of type typ), reporting whether it
// succeeded. On failure the caller is expected to fall back to dynamic
// initialization.
// NOTE(review): this extract elides many lines (case labels, returns,
// closing braces — the content numbering jumps); annotations below cover
// only the surviving lines.
222 func (s *Schedule) StaticAssign(l *ir.Name, loff int64, r ir.Node, typ *types.Type) bool {
224 // No explicit initialization value. Either zero or supplied
228 for r.Op() == ir.OCONVNOP {
229 r = r.(*ir.ConvExpr).X
// assign tries a recursive static assignment and otherwise schedules
// a dynamic assignment statement.
232 assign := func(pos src.XPos, a *ir.Name, aoff int64, v ir.Node) {
233 if s.StaticAssign(a, aoff, v, v.Type()) {
238 // Don't use NameOffsetExpr with blank (#43677).
241 lhs = ir.NewNameOffsetExpr(pos, a, aoff, v.Type())
243 s.append(ir.NewAssignStmt(pos, lhs, v))
// Copying from another initialized global is delegated to staticcopy.
249 return s.staticcopy(l, loff, r, typ)
252 r := r.(*ir.SelectorExpr)
253 return s.staticcopy(l, loff, r.FuncName(), typ)
262 staticdata.InitConst(l, loff, r, int(typ.Size()))
266 r := r.(*ir.AddrExpr)
// &X where X has a static location: emit the address+offset directly.
267 if name, offset, ok := StaticLoc(r.X); ok && name.Class == ir.PEXTERN {
268 staticdata.InitAddrOffset(l, loff, name.Linksym(), offset)
274 r := r.(*ir.AddrExpr)
// &T{...}: materialize the literal in a fresh static temporary and
// point l at it.
276 case ir.OARRAYLIT, ir.OSLICELIT, ir.OMAPLIT, ir.OSTRUCTLIT:
278 a := StaticName(r.X.Type())
281 staticdata.InitAddr(l, loff, a.Linksym())
283 // Init underlying literal.
284 assign(base.Pos, a, 0, r.X)
287 //dump("not static ptrlit", r);
290 r := r.(*ir.ConvExpr)
// []byte("const string") into a global: emit the bytes statically.
291 if l.Class == ir.PEXTERN && r.X.Op() == ir.OLITERAL {
292 sval := ir.StringVal(r.X)
293 staticdata.InitSliceBytes(l, loff, sval)
298 r := r.(*ir.CompLitExpr)
// Slice literal: back it with a static array of the element type.
301 ta := types.NewArray(r.Type().Elem(), r.Len)
305 staticdata.InitSlice(l, loff, a.Linksym(), r.Len)
306 // Fall through to init underlying array.
311 case ir.OARRAYLIT, ir.OSTRUCTLIT:
312 r := r.(*ir.CompLitExpr)
// Emit each planned entry: constants directly, the rest via assign.
318 if e.Expr.Op() == ir.OLITERAL || e.Expr.Op() == ir.ONIL {
319 staticdata.InitConst(l, loff+e.Xoffset, e.Expr, int(e.Expr.Type().Size()))
323 assign(base.Pos, l, loff+e.Xoffset, e.Expr)
332 r := r.(*ir.ClosureExpr)
333 if ir.IsTrivialClosure(r) {
334 if base.Debug.Closure > 0 {
335 base.WarnfAt(r.Pos(), "closure converted to global")
337 // Issue 59680: if the closure we're looking at was produced
338 // by inlining, it could be marked as hidden, which we don't
339 // want (moving the func to a static init will effectively
340 // hide it from escape analysis). Mark as non-hidden here
341 // so that it will participate in escape analysis.
342 r.Func.SetIsHiddenClosure(false)
343 // Closures with no captured variables are globals,
344 // so the assignment can be done at link time.
345 // TODO if roff != 0 { panic }
346 staticdata.InitAddr(l, loff, staticdata.FuncLinksym(r.Func.Nname))
349 ir.ClosureDebugRuntimeCheck(r)
352 // This logic is mirrored in isStaticCompositeLiteral.
353 // If you change something here, change it there, and vice versa.
355 // Determine the underlying concrete type and value we are converting from.
356 r := r.(*ir.ConvExpr)
358 for val.Op() == ir.OCONVIFACE {
359 val = val.(*ir.ConvExpr).X
362 if val.Type().IsInterface() {
363 // val is an interface type.
364 // If val is nil, we can statically initialize l;
365 // both words are zero and so there is no work to do, so report success.
366 // If val is non-nil, we have no concrete type to record,
367 // and we won't be able to statically initialize its value, so report failure.
368 return val.Op() == ir.ONIL
371 if val.Type().HasShape() {
372 // See comment in cmd/compile/internal/walk/convert.go:walkConvInterface
376 reflectdata.MarkTypeUsedInInterface(val.Type(), l.Linksym())
// Pick the first interface word: a type pointer for empty
// interfaces, an itab otherwise.
378 var itab *ir.AddrExpr
379 if typ.IsEmptyInterface() {
380 itab = reflectdata.TypePtrAt(base.Pos, val.Type())
382 itab = reflectdata.ITabAddrAt(base.Pos, val.Type(), typ)
385 // Create a copy of l to modify while we emit data.
387 // Emit itab, advance offset.
388 staticdata.InitAddr(l, loff, itab.X.(*ir.LinksymOffsetExpr).Linksym)
// Second interface word: the data pointer (or the value itself for
// direct-interface types).
391 if types.IsDirectIface(val.Type()) {
392 if val.Op() == ir.ONIL {
393 // Nil is zero, nothing to do.
396 // Copy val directly into n.
398 assign(base.Pos, l, loff+int64(types.PtrSize), val)
400 // Construct temp to hold val, write pointer to temp into n.
401 a := StaticName(val.Type())
403 assign(base.Pos, a, 0, val)
404 staticdata.InitAddr(l, loff+int64(types.PtrSize), a.Linksym())
410 r := r.(*ir.InlinedCallExpr)
411 return s.staticAssignInlinedCall(l, loff, r, typ)
// Not statically assignable; with -% set, dump for debugging.
414 if base.Flag.Percent != 0 {
415 ir.Dump("not static", r)
// initplan builds a Plan — a flat list of (offset, expression) entries —
// for composite literal n and caches it in s.Plans; it returns early if a
// plan for n already exists. Map entries use offset -1 (see below), since
// map slots have no static offset.
// NOTE(review): several lines of the body (switch header, key handling,
// closing braces) are elided from this extract.
420 func (s *Schedule) initplan(n ir.Node) {
421 if s.Plans[n] != nil {
428 base.Fatalf("initplan")
430 case ir.OARRAYLIT, ir.OSLICELIT:
431 n := n.(*ir.CompLitExpr)
433 for _, a := range n.List {
// Keyed element: the index must be a constant.
434 if a.Op() == ir.OKEY {
435 kv := a.(*ir.KeyExpr)
436 k = typecheck.IndexConst(kv.Key)
438 base.Fatalf("initplan arraylit: invalid index %v", kv.Key)
// Element offset = index * element size.
442 s.addvalue(p, k*n.Type().Elem().Size(), a)
447 n := n.(*ir.CompLitExpr)
448 for _, a := range n.List {
449 if a.Op() != ir.OSTRUCTKEY {
450 base.Fatalf("initplan structlit")
452 a := a.(*ir.StructKeyExpr)
// Blank fields contribute nothing to the plan.
453 if a.Sym().IsBlank() {
456 s.addvalue(p, a.Field.Offset, a.Value)
460 n := n.(*ir.CompLitExpr)
461 for _, a := range n.List {
462 if a.Op() != ir.OKEY {
463 base.Fatalf("initplan maplit")
466 s.addvalue(p, -1, a.Value)
// addvalue appends an entry for expression n at offset xoffset to plan p,
// dropping zero values and flattening nested struct/array literals by
// re-basing their sub-entries at xoffset.
471 func (s *Schedule) addvalue(p *Plan, xoffset int64, n ir.Node) {
472 // special case: zero can be dropped entirely
477 // special case: inline struct and array (not slice) literals
481 for _, qe := range q.E {
482 // qe is a copy; we are not modifying entries in q.E
483 qe.Xoffset += xoffset
484 p.E = append(p.E, qe)
// General case: record the expression as a single entry.
490 p.E = append(p.E, Entry{Xoffset: xoffset, Expr: n})
// staticAssignInlinedCall attempts to statically initialize l at offset
// loff from the result of inlined call expression "call", reporting
// whether it succeeded. It only handles the single-return-plus-goto shape
// documented below; anything else falls back to dynamic initialization.
// NOTE(review): this extract elides a number of lines of the body (the
// content numbering jumps); the surviving lines are annotated below.
493 func (s *Schedule) staticAssignInlinedCall(l *ir.Name, loff int64, call *ir.InlinedCallExpr, typ *types.Type) bool {
// Feature gate: disabled when -d=inlstaticinit=0.
494 if base.Debug.InlStaticInit == 0 {
498 // Handle the special case of an inlined call of
499 // a function body with a single return statement,
500 // which turns into a single assignment plus a goto.
502 // For example code like this:
504 // type T struct{ x int }
505 // func F(x int) *T { return &T{x} }
506 // var Global = F(400)
508 // turns into IR like this:
512 // . . DCL # x.go:18:13
513 // . . . NAME-p.x Class:PAUTO Offset:0 InlFormal OnStack Used int tc(1) # x.go:14:9,x.go:18:13
514 // . AS2 Def tc(1) # x.go:18:13
516 // . . NAME-p.x Class:PAUTO Offset:0 InlFormal OnStack Used int tc(1) # x.go:14:9,x.go:18:13
518 // . . LITERAL-400 int tc(1) # x.go:18:14
519 // . INLMARK Index:1 # +x.go:18:13
520 // INLCALL PTR-*T tc(1) # x.go:18:13
522 // . BLOCK tc(1) # x.go:18:13
524 // . . DCL tc(1) # x.go:18:13
525 // . . . NAME-p.~R0 Class:PAUTO Offset:0 OnStack Used PTR-*T tc(1) # x.go:18:13
526 // . . AS2 tc(1) # x.go:18:13
528 // . . . NAME-p.~R0 Class:PAUTO Offset:0 OnStack Used PTR-*T tc(1) # x.go:18:13
530 // . . . INLINED RETURN ARGUMENT HERE
531 // . . GOTO p..i1 tc(1) # x.go:18:13
532 // . LABEL p..i1 # x.go:18:13
533 // INLCALL-ReturnVars
534 // . NAME-p.~R0 Class:PAUTO Offset:0 OnStack Used PTR-*T tc(1) # x.go:18:13
536 // In non-unified IR, the tree is slightly different:
537 // - if there are no arguments to the inlined function,
538 // the INLCALL-init omits the AS2.
539 // - the DCL inside BLOCK is on the AS2's init list,
540 // not its own statement in the top level of the BLOCK.
542 // If the init values are side-effect-free and each either only
543 // appears once in the function body or is safely repeatable,
544 // then we inline the value expressions into the return argument
545 // and then call StaticAssign to handle that copy.
547 // This handles simple cases like
549 // var myError = errors.New("mine")
551 // where errors.New is
553 // func New(text string) error {
554 // return &errorString{text}
557 // We could make things more sophisticated but this kind of initializer
558 // is the most important case for us to get right.
// Match the init list: either "AS2; INLMARK" (params assigned) or a
// lone INLMARK (no params — synthesize an empty AS2).
561 var as2init *ir.AssignListStmt
562 if len(init) == 2 && init[0].Op() == ir.OAS2 && init[1].Op() == ir.OINLMARK {
563 as2init = init[0].(*ir.AssignListStmt)
564 } else if len(init) == 1 && init[0].Op() == ir.OINLMARK {
565 as2init = new(ir.AssignListStmt)
// Match the body: exactly "BLOCK; LABEL".
569 if len(call.Body) != 2 || call.Body[0].Op() != ir.OBLOCK || call.Body[1].Op() != ir.OLABEL {
572 label := call.Body[1].(*ir.LabelStmt).Label
573 block := call.Body[0].(*ir.BlockStmt)
// Accept either the unified-IR shape (DCL; AS2; GOTO) ...
576 if len(list) == 3 && list[0].Op() == ir.ODCL {
577 dcl = list[0].(*ir.Decl)
581 list[0].Op() != ir.OAS2 ||
582 list[1].Op() != ir.OGOTO ||
583 list[1].(*ir.BranchStmt).Label != label {
586 as2body := list[0].(*ir.AssignListStmt)
// ... or the non-unified shape with the DCL on the AS2's init list.
588 ainit := as2body.Init()
589 if len(ainit) != 1 || ainit[0].Op() != ir.ODCL {
592 dcl = ainit[0].(*ir.Decl)
// The single LHS must be the declared return variable.
594 if len(as2body.Lhs) != 1 || as2body.Lhs[0] != dcl.X {
598 // Can't remove the parameter variables if an address is taken.
599 for _, v := range as2init.Lhs {
600 if v.(*ir.Name).Addrtaken() {
604 // Can't move the computation of the args if they have side effects.
605 for _, r := range as2init.Rhs {
606 if AnySideEffects(r) {
611 // Can only substitute arg for param if param is used
612 // at most once or is repeatable.
613 count := make(map[*ir.Name]int)
614 for _, x := range as2init.Lhs {
615 count[x.(*ir.Name)] = 0
618 hasNonTrivialClosure := false
619 ir.Visit(as2body.Rhs[0], func(n ir.Node) {
620 if name, ok := n.(*ir.Name); ok {
621 if c, ok := count[name]; ok {
625 if clo, ok := n.(*ir.ClosureExpr); ok {
626 hasNonTrivialClosure = hasNonTrivialClosure || !ir.IsTrivialClosure(clo)
630 // If there's a non-trivial closure, it has captured the param,
631 // so we can't substitute arg for param.
632 if hasNonTrivialClosure {
636 for name, c := range count {
638 // Check whether corresponding initializer can be repeated.
639 // Something like 1 can be; make(chan int) or &T{} cannot,
640 // because they need to evaluate to the same result in each use.
641 for i, n := range as2init.Lhs {
642 if n == name && !canRepeat(as2init.Rhs[i]) {
649 // Possible static init.
650 // Build tree with args substituted for params and try it.
651 args := make(map[*ir.Name]ir.Node)
652 for i, v := range as2init.Lhs {
656 args[v.(*ir.Name)] = as2init.Rhs[i]
658 r, ok := subst(as2body.Rhs[0], args)
662 ok = s.StaticAssign(l, loff, r, typ)
664 if ok && base.Flag.Percent != 0 {
665 ir.Dump("static inlined-LEFT", l)
666 ir.Dump("static inlined-ORIG", call)
667 ir.Dump("static inlined-RIGHT", r)
672 // from here down is the walk analysis
673 // of composite literals.
674 // most of the work is to generate
675 // data statements for the constant
676 // part of the composite literal.
// statuniqgen feeds StaticName's symbol numbering; presumably it is
// incremented once per temp (the increment is elided from this extract).
678 var statuniqgen int // name generator for static temps
680 // StaticName returns a name backed by a (writable) static data symbol.
681 // Use readonlystaticname for read-only node.
682 func StaticName(t *types.Type) *ir.Name {
683 // Don't use LookupNum; it interns the resulting string, but these are all unique.
684 sym := typecheck.Lookup(fmt.Sprintf("%s%d", obj.StaticNamePref, statuniqgen))
687 n := ir.NewNameAt(base.Pos, sym, t)
// Register the new name as a package-level external declaration so it
// is emitted with the package's globals.
691 typecheck.Target.Externs = append(typecheck.Target.Externs, n)
// Mark the linker symbol static (not exported from the object file).
693 n.Linksym().Set(obj.AttrStatic, true)
697 // StaticLoc reports whether n has a static address and, if so, returns
698 // the base name and byte offset of that address. (Despite the original
// comment's "or else nil", the failure result is ok == false.)
698 func StaticLoc(n ir.Node) (name *ir.Name, offset int64, ok bool) {
// Method expression: the address of the underlying function.
709 n := n.(*ir.SelectorExpr)
710 return StaticLoc(n.FuncName())
// Field selector: base address plus the field's offset (the offset
// addition itself is elided from this extract).
713 n := n.(*ir.SelectorExpr)
714 if name, offset, ok = StaticLoc(n.X); !ok {
718 return name, offset, true
721 n := n.(*ir.IndexExpr)
// Slices are not statically addressable; arrays are.
722 if n.X.Type().IsSlice() {
725 if name, offset, ok = StaticLoc(n.X); !ok {
733 // Check for overflow.
734 if n.Type().Size() != 0 && types.MaxWidth/n.Type().Size() <= int64(l) {
737 offset += int64(l) * n.Type().Size()
738 return name, offset, true
// isSideEffect reports whether the single node n may have an observable
// side effect; it is the per-node predicate used by AnySideEffects via
// ir.Any. Conservative: unknown ops are assumed to have side effects.
// NOTE(review): the extract elides the long "no side effects" case list
// between the two comments below.
744 func isSideEffect(n ir.Node) bool {
746 // Assume side effects unless we know otherwise.
750 // No side effects here (arguments are checked separately).
805 // Only possible side effect is division by zero.
806 case ir.ODIV, ir.OMOD:
807 n := n.(*ir.BinaryExpr)
// Non-constant or zero divisor may panic at run time.
808 if n.Y.Op() != ir.OLITERAL || constant.Sign(n.Y.Val()) == 0 {
812 // Only possible side effect is panic on invalid size,
813 // but many makechan and makemap use size zero, which is definitely OK.
814 case ir.OMAKECHAN, ir.OMAKEMAP:
815 n := n.(*ir.MakeExpr)
816 if !ir.IsConst(n.Len, constant.Int) || constant.Sign(n.Len.Val()) != 0 {
820 // Only possible side effect is panic on invalid size.
821 // TODO(rsc): Merge with previous case (probably breaks toolstash -cmp).
822 case ir.OMAKESLICE, ir.OMAKESLICECOPY:
828 // AnySideEffects reports whether n contains any operations that could have observable side effects.
829 func AnySideEffects(n ir.Node) bool {
// Walk the whole expression tree; isSideEffect decides per node.
830 return ir.Any(n, isSideEffect)
833 // canRepeat reports whether executing n multiple times has the same effect as
834 // assigning n to a single variable and using that variable multiple times.
835 func canRepeat(n ir.Node) bool {
// bad flags any sub-node whose repeated evaluation could differ
// (its op list is elided from this extract).
836 bad := func(n ir.Node) bool {
855 return !ir.Any(n, bad)
// getlit returns the value of small integer constant lit (the
// non-constant fallback return is elided from this extract).
858 func getlit(lit ir.Node) int {
859 if ir.IsSmallIntConst(lit) {
860 return int(ir.Int64Val(lit))
// isvaluelit reports whether n is an array or struct composite literal,
// i.e. a literal that denotes a value rather than a reference.
865 func isvaluelit(n ir.Node) bool {
866 return n.Op() == ir.OARRAYLIT || n.Op() == ir.OSTRUCTLIT
// subst returns a deep copy of expression n with every name in m replaced
// by a copy of its mapped expression, folding constants where it can; it
// reports whether the substitution succeeded. (The failure paths are
// elided from this extract.)
869 func subst(n ir.Node, m map[*ir.Name]ir.Node) (ir.Node, bool) {
871 var edit func(ir.Node) ir.Node
872 edit = func(x ir.Node) ir.Node {
// A mapped name is replaced by a fresh deep copy of its argument,
// so repeated uses do not share nodes.
876 if v, ok := m[x]; ok {
877 return ir.DeepCopy(v.Pos(), v)
880 case ir.ONONAME, ir.OLITERAL, ir.ONIL, ir.OTYPE:
// Recurse into children before attempting constant folding below.
884 ir.EditChildrenWithHidden(x, edit)
886 // TODO: handle more operations, see details discussion in go.dev/cl/466277.
// Fold constant conversions via truncate, and constant string
// concatenations via addStr.
889 x := x.(*ir.ConvExpr)
890 if x.X.Op() == ir.OLITERAL {
891 if x, ok := truncate(x.X, x.Type()); ok {
898 return addStr(x.(*ir.AddStringExpr))
906 // truncate returns the result of force converting c to type t,
907 // truncating its value as needed, like a conversion of a variable.
908 // If the conversion is too difficult, truncate returns nil, false.
909 func truncate(c ir.Node, t *types.Type) (ir.Node, bool) {
// Kind must change for the conversion to need work here; the
// same-kind fast path is elided from this extract.
912 if ct.Kind() != t.Kind() {
915 // Note: float -> float/integer and complex -> complex are valid but subtle.
916 // For example a float32(float64 1e300) evaluates to +Inf at runtime
917 // and the compiler doesn't have any concept of +Inf, so that would
918 // have to be left for runtime code evaluation.
922 case ct.IsInteger() && t.IsInteger():
923 // truncate or sign extend
// Mask down to the destination width, then, for signed targets whose
// sign bit is set, OR in the high bits to sign-extend.
925 cv = constant.BinaryOp(cv, token.AND, constant.MakeUint64(1<<bits-1))
926 if t.IsSigned() && constant.Compare(cv, token.GEQ, constant.MakeUint64(1<<(bits-1))) {
927 cv = constant.BinaryOp(cv, token.OR, constant.MakeInt64(-1<<(bits-1)))
// Rebuild a constant node carrying the truncated value.
931 c = ir.NewConstExpr(cv, c)
// addStr constant-folds string concatenation expression n, merging runs of
// adjacent constant operands. If every operand is constant the whole
// expression collapses to a single constant; otherwise a new
// AddStringExpr with the merged operand list is returned.
936 func addStr(n *ir.AddStringExpr) ir.Node {
937 // Merge adjacent constants in the argument list.
// First pass: count how many slots the merged list needs.
940 for i := 0; i < len(s); i++ {
941 if i == 0 || !ir.IsConst(s[i-1], constant.String) || !ir.IsConst(s[i], constant.String) {
942 // Can't merge s[i] into s[i-1]; need a slot in the list.
// All operands constant: fold to one constant string.
951 for _, c := range s {
952 strs = append(strs, ir.StringVal(c))
954 return typecheck.OrigConst(n, constant.MakeString(strings.Join(strs, "")))
// Second pass: build the merged operand list.
956 newList := make([]ir.Node, 0, need)
957 for i := 0; i < len(s); i++ {
958 if ir.IsConst(s[i], constant.String) && i+1 < len(s) && ir.IsConst(s[i+1], constant.String) {
959 // merge from i up to but not including i2
962 for i2 < len(s) && ir.IsConst(s[i2], constant.String) {
963 strs = append(strs, ir.StringVal(s[i2]))
967 nl := ir.Copy(n).(*ir.AddStringExpr)
969 newList = append(newList, typecheck.OrigConst(nl, constant.MakeString(strings.Join(strs, ""))))
972 newList = append(newList, s[i])
// Return a copy of n carrying the merged list.
976 nn := ir.Copy(n).(*ir.AddStringExpr)
// wrapGlobalMapInitSizeThreshold is the minimum RHS size (as measured in
// tryWrapGlobalInit) below which a global map initializer is not worth
// outlining into its own function.
981 const wrapGlobalMapInitSizeThreshold = 20
983 // tryWrapGlobalInit returns a new outlined function to contain global
984 // initializer statement n, if possible and worthwhile. Otherwise, it
987 // Currently, it outlines map assignment statements with large,
988 // side-effect-free RHS expressions.
989 func tryWrapGlobalInit(n ir.Node) *ir.Func {
990 // Look for "X = ..." where X has map type.
991 // FIXME: might also be worth trying to look for cases where
992 // the LHS is of interface type but RHS is map type.
993 if n.Op() != ir.OAS {
996 as := n.(*ir.AssignStmt)
997 if ir.IsBlank(as.X) || as.X.Op() != ir.ONAME {
1000 nm := as.X.(*ir.Name)
1001 if !nm.Type().IsMap() {
1005 // Determine size of RHS.
1007 ir.Any(as.Y, func(n ir.Node) bool {
1011 if base.Debug.WrapGlobalMapDbg > 0 {
1012 fmt.Fprintf(os.Stderr, "=-= mapassign %s %v rhs size %d\n",
1013 base.Ctxt.Pkgpath, n, rsiz)
1016 // Reject smaller candidates if not in stress mode.
1017 if rsiz < wrapGlobalMapInitSizeThreshold && base.Debug.WrapGlobalMapCtl != 2 {
1018 if base.Debug.WrapGlobalMapDbg > 1 {
1019 fmt.Fprintf(os.Stderr, "=-= skipping %v size too small at %d\n",
1025 // Reject right hand sides with side effects.
1026 if AnySideEffects(as.Y) {
1027 if base.Debug.WrapGlobalMapDbg > 0 {
1028 fmt.Fprintf(os.Stderr, "=-= rejected %v due to side effects\n", nm)
1033 if base.Debug.WrapGlobalMapDbg > 1 {
1034 fmt.Fprintf(os.Stderr, "=-= committed for: %+v\n", n)
1037 // Create a new function that will (eventually) have this form:
1039 // func map.init.%d() {
1040 // globmapvar = <map initialization>
1043 // Note: cmd/link expects the function name to contain "map.init".
1044 minitsym := typecheck.LookupNum("map.init.", mapinitgen)
1047 fn := ir.NewFunc(n.Pos(), n.Pos(), minitsym, types.NewSignature(nil, nil, nil))
1048 fn.SetInlinabilityChecked(true) // suppress inlining (which would defeat the point)
1049 typecheck.DeclFunc(fn)
1050 if base.Debug.WrapGlobalMapDbg > 0 {
1051 fmt.Fprintf(os.Stderr, "=-= generated func is %v\n", fn)
1054 // NB: we're relying on this phase being run before inlining;
1055 // if for some reason we need to move it after inlining, we'll
1056 // need code here that relocates or duplicates inline temps.
1058 // Insert assignment into function body; mark body finished.
1059 fn.Body = []ir.Node{as}
1060 typecheck.FinishFuncBody()
1062 if base.Debug.WrapGlobalMapDbg > 1 {
1063 fmt.Fprintf(os.Stderr, "=-= mapvar is %v\n", nm)
1064 fmt.Fprintf(os.Stderr, "=-= newfunc is %+v\n", fn)
1067 recordFuncForVar(nm, fn)
1072 // mapinitgen is a counter used to uniquify compiler-generated
1073 // map init functions.
1076 // AddKeepRelocations adds a dummy "R_KEEP" relocation from each
1077 // global map variable V to its associated outlined init function.
1078 // These relocations ensure that if the map var itself is determined to
1079 // be reachable at link time, we also mark the init function as
// reachable (the end of this comment is elided in the extract).
1081 func AddKeepRelocations() {
// Nothing to do if no map initializers were outlined.
1082 if varToMapInit == nil {
1085 for k, v := range varToMapInit {
1086 // Add R_KEEP relocation from map to init function.
// Both sides must have linker symbols; anything else is a
// compiler bug.
1089 base.Fatalf("bad: func %v has no linksym", v)
1093 base.Fatalf("bad: mapvar %v has no linksym", k)
1097 r.Type = objabi.R_KEEP
1098 if base.Debug.WrapGlobalMapDbg > 1 {
1099 fmt.Fprintf(os.Stderr, "=-= add R_KEEP relo from %s to %s\n",
1106 // OutlineMapInits replaces global map initializers with outlined
1107 // calls to separate "map init" functions (where possible and
1108 // profitable), to facilitate better dead-code elimination by the
// linker (the end of this comment is elided in the extract).
1110 func OutlineMapInits(fn *ir.Func) {
// Outlining can be disabled entirely with -d=wrapglobalmapctl=1.
1111 if base.Debug.WrapGlobalMapCtl == 1 {
1116 for i, stmt := range fn.Body {
1117 // Attempt to outline stmt. If successful, replace it with a call
1118 // to the returned wrapper function.
1119 if wrapperFn := tryWrapGlobalInit(stmt); wrapperFn != nil {
1120 ir.WithFunc(fn, func() {
1121 fn.Body[i] = typecheck.Call(stmt.Pos(), wrapperFn.Nname, nil, false)
1127 if base.Debug.WrapGlobalMapDbg > 1 {
1128 fmt.Fprintf(os.Stderr, "=-= outlined %v map initializations\n", outlined)