1 // Copyright 2018 The Go Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style
3 // license that can be found in the LICENSE file.
10 "cmd/compile/internal/base"
11 "cmd/compile/internal/ir"
12 "cmd/compile/internal/logopt"
13 "cmd/compile/internal/typecheck"
14 "cmd/compile/internal/types"
19 // Here we analyze functions to determine which Go variables
20 // (including implicit allocations such as calls to "new" or "make",
21 // composite literals, etc.) can be allocated on the stack. The two
22 // key invariants we have to ensure are: (1) pointers to stack objects
23 // cannot be stored in the heap, and (2) pointers to a stack object
24 // cannot outlive that object (e.g., because the declaring function
25 // returned and destroyed the object's stack frame, or its space is
26 // reused across loop iterations for logically distinct variables).
28 // We implement this with a static data-flow analysis of the AST.
29 // First, we construct a directed weighted graph where vertices
30 // (termed "locations") represent variables allocated by statements
31 // and expressions, and edges represent assignments between variables
32 // (with weights representing addressing/dereference counts).
34 // Next we walk the graph looking for assignment paths that might
35 // violate the invariants stated above. If a variable v's address is
36 // stored in the heap or elsewhere that may outlive it, then v is
37 // marked as requiring heap allocation.
39 // To support interprocedural analysis, we also record data-flow from
40 // each function's parameters to the heap and to its result
41 // parameters. This information is summarized as "parameter tags",
42 // which are used at static call sites to improve escape analysis of
43 // function arguments.
45 // Constructing the location graph.
47 // Every allocating statement (e.g., variable declaration) or
48 // expression (e.g., "new" or "make") is first mapped to a unique
51 // We also model every Go assignment as a directed edge between
52 // locations. The number of dereference operations minus the number of
53 // addressing operations is recorded as the edge's weight (termed
54 // "derefs"). For example:
63 // Note that the & operator can only be applied to addressable
64 // expressions, and the expression &x itself is not addressable, so
65 // derefs cannot go below -1.
67 // Every Go language construct is lowered into this representation,
68 // generally without sensitivity to flow, path, or context; and
69 // without distinguishing elements within a compound variable. For
72 // var x struct { f, g *int }
77 // is modeled simply as
81 // That is, we don't distinguish x.f from x.g, or u[0] from u[1],
82 // u[2], etc. However, we do record the implicit dereference involved
83 // in indexing a slice.
85 // A batch holds escape analysis state that's shared across an entire
86 // batch of functions being analyzed at once.
97 // A closure holds a closure expression and its spill hole (i.e.,
98 // the hole representing the store into its closure record).
104 // An escape holds state specific to a single function being analyzed.
109 curfn *ir.Func // function being analyzed
111 labels map[*types.Sym]labelState // known labels and their looping/nonlooping state (set by walkFunc)
113 // loopDepth counts the current loop nesting depth within
114 // curfn. It increments within each "for" loop and at each
115 // label with a corresponding backwards "goto" (i.e.,
116 // unstructured loop).
// Funcs performs escape analysis on all functions in all.
// Functions are visited bottom-up (callees before callers) via
// ir.VisitFuncsBottomUp, with each group handed to Batch.
120 func Funcs(all []*ir.Func) {
121 ir.VisitFuncsBottomUp(all, Batch)
124 // Batch performs escape analysis on a minimal batch of
// functions that must be analyzed together; recursive reports
// whether the batch is mutually recursive (per VisitFuncsBottomUp).
126 func Batch(fns []*ir.Func, recursive bool) {
	// Escape analysis applies only to whole function declarations.
127 for _, fn := range fns {
128 if fn.Op() != ir.ODCLFUNC {
129 base.Fatalf("unexpected node: %v", fn)
	// Seed the special sink locations with their fixed attributes:
	// anything flowing to heapLoc escapes, persists, is mutated, and called.
134 b.heapLoc.attrs = attrEscapes | attrPersists | attrMutates | attrCalls
135 b.mutatorLoc.attrs = attrMutates
136 b.calleeLoc.attrs = attrCalls
138 // Construct data-flow graph from syntax trees.
139 for _, fn := range fns {
141 s := fmt.Sprintf("\nbefore escape %v", fn)
	// Walk top-level functions only; hidden closures are skipped here,
	// presumably handled while walking their enclosing function — TODO confirm.
146 for _, fn := range fns {
147 if !fn.IsHiddenClosure() {
152 // We've walked the function bodies, so we've seen everywhere a
153 // variable might be reassigned or have its address taken. Now we
154 // can decide whether closures should capture their free variables
155 // by value or reference.
156 for _, closure := range b.closures {
157 b.flowClosure(closure.k, closure.clo)
	// Any location that must be heap-allocated (per HeapAllocReason)
	// is forced to flow to the heap sink.
161 for _, loc := range b.allLocs {
162 if why := HeapAllocReason(loc.n); why != "" {
163 b.flow(b.heapHole().addr(loc.n, why), loc)
// with returns an escape state for analyzing fn in the context of
// batch b.
171 func (b *batch) with(fn *ir.Func) *escape {
// initFunc allocates data-flow locations for fn's variables and
// marks fn as planned for analysis.
179 func (b *batch) initFunc(fn *ir.Func) {
	// Each function must be initialized exactly once.
181 if fn.Esc() != escFuncUnknown {
182 base.Fatalf("unexpected node: %v", fn)
184 fn.SetEsc(escFuncPlanned)
185 if base.Flag.LowerM > 3 {
186 ir.Dump("escAnalyze", fn)
189 // Allocate locations for local variables.
190 for _, n := range fn.Dcl {
194 // Also for hidden parameters (e.g., the ".this" parameter to a
195 // method value wrapper).
196 if fn.OClosure == nil {
197 for _, n := range fn.ClosureVars {
198 e.newLoc(n.Canonical(), true)
202 // Initialize resultIndex for result parameters.
203 for i, f := range fn.Type().Results().FieldSlice() {
	// NOTE(review): indices are 1-based (1 + i), presumably so the
	// zero value means "not a result parameter" — confirm.
204 e.oldLoc(f.Nname.(*ir.Name)).resultIndex = 1 + i
// walkFunc traverses fn's body to build the data-flow graph,
// first classifying fn's labels as looping or nonlooping.
208 func (b *batch) walkFunc(fn *ir.Func) {
210 fn.SetEsc(escFuncStarted)
212 // Identify labels that mark the head of an unstructured loop.
213 ir.Visit(fn, func(n ir.Node) {
216 n := n.(*ir.LabelStmt)
217 if n.Label.IsBlank() {
	// Allocate the label map on first use.
221 e.labels = make(map[*types.Sym]labelState)
	// Assume nonlooping until we see a backwards branch to it.
223 e.labels[n.Label] = nonlooping
226 // If we visited the label before the goto,
227 // then this is a looping label.
228 n := n.(*ir.BranchStmt)
229 if e.labels[n.Label] == nonlooping {
230 e.labels[n.Label] = looping
	// Every label should have been consumed during the walk.
237 if len(e.labels) != 0 {
238 base.FatalfAt(fn.Pos(), "leftover labels after walkFunc")
// flowClosure decides, for each free variable of closure clo,
// whether it is captured by value or by reference, and flows the
// captured variable into the closure's spill hole k.
242 func (b *batch) flowClosure(k hole, clo *ir.ClosureExpr) {
243 for _, cv := range clo.Func.ClosureVars {
247 base.FatalfAt(cv.Pos(), "closure variable never captured: %v", cv)
250 // Capture by value for variables <= 128 bytes that are never reassigned.
251 n.SetByval(!loc.addrtaken && !loc.reassigned && n.Type().Size() <= 128)
	// The compiler-generated dictionary variable must always end up
	// captured by value.
254 if n.Sym().Name == typecheck.LocalDictName {
255 base.FatalfAt(n.Pos(), "dictionary variable not captured by value")
259 if base.Flag.LowerM > 1 {
264 base.WarnfAt(n.Pos(), "%v capturing by %s: %v (addr=%v assign=%v width=%d)", n.Curfn, how, n, loc.addrtaken, loc.reassigned, n.Type().Size())
267 // Flow captured variables to closure.
	// A by-reference capture stores the variable's address, hence addr.
270 k = k.addr(cv, "reference")
272 b.flow(k.note(cv, "captured by a closure"), loc)
// finish wraps up the batch: it records parameter tags in export
// data, then updates each analyzed node based on the computed escape
// results, emitting -m diagnostics along the way.
276 func (b *batch) finish(fns []*ir.Func) {
277 // Record parameter tags for package export data.
278 for _, fn := range fns {
279 fn.SetEsc(escFuncTagged)
282 for _, fs := range &types.RecvsParams {
283 for _, f := range fs(fn.Type()).Fields().Slice() {
	// f.Note carries the escape tag into export data.
285 f.Note = b.paramTag(fn, narg, f)
290 for _, loc := range b.allLocs {
296 if n.Op() == ir.ONAME {
301 // Update n.Esc based on escape analysis results.
303 // Omit escape diagnostics for go/defer wrappers, at least for now.
304 // Historically, we haven't printed them, and test cases don't expect them.
305 // TODO(mdempsky): Update tests to expect this.
306 goDeferWrapper := n.Op() == ir.OCLOSURE && n.(*ir.ClosureExpr).Func.Wrapper()
308 if loc.hasAttr(attrEscapes) {
309 if n.Op() == ir.ONAME {
	// Heap allocation is disallowed when compiling the runtime itself.
310 if base.Flag.CompilingRuntime {
311 base.ErrorfAt(n.Pos(), 0, "%v escapes to heap, not allowed in runtime", n)
313 if base.Flag.LowerM != 0 {
314 base.WarnfAt(n.Pos(), "moved to heap: %v", n)
317 if base.Flag.LowerM != 0 && !goDeferWrapper {
318 base.WarnfAt(n.Pos(), "%v escapes to heap", n)
320 if logopt.Enabled() {
321 var e_curfn *ir.Func // TODO(mdempsky): Fix.
322 logopt.LogOpt(n.Pos(), "escape", "escape", ir.FuncName(e_curfn))
327 if base.Flag.LowerM != 0 && n.Op() != ir.ONAME && !goDeferWrapper {
328 base.WarnfAt(n.Pos(), "%v does not escape", n)
331 if !loc.hasAttr(attrPersists) {
	// NOTE(review): the elided bodies presumably mark these
	// non-persisting expressions as transiently allocatable — confirm
	// against the full source.
334 n := n.(*ir.ClosureExpr)
337 n := n.(*ir.SelectorExpr)
340 n := n.(*ir.CompLitExpr)
346 // If the result of a string->[]byte conversion is never mutated,
347 // then it can simply reuse the string's memory directly.
349 // TODO(mdempsky): Enable in a subsequent CL. We need to ensure
350 // []byte("") evaluates to []byte{}, not []byte(nil).
352 if n, ok := n.(*ir.ConvExpr); ok && n.Op() == ir.OSTR2BYTES && !loc.hasAttr(attrMutates) {
353 if base.Flag.LowerM >= 1 {
354 base.WarnfAt(n.Pos(), "zero-copy string->[]byte conversion")
356 n.SetOp(ir.OSTR2BYTESTMP)
362 // inMutualBatch reports whether function fn is in the batch of
363 // mutually recursive functions being analyzed. When this is true,
364 // fn has not yet been analyzed, so its parameters and results
365 // should be incorporated directly into the flow graph instead of
366 // relying on its escape analysis tagging.
367 func (b *batch) inMutualBatch(fn *ir.Name) bool {
	// Esc progresses planned (initFunc) -> started (walkFunc) ->
	// tagged (finish); anything below tagged is still in flight.
368 if fn.Defn != nil && fn.Defn.Esc() < escFuncTagged {
	// A function we can see but that was never planned means the
	// bottom-up call-graph traversal is broken.
369 if fn.Defn.Esc() == escFuncUnknown {
370 base.FatalfAt(fn.Pos(), "graph inconsistency: %v", fn)
	// escFuncUnknown is the zero state: the function has not yet been
	// scheduled for analysis (see initFunc).
378 escFuncUnknown = 0 + iota
384 // Mark labels that have no backjumps to them as not increasing e.loopdepth.
	// looping marks a label that is the target of a backwards goto,
	// i.e., the head of an unstructured loop (see walkFunc).
388 looping labelState = 1 + iota
// paramTag returns the escape-analysis tag for fn's narg'th
// parameter f; finish records it in export data via f.Note.
392 func (b *batch) paramTag(fn *ir.Func, narg int, f *types.Field) string {
	// name is only used for -m diagnostics; it falls back to "arg#N"
	// for unnamed parameters.
393 name := func() string {
397 return fmt.Sprintf("arg#%d", narg)
400 // Only report diagnostics for user code;
401 // not for wrappers generated around them.
402 // TODO(mdempsky): Generalize this.
403 diagnose := base.Flag.LowerM != 0 && !(fn.Wrapper() || fn.Dupok())
	// fn has no body visible to the compiler, so we cannot analyze it;
	// fall back to pragmas and conservative assumptions.
405 if len(fn.Body) == 0 {
406 // Assume that uintptr arguments must be held live across the call.
407 // This is most important for syscall.Syscall.
408 // See golang.org/issue/13372.
409 // This really doesn't have much to do with escape analysis per se,
410 // but we are reusing the ability to annotate an individual function
411 // argument and pass those annotations along to importing code.
412 fn.Pragma |= ir.UintptrKeepAlive
414 if f.Type.IsUintptr() {
416 base.WarnfAt(f.Pos, "assuming %v is unsafe uintptr", name())
421 if !f.Type.HasPointers() { // don't bother tagging for scalars
427 // External functions are assumed unsafe, unless
428 // //go:noescape is given before the declaration.
429 if fn.Pragma&ir.Noescape != 0 {
430 if diagnose && f.Sym != nil {
431 base.WarnfAt(f.Pos, "%v does not escape", name())
436 if diagnose && f.Sym != nil {
437 base.WarnfAt(f.Pos, "leaking param: %v", name())
	// //go:uintptrescapes: uintptr arguments are treated as escaping.
445 if fn.Pragma&ir.UintptrEscapes != 0 {
446 if f.Type.IsUintptr() {
448 base.WarnfAt(f.Pos, "marking %v as escaping uintptr", name())
452 if f.IsDDD() && f.Type.Elem().IsUintptr() {
453 // final argument is ...uintptr.
455 base.WarnfAt(f.Pos, "marking %v as escaping ...uintptr", name())
461 if !f.Type.HasPointers() { // don't bother tagging for scalars
465 // Unnamed parameters are unused and therefore do not escape.
466 if f.Sym == nil || f.Sym.IsBlank() {
471 n := f.Nname.(*ir.Name)
	// Summarize the analyzed flow for this parameter as -m diagnostics.
476 if diagnose && !loc.hasAttr(attrEscapes) {
478 if x := esc.Heap(); x >= 0 {
	// The elided condition distinguishes the parameter itself leaking
	// ("leaking param") from only its pointed-to data leaking
	// ("leaking param content").
480 base.WarnfAt(f.Pos, "leaking param: %v", name())
482 // TODO(mdempsky): Mention level=x like below?
483 base.WarnfAt(f.Pos, "leaking param content: %v", name())
487 for i := 0; i < numEscResults; i++ {
488 if x := esc.Result(i); x >= 0 {
489 res := fn.Type().Results().Field(i).Sym
490 base.WarnfAt(f.Pos, "leaking param: %v to result %v level=%d", name(), res, x)
495 base.WarnfAt(f.Pos, "%v does not escape", name())
	// Mutation/call summaries are only reported at -m=2 and above.
498 if base.Flag.LowerM >= 2 {
499 if x := esc.Mutator(); x >= 0 {
500 base.WarnfAt(f.Pos, "mutates param: %v derefs=%v", name(), x)
502 base.WarnfAt(f.Pos, "does not mutate param: %v", name())
504 if x := esc.Callee(); x >= 0 {
505 base.WarnfAt(f.Pos, "calls param: %v derefs=%v", name(), x)
507 base.WarnfAt(f.Pos, "does not call param: %v", name())