1 // Copyright 2011 The Go Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style
3 // license that can be found in the LICENSE file.
8 "cmd/compile/internal/ssa"
16 // "Portable" code generation.
// makefuncdatasym_nsym is a per-compilation counter used to give each
// generated FUNCDATA symbol a unique name.
var makefuncdatasym_nsym int32

// makefuncdatasym creates a fresh symbol by formatting namefmt with the
// running counter, then emits a FUNCDATA pseudo-instruction of kind
// funcdatakind that refers to it. The symbol is returned so the caller
// can later fill in its contents (e.g. GC bitmaps).
// NOTE(review): parts of this function are not visible in this view.
func makefuncdatasym(namefmt string, funcdatakind int64) *Sym {
	sym := Lookupf(namefmt, makefuncdatasym_nsym)
	makefuncdatasym_nsym++
	// FUNCDATA takes the kind as a constant operand and the symbol node
	// as its destination.
	Nodconst(&nod, Types[TINT32], funcdatakind)
	Thearch.Gins(obj.AFUNCDATA, &nod, pnod)
// gvardef inserts a VARDEF for n into the instruction stream.
// VARDEF is an annotation for the liveness analysis, marking a place
// where a complete initialization (definition) of a variable begins.
// Since the liveness analysis can see initialization of single-word
// variables quite easily, gvardef is usually only called for multi-word
// or 'fat' variables, those satisfying isfat(n.Type).
// However, gvardef is also called when a non-fat variable is initialized
// via a block move; the only time this happens is when you have
// for a function with multiple return values exactly matching the return
// types of the current function.
// A 'VARDEF x' annotation in the instruction stream tells the liveness
// analysis to behave as though the variable x is being initialized at that
// point in the instruction stream. The VARDEF must appear before the
// actual (multi-instruction) initialization, and it must also appear after
// any uses of the previous value, if any. For example, if compiling:
// it is important to generate code like:
// base, len, cap = pieces of x[1:]
// x = {base, len, cap}
// If instead the generated code looked like:
// base, len, cap = pieces of x[1:]
// x = {base, len, cap}
// then the liveness analysis would decide the previous value of x was
// unnecessary even though it is about to be used by the x[1:] computation.
// Similarly, if the generated code looked like:
// base, len, cap = pieces of x[1:]
// x = {base, len, cap}
// then the liveness analysis will not preserve the new value of x, because
// the VARDEF appears to have "overwritten" it.
// VARDEF is a bit of a kludge to work around the fact that the instruction
// stream is working on single-word values but the liveness analysis
// wants to work on individual variables, which might be multi-word
// aggregates. It might make sense at some point to look into letting
// the liveness analysis work on single-word values as well, although
// there are complications around interface values, slices, and strings,
// all of which cannot be treated as individual words.
// VARKILL is the opposite of VARDEF: it marks a value as no longer needed,
// even if its address has been taken. That is, a VARKILL annotation asserts
// that its argument is certainly dead, for use when the liveness analysis
// would not otherwise be able to deduce that fact.
// gvardefx emits the liveness pseudo-instruction as (VARDEF, VARKILL,
// or VARLIVE) for n, but only for stack variables (autos and params).
// NOTE(review): parts of this function are not visible in this view.
func gvardefx(n *Node, as int) {
	Yyerror("gvardef %v; %v", Oconv(int(n.Op), obj.FmtSharp), n)
	case PAUTO, PPARAM, PPARAMOUT:
		// VARLIVE puts the node in the source operand; VARDEF/VARKILL
		// put it in the destination operand.
		if as == obj.AVARLIVE {
			Thearch.Gins(as, n, nil)
			Thearch.Gins(as, nil, n)
// Gvardef emits a VARDEF annotation for n; see the commentary on
// gvardefx above for what VARDEF means to the liveness analysis.
func Gvardef(n *Node) {
	gvardefx(n, obj.AVARDEF)
// gvarkill emits a VARKILL annotation for n, asserting to the liveness
// analysis that n is certainly dead at this point.
func gvarkill(n *Node) {
	gvardefx(n, obj.AVARKILL)
// gvarlive emits a VARLIVE annotation for n, keeping it live for the
// liveness analysis at this point in the instruction stream.
func gvarlive(n *Node) {
	gvardefx(n, obj.AVARLIVE)
// removevardef strips the VARDEF/VARKILL/VARLIVE liveness annotations
// from the instruction list starting at firstp, once the liveness
// analysis no longer needs them. Branch targets are advanced past any
// annotations so no branch lands on a removed pseudo-instruction.
// NOTE(review): parts of this function are not visible in this view.
func removevardef(firstp *obj.Prog) {
	for p := firstp; p != nil; p = p.Link {
		// Unlink annotation pseudo-instructions that follow p.
		for p.Link != nil && (p.Link.As == obj.AVARDEF || p.Link.As == obj.AVARKILL || p.Link.As == obj.AVARLIVE) {
		// If p is a branch, retarget it past any annotations as well.
		if p.To.Type == obj.TYPE_BRANCH {
			for p.To.Val.(*obj.Prog) != nil && (p.To.Val.(*obj.Prog).As == obj.AVARDEF || p.To.Val.(*obj.Prog).As == obj.AVARKILL || p.To.Val.(*obj.Prog).As == obj.AVARLIVE) {
				p.To.Val = p.To.Val.(*obj.Prog).Link
// gcsymdup renames the GC bitmap symbol s to a content-addressed name
// (gclocals·<md5 of its data>) so that identical bitmaps from different
// functions are merged into a single symbol by the linker. Symbols with
// relocations cannot be content-addressed, hence the fatal error.
// NOTE(review): parts of this function are not visible in this view.
func gcsymdup(s *Sym) {
	Fatalf("cannot rosymdup %s with relocations", ls.Name)
	ls.Name = fmt.Sprintf("gclocals·%x", md5.Sum(ls.P))
// emitptrargsmap emits the "<fn>.args_stackmap" symbol for the current
// function: a pointer bitmap over the argument (and, if present, result)
// words, used by the runtime stack scanner for functions without local
// liveness information. Blank functions ("_") get no map.
// NOTE(review): parts of this function are not visible in this view.
func emitptrargsmap() {
	if Curfn.Func.Nname.Sym.Name == "_" {
	sym := Lookup(fmt.Sprintf("%s.args_stackmap", Curfn.Func.Nname.Sym.Name))
	// One bitmap bit pair per pointer-sized word of arguments.
	nptr := int(Curfn.Type.Argwid / int64(Widthptr))
	bv := bvalloc(int32(nptr) * 2)
	if Curfn.Type.Outtuple > 0 {
	// Header: number of bitmaps, then bits per bitmap.
	off := duint32(sym, 0, uint32(nbitmap))
	off = duint32(sym, off, uint32(bv.n))
	// Walk receiver and input parameters, setting pointer bits.
	if Curfn.Type.Thistuple > 0 {
		onebitwalktype1(getthisx(Curfn.Type), &xoffset, bv)
	if Curfn.Type.Intuple > 0 {
		onebitwalktype1(getinargx(Curfn.Type), &xoffset, bv)
	// Emit the bitmap, 32 bits per word.
	for j := 0; int32(j) < bv.n; j += 32 {
		off = duint32(sym, off, bv.b[j/32])
	// Results get a second bitmap, if any.
	if Curfn.Type.Outtuple > 0 {
		onebitwalktype1(getoutargx(Curfn.Type), &xoffset, bv)
		for j := 0; int32(j) < bv.n; j += 32 {
			off = duint32(sym, off, bv.b[j/32])
	ggloblsym(sym, int32(off), obj.RODATA|obj.LOCAL)
// cmpstackvarlt reports whether the stack variable a sorts before b.
// Sort the list of stack variables. Autos after anything else,
// within autos, unused after used, within used, things with
// pointers first, zeroed things first, and then decreasing size.
// Because autos are laid out in decreasing addresses
// on the stack, pointers first, zeroed things first and decreasing size
// really means, in memory, things with pointers needing zeroing at
// the top of the stack and increasing in size.
// Non-autos sort on offset.
// NOTE(review): parts of this function are not visible in this view.
func cmpstackvarlt(a, b *Node) bool {
	// Autos sort after all other classes.
	if (a.Class == PAUTO) != (b.Class == PAUTO) {
		return b.Class == PAUTO
	// Non-autos sort by their assigned frame offset.
	if a.Class != PAUTO {
		return a.Xoffset < b.Xoffset
	// Within autos: used before unused.
	if a.Used != b.Used {
	// Then pointer-containing before pointer-free.
	ap := haspointers(a.Type)
	bp := haspointers(b.Type)
	// Then decreasing size.
	if a.Type.Width != b.Type.Width {
		return a.Type.Width > b.Type.Width
	// Finally, break ties by name for a deterministic order.
	return a.Sym.Name < b.Sym.Name
// byStackVar implements sort.Interface for []*Node using cmpstackvarlt.
type byStackVar []*Node

func (s byStackVar) Len() int { return len(s) }
func (s byStackVar) Less(i, j int) bool { return cmpstackvarlt(s[i], s[j]) }
func (s byStackVar) Swap(i, j int) { s[i], s[j] = s[j], s[i] }
// stkdelta records the stack offset delta for a node
// during the compaction of the stack frame to remove
// unused stack slots. Populated by allocauto and applied
// to each node's Xoffset afterward.
var stkdelta = map[*Node]int64{}
// TODO(lvd) find out where the PAUTO/OLITERAL nodes come from.
// allocauto lays out the current function's stack frame: it sorts the
// local variables (see cmpstackvarlt), drops unused autos, and assigns
// each remaining auto a stack offset, recording the offset delta in
// stkdelta and applying it to the declared symbols.
// NOTE(review): parts of this function are not visible in this view.
func allocauto(ptxt *obj.Prog) {
	if len(Curfn.Func.Dcl) == 0 {
	// Mark the PAUTO's unused.
	for _, ln := range Curfn.Func.Dcl {
		if ln.Class == PAUTO {
	sort.Sort(byStackVar(Curfn.Func.Dcl))
	// Unused autos are at the end, chop 'em off.
	n := Curfn.Func.Dcl[0]
	if n.Class == PAUTO && n.Op == ONAME && !n.Used {
		// No locals used at all
	for i := 1; i < len(Curfn.Func.Dcl); i++ {
		n = Curfn.Func.Dcl[i]
		if n.Class == PAUTO && n.Op == ONAME && !n.Used {
			Curfn.Func.Dcl = Curfn.Func.Dcl[:i]
	// Reassign stack offsets of the locals that are still there.
	for _, n := range Curfn.Func.Dcl {
		if n.Class != PAUTO || n.Op != ONAME {
		if w >= Thearch.MAXWIDTH || w < 0 {
		Stksize = Rnd(Stksize, int64(n.Type.Align))
		if haspointers(n.Type) {
		// These architectures ('0'=mips64, '5'=arm, '7'=arm64,
		// '9'=ppc64 by Thechar convention) need pointer alignment here.
		if Thearch.Thechar == '0' || Thearch.Thechar == '5' || Thearch.Thechar == '7' || Thearch.Thechar == '9' {
			Stksize = Rnd(Stksize, int64(Widthptr))
		if Stksize >= 1<<31 {
			Yyerror("stack frame too large (>2GB)")
		// Offsets are negative from the frame top; record the delta
		// so the symbol's Xoffset can be fixed up below.
		stkdelta[n] = -Stksize - n.Xoffset
	Stksize = Rnd(Stksize, int64(Widthreg))
	stkptrsize = Rnd(stkptrsize, int64(Widthreg))
	// The debug information needs accurate offsets on the symbols.
	for _, ln := range Curfn.Func.Dcl {
		if ln.Class != PAUTO || ln.Op != ONAME {
		ln.Xoffset += stkdelta[ln]
// Cgen_checknil emits a CHECKNIL pseudo-instruction for n, which the
// back end expands into an explicit nil check. If the operand is not
// directly addressable (or the architecture requires a register
// operand), the value is first loaded into a register.
// NOTE(review): parts of this function are not visible in this view;
// the '®' characters in the extracted text were mojibake for '&reg'
// and have been restored.
func Cgen_checknil(n *Node) {
	if Disable_checknil != 0 {
	// Ideally we wouldn't see any integer types here, but we do.
	if n.Type == nil || (!Isptr[n.Type.Etype] && !Isint[n.Type.Etype] && n.Type.Etype != TUNSAFEPTR) {
		Fatalf("bad checknil")
	// Architectures '0','5','7','9' require a register operand;
	// non-addressable nodes and literals must be materialized first.
	if ((Thearch.Thechar == '0' || Thearch.Thechar == '5' || Thearch.Thechar == '7' || Thearch.Thechar == '9') && n.Op != OREGISTER) || !n.Addable || n.Op == OLITERAL {
		Regalloc(&reg, Types[Tptr], n)
		Thearch.Gins(obj.ACHECKNIL, &reg, nil)
	Thearch.Gins(obj.ACHECKNIL, n, nil)
// compile generates machine code for the function fn: it resolves the
// runtime helper symbols on first use, sets up the TEXT pseudo-
// instruction and its flags, emits FUNCDATA for the GC maps, generates
// the body (via the SSA back end when shouldssa allows, otherwise the
// legacy generator), lays out the frame, and runs liveness analysis.
// NOTE(review): large parts of this function are not visible in this
// view; the code below is a fragment.
func compile(fn *Node) {
	// Lazily resolve the runtime support functions used by generated code.
	Newproc = Sysfunc("newproc")
	Deferproc = Sysfunc("deferproc")
	Deferreturn = Sysfunc("deferreturn")
	Panicindex = Sysfunc("panicindex")
	panicslice = Sysfunc("panicslice")
	panicdivide = Sysfunc("panicdivide")
	throwreturn = Sysfunc("throwreturn")
	growslice = Sysfunc("growslice")
	writebarrierptr = Sysfunc("writebarrierptr")
	typedmemmove = Sysfunc("typedmemmove")
	panicdottype = Sysfunc("panicdottype")
	// A body-less function is only legal as an assembly stub; in pure-Go
	// mode (or for init. functions) it is an error.
	if pure_go != 0 || strings.HasPrefix(fn.Func.Nname.Sym.Name, "init.") {
		Yyerror("missing function body for %q", fn.Func.Nname.Sym.Name)
	// set up domain for labels
	if Curfn.Type.Outnamed {
		// add clearing of the output parameters
		t := Structfirst(&save, Getoutarg(Curfn.Type))
		n = Nod(OAS, t.Nname, nil)
		Curfn.Nbody = concat(list1(n), Curfn.Nbody)
		t = structnext(&save)
	// Build an SSA backend function.
	if shouldssa(Curfn) {
		ssafn = buildssa(Curfn)
	pl.Name = Linksym(Curfn.Func.Nname.Sym)
	// Emit the TEXT pseudo-instruction that opens the function.
	Nodconst(&nod1, Types[TINT32], 0)
	nam = Curfn.Func.Nname
	ptxt = Thearch.Gins(obj.ATEXT, nam, &nod1)
	Afunclit(&ptxt.From, Curfn.Func.Nname)
	// From3.Offset carries the TEXT flags (DUPOK, WRAPPER, ...).
	ptxt.From3 = new(obj.Addr)
	ptxt.From3.Offset |= obj.DUPOK
	ptxt.From3.Offset |= obj.WRAPPER
	if fn.Func.Needctxt {
		ptxt.From3.Offset |= obj.NEEDCTXT
	if fn.Func.Pragma&Nosplit != 0 {
		ptxt.From3.Offset |= obj.NOSPLIT
	if fn.Func.Pragma&Systemstack != 0 {
		ptxt.From.Sym.Cfunc = 1
	// Clumsy but important.
	// See test/recover.go for test cases and src/reflect/value.go
	// for the actual functions being considered.
	if myimportpath != "" && myimportpath == "reflect" {
		if Curfn.Func.Nname.Sym.Name == "callReflect" || Curfn.Func.Nname.Sym.Name == "callMethod" {
			ptxt.From3.Offset |= obj.WRAPPER
	// FUNCDATA symbols for the argument and local pointer maps,
	// filled in later by liveness.
	gcargs = makefuncdatasym("gcargs·%d", obj.FUNCDATA_ArgsPointerMaps)
	gclocals = makefuncdatasym("gclocals·%d", obj.FUNCDATA_LocalsPointerMaps)
	for _, t := range Curfn.Func.Fieldtrack {
	// Emit ATYPE pseudo-instructions describing each stack variable
	// for the debugger.
	for _, n := range fn.Func.Dcl {
		if n.Op != ONAME { // might be OTYPE or OLITERAL
		case PAUTO, PPARAM, PPARAMOUT:
			Nodconst(&nod1, Types[TUINTPTR], n.Type.Width)
			p = Thearch.Gins(obj.ATYPE, n, &nod1)
			p.From.Gotype = Linksym(ngotype(n))
	// SSA path: hand the prepared function to the SSA code generator.
	genssa(ssafn, ptxt, gcargs, gclocals)
	if Curfn.Func.Endlineno != 0 {
		lineno = Curfn.Func.Endlineno
	// Legacy path: generate the entry sequence and body directly.
	Genslice(Curfn.Func.Enter.Slice())
	if Curfn.Func.Endlineno != 0 {
		lineno = Curfn.Func.Endlineno
	if Curfn.Type.Outtuple != 0 {
		Ginscall(throwreturn, 0)
	// TODO: Determine when the final cgen_ret can be omitted. Perhaps always?
	// deferreturn pretends to have one uintptr argument.
	// Reserve space for it so stack scanner is happy.
	if Maxarg < int64(Widthptr) {
		Maxarg = int64(Widthptr)
	Pc.As = obj.ARET // overwrite AEND
	// Run the optimizer unless disabled (-N without -R/-P overrides).
	if Debug['N'] == 0 || Debug['R'] != 0 || Debug['P'] != 0 {
	Thearch.Expandchecks(ptxt)
	fmt.Printf("allocauto: %d to %d\n", oldstksize, int64(Stksize))
	if int64(Stksize)+Maxarg > 1<<31 {
		Yyerror("stack frame too large (>2GB)")
	// Emit garbage collection symbols.
	liveness(Curfn, ptxt, gcargs, gclocals)
	Thearch.Defframe(ptxt)
	// Remove leftover instrumentation from the instruction stream.