1 // Copyright 2009 The Go Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style
3 // license that can be found in the LICENSE file.
12 // portable half of code generator.
13 // mainly statements and control flow.
// Sysfunc returns a Node naming the runtime-package function with the
// given name. (Remainder of the body elided in this excerpt.)
func Sysfunc(name string) *Node {
	n := newname(Pkglookup(name, Runtimepkg))
// addrescapes tags node n as having had its address taken
// by "increasing" the "value" of n.Esc to EscHeap.
// Storage is allocated as necessary to allow the address
func addrescapes(n *Node) {
	// probably a type error already.
	// dump("addrescapes", n);

	// if this is a tmpname (PAUTO), it was tagged by tmpname as not escaping.
	// on PPARAM it means something different.
	if n.Class == PAUTO && n.Esc == EscNever {

	// Recurse into the defining node (NOTE(review): enclosing case elided
	// in this excerpt — presumably the closure-variable case; confirm).
	addrescapes(n.Name.Defn)

	// if func param, need separate temporary
	// to hold heap pointer.
	// the function type has already been checked
	// (we're in the function body)
	// so the param already has a valid xoffset.

	// expression to refer to stack copy
	case PPARAM, PPARAMOUT:
		n.Name.Param.Stackparam = Nod(OPARAM, n, nil)
		n.Name.Param.Stackparam.Type = n.Type
		n.Name.Param.Stackparam.Addable = true
		if n.Xoffset == BADWIDTH {
			Fatalf("addrescapes before param assignment")
		n.Name.Param.Stackparam.Xoffset = n.Xoffset

	// create stack variable to hold pointer to heap
	n.Name.Heapaddr = temp(Ptrto(n.Type))
	// Name the heap pointer "&x" after the original symbol, for diagnostics.
	buf := fmt.Sprintf("&%v", n.Sym)
	n.Name.Heapaddr.Sym = Lookup(buf)
	n.Name.Heapaddr.Orig.Sym = n.Name.Heapaddr.Sym
	fmt.Printf("%v: moved to heap: %v\n", n.Line(), n)

	// ODOTPTR has already been introduced,
	// so these are the non-pointer ODOT and OINDEX.
	// In &x[0], if x is a slice, then x does not
	// escape--the pointer inside x does, but that
	// is always a heap pointer anyway.
	case ODOT, OINDEX, OPAREN, OCONVNOP:
		if !Isslice(n.Left.Type) {
	// Walk the global label list (linked through Link).
	for l := labellist; l != nil; l = l.Link {
// newlab returns the Label record for node n, reporting duplicate
// definitions and recording uses. (Body partially elided in this excerpt.)
func newlab(n *Node) *Label {
	if lastlabel == nil {
	Yyerror("label %v already defined at %v", s, lab.Def.Line())
	lab.Use = append(lab.Use, n)
// checkgoto reports an error if the goto statement from jumps into a
// block or over a variable declaration on the way to label to.
//
// There is a copy of checkgoto in the new SSA backend.
// Please keep them in sync.
func checkgoto(from *Node, to *Node) {
	if from.Sym == to.Sym {

	// Measure the scope depth on each side, then walk the deeper
	// chain up until both sides are at the same depth.
	for fs := from.Sym; fs != nil; fs = fs.Link {
	for fs := to.Sym; fs != nil; fs = fs.Link {
	for ; nf > nt; nf-- {

	// decide what to complain about.
	// prefer to complain about 'into block' over declarations,
	// so scan backward to find most recent block or else dcl.
	for ; nt > nf; nt-- {
	Yyerror("goto %v jumps into block starting at %v", from.Left.Sym, Ctxt.Line(int(block.Lastlineno)))
	Yyerror("goto %v jumps over declaration of %v at %v", from.Left.Sym, dcl, Ctxt.Line(int(dcl.Lastlineno)))
// stmtlabel returns the Label whose defining statement is n, if any.
func stmtlabel(n *Node) *Label {
	if lab.Def.Name.Defn == n {
// compile statements
// Genlist generates code for every statement in the list l, in order.
func Genlist(l *NodeList) {
	for ; l != nil; l = l.Next {
// generate code to start new proc running call n.
// proc selects the call flavor (go/defer vs. plain — exact encoding
// not visible in this excerpt).
func cgen_proc(n *Node, proc int) {
	Fatalf("cgen_proc: unknown call %v", Oconv(int(n.Left.Op), 0))
	// Dispatch on the call form: method, interface, or direct call.
	cgen_callmeth(n.Left, proc)
	cgen_callinter(n.Left, nil, proc)
	cgen_call(n.Left, proc)
// generate declaration.
// have to allocate heap copy
// for escaped variables.
func cgen_dcl(n *Node) {
	Dump("\ncgen-dcl", n)
	// Only heap-escaping (PHEAP) variables need allocation here.
	if n.Class&PHEAP == 0 {
	// The runtime must not itself allocate via escape-to-heap.
	if compiling_runtime != 0 {
		Yyerror("%v escapes to heap, not allowed in runtime.", n)
	// Allocate the heap cell once and cache it in prealloc.
	if prealloc[n] == nil {
		prealloc[n] = callnew(n.Type)
	// Store the heap pointer into the variable's Heapaddr slot.
	Cgen_as(n.Name.Heapaddr, prealloc[n])
// generate discard of value
func cgen_discard(nr *Node) {
	// Plain stack variables: nothing to evaluate for effect.
	if nr.Class&PHEAP == 0 && nr.Class != PEXTERN && nr.Class != PFUNC && nr.Class != PPARAMREF {
	// Operators: discard operands recursively (binary then unary forms).
	cgen_discard(nr.Left)
	cgen_discard(nr.Right)
	cgen_discard(nr.Left)
	// Pointer dereference: still must perform the nil check.
	Cgen_checknil(nr.Left)
	// special enough to just evaluate
	Tempname(&tmp, nr.Type)
// clearslim generates code to zero a slim node.
func Clearslim(n *Node) {
	// Build a constant zero value z of n's simplified type,
	// then assign it (assignment elided in this excerpt).
	switch Simtype[n.Type.Etype] {
	case TCOMPLEX64, TCOMPLEX128:
		z.SetVal(Val{new(Mpcplx)})
		Mpmovecflt(&z.Val().U.(*Mpcplx).Real, 0.0)
		Mpmovecflt(&z.Val().U.(*Mpcplx).Imag, 0.0)

	case TFLOAT32, TFLOAT64:
		Mpmovecflt(&zero, 0.0)

	case TPTR32, TPTR64, TCHAN, TMAP:
		// Pointer-shaped types are zeroed with the nil value.
		z.SetVal(Val{new(NilVal)})

	// Integer types: zero Mpint constant.
	z.SetVal(Val{new(Mpint)})
	Mpmovecfix(z.Val().U.(*Mpint), 0)

	Fatalf("clearslim called on type %v", n.Type)
// res = iface{typ, data}
// Cgen_eface assembles an empty-interface value into res.
func Cgen_eface(n *Node, res *Node) {
	// the right node of an eface may contain function calls that uses res as an argument,
	// so it's important that it is done first
	tmp := temp(Types[Tptr])
	// Write the word at offset Widthptr (data) first, then return to
	// offset 0 for the type word.
	dst.Type = Types[Tptr]
	dst.Xoffset += int64(Widthptr)
	dst.Xoffset -= int64(Widthptr)
// res, resok = x.(T)
// res = x.(T) (when resok == nil)
// cgen_dottype generates an inlined type assertion; on failure it either
// panics (resok == nil) or sets resok to false.
func cgen_dottype(n *Node, res, resok *Node, wb bool) {
	if Debug_typeassert > 0 {
		Warn("type assertion inlined")
	// r1 := iword(iface)
	// if n.Left is non-empty interface {
	// res = idata(iface)
	// assert[EI]2T(x, T, nil) // (when resok == nil; does not return)
	// resok = false // (when resok != nil)
	Igen(n.Left, &iface, res)
	byteptr := Ptrto(Types[TUINT8]) // type used in runtime prototypes for runtime type (*byte)
	Regalloc(&r1, byteptr, nil)
	if !isnilinter(n.Left.Type) {
		// Holding itab, want concrete type in second word.
		p := Thearch.Ginscmp(OEQ, byteptr, &r1, Nodintconst(0), -1)
		r2.Xoffset = int64(Widthptr)
	// Compare the dynamic type in r1 against the wanted type T in r2.
	Regalloc(&r2, byteptr, nil)
	Cgen(typename(n.Type), &r2)
	p := Thearch.Ginscmp(ONE, byteptr, &r1, &r2, -1)
	Regfree(&r2) // not needed for success path; reclaimed on one failure path
	iface.Xoffset += int64(Widthptr)
	cgen_wb(&r1, res, wb)
	q := Gbranch(obj.AJMP, nil, 0)
	// Failure path: call runtime.panicdottype(have, want, iface type).
	Regrealloc(&r2) // reclaim from above, for this failure path
	fn := syslook("panicdottype", 0)
	call := Nod(OCALLFUNC, fn, nil)
	call.List = list(list(list1(&r1), &r2), typename(n.Left.Type))
	call.List = ascompatte(OCALLFUNC, call, false, getinarg(fn.Type), call.List, 0, nil)
	// panicdottype does not return; mark the fallthrough unreachable.
	Thearch.Gins(obj.AUNDEF, nil, nil)

	// This half is handling the res, resok = x.(T) case,
	// which is called from gen, not cgen, and is consequently fussier
	// about blank assignments. We have to avoid calling cgen for those.
	cgen_wb(&r1, res, wb)
	Cgen(Nodbool(true), resok)
	q := Gbranch(obj.AJMP, nil, 0)
	Cgen(Nodbool(false), resok)
// res, resok = x.(T)
// Cgen_As2dottype generates the two-result form of an inlined type
// assertion (panic-free: failure sets resok false).
func Cgen_As2dottype(n, res, resok *Node) {
	if Debug_typeassert > 0 {
		Warn("type assertion inlined")
	// r1 := iword(iface)
	// if n.Left is non-empty interface {
	// res = idata(iface)
	Igen(n.Left, &iface, nil)
	byteptr := Ptrto(Types[TUINT8]) // type used in runtime prototypes for runtime type (*byte)
	Regalloc(&r1, byteptr, res)
	if !isnilinter(n.Left.Type) {
		// Holding itab, want concrete type in second word.
		p := Thearch.Ginscmp(OEQ, byteptr, &r1, Nodintconst(0), -1)
		r2.Xoffset = int64(Widthptr)
	// Compare dynamic type (r1) against wanted type T (r2).
	Regalloc(&r2, byteptr, nil)
	Cgen(typename(n.Type), &r2)
	p := Thearch.Ginscmp(ONE, byteptr, &r1, &r2, -1)
	iface.Xoffset += int64(Widthptr)
	q := Gbranch(obj.AJMP, nil, 0)
	// Failure path: call runtime.panicdottype (NOTE(review): reached here
	// despite the two-result contract — surrounding control flow elided).
	fn := syslook("panicdottype", 0)
	call := Nod(OCALLFUNC, fn, nil)
	call.List = list(list(list1(&r1), &r2), typename(n.Left.Type))
	call.List = ascompatte(OCALLFUNC, call, false, getinarg(fn.Type), call.List, 0, nil)
	Thearch.Gins(obj.AUNDEF, nil, nil)
// gather series of offsets
// >=0 is direct addressed field
// <0 is pointer to next field (+1)
// Dotoffset fills oary with the offset chain for the dotted expression n
// and returns the number of entries; *nn receives the base node.
func Dotoffset(n *Node, oary []int64, nn **Node) int {
	if n.Xoffset == BADWIDTH {
		Dump("bad width in dotoffset", n)
		Fatalf("bad width in dotoffset")
	// Direct field: accumulate offset into the previous entry.
	i = Dotoffset(n.Left, oary, nn)
	oary[i-1] += n.Xoffset
	oary[i-1] -= n.Xoffset
	if n.Xoffset == BADWIDTH {
		Dump("bad width in dotoffset", n)
		Fatalf("bad width in dotoffset")
	// Indirect field: record as -(offset+1) per the encoding above.
	i = Dotoffset(n.Left, oary, nn)
	oary[i] = -(n.Xoffset + 1)
// make a new off the books
// Tempname initializes *nn as a fresh function-local temporary of type t,
// named autotmp_NNNN and appended to Curfn's declaration list.
func Tempname(nn *Node, t *Type) {
	Fatalf("no curfn for tempname")
	Yyerror("tempname called with nil type")

	// give each tmp a different name so that there
	// a chance to registerizer them
	s := Lookupf("autotmp_%.4d", statuniqgen)
	n := Nod(ONAME, nil, nil)
	Curfn.Func.Dcl = list(Curfn.Func.Dcl, n)
// temp returns a new temporary node of type t, marked as used.
func temp(t *Type) *Node {
	n := Nod(OXXX, nil, nil)
	n.Sym.Def.Used = true
	// (Interior of gen; the function header and the statement switch
	// are elided in this excerpt.)
	// Remember register-allocation state to verify balance on exit.
	wasregalloc := Anyregalloc()
	Fatalf("gen: unknown op %v", Nconv(n, obj.FmtShort|obj.FmtSign))
	// OLABEL: blank labels generate nothing.
	if isblanksym(n.Left.Sym) {
	// if there are pending gotos, resolve them all to the current pc.
	for p1 := lab.Gotopc; p1 != nil; p1 = p2 {
	if lab.Labelpc == nil {
	if n.Name.Defn != nil {
		switch n.Name.Defn.Op {
		// so stmtlabel can find the label
		case OFOR, OSWITCH, OSELECT:
			n.Name.Defn.Sym = lab.Sym

	// if label is defined, emit jump to it.
	// otherwise save list of pending gotos in lab->gotopc.
	// the list is linked through the normal jump target field
	// to avoid a second list. (the jumps are actually still
	// valid code, since they're just going to another goto
	// to the same label. we'll unwind it when we learn the pc
	// of the label in the OLABEL case above.)
	if lab.Labelpc != nil {
	lab.Gotopc = gjmp(lab.Gotopc)

	// OBREAK: validate the (optional) label and jump to its break pc.
	lab := n.Left.Sym.Label
	Yyerror("break label not defined: %v", n.Left.Sym)
	if lab.Breakpc == nil {
		Yyerror("invalid break label %v", n.Left.Sym)
	Yyerror("break is not in a loop")

	// OCONTINUE: same shape as break, using the continue pc.
	lab := n.Left.Sym.Label
	Yyerror("continue label not defined: %v", n.Left.Sym)
	if lab.Continpc == nil {
		Yyerror("invalid continue label %v", n.Left.Sym)
	Yyerror("continue is not in a loop")

	// OFOR: rotated loop — jump to test, body above it.
	p1 := gjmp(nil)     // goto test
	breakpc = gjmp(nil) // break: goto done

	// define break and continue labels
	lab.Breakpc = breakpc
	lab.Continpc = continpc
	gen(n.Right)                     // contin: incr
	Patch(p1, Pc)                    // test:
	Bgen(n.Left, false, -1, breakpc) // if(!test) goto break
	Genlist(n.Nbody)                 // body
	Patch(breakpc, Pc)               // done:

	// OIF: test, then-branch, else-branch.
	p1 := gjmp(nil)                         // goto test
	p2 := gjmp(nil)                         // p2: goto else
	Patch(p1, Pc)                           // test:
	Bgen(n.Left, false, int(-n.Likely), p2) // if(!test) goto p2
	Genlist(n.Nbody)                        // then
	p3 := gjmp(nil)                         // goto done
	Patch(p2, Pc)                           // else:
	Genlist(n.Rlist)                        // else
	Patch(p3, Pc)                           // done:

	// OSWITCH:
	p1 := gjmp(nil)     // goto test
	breakpc = gjmp(nil) // break: goto done

	// define break label
	lab.Breakpc = breakpc
	Patch(p1, Pc)      // test:
	Genlist(n.Nbody)   // switch(test) body
	Patch(breakpc, Pc) // done:

	// OSELECT:
	p1 := gjmp(nil)     // goto test
	breakpc = gjmp(nil) // break: goto done

	// define break label
	lab.Breakpc = breakpc
	Patch(p1, Pc)      // test:
	Genlist(n.Nbody)   // select() body
	Patch(breakpc, Pc) // done:

	// OAS and friends: static initialization shortcut, then plain or
	// write-barriered assignment, then two-result type assertion.
	if gen_as_init(n, false) {
	Cgen_as(n.Left, n.Right)
	Cgen_as_wb(n.Left, n.Right, true)
	cgen_dottype(n.Rlist.N, n.List.N, n.List.Next.N, false)
	cgen_callinter(n, nil, 0)

	case ORETURN, ORETJMP:

	// Function calls turned into compiler intrinsics.
	// At top level, can just ignore the call and make sure to preserve side effects in the argument, if any.
	Cgen_checknil(n.Left)

	// Verify registers were balanced across this statement.
	if Anyregalloc() != wasregalloc {
		Fatalf("registers left allocated")
// Cgen_as generates the assignment nl = nr without write barriers.
func Cgen_as(nl, nr *Node) {
	Cgen_as_wb(nl, nr, false)
// Cgen_as_wb generates nl = nr; wb requests write-barrier treatment
// for pointer stores.
func Cgen_as_wb(nl, nr *Node, wb bool) {
	// Strip no-op conversions from the source expression.
	for nr != nil && nr.Op == OCONVNOP {
	// Assignments to blank (or nothing) are discarded.
	if nl == nil || isblank(nl) {
	if nr == nil || iszero(nr) {
	// heaps should already be clear
	if nr == nil && (nl.Class&PHEAP != 0) {
// cgen_callmeth generates code for a method call by rewriting it as an
// ordinary function call with the receiver as first argument.
func cgen_callmeth(n *Node, proc int) {
	// generate a rewrite in n2 for the method call
	// (p.f)(...) goes to (f)(p,...)
	if l.Op != ODOTMETH {
		Fatalf("cgen_callmeth: not dotmethod: %v", l)
	n2.Left.Type = l.Type
	// The rewritten callee is a plain function symbol.
	if n2.Left.Op == ONAME {
		n2.Left.Class = PFUNC
// CgenTemp creates a temporary node, assigns n to it, and returns it.
func CgenTemp(n *Node) *Node {
	Tempname(&tmp, n.Type)
	// (Interior of the label-checking pass; function header elided in
	// this excerpt.) Report labels used but never defined, defined but
	// never used, and unresolved gotos, then validate each goto.
	for lab := labellist; lab != nil; lab = lab.Link {
		for _, n := range lab.Use {
			yyerrorl(int(n.Lineno), "label %v not defined", lab.Sym)
		if lab.Use == nil && !lab.Used {
			yyerrorl(int(lab.Def.Lineno), "label %v defined and not used", lab.Sym)
		if lab.Gotopc != nil {
			Fatalf("label %v never resolved", lab.Sym)
		for _, n := range lab.Use {
			checkgoto(n, lab.Def)
// Componentgen copies a composite value by moving its individual components.
// Slices, strings and interfaces are supported. Small structs or arrays with
// elements of basic type are also supported.
// nr is nil when assigning a zero value.
// It reports whether the copy could be handled component-wise.
func Componentgen(nr, nl *Node) bool {
	return componentgen_wb(nr, nl, false)
// componentgen_wb is like componentgen but if wb==true emits write barriers for pointer updates.
func componentgen_wb(nr, nl *Node, wb bool) bool {
	// Don't generate any code for complete copy of a variable into itself.
	// It's useless, and the VARDEF will incorrectly mark the old value as dead.
	// (This check assumes that the arguments passed to componentgen did not
	// themselves come from Igen, or else we could have Op==ONAME but
	// with a Type and Xoffset describing an individual field, not the entire
	if nl.Op == ONAME && nl == nr {

	// Count number of moves required to move components.
	// If using write barrier, can only emit one pointer.
	// TODO(rsc): Allow more pointers, for reflect.Value.
	visitComponents(nl.Type, 0, func(t *Type, offset int64) bool {
		if Simtype[t.Etype] == Tptr && t != itable {
	return n <= maxMoves && (!wb || numPtr <= 1)
	if n > maxMoves || wb && numPtr > 1 {

	// Must call emitVardef after evaluating rhs but before writing to lhs.
	emitVardef := func() {
		// Emit vardef if needed.
		switch nl.Type.Etype {
		case TARRAY, TSTRING, TINTER, TSTRUCT:

	isConstString := Isconst(nr, CTSTR)
	// Both sides must be component-addressable (or a constant string).
	if !cadable(nl) && nr != nil && !cadable(nr) && !isConstString {
	if nr != nil && !cadable(nr) && !isConstString {
	// Evaluate the side with higher Ullman cost first.
	if nr == nil || isConstString || nl.Ullman >= nr.Ullman {
		Igen(nl, &nodl, nil)
		defer Regfree(&nodl)
	lbase := nodl.Xoffset

	// Special case: zeroing.
	// When zeroing, prepare a register containing zero.
	// TODO(rsc): Check that this is actually generating the best code.
	if Thearch.REGZERO != 0 {
		// cpu has a dedicated zero register
		Nodreg(&nodr, Types[TUINT], Thearch.REGZERO)
	// no dedicated zero register
	Nodconst(&zero, nl.Type, 0)
	Regalloc(&nodr, Types[TUINT], nil)
	Thearch.Gmove(&zero, &nodr)
	defer Regfree(&nodr)

	visitComponents(nl.Type, 0, func(t *Type, offset int64) bool {
		nodl.Xoffset = lbase + offset
		if Isfloat[t.Etype] {
			// TODO(rsc): Cache zero register like we do for integers?
		Thearch.Gmove(&nodr, &nodl)

	// Special case: assignment of string constant.
	// First the data pointer, then the length.
	nodl.Type = Ptrto(Types[TUINT8])
	Regalloc(&nodr, Types[Tptr], nil)
	p := Thearch.Gins(Thearch.Optoas(OAS, Types[Tptr]), nil, &nodr)
	Datastring(nr.Val().U.(string), &p.From)
	p.From.Type = obj.TYPE_ADDR
	Thearch.Gmove(&nodr, &nodl)

	nodl.Type = Types[Simtype[TUINT]]
	nodl.Xoffset += int64(Array_nel) - int64(Array_array)
	Nodconst(&nodr, nodl.Type, int64(len(nr.Val().U.(string))))
	Thearch.Gmove(&nodr, &nodl)

	// General case: copy nl = nr.
	if nr.Ullman >= UINF && nodl.Op == OINDREG {
		Fatalf("miscompile")
	Igen(nr, &nodr, nil)
	defer Regfree(&nodr)
	rbase := nodr.Xoffset
	Igen(nl, &nodl, nil)
	defer Regfree(&nodl)
	lbase = nodl.Xoffset

	// Move every component; under wb, defer the single pointer move
	// to the write-barriered store below.
	visitComponents(nl.Type, 0, func(t *Type, offset int64) bool {
		if wb && Simtype[t.Etype] == Tptr && t != itable {
			Fatalf("componentgen_wb %v", Tconv(nl.Type, 0))
		nodl.Xoffset = lbase + offset
		nodr.Xoffset = rbase + offset
		Thearch.Gmove(&nodr, &nodl)
	nodl.Xoffset = lbase + ptrOffset
	nodr.Xoffset = rbase + ptrOffset
	cgen_wbptr(&nodr, &nodl)
// visitComponents walks the individual components of the type t,
// walking into array elements, struct fields, the real and imaginary
// parts of complex numbers, and on 32-bit systems the high and
// low halves of 64-bit integers.
// It calls f for each such component, passing the component (aka element)
// type and memory offset, assuming t starts at startOffset.
// If f ever returns false, visitComponents returns false without any more
// calls to f. Otherwise visitComponents returns true.
func visitComponents(t *Type, startOffset int64, f func(elem *Type, elemOffset int64) bool) bool {
	// 64-bit signed integer on a 32-bit machine: two 32-bit halves.
	// NOTE: Assuming little endian (signed top half at offset 4).
	// We don't have any 32-bit big-endian systems.
	if Thearch.Thechar != '5' && Thearch.Thechar != '8' {
		Fatalf("unknown 32-bit architecture")
	return f(Types[TUINT32], startOffset) &&
		f(Types[TINT32], startOffset+4)

	// 64-bit unsigned on a 32-bit machine: two unsigned halves.
	return f(Types[TUINT32], startOffset) &&
		f(Types[TUINT32], startOffset+4)

	// Complex: real part then imaginary part.
	return f(Types[TFLOAT32], startOffset) &&
		f(Types[TFLOAT32], startOffset+4)

	return f(Types[TFLOAT64], startOffset) &&
		f(Types[TFLOAT64], startOffset+8)

	// Interface: itab/type word, then data word.
	return f(itable, startOffset) &&
		f(Ptrto(Types[TUINT8]), startOffset+int64(Widthptr))

	// String: data pointer, then length.
	return f(Ptrto(Types[TUINT8]), startOffset) &&
		f(Types[Simtype[TUINT]], startOffset+int64(Widthptr))

	// Slice: data pointer, length, capacity.
	return f(Ptrto(t.Type), startOffset+int64(Array_array)) &&
		f(Types[Simtype[TUINT]], startOffset+int64(Array_nel)) &&
		f(Types[Simtype[TUINT]], startOffset+int64(Array_cap))

	// Short-circuit [1e6]struct{}.
	if t.Type.Width == 0 {

	// Fixed array: visit each element at its scaled offset.
	for i := int64(0); i < t.Bound; i++ {
		if !visitComponents(t.Type, startOffset+i*t.Type.Width, f) {

	if t.Type != nil && t.Type.Width != 0 {
		// NOTE(rsc): If this happens, the right thing to do is to say
		// startOffset -= t.Type.Width
		// but I want to see if it does.
		// The old version of componentgen handled this,
		// in code introduced in CL 6932045 to fix issue #4518.
		// But the test case in issue 4518 does not trigger this anymore,
		// so maybe this complication is no longer needed.
		Fatalf("struct not at offset 0")

	// Struct: visit each field at its recorded width/offset.
	for field := t.Type; field != nil; field = field.Down {
		if field.Etype != TFIELD {
			Fatalf("bad struct")
		if !visitComponents(field.Type, startOffset+field.Width, f) {

	// Base case: a scalar component.
	return f(t, startOffset)
// cadable reports whether n is directly addressable for component-wise
// copying (a plain addressable ONAME).
func cadable(n *Node) bool {
	// Note: Not sure why you can have n.Op == ONAME without n.Addable, but you can.
	return n.Addable && n.Op == ONAME