package amd64
import (
+ "cmd/compile/internal/base"
"cmd/compile/internal/gc"
"cmd/internal/obj"
"cmd/internal/obj/x86"
if cnt%int64(gc.Widthreg) != 0 {
// should only happen with nacl
if cnt%int64(gc.Widthptr) != 0 {
- gc.Fatalf("zerorange count not a multiple of widthptr %d", cnt)
+ base.Fatalf("zerorange count not a multiple of widthptr %d", cnt)
}
if *state&ax == 0 {
p = pp.Appendpp(p, x86.AMOVQ, obj.TYPE_CONST, 0, 0, obj.TYPE_REG, x86.REG_AX, 0)
"fmt"
"math"
+ "cmd/compile/internal/base"
"cmd/compile/internal/gc"
"cmd/compile/internal/logopt"
"cmd/compile/internal/ssa"
r := v.Reg()
// See the comments in cmd/internal/obj/x86/obj6.go
// near CanUse1InsnTLS for a detailed explanation of these instructions.
- if x86.CanUse1InsnTLS(gc.Ctxt) {
+ if x86.CanUse1InsnTLS(base.Ctxt) {
// MOVQ (TLS), r
p := s.Prog(x86.AMOVQ)
p.From.Type = obj.TYPE_MEM
}
p := s.Prog(mov)
p.From.Type = obj.TYPE_ADDR
- p.From.Offset = -gc.Ctxt.FixedFrameSize() // 0 on amd64, just to be consistent with other architectures
+ p.From.Offset = -base.Ctxt.FixedFrameSize() // 0 on amd64, just to be consistent with other architectures
p.From.Name = obj.NAME_PARAM
p.To.Type = obj.TYPE_REG
p.To.Reg = v.Reg()
if logopt.Enabled() {
logopt.LogOpt(v.Pos, "nilcheck", "genssa", v.Block.Func.Name)
}
- if gc.Debug.Nil != 0 && v.Pos.Line() > 1 { // v.Pos.Line()==1 in generated wrappers
- gc.Warnl(v.Pos, "generated nil check")
+ if base.Debug.Nil != 0 && v.Pos.Line() > 1 { // v.Pos.Line()==1 in generated wrappers
+ base.WarnfAt(v.Pos, "generated nil check")
}
case ssa.OpAMD64MOVBatomicload, ssa.OpAMD64MOVLatomicload, ssa.OpAMD64MOVQatomicload:
p := s.Prog(v.Op.Asm())
"math"
"math/bits"
+ "cmd/compile/internal/base"
"cmd/compile/internal/gc"
"cmd/compile/internal/logopt"
"cmd/compile/internal/ssa"
if logopt.Enabled() {
logopt.LogOpt(v.Pos, "nilcheck", "genssa", v.Block.Func.Name)
}
- if gc.Debug.Nil != 0 && v.Pos.Line() > 1 { // v.Pos.Line()==1 in generated wrappers
- gc.Warnl(v.Pos, "generated nil check")
+ if base.Debug.Nil != 0 && v.Pos.Line() > 1 { // v.Pos.Line()==1 in generated wrappers
+ base.WarnfAt(v.Pos, "generated nil check")
}
case ssa.OpARMLoweredZero:
// MOVW.P Rarg2, 4(R1)
// caller's SP is FixedFrameSize below the address of the first arg
p := s.Prog(arm.AMOVW)
p.From.Type = obj.TYPE_ADDR
- p.From.Offset = -gc.Ctxt.FixedFrameSize()
+ p.From.Offset = -base.Ctxt.FixedFrameSize()
p.From.Name = obj.NAME_PARAM
p.To.Type = obj.TYPE_REG
p.To.Reg = v.Reg()
import (
"math"
+ "cmd/compile/internal/base"
"cmd/compile/internal/gc"
"cmd/compile/internal/logopt"
"cmd/compile/internal/ssa"
if logopt.Enabled() {
logopt.LogOpt(v.Pos, "nilcheck", "genssa", v.Block.Func.Name)
}
- if gc.Debug.Nil != 0 && v.Pos.Line() > 1 { // v.Line==1 in generated wrappers
- gc.Warnl(v.Pos, "generated nil check")
+ if base.Debug.Nil != 0 && v.Pos.Line() > 1 { // v.Line==1 in generated wrappers
+ base.WarnfAt(v.Pos, "generated nil check")
}
case ssa.OpARM64Equal,
ssa.OpARM64NotEqual,
// caller's SP is FixedFrameSize below the address of the first arg
p := s.Prog(arm64.AMOVD)
p.From.Type = obj.TYPE_ADDR
- p.From.Offset = -gc.Ctxt.FixedFrameSize()
+ p.From.Offset = -base.Ctxt.FixedFrameSize()
p.From.Name = obj.NAME_PARAM
p.To.Type = obj.TYPE_REG
p.To.Reg = v.Reg()
--- /dev/null
+// Copyright 2009 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package base
+
+import (
+ "os"
+
+ "cmd/internal/obj"
+)
+
+// Ctxt is the package-wide obj.Link compilation context shared across
+// the compiler (backends consult it, e.g. base.Ctxt.FixedFrameSize()
+// and x86.CanUse1InsnTLS(base.Ctxt) elsewhere in this patch).
+// NOTE(review): presumably assigned once during compiler startup —
+// the initializer is not visible in this chunk; confirm against main.
+var Ctxt *obj.Link
+
+// atExitFuncs holds the functions registered via AtExit. Exit drains
+// this list in reverse (LIFO) registration order.
+var atExitFuncs []func()
+
+// AtExit schedules f to be run by Exit, after any previously
+// registered functions (handlers run most-recently-registered first).
+func AtExit(f func()) {
+ atExitFuncs = append(atExitFuncs, f)
+}
+
+// Exit runs all functions registered with AtExit in reverse
+// registration order, then terminates the process with the given
+// status code via os.Exit. It does not return.
+func Exit(code int) {
+ for i := len(atExitFuncs) - 1; i >= 0; i-- {
+ f := atExitFuncs[i]
+ // Truncate the list before invoking f so that a handler which
+ // itself calls Exit cannot run f (or later handlers) a second time.
+ atExitFuncs = atExitFuncs[:i]
+ f()
+ }
+ os.Exit(code)
+}
// Debug arguments, set by -d flag.
-package gc
+package base
import (
"fmt"
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
-package gc
+package base
import (
"encoding/json"
"os"
"reflect"
"runtime"
-
"strings"
"cmd/internal/objabi"
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
-package gc
+package base
import (
- "cmd/internal/objabi"
- "cmd/internal/src"
"fmt"
"os"
"runtime/debug"
"sort"
"strings"
+
+ "cmd/internal/objabi"
+ "cmd/internal/src"
)
// An errorMsg is a queued error message, waiting to be printed.
// Pos is the current source position being processed,
// printed by Errorf, ErrorfLang, Fatalf, and Warnf.
-var lineno src.XPos
+var Pos src.XPos
var (
errorMsgs []errorMsg
// Only add the position if know the position.
// See issue golang.org/issue/11361.
if pos.IsKnown() {
- msg = fmt.Sprintf("%v: %s", linestr(pos), msg)
+ msg = fmt.Sprintf("%v: %s", FmtPos(pos), msg)
}
errorMsgs = append(errorMsgs, errorMsg{
pos: pos,
}
// FmtPos formats pos as a file:line string.
-func linestr(pos src.XPos) string {
+func FmtPos(pos src.XPos) string {
if Ctxt == nil {
return "???"
}
// FlushErrors sorts errors seen so far by line number, prints them to stdout,
// and empties the errors array.
-func flusherrors() {
+func FlushErrors() {
Ctxt.Bso.Flush()
if len(errorMsgs) == 0 {
return
}
// Errorf reports a formatted error at the current line.
-func yyerror(format string, args ...interface{}) {
- yyerrorl(lineno, format, args...)
+func Errorf(format string, args ...interface{}) {
+ ErrorfAt(Pos, format, args...)
}
// ErrorfAt reports a formatted error message at pos.
-func yyerrorl(pos src.XPos, format string, args ...interface{}) {
+func ErrorfAt(pos src.XPos, format string, args ...interface{}) {
msg := fmt.Sprintf(format, args...)
if strings.HasPrefix(msg, "syntax error") {
hcrash()
if numErrors >= 10 && Flag.LowerE == 0 {
- flusherrors()
- fmt.Printf("%v: too many errors\n", linestr(pos))
- errorexit()
+ FlushErrors()
+ fmt.Printf("%v: too many errors\n", FmtPos(pos))
+ ErrorExit()
}
}
// ErrorfVers reports that a language feature (format, args) requires a later version of Go.
-func yyerrorv(lang string, format string, args ...interface{}) {
- yyerror("%s requires %s or later (-lang was set to %s; check go.mod)", fmt.Sprintf(format, args...), lang, Flag.Lang)
+func ErrorfVers(lang string, format string, args ...interface{}) {
+ Errorf("%s requires %s or later (-lang was set to %s; check go.mod)", fmt.Sprintf(format, args...), lang, Flag.Lang)
}
// UpdateErrorDot is a clumsy hack that rewrites the last error,
// so this should be used only when the user has opted in
// to additional output by setting a particular flag.
func Warn(format string, args ...interface{}) {
- Warnl(lineno, format, args...)
+ WarnfAt(Pos, format, args...)
}
// WarnfAt reports a formatted warning at pos.
// In general the Go compiler does NOT generate warnings,
// so this should be used only when the user has opted in
// to additional output by setting a particular flag.
-func Warnl(pos src.XPos, format string, args ...interface{}) {
+func WarnfAt(pos src.XPos, format string, args ...interface{}) {
addErrorMsg(pos, format, args...)
if Flag.LowerM != 0 {
- flusherrors()
+ FlushErrors()
}
}
//
// If -h has been specified, Fatalf panics to force the usual runtime info dump.
func Fatalf(format string, args ...interface{}) {
- FatalfAt(lineno, format, args...)
+ FatalfAt(Pos, format, args...)
}
// FatalfAt reports a fatal error - an internal problem - at pos and exits.
//
// If -h has been specified, FatalfAt panics to force the usual runtime info dump.
func FatalfAt(pos src.XPos, format string, args ...interface{}) {
- flusherrors()
+ FlushErrors()
if Debug.Panic != 0 || numErrors == 0 {
- fmt.Printf("%v: internal compiler error: ", linestr(pos))
+ fmt.Printf("%v: internal compiler error: ", FmtPos(pos))
fmt.Printf(format, args...)
fmt.Printf("\n")
}
hcrash()
- errorexit()
+ ErrorExit()
}
// hcrash crashes the compiler when -h is set, to find out where a message is generated.
func hcrash() {
if Flag.LowerH != 0 {
- flusherrors()
+ FlushErrors()
if Flag.LowerO != "" {
os.Remove(Flag.LowerO)
}
// ErrorExit handles an error-status exit.
// It flushes any pending errors, removes the output file, and exits.
-func errorexit() {
- flusherrors()
+func ErrorExit() {
+ FlushErrors()
if Flag.LowerO != "" {
os.Remove(Flag.LowerO)
}
// ExitIfErrors calls ErrorExit if any errors have been reported.
func ExitIfErrors() {
if Errors() > 0 {
- errorexit()
+ ErrorExit()
}
}
package gc
import (
+ "cmd/compile/internal/base"
"cmd/compile/internal/types"
"cmd/internal/obj"
"fmt"
return ret, nil
}
- Fatalf("algtype1: unexpected type %v", t)
+ base.Fatalf("algtype1: unexpected type %v", t)
return 0, nil
}
switch algtype(t) {
default:
// genhash is only called for types that have equality
- Fatalf("genhash %v", t)
+ base.Fatalf("genhash %v", t)
case AMEM0:
return sysClosure("memhash0")
case AMEM8:
}
sym := typesymprefix(".hash", t)
- if Flag.LowerR != 0 {
+ if base.Flag.LowerR != 0 {
fmt.Printf("genhash %v %v %v\n", closure, sym, t)
}
- lineno = autogeneratedPos // less confusing than end of input
+ base.Pos = autogeneratedPos // less confusing than end of input
dclcontext = PEXTERN
// func sym(p *T, h uintptr) uintptr
r.List.Append(nh)
fn.Nbody.Append(r)
- if Flag.LowerR != 0 {
+ if base.Flag.LowerR != 0 {
dumplist("genhash body", fn.Nbody)
}
typecheckslice(fn.Nbody.Slice(), ctxStmt)
Curfn = nil
- if Debug.DclStack != 0 {
+ if base.Debug.DclStack != 0 {
testdclstack()
}
switch a, _ := algtype1(t); a {
case AMEM:
- Fatalf("hashfor with AMEM type")
+ base.Fatalf("hashfor with AMEM type")
case AINTER:
sym = Runtimepkg.Lookup("interhash")
case ANILINTER:
return closure
}
sym := typesymprefix(".eq", t)
- if Flag.LowerR != 0 {
+ if base.Flag.LowerR != 0 {
fmt.Printf("geneq %v\n", t)
}
// Autogenerate code for equality of structs and arrays.
- lineno = autogeneratedPos // less confusing than end of input
+ base.Pos = autogeneratedPos // less confusing than end of input
dclcontext = PEXTERN
// func sym(p, q *T) bool
// so t must be either an array or a struct.
switch t.Etype {
default:
- Fatalf("geneq %v", t)
+ base.Fatalf("geneq %v", t)
case TARRAY:
nelem := t.NumElem()
// We should really do a generic CL that shares epilogues across
// the board. See #24936.
- if Flag.LowerR != 0 {
+ if base.Flag.LowerR != 0 {
dumplist("geneq body", fn.Nbody)
}
typecheckslice(fn.Nbody.Slice(), ctxStmt)
Curfn = nil
- if Debug.DclStack != 0 {
+ if base.Debug.DclStack != 0 {
testdclstack()
}
// eqtab must be evaluated before eqdata, and shortcircuiting is required.
func eqinterface(s, t *Node) (eqtab, eqdata *Node) {
if !types.Identical(s.Type, t.Type) {
- Fatalf("eqinterface %v %v", s.Type, t.Type)
+ base.Fatalf("eqinterface %v %v", s.Type, t.Type)
}
// func ifaceeq(tab *uintptr, x, y unsafe.Pointer) (ret bool)
// func efaceeq(typ *uintptr, x, y unsafe.Pointer) (ret bool)
// by padding.
func ispaddedfield(t *types.Type, i int) bool {
if !t.IsStruct() {
- Fatalf("ispaddedfield called non-struct %v", t)
+ base.Fatalf("ispaddedfield called non-struct %v", t)
}
end := t.Width
if i+1 < t.NumFields() {
import (
"bytes"
+ "cmd/compile/internal/base"
"cmd/compile/internal/types"
"fmt"
"sort"
func Rnd(o int64, r int64) int64 {
if r < 1 || r > 8 || r&(r-1) != 0 {
- Fatalf("rnd %d", r)
+ base.Fatalf("rnd %d", r)
}
return (o + r - 1) &^ (r - 1)
}
case langSupported(1, 14, t.Pkg()) && !explicit && types.Identical(m.Type, prev.Type):
return
default:
- yyerrorl(m.Pos, "duplicate method %s", m.Sym.Name)
+ base.ErrorfAt(m.Pos, "duplicate method %s", m.Sym.Name)
}
methods = append(methods, m)
}
}
if !m.Type.IsInterface() {
- yyerrorl(m.Pos, "interface contains embedded non-interface %v", m.Type)
+ base.ErrorfAt(m.Pos, "interface contains embedded non-interface %v", m.Type)
m.SetBroke(true)
t.SetBroke(true)
// Add to fields so that error messages
sort.Sort(methcmp(methods))
if int64(len(methods)) >= thearch.MAXWIDTH/int64(Widthptr) {
- yyerrorl(typePos(t), "interface too large")
+ base.ErrorfAt(typePos(t), "interface too large")
}
for i, m := range methods {
m.Offset = int64(i) * int64(Widthptr)
w := f.Type.Width
if w < 0 {
- Fatalf("invalid width %d", f.Type.Width)
+ base.Fatalf("invalid width %d", f.Type.Width)
}
if w == 0 {
lastzero = o
maxwidth = 1<<31 - 1
}
if o >= maxwidth {
- yyerrorl(typePos(errtype), "type %L too large", errtype)
+ base.ErrorfAt(typePos(errtype), "type %L too large", errtype)
o = 8 // small but nonzero
}
}
var l []*types.Type
if !findTypeLoop(t, &l) {
- Fatalf("failed to find type loop for: %v", t)
+ base.Fatalf("failed to find type loop for: %v", t)
}
// Rotate loop so that the earliest type declaration is first.
var msg bytes.Buffer
fmt.Fprintf(&msg, "invalid recursive type %v\n", l[0])
for _, t := range l {
- fmt.Fprintf(&msg, "\t%v: %v refers to\n", linestr(typePos(t)), t)
+ fmt.Fprintf(&msg, "\t%v: %v refers to\n", base.FmtPos(typePos(t)), t)
t.SetBroke(true)
}
- fmt.Fprintf(&msg, "\t%v: %v", linestr(typePos(l[0])), l[0])
- yyerrorl(typePos(l[0]), msg.String())
+ fmt.Fprintf(&msg, "\t%v: %v", base.FmtPos(typePos(l[0])), l[0])
+ base.ErrorfAt(typePos(l[0]), msg.String())
}
// dowidth calculates and stores the size and alignment for t.
return
}
if Widthptr == 0 {
- Fatalf("dowidth without betypeinit")
+ base.Fatalf("dowidth without betypeinit")
}
if t == nil {
return
}
t.SetBroke(true)
- Fatalf("width not calculated: %v", t)
+ base.Fatalf("width not calculated: %v", t)
}
// break infinite recursion if the broken recursive type
// defer checkwidth calls until after we're done
defercheckwidth()
- lno := lineno
+ lno := base.Pos
if asNode(t.Nod) != nil {
- lineno = asNode(t.Nod).Pos
+ base.Pos = asNode(t.Nod).Pos
}
t.Width = -2
var w int64
switch et {
default:
- Fatalf("dowidth: unknown type: %v", t)
+ base.Fatalf("dowidth: unknown type: %v", t)
// compiler-specific stuff
case TINT8, TUINT8, TBOOL:
t1 := t.ChanArgs()
dowidth(t1) // just in case
if t1.Elem().Width >= 1<<16 {
- yyerrorl(typePos(t1), "channel element type too large (>64kB)")
+ base.ErrorfAt(typePos(t1), "channel element type too large (>64kB)")
}
w = 1 // anything will do
case TANY:
// not a real type; should be replaced before use.
- Fatalf("dowidth any")
+ base.Fatalf("dowidth any")
case TSTRING:
if sizeofString == 0 {
- Fatalf("early dowidth string")
+ base.Fatalf("early dowidth string")
}
w = sizeofString
t.Align = uint8(Widthptr)
if t.Elem().Width != 0 {
cap := (uint64(thearch.MAXWIDTH) - 1) / uint64(t.Elem().Width)
if uint64(t.NumElem()) > cap {
- yyerrorl(typePos(t), "type %L larger than address space", t)
+ base.ErrorfAt(typePos(t), "type %L larger than address space", t)
}
}
w = t.NumElem() * t.Elem().Width
case TSTRUCT:
if t.IsFuncArgStruct() {
- Fatalf("dowidth fn struct %v", t)
+ base.Fatalf("dowidth fn struct %v", t)
}
w = widstruct(t, t, 0, 1)
w = widstruct(t1, t1.Results(), w, Widthreg)
t1.Extra.(*types.Func).Argwid = w
if w%int64(Widthreg) != 0 {
- Warn("bad type %v %d\n", t1, w)
+ base.Warn("bad type %v %d\n", t1, w)
}
t.Align = 1
}
if Widthptr == 4 && w != int64(int32(w)) {
- yyerrorl(typePos(t), "type %v too large", t)
+ base.ErrorfAt(typePos(t), "type %v too large", t)
}
t.Width = w
if t.Align == 0 {
if w == 0 || w > 8 || w&(w-1) != 0 {
- Fatalf("invalid alignment for %v", t)
+ base.Fatalf("invalid alignment for %v", t)
}
t.Align = uint8(w)
}
- lineno = lno
+ base.Pos = lno
resumecheckwidth()
}
// function arg structs should not be checked
// outside of the enclosing function.
if t.IsFuncArgStruct() {
- Fatalf("checkwidth %v", t)
+ base.Fatalf("checkwidth %v", t)
}
if defercalc == 0 {
package gc
-import "runtime"
+import (
+ "cmd/compile/internal/base"
+ "runtime"
+)
func startMutexProfiling() {
- Fatalf("mutex profiling unavailable in version %v", runtime.Version())
+ base.Fatalf("mutex profiling unavailable in version %v", runtime.Version())
}
import (
"math/bits"
+
+ "cmd/compile/internal/base"
)
const (
nword := (nbit + wordBits - 1) / wordBits
size := int64(nword) * int64(count)
if int64(int32(size*4)) != size*4 {
- Fatalf("bvbulkalloc too big: nbit=%d count=%d nword=%d size=%d", nbit, count, nword, size)
+ base.Fatalf("bvbulkalloc too big: nbit=%d count=%d nword=%d size=%d", nbit, count, nword, size)
}
return bulkBvec{
words: make([]uint32, size),
func (bv1 bvec) Eq(bv2 bvec) bool {
if bv1.n != bv2.n {
- Fatalf("bvequal: lengths %d and %d are not equal", bv1.n, bv2.n)
+ base.Fatalf("bvequal: lengths %d and %d are not equal", bv1.n, bv2.n)
}
for i, x := range bv1.b {
if x != bv2.b[i] {
func (bv bvec) Get(i int32) bool {
if i < 0 || i >= bv.n {
- Fatalf("bvget: index %d is out of bounds with length %d\n", i, bv.n)
+ base.Fatalf("bvget: index %d is out of bounds with length %d\n", i, bv.n)
}
mask := uint32(1 << uint(i%wordBits))
return bv.b[i>>wordShift]&mask != 0
func (bv bvec) Set(i int32) {
if i < 0 || i >= bv.n {
- Fatalf("bvset: index %d is out of bounds with length %d\n", i, bv.n)
+ base.Fatalf("bvset: index %d is out of bounds with length %d\n", i, bv.n)
}
mask := uint32(1 << uint(i%wordBits))
bv.b[i/wordBits] |= mask
func (bv bvec) Unset(i int32) {
if i < 0 || i >= bv.n {
- Fatalf("bvunset: index %d is out of bounds with length %d\n", i, bv.n)
+ base.Fatalf("bvunset: index %d is out of bounds with length %d\n", i, bv.n)
}
mask := uint32(1 << uint(i%wordBits))
bv.b[i/wordBits] &^= mask
package gc
import (
+ "cmd/compile/internal/base"
"cmd/compile/internal/syntax"
"cmd/compile/internal/types"
"cmd/internal/src"
if !n.Name.Captured() {
n.Name.SetCaptured(true)
if n.Name.Decldepth == 0 {
- Fatalf("typecheckclosure: var %S does not have decldepth assigned", n)
+ base.Fatalf("typecheckclosure: var %S does not have decldepth assigned", n)
}
// Ignore assignments to the variable in straightline code
// We use value capturing for values <= 128 bytes that are never reassigned
// after capturing (effectively constant).
func capturevars(dcl *Node) {
- lno := lineno
- lineno = dcl.Pos
+ lno := base.Pos
+ base.Pos = dcl.Pos
fn := dcl.Func
cvars := fn.ClosureVars.Slice()
out := cvars[:0]
outer = nod(OADDR, outer, nil)
}
- if Flag.LowerM > 1 {
+ if base.Flag.LowerM > 1 {
var name *types.Sym
if v.Name.Curfn != nil && v.Name.Curfn.Func.Nname != nil {
name = v.Name.Curfn.Func.Nname.Sym
if v.Name.Byval() {
how = "value"
}
- Warnl(v.Pos, "%v capturing by %s: %v (addr=%v assign=%v width=%d)", name, how, v.Sym, outermost.Name.Addrtaken(), outermost.Name.Assigned(), int32(v.Type.Width))
+ base.WarnfAt(v.Pos, "%v capturing by %s: %v (addr=%v assign=%v width=%d)", name, how, v.Sym, outermost.Name.Addrtaken(), outermost.Name.Assigned(), int32(v.Type.Width))
}
outer = typecheck(outer, ctxExpr)
}
fn.ClosureVars.Set(out)
- lineno = lno
+ base.Pos = lno
}
// transformclosure is called in a separate phase after escape analysis.
// It transform closure bodies to properly reference captured variables.
func transformclosure(dcl *Node) {
- lno := lineno
- lineno = dcl.Pos
+ lno := base.Pos
+ base.Pos = dcl.Pos
fn := dcl.Func
if fn.ClosureCalled {
}
}
- lineno = lno
+ base.Pos = lno
}
// hasemptycvars reports whether closure clo has an
// closuredebugruntimecheck applies boilerplate checks for debug flags
// and compiling runtime
func closuredebugruntimecheck(clo *Node) {
- if Debug.Closure > 0 {
+ if base.Debug.Closure > 0 {
if clo.Esc == EscHeap {
- Warnl(clo.Pos, "heap closure, captured vars = %v", clo.Func.ClosureVars)
+ base.WarnfAt(clo.Pos, "heap closure, captured vars = %v", clo.Func.ClosureVars)
} else {
- Warnl(clo.Pos, "stack closure, captured vars = %v", clo.Func.ClosureVars)
+ base.WarnfAt(clo.Pos, "stack closure, captured vars = %v", clo.Func.ClosureVars)
}
}
- if Flag.CompilingRuntime && clo.Esc == EscHeap {
- yyerrorl(clo.Pos, "heap-allocated closure, not allowed in runtime")
+ if base.Flag.CompilingRuntime && clo.Esc == EscHeap {
+ base.ErrorfAt(clo.Pos, "heap-allocated closure, not allowed in runtime")
}
}
// If no closure vars, don't bother wrapping.
if hasemptycvars(clo) {
- if Debug.Closure > 0 {
- Warnl(clo.Pos, "closure converted to global")
+ if base.Debug.Closure > 0 {
+ base.WarnfAt(clo.Pos, "closure converted to global")
}
return fn.Nname
}
break
default:
- Fatalf("invalid typecheckpartialcall")
+ base.Fatalf("invalid typecheckpartialcall")
}
// Create top-level function.
sym.SetUniq(true)
savecurfn := Curfn
- saveLineNo := lineno
+ saveLineNo := base.Pos
Curfn = nil
// Set line number equal to the line number where the method is declared.
var m *types.Field
if lookdot0(meth, rcvrtype, &m, false) == 1 && m.Pos.IsKnown() {
- lineno = m.Pos
+ base.Pos = m.Pos
}
// Note: !m.Pos.IsKnown() happens for method expressions where
// the method is implicitly declared. The Error method of the
sym.Def = asTypesNode(dcl)
xtop = append(xtop, dcl)
Curfn = savecurfn
- lineno = saveLineNo
+ base.Pos = saveLineNo
return dcl
}
// referenced by method value n.
func callpartMethod(n *Node) *types.Field {
if n.Op != OCALLPART {
- Fatalf("expected OCALLPART, got %v", n)
+ base.Fatalf("expected OCALLPART, got %v", n)
}
// TODO(mdempsky): Optimize this. If necessary,
// makepartialcall could save m for us somewhere.
var m *types.Field
if lookdot0(n.Right.Sym, n.Left.Type, &m, false) != 1 {
- Fatalf("failed to find field for OCALLPART")
+ base.Fatalf("failed to find field for OCALLPART")
}
return m
package gc
import (
+ "cmd/compile/internal/base"
"cmd/compile/internal/types"
"cmd/internal/src"
"fmt"
func (n *Node) ValueInterface() interface{} {
switch v := n.Val(); v.Kind() {
default:
- Fatalf("unexpected constant: %v", v)
+ base.Fatalf("unexpected constant: %v", v)
panic("unreachable")
case constant.Bool:
return constant.BoolVal(v)
return x
}
}
- Fatalf("%v out of range for %v", v, t)
+ base.Fatalf("%v out of range for %v", v, t)
panic("unreachable")
}
if x, _ := constant.Float64Val(v); !math.IsInf(x, 0) {
return x + 0 // avoid -0 (should not be needed, but be conservative)
}
- Fatalf("bad float64 value: %v", v)
+ base.Fatalf("bad float64 value: %v", v)
panic("unreachable")
}
case *big.Rat:
f.SetRat(u)
default:
- Fatalf("unexpected: %v", u)
+ base.Fatalf("unexpected: %v", u)
}
return f
}
// n must be an integer or rune constant.
func (n *Node) Int64Val() int64 {
if !Isconst(n, constant.Int) {
- Fatalf("Int64Val(%v)", n)
+ base.Fatalf("Int64Val(%v)", n)
}
x, ok := constant.Int64Val(n.Val())
if !ok {
- Fatalf("Int64Val(%v)", n)
+ base.Fatalf("Int64Val(%v)", n)
}
return x
}
// n must be an integer or rune constant.
func (n *Node) Uint64Val() uint64 {
if !Isconst(n, constant.Int) {
- Fatalf("Uint64Val(%v)", n)
+ base.Fatalf("Uint64Val(%v)", n)
}
x, ok := constant.Uint64Val(n.Val())
if !ok {
- Fatalf("Uint64Val(%v)", n)
+ base.Fatalf("Uint64Val(%v)", n)
}
return x
}
// n must be a boolean constant.
func (n *Node) BoolVal() bool {
if !Isconst(n, constant.Bool) {
- Fatalf("BoolVal(%v)", n)
+ base.Fatalf("BoolVal(%v)", n)
}
return constant.BoolVal(n.Val())
}
// n must be a string constant.
func (n *Node) StringVal() string {
if !Isconst(n, constant.String) {
- Fatalf("StringVal(%v)", n)
+ base.Fatalf("StringVal(%v)", n)
}
return constant.StringVal(n.Val())
}
f, _ := constant.Float64Val(v)
return makeFloat64(f)
}
- Fatalf("unexpected size: %v", sz)
+ base.Fatalf("unexpected size: %v", sz)
panic("unreachable")
}
// truncate Real and Imag parts of Mpcplx to 32-bit or 64-bit
// precision, according to type; return truncated value. In case of
-// overflow, calls yyerror but does not truncate the input value.
+// overflow, calls Errorf but does not truncate the input value.
func trunccmplxlit(v constant.Value, t *types.Type) constant.Value {
if t.IsUntyped() || overflow(v, t) {
// If there was overflow, simply continuing would set the
// message.
func convlit1(n *Node, t *types.Type, explicit bool, context func() string) *Node {
if explicit && t == nil {
- Fatalf("explicit conversion missing type")
+ base.Fatalf("explicit conversion missing type")
}
if t != nil && t.IsUntyped() {
- Fatalf("bad conversion to untyped: %v", t)
+ base.Fatalf("bad conversion to untyped: %v", t)
}
if n == nil || n.Type == nil {
// Nil is technically not a constant, so handle it specially.
if n.Type.Etype == TNIL {
if n.Op != ONIL {
- Fatalf("unexpected op: %v (%v)", n, n.Op)
+ base.Fatalf("unexpected op: %v (%v)", n, n.Op)
}
if t == nil {
- yyerror("use of untyped nil")
+ base.Errorf("use of untyped nil")
n.SetDiag(true)
n.Type = nil
return n
switch n.Op {
default:
- Fatalf("unexpected untyped expression: %v", n)
+ base.Fatalf("unexpected untyped expression: %v", n)
case OLITERAL:
v := convertVal(n.Val(), t, explicit)
return n
}
if !types.Identical(n.Left.Type, n.Right.Type) {
- yyerror("invalid operation: %v (mismatched types %v and %v)", n, n.Left.Type, n.Right.Type)
+ base.Errorf("invalid operation: %v (mismatched types %v and %v)", n, n.Left.Type, n.Right.Type)
n.Type = nil
return n
}
n.Left = convlit1(n.Left, t, explicit, nil)
n.Type = n.Left.Type
if n.Type != nil && !n.Type.IsInteger() {
- yyerror("invalid operation: %v (shift of type %v)", n, n.Type)
+ base.Errorf("invalid operation: %v (shift of type %v)", n, n.Type)
n.Type = nil
}
return n
if !n.Diag() {
if !t.Broke() {
if explicit {
- yyerror("cannot convert %L to type %v", n, t)
+ base.Errorf("cannot convert %L to type %v", n, t)
} else if context != nil {
- yyerror("cannot use %L as type %v in %s", n, t, context())
+ base.Errorf("cannot use %L as type %v in %s", n, t, context())
} else {
- yyerror("cannot use %L as type %v", n, t)
+ base.Errorf("cannot use %L as type %v", n, t)
}
}
n.SetDiag(true)
func toflt(v constant.Value) constant.Value {
if v.Kind() == constant.Complex {
if constant.Sign(constant.Imag(v)) != 0 {
- yyerror("constant %v truncated to real", v)
+ base.Errorf("constant %v truncated to real", v)
}
v = constant.Real(v)
}
func toint(v constant.Value) constant.Value {
if v.Kind() == constant.Complex {
if constant.Sign(constant.Imag(v)) != 0 {
- yyerror("constant %v truncated to integer", v)
+ base.Errorf("constant %v truncated to integer", v)
}
v = constant.Real(v)
}
// (See issue #11371).
f := bigFloatVal(v)
if f.MantExp(nil) > 2*Mpprec {
- yyerror("integer too large")
+ base.Errorf("integer too large")
} else {
var t big.Float
t.Parse(fmt.Sprint(v), 0)
if t.IsInt() {
- yyerror("constant truncated to integer")
+ base.Errorf("constant truncated to integer")
} else {
- yyerror("constant %v truncated to integer", v)
+ base.Errorf("constant %v truncated to integer", v)
}
}
ft := floatForComplex(t)
return doesoverflow(constant.Real(v), ft) || doesoverflow(constant.Imag(v), ft)
}
- Fatalf("doesoverflow: %v, %v", v, t)
+ base.Fatalf("doesoverflow: %v, %v", v, t)
panic("unreachable")
}
return false
}
if v.Kind() == constant.Int && constant.BitLen(v) > Mpprec {
- yyerror("integer too large")
+ base.Errorf("integer too large")
return true
}
if doesoverflow(v, t) {
- yyerror("constant %v overflows %v", vconv(v, 0), t)
+ base.Errorf("constant %v overflows %v", vconv(v, 0), t)
return true
}
return false
// check for divisor underflow in complex division (see issue 20227)
if op == ODIV && n.Type.IsComplex() && constant.Sign(square(constant.Real(rval))) == 0 && constant.Sign(square(constant.Imag(rval))) == 0 {
- yyerror("complex division by zero")
+ base.Errorf("complex division by zero")
n.Type = nil
return n
}
if (op == ODIV || op == OMOD) && constant.Sign(rval) == 0 {
- yyerror("division by zero")
+ base.Errorf("division by zero")
n.Type = nil
return n
}
const shiftBound = 1023 - 1 + 52
s, ok := constant.Uint64Val(nr.Val())
if !ok || s > shiftBound {
- yyerror("invalid shift count %v", nr)
+ base.Errorf("invalid shift count %v", nr)
n.Type = nil
break
}
func makeFloat64(f float64) constant.Value {
if math.IsInf(f, 0) {
- Fatalf("infinity is not a valid constant")
+ base.Fatalf("infinity is not a valid constant")
}
v := constant.MakeFloat64(f)
v = constant.ToFloat(v) // workaround #42641 (MakeFloat64(0).Kind() returns Int, not Float)
func origConst(n *Node, v constant.Value) *Node {
lno := setlineno(n)
v = convertVal(v, n.Type, false)
- lineno = lno
+ base.Pos = lno
switch v.Kind() {
case constant.Int:
case constant.Unknown:
what := overflowNames[n.Op]
if what == "" {
- Fatalf("unexpected overflow: %v", n.Op)
+ base.Fatalf("unexpected overflow: %v", n.Op)
}
- yyerrorl(n.Pos, "constant %v overflow", what)
+ base.ErrorfAt(n.Pos, "constant %v overflow", what)
n.Type = nil
return n
}
func assertRepresents(t *types.Type, v constant.Value) {
if !represents(t, v) {
- Fatalf("%v does not represent %v", t, v)
+ base.Fatalf("%v does not represent %v", t, v)
}
}
return t.IsComplex()
}
- Fatalf("unexpected constant kind: %v", v)
+ base.Fatalf("unexpected constant kind: %v", v)
panic("unreachable")
}
case constant.Complex:
return types.UntypedComplex
}
- Fatalf("unexpected Ctype: %v", ct)
+ base.Fatalf("unexpected Ctype: %v", ct)
return nil
}
case types.UntypedComplex:
return 3
}
- Fatalf("bad type %v", t)
+ base.Fatalf("bad type %v", t)
panic("unreachable")
}
return types.Types[TCOMPLEX128]
}
- Fatalf("bad type %v", t)
+ base.Fatalf("bad type %v", t)
return nil
}
return
}
if n.Type.IsUntyped() {
- Fatalf("%v is untyped", n)
+ base.Fatalf("%v is untyped", n)
}
// Consts are only duplicates if they have the same value and
}
if prevPos, isDup := s.m[k]; isDup {
- yyerrorl(pos, "duplicate %s %s in %s\n\tprevious %s at %v",
+ base.ErrorfAt(pos, "duplicate %s %s in %s\n\tprevious %s at %v",
what, nodeAndVal(n), where,
- what, linestr(prevPos))
+ what, base.FmtPos(prevPos))
} else {
s.m[k] = pos
}
import (
"bytes"
+ "cmd/compile/internal/base"
"cmd/compile/internal/types"
"cmd/internal/obj"
"cmd/internal/src"
func testdclstack() {
if !types.IsDclstackValid() {
- Fatalf("mark left on the dclstack")
+ base.Fatalf("mark left on the dclstack")
}
}
if pkg == nil {
pkg = s.Pkg
}
- yyerrorl(pos, "%v redeclared %s\n"+
+ base.ErrorfAt(pos, "%v redeclared %s\n"+
"\tprevious declaration during import %q", s, where, pkg.Path)
} else {
prevPos := s.Lastlineno
pos, prevPos = prevPos, pos
}
- yyerrorl(pos, "%v redeclared %s\n"+
- "\tprevious declaration at %v", s, where, linestr(prevPos))
+ base.ErrorfAt(pos, "%v redeclared %s\n"+
+ "\tprevious declaration at %v", s, where, base.FmtPos(prevPos))
}
}
// kludgy: typecheckok means we're past parsing. Eg genwrapper may declare out of package names later.
if !inimport && !typecheckok && s.Pkg != localpkg {
- yyerrorl(n.Pos, "cannot declare name %v", s)
+ base.ErrorfAt(n.Pos, "cannot declare name %v", s)
}
gen := 0
if ctxt == PEXTERN {
if s.Name == "init" {
- yyerrorl(n.Pos, "cannot declare init - must be func")
+ base.ErrorfAt(n.Pos, "cannot declare init - must be func")
}
if s.Name == "main" && s.Pkg.Name == "main" {
- yyerrorl(n.Pos, "cannot declare main - must be func")
+ base.ErrorfAt(n.Pos, "cannot declare main - must be func")
}
externdcl = append(externdcl, n)
} else {
if Curfn == nil && ctxt == PAUTO {
- lineno = n.Pos
- Fatalf("automatic outside function")
+ base.Pos = n.Pos
+ base.Fatalf("automatic outside function")
}
if Curfn != nil && ctxt != PFUNC {
Curfn.Func.Dcl = append(Curfn.Func.Dcl, n)
}
s.Block = types.Block
- s.Lastlineno = lineno
+ s.Lastlineno = base.Pos
s.Def = asTypesNode(n)
n.Name.Vargen = int32(gen)
n.SetClass(ctxt)
func addvar(n *Node, t *types.Type, ctxt Class) {
if n == nil || n.Sym == nil || (n.Op != ONAME && n.Op != ONONAME) || t == nil {
- Fatalf("addvar: n=%v t=%v nil", n, t)
+ base.Fatalf("addvar: n=%v t=%v nil", n, t)
}
n.Op = ONAME
var e *Node
if doexpr {
if len(el) == 0 {
- yyerror("assignment mismatch: %d variables but %d values", len(vl), nel)
+ base.Errorf("assignment mismatch: %d variables but %d values", len(vl), nel)
break
}
e = el[0]
}
if len(el) != 0 {
- yyerror("assignment mismatch: %d variables but %d values", len(vl), nel)
+ base.Errorf("assignment mismatch: %d variables but %d values", len(vl), nel)
}
return init
}
// newnoname returns a new ONONAME Node associated with symbol s.
func newnoname(s *types.Sym) *Node {
if s == nil {
- Fatalf("newnoname nil")
+ base.Fatalf("newnoname nil")
}
n := nod(ONONAME, nil, nil)
n.Sym = s
// newfuncnamel generates a new name node for a function or method.
func newfuncnamel(pos src.XPos, s *types.Sym, fn *Func) *Node {
if fn.Nname != nil {
- Fatalf("newfuncnamel - already have name")
+ base.Fatalf("newfuncnamel - already have name")
}
n := newnamel(pos, s)
n.Func = fn
n := oldname(sym)
if !types.IsExported(sym.Name) && sym.Pkg != localpkg {
n.SetDiag(true)
- yyerror("cannot refer to unexported name %s.%s", sym.Pkg.Name, sym.Name)
+ base.Errorf("cannot refer to unexported name %s.%s", sym.Pkg.Name, sym.Name)
}
return n
}
continue
}
if !colasname(n) {
- yyerrorl(defn.Pos, "non-name %v on left side of :=", n)
+ base.ErrorfAt(defn.Pos, "non-name %v on left side of :=", n)
nerr++
continue
}
if !n.Sym.Uniq() {
- yyerrorl(defn.Pos, "%v repeated on left side of :=", n.Sym)
+ base.ErrorfAt(defn.Pos, "%v repeated on left side of :=", n.Sym)
n.SetDiag(true)
nerr++
continue
}
if nnew == 0 && nerr == 0 {
- yyerrorl(defn.Pos, "no new variables on left side of :=")
+ base.ErrorfAt(defn.Pos, "no new variables on left side of :=")
}
}
// interface field declaration.
func ifacedcl(n *Node) {
if n.Op != ODCLFIELD || n.Left == nil {
- Fatalf("ifacedcl")
+ base.Fatalf("ifacedcl")
}
if n.Sym.IsBlank() {
- yyerror("methods must have a unique non-blank name")
+ base.Errorf("methods must have a unique non-blank name")
}
}
func funcargs(nt *Node) {
if nt.Op != OTFUNC {
- Fatalf("funcargs %v", nt.Op)
+ base.Fatalf("funcargs %v", nt.Op)
}
// re-start the variable generation number
func funcarg(n *Node, ctxt Class) {
if n.Op != ODCLFIELD {
- Fatalf("funcarg %v", n.Op)
+ base.Fatalf("funcarg %v", n.Op)
}
if n.Sym == nil {
return
// used functype directly to parse the function's type.
func funcargs2(t *types.Type) {
if t.Etype != TFUNC {
- Fatalf("funcargs2 %v", t)
+ base.Fatalf("funcargs2 %v", t)
}
for _, f := range t.Recvs().Fields().Slice() {
if t.Sym == nil && t.IsPtr() {
t = t.Elem()
if t.IsInterface() {
- yyerror("embedded type cannot be a pointer to interface")
+ base.Errorf("embedded type cannot be a pointer to interface")
}
}
if t.IsPtr() || t.IsUnsafePtr() {
- yyerror("embedded type cannot be a pointer")
+ base.Errorf("embedded type cannot be a pointer")
} else if t.Etype == TFORW && !t.ForwardType().Embedlineno.IsKnown() {
- t.ForwardType().Embedlineno = lineno
+ t.ForwardType().Embedlineno = base.Pos
}
}
func structfield(n *Node) *types.Field {
- lno := lineno
- lineno = n.Pos
+ lno := base.Pos
+ base.Pos = n.Pos
if n.Op != ODCLFIELD {
- Fatalf("structfield: oops %v\n", n)
+ base.Fatalf("structfield: oops %v\n", n)
}
if n.Left != nil {
f.Note = constant.StringVal(n.Val())
}
- lineno = lno
+ base.Pos = lno
return f
}
continue
}
if seen[f.Sym] {
- yyerrorl(f.Pos, "duplicate %s %s", what, f.Sym.Name)
+ base.ErrorfAt(f.Pos, "duplicate %s %s", what, f.Sym.Name)
continue
}
seen[f.Sym] = true
}
func interfacefield(n *Node) *types.Field {
- lno := lineno
- lineno = n.Pos
+ lno := base.Pos
+ base.Pos = n.Pos
if n.Op != ODCLFIELD {
- Fatalf("interfacefield: oops %v\n", n)
+ base.Fatalf("interfacefield: oops %v\n", n)
}
if n.HasVal() {
- yyerror("interface method cannot have annotation")
+ base.Errorf("interface method cannot have annotation")
}
// MethodSpec = MethodName Signature | InterfaceTypeName .
f := types.NewField(n.Pos, n.Sym, n.Type)
- lineno = lno
+ base.Pos = lno
return f
}
// start with a letter, number, or period.
func methodSymSuffix(recv *types.Type, msym *types.Sym, suffix string) *types.Sym {
if msym.IsBlank() {
- Fatalf("blank method name")
+ base.Fatalf("blank method name")
}
rsym := recv.Sym
if recv.IsPtr() {
if rsym != nil {
- Fatalf("declared pointer receiver type: %v", recv)
+ base.Fatalf("declared pointer receiver type: %v", recv)
}
rsym = recv.Elem().Sym
}
// Returns a pointer to the existing or added Field; or nil if there's an error.
func addmethod(n *Node, msym *types.Sym, t *types.Type, local, nointerface bool) *types.Field {
if msym == nil {
- Fatalf("no method symbol")
+ base.Fatalf("no method symbol")
}
// get parent type sym
rf := t.Recv() // ptr to this structure
if rf == nil {
- yyerror("missing receiver")
+ base.Errorf("missing receiver")
return nil
}
t := pa
if t != nil && t.IsPtr() {
if t.Sym != nil {
- yyerror("invalid receiver type %v (%v is a pointer type)", pa, t)
+ base.Errorf("invalid receiver type %v (%v is a pointer type)", pa, t)
return nil
}
t = t.Elem()
case t == nil || t.Broke():
// rely on typecheck having complained before
case t.Sym == nil:
- yyerror("invalid receiver type %v (%v is not a defined type)", pa, t)
+ base.Errorf("invalid receiver type %v (%v is not a defined type)", pa, t)
case t.IsPtr():
- yyerror("invalid receiver type %v (%v is a pointer type)", pa, t)
+ base.Errorf("invalid receiver type %v (%v is a pointer type)", pa, t)
case t.IsInterface():
- yyerror("invalid receiver type %v (%v is an interface type)", pa, t)
+ base.Errorf("invalid receiver type %v (%v is an interface type)", pa, t)
default:
// Should have picked off all the reasons above,
// but just in case, fall back to generic error.
- yyerror("invalid receiver type %v (%L / %L)", pa, pa, t)
+ base.Errorf("invalid receiver type %v (%L / %L)", pa, pa, t)
}
return nil
}
if local && mt.Sym.Pkg != localpkg {
- yyerror("cannot define new methods on non-local type %v", mt)
+ base.Errorf("cannot define new methods on non-local type %v", mt)
return nil
}
if mt.IsStruct() {
for _, f := range mt.Fields().Slice() {
if f.Sym == msym {
- yyerror("type %v has both field and method named %v", mt, msym)
+ base.Errorf("type %v has both field and method named %v", mt, msym)
f.SetBroke(true)
return nil
}
// types.Identical only checks that incoming and result parameters match,
// so explicitly check that the receiver parameters match too.
if !types.Identical(t, f.Type) || !types.Identical(t.Recv().Type, f.Type.Recv().Type) {
- yyerror("method redeclared: %v.%v\n\t%v\n\t%v", mt, msym, f.Type, t)
+ base.Errorf("method redeclared: %v.%v\n\t%v\n\t%v", mt, msym, f.Type, t)
}
return f
}
- f := types.NewField(lineno, msym, t)
+ f := types.NewField(base.Pos, msym, t)
f.Nname = asTypesNode(n.Func.Nname)
f.SetNointerface(nointerface)
// When dynamically linking, the necessary function
// symbols will be created explicitly with makefuncsym.
// See the makefuncsym comment for details.
- if !Ctxt.Flag_dynlink && !existed {
+ if !base.Ctxt.Flag_dynlink && !existed {
funcsyms = append(funcsyms, s)
}
funcsymsmu.Unlock()
// So instead, when dynamic linking, we only create
// the s·f stubs in s's package.
func makefuncsym(s *types.Sym) {
- if !Ctxt.Flag_dynlink {
- Fatalf("makefuncsym dynlink")
+ if !base.Ctxt.Flag_dynlink {
+ base.Fatalf("makefuncsym dynlink")
}
if s.IsBlank() {
return
}
- if Flag.CompilingRuntime && (s.Name == "getg" || s.Name == "getclosureptr" || s.Name == "getcallerpc" || s.Name == "getcallersp") {
+ if base.Flag.CompilingRuntime && (s.Name == "getg" || s.Name == "getclosureptr" || s.Name == "getcallerpc" || s.Name == "getcallersp") {
// runtime.getg(), getclosureptr(), getcallerpc(), and
// getcallersp() are not real functions and so do not
// get funcsyms.
// setNodeNameFunc marks a node as a function.
func setNodeNameFunc(n *Node) {
if n.Op != ONAME || n.Class() != Pxxx {
- Fatalf("expected ONAME/Pxxx node, got %v", n)
+ base.Fatalf("expected ONAME/Pxxx node, got %v", n)
}
n.SetClass(PFUNC)
func dclfunc(sym *types.Sym, tfn *Node) *Node {
if tfn.Op != OTFUNC {
- Fatalf("expected OTFUNC node, got %v", tfn)
+ base.Fatalf("expected OTFUNC node, got %v", tfn)
}
fn := nod(ODCLFUNC, nil, nil)
- fn.Func.Nname = newfuncnamel(lineno, sym, fn.Func)
+ fn.Func.Nname = newfuncnamel(base.Pos, sym, fn.Func)
fn.Func.Nname.Name.Defn = fn
fn.Func.Nname.Name.Param.Ntype = tfn
setNodeNameFunc(fn.Func.Nname)
case OCLOSURE:
callee = arg.Func.Decl
default:
- Fatalf("expected ONAME or OCLOSURE node, got %+v", arg)
+ base.Fatalf("expected ONAME or OCLOSURE node, got %+v", arg)
}
if callee.Op != ODCLFUNC {
- Fatalf("expected ODCLFUNC node, got %+v", callee)
+ base.Fatalf("expected ODCLFUNC node, got %+v", callee)
}
c.extraCalls[c.curfn] = append(c.extraCalls[c.curfn], nowritebarrierrecCall{callee, n.Pos})
return true
// This can be called concurrently for different from Nodes.
func (c *nowritebarrierrecChecker) recordCall(from *Node, to *obj.LSym, pos src.XPos) {
if from.Op != ODCLFUNC {
- Fatalf("expected ODCLFUNC, got %v", from)
+ base.Fatalf("expected ODCLFUNC, got %v", from)
}
// We record this information on the *Func so this is
// concurrent-safe.
}
// Check go:nowritebarrier functions.
if n.Func.Pragma&Nowritebarrier != 0 && n.Func.WBPos.IsKnown() {
- yyerrorl(n.Func.WBPos, "write barrier prohibited")
+ base.ErrorfAt(n.Func.WBPos, "write barrier prohibited")
}
}
var err bytes.Buffer
call := funcs[fn]
for call.target != nil {
- fmt.Fprintf(&err, "\n\t%v: called by %v", linestr(call.lineno), call.target.Func.Nname)
+ fmt.Fprintf(&err, "\n\t%v: called by %v", base.FmtPos(call.lineno), call.target.Func.Nname)
call = funcs[call.target]
}
- yyerrorl(fn.Func.WBPos, "write barrier prohibited by caller; %v%s", fn.Func.Nname, err.String())
+ base.ErrorfAt(fn.Func.WBPos, "write barrier prohibited by caller; %v%s", fn.Func.Nname, err.String())
continue
}
package gc
import (
+ "cmd/compile/internal/base"
"cmd/compile/internal/types"
"cmd/internal/src"
"fmt"
x = reflect.ValueOf(v.Slice())
case src.XPos:
- p.printf("%s", linestr(v))
+ p.printf("%s", base.FmtPos(v))
return
case *types.Node:
package gc
import (
+ "cmd/compile/internal/base"
"cmd/internal/dwarf"
"cmd/internal/obj"
"cmd/internal/src"
func assembleInlines(fnsym *obj.LSym, dwVars []*dwarf.Var) dwarf.InlCalls {
var inlcalls dwarf.InlCalls
- if Debug.DwarfInl != 0 {
- Ctxt.Logf("assembling DWARF inlined routine info for %v\n", fnsym.Name)
+ if base.Debug.DwarfInl != 0 {
+ base.Ctxt.Logf("assembling DWARF inlined routine info for %v\n", fnsym.Name)
}
// This maps inline index (from Ctxt.InlTree) to index in inlcalls.Calls
}
m = makePreinlineDclMap(fnsym)
} else {
- ifnlsym := Ctxt.InlTree.InlinedFunction(int(ii - 1))
+ ifnlsym := base.Ctxt.InlTree.InlinedFunction(int(ii - 1))
m = makePreinlineDclMap(ifnlsym)
}
}
// Debugging
- if Debug.DwarfInl != 0 {
+ if base.Debug.DwarfInl != 0 {
dumpInlCalls(inlcalls)
dumpInlVars(dwVars)
}
// abstract function DIE for an inlined routine imported from a
// previously compiled package.
func genAbstractFunc(fn *obj.LSym) {
- ifn := Ctxt.DwFixups.GetPrecursorFunc(fn)
+ ifn := base.Ctxt.DwFixups.GetPrecursorFunc(fn)
if ifn == nil {
- Ctxt.Diag("failed to locate precursor fn for %v", fn)
+ base.Ctxt.Diag("failed to locate precursor fn for %v", fn)
return
}
- if Debug.DwarfInl != 0 {
- Ctxt.Logf("DwarfAbstractFunc(%v)\n", fn.Name)
+ if base.Debug.DwarfInl != 0 {
+ base.Ctxt.Logf("DwarfAbstractFunc(%v)\n", fn.Name)
}
- Ctxt.DwarfAbstractFunc(ifn, fn, Ctxt.Pkgpath)
+ base.Ctxt.DwarfAbstractFunc(ifn, fn, base.Ctxt.Pkgpath)
}
// Undo any versioning performed when a name was written
dcl := preInliningDcls(fnsym)
m := make(map[varPos]int)
for i, n := range dcl {
- pos := Ctxt.InnermostPos(n.Pos)
+ pos := base.Ctxt.InnermostPos(n.Pos)
vp := varPos{
DeclName: unversion(n.Sym.Name),
DeclFile: pos.RelFilename(),
DeclCol: pos.Col(),
}
if _, found := m[vp]; found {
- Fatalf("child dcl collision on symbol %s within %v\n", n.Sym.Name, fnsym.Name)
+ base.Fatalf("child dcl collision on symbol %s within %v\n", n.Sym.Name, fnsym.Name)
}
m[vp] = i
}
// is one. We do this first so that parents appear before their
// children in the resulting table.
parCallIdx := -1
- parInlIdx := Ctxt.InlTree.Parent(inlIdx)
+ parInlIdx := base.Ctxt.InlTree.Parent(inlIdx)
if parInlIdx >= 0 {
parCallIdx = insertInlCall(dwcalls, parInlIdx, imap)
}
// Create new entry for this inline
- inlinedFn := Ctxt.InlTree.InlinedFunction(inlIdx)
- callXPos := Ctxt.InlTree.CallPos(inlIdx)
- absFnSym := Ctxt.DwFixups.AbsFuncDwarfSym(inlinedFn)
- pb := Ctxt.PosTable.Pos(callXPos).Base()
- callFileSym := Ctxt.Lookup(pb.SymFilename())
+ inlinedFn := base.Ctxt.InlTree.InlinedFunction(inlIdx)
+ callXPos := base.Ctxt.InlTree.CallPos(inlIdx)
+ absFnSym := base.Ctxt.DwFixups.AbsFuncDwarfSym(inlinedFn)
+ pb := base.Ctxt.PosTable.Pos(callXPos).Base()
+ callFileSym := base.Ctxt.Lookup(pb.SymFilename())
ic := dwarf.InlCall{
InlIndex: inlIdx,
CallFile: callFileSym,
// the index for a node from the inlined body of D will refer to the
// call to D from C. Whew.
func posInlIndex(xpos src.XPos) int {
- pos := Ctxt.PosTable.Pos(xpos)
+ pos := base.Ctxt.PosTable.Pos(xpos)
if b := pos.Base(); b != nil {
ii := b.InliningIndex()
if ii >= 0 {
// Append range to correct inlined call
callIdx, found := imap[ii]
if !found {
- Fatalf("can't find inlIndex %d in imap for prog at %d\n", ii, start)
+ base.Fatalf("can't find inlIndex %d in imap for prog at %d\n", ii, start)
}
call := &calls[callIdx]
call.Ranges = append(call.Ranges, dwarf.Range{Start: start, End: end})
func dumpInlCall(inlcalls dwarf.InlCalls, idx, ilevel int) {
for i := 0; i < ilevel; i++ {
- Ctxt.Logf(" ")
+ base.Ctxt.Logf(" ")
}
ic := inlcalls.Calls[idx]
- callee := Ctxt.InlTree.InlinedFunction(ic.InlIndex)
- Ctxt.Logf(" %d: II:%d (%s) V: (", idx, ic.InlIndex, callee.Name)
+ callee := base.Ctxt.InlTree.InlinedFunction(ic.InlIndex)
+ base.Ctxt.Logf(" %d: II:%d (%s) V: (", idx, ic.InlIndex, callee.Name)
for _, f := range ic.InlVars {
- Ctxt.Logf(" %v", f.Name)
+ base.Ctxt.Logf(" %v", f.Name)
}
- Ctxt.Logf(" ) C: (")
+ base.Ctxt.Logf(" ) C: (")
for _, k := range ic.Children {
- Ctxt.Logf(" %v", k)
+ base.Ctxt.Logf(" %v", k)
}
- Ctxt.Logf(" ) R:")
+ base.Ctxt.Logf(" ) R:")
for _, r := range ic.Ranges {
- Ctxt.Logf(" [%d,%d)", r.Start, r.End)
+ base.Ctxt.Logf(" [%d,%d)", r.Start, r.End)
}
- Ctxt.Logf("\n")
+ base.Ctxt.Logf("\n")
for _, k := range ic.Children {
dumpInlCall(inlcalls, k, ilevel+1)
}
if dwv.IsInAbstract {
ia = 1
}
- Ctxt.Logf("V%d: %s CI:%d II:%d IA:%d %s\n", i, dwv.Name, dwv.ChildIndex, dwv.InlIndex-1, ia, typ)
+ base.Ctxt.Logf("V%d: %s CI:%d II:%d IA:%d %s\n", i, dwv.Name, dwv.ChildIndex, dwv.InlIndex-1, ia, typ)
}
}
// Callee
ic := inlCalls.Calls[idx]
- callee := Ctxt.InlTree.InlinedFunction(ic.InlIndex).Name
+ callee := base.Ctxt.InlTree.InlinedFunction(ic.InlIndex).Name
calleeRanges := ic.Ranges
// Caller
parentRanges := []dwarf.Range{dwarf.Range{Start: int64(0), End: funcSize}}
if parentIdx != -1 {
pic := inlCalls.Calls[parentIdx]
- caller = Ctxt.InlTree.InlinedFunction(pic.InlIndex).Name
+ caller = base.Ctxt.InlTree.InlinedFunction(pic.InlIndex).Name
parentRanges = pic.Ranges
}
// Callee ranges contained in caller ranges?
c, m := rangesContainsAll(parentRanges, calleeRanges)
if !c {
- Fatalf("** malformed inlined routine range in %s: caller %s callee %s II=%d %s\n", funcName, caller, callee, idx, m)
+ base.Fatalf("** malformed inlined routine range in %s: caller %s callee %s II=%d %s\n", funcName, caller, callee, idx, m)
}
// Now visit kids
package gc
import (
+ "cmd/compile/internal/base"
"cmd/compile/internal/syntax"
"cmd/compile/internal/types"
"cmd/internal/obj"
pos := embeds[0].Pos
if !haveEmbed {
- p.yyerrorpos(pos, "invalid go:embed: missing import \"embed\"")
+ p.errorAt(pos, "invalid go:embed: missing import \"embed\"")
return exprs
}
- if Flag.Cfg.Embed.Patterns == nil {
- p.yyerrorpos(pos, "invalid go:embed: build system did not supply embed configuration")
+ if base.Flag.Cfg.Embed.Patterns == nil {
+ p.errorAt(pos, "invalid go:embed: build system did not supply embed configuration")
return exprs
}
if len(names) > 1 {
- p.yyerrorpos(pos, "go:embed cannot apply to multiple vars")
+ p.errorAt(pos, "go:embed cannot apply to multiple vars")
return exprs
}
if len(exprs) > 0 {
- p.yyerrorpos(pos, "go:embed cannot apply to var with initializer")
+ p.errorAt(pos, "go:embed cannot apply to var with initializer")
return exprs
}
if typ == nil {
// Should not happen, since len(exprs) == 0 now.
- p.yyerrorpos(pos, "go:embed cannot apply to var without type")
+ p.errorAt(pos, "go:embed cannot apply to var without type")
return exprs
}
kind := embedKindApprox(typ)
if kind == embedUnknown {
- p.yyerrorpos(pos, "go:embed cannot apply to var of type %v", typ)
+ p.errorAt(pos, "go:embed cannot apply to var of type %v", typ)
return exprs
}
var list []string
for _, e := range embeds {
for _, pattern := range e.Patterns {
- files, ok := Flag.Cfg.Embed.Patterns[pattern]
+ files, ok := base.Flag.Cfg.Embed.Patterns[pattern]
if !ok {
- p.yyerrorpos(e.Pos, "invalid go:embed: build system did not map pattern: %s", pattern)
+ p.errorAt(e.Pos, "invalid go:embed: build system did not map pattern: %s", pattern)
}
for _, file := range files {
- if Flag.Cfg.Embed.Files[file] == "" {
- p.yyerrorpos(e.Pos, "invalid go:embed: build system did not map file: %s", file)
+ if base.Flag.Cfg.Embed.Files[file] == "" {
+ p.errorAt(e.Pos, "invalid go:embed: build system did not map file: %s", file)
continue
}
if !have[file] {
if kind == embedString || kind == embedBytes {
if len(list) > 1 {
- p.yyerrorpos(pos, "invalid go:embed: multiple files for type %v", typ)
+ p.errorAt(pos, "invalid go:embed: multiple files for type %v", typ)
return exprs
}
}
// can't tell whether "string" and "byte" really mean "string" and "byte".
// The result must be confirmed later, after type checking, using embedKind.
func embedKindApprox(typ *Node) int {
- if typ.Sym != nil && typ.Sym.Name == "FS" && (typ.Sym.Pkg.Path == "embed" || (typ.Sym.Pkg == localpkg && Ctxt.Pkgpath == "embed")) {
+ if typ.Sym != nil && typ.Sym.Name == "FS" && (typ.Sym.Pkg.Path == "embed" || (typ.Sym.Pkg == localpkg && base.Ctxt.Pkgpath == "embed")) {
return embedFiles
}
// These are not guaranteed to match only string and []byte -
// embedKind determines the kind of embedding variable.
func embedKind(typ *types.Type) int {
- if typ.Sym != nil && typ.Sym.Name == "FS" && (typ.Sym.Pkg.Path == "embed" || (typ.Sym.Pkg == localpkg && Ctxt.Pkgpath == "embed")) {
+ if typ.Sym != nil && typ.Sym.Name == "FS" && (typ.Sym.Pkg.Path == "embed" || (typ.Sym.Pkg == localpkg && base.Ctxt.Pkgpath == "embed")) {
return embedFiles
}
if typ == types.Types[TSTRING] {
files := v.Name.Param.EmbedFiles()
switch kind := embedKind(v.Type); kind {
case embedUnknown:
- yyerrorl(v.Pos, "go:embed cannot apply to var of type %v", v.Type)
+ base.ErrorfAt(v.Pos, "go:embed cannot apply to var of type %v", v.Type)
case embedString, embedBytes:
file := files[0]
- fsym, size, err := fileStringSym(v.Pos, Flag.Cfg.Embed.Files[file], kind == embedString, nil)
+ fsym, size, err := fileStringSym(v.Pos, base.Flag.Cfg.Embed.Files[file], kind == embedString, nil)
if err != nil {
- yyerrorl(v.Pos, "embed %s: %v", file, err)
+ base.ErrorfAt(v.Pos, "embed %s: %v", file, err)
}
sym := v.Sym.Linksym()
off := 0
}
case embedFiles:
- slicedata := Ctxt.Lookup(`"".` + v.Sym.Name + `.files`)
+ slicedata := base.Ctxt.Lookup(`"".` + v.Sym.Name + `.files`)
off := 0
// []files pointed at by Files
off = dsymptr(slicedata, off, slicedata, 3*Widthptr) // []file, pointing just past slice
off = duintptr(slicedata, off, 0)
off += hashSize
} else {
- fsym, size, err := fileStringSym(v.Pos, Flag.Cfg.Embed.Files[file], true, hash)
+ fsym, size, err := fileStringSym(v.Pos, base.Flag.Cfg.Embed.Files[file], true, hash)
if err != nil {
- yyerrorl(v.Pos, "embed %s: %v", file, err)
+ base.ErrorfAt(v.Pos, "embed %s: %v", file, err)
}
off = dsymptr(slicedata, off, fsym, 0) // data string
off = duintptr(slicedata, off, uint64(size))
- off = int(slicedata.WriteBytes(Ctxt, int64(off), hash))
+ off = int(slicedata.WriteBytes(base.Ctxt, int64(off), hash))
}
}
ggloblsym(slicedata, int32(off), obj.RODATA|obj.LOCAL)
package gc
import (
+ "cmd/compile/internal/base"
"cmd/compile/internal/types"
"fmt"
)
Curfn = Curfn.Func.Decl
panic("can't happen")
}
- ln := lineno
- lineno = Curfn.Pos
+ ln := base.Pos
+ base.Pos = Curfn.Pos
moveToHeap(n)
Curfn = oldfn
- lineno = ln
+ base.Pos = ln
// ODOTPTR has already been introduced,
// so these are the non-pointer ODOT and OINDEX.
// moveToHeap records the parameter or local variable n as moved to the heap.
func moveToHeap(n *Node) {
- if Flag.LowerR != 0 {
+ if base.Flag.LowerR != 0 {
Dump("MOVE", n)
}
- if Flag.CompilingRuntime {
- yyerror("%v escapes to heap, not allowed in runtime", n)
+ if base.Flag.CompilingRuntime {
+ base.Errorf("%v escapes to heap, not allowed in runtime", n)
}
if n.Class() == PAUTOHEAP {
Dump("n", n)
- Fatalf("double move to heap")
+ base.Fatalf("double move to heap")
}
// Allocate a local stack variable to hold the pointer to the heap copy.
// the function.
if n.Class() == PPARAM || n.Class() == PPARAMOUT {
if n.Xoffset == BADWIDTH {
- Fatalf("addrescapes before param assignment")
+ base.Fatalf("addrescapes before param assignment")
}
// We rewrite n below to be a heap variable (indirection of heapaddr).
}
}
if !found {
- Fatalf("cannot find %v in local variable list", n)
+ base.Fatalf("cannot find %v in local variable list", n)
}
Curfn.Func.Dcl = append(Curfn.Func.Dcl, n)
}
n.Xoffset = 0
n.Name.Param.Heapaddr = heapaddr
n.Esc = EscHeap
- if Flag.LowerM != 0 {
- Warnl(n.Pos, "moved to heap: %v", n)
+ if base.Flag.LowerM != 0 {
+ base.WarnfAt(n.Pos, "moved to heap: %v", n)
}
}
// but we are reusing the ability to annotate an individual function
// argument and pass those annotations along to importing code.
if f.Type.IsUintptr() {
- if Flag.LowerM != 0 {
- Warnl(f.Pos, "assuming %v is unsafe uintptr", name())
+ if base.Flag.LowerM != 0 {
+ base.WarnfAt(f.Pos, "assuming %v is unsafe uintptr", name())
}
return unsafeUintptrTag
}
// External functions are assumed unsafe, unless
// //go:noescape is given before the declaration.
if fn.Func.Pragma&Noescape != 0 {
- if Flag.LowerM != 0 && f.Sym != nil {
- Warnl(f.Pos, "%v does not escape", name())
+ if base.Flag.LowerM != 0 && f.Sym != nil {
+ base.WarnfAt(f.Pos, "%v does not escape", name())
}
} else {
- if Flag.LowerM != 0 && f.Sym != nil {
- Warnl(f.Pos, "leaking param: %v", name())
+ if base.Flag.LowerM != 0 && f.Sym != nil {
+ base.WarnfAt(f.Pos, "leaking param: %v", name())
}
esc.AddHeap(0)
}
if fn.Func.Pragma&UintptrEscapes != 0 {
if f.Type.IsUintptr() {
- if Flag.LowerM != 0 {
- Warnl(f.Pos, "marking %v as escaping uintptr", name())
+ if base.Flag.LowerM != 0 {
+ base.WarnfAt(f.Pos, "marking %v as escaping uintptr", name())
}
return uintptrEscapesTag
}
if f.IsDDD() && f.Type.Elem().IsUintptr() {
// final argument is ...uintptr.
- if Flag.LowerM != 0 {
- Warnl(f.Pos, "marking %v as escaping ...uintptr", name())
+ if base.Flag.LowerM != 0 {
+ base.WarnfAt(f.Pos, "marking %v as escaping ...uintptr", name())
}
return uintptrEscapesTag
}
esc := loc.paramEsc
esc.Optimize()
- if Flag.LowerM != 0 && !loc.escapes {
+ if base.Flag.LowerM != 0 && !loc.escapes {
if esc.Empty() {
- Warnl(f.Pos, "%v does not escape", name())
+ base.WarnfAt(f.Pos, "%v does not escape", name())
}
if x := esc.Heap(); x >= 0 {
if x == 0 {
- Warnl(f.Pos, "leaking param: %v", name())
+ base.WarnfAt(f.Pos, "leaking param: %v", name())
} else {
// TODO(mdempsky): Mention level=x like below?
- Warnl(f.Pos, "leaking param content: %v", name())
+ base.WarnfAt(f.Pos, "leaking param content: %v", name())
}
}
for i := 0; i < numEscResults; i++ {
if x := esc.Result(i); x >= 0 {
res := fn.Type.Results().Field(i).Sym
- Warnl(f.Pos, "leaking param: %v to result %v level=%d", name(), res, x)
+ base.WarnfAt(f.Pos, "leaking param: %v to result %v level=%d", name(), res, x)
}
}
}
package gc
import (
+ "cmd/compile/internal/base"
"cmd/compile/internal/logopt"
"cmd/compile/internal/types"
"cmd/internal/src"
func escapeFuncs(fns []*Node, recursive bool) {
for _, fn := range fns {
if fn.Op != ODCLFUNC {
- Fatalf("unexpected node: %v", fn)
+ base.Fatalf("unexpected node: %v", fn)
}
}
func (e *Escape) initFunc(fn *Node) {
if fn.Op != ODCLFUNC || fn.Esc != EscFuncUnknown {
- Fatalf("unexpected node: %v", fn)
+ base.Fatalf("unexpected node: %v", fn)
}
fn.Esc = EscFuncPlanned
- if Flag.LowerM > 3 {
+ if base.Flag.LowerM > 3 {
Dump("escAnalyze", fn)
}
lno := setlineno(n)
defer func() {
- lineno = lno
+ base.Pos = lno
}()
- if Flag.LowerM > 2 {
- fmt.Printf("%v:[%d] %v stmt: %v\n", linestr(lineno), e.loopDepth, funcSym(e.curfn), n)
+ if base.Flag.LowerM > 2 {
+ fmt.Printf("%v:[%d] %v stmt: %v\n", base.FmtPos(base.Pos), e.loopDepth, funcSym(e.curfn), n)
}
e.stmts(n.Ninit)
switch n.Op {
default:
- Fatalf("unexpected stmt: %v", n)
+ base.Fatalf("unexpected stmt: %v", n)
case ODCLCONST, ODCLTYPE, OEMPTY, OFALL, OINLMARK:
// nop
case OLABEL:
switch asNode(n.Sym.Label) {
case nonlooping:
- if Flag.LowerM > 2 {
- fmt.Printf("%v:%v non-looping label\n", linestr(lineno), n)
+ if base.Flag.LowerM > 2 {
+ fmt.Printf("%v:%v non-looping label\n", base.FmtPos(base.Pos), n)
}
case looping:
- if Flag.LowerM > 2 {
- fmt.Printf("%v: %v looping label\n", linestr(lineno), n)
+ if base.Flag.LowerM > 2 {
+ fmt.Printf("%v: %v looping label\n", base.FmtPos(base.Pos), n)
}
e.loopDepth++
default:
- Fatalf("label missing tag")
+ base.Fatalf("label missing tag")
}
n.Sym.Label = nil
lno := setlineno(n)
defer func() {
- lineno = lno
+ base.Pos = lno
}()
uintptrEscapesHack := k.uintptrEscapesHack
switch n.Op {
default:
- Fatalf("unexpected expr: %v", n)
+ base.Fatalf("unexpected expr: %v", n)
case OLITERAL, ONIL, OGETG, OCLOSUREVAR, OTYPE, OMETHEXPR:
// nop
// for conversions from an unsafe.Pointer.
func (e *Escape) unsafeValue(k EscHole, n *Node) {
if n.Type.Etype != TUINTPTR {
- Fatalf("unexpected type %v for %v", n.Type, n)
+ base.Fatalf("unexpected type %v for %v", n.Type, n)
}
e.stmts(n.Ninit)
switch n.Op {
default:
- Fatalf("unexpected addr: %v", n)
+ base.Fatalf("unexpected addr: %v", n)
case ONAME:
if n.Class() == PEXTERN {
break
func (e *Escape) assign(dst, src *Node, why string, where *Node) {
// Filter out some no-op assignments for escape analysis.
ignore := dst != nil && src != nil && isSelfAssign(dst, src)
- if ignore && Flag.LowerM != 0 {
- Warnl(where.Pos, "%v ignoring self-assignment in %S", funcSym(e.curfn), where)
+ if ignore && base.Flag.LowerM != 0 {
+ base.WarnfAt(where.Pos, "%v ignoring self-assignment in %S", funcSym(e.curfn), where)
}
k := e.addr(dst)
switch call.Op {
default:
- Fatalf("unexpected call op: %v", call.Op)
+ base.Fatalf("unexpected call op: %v", call.Op)
case OCALLFUNC, OCALLMETH, OCALLINTER:
fixVariadicCall(call)
func (e *Escape) inMutualBatch(fn *Node) bool {
if fn.Name.Defn != nil && fn.Name.Defn.Esc < EscFuncTagged {
if fn.Name.Defn.Esc == EscFuncUnknown {
- Fatalf("graph inconsistency")
+ base.Fatalf("graph inconsistency")
}
return true
}
func (k EscHole) note(where *Node, why string) EscHole {
if where == nil || why == "" {
- Fatalf("note: missing where/why")
+ base.Fatalf("note: missing where/why")
}
- if Flag.LowerM >= 2 || logopt.Enabled() {
+ if base.Flag.LowerM >= 2 || logopt.Enabled() {
k.notes = &EscNote{
next: k.notes,
where: where,
func (k EscHole) shift(delta int) EscHole {
k.derefs += delta
if k.derefs < -1 {
- Fatalf("derefs underflow: %v", k.derefs)
+ base.Fatalf("derefs underflow: %v", k.derefs)
}
return k
}
// *ltmp" and "l2 = ltmp" and return "ltmp = &_"
// instead.
if k.derefs < 0 {
- Fatalf("teeHole: negative derefs")
+ base.Fatalf("teeHole: negative derefs")
}
e.flow(k, loc)
if n != nil && n.Op == ONAME && n.Name.IsClosureVar() {
n = n.Name.Defn
if n.Name.IsClosureVar() {
- Fatalf("still closure var")
+ base.Fatalf("still closure var")
}
}
func (e *Escape) newLoc(n *Node, transient bool) *EscLocation {
if e.curfn == nil {
- Fatalf("e.curfn isn't set")
+ base.Fatalf("e.curfn isn't set")
}
if n != nil && n.Type != nil && n.Type.NotInHeap() {
- yyerrorl(n.Pos, "%v is incomplete (or unallocatable); stack allocation disallowed", n.Type)
+ base.ErrorfAt(n.Pos, "%v is incomplete (or unallocatable); stack allocation disallowed", n.Type)
}
n = canonicalNode(n)
e.allLocs = append(e.allLocs, loc)
if n != nil {
if n.Op == ONAME && n.Name.Curfn != e.curfn {
- Fatalf("curfn mismatch: %v != %v", n.Name.Curfn, e.curfn)
+ base.Fatalf("curfn mismatch: %v != %v", n.Name.Curfn, e.curfn)
}
if n.HasOpt() {
- Fatalf("%v already has a location", n)
+ base.Fatalf("%v already has a location", n)
}
n.SetOpt(loc)
return
}
if dst.escapes && k.derefs < 0 { // dst = &src
- if Flag.LowerM >= 2 || logopt.Enabled() {
- pos := linestr(src.n.Pos)
- if Flag.LowerM >= 2 {
+ if base.Flag.LowerM >= 2 || logopt.Enabled() {
+ pos := base.FmtPos(src.n.Pos)
+ if base.Flag.LowerM >= 2 {
fmt.Printf("%s: %v escapes to heap:\n", pos, src.n)
}
explanation := e.explainFlow(pos, dst, src, k.derefs, k.notes, []*logopt.LoggedOpt{})
// that value flow for tagging the function
// later.
if l.isName(PPARAM) {
- if (logopt.Enabled() || Flag.LowerM >= 2) && !l.escapes {
- if Flag.LowerM >= 2 {
- fmt.Printf("%s: parameter %v leaks to %s with derefs=%d:\n", linestr(l.n.Pos), l.n, e.explainLoc(root), derefs)
+ if (logopt.Enabled() || base.Flag.LowerM >= 2) && !l.escapes {
+ if base.Flag.LowerM >= 2 {
+ fmt.Printf("%s: parameter %v leaks to %s with derefs=%d:\n", base.FmtPos(l.n.Pos), l.n, e.explainLoc(root), derefs)
}
explanation := e.explainPath(root, l)
if logopt.Enabled() {
// outlives it, then l needs to be heap
// allocated.
if addressOf && !l.escapes {
- if logopt.Enabled() || Flag.LowerM >= 2 {
- if Flag.LowerM >= 2 {
- fmt.Printf("%s: %v escapes to heap:\n", linestr(l.n.Pos), l.n)
+ if logopt.Enabled() || base.Flag.LowerM >= 2 {
+ if base.Flag.LowerM >= 2 {
+ fmt.Printf("%s: %v escapes to heap:\n", base.FmtPos(l.n.Pos), l.n)
}
explanation := e.explainPath(root, l)
if logopt.Enabled() {
// explainPath prints an explanation of how src flows to the walk root.
func (e *Escape) explainPath(root, src *EscLocation) []*logopt.LoggedOpt {
visited := make(map[*EscLocation]bool)
- pos := linestr(src.n.Pos)
+ pos := base.FmtPos(src.n.Pos)
var explanation []*logopt.LoggedOpt
for {
// Prevent infinite loop.
if visited[src] {
- if Flag.LowerM >= 2 {
+ if base.Flag.LowerM >= 2 {
fmt.Printf("%s: warning: truncated explanation due to assignment cycle; see golang.org/issue/35518\n", pos)
}
break
dst := src.dst
edge := &dst.edges[src.dstEdgeIdx]
if edge.src != src {
- Fatalf("path inconsistency: %v != %v", edge.src, src)
+ base.Fatalf("path inconsistency: %v != %v", edge.src, src)
}
explanation = e.explainFlow(pos, dst, src, edge.derefs, edge.notes, explanation)
if derefs >= 0 {
ops = strings.Repeat("*", derefs)
}
- print := Flag.LowerM >= 2
+ print := base.Flag.LowerM >= 2
flow := fmt.Sprintf(" flow: %s = %s%v:", e.explainLoc(dst), ops, e.explainLoc(srcloc))
if print {
for note := notes; note != nil; note = note.next {
if print {
- fmt.Printf("%s: from %v (%v) at %s\n", pos, note.where, note.why, linestr(note.where.Pos))
+ fmt.Printf("%s: from %v (%v) at %s\n", pos, note.where, note.why, base.FmtPos(note.where.Pos))
}
if logopt.Enabled() {
explanation = append(explanation, logopt.NewLoggedOpt(note.where.Pos, "escflow", "escape", e.curfn.funcname(),
// containsClosure reports whether c is a closure contained within f.
func containsClosure(f, c *Node) bool {
if f.Op != ODCLFUNC || c.Op != ODCLFUNC {
- Fatalf("bad containsClosure: %v, %v", f, c)
+ base.Fatalf("bad containsClosure: %v, %v", f, c)
}
// Common case.
if loc.escapes {
if n.Op != ONAME {
- if Flag.LowerM != 0 {
- Warnl(n.Pos, "%S escapes to heap", n)
+ if base.Flag.LowerM != 0 {
+ base.WarnfAt(n.Pos, "%S escapes to heap", n)
}
if logopt.Enabled() {
logopt.LogOpt(n.Pos, "escape", "escape", e.curfn.funcname())
n.Esc = EscHeap
addrescapes(n)
} else {
- if Flag.LowerM != 0 && n.Op != ONAME {
- Warnl(n.Pos, "%S does not escape", n)
+ if base.Flag.LowerM != 0 && n.Op != ONAME {
+ base.WarnfAt(n.Pos, "%S does not escape", n)
}
n.Esc = EscNone
if loc.transient {
func (l *EscLeaks) set(i, derefs int) {
v := derefs + 1
if v < 0 {
- Fatalf("invalid derefs count: %v", derefs)
+ base.Fatalf("invalid derefs count: %v", derefs)
}
if v > math.MaxUint8 {
v = math.MaxUint8
package gc
import (
+ "cmd/compile/internal/base"
"cmd/compile/internal/types"
"cmd/internal/bio"
"cmd/internal/src"
func exportf(bout *bio.Writer, format string, args ...interface{}) {
fmt.Fprintf(bout, format, args...)
- if Debug.Export != 0 {
+ if base.Debug.Export != 0 {
fmt.Printf(format, args...)
}
}
}
n.Sym.SetOnExportList(true)
- if Flag.E != 0 {
+ if base.Flag.E != 0 {
fmt.Printf("export symbol %v\n", n.Sym)
}
if types.IsExported(n.Sym.Name) || initname(n.Sym.Name) {
exportsym(n)
}
- if Flag.AsmHdr != "" && !n.Sym.Asm() {
+ if base.Flag.AsmHdr != "" && !n.Sym.Asm() {
n.Sym.SetAsm(true)
asmlist = append(asmlist, n)
}
size := bout.Offset() - off
exportf(bout, "\n$$\n")
- if Debug.Export != 0 {
- fmt.Printf("BenchmarkExportSize:%s 1 %d bytes\n", Ctxt.Pkgpath, size)
+ if base.Debug.Export != 0 {
+ fmt.Printf("BenchmarkExportSize:%s 1 %d bytes\n", base.Ctxt.Pkgpath, size)
}
}
// is declarations for Runtimepkg, which are populated
// by loadsys instead.
if s.Pkg != Runtimepkg {
- Fatalf("missing ONONAME for %v\n", s)
+ base.Fatalf("missing ONONAME for %v\n", s)
}
n = dclname(s)
s.Importdef = ipkg
}
if n.Op != ONONAME && n.Op != op {
- redeclare(lineno, s, fmt.Sprintf("during import %q", ipkg.Path))
+ redeclare(base.Pos, s, fmt.Sprintf("during import %q", ipkg.Path))
}
return n
}
t := n.Type
if t == nil {
- Fatalf("importtype %v", s)
+ base.Fatalf("importtype %v", s)
}
return t
}
n := importsym(ipkg, s, op)
if n.Op != ONONAME {
if n.Op == op && (n.Class() != ctxt || !types.Identical(n.Type, t)) {
- redeclare(lineno, s, fmt.Sprintf("during import %q", ipkg.Path))
+ redeclare(base.Pos, s, fmt.Sprintf("during import %q", ipkg.Path))
}
return nil
}
n.SetVal(val)
- if Flag.E != 0 {
+ if base.Flag.E != 0 {
fmt.Printf("import const %v %L = %v\n", s, t, val)
}
}
n.Func = new(Func)
- if Flag.E != 0 {
+ if base.Flag.E != 0 {
fmt.Printf("import func %v%S\n", s, t)
}
}
return
}
- if Flag.E != 0 {
+ if base.Flag.E != 0 {
fmt.Printf("import var %v %L\n", s, t)
}
}
return
}
- if Flag.E != 0 {
+ if base.Flag.E != 0 {
fmt.Printf("import type %v = %L\n", s, t)
}
}
func dumpasmhdr() {
- b, err := bio.Create(Flag.AsmHdr)
+ b, err := bio.Create(base.Flag.AsmHdr)
if err != nil {
- Fatalf("%v", err)
+ base.Fatalf("%v", err)
}
fmt.Fprintf(b, "// generated by compile -asmhdr from package %s\n\n", localpkg.Name)
for _, n := range asmlist {
import (
"bytes"
+ "cmd/compile/internal/base"
"cmd/compile/internal/types"
"cmd/internal/src"
"fmt"
flag |= FmtSign
}
if s.Flag(' ') {
- Fatalf("FmtUnsigned in format string")
+ base.Fatalf("FmtUnsigned in format string")
}
if _, ok := s.Precision(); ok {
flag |= FmtComma
case int32, int64, string, types.EType, constant.Value:
// OK: printing these types doesn't depend on mode
default:
- Fatalf("mode.prepareArgs type %T", arg)
+ base.Fatalf("mode.prepareArgs type %T", arg)
}
}
}
short := flag&FmtShort != 0
// Useful to see which nodes in an AST printout are actually identical
- if Debug.DumpPtrs != 0 {
+ if base.Debug.DumpPtrs != 0 {
fmt.Fprintf(s, " p(%p)", n)
}
if !short && n.Name != nil && n.Name.Vargen != 0 {
fmt.Fprintf(s, " g(%d)", n.Name.Vargen)
}
- if Debug.DumpPtrs != 0 && !short && n.Name != nil && n.Name.Defn != nil {
+ if base.Debug.DumpPtrs != 0 && !short && n.Name != nil && n.Name.Defn != nil {
// Useful to see where Defn is set and what node it points to
fmt.Fprintf(s, " defn(%p)", n.Name.Defn)
}
case mt.Hiter:
b.WriteString("map.iter[")
default:
- Fatalf("unknown internal map type")
+ base.Fatalf("unknown internal map type")
}
tconv2(b, m.Key(), 0, mode, visited)
b.WriteByte(']')
case OSLICEHEADER:
if n.List.Len() != 2 {
- Fatalf("bad OSLICEHEADER list length %d", n.List.Len())
+ base.Fatalf("bad OSLICEHEADER list length %d", n.List.Len())
}
mode.Fprintf(s, "sliceheader{%v,%v,%v}", n.Left, n.List.First(), n.List.Second())
dumpdepth--
default:
- Fatalf("unhandled %%N mode: %d", mode)
+ base.Fatalf("unhandled %%N mode: %d", mode)
}
}
package gc
import (
+ "cmd/compile/internal/base"
"cmd/compile/internal/types"
"cmd/internal/obj"
"cmd/internal/src"
// make a new Node off the books
func tempAt(pos src.XPos, curfn *Node, t *types.Type) *Node {
if curfn == nil {
- Fatalf("no curfn for tempAt")
+ base.Fatalf("no curfn for tempAt")
}
if curfn.Op == OCLOSURE {
Dump("tempAt", curfn)
- Fatalf("adding tempAt to wrong closure function")
+ base.Fatalf("adding tempAt to wrong closure function")
}
if t == nil {
- Fatalf("tempAt called with nil type")
+ base.Fatalf("tempAt called with nil type")
}
s := &types.Sym{
}
func temp(t *types.Type) *Node {
- return tempAt(lineno, Curfn, t)
+ return tempAt(base.Pos, Curfn, t)
}
package gc
import (
+ "cmd/compile/internal/base"
"cmd/compile/internal/ssa"
"cmd/compile/internal/types"
"cmd/internal/obj"
// isRuntimePkg reports whether p is package runtime.
func isRuntimePkg(p *types.Pkg) bool {
- if Flag.CompilingRuntime && p == localpkg {
+ if base.Flag.CompilingRuntime && p == localpkg {
return true
}
return p.Path == "runtime"
// isReflectPkg reports whether p is package reflect.
func isReflectPkg(p *types.Pkg) bool {
if p == localpkg {
- return Ctxt.Pkgpath == "reflect"
+ return base.Ctxt.Pkgpath == "reflect"
}
return p.Path == "reflect"
}
// Whether we are tracking lexical scopes for DWARF.
var trackScopes bool
-var Ctxt *obj.Link
-
var nodfp *Node
var autogeneratedPos src.XPos
package gc
import (
+ "cmd/compile/internal/base"
"cmd/compile/internal/ssa"
"cmd/internal/obj"
"cmd/internal/objabi"
// worker indicates which of the backend workers will use the Progs.
func newProgs(fn *Node, worker int) *Progs {
pp := new(Progs)
- if Ctxt.CanReuseProgs() {
- sz := len(sharedProgArray) / Flag.LowerC
+ if base.Ctxt.CanReuseProgs() {
+ sz := len(sharedProgArray) / base.Flag.LowerC
pp.progcache = sharedProgArray[sz*worker : sz*(worker+1)]
}
pp.curfn = fn
} else {
p = new(obj.Prog)
}
- p.Ctxt = Ctxt
+ p.Ctxt = base.Ctxt
return p
}
// Flush converts from pp to machine code.
// It assembles the accumulated Prog list into object code via the
// shared link context.
func (pp *Progs) Flush() {
	plist := &obj.Plist{Firstpc: pp.Text, Curfn: pp.curfn}
	obj.Flushplist(base.Ctxt, plist, pp.NewProg, base.Ctxt.Pkgpath)
}
// Free clears pp and any associated resources.
func (pp *Progs) Free() {
- if Ctxt.CanReuseProgs() {
+ if base.Ctxt.CanReuseProgs() {
// Clear progs to enable GC and avoid abuse.
s := pp.progcache[:pp.cacheidx]
for i := range s {
pp.clearp(pp.next)
p.Link = pp.next
- if !pp.pos.IsKnown() && Flag.K != 0 {
- Warn("prog: unknown position (line 0)")
+ if !pp.pos.IsKnown() && base.Flag.K != 0 {
+ base.Warn("prog: unknown position (line 0)")
}
p.As = as
func (pp *Progs) settext(fn *Node) {
if pp.Text != nil {
- Fatalf("Progs.settext called twice")
+ base.Fatalf("Progs.settext called twice")
}
ptxt := pp.Prog(obj.ATEXT)
pp.Text = ptxt
// called for both functions with bodies and functions without bodies.
func (f *Func) initLSym(hasBody bool) {
if f.lsym != nil {
- Fatalf("Func.initLSym called twice")
+ base.Fatalf("Func.initLSym called twice")
}
if nam := f.Nname; !nam.isBlank() {
// using the expected ABI.
want := obj.ABIInternal
if f.lsym.ABI() != want {
- Fatalf("function symbol %s has the wrong ABI %v, expected %v", f.lsym.Name, f.lsym.ABI(), want)
+ base.Fatalf("function symbol %s has the wrong ABI %v, expected %v", f.lsym.Name, f.lsym.ABI(), want)
}
}
}
asym.SetABI(aliasABI)
asym.Set(obj.AttrDuplicateOK, true)
- Ctxt.ABIAliases = append(Ctxt.ABIAliases, asym)
+ base.Ctxt.ABIAliases = append(base.Ctxt.ABIAliases, asym)
}
}
// Clumsy but important.
// See test/recover.go for test cases and src/reflect/value.go
// for the actual functions being considered.
- if Ctxt.Pkgpath == "reflect" {
+ if base.Ctxt.Pkgpath == "reflect" {
switch f.Nname.Sym.Name {
case "callReflect", "callMethod":
flag |= obj.WRAPPER
}
}
- Ctxt.InitTextSym(f.lsym, flag)
+ base.Ctxt.InitTextSym(f.lsym, flag)
}
func ggloblnod(nam *Node) {
if nam.Type != nil && !nam.Type.HasPointers() {
flags |= obj.NOPTR
}
- Ctxt.Globl(s, nam.Type.Width, flags)
+ base.Ctxt.Globl(s, nam.Type.Width, flags)
if nam.Name.LibfuzzerExtraCounter() {
s.Type = objabi.SLIBFUZZER_EXTRA_COUNTER
}
s.Set(obj.AttrLocal, true)
flags &^= obj.LOCAL
}
- Ctxt.Globl(s, int64(width), int(flags))
+ base.Ctxt.Globl(s, int64(width), int(flags))
}
func Addrconst(a *obj.Addr, v int64) {
func Patch(p *obj.Prog, to *obj.Prog) {
if p.To.Type != obj.TYPE_BRANCH {
- Fatalf("patch: not a branch")
+ base.Fatalf("patch: not a branch")
}
p.To.SetTarget(to)
p.To.Offset = to.Pc
import (
"bufio"
"bytes"
+ "cmd/compile/internal/base"
"cmd/compile/internal/types"
"cmd/internal/goobj"
"cmd/internal/src"
p.typIndex[pt] = uint64(i)
}
if len(p.typIndex) > predeclReserved {
- Fatalf("too many predeclared types: %d > %d", len(p.typIndex), predeclReserved)
+ base.Fatalf("too many predeclared types: %d > %d", len(p.typIndex), predeclReserved)
}
// Initialize work queue with exported declarations.
// Add fingerprint (used by linker object file).
// Attach this to the end, so tools (e.g. gcimporter) don't care.
- copy(Ctxt.Fingerprint[:], h.Sum(nil)[:])
- out.Write(Ctxt.Fingerprint[:])
+ copy(base.Ctxt.Fingerprint[:], h.Sum(nil)[:])
+ out.Write(base.Ctxt.Fingerprint[:])
}
// writeIndex writes out an object index. mainIndex indicates whether
// pushDecl adds n to the declaration work queue, if not already present.
func (p *iexporter) pushDecl(n *Node) {
if n.Sym == nil || asNode(n.Sym.Def) != n && n.Op != OTYPE {
- Fatalf("weird Sym: %v, %v", n, n.Sym)
+ base.Fatalf("weird Sym: %v, %v", n, n.Sym)
}
// Don't export predeclared declarations.
case PFUNC:
if n.IsMethod() {
- Fatalf("unexpected method: %v", n)
+ base.Fatalf("unexpected method: %v", n)
}
// Function.
w.funcExt(n)
default:
- Fatalf("unexpected class: %v, %v", n, n.Class())
+ base.Fatalf("unexpected class: %v, %v", n, n.Class())
}
case OLITERAL:
}
default:
- Fatalf("unexpected node: %v", n)
+ base.Fatalf("unexpected node: %v", n)
}
p.declIndex[n] = w.flush()
}
func (w *exportWriter) pos(pos src.XPos) {
- p := Ctxt.PosTable.Pos(pos)
+ p := base.Ctxt.PosTable.Pos(pos)
file := p.Base().AbsFilename()
line := int64(p.RelLine())
column := int64(p.RelCol())
func (w *exportWriter) selector(s *types.Sym) {
if w.currPkg == nil {
- Fatalf("missing currPkg")
+ base.Fatalf("missing currPkg")
}
// Method selectors are rewritten into method symbols (of the
pkg = localpkg
}
if s.Pkg != pkg {
- Fatalf("package mismatch in selector: %v in package %q, but want %q", s, s.Pkg.Path, pkg.Path)
+ base.Fatalf("package mismatch in selector: %v in package %q, but want %q", s, s.Pkg.Path, pkg.Path)
}
}
func (w *exportWriter) doTyp(t *types.Type) {
if t.Sym != nil {
if t.Sym.Pkg == builtinpkg || t.Sym.Pkg == unsafepkg {
- Fatalf("builtin type missing from typIndex: %v", t)
+ base.Fatalf("builtin type missing from typIndex: %v", t)
}
w.startType(definedType)
}
default:
- Fatalf("unexpected type: %v", t)
+ base.Fatalf("unexpected type: %v", t)
}
}
return constant.Complex
}
- Fatalf("unexpected constant type: %v", typ)
+ base.Fatalf("unexpected constant type: %v", typ)
return 0
}
negative := constant.Sign(x) < 0
if !signed && negative {
- Fatalf("negative unsigned integer; type %v, value %v", typ, x)
+ base.Fatalf("negative unsigned integer; type %v, value %v", typ, x)
}
b := constant.Bytes(x) // little endian
}
if len(b) > 0 && b[0] == 0 {
- Fatalf("leading zeros")
+ base.Fatalf("leading zeros")
}
if uint(len(b)) > maxBytes {
- Fatalf("bad mpint length: %d > %d (type %v, value %v)", len(b), maxBytes, typ, x)
+ base.Fatalf("bad mpint length: %d > %d (type %v, value %v)", len(b), maxBytes, typ, x)
}
maxSmall := 256 - maxBytes
}
}
if n < maxSmall || n >= 256 {
- Fatalf("encoding mistake: %d, %v, %v => %d", len(b), signed, negative, n)
+ base.Fatalf("encoding mistake: %d, %v, %v => %d", len(b), signed, negative, n)
}
w.data.WriteByte(byte(n))
func (w *exportWriter) mpfloat(v constant.Value, typ *types.Type) {
f := bigFloatVal(v)
if f.IsInf() {
- Fatalf("infinite constant")
+ base.Fatalf("infinite constant")
}
// Break into f = mant × 2**exp, with 0.5 <= mant < 1.
manti, acc := mant.Int(nil)
if acc != big.Exact {
- Fatalf("mantissa scaling failed for %f (%s)", f, acc)
+ base.Fatalf("mantissa scaling failed for %f (%s)", f, acc)
}
w.mpint(makeInt(manti), typ)
if manti.Sign() != 0 {
w.string(n.Sym.Name)
default:
- Fatalf("exporter: CANNOT EXPORT: %v\nPlease notify gri@\n", n.Op)
+ base.Fatalf("exporter: CANNOT EXPORT: %v\nPlease notify gri@\n", n.Op)
}
}
w.uint64(uint64(len(cases)))
for _, cas := range cases {
if cas.Op != OCASE {
- Fatalf("expected OCASE, got %v", cas)
+ base.Fatalf("expected OCASE, got %v", cas)
}
w.pos(cas.Pos)
w.stmtList(cas.List)
// (somewhat closely following the structure of exprfmt in fmt.go)
case ONIL:
if !n.Type.HasNil() {
- Fatalf("unexpected type for nil: %v", n.Type)
+ base.Fatalf("unexpected type for nil: %v", n.Type)
}
if n.Orig != nil && n.Orig != n {
w.expr(n.Orig)
var s *types.Sym
if n.Left != nil {
if n.Left.Op != ONONAME {
- Fatalf("expected ONONAME, got %v", n.Left)
+ base.Fatalf("expected ONONAME, got %v", n.Left)
}
s = n.Left.Sym
}
if op == OAPPEND {
w.bool(n.IsDDD())
} else if n.IsDDD() {
- Fatalf("exporter: unexpected '...' with %v call", op)
+ base.Fatalf("exporter: unexpected '...' with %v call", op)
}
case OCALL, OCALLFUNC, OCALLMETH, OCALLINTER, OGETG:
// has already been replaced with literals
default:
- Fatalf("cannot export %v (%d) node\n"+
+ base.Fatalf("cannot export %v (%d) node\n"+
"\t==> please file an issue and assign to gri@", n.Op, int(n.Op))
}
}
// TODO(mdempsky): Fix autotmp hack.
if i := strings.LastIndex(name, "."); i >= 0 && !strings.HasPrefix(name, ".autotmp_") {
- Fatalf("unexpected dot in identifier: %v", name)
+ base.Fatalf("unexpected dot in identifier: %v", name)
}
if v > 0 {
if strings.Contains(name, "·") {
- Fatalf("exporter: unexpected · in symbol name")
+ base.Fatalf("exporter: unexpected · in symbol name")
}
name = fmt.Sprintf("%s·%d", name, v)
}
if !types.IsExported(name) && s.Pkg != w.currPkg {
- Fatalf("weird package in name: %v => %v, not %q", s, name, w.currPkg.Path)
+ base.Fatalf("weird package in name: %v => %v, not %q", s, name, w.currPkg.Path)
}
w.string(name)
package gc
import (
+ "cmd/compile/internal/base"
"cmd/compile/internal/types"
"cmd/internal/bio"
"cmd/internal/goobj"
r := importReaderFor(fn, inlineImporter)
if r == nil {
- Fatalf("missing import reader for %v", fn)
+ base.Fatalf("missing import reader for %v", fn)
}
r.doInline(fn)
// int64 reads a varint-encoded signed integer from the import data.
// A read error is fatal for the import: it is reported against the
// package path and compilation exits.
func (r *intReader) int64() int64 {
	i, err := binary.ReadVarint(r.Reader)
	if err != nil {
		base.Errorf("import %q: read error: %v", r.pkg.Path, err)
		base.ErrorExit()
	}
	return i
}
// uint64 reads a uvarint-encoded unsigned integer from the import data.
// A read error is fatal for the import: it is reported against the
// package path and compilation exits.
func (r *intReader) uint64() uint64 {
	i, err := binary.ReadUvarint(r.Reader)
	if err != nil {
		base.Errorf("import %q: read error: %v", r.pkg.Path, err)
		base.ErrorExit()
	}
	return i
}
version := ird.uint64()
if version != iexportVersion {
- yyerror("import %q: unknown export format version %d", pkg.Path, version)
- errorexit()
+ base.Errorf("import %q: unknown export format version %d", pkg.Path, version)
+ base.ErrorExit()
}
sLen := ird.uint64()
// returning individual substrings very efficiently.
data, err := mapFile(in.File(), in.Offset(), int64(sLen+dLen))
if err != nil {
- yyerror("import %q: mapping input: %v", pkg.Path, err)
- errorexit()
+ base.Errorf("import %q: mapping input: %v", pkg.Path, err)
+ base.ErrorExit()
}
stringData := data[:sLen]
declData := data[sLen:]
pkg.Lookup("_").Def = asTypesNode(nblank)
} else {
if pkg.Name != pkgName {
- Fatalf("conflicting package names %v and %v for path %q", pkg.Name, pkgName, pkg.Path)
+ base.Fatalf("conflicting package names %v and %v for path %q", pkg.Name, pkgName, pkg.Path)
}
if pkg.Height != pkgHeight {
- Fatalf("conflicting package heights %v and %v for path %q", pkg.Height, pkgHeight, pkg.Path)
+ base.Fatalf("conflicting package heights %v and %v for path %q", pkg.Height, pkgHeight, pkg.Path)
}
}
// Create stub declaration. If used, this will
// be overwritten by expandDecl.
if s.Def != nil {
- Fatalf("unexpected definition for %v: %v", s, asNode(s.Def))
+ base.Fatalf("unexpected definition for %v: %v", s, asNode(s.Def))
}
s.Def = asTypesNode(npos(src.NoXPos, dclname(s)))
}
// Fingerprint.
_, err = io.ReadFull(in, fingerprint[:])
if err != nil {
- yyerror("import %s: error reading fingerprint", pkg.Path)
- errorexit()
+ base.Errorf("import %s: error reading fingerprint", pkg.Path)
+ base.ErrorExit()
}
return fingerprint
}
slen, n := binary.Uvarint(x[:n])
if n <= 0 {
- Fatalf("varint failed")
+ base.Fatalf("varint failed")
}
spos := off + uint64(n)
return p.stringData[spos : spos+slen]
func (r *importReader) doDecl(n *Node) {
if n.Op != ONONAME {
- Fatalf("doDecl: unexpected Op for %v: %v", n.Sym, n.Op)
+ base.Fatalf("doDecl: unexpected Op for %v: %v", n.Sym, n.Op)
}
tag := r.byte()
r.varExt(n)
default:
- Fatalf("unexpected tag: %v", tag)
+ base.Fatalf("unexpected tag: %v", tag)
}
}
return makeComplex(p.float(typ), p.float(typ))
}
- Fatalf("unexpected value type: %v", typ)
+ base.Fatalf("unexpected value type: %v", typ)
panic("unreachable")
}
v = -(n &^ 1) >> 1
}
if v < 1 || uint(v) > maxBytes {
- Fatalf("weird decoding: %v, %v => %v", n, signed, v)
+ base.Fatalf("weird decoding: %v, %v => %v", n, signed, v)
}
b := make([]byte, v)
p.Read(b)
}
if r.prevBase == nil {
- Fatalf("missing posbase")
+ base.Fatalf("missing posbase")
}
pos := src.MakePos(r.prevBase, uint(r.prevLine), uint(r.prevColumn))
- return Ctxt.PosTable.XPos(pos)
+ return base.Ctxt.PosTable.XPos(pos)
}
func (r *importReader) typ() *types.Type {
t, ok := p.typCache[off]
if !ok {
if off < predeclReserved {
- Fatalf("predeclared type missing from cache: %d", off)
+ base.Fatalf("predeclared type missing from cache: %d", off)
}
t = p.newReader(off-predeclReserved, nil).typ1()
p.typCache[off] = t
func (r *importReader) typ1() *types.Type {
switch k := r.kind(); k {
default:
- Fatalf("unexpected kind tag in %q: %v", r.p.ipkg.Path, k)
+ base.Fatalf("unexpected kind tag in %q: %v", r.p.ipkg.Path, k)
return nil
case definedType:
expandDecl(n)
}
if n.Op != OTYPE {
- Fatalf("expected OTYPE, got %v: %v, %v", n.Op, n.Sym, n)
+ base.Fatalf("expected OTYPE, got %v: %v, %v", n.Op, n.Sym, n)
}
return n.Type
case pointerType:
// int64 reads a varint-encoded signed integer from the declaration
// data. Malformed data indicates a corrupt export stream, which is an
// internal error.
func (r *importReader) int64() int64 {
	n, err := binary.ReadVarint(r)
	if err != nil {
		base.Fatalf("readVarint: %v", err)
	}
	return n
}
// uint64 reads a uvarint-encoded unsigned integer from the declaration
// data. Malformed data indicates a corrupt export stream, which is an
// internal error.
func (r *importReader) uint64() uint64 {
	n, err := binary.ReadUvarint(r)
	if err != nil {
		// Was "readVarint", copy-pasted from int64; this path reads a Uvarint.
		base.Fatalf("readUvarint: %v", err)
	}
	return n
}
// byte reads a single byte from the declaration data. A read failure
// indicates a corrupt export stream, which is an internal error.
func (r *importReader) byte() byte {
	x, err := r.ReadByte()
	if err != nil {
		base.Fatalf("declReader.ReadByte: %v", err)
	}
	return x
}
idx := int32(r.int64())
if idx != -1 {
if s.Linkname != "" {
- Fatalf("bad index for linknamed symbol: %v %d\n", lsym, idx)
+ base.Fatalf("bad index for linknamed symbol: %v %d\n", lsym, idx)
}
lsym.SymIdx = idx
lsym.Set(obj.AttrIndexed, true)
func (r *importReader) doInline(n *Node) {
if len(n.Func.Inl.Body) != 0 {
- Fatalf("%v already has inline body", n)
+ base.Fatalf("%v already has inline body", n)
}
funchdr(n)
importlist = append(importlist, n)
- if Flag.E > 0 && Flag.LowerM > 2 {
- if Flag.LowerM > 3 {
+ if base.Flag.E > 0 && base.Flag.LowerM > 2 {
+ if base.Flag.LowerM > 3 {
fmt.Printf("inl body for %v %#v: %+v\n", n, n.Type, asNodes(n.Func.Inl.Body))
} else {
fmt.Printf("inl body for %v %#v: %v\n", n, n.Type, asNodes(n.Func.Inl.Body))
// expr reads a single expression node from the import data.
// OBLOCK is a statement-only construct, so its appearance in an
// expression position means the export data is malformed.
func (r *importReader) expr() *Node {
	n := r.node()
	if n != nil && n.Op == OBLOCK {
		base.Fatalf("unexpected block node: %v", n)
	}
	return n
}
case OSTRUCTLIT:
// TODO(mdempsky): Export position information for OSTRUCTKEY nodes.
- savedlineno := lineno
- lineno = r.pos()
- n := nodl(lineno, OCOMPLIT, nil, typenod(r.typ()))
+ savedlineno := base.Pos
+ base.Pos = r.pos()
+ n := nodl(base.Pos, OCOMPLIT, nil, typenod(r.typ()))
n.List.Set(r.elemList()) // special handling of field names
- lineno = savedlineno
+ base.Pos = savedlineno
return n
// case OARRAYLIT, OSLICELIT, OMAPLIT:
return nil
default:
- Fatalf("cannot import %v (%d) node\n"+
+ base.Fatalf("cannot import %v (%d) node\n"+
"\t==> please file an issue and assign to gri@", op, int(op))
panic("unreachable") // satisfy compiler
}
package gc
import (
+ "cmd/compile/internal/base"
"cmd/compile/internal/types"
"cmd/internal/obj"
)
// Make a function that contains all the initialization statements.
if len(nf) > 0 {
- lineno = nf[0].Pos // prolog/epilog gets line number of first init stmt
+ base.Pos = nf[0].Pos // prolog/epilog gets line number of first init stmt
initializers := lookup("init")
fn := dclfunc(initializers, nod(OTFUNC, nil, nil))
for _, dcl := range initTodo.Func.Dcl {
// We only generate temps using initTodo if there
// are package-scope initialization statements, so
// something's weird if we get here.
- Fatalf("initTodo still has declarations")
+ base.Fatalf("initTodo still has declarations")
}
initTodo = nil
"bytes"
"container/heap"
"fmt"
+
+ "cmd/compile/internal/base"
)
// Package initialization
case ODCLCONST, ODCLFUNC, ODCLTYPE:
// nop
default:
- Fatalf("unexpected package-level statement: %v", n)
+ base.Fatalf("unexpected package-level statement: %v", n)
}
}
// confused us and there might not be
// a loop. Let the user fix those
// first.
- ExitIfErrors()
+ base.ExitIfErrors()
findInitLoopAndExit(firstLHS(n), new([]*Node))
- Fatalf("initialization unfinished, but failed to identify loop")
+ base.Fatalf("initialization unfinished, but failed to identify loop")
}
}
}
// Invariant consistency check. If this is non-zero, then we
// should have found a cycle above.
if len(o.blocking) != 0 {
- Fatalf("expected empty map: %v", o.blocking)
+ base.Fatalf("expected empty map: %v", o.blocking)
}
return s.out
func (o *InitOrder) processAssign(n *Node) {
if n.Initorder() != InitNotStarted || n.Xoffset != BADWIDTH {
- Fatalf("unexpected state: %v, %v, %v", n, n.Initorder(), n.Xoffset)
+ base.Fatalf("unexpected state: %v, %v, %v", n, n.Initorder(), n.Xoffset)
}
n.SetInitorder(InitPending)
for o.ready.Len() != 0 {
n := heap.Pop(&o.ready).(*Node)
if n.Initorder() != InitPending || n.Xoffset != 0 {
- Fatalf("unexpected state: %v, %v, %v", n, n.Initorder(), n.Xoffset)
+ base.Fatalf("unexpected state: %v, %v, %v", n, n.Initorder(), n.Xoffset)
}
initialize(n)
}
fmt.Fprintf(&msg, "\t%v: %v", l[0].Line(), l[0])
- yyerrorl(l[0].Pos, msg.String())
- errorexit()
+ base.ErrorfAt(l[0].Pos, msg.String())
+ base.ErrorExit()
}
// collectDeps returns all of the package-level functions and
case ODCLFUNC:
d.inspectList(n.Nbody)
default:
- Fatalf("unexpected Op: %v", n.Op)
+ base.Fatalf("unexpected Op: %v", n.Op)
}
return d.seen
}
return n.List.First()
}
- Fatalf("unexpected Op: %v", n.Op)
+ base.Fatalf("unexpected Op: %v", n.Op)
return nil
}
package gc
import (
+ "cmd/compile/internal/base"
"cmd/compile/internal/logopt"
"cmd/compile/internal/types"
"cmd/internal/obj"
rcvr = rcvr.Elem()
}
if rcvr.Sym == nil {
- Fatalf("receiver with no sym: [%v] %L (%v)", fn.Sym, fn, rcvr)
+ base.Fatalf("receiver with no sym: [%v] %L (%v)", fn.Sym, fn, rcvr)
}
return rcvr.Sym.Pkg
}
return // typecheckinl on local function
}
- if Flag.LowerM > 2 || Debug.Export != 0 {
+ if base.Flag.LowerM > 2 || base.Debug.Export != 0 {
fmt.Printf("typecheck import [%v] %L { %#v }\n", fn.Sym, fn, asNodes(fn.Func.Inl.Body))
}
fn.Func.Inl.Dcl = append(fn.Func.Inl.Dcl, fn.Func.Dcl...)
fn.Func.Dcl = nil
- lineno = lno
+ base.Pos = lno
}
// Caninl determines whether fn is inlineable.
// fn and ->nbody will already have been typechecked.
func caninl(fn *Node) {
if fn.Op != ODCLFUNC {
- Fatalf("caninl %v", fn)
+ base.Fatalf("caninl %v", fn)
}
if fn.Func.Nname == nil {
- Fatalf("caninl no nname %+v", fn)
+ base.Fatalf("caninl no nname %+v", fn)
}
var reason string // reason, if any, that the function was not inlined
- if Flag.LowerM > 1 || logopt.Enabled() {
+ if base.Flag.LowerM > 1 || logopt.Enabled() {
defer func() {
if reason != "" {
- if Flag.LowerM > 1 {
+ if base.Flag.LowerM > 1 {
fmt.Printf("%v: cannot inline %v: %s\n", fn.Line(), fn.Func.Nname, reason)
}
if logopt.Enabled() {
}
// If marked "go:norace" and -race compilation, don't inline.
- if Flag.Race && fn.Func.Pragma&Norace != 0 {
+ if base.Flag.Race && fn.Func.Pragma&Norace != 0 {
reason = "marked go:norace with -race compilation"
return
}
// If marked "go:nocheckptr" and -d checkptr compilation, don't inline.
- if Debug.Checkptr != 0 && fn.Func.Pragma&NoCheckPtr != 0 {
+ if base.Debug.Checkptr != 0 && fn.Func.Pragma&NoCheckPtr != 0 {
reason = "marked go:nocheckptr"
return
}
}
if fn.Typecheck() == 0 {
- Fatalf("caninl on non-typechecked function %v", fn)
+ base.Fatalf("caninl on non-typechecked function %v", fn)
}
n := fn.Func.Nname
defer n.Func.SetInlinabilityChecked(true)
cc := int32(inlineExtraCallCost)
- if Flag.LowerL == 4 {
+ if base.Flag.LowerL == 4 {
cc = 1 // this appears to yield better performance than 0.
}
Body: inlcopylist(fn.Nbody.Slice()),
}
- if Flag.LowerM > 1 {
+ if base.Flag.LowerM > 1 {
fmt.Printf("%v: can inline %#v with cost %d as: %#v { %#v }\n", fn.Line(), n, inlineMaxBudget-visitor.budget, fn.Type, asNodes(n.Func.Inl.Body))
- } else if Flag.LowerM != 0 {
+ } else if base.Flag.LowerM != 0 {
fmt.Printf("%v: can inline %v\n", fn.Line(), n)
}
if logopt.Enabled() {
return
}
if n.Op != ONAME || n.Class() != PFUNC {
- Fatalf("inlFlood: unexpected %v, %v, %v", n, n.Op, n.Class())
+ base.Fatalf("inlFlood: unexpected %v, %v, %v", n, n.Op, n.Class())
}
if n.Func == nil {
- Fatalf("inlFlood: missing Func on %v", n)
+ base.Fatalf("inlFlood: missing Func on %v", n)
}
if n.Func.Inl == nil {
return
//
// When we do, we'll probably want:
// inlFlood(n.Func.Closure.Func.Nname)
- Fatalf("unexpected closure in inlinable function")
+ base.Fatalf("unexpected closure in inlinable function")
}
return true
})
case OCALLMETH:
t := n.Left.Type
if t == nil {
- Fatalf("no function type for [%p] %+v\n", n.Left, n.Left)
+ base.Fatalf("no function type for [%p] %+v\n", n.Left, n.Left)
}
if isRuntimePkg(n.Left.Sym.Pkg) {
fn := n.Left.Sym.Name
case OBREAK, OCONTINUE:
if n.Sym != nil {
// Should have short-circuited due to labeledControl above.
- Fatalf("unexpected labeled break/continue: %v", n)
+ base.Fatalf("unexpected labeled break/continue: %v", n)
}
case OIF:
v.budget--
// When debugging, don't stop early, to get full cost of inlining this function
- if v.budget < 0 && Flag.LowerM < 2 && !logopt.Enabled() {
+ if v.budget < 0 && base.Flag.LowerM < 2 && !logopt.Enabled() {
return true
}
m := n.copy()
if n.Op != OCALLPART && m.Func != nil {
- Fatalf("unexpected Func: %v", m)
+ base.Fatalf("unexpected Func: %v", m)
}
m.Left = inlcopy(n.Left)
m.Right = inlcopy(n.Right)
inlMap := make(map[*Node]bool)
fn = inlnode(fn, maxCost, inlMap)
if fn != Curfn {
- Fatalf("inlnode replaced curfn")
+ base.Fatalf("inlnode replaced curfn")
}
Curfn = savefn
}
// statements.
func inlconv2list(n *Node) []*Node {
if n.Op != OINLCALL || n.Rlist.Len() == 0 {
- Fatalf("inlconv2list %+v\n", n)
+ base.Fatalf("inlconv2list %+v\n", n)
}
s := n.Rlist.Slice()
case OCALLMETH:
// Prevent inlining some reflect.Value methods when using checkptr,
// even when package reflect was compiled without it (#35073).
- if s := n.Left.Sym; Debug.Checkptr != 0 && isReflectPkg(s.Pkg) && (s.Name == "Value.UnsafeAddr" || s.Name == "Value.Pointer") {
+ if s := n.Left.Sym; base.Debug.Checkptr != 0 && isReflectPkg(s.Pkg) && (s.Name == "Value.UnsafeAddr" || s.Name == "Value.Pointer") {
return n
}
}
switch n.Op {
case OCALLFUNC:
- if Flag.LowerM > 3 {
+ if base.Flag.LowerM > 3 {
fmt.Printf("%v:call to func %+v\n", n.Line(), n.Left)
}
if isIntrinsicCall(n) {
}
case OCALLMETH:
- if Flag.LowerM > 3 {
+ if base.Flag.LowerM > 3 {
fmt.Printf("%v:call to meth %L\n", n.Line(), n.Left.Right)
}
// typecheck should have resolved ODOTMETH->type, whose nname points to the actual function.
if n.Left.Type == nil {
- Fatalf("no function type for [%p] %+v\n", n.Left, n.Left)
+ base.Fatalf("no function type for [%p] %+v\n", n.Left, n.Left)
}
n = mkinlcall(n, n.Left.MethodName(), maxCost, inlMap)
}
- lineno = lno
+ base.Pos = lno
return n
}
break FindRHS
}
}
- Fatalf("%v missing from LHS of %v", n, defn)
+ base.Fatalf("%v missing from LHS of %v", n, defn)
default:
return nil
}
if rhs == nil {
- Fatalf("RHS is nil: %v", defn)
+ base.Fatalf("RHS is nil: %v", defn)
}
unsafe, _ := reassigned(n)
// TODO: handle initial declaration not including an assignment and followed by a single assignment?
func reassigned(n *Node) (bool, *Node) {
if n.Op != ONAME {
- Fatalf("reassigned %v", n)
+ base.Fatalf("reassigned %v", n)
}
// no way to reliably check for no-reassignment of globals, assume it can be
if n.Name.Curfn == nil {
inlvar := inlvars[n]
if inlvar == nil {
- Fatalf("missing inlvar for %v", n)
+ base.Fatalf("missing inlvar for %v", n)
}
as.Ninit.Append(nod(ODCL, inlvar, nil))
inlvar.Name.Defn = as
}
if inlMap[fn] {
- if Flag.LowerM > 1 {
+ if base.Flag.LowerM > 1 {
fmt.Printf("%v: cannot inline %v into %v: repeated recursive cycle\n", n.Line(), fn, Curfn.funcname())
}
return n
defer func() {
inlMap[fn] = false
}()
- if Debug.TypecheckInl == 0 {
+ if base.Debug.TypecheckInl == 0 {
typecheckinl(fn)
}
// We have a function node, and it has an inlineable body.
- if Flag.LowerM > 1 {
+ if base.Flag.LowerM > 1 {
fmt.Printf("%v: inlining call to %v %#v { %#v }\n", n.Line(), fn.Sym, fn.Type, asNodes(fn.Func.Inl.Body))
- } else if Flag.LowerM != 0 {
+ } else if base.Flag.LowerM != 0 {
fmt.Printf("%v: inlining call to %v\n", n.Line(), fn)
}
- if Flag.LowerM > 2 {
+ if base.Flag.LowerM > 2 {
fmt.Printf("%v: Before inlining: %+v\n", n.Line(), n)
}
callee = callee.Left
}
if callee.Op != ONAME && callee.Op != OCLOSURE && callee.Op != OMETHEXPR {
- Fatalf("unexpected callee expression: %v", callee)
+ base.Fatalf("unexpected callee expression: %v", callee)
}
}
// the reassigned check via some sort of copy propagation this would most
// likely need to be changed to a loop to walk up to the correct Param
if o == nil || (o.Name.Curfn != Curfn && o.Name.Curfn.Func.OClosure != Curfn) {
- Fatalf("%v: unresolvable capture %v %v\n", n.Line(), fn, v)
+ base.Fatalf("%v: unresolvable capture %v %v\n", n.Line(), fn, v)
}
if v.Name.Byval() {
// this never actually happens. We currently
// perform inlining before escape analysis, so
// nothing should have moved to the heap yet.
- Fatalf("impossible: %v", ln)
+ base.Fatalf("impossible: %v", ln)
}
inlf := typecheck(inlvar(ln), ctxExpr)
inlvars[ln] = inlf
- if Flag.GenDwarfInl > 0 {
+ if base.Flag.GenDwarfInl > 0 {
if ln.Class() == PPARAM {
inlf.Name.SetInlFormal(true)
} else {
m = retvar(t, i)
}
- if Flag.GenDwarfInl > 0 {
+ if base.Flag.GenDwarfInl > 0 {
// Don't update the src.Pos on a return variable if it
// was manufactured by the inliner (e.g. "~R2"); such vars
// were not part of the original callee.
as.SetColas(true)
if n.Op == OCALLMETH {
if n.Left.Left == nil {
- Fatalf("method call without receiver: %+v", n)
+ base.Fatalf("method call without receiver: %+v", n)
}
as.Rlist.Append(n.Left.Left)
}
inlgen++
parent := -1
- if b := Ctxt.PosTable.Pos(n.Pos).Base(); b != nil {
+ if b := base.Ctxt.PosTable.Pos(n.Pos).Base(); b != nil {
parent = b.InliningIndex()
}
- newIndex := Ctxt.InlTree.Add(parent, n.Pos, fn.Sym.Linksym())
+ newIndex := base.Ctxt.InlTree.Add(parent, n.Pos, fn.Sym.Linksym())
// Add an inline mark just before the inlined body.
// This mark is inline in the code so that it's a reasonable spot
inlMark.Xoffset = int64(newIndex)
ninit.Append(inlMark)
- if Flag.GenDwarfInl > 0 {
+ if base.Flag.GenDwarfInl > 0 {
if !fn.Sym.Linksym().WasInlined() {
- Ctxt.DwFixups.SetPrecursorFunc(fn.Sym.Linksym(), fn)
+ base.Ctxt.DwFixups.SetPrecursorFunc(fn.Sym.Linksym(), fn)
fn.Sym.Linksym().Set(obj.AttrWasInlined, true)
}
}
typecheckslice(body, ctxStmt)
- if Flag.GenDwarfInl > 0 {
+ if base.Flag.GenDwarfInl > 0 {
for _, v := range inlfvars {
v.Pos = subst.updatedPos(v.Pos)
}
}
}
- if Flag.LowerM > 2 {
+ if base.Flag.LowerM > 2 {
fmt.Printf("%v: After inlining %+v\n\n", call.Line(), call)
}
// PAUTO's in the calling functions, and link them off of the
// PPARAM's, PAUTOS and PPARAMOUTs of the called function.
func inlvar(var_ *Node) *Node {
- if Flag.LowerM > 3 {
+ if base.Flag.LowerM > 3 {
fmt.Printf("inlvar %+v\n", var_)
}
switch n.Op {
case ONAME:
if inlvar := subst.inlvars[n]; inlvar != nil { // These will be set during inlnode
- if Flag.LowerM > 2 {
+ if base.Flag.LowerM > 2 {
fmt.Printf("substituting name %+v -> %+v\n", n, inlvar)
}
return inlvar
}
- if Flag.LowerM > 2 {
+ if base.Flag.LowerM > 2 {
fmt.Printf("not substituting name %+v\n", n)
}
return n
m.Ninit.Set(nil)
if n.Op == OCLOSURE {
- Fatalf("cannot inline function containing closure: %+v", n)
+ base.Fatalf("cannot inline function containing closure: %+v", n)
}
m.Left = subst.node(n.Left)
}
func (subst *inlsubst) updatedPos(xpos src.XPos) src.XPos {
- pos := Ctxt.PosTable.Pos(xpos)
+ pos := base.Ctxt.PosTable.Pos(xpos)
oldbase := pos.Base() // can be nil
newbase := subst.bases[oldbase]
if newbase == nil {
subst.bases[oldbase] = newbase
}
pos.SetBase(newbase)
- return Ctxt.PosTable.XPos(pos)
+ return base.Ctxt.PosTable.XPos(pos)
}
func pruneUnusedAutos(ll []*Node, vis *hairyVisitor) []*Node {
x = typecheck(x, ctxExpr|ctxCallee)
switch x.Op {
case ODOTMETH:
- if Flag.LowerM != 0 {
- Warnl(call.Pos, "devirtualizing %v to %v", call.Left, typ)
+ if base.Flag.LowerM != 0 {
+ base.WarnfAt(call.Pos, "devirtualizing %v to %v", call.Left, typ)
}
call.Op = OCALLMETH
call.Left = x
case ODOTINTER:
// Promoted method from embedded interface-typed field (#42279).
- if Flag.LowerM != 0 {
- Warnl(call.Pos, "partially devirtualizing %v to %v", call.Left, typ)
+ if base.Flag.LowerM != 0 {
+ base.WarnfAt(call.Pos, "partially devirtualizing %v to %v", call.Left, typ)
}
call.Op = OCALLINTER
call.Left = x
default:
// TODO(mdempsky): Turn back into Fatalf after more testing.
- if Flag.LowerM != 0 {
- Warnl(call.Pos, "failed to devirtualize %v (%v)", x, x.Op)
+ if base.Flag.LowerM != 0 {
+ base.WarnfAt(call.Pos, "failed to devirtualize %v (%v)", x, x.Op)
}
return
}
package gc
import (
+ "cmd/compile/internal/base"
"cmd/compile/internal/syntax"
"cmd/internal/objabi"
"cmd/internal/src"
)
func makePos(b *src.PosBase, line, col uint) src.XPos {
- return Ctxt.PosTable.XPos(src.MakePos(b, line, col))
+ return base.Ctxt.PosTable.XPos(src.MakePos(b, line, col))
}
func isSpace(c rune) bool {
import (
"bufio"
"bytes"
+ "cmd/compile/internal/base"
"cmd/compile/internal/logopt"
"cmd/compile/internal/ssa"
"cmd/compile/internal/types"
)
func hidePanic() {
- if Debug.Panic == 0 && Errors() > 0 {
+ if base.Debug.Panic == 0 && base.Errors() > 0 {
// If we've already complained about things
// in the program, don't bother complaining
// about a panic too; let the user clean up
// the code and try again.
if err := recover(); err != nil {
- errorexit()
+ base.ErrorExit()
}
}
}
archInit(&thearch)
- Ctxt = obj.Linknew(thearch.LinkArch)
- Ctxt.DiagFunc = yyerror
- Ctxt.DiagFlush = flusherrors
- Ctxt.Bso = bufio.NewWriter(os.Stdout)
+ base.Ctxt = obj.Linknew(thearch.LinkArch)
+ base.Ctxt.DiagFunc = base.Errorf
+ base.Ctxt.DiagFlush = base.FlushErrors
+ base.Ctxt.Bso = bufio.NewWriter(os.Stdout)
// UseBASEntries is preferred because it shaves about 2% off build time, but LLDB, dsymutil, and dwarfdump
// on Darwin don't support it properly, especially since macOS 10.14 (Mojave). This is exposed as a flag
// to allow testing with LLVM tools on Linux, and to help with reporting this bug to the LLVM project.
// See bugs 31188 and 21945 (CLs 170638, 98075, 72371).
- Ctxt.UseBASEntries = Ctxt.Headtype != objabi.Hdarwin
+ base.Ctxt.UseBASEntries = base.Ctxt.Headtype != objabi.Hdarwin
localpkg = types.NewPkg("", "")
localpkg.Prefix = "\"\""
// pseudo-package used for methods with anonymous receivers
gopkg = types.NewPkg("go", "")
- DebugSSA = ssa.PhaseOption
- ParseFlags()
+ base.DebugSSA = ssa.PhaseOption
+ base.ParseFlags()
// Record flags that affect the build result. (And don't
// record flags that don't, since that would cause spurious
// changes in the binary.)
recordFlags("B", "N", "l", "msan", "race", "shared", "dynlink", "dwarflocationlists", "dwarfbasentries", "smallframes", "spectre")
- if !enableTrace && Flag.LowerT {
+ if !enableTrace && base.Flag.LowerT {
log.Fatalf("compiler not built with support for -t")
}
// default: inlining on. (Flag.LowerL == 1)
// -l: inlining off (Flag.LowerL == 0)
// -l=2, -l=3: inlining on again, with extra debugging (Flag.LowerL > 1)
- if Flag.LowerL <= 1 {
- Flag.LowerL = 1 - Flag.LowerL
+ if base.Flag.LowerL <= 1 {
+ base.Flag.LowerL = 1 - base.Flag.LowerL
}
- if Flag.SmallFrames {
+ if base.Flag.SmallFrames {
maxStackVarSize = 128 * 1024
maxImplicitStackVarSize = 16 * 1024
}
- if Flag.Dwarf {
- Ctxt.DebugInfo = debuginfo
- Ctxt.GenAbstractFunc = genAbstractFunc
- Ctxt.DwFixups = obj.NewDwarfFixupTable(Ctxt)
+ if base.Flag.Dwarf {
+ base.Ctxt.DebugInfo = debuginfo
+ base.Ctxt.GenAbstractFunc = genAbstractFunc
+ base.Ctxt.DwFixups = obj.NewDwarfFixupTable(base.Ctxt)
} else {
// turn off inline generation if no dwarf at all
- Flag.GenDwarfInl = 0
- Ctxt.Flag_locationlists = false
+ base.Flag.GenDwarfInl = 0
+ base.Ctxt.Flag_locationlists = false
}
- if Ctxt.Flag_locationlists && len(Ctxt.Arch.DWARFRegisters) == 0 {
- log.Fatalf("location lists requested but register mapping not available on %v", Ctxt.Arch.Name)
+ if base.Ctxt.Flag_locationlists && len(base.Ctxt.Arch.DWARFRegisters) == 0 {
+ log.Fatalf("location lists requested but register mapping not available on %v", base.Ctxt.Arch.Name)
}
checkLang()
- if Flag.SymABIs != "" {
- readSymABIs(Flag.SymABIs, Ctxt.Pkgpath)
+ if base.Flag.SymABIs != "" {
+ readSymABIs(base.Flag.SymABIs, base.Ctxt.Pkgpath)
}
if ispkgin(omit_pkgs) {
- Flag.Race = false
- Flag.MSan = false
+ base.Flag.Race = false
+ base.Flag.MSan = false
}
- thearch.LinkArch.Init(Ctxt)
+ thearch.LinkArch.Init(base.Ctxt)
startProfile()
- if Flag.Race {
+ if base.Flag.Race {
racepkg = types.NewPkg("runtime/race", "")
}
- if Flag.MSan {
+ if base.Flag.MSan {
msanpkg = types.NewPkg("runtime/msan", "")
}
- if Flag.Race || Flag.MSan {
+ if base.Flag.Race || base.Flag.MSan {
instrumenting = true
}
- if Flag.Dwarf {
- dwarf.EnableLogging(Debug.DwarfInl != 0)
+ if base.Flag.Dwarf {
+ dwarf.EnableLogging(base.Debug.DwarfInl != 0)
}
- if Debug.SoftFloat != 0 {
+ if base.Debug.SoftFloat != 0 {
thearch.SoftFloat = true
}
- if Flag.JSON != "" { // parse version,destination from json logging optimization.
- logopt.LogJsonOption(Flag.JSON)
+ if base.Flag.JSON != "" { // parse version,destination from json logging optimization.
+ logopt.LogJsonOption(base.Flag.JSON)
}
ssaDump = os.Getenv("GOSSAFUNC")
}
}
- trackScopes = Flag.Dwarf
+ trackScopes = base.Flag.Dwarf
Widthptr = thearch.LinkArch.PtrSize
Widthreg = thearch.LinkArch.RegSize
// would lead to import cycles)
types.Widthptr = Widthptr
types.Dowidth = dowidth
- types.Fatalf = Fatalf
+ types.Fatalf = base.Fatalf
types.Sconv = func(s *types.Sym, flag, mode int) string {
return sconv(s, FmtFlag(flag), fmtMode(mode))
}
types.FmtLeft = int(FmtLeft)
types.FmtUnsigned = int(FmtUnsigned)
types.FErr = int(FErr)
- types.Ctxt = Ctxt
+ types.Ctxt = base.Ctxt
initUniverse()
if n.Op == ODCLFUNC {
Curfn = n
decldepth = 1
- errorsBefore := Errors()
+ errorsBefore := base.Errors()
typecheckslice(Curfn.Nbody.Slice(), ctxStmt)
checkreturn(Curfn)
- if Errors() > errorsBefore {
+ if base.Errors() > errorsBefore {
Curfn.Nbody.Set(nil) // type errors; do not compile
}
// Now that we've checked whether n terminates,
// check past phase 9 isn't sufficient, as we may exit with other errors
// before then, thus skipping map key errors.
checkMapKeys()
- ExitIfErrors()
+ base.ExitIfErrors()
timings.AddEvent(fcount, "funcs")
}
capturevarscomplete = true
Curfn = nil
- ExitIfErrors()
+ base.ExitIfErrors()
// Phase 5: Inlining
timings.Start("fe", "inlining")
- if Debug.TypecheckInl != 0 {
+ if base.Debug.TypecheckInl != 0 {
// Typecheck imported function bodies if Debug.l > 1,
// otherwise lazily when used or re-exported.
for _, n := range importlist {
typecheckinl(n)
}
}
- ExitIfErrors()
+ base.ExitIfErrors()
}
- if Flag.LowerL != 0 {
+ if base.Flag.LowerL != 0 {
// Find functions that can be inlined and clone them before walk expands them.
visitBottomUp(xtop, func(list []*Node, recursive bool) {
numfns := numNonClosures(list)
// across more than one function.
caninl(n)
} else {
- if Flag.LowerM > 1 {
+ if base.Flag.LowerM > 1 {
fmt.Printf("%v: cannot inline %v: recursive\n", n.Line(), n.Func.Nname)
}
}
// checking. This must happen before transformclosure.
// We'll do the final check after write barriers are
// inserted.
- if Flag.CompilingRuntime {
+ if base.Flag.CompilingRuntime {
nowritebarrierrecCheck = newNowritebarrierrecChecker()
}
// Finalize DWARF inline routine DIEs, then explicitly turn off
// DWARF inlining gen so as to avoid problems with generated
// method wrappers.
- if Ctxt.DwFixups != nil {
- Ctxt.DwFixups.Finalize(Ctxt.Pkgpath, Debug.DwarfInl != 0)
- Ctxt.DwFixups = nil
- Flag.GenDwarfInl = 0
+ if base.Ctxt.DwFixups != nil {
+ base.Ctxt.DwFixups.Finalize(base.Ctxt.Pkgpath, base.Debug.DwarfInl != 0)
+ base.Ctxt.DwFixups = nil
+ base.Flag.GenDwarfInl = 0
}
// Phase 9: Check external declarations.
// Check the map keys again, since we typechecked the external
// declarations.
checkMapKeys()
- ExitIfErrors()
+ base.ExitIfErrors()
// Write object data to disk.
timings.Start("be", "dumpobj")
dumpdata()
- Ctxt.NumberSyms()
+ base.Ctxt.NumberSyms()
dumpobj()
- if Flag.AsmHdr != "" {
+ if base.Flag.AsmHdr != "" {
dumpasmhdr()
}
})
for _, large := range largeStackFrames {
if large.callee != 0 {
- yyerrorl(large.pos, "stack frame too large (>1GB): %d MB locals + %d MB args + %d MB callee", large.locals>>20, large.args>>20, large.callee>>20)
+ base.ErrorfAt(large.pos, "stack frame too large (>1GB): %d MB locals + %d MB args + %d MB callee", large.locals>>20, large.args>>20, large.callee>>20)
} else {
- yyerrorl(large.pos, "stack frame too large (>1GB): %d MB locals + %d MB args", large.locals>>20, large.args>>20)
+ base.ErrorfAt(large.pos, "stack frame too large (>1GB): %d MB locals + %d MB args", large.locals>>20, large.args>>20)
}
}
if len(funcStack) != 0 {
- Fatalf("funcStack is non-empty: %v", len(funcStack))
+ base.Fatalf("funcStack is non-empty: %v", len(funcStack))
}
if len(compilequeue) != 0 {
- Fatalf("%d uncompiled functions", len(compilequeue))
+ base.Fatalf("%d uncompiled functions", len(compilequeue))
}
- logopt.FlushLoggedOpts(Ctxt, Ctxt.Pkgpath)
- ExitIfErrors()
+ logopt.FlushLoggedOpts(base.Ctxt, base.Ctxt.Pkgpath)
+ base.ExitIfErrors()
- flusherrors()
+ base.FlushErrors()
timings.Stop()
- if Flag.Bench != "" {
- if err := writebench(Flag.Bench); err != nil {
+ if base.Flag.Bench != "" {
+ if err := writebench(base.Flag.Bench); err != nil {
log.Fatalf("cannot write benchmark data: %v", err)
}
}
fmt.Fprintln(&buf, "commit:", objabi.Version)
fmt.Fprintln(&buf, "goos:", runtime.GOOS)
fmt.Fprintln(&buf, "goarch:", runtime.GOARCH)
- timings.Write(&buf, "BenchmarkCompile:"+Ctxt.Pkgpath+":")
+ timings.Write(&buf, "BenchmarkCompile:"+base.Ctxt.Pkgpath+":")
n, err := f.Write(buf.Bytes())
if err != nil {
func findpkg(name string) (file string, ok bool) {
if islocalname(name) {
- if Flag.NoLocalImports {
+ if base.Flag.NoLocalImports {
return "", false
}
- if Flag.Cfg.PackageFile != nil {
- file, ok = Flag.Cfg.PackageFile[name]
+ if base.Flag.Cfg.PackageFile != nil {
+ file, ok = base.Flag.Cfg.PackageFile[name]
return file, ok
}
// don't want to see "encoding/../encoding/base64"
// as different from "encoding/base64".
if q := path.Clean(name); q != name {
- yyerror("non-canonical import path %q (should be %q)", name, q)
+ base.Errorf("non-canonical import path %q (should be %q)", name, q)
return "", false
}
- if Flag.Cfg.PackageFile != nil {
- file, ok = Flag.Cfg.PackageFile[name]
+ if base.Flag.Cfg.PackageFile != nil {
+ file, ok = base.Flag.Cfg.PackageFile[name]
return file, ok
}
- for _, dir := range Flag.Cfg.ImportDirs {
+ for _, dir := range base.Flag.Cfg.ImportDirs {
file = fmt.Sprintf("%s/%s.a", dir, name)
if _, err := os.Stat(file); err == nil {
return file, true
if objabi.GOROOT != "" {
suffix := ""
suffixsep := ""
- if Flag.InstallSuffix != "" {
+ if base.Flag.InstallSuffix != "" {
suffixsep = "_"
- suffix = Flag.InstallSuffix
- } else if Flag.Race {
+ suffix = base.Flag.InstallSuffix
+ } else if base.Flag.Race {
suffixsep = "_"
suffix = "race"
- } else if Flag.MSan {
+ } else if base.Flag.MSan {
suffixsep = "_"
suffix = "msan"
}
case varTag:
importvar(Runtimepkg, src.NoXPos, sym, typ)
default:
- Fatalf("unhandled declaration tag %v", d.tag)
+ base.Fatalf("unhandled declaration tag %v", d.tag)
}
}
func importfile(f constant.Value) *types.Pkg {
if f.Kind() != constant.String {
- yyerror("import path must be a string")
+ base.Errorf("import path must be a string")
return nil
}
path_ := constant.StringVal(f)
if len(path_) == 0 {
- yyerror("import path is empty")
+ base.Errorf("import path is empty")
return nil
}
// the main package, just as we reserve the import
// path "math" to identify the standard math package.
if path_ == "main" {
- yyerror("cannot import \"main\"")
- errorexit()
+ base.Errorf("cannot import \"main\"")
+ base.ErrorExit()
}
- if Ctxt.Pkgpath != "" && path_ == Ctxt.Pkgpath {
- yyerror("import %q while compiling that package (import cycle)", path_)
- errorexit()
+ if base.Ctxt.Pkgpath != "" && path_ == base.Ctxt.Pkgpath {
+ base.Errorf("import %q while compiling that package (import cycle)", path_)
+ base.ErrorExit()
}
- if mapped, ok := Flag.Cfg.ImportMap[path_]; ok {
+ if mapped, ok := base.Flag.Cfg.ImportMap[path_]; ok {
path_ = mapped
}
if islocalname(path_) {
if path_[0] == '/' {
- yyerror("import path cannot be absolute path")
+ base.Errorf("import path cannot be absolute path")
return nil
}
- prefix := Ctxt.Pathname
- if Flag.D != "" {
- prefix = Flag.D
+ prefix := base.Ctxt.Pathname
+ if base.Flag.D != "" {
+ prefix = base.Flag.D
}
path_ = path.Join(prefix, path_)
file, found := findpkg(path_)
if !found {
- yyerror("can't find import: %q", path_)
- errorexit()
+ base.Errorf("can't find import: %q", path_)
+ base.ErrorExit()
}
importpkg := types.NewPkg(path_, "")
imp, err := bio.Open(file)
if err != nil {
- yyerror("can't open import: %q: %v", path_, err)
- errorexit()
+ base.Errorf("can't open import: %q: %v", path_, err)
+ base.ErrorExit()
}
defer imp.Close()
// check object header
p, err := imp.ReadString('\n')
if err != nil {
- yyerror("import %s: reading input: %v", file, err)
- errorexit()
+ base.Errorf("import %s: reading input: %v", file, err)
+ base.ErrorExit()
}
if p == "!<arch>\n" { // package archive
// package export block should be first
sz := arsize(imp.Reader, "__.PKGDEF")
if sz <= 0 {
- yyerror("import %s: not a package file", file)
- errorexit()
+ base.Errorf("import %s: not a package file", file)
+ base.ErrorExit()
}
p, err = imp.ReadString('\n')
if err != nil {
- yyerror("import %s: reading input: %v", file, err)
- errorexit()
+ base.Errorf("import %s: reading input: %v", file, err)
+ base.ErrorExit()
}
}
if !strings.HasPrefix(p, "go object ") {
- yyerror("import %s: not a go object file: %s", file, p)
- errorexit()
+ base.Errorf("import %s: not a go object file: %s", file, p)
+ base.ErrorExit()
}
q := fmt.Sprintf("%s %s %s %s\n", objabi.GOOS, objabi.GOARCH, objabi.Version, objabi.Expstring())
if p[10:] != q {
- yyerror("import %s: object is [%s] expected [%s]", file, p[10:], q)
- errorexit()
+ base.Errorf("import %s: object is [%s] expected [%s]", file, p[10:], q)
+ base.ErrorExit()
}
// process header lines
for {
p, err = imp.ReadString('\n')
if err != nil {
- yyerror("import %s: reading input: %v", file, err)
- errorexit()
+ base.Errorf("import %s: reading input: %v", file, err)
+ base.ErrorExit()
}
if p == "\n" {
break // header ends with blank line
var fingerprint goobj.FingerprintType
switch c {
case '\n':
- yyerror("cannot import %s: old export format no longer supported (recompile library)", path_)
+ base.Errorf("cannot import %s: old export format no longer supported (recompile library)", path_)
return nil
case 'B':
- if Debug.Export != 0 {
+ if base.Debug.Export != 0 {
fmt.Printf("importing %s (%s)\n", path_, file)
}
imp.ReadByte() // skip \n after $$B
c, err = imp.ReadByte()
if err != nil {
- yyerror("import %s: reading input: %v", file, err)
- errorexit()
+ base.Errorf("import %s: reading input: %v", file, err)
+ base.ErrorExit()
}
// Indexed format is distinguished by an 'i' byte,
// whereas previous export formats started with 'c', 'd', or 'v'.
if c != 'i' {
- yyerror("import %s: unexpected package format byte: %v", file, c)
- errorexit()
+ base.Errorf("import %s: unexpected package format byte: %v", file, c)
+ base.ErrorExit()
}
fingerprint = iimport(importpkg, imp)
default:
- yyerror("no import in %q", path_)
- errorexit()
+ base.Errorf("no import in %q", path_)
+ base.ErrorExit()
}
// assume files move (get installed) so don't record the full path
- if Flag.Cfg.PackageFile != nil {
+ if base.Flag.Cfg.PackageFile != nil {
// If using a packageFile map, assume path_ can be recorded directly.
- Ctxt.AddImport(path_, fingerprint)
+ base.Ctxt.AddImport(path_, fingerprint)
} else {
// For file "/Users/foo/go/pkg/darwin_amd64/math.a" record "math.a".
- Ctxt.AddImport(file[len(file)-len(path_)-len(".a"):], fingerprint)
+ base.Ctxt.AddImport(file[len(file)-len(path_)-len(".a"):], fingerprint)
}
if importpkg.Height >= myheight {
elem = elem[i+1:]
}
if name == "" || elem == name {
- yyerrorl(lineno, "imported and not used: %q", path)
+ base.ErrorfAt(lineno, "imported and not used: %q", path)
} else {
- yyerrorl(lineno, "imported and not used: %q as %s", path, name)
+ base.ErrorfAt(lineno, "imported and not used: %q as %s", path, name)
}
}
func mkpackage(pkgname string) {
if localpkg.Name == "" {
if pkgname == "_" {
- yyerror("invalid package name _")
+ base.Errorf("invalid package name _")
}
localpkg.Name = pkgname
} else {
if pkgname != localpkg.Name {
- yyerror("package %s; expected %s", pkgname, localpkg.Name)
+ base.Errorf("package %s; expected %s", pkgname, localpkg.Name)
}
}
}
// leave s->block set to cause redeclaration
// errors if a conflicting top-level name is
// introduced by a different file.
- if !n.Name.Used() && SyntaxErrors() == 0 {
+ if !n.Name.Used() && base.SyntaxErrors() == 0 {
unused = append(unused, importedPkg{n.Pos, n.Name.Pkg.Path, s.Name})
}
s.Def = nil
if IsAlias(s) {
// throw away top-level name left over
// from previous import . "x"
- if n.Name != nil && n.Name.Pack != nil && !n.Name.Pack.Name.Used() && SyntaxErrors() == 0 {
+ if n.Name != nil && n.Name.Pack != nil && !n.Name.Pack.Name.Used() && base.SyntaxErrors() == 0 {
unused = append(unused, importedPkg{n.Name.Pack.Pos, n.Name.Pack.Name.Pkg.Path, ""})
n.Name.Pack.Name.SetUsed(true)
}
// recordFlags records the specified command-line flags to be placed
// in the DWARF info.
func recordFlags(flags ...string) {
- if Ctxt.Pkgpath == "" {
+ if base.Ctxt.Pkgpath == "" {
// We can't record the flags if we don't know what the
// package name is.
return
if cmd.Len() == 0 {
return
}
- s := Ctxt.Lookup(dwarf.CUInfoPrefix + "producer." + Ctxt.Pkgpath)
+ s := base.Ctxt.Lookup(dwarf.CUInfoPrefix + "producer." + base.Ctxt.Pkgpath)
s.Type = objabi.SDWARFCUINFO
// Sometimes (for example when building tests) we can link
// together two package main archives. So allow dups.
s.Set(obj.AttrDuplicateOK, true)
- Ctxt.Data = append(Ctxt.Data, s)
+ base.Ctxt.Data = append(base.Ctxt.Data, s)
s.P = cmd.Bytes()[1:]
}
// recordPackageName records the name of the package being
// compiled, so that the linker can save it in the compile unit's DIE.
func recordPackageName() {
- s := Ctxt.Lookup(dwarf.CUInfoPrefix + "packagename." + Ctxt.Pkgpath)
+ s := base.Ctxt.Lookup(dwarf.CUInfoPrefix + "packagename." + base.Ctxt.Pkgpath)
s.Type = objabi.SDWARFCUINFO
// Sometimes (for example when building tests) we can link
// together two package main archives. So allow dups.
s.Set(obj.AttrDuplicateOK, true)
- Ctxt.Data = append(Ctxt.Data, s)
+ base.Ctxt.Data = append(base.Ctxt.Data, s)
s.P = []byte(localpkg.Name)
}
// checkLang verifies that the -lang flag holds a valid value, and
// exits if not. It initializes data used by langSupported.
func checkLang() {
- if Flag.Lang == "" {
+ if base.Flag.Lang == "" {
return
}
var err error
- langWant, err = parseLang(Flag.Lang)
+ langWant, err = parseLang(base.Flag.Lang)
if err != nil {
- log.Fatalf("invalid value %q for -lang: %v", Flag.Lang, err)
+ log.Fatalf("invalid value %q for -lang: %v", base.Flag.Lang, err)
}
- if def := currentLang(); Flag.Lang != def {
+ if def := currentLang(); base.Flag.Lang != def {
defVers, err := parseLang(def)
if err != nil {
log.Fatalf("internal error parsing default lang %q: %v", def, err)
}
if langWant.major > defVers.major || (langWant.major == defVers.major && langWant.minor > defVers.minor) {
- log.Fatalf("invalid value %q for -lang: max known version is %q", Flag.Lang, def)
+ log.Fatalf("invalid value %q for -lang: max known version is %q", base.Flag.Lang, def)
}
}
}
"unicode"
"unicode/utf8"
+ "cmd/compile/internal/base"
"cmd/compile/internal/syntax"
"cmd/compile/internal/types"
"cmd/internal/obj"
var lines uint
for _, p := range noders {
for e := range p.err {
- p.yyerrorpos(e.Pos, "%s", e.Msg)
+ p.errorAt(e.Pos, "%s", e.Msg)
}
p.node()
lines += p.file.Lines
p.file = nil // release memory
- if SyntaxErrors() != 0 {
- errorexit()
+ if base.SyntaxErrors() != 0 {
+ base.ErrorExit()
}
// Always run testdclstack here, even when debug_dclstack is not set, as a sanity measure.
testdclstack()
}
func (p *noder) makeXPos(pos syntax.Pos) (_ src.XPos) {
- return Ctxt.PosTable.XPos(src.MakePos(p.makeSrcPosBase(pos.Base()), pos.Line(), pos.Col()))
+ return base.Ctxt.PosTable.XPos(src.MakePos(p.makeSrcPosBase(pos.Base()), pos.Line(), pos.Col()))
}
-func (p *noder) yyerrorpos(pos syntax.Pos, format string, args ...interface{}) {
- yyerrorl(p.makeXPos(pos), format, args...)
+func (p *noder) errorAt(pos syntax.Pos, format string, args ...interface{}) {
+ base.ErrorfAt(p.makeXPos(pos), format, args...)
}
// TODO(gri) Can we eliminate fileh in favor of absFilename?
func fileh(name string) string {
- return objabi.AbsFile("", name, Flag.TrimPath)
+ return objabi.AbsFile("", name, base.Flag.TrimPath)
}
func absFilename(name string) string {
- return objabi.AbsFile(Ctxt.Pathname, name, Flag.TrimPath)
+ return objabi.AbsFile(base.Ctxt.Pathname, name, base.Flag.TrimPath)
}
// noder transforms package syntax's AST into a Node tree.
}
fn.Nbody.Set(body)
- lineno = p.makeXPos(block.Rbrace)
- fn.Func.Endlineno = lineno
+ base.Pos = p.makeXPos(block.Rbrace)
+ fn.Func.Endlineno = base.Pos
}
funcbody()
// no variables were declared in this scope, so we can retract it.
if int(p.scope) != len(Curfn.Func.Parents) {
- Fatalf("scope tracking inconsistency, no variables declared but scopes were not retracted")
+ base.Fatalf("scope tracking inconsistency, no variables declared but scopes were not retracted")
}
p.scope = Curfn.Func.Parents[p.scope-1]
for _, n := range p.linknames {
if !p.importedUnsafe {
- p.yyerrorpos(n.pos, "//go:linkname only allowed in Go files that import \"unsafe\"")
+ p.errorAt(n.pos, "//go:linkname only allowed in Go files that import \"unsafe\"")
continue
}
s := lookup(n.local)
} else {
// Use the default object symbol name if the
// user didn't provide one.
- if Ctxt.Pkgpath == "" {
- p.yyerrorpos(n.pos, "//go:linkname requires linkname argument or -p compiler flag")
+ if base.Ctxt.Pkgpath == "" {
+ p.errorAt(n.pos, "//go:linkname requires linkname argument or -p compiler flag")
} else {
- s.Linkname = objabi.PathToPrefix(Ctxt.Pkgpath) + "." + n.local
+ s.Linkname = objabi.PathToPrefix(base.Ctxt.Pkgpath) + "." + n.local
}
}
}
}
pragcgobuf = append(pragcgobuf, p.pragcgobuf...)
- lineno = src.NoXPos
+ base.Pos = src.NoXPos
clearImports()
}
ipkg := importfile(p.basicLit(imp.Path))
if ipkg == nil {
- if Errors() == 0 {
- Fatalf("phase error in import")
+ if base.Errors() == 0 {
+ base.Fatalf("phase error in import")
}
return
}
importdot(ipkg, pack)
return
case "init":
- yyerrorl(pack.Pos, "cannot import package as init - init must be a func")
+ base.ErrorfAt(pack.Pos, "cannot import package as init - init must be a func")
return
case "_":
return
// so at that point it hasn't seen the imports.
// We're left to check now, just before applying the //go:embed lines.
for _, e := range pragma.Embeds {
- p.yyerrorpos(e.Pos, "//go:embed only allowed in Go files that import \"embed\"")
+ p.errorAt(e.Pos, "//go:embed only allowed in Go files that import \"embed\"")
}
} else {
exprs = varEmbed(p, names, typ, exprs, pragma.Embeds)
cs.typ, cs.values = typ, values
} else {
if typ != nil {
- yyerror("const declaration cannot have type without expression")
+ base.Errorf("const declaration cannot have type without expression")
}
typ, values = cs.typ, cs.values
}
nn := make([]*Node, 0, len(names))
for i, n := range names {
if i >= len(values) {
- yyerror("missing value in const declaration")
+ base.Errorf("missing value in const declaration")
break
}
v := values[i]
}
if len(values) > len(names) {
- yyerror("extra expression in const declaration")
+ base.Errorf("extra expression in const declaration")
}
cs.iota++
nod := p.nod(decl, ODCLTYPE, n, nil)
if param.Alias() && !langSupported(1, 9, localpkg) {
- yyerrorl(nod.Pos, "type aliases only supported as of -lang=go1.9")
+ base.ErrorfAt(nod.Pos, "type aliases only supported as of -lang=go1.9")
}
return nod
}
if name.Name == "init" {
name = renameinit()
if t.List.Len() > 0 || t.Rlist.Len() > 0 {
- yyerrorl(f.Pos, "func init must have no arguments and no return values")
+ base.ErrorfAt(f.Pos, "func init must have no arguments and no return values")
}
}
if localpkg.Name == "main" && name.Name == "main" {
if t.List.Len() > 0 || t.Rlist.Len() > 0 {
- yyerrorl(f.Pos, "func main must have no arguments and no return values")
+ base.ErrorfAt(f.Pos, "func main must have no arguments and no return values")
}
}
} else {
if pragma, ok := fun.Pragma.(*Pragma); ok {
f.Func.Pragma = pragma.Flag & FuncPragmas
if pragma.Flag&Systemstack != 0 && pragma.Flag&Nosplit != 0 {
- yyerrorl(f.Pos, "go:nosplit and go:systemstack cannot be combined")
+ base.ErrorfAt(f.Pos, "go:nosplit and go:systemstack cannot be combined")
}
pragma.Flag &^= FuncPragmas
p.checkUnused(pragma)
if fun.Body != nil {
if f.Func.Pragma&Noescape != 0 {
- yyerrorl(f.Pos, "can only use //go:noescape with external func implementations")
+ base.ErrorfAt(f.Pos, "can only use //go:noescape with external func implementations")
}
} else {
- if Flag.Complete || strings.HasPrefix(f.funcname(), "init.") {
+ if base.Flag.Complete || strings.HasPrefix(f.funcname(), "init.") {
// Linknamed functions are allowed to have no body. Hopefully
// the linkname target has a body. See issue 23311.
isLinknamed := false
}
}
if !isLinknamed {
- yyerrorl(f.Pos, "missing function body")
+ base.ErrorfAt(f.Pos, "missing function body")
}
}
}
if typ.Op == ODDD {
if !dddOk {
// We mark these as syntax errors to get automatic elimination
- // of multiple such errors per line (see yyerrorl in subr.go).
- yyerror("syntax error: cannot use ... in receiver or result parameter list")
+ // of multiple such errors per line (see base.ErrorfAt).
+ base.Errorf("syntax error: cannot use ... in receiver or result parameter list")
} else if !final {
if param.Name == nil {
- yyerror("syntax error: cannot use ... with non-final parameter")
+ base.Errorf("syntax error: cannot use ... with non-final parameter")
} else {
- p.yyerrorpos(param.Name.Pos(), "syntax error: cannot use ... with non-final parameter %s", param.Name.Value)
+ p.errorAt(param.Name.Pos(), "syntax error: cannot use ... with non-final parameter %s", param.Name.Value)
}
}
typ.Op = OTARRAY
l[i] = p.wrapname(expr.ElemList[i], e)
}
n.List.Set(l)
- lineno = p.makeXPos(expr.Rbrace)
+ base.Pos = p.makeXPos(expr.Rbrace)
return n
case *syntax.KeyValueExpr:
// use position of expr.Key rather than of expr (which has position of ':')
if expr.Lhs != nil {
n.Left = p.declName(expr.Lhs)
if n.Left.isBlank() {
- yyerror("invalid variable name %v in type switch", n.Left)
+ base.Errorf("invalid variable name %v in type switch", n.Left)
}
}
return n
name := p.name(expr.X.(*syntax.Name))
def := asNode(name.Def)
if def == nil {
- yyerror("undefined: %v", name)
+ base.Errorf("undefined: %v", name)
return name
}
var pkg *types.Pkg
if def.Op != OPACK {
- yyerror("%v is not a package", name)
+ base.Errorf("%v is not a package", name)
pkg = localpkg
} else {
def.Name.SetUsed(true)
op = OCONTINUE
case syntax.Fallthrough:
if !fallOK {
- yyerror("fallthrough statement out of place")
+ base.Errorf("fallthrough statement out of place")
}
op = OFALL
case syntax.Goto:
break
}
if asNode(ln.Sym.Def) != ln {
- yyerror("%s is shadowed during return", ln.Sym.Name)
+ base.Errorf("%s is shadowed during return", ln.Sym.Name)
}
}
}
name, ok := expr.(*syntax.Name)
if !ok {
- p.yyerrorpos(expr.Pos(), "non-name %v on left side of :=", p.expr(expr))
+ p.errorAt(expr.Pos(), "non-name %v on left side of :=", p.expr(expr))
newOrErr = true
continue
}
}
if seen[sym] {
- p.yyerrorpos(expr.Pos(), "%v repeated on left side of :=", sym)
+ p.errorAt(expr.Pos(), "%v repeated on left side of :=", sym)
newOrErr = true
continue
}
}
if !newOrErr {
- yyerrorl(defn.Pos, "no new variables on left side of :=")
+ base.ErrorfAt(defn.Pos, "no new variables on left side of :=")
}
return res
}
n.Nbody.Set(p.stmtsFall(body, true))
if l := n.Nbody.Len(); l > 0 && n.Nbody.Index(l-1).Op == OFALL {
if tswitch != nil {
- yyerror("cannot fallthrough in type switch")
+ base.Errorf("cannot fallthrough in type switch")
}
if i+1 == len(clauses) {
- yyerror("cannot fallthrough final case in switch")
+ base.Errorf("cannot fallthrough final case in switch")
}
}
}
// len(s) > 2
if strings.Contains(s, "_") {
- yyerrorv("go1.13", "underscores in numeric literals")
+ base.ErrorfVers("go1.13", "underscores in numeric literals")
return
}
if s[0] != '0' {
}
radix := s[1]
if radix == 'b' || radix == 'B' {
- yyerrorv("go1.13", "binary literals")
+ base.ErrorfVers("go1.13", "binary literals")
return
}
if radix == 'o' || radix == 'O' {
- yyerrorv("go1.13", "0o/0O-style octal literals")
+ base.ErrorfVers("go1.13", "0o/0O-style octal literals")
return
}
if lit.Kind != syntax.IntLit && (radix == 'x' || radix == 'X') {
- yyerrorv("go1.13", "hexadecimal floating-point literals")
+ base.ErrorfVers("go1.13", "hexadecimal floating-point literals")
}
}
v := constant.MakeFromLiteral(lit.Value, tokenForLitKind[lit.Kind], 0)
if v.Kind() == constant.Unknown {
// TODO(mdempsky): Better error message?
- p.yyerrorpos(lit.Pos(), "malformed constant: %s", lit.Value)
+ p.errorAt(lit.Pos(), "malformed constant: %s", lit.Value)
}
// go/constant uses big.Rat by default, which is more precise, but
func (p *noder) pos(n syntax.Node) src.XPos {
// TODO(gri): orig.Pos() should always be known - fix package syntax
- xpos := lineno
+ xpos := base.Pos
if pos := n.Pos(); pos.IsKnown() {
xpos = p.makeXPos(pos)
}
func (p *noder) setlineno(n syntax.Node) {
if n != nil {
- lineno = p.pos(n)
+ base.Pos = p.pos(n)
}
}
func (p *noder) checkUnused(pragma *Pragma) {
for _, pos := range pragma.Pos {
if pos.Flag&pragma.Flag != 0 {
- p.yyerrorpos(pos.Pos, "misplaced compiler directive")
+ p.errorAt(pos.Pos, "misplaced compiler directive")
}
}
if len(pragma.Embeds) > 0 {
for _, e := range pragma.Embeds {
- p.yyerrorpos(e.Pos, "misplaced go:embed directive")
+ p.errorAt(e.Pos, "misplaced go:embed directive")
}
}
}
// For security, we disallow //go:cgo_* directives other
// than cgo_import_dynamic outside cgo-generated files.
// Exception: they are allowed in the standard library, for runtime and syscall.
- if !isCgoGeneratedFile(pos) && !Flag.Std {
+ if !isCgoGeneratedFile(pos) && !base.Flag.Std {
p.error(syntax.Error{Pos: pos, Msg: fmt.Sprintf("//%s only allowed in cgo-generated code", text)})
}
p.pragcgo(pos, text)
}
flag := pragmaFlag(verb)
const runtimePragmas = Systemstack | Nowritebarrier | Nowritebarrierrec | Yeswritebarrierrec
- if !Flag.CompilingRuntime && flag&runtimePragmas != 0 {
+ if !base.Flag.CompilingRuntime && flag&runtimePragmas != 0 {
p.error(syntax.Error{Pos: pos, Msg: fmt.Sprintf("//%s only allowed in runtime", verb)})
}
- if flag == 0 && !allowedStdPragmas[verb] && Flag.Std {
+ if flag == 0 && !allowedStdPragmas[verb] && base.Flag.Std {
p.error(syntax.Error{Pos: pos, Msg: fmt.Sprintf("//%s is not allowed in the standard library", verb)})
}
pragma.Flag |= flag
package gc
import (
+ "cmd/compile/internal/base"
"cmd/compile/internal/types"
"cmd/internal/bio"
"cmd/internal/obj"
)
func dumpobj() {
- if Flag.LinkObj == "" {
- dumpobj1(Flag.LowerO, modeCompilerObj|modeLinkerObj)
+ if base.Flag.LinkObj == "" {
+ dumpobj1(base.Flag.LowerO, modeCompilerObj|modeLinkerObj)
return
}
- dumpobj1(Flag.LowerO, modeCompilerObj)
- dumpobj1(Flag.LinkObj, modeLinkerObj)
+ dumpobj1(base.Flag.LowerO, modeCompilerObj)
+ dumpobj1(base.Flag.LinkObj, modeLinkerObj)
}
func dumpobj1(outfile string, mode int) {
bout, err := bio.Create(outfile)
if err != nil {
- flusherrors()
+ base.FlushErrors()
fmt.Printf("can't create %s: %v\n", outfile, err)
- errorexit()
+ base.ErrorExit()
}
defer bout.Close()
bout.WriteString("!<arch>\n")
func printObjHeader(bout *bio.Writer) {
fmt.Fprintf(bout, "go object %s %s %s %s\n", objabi.GOOS, objabi.GOARCH, objabi.Version, objabi.Expstring())
- if Flag.BuildID != "" {
- fmt.Fprintf(bout, "build id %q\n", Flag.BuildID)
+ if base.Flag.BuildID != "" {
+ fmt.Fprintf(bout, "build id %q\n", base.Flag.BuildID)
}
if localpkg.Name == "main" {
fmt.Fprintf(bout, "main\n")
addGCLocals()
if exportlistLen != len(exportlist) {
- Fatalf("exportlist changed after compile functions loop")
+ base.Fatalf("exportlist changed after compile functions loop")
}
if ptabsLen != len(ptabs) {
- Fatalf("ptabs changed after compile functions loop")
+ base.Fatalf("ptabs changed after compile functions loop")
}
if itabsLen != len(itabs) {
- Fatalf("itabs changed after compile functions loop")
+ base.Fatalf("itabs changed after compile functions loop")
}
}
fmt.Fprintf(bout, "\n$$\n\n$$\n\n")
fmt.Fprintf(bout, "\n$$ // cgo\n")
if err := json.NewEncoder(bout).Encode(pragcgobuf); err != nil {
- Fatalf("serializing pragcgobuf: %v", err)
+ base.Fatalf("serializing pragcgobuf: %v", err)
}
fmt.Fprintf(bout, "\n$$\n\n")
}
fmt.Fprintf(bout, "\n!\n")
- obj.WriteObjFile(Ctxt, bout)
+ obj.WriteObjFile(base.Ctxt, bout)
}
func addptabs() {
- if !Ctxt.Flag_dynlink || localpkg.Name != "main" {
+ if !base.Ctxt.Flag_dynlink || localpkg.Name != "main" {
return
}
for _, exportn := range exportlist {
func dumpGlobal(n *Node) {
if n.Type == nil {
- Fatalf("external %v nil type\n", n)
+ base.Fatalf("external %v nil type\n", n)
}
if n.Class() == PFUNC {
return
return
}
}
- Ctxt.DwarfIntConst(Ctxt.Pkgpath, n.Sym.Name, typesymname(t), int64Val(t, v))
+ base.Ctxt.DwarfIntConst(base.Ctxt.Pkgpath, n.Sym.Name, typesymname(t), int64Val(t, v))
}
func dumpglobls() {
// This is done during the sequential phase after compilation, since
// global symbols can't be declared during parallel compilation.
func addGCLocals() {
- for _, s := range Ctxt.Text {
+ for _, s := range base.Ctxt.Text {
fn := s.Func()
if fn == nil {
continue
func duintxx(s *obj.LSym, off int, v uint64, wid int) int {
if off&(wid-1) != 0 {
- Fatalf("duintxxLSym: misaligned: v=%d wid=%d off=%d", v, wid, off)
+ base.Fatalf("duintxx: misaligned: v=%d wid=%d off=%d", v, wid, off)
}
- s.WriteInt(Ctxt, int64(off), wid, int64(v))
+ s.WriteInt(base.Ctxt, int64(off), wid, int64(v))
return off + wid
}
symname = strconv.Quote(s)
}
- symdata := Ctxt.Lookup(stringSymPrefix + symname)
+ symdata := base.Ctxt.Lookup(stringSymPrefix + symname)
if !symdata.OnList() {
off := dstringdata(symdata, 0, s, pos, "string")
ggloblsym(symdata, int32(off), obj.DUPOK|obj.RODATA|obj.LOCAL)
var symdata *obj.LSym
if readonly {
symname := fmt.Sprintf(stringSymPattern, size, sum)
- symdata = Ctxt.Lookup(stringSymPrefix + symname)
+ symdata = base.Ctxt.Lookup(stringSymPrefix + symname)
if !symdata.OnList() {
info := symdata.NewFileInfo()
info.Name = file
func slicebytes(nam *Node, s string) {
if nam.Op != ONAME {
- Fatalf("slicebytes %v", nam)
+ base.Fatalf("slicebytes %v", nam)
}
slicesym(nam, slicedata(nam.Pos, s), int64(len(s)))
}
// causing a cryptic error message by the linker. Check for oversize objects here
// and provide a useful error message instead.
if int64(len(t)) > 2e9 {
- yyerrorl(pos, "%v with length %v is too big", what, len(t))
+ base.ErrorfAt(pos, "%v with length %v is too big", what, len(t))
return 0
}
- s.WriteString(Ctxt, int64(off), len(t), t)
+ s.WriteString(base.Ctxt, int64(off), len(t), t)
return off + len(t)
}
func dsymptr(s *obj.LSym, off int, x *obj.LSym, xoff int) int {
off = int(Rnd(int64(off), int64(Widthptr)))
- s.WriteAddr(Ctxt, int64(off), Widthptr, x, int64(xoff))
+ s.WriteAddr(base.Ctxt, int64(off), Widthptr, x, int64(xoff))
off += Widthptr
return off
}
func dsymptrOff(s *obj.LSym, off int, x *obj.LSym) int {
- s.WriteOff(Ctxt, int64(off), x, 0)
+ s.WriteOff(base.Ctxt, int64(off), x, 0)
off += 4
return off
}
func dsymptrWeakOff(s *obj.LSym, off int, x *obj.LSym) int {
- s.WriteWeakOff(Ctxt, int64(off), x, 0)
+ s.WriteWeakOff(base.Ctxt, int64(off), x, 0)
off += 4
return off
}
s := n.Sym.Linksym()
off := n.Xoffset
if arr.Op != ONAME {
- Fatalf("slicesym non-name arr %v", arr)
+ base.Fatalf("slicesym non-name arr %v", arr)
}
- s.WriteAddr(Ctxt, off, Widthptr, arr.Sym.Linksym(), arr.Xoffset)
- s.WriteInt(Ctxt, off+sliceLenOffset, Widthptr, lencap)
- s.WriteInt(Ctxt, off+sliceCapOffset, Widthptr, lencap)
+ s.WriteAddr(base.Ctxt, off, Widthptr, arr.Sym.Linksym(), arr.Xoffset)
+ s.WriteInt(base.Ctxt, off+sliceLenOffset, Widthptr, lencap)
+ s.WriteInt(base.Ctxt, off+sliceCapOffset, Widthptr, lencap)
}
// addrsym writes the static address of a to n. a must be an ONAME.
// Neither n nor a is modified.
func addrsym(n, a *Node) {
if n.Op != ONAME {
- Fatalf("addrsym n op %v", n.Op)
+ base.Fatalf("addrsym n op %v", n.Op)
}
if n.Sym == nil {
- Fatalf("addrsym nil n sym")
+ base.Fatalf("addrsym nil n sym")
}
if a.Op != ONAME {
- Fatalf("addrsym a op %v", a.Op)
+ base.Fatalf("addrsym a op %v", a.Op)
}
s := n.Sym.Linksym()
- s.WriteAddr(Ctxt, n.Xoffset, Widthptr, a.Sym.Linksym(), a.Xoffset)
+ s.WriteAddr(base.Ctxt, n.Xoffset, Widthptr, a.Sym.Linksym(), a.Xoffset)
}
// pfuncsym writes the static address of f to n. f must be a global function.
// Neither n nor f is modified.
func pfuncsym(n, f *Node) {
if n.Op != ONAME {
- Fatalf("pfuncsym n op %v", n.Op)
+ base.Fatalf("pfuncsym n op %v", n.Op)
}
if n.Sym == nil {
- Fatalf("pfuncsym nil n sym")
+ base.Fatalf("pfuncsym nil n sym")
}
if f.Class() != PFUNC {
- Fatalf("pfuncsym class not PFUNC %d", f.Class())
+ base.Fatalf("pfuncsym class not PFUNC %d", f.Class())
}
s := n.Sym.Linksym()
- s.WriteAddr(Ctxt, n.Xoffset, Widthptr, funcsym(f.Sym).Linksym(), f.Xoffset)
+ s.WriteAddr(base.Ctxt, n.Xoffset, Widthptr, funcsym(f.Sym).Linksym(), f.Xoffset)
}
// litsym writes the static literal c to n.
// Neither n nor c is modified.
func litsym(n, c *Node, wid int) {
if n.Op != ONAME {
- Fatalf("litsym n op %v", n.Op)
+ base.Fatalf("litsym n op %v", n.Op)
}
if n.Sym == nil {
- Fatalf("litsym nil n sym")
+ base.Fatalf("litsym nil n sym")
}
if !types.Identical(n.Type, c.Type) {
- Fatalf("litsym: type mismatch: %v has type %v, but %v has type %v", n, n.Type, c, c.Type)
+ base.Fatalf("litsym: type mismatch: %v has type %v, but %v has type %v", n, n.Type, c, c.Type)
}
if c.Op == ONIL {
return
}
if c.Op != OLITERAL {
- Fatalf("litsym c op %v", c.Op)
+ base.Fatalf("litsym c op %v", c.Op)
}
s := n.Sym.Linksym()
switch u := c.Val(); u.Kind() {
case constant.Bool:
i := int64(obj.Bool2int(constant.BoolVal(u)))
- s.WriteInt(Ctxt, n.Xoffset, wid, i)
+ s.WriteInt(base.Ctxt, n.Xoffset, wid, i)
case constant.Int:
- s.WriteInt(Ctxt, n.Xoffset, wid, int64Val(n.Type, u))
+ s.WriteInt(base.Ctxt, n.Xoffset, wid, int64Val(n.Type, u))
case constant.Float:
f, _ := constant.Float64Val(u)
switch n.Type.Etype {
case TFLOAT32:
- s.WriteFloat32(Ctxt, n.Xoffset, float32(f))
+ s.WriteFloat32(base.Ctxt, n.Xoffset, float32(f))
case TFLOAT64:
- s.WriteFloat64(Ctxt, n.Xoffset, f)
+ s.WriteFloat64(base.Ctxt, n.Xoffset, f)
}
case constant.Complex:
im, _ := constant.Float64Val(constant.Imag(u))
switch n.Type.Etype {
case TCOMPLEX64:
- s.WriteFloat32(Ctxt, n.Xoffset, float32(re))
- s.WriteFloat32(Ctxt, n.Xoffset+4, float32(im))
+ s.WriteFloat32(base.Ctxt, n.Xoffset, float32(re))
+ s.WriteFloat32(base.Ctxt, n.Xoffset+4, float32(im))
case TCOMPLEX128:
- s.WriteFloat64(Ctxt, n.Xoffset, re)
- s.WriteFloat64(Ctxt, n.Xoffset+8, im)
+ s.WriteFloat64(base.Ctxt, n.Xoffset, re)
+ s.WriteFloat64(base.Ctxt, n.Xoffset+8, im)
}
case constant.String:
i := constant.StringVal(u)
symdata := stringsym(n.Pos, i)
- s.WriteAddr(Ctxt, n.Xoffset, Widthptr, symdata, 0)
- s.WriteInt(Ctxt, n.Xoffset+int64(Widthptr), Widthptr, int64(len(i)))
+ s.WriteAddr(base.Ctxt, n.Xoffset, Widthptr, symdata, 0)
+ s.WriteInt(base.Ctxt, n.Xoffset+int64(Widthptr), Widthptr, int64(len(i)))
default:
- Fatalf("litsym unhandled OLITERAL %v", c)
+ base.Fatalf("litsym unhandled OLITERAL %v", c)
}
}
package gc
import (
+ "cmd/compile/internal/base"
"cmd/compile/internal/types"
"cmd/internal/src"
"fmt"
// Order rewrites fn.Nbody to apply the ordering constraints
// described in the comment at the top of the file.
func order(fn *Node) {
- if Flag.W > 1 {
+ if base.Flag.W > 1 {
s := fmt.Sprintf("\nbefore order %v", fn.Func.Nname.Sym)
dumplist(s, fn.Nbody)
}
return typecheck(a, ctxExpr)
default:
- Fatalf("order.safeExpr %v", n.Op)
+ base.Fatalf("order.safeExpr %v", n.Op)
return nil // not reached
}
}
var s InitSchedule
s.staticassign(vstat, n)
if s.out != nil {
- Fatalf("staticassign of const generated code: %+v", n)
+ base.Fatalf("staticassign of const generated code: %+v", n)
}
vstat = typecheck(vstat, ctxExpr)
return vstat
// and rewrites it to:
// m = OMAKESLICECOPY([]T, x, s); nil
func orderMakeSliceCopy(s []*Node) {
- if Flag.N != 0 || instrumenting {
+ if base.Flag.N != 0 || instrumenting {
return
}
// edge inserts coverage instrumentation for libfuzzer.
func (o *Order) edge() {
- if Debug.Libfuzzer == 0 {
+ if base.Debug.Libfuzzer == 0 {
return
}
// For concurrency safety, don't mutate potentially shared nodes.
// First, ensure that no work is required here.
if n.Ninit.Len() > 0 {
- Fatalf("order.init shared node with ninit")
+ base.Fatalf("order.init shared node with ninit")
}
return
}
func (o *Order) call(n *Node) {
if n.Ninit.Len() > 0 {
// Caller should have already called o.init(n).
- Fatalf("%v with unexpected ninit", n.Op)
+ base.Fatalf("%v with unexpected ninit", n.Op)
}
// Builtin functions.
func (o *Order) mapAssign(n *Node) {
switch n.Op {
default:
- Fatalf("order.mapAssign %v", n.Op)
+ base.Fatalf("order.mapAssign %v", n.Op)
case OAS, OASOP:
if n.Left.Op == OINDEXMAP {
switch n.Op {
default:
- Fatalf("order.stmt %v", n.Op)
+ base.Fatalf("order.stmt %v", n.Op)
case OVARKILL, OVARLIVE, OINLMARK:
o.out = append(o.out, n)
_ = mapKeyReplaceStrConv(r.Right)
r.Right = o.mapKeyTemp(r.Left.Type, r.Right)
default:
- Fatalf("order.stmt: %v", r.Op)
+ base.Fatalf("order.stmt: %v", r.Op)
}
o.okAs2(n)
orderBody := true
switch n.Type.Etype {
default:
- Fatalf("order.stmt range %v", n.Type)
+ base.Fatalf("order.stmt range %v", n.Type)
case TARRAY, TSLICE:
if n.List.Len() < 2 || n.List.Second().isBlank() {
for _, n2 := range n.List.Slice() {
if n2.Op != OCASE {
- Fatalf("order select case %v", n2.Op)
+ base.Fatalf("order select case %v", n2.Op)
}
r := n2.Left
setlineno(n2)
// Append any new body prologue to ninit.
// The next loop will insert ninit into nbody.
if n2.Ninit.Len() != 0 {
- Fatalf("order select ninit")
+ base.Fatalf("order select ninit")
}
if r == nil {
continue
switch r.Op {
default:
Dump("select case", r)
- Fatalf("unknown op in select %v", r.Op)
+ base.Fatalf("unknown op in select %v", r.Op)
// If this is case x := <-ch or case x, y := <-ch, the case has
// the ODCL nodes to declare x and y. We want to delay that
if r.Ninit.Len() != 0 {
dumplist("ninit", r.Ninit)
- Fatalf("ninit on select recv")
+ base.Fatalf("ninit on select recv")
}
// case x = <-c
case OSEND:
if r.Ninit.Len() != 0 {
dumplist("ninit", r.Ninit)
- Fatalf("ninit on select send")
+ base.Fatalf("ninit on select send")
}
// case c <- x
// For now just clean all the temporaries at the end.
// In practice that's fine.
case OSWITCH:
- if Debug.Libfuzzer != 0 && !hasDefaultCase(n) {
+ if base.Debug.Libfuzzer != 0 && !hasDefaultCase(n) {
// Add empty "default:" case for instrumentation.
n.List.Append(nod(OCASE, nil, nil))
}
n.Left = o.expr(n.Left, nil)
for _, ncas := range n.List.Slice() {
if ncas.Op != OCASE {
- Fatalf("order switch case %v", ncas.Op)
+ base.Fatalf("order switch case %v", ncas.Op)
}
o.exprListInPlace(ncas.List)
orderBlock(&ncas.Nbody, o.free)
o.cleanTemp(t)
}
- lineno = lno
+ base.Pos = lno
}
func hasDefaultCase(n *Node) bool {
for _, ncas := range n.List.Slice() {
if ncas.Op != OCASE {
- Fatalf("expected case, found %v", ncas.Op)
+ base.Fatalf("expected case, found %v", ncas.Op)
}
if ncas.List.Len() == 0 {
return true
var dynamics []*Node
for _, r := range entries {
if r.Op != OKEY {
- Fatalf("OMAPLIT entry not OKEY: %v\n", r)
+ base.Fatalf("OMAPLIT entry not OKEY: %v\n", r)
}
if !isStaticCompositeLiteral(r.Left) || !isStaticCompositeLiteral(r.Right) {
}
}
- lineno = lno
+ base.Pos = lno
return n
}
package gc
import (
+ "cmd/compile/internal/base"
"cmd/compile/internal/ssa"
"cmd/compile/internal/types"
"cmd/internal/dwarf"
if fn.funcname() == "_" || fn.Func.Nname.Sym.Linkname != "" {
return
}
- lsym := Ctxt.Lookup(fn.Func.lsym.Name + ".args_stackmap")
+ lsym := base.Ctxt.Lookup(fn.Func.lsym.Name + ".args_stackmap")
nptr := int(fn.Type.ArgWidth() / int64(Widthptr))
bv := bvalloc(int32(nptr) * 2)
dowidth(n.Type)
w := n.Type.Width
if w >= thearch.MAXWIDTH || w < 0 {
- Fatalf("bad width")
+ base.Fatalf("bad width")
}
if w == 0 && lastHasPtr {
// Pad between a pointer-containing object and a zero-sized object.
func funccompile(fn *Node) {
if Curfn != nil {
- Fatalf("funccompile %v inside %v", fn.Func.Nname.Sym, Curfn.Func.Nname.Sym)
+ base.Fatalf("funccompile %v inside %v", fn.Func.Nname.Sym, Curfn.Func.Nname.Sym)
}
if fn.Type == nil {
- if Errors() == 0 {
- Fatalf("funccompile missing type")
+ if base.Errors() == 0 {
+ base.Fatalf("funccompile missing type")
}
return
}
}
func compile(fn *Node) {
- errorsBefore := Errors()
+ errorsBefore := base.Errors()
order(fn)
- if Errors() > errorsBefore {
+ if base.Errors() > errorsBefore {
return
}
fn.Func.initLSym(true)
walk(fn)
- if Errors() > errorsBefore {
+ if base.Errors() > errorsBefore {
return
}
if instrumenting {
// Also make sure we allocate a linker symbol
// for the stack object data, for the same reason.
if fn.Func.lsym.Func().StackObjects == nil {
- fn.Func.lsym.Func().StackObjects = Ctxt.Lookup(fn.Func.lsym.Name + ".stkobj")
+ fn.Func.lsym.Func().StackObjects = base.Ctxt.Lookup(fn.Func.lsym.Name + ".stkobj")
}
}
}
if fn.IsMethod() && isInlinableButNotInlined(fn) {
return false
}
- return Flag.LowerC == 1 && Debug.CompileLater == 0
+ return base.Flag.LowerC == 1 && base.Debug.CompileLater == 0
}
// isInlinableButNotInlined returns true if 'fn' was marked as an
})
}
var wg sync.WaitGroup
- Ctxt.InParallel = true
- c := make(chan *Node, Flag.LowerC)
- for i := 0; i < Flag.LowerC; i++ {
+ base.Ctxt.InParallel = true
+ c := make(chan *Node, base.Flag.LowerC)
+ for i := 0; i < base.Flag.LowerC; i++ {
wg.Add(1)
go func(worker int) {
for fn := range c {
close(c)
compilequeue = nil
wg.Wait()
- Ctxt.InParallel = false
+ base.Ctxt.InParallel = false
sizeCalculationDisabled = false
}
}
fn := curfn.(*Node)
if fn.Func.Nname != nil {
if expect := fn.Func.Nname.Sym.Linksym(); fnsym != expect {
- Fatalf("unexpected fnsym: %v != %v", fnsym, expect)
+ base.Fatalf("unexpected fnsym: %v != %v", fnsym, expect)
}
}
if !n.Name.Used() {
// Text == nil -> generating abstract function
if fnsym.Func().Text != nil {
- Fatalf("debuginfo unused node (AllocFrame should truncate fn.Func.Dcl)")
+ base.Fatalf("debuginfo unused node (AllocFrame should truncate fn.Func.Dcl)")
}
continue
}
scopes := assembleScopes(fnsym, fn, dwarfVars, varScopes)
var inlcalls dwarf.InlCalls
- if Flag.GenDwarfInl > 0 {
+ if base.Flag.GenDwarfInl > 0 {
inlcalls = assembleInlines(fnsym, dwarfVars)
}
return scopes, inlcalls
switch n.Class() {
case PAUTO:
abbrev = dwarf.DW_ABRV_AUTO
- if Ctxt.FixedFrameSize() == 0 {
+ if base.Ctxt.FixedFrameSize() == 0 {
offs -= int64(Widthptr)
}
if objabi.Framepointer_enabled || objabi.GOARCH == "arm64" {
case PPARAM, PPARAMOUT:
abbrev = dwarf.DW_ABRV_PARAM
- offs += Ctxt.FixedFrameSize()
+ offs += base.Ctxt.FixedFrameSize()
default:
- Fatalf("createSimpleVar unexpected class %v for node %v", n.Class(), n)
+ base.Fatalf("createSimpleVar unexpected class %v for node %v", n.Class(), n)
}
typename := dwarf.InfoPrefix + typesymname(n.Type)
delete(fnsym.Func().Autot, ngotype(n).Linksym())
inlIndex := 0
- if Flag.GenDwarfInl > 1 {
+ if base.Flag.GenDwarfInl > 1 {
if n.Name.InlFormal() || n.Name.InlLocal() {
inlIndex = posInlIndex(n.Pos) + 1
if n.Name.InlFormal() {
}
}
}
- declpos := Ctxt.InnermostPos(declPos(n))
+ declpos := base.Ctxt.InnermostPos(declPos(n))
return &dwarf.Var{
Name: n.Sym.Name,
IsReturnValue: n.Class() == PPARAMOUT,
IsInlFormal: n.Name.InlFormal(),
Abbrev: abbrev,
StackOffset: int32(offs),
- Type: Ctxt.Lookup(typename),
+ Type: base.Ctxt.Lookup(typename),
DeclFile: declpos.RelFilename(),
DeclLine: declpos.RelLine(),
DeclCol: declpos.Col(),
var vars []*dwarf.Var
var decls []*Node
var selected map[*Node]bool
- if Ctxt.Flag_locationlists && Ctxt.Flag_optimize && fn.DebugInfo != nil && complexOK {
+ if base.Ctxt.Flag_locationlists && base.Ctxt.Flag_optimize && fn.DebugInfo != nil && complexOK {
decls, vars, selected = createComplexVars(fnsym, fn)
} else {
decls, vars, selected = createSimpleVars(fnsym, apDecls)
}
}
inlIndex := 0
- if Flag.GenDwarfInl > 1 {
+ if base.Flag.GenDwarfInl > 1 {
if n.Name.InlFormal() || n.Name.InlLocal() {
inlIndex = posInlIndex(n.Pos) + 1
if n.Name.InlFormal() {
}
}
}
- declpos := Ctxt.InnermostPos(n.Pos)
+ declpos := base.Ctxt.InnermostPos(n.Pos)
vars = append(vars, &dwarf.Var{
Name: n.Sym.Name,
IsReturnValue: isReturnValue,
Abbrev: abbrev,
StackOffset: int32(n.Xoffset),
- Type: Ctxt.Lookup(typename),
+ Type: base.Ctxt.Lookup(typename),
DeclFile: declpos.RelFilename(),
DeclLine: declpos.RelLine(),
DeclCol: declpos.Col(),
// names of the variables may have been "versioned" to avoid conflicts
// with local vars; disregard this versioning when sorting.
func preInliningDcls(fnsym *obj.LSym) []*Node {
- fn := Ctxt.DwFixups.GetPrecursorFunc(fnsym).(*Node)
+ fn := base.Ctxt.DwFixups.GetPrecursorFunc(fnsym).(*Node)
var rdcl []*Node
for _, n := range fn.Func.Inl.Dcl {
c := n.Sym.Name[0]
var off int64
switch n.Class() {
case PAUTO:
- if Ctxt.FixedFrameSize() == 0 {
+ if base.Ctxt.FixedFrameSize() == 0 {
off -= int64(Widthptr)
}
if objabi.Framepointer_enabled || objabi.GOARCH == "arm64" {
off -= int64(Widthptr)
}
case PPARAM, PPARAMOUT:
- off += Ctxt.FixedFrameSize()
+ off += base.Ctxt.FixedFrameSize()
}
return int32(off + n.Xoffset + slot.Off)
}
delete(fnsym.Func().Autot, gotype)
typename := dwarf.InfoPrefix + gotype.Name[len("type."):]
inlIndex := 0
- if Flag.GenDwarfInl > 1 {
+ if base.Flag.GenDwarfInl > 1 {
if n.Name.InlFormal() || n.Name.InlLocal() {
inlIndex = posInlIndex(n.Pos) + 1
if n.Name.InlFormal() {
}
}
}
- declpos := Ctxt.InnermostPos(n.Pos)
+ declpos := base.Ctxt.InnermostPos(n.Pos)
dvar := &dwarf.Var{
Name: n.Sym.Name,
IsReturnValue: n.Class() == PPARAMOUT,
IsInlFormal: n.Name.InlFormal(),
Abbrev: abbrev,
- Type: Ctxt.Lookup(typename),
+ Type: base.Ctxt.Lookup(typename),
// The stack offset is used as a sorting key, so for decomposed
// variables just give it the first one. It's not used otherwise.
// This won't work well if the first slot hasn't been assigned a stack
list := debug.LocationLists[varID]
if len(list) != 0 {
dvar.PutLocationList = func(listSym, startPC dwarf.Sym) {
- debug.PutLocationList(list, Ctxt, listSym.(*obj.LSym), startPC.(*obj.LSym))
+ debug.PutLocationList(list, base.Ctxt, listSym.(*obj.LSym), startPC.(*obj.LSym))
}
}
return dvar
package gc
import (
+ "cmd/compile/internal/base"
"cmd/compile/internal/ssa"
"cmd/compile/internal/types"
"cmd/internal/obj"
func (lv *Liveness) initcache() {
if lv.cache.initialized {
- Fatalf("liveness cache initialized twice")
+ base.Fatalf("liveness cache initialized twice")
return
}
lv.cache.initialized = true
case *Node:
return a, e
default:
- Fatalf("weird aux: %s", v.LongString())
+ base.Fatalf("weird aux: %s", v.LongString())
return nil, e
}
}
// on future calls with the same type t.
func onebitwalktype1(t *types.Type, off int64, bv bvec) {
if t.Align > 0 && off&int64(t.Align-1) != 0 {
- Fatalf("onebitwalktype1: invalid initial alignment: type %v has alignment %d, but offset is %v", t, t.Align, off)
+ base.Fatalf("onebitwalktype1: invalid initial alignment: type %v has alignment %d, but offset is %v", t, t.Align, off)
}
if !t.HasPointers() {
// Note: this case ensures that pointers to go:notinheap types
switch t.Etype {
case TPTR, TUNSAFEPTR, TFUNC, TCHAN, TMAP:
if off&int64(Widthptr-1) != 0 {
- Fatalf("onebitwalktype1: invalid alignment, %v", t)
+ base.Fatalf("onebitwalktype1: invalid alignment, %v", t)
}
bv.Set(int32(off / int64(Widthptr))) // pointer
case TSTRING:
// struct { byte *str; intgo len; }
if off&int64(Widthptr-1) != 0 {
- Fatalf("onebitwalktype1: invalid alignment, %v", t)
+ base.Fatalf("onebitwalktype1: invalid alignment, %v", t)
}
bv.Set(int32(off / int64(Widthptr))) //pointer in first slot
// or, when isnilinter(t)==true:
// struct { Type *type; void *data; }
if off&int64(Widthptr-1) != 0 {
- Fatalf("onebitwalktype1: invalid alignment, %v", t)
+ base.Fatalf("onebitwalktype1: invalid alignment, %v", t)
}
// The first word of an interface is a pointer, but we don't
// treat it as such.
case TSLICE:
// struct { byte *array; uintgo len; uintgo cap; }
if off&int64(Widthptr-1) != 0 {
- Fatalf("onebitwalktype1: invalid TARRAY alignment, %v", t)
+ base.Fatalf("onebitwalktype1: invalid TARRAY alignment, %v", t)
}
bv.Set(int32(off / int64(Widthptr))) // pointer in first slot (BitsPointer)
}
default:
- Fatalf("onebitwalktype1: unexpected type, %v", t)
+ base.Fatalf("onebitwalktype1: unexpected type, %v", t)
}
}
// go:nosplit functions are similar. Since safe points used to
// be coupled with stack checks, go:nosplit often actually
// means "no safe points in this function".
- return Flag.CompilingRuntime || f.NoSplit
+ return base.Flag.CompilingRuntime || f.NoSplit
}
// markUnsafePoints finds unsafe points and computes lv.unsafePoints.
if n.Class() == PPARAMOUT {
if n.Name.IsOutputParamHeapAddr() {
// Just to be paranoid. Heap addresses are PAUTOs.
- Fatalf("variable %v both output param and heap output param", n)
+ base.Fatalf("variable %v both output param and heap output param", n)
}
if n.Name.Param.Heapaddr != nil {
// If this variable moved to the heap, then
livedefer.Set(int32(i))
// It was already marked as Needzero when created.
if !n.Name.Needzero() {
- Fatalf("all pointer-containing defer arg slots should have Needzero set")
+ base.Fatalf("all pointer-containing defer arg slots should have Needzero set")
}
}
}
if b == lv.f.Entry {
if index != 0 {
- Fatalf("bad index for entry point: %v", index)
+ base.Fatalf("bad index for entry point: %v", index)
}
// Check to make sure only input variables are live.
if n.Class() == PPARAM {
continue // ok
}
- Fatalf("bad live variable at entry of %v: %L", lv.fn.Func.Nname, n)
+ base.Fatalf("bad live variable at entry of %v: %L", lv.fn.Func.Nname, n)
}
// Record live variables.
}
func (lv *Liveness) showlive(v *ssa.Value, live bvec) {
- if Flag.Live == 0 || lv.fn.funcname() == "init" || strings.HasPrefix(lv.fn.funcname(), ".") {
+ if base.Flag.Live == 0 || lv.fn.funcname() == "init" || strings.HasPrefix(lv.fn.funcname(), ".") {
return
}
if !(v == nil || v.Op.IsCall()) {
}
}
- Warnl(pos, s)
+ base.WarnfAt(pos, s)
}
func (lv *Liveness) printbvec(printed bool, name string, live bvec) bool {
if b == lv.f.Entry {
live := lv.stackMaps[0]
- fmt.Printf("(%s) function entry\n", linestr(lv.fn.Func.Nname.Pos))
+ fmt.Printf("(%s) function entry\n", base.FmtPos(lv.fn.Func.Nname.Pos))
fmt.Printf("\tlive=")
printed = false
for j, n := range lv.vars {
}
for _, v := range b.Values {
- fmt.Printf("(%s) %v\n", linestr(v.Pos), v.LongString())
+ fmt.Printf("(%s) %v\n", base.FmtPos(v.Pos), v.LongString())
pcdata := lv.livenessMap.Get(v)
// These symbols will be added to Ctxt.Data by addGCLocals
// after parallel compilation is done.
makeSym := func(tmpSym *obj.LSym) *obj.LSym {
- return Ctxt.LookupInit(fmt.Sprintf("gclocals·%x", md5.Sum(tmpSym.P)), func(lsym *obj.LSym) {
+ return base.Ctxt.LookupInit(fmt.Sprintf("gclocals·%x", md5.Sum(tmpSym.P)), func(lsym *obj.LSym) {
lsym.P = tmpSym.P
lsym.Set(obj.AttrContentAddressable, true)
})
lv.prologue()
lv.solve()
lv.epilogue()
- if Flag.Live > 0 {
+ if base.Flag.Live > 0 {
lv.showlive(nil, lv.stackMaps[0])
for _, b := range f.Blocks {
for _, val := range b.Values {
}
}
}
- if Flag.Live >= 2 {
+ if base.Flag.Live >= 2 {
lv.printDebug()
}
package gc
import (
+ "cmd/compile/internal/base"
"cmd/compile/internal/types"
"cmd/internal/src"
"cmd/internal/sys"
var norace_inst_pkgs = []string{"sync", "sync/atomic"}
func ispkgin(pkgs []string) bool {
- if Ctxt.Pkgpath != "" {
+ if base.Ctxt.Pkgpath != "" {
for _, p := range pkgs {
- if Ctxt.Pkgpath == p {
+ if base.Ctxt.Pkgpath == p {
return true
}
}
return
}
- if !Flag.Race || !ispkgin(norace_inst_pkgs) {
+ if !base.Flag.Race || !ispkgin(norace_inst_pkgs) {
fn.Func.SetInstrumentBody(true)
}
- if Flag.Race {
- lno := lineno
- lineno = src.NoXPos
+ if base.Flag.Race {
+ lno := base.Pos
+ base.Pos = src.NoXPos
if thearch.LinkArch.Arch.Family != sys.AMD64 {
fn.Func.Enter.Prepend(mkcall("racefuncenterfp", nil, nil))
fn.Func.Enter.Prepend(mkcall("racefuncenter", nil, nil, nodpc))
fn.Func.Exit.Append(mkcall("racefuncexit", nil, nil))
}
- lineno = lno
+ base.Pos = lno
}
}
package gc
import (
+ "cmd/compile/internal/base"
"cmd/compile/internal/types"
"cmd/internal/sys"
"unicode/utf8"
toomany := false
switch t.Etype {
default:
- yyerrorl(n.Pos, "cannot range over %L", n.Right)
+ base.ErrorfAt(n.Pos, "cannot range over %L", n.Right)
return
case TARRAY, TSLICE:
case TCHAN:
if !t.ChanDir().CanRecv() {
- yyerrorl(n.Pos, "invalid operation: range %v (receive from send-only type %v)", n.Right, n.Right.Type)
+ base.ErrorfAt(n.Pos, "invalid operation: range %v (receive from send-only type %v)", n.Right, n.Right.Type)
return
}
}
if n.List.Len() > 2 || toomany {
- yyerrorl(n.Pos, "too many variables in range")
+ base.ErrorfAt(n.Pos, "too many variables in range")
}
var v1, v2 *Node
v1.Type = t1
} else if v1.Type != nil {
if op, why := assignop(t1, v1.Type); op == OXXX {
- yyerrorl(n.Pos, "cannot assign type %v to %L in range%s", t1, v1, why)
+ base.ErrorfAt(n.Pos, "cannot assign type %v to %L in range%s", t1, v1, why)
}
}
checkassign(n, v1)
v2.Type = t2
} else if v2.Type != nil {
if op, why := assignop(t2, v2.Type); op == OXXX {
- yyerrorl(n.Pos, "cannot assign type %v to %L in range%s", t2, v2, why)
+ base.ErrorfAt(n.Pos, "cannot assign type %v to %L in range%s", t2, v2, why)
}
}
checkassign(n, v2)
m := n.Right
lno := setlineno(m)
n = mapClear(m)
- lineno = lno
+ base.Pos = lno
return n
}
}
if v1 == nil && v2 != nil {
- Fatalf("walkrange: v2 != nil while v1 == nil")
+ base.Fatalf("walkrange: v2 != nil while v1 == nil")
}
// n.List has no meaning anymore, clear it
var init []*Node
switch t.Etype {
default:
- Fatalf("walkrange")
+ base.Fatalf("walkrange")
case TARRAY, TSLICE:
if arrayClear(n, v1, v2, a) {
- lineno = lno
+ base.Pos = lno
return n
}
n = walkstmt(n)
- lineno = lno
+ base.Pos = lno
return n
}
//
// where == for keys of map m is reflexive.
func isMapClear(n *Node) bool {
- if Flag.N != 0 || instrumenting {
+ if base.Flag.N != 0 || instrumenting {
return false
}
//
// Parameters are as in walkrange: "for v1, v2 = range a".
func arrayClear(n, v1, v2, a *Node) bool {
- if Flag.N != 0 || instrumenting {
+ if base.Flag.N != 0 || instrumenting {
return false
}
package gc
import (
+ "cmd/compile/internal/base"
"cmd/compile/internal/types"
"cmd/internal/gcprog"
"cmd/internal/obj"
// Check invariants that map code depends on.
if !IsComparable(t.Key()) {
- Fatalf("unsupported map key type for %v", t)
+ base.Fatalf("unsupported map key type for %v", t)
}
if BUCKETSIZE < 8 {
- Fatalf("bucket size too small for proper alignment")
+ base.Fatalf("bucket size too small for proper alignment")
}
if keytype.Align > BUCKETSIZE {
- Fatalf("key align too big for %v", t)
+ base.Fatalf("key align too big for %v", t)
}
if elemtype.Align > BUCKETSIZE {
- Fatalf("elem align too big for %v", t)
+ base.Fatalf("elem align too big for %v", t)
}
if keytype.Width > MAXKEYSIZE {
- Fatalf("key size to large for %v", t)
+ base.Fatalf("key size too large for %v", t)
}
if elemtype.Width > MAXELEMSIZE {
- Fatalf("elem size to large for %v", t)
+ base.Fatalf("elem size too large for %v", t)
}
if t.Key().Width > MAXKEYSIZE && !keytype.IsPtr() {
- Fatalf("key indirect incorrect for %v", t)
+ base.Fatalf("key indirect incorrect for %v", t)
}
if t.Elem().Width > MAXELEMSIZE && !elemtype.IsPtr() {
- Fatalf("elem indirect incorrect for %v", t)
+ base.Fatalf("elem indirect incorrect for %v", t)
}
if keytype.Width%int64(keytype.Align) != 0 {
- Fatalf("key size not a multiple of key align for %v", t)
+ base.Fatalf("key size not a multiple of key align for %v", t)
}
if elemtype.Width%int64(elemtype.Align) != 0 {
- Fatalf("elem size not a multiple of elem align for %v", t)
+ base.Fatalf("elem size not a multiple of elem align for %v", t)
}
if bucket.Align%keytype.Align != 0 {
- Fatalf("bucket align not multiple of key align %v", t)
+ base.Fatalf("bucket align not multiple of key align %v", t)
}
if bucket.Align%elemtype.Align != 0 {
- Fatalf("bucket align not multiple of elem align %v", t)
+ base.Fatalf("bucket align not multiple of elem align %v", t)
}
if keys.Offset%int64(keytype.Align) != 0 {
- Fatalf("bad alignment of keys in bmap for %v", t)
+ base.Fatalf("bad alignment of keys in bmap for %v", t)
}
if elems.Offset%int64(elemtype.Align) != 0 {
- Fatalf("bad alignment of elems in bmap for %v", t)
+ base.Fatalf("bad alignment of elems in bmap for %v", t)
}
// Double-check that overflow field is final memory in struct,
// with no padding at end.
if overflow.Offset != bucket.Width-int64(Widthptr) {
- Fatalf("bad offset of overflow in bmap for %v", t)
+ base.Fatalf("bad offset of overflow in bmap for %v", t)
}
t.MapType().Bucket = bucket
// The size of hmap should be 48 bytes on 64 bit
// and 28 bytes on 32 bit platforms.
if size := int64(8 + 5*Widthptr); hmap.Width != size {
- Fatalf("hmap size not correct: got %d, want %d", hmap.Width, size)
+ base.Fatalf("hmap size not correct: got %d, want %d", hmap.Width, size)
}
t.MapType().Hmap = hmap
hiter.SetFields(fields)
dowidth(hiter)
if hiter.Width != int64(12*Widthptr) {
- Fatalf("hash_iter size not correct %d %d", hiter.Width, 12*Widthptr)
+ base.Fatalf("hash_iter size not correct %d %d", hiter.Width, 12*Widthptr)
}
t.MapType().Hiter = hiter
hiter.StructType().Map = t
var ms []*Sig
for _, f := range mt.AllMethods().Slice() {
if !f.IsMethod() {
- Fatalf("non-method on %v method %v %v\n", mt, f.Sym, f)
+ base.Fatalf("non-method on %v method %v %v\n", mt, f.Sym, f)
}
if f.Type.Recv() == nil {
- Fatalf("receiver with no type on %v method %v %v\n", mt, f.Sym, f)
+ base.Fatalf("receiver with no type on %v method %v %v\n", mt, f.Sym, f)
}
if f.Nointerface() {
continue
}
if f.Sym.IsBlank() {
- Fatalf("unexpected blank symbol in interface method set")
+ base.Fatalf("unexpected blank symbol in interface method set")
}
if n := len(methods); n > 0 {
last := methods[n-1]
if !last.name.Less(f.Sym) {
- Fatalf("sigcmp vs sortinter %v %v", last.name, f.Sym)
+ base.Fatalf("sigcmp vs sortinter %v %v", last.name, f.Sym)
}
}
// If we are compiling the runtime package, there are two runtime packages around
// -- localpkg and Runtimepkg. We don't want to produce import path symbols for
// both of them, so just produce one for localpkg.
- if Ctxt.Pkgpath == "runtime" && p == Runtimepkg {
+ if base.Ctxt.Pkgpath == "runtime" && p == Runtimepkg {
return
}
str := p.Path
if p == localpkg {
// Note: myimportpath != "", or else dgopkgpath won't call dimportpath.
- str = Ctxt.Pkgpath
+ str = base.Ctxt.Pkgpath
}
- s := Ctxt.Lookup("type..importpath." + p.Prefix + ".")
+ s := base.Ctxt.Lookup("type..importpath." + p.Prefix + ".")
ot := dnameData(s, 0, str, "", nil, false)
ggloblsym(s, int32(ot), obj.DUPOK|obj.RODATA)
s.Set(obj.AttrContentAddressable, true)
return duintptr(s, ot, 0)
}
- if pkg == localpkg && Ctxt.Pkgpath == "" {
+ if pkg == localpkg && base.Ctxt.Pkgpath == "" {
// If we don't know the full import path of the package being compiled
// (i.e. -p was not passed on the compiler command line), emit a reference to
// type..importpath.""., which the linker will rewrite using the correct import path.
// Every package that imports this one directly defines the symbol.
// See also https://groups.google.com/forum/#!topic/golang-dev/myb9s53HxGQ.
- ns := Ctxt.Lookup(`type..importpath."".`)
+ ns := base.Ctxt.Lookup(`type..importpath."".`)
return dsymptr(s, ot, ns, 0)
}
if pkg == nil {
return duint32(s, ot, 0)
}
- if pkg == localpkg && Ctxt.Pkgpath == "" {
+ if pkg == localpkg && base.Ctxt.Pkgpath == "" {
// If we don't know the full import path of the package being compiled
// (i.e. -p was not passed on the compiler command line), emit a reference to
// type..importpath.""., which the linker will rewrite using the correct import path.
// Every package that imports this one directly defines the symbol.
// See also https://groups.google.com/forum/#!topic/golang-dev/myb9s53HxGQ.
- ns := Ctxt.Lookup(`type..importpath."".`)
+ ns := base.Ctxt.Lookup(`type..importpath."".`)
return dsymptrOff(s, ot, ns)
}
// dnameField dumps a reflect.name for a struct field.
func dnameField(lsym *obj.LSym, ot int, spkg *types.Pkg, ft *types.Field) int {
if !types.IsExported(ft.Sym.Name) && ft.Sym.Pkg != spkg {
- Fatalf("package mismatch for %v", ft.Sym)
+ base.Fatalf("package mismatch for %v", ft.Sym)
}
nsym := dname(ft.Sym.Name, ft.Note, nil, types.IsExported(ft.Sym.Name))
return dsymptr(lsym, ot, nsym, 0)
// dnameData writes the contents of a reflect.name into s at offset ot.
func dnameData(s *obj.LSym, ot int, name, tag string, pkg *types.Pkg, exported bool) int {
if len(name) > 1<<16-1 {
- Fatalf("name too long: %s", name)
+ base.Fatalf("name too long: %s", name)
}
if len(tag) > 1<<16-1 {
- Fatalf("tag too long: %s", tag)
+ base.Fatalf("tag too long: %s", tag)
}
// Encode name and tag. See reflect/type.go for details.
copy(tb[2:], tag)
}
- ot = int(s.WriteBytes(Ctxt, int64(ot), b))
+ ot = int(s.WriteBytes(base.Ctxt, int64(ot), b))
if pkg != nil {
ot = dgopkgpathOff(s, ot, pkg)
sname = fmt.Sprintf(`%s"".%d`, sname, dnameCount)
dnameCount++
}
- s := Ctxt.Lookup(sname)
+ s := base.Ctxt.Lookup(sname)
if len(s.P) > 0 {
return s
}
}
noff := int(Rnd(int64(ot), int64(Widthptr)))
if noff != ot {
- Fatalf("unexpected alignment in dextratype for %v", t)
+ base.Fatalf("unexpected alignment in dextratype for %v", t)
}
for _, a := range m {
dataAdd += uncommonSize(t)
mcount := len(m)
if mcount != int(uint16(mcount)) {
- Fatalf("too many methods on %v: %d", t, mcount)
+ base.Fatalf("too many methods on %v: %d", t, mcount)
}
xcount := sort.Search(mcount, func(i int) bool { return !types.IsExported(m[i].name.Name) })
if dataAdd != int(uint32(dataAdd)) {
- Fatalf("methods are too far away on %v: %d", t, dataAdd)
+ base.Fatalf("methods are too far away on %v: %d", t, dataAdd)
}
ot = duint16(lsym, ot, uint16(mcount))
return lastPtrField.Offset + typeptrdata(lastPtrField.Type)
default:
- Fatalf("typeptrdata: unexpected type, %v", t)
+ base.Fatalf("typeptrdata: unexpected type, %v", t)
return 0
}
}
i = 1
}
if i&(i-1) != 0 {
- Fatalf("invalid alignment %d for %v", t.Align, t)
+ base.Fatalf("invalid alignment %d for %v", t.Align, t)
}
ot = duint8(lsym, ot, t.Align) // align
ot = duint8(lsym, ot, t.Align) // fieldAlign
func typenamesym(t *types.Type) *types.Sym {
if t == nil || (t.IsPtr() && t.Elem() == nil) || t.IsUntyped() {
- Fatalf("typenamesym %v", t)
+ base.Fatalf("typenamesym %v", t)
}
s := typesym(t)
signatmu.Lock()
func itabname(t, itype *types.Type) *Node {
if t == nil || (t.IsPtr() && t.Elem() == nil) || t.IsUntyped() || !itype.IsInterface() || itype.IsEmptyInterface() {
- Fatalf("itabname(%v, %v)", t, itype)
+ base.Fatalf("itabname(%v, %v)", t, itype)
}
s := itabpkg.Lookup(t.ShortString() + "," + itype.ShortString())
if s.Def == nil {
return true
default:
- Fatalf("bad type for map key: %v", t)
+ base.Fatalf("bad type for map key: %v", t)
return false
}
}
return false
default:
- Fatalf("bad type for map key: %v", t)
+ base.Fatalf("bad type for map key: %v", t)
return true
}
}
func dtypesym(t *types.Type) *obj.LSym {
t = formalType(t)
if t.IsUntyped() {
- Fatalf("dtypesym %v", t)
+ base.Fatalf("dtypesym %v", t)
}
s := typesym(t)
dupok = obj.DUPOK
}
- if Ctxt.Pkgpath != "runtime" || (tbase != types.Types[tbase.Etype] && tbase != types.Bytetype && tbase != types.Runetype && tbase != types.Errortype) { // int, float, etc
+ if base.Ctxt.Pkgpath != "runtime" || (tbase != types.Types[tbase.Etype] && tbase != types.Bytetype && tbase != types.Runetype && tbase != types.Errortype) { // int, float, etc
// named types from other files are defined only by those files
if tbase.Sym != nil && tbase.Sym.Pkg != localpkg {
if i, ok := typeSymIdx[tbase]; ok {
ot = dsymptr(lsym, ot, dtypesym(f.Type), 0)
offsetAnon := uint64(f.Offset) << 1
if offsetAnon>>1 != uint64(f.Offset) {
- Fatalf("%v: bad field offset for %s", t, f.Sym.Name)
+ base.Fatalf("%v: bad field offset for %s", t, f.Sym.Name)
}
if f.Embedded != 0 {
offsetAnon |= 1
//
// When buildmode=shared, all types are in typelinks so the
// runtime can deduplicate type pointers.
- keep := Ctxt.Flag_dynlink
+ keep := base.Ctxt.Flag_dynlink
if !keep && t.Sym == nil {
// For an unnamed type, we only need the link if the type can
// be created at run time by reflect.PtrTo and similar
}
if len(sigs) != 0 {
- Fatalf("incomplete itab")
+ base.Fatalf("incomplete itab")
}
return out
// process ptabs
if localpkg.Name == "main" && len(ptabs) > 0 {
ot := 0
- s := Ctxt.Lookup("go.plugin.tabs")
+ s := base.Ctxt.Lookup("go.plugin.tabs")
for _, p := range ptabs {
// Dump ptab symbol into go.pluginsym package.
//
ggloblsym(s, int32(ot), int16(obj.RODATA))
ot = 0
- s = Ctxt.Lookup("go.plugin.exports")
+ s = base.Ctxt.Lookup("go.plugin.exports")
for _, p := range ptabs {
ot = dsymptr(s, ot, p.s.Linksym(), 0)
}
// so this is as good as any.
// another possible choice would be package main,
// but using runtime means fewer copies in object files.
- if Ctxt.Pkgpath == "runtime" {
+ if base.Ctxt.Pkgpath == "runtime" {
for i := types.EType(1); i <= TBOOL; i++ {
dtypesym(types.NewPtr(types.Types[i]))
}
// add paths for runtime and main, which 6l imports implicitly.
dimportpath(Runtimepkg)
- if Flag.Race {
+ if base.Flag.Race {
dimportpath(racepkg)
}
- if Flag.MSan {
+ if base.Flag.MSan {
dimportpath(msanpkg)
}
dimportpath(types.NewPkg("main", ""))
func dgcprog(t *types.Type) (*obj.LSym, int64) {
dowidth(t)
if t.Width == BADWIDTH {
- Fatalf("dgcprog: %v badwidth", t)
+ base.Fatalf("dgcprog: %v badwidth", t)
}
lsym := typesymprefix(".gcprog", t).Linksym()
var p GCProg
offset := p.w.BitIndex() * int64(Widthptr)
p.end()
if ptrdata := typeptrdata(t); offset < ptrdata || offset > t.Width {
- Fatalf("dgcprog: %v: offset=%d but ptrdata=%d size=%d", t, offset, ptrdata, t.Width)
+ base.Fatalf("dgcprog: %v: offset=%d but ptrdata=%d size=%d", t, offset, ptrdata, t.Width)
}
return lsym, offset
}
p.lsym = lsym
p.symoff = 4 // first 4 bytes hold program length
p.w.Init(p.writeByte)
- if Debug.GCProg > 0 {
+ if base.Debug.GCProg > 0 {
fmt.Fprintf(os.Stderr, "compile: start GCProg for %v\n", lsym)
p.w.Debug(os.Stderr)
}
p.w.End()
duint32(p.lsym, 0, uint32(p.symoff-4))
ggloblsym(p.lsym, int32(p.symoff), obj.DUPOK|obj.RODATA|obj.LOCAL)
- if Debug.GCProg > 0 {
+ if base.Debug.GCProg > 0 {
fmt.Fprintf(os.Stderr, "compile: end GCProg for %v\n", p.lsym)
}
}
}
switch t.Etype {
default:
- Fatalf("GCProg.emit: unexpected type %v", t)
+ base.Fatalf("GCProg.emit: unexpected type %v", t)
case TSTRING:
p.w.Ptr(offset / int64(Widthptr))
case TARRAY:
if t.NumElem() == 0 {
// should have been handled by haspointers check above
- Fatalf("GCProg.emit: empty array")
+ base.Fatalf("GCProg.emit: empty array")
}
// Flatten array-of-array-of-array to just a big array by multiplying counts.
// size bytes of zeros.
func zeroaddr(size int64) *Node {
if size >= 1<<31 {
- Fatalf("map elem too big %d", size)
+ base.Fatalf("map elem too big %d", size)
}
if zerosize < size {
zerosize = size
package gc
import (
+ "cmd/compile/internal/base"
"cmd/internal/dwarf"
"cmd/internal/obj"
"cmd/internal/src"
// See golang.org/issue/20390.
func xposBefore(p, q src.XPos) bool {
- return Ctxt.PosTable.Pos(p).Before(Ctxt.PosTable.Pos(q))
+ return base.Ctxt.PosTable.Pos(p).Before(base.Ctxt.PosTable.Pos(q))
}
func findScope(marks []Mark, pos src.XPos) ScopeID {
package gc
-import "cmd/compile/internal/types"
+import (
+ "cmd/compile/internal/base"
+ "cmd/compile/internal/types"
+)
// select
func typecheckselect(sel *Node) {
for _, ncase := range sel.List.Slice() {
if ncase.Op != OCASE {
setlineno(ncase)
- Fatalf("typecheckselect %v", ncase.Op)
+ base.Fatalf("typecheckselect %v", ncase.Op)
}
if ncase.List.Len() == 0 {
// default
if def != nil {
- yyerrorl(ncase.Pos, "multiple defaults in select (first at %v)", def.Line())
+ base.ErrorfAt(ncase.Pos, "multiple defaults in select (first at %v)", def.Line())
} else {
def = ncase
}
} else if ncase.List.Len() > 1 {
- yyerrorl(ncase.Pos, "select cases cannot be lists")
+ base.ErrorfAt(ncase.Pos, "select cases cannot be lists")
} else {
ncase.List.SetFirst(typecheck(ncase.List.First(), ctxStmt))
n := ncase.List.First()
// on the same line). This matches the approach before 1.10.
pos = ncase.Pos
}
- yyerrorl(pos, "select case must be receive, send or assign recv")
+ base.ErrorfAt(pos, "select case must be receive, send or assign recv")
// convert x = <-c into OSELRECV(x, <-c).
// remove implicit conversions; the eventual assignment
}
if n.Right.Op != ORECV {
- yyerrorl(n.Pos, "select assignment must have receive on right hand side")
+ base.ErrorfAt(n.Pos, "select assignment must have receive on right hand side")
break
}
// convert x, ok = <-c into OSELRECV2(x, <-c) with ntest=ok
case OAS2RECV:
if n.Right.Op != ORECV {
- yyerrorl(n.Pos, "select assignment must have receive on right hand side")
+ base.ErrorfAt(n.Pos, "select assignment must have receive on right hand side")
break
}
typecheckslice(ncase.Nbody.Slice(), ctxStmt)
}
- lineno = lno
+ base.Pos = lno
}
func walkselect(sel *Node) {
lno := setlineno(sel)
if sel.Nbody.Len() != 0 {
- Fatalf("double walkselect")
+ base.Fatalf("double walkselect")
}
init := sel.Ninit.Slice()
sel.Nbody.Set(init)
walkstmtlist(sel.Nbody.Slice())
- lineno = lno
+ base.Pos = lno
}
func walkselectcases(cases *Nodes) []*Node {
ncas := cases.Len()
- sellineno := lineno
+ sellineno := base.Pos
// optimization: zero-case select
if ncas == 0 {
n.Ninit.Set(nil)
switch n.Op {
default:
- Fatalf("select %v", n.Op)
+ base.Fatalf("select %v", n.Op)
case OSEND:
// already ok
r.Ninit.Set(cas.Ninit.Slice())
switch n.Op {
default:
- Fatalf("select %v", n.Op)
+ base.Fatalf("select %v", n.Op)
case OSEND:
// if selectnbsend(c, v) { body } else { default body }
var init []*Node
// generate sel-struct
- lineno = sellineno
+ base.Pos = sellineno
selv := temp(types.NewArray(scasetype(), int64(ncas)))
r := nod(OAS, selv, nil)
r = typecheck(r, ctxStmt)
order := temp(types.NewArray(types.Types[TUINT16], 2*int64(ncas)))
var pc0, pcs *Node
- if Flag.Race {
+ if base.Flag.Race {
pcs = temp(types.NewArray(types.Types[TUINTPTR], int64(ncas)))
pc0 = typecheck(nod(OADDR, nod(OINDEX, pcs, nodintconst(0)), nil), ctxExpr)
} else {
var c, elem *Node
switch n.Op {
default:
- Fatalf("select %v", n.Op)
+ base.Fatalf("select %v", n.Op)
case OSEND:
i = nsends
nsends++
// TODO(mdempsky): There should be a cleaner way to
// handle this.
- if Flag.Race {
+ if base.Flag.Race {
r = mkcall("selectsetpc", nil, nil, nod(OADDR, nod(OINDEX, pcs, nodintconst(int64(i))), nil))
init = append(init, r)
}
}
if nsends+nrecvs != ncas {
- Fatalf("walkselectcases: miscount: %v + %v != %v", nsends, nrecvs, ncas)
+ base.Fatalf("walkselectcases: miscount: %v + %v != %v", nsends, nrecvs, ncas)
}
// run the select
- lineno = sellineno
+ base.Pos = sellineno
chosen := temp(types.Types[TINT])
recvOK := temp(types.Types[TBOOL])
r = nod(OAS2, nil, nil)
// selv and order are no longer alive after selectgo.
init = append(init, nod(OVARKILL, selv, nil))
init = append(init, nod(OVARKILL, order, nil))
- if Flag.Race {
+ if base.Flag.Race {
init = append(init, nod(OVARKILL, pcs, nil))
}
package gc
import (
+ "cmd/compile/internal/base"
"cmd/compile/internal/types"
"cmd/internal/obj"
"fmt"
// staticInit adds an initialization statement n to the schedule.
func (s *InitSchedule) staticInit(n *Node) {
if !s.tryStaticInit(n) {
- if Flag.Percent != 0 {
+ if base.Flag.Percent != 0 {
Dump("nonstatic", n)
}
s.append(n)
return true
}
lno := setlineno(n)
- defer func() { lineno = lno }()
+ defer func() { base.Pos = lno }()
return s.staticassign(n.Left, n.Right)
}
case OCLOSURE:
if hasemptycvars(r) {
- if Debug.Closure > 0 {
- Warnl(r.Pos, "closure converted to global")
+ if base.Debug.Closure > 0 {
+ base.WarnfAt(r.Pos, "closure converted to global")
}
// Closures with no captured variables are globals,
// so the assignment can be done at link time.
case OSTRUCTLIT:
for _, r := range n.List.Slice() {
if r.Op != OSTRUCTKEY {
- Fatalf("isStaticCompositeLiteral: rhs not OSTRUCTKEY: %v", r)
+ base.Fatalf("isStaticCompositeLiteral: rhs not OSTRUCTKEY: %v", r)
}
if !isStaticCompositeLiteral(r.Left) {
return false
if r.Op == OKEY {
k = indexconst(r.Left)
if k < 0 {
- Fatalf("fixedlit: invalid index %v", r.Left)
+ base.Fatalf("fixedlit: invalid index %v", r.Left)
}
r = r.Right
}
case OSTRUCTLIT:
splitnode = func(r *Node) (*Node, *Node) {
if r.Op != OSTRUCTKEY {
- Fatalf("fixedlit: rhs not OSTRUCTKEY: %v", r)
+ base.Fatalf("fixedlit: rhs not OSTRUCTKEY: %v", r)
}
if r.Sym.IsBlank() || isBlank {
return nblank, r.Left
return nodSym(ODOT, var_, r.Sym), r.Left
}
default:
- Fatalf("fixedlit bad op: %v", n.Op)
+ base.Fatalf("fixedlit bad op: %v", n.Op)
}
for _, r := range n.List.Slice() {
a = walkstmt(a)
init.Append(a)
default:
- Fatalf("fixedlit: bad kind %d", kind)
+ base.Fatalf("fixedlit: bad kind %d", kind)
}
}
var_ = typecheck(var_, ctxExpr|ctxAssign)
nam := stataddr(var_)
if nam == nil || nam.Class() != PEXTERN {
- Fatalf("slicelit: %v", var_)
+ base.Fatalf("slicelit: %v", var_)
}
slicesym(nam, vstat, t.NumElem())
return
if value.Op == OKEY {
index = indexconst(value.Left)
if index < 0 {
- Fatalf("slicelit: invalid index %v", value.Left)
+ base.Fatalf("slicelit: invalid index %v", value.Left)
}
value = value.Right
}
// All remaining entries are static. Double-check that.
for _, r := range entries {
if !isStaticCompositeLiteral(r.Left) || !isStaticCompositeLiteral(r.Right) {
- Fatalf("maplit: entry is not a literal: %v", r)
+ base.Fatalf("maplit: entry is not a literal: %v", r)
}
}
t := n.Type
switch n.Op {
default:
- Fatalf("anylit: not lit, op=%v node=%v", n.Op, n)
+ base.Fatalf("anylit: not lit, op=%v node=%v", n.Op, n)
case ONAME, OMETHEXPR:
a := nod(OAS, var_, n)
case OPTRLIT:
if !t.IsPtr() {
- Fatalf("anylit: not ptr")
+ base.Fatalf("anylit: not ptr")
}
var r *Node
case OSTRUCTLIT, OARRAYLIT:
if !t.IsStruct() && !t.IsArray() {
- Fatalf("anylit: not struct/array")
+ base.Fatalf("anylit: not struct/array")
}
if var_.isSimpleName() && n.List.Len() > 4 {
case OMAPLIT:
if !t.IsMap() {
- Fatalf("anylit: not map")
+ base.Fatalf("anylit: not map")
}
maplit(n, var_, init)
}
s.initplans[n] = p
switch n.Op {
default:
- Fatalf("initplan")
+ base.Fatalf("initplan")
case OARRAYLIT, OSLICELIT:
var k int64
if a.Op == OKEY {
k = indexconst(a.Left)
if k < 0 {
- Fatalf("initplan arraylit: invalid index %v", a.Left)
+ base.Fatalf("initplan arraylit: invalid index %v", a.Left)
}
a = a.Right
}
case OSTRUCTLIT:
for _, a := range n.List.Slice() {
if a.Op != OSTRUCTKEY {
- Fatalf("initplan structlit")
+ base.Fatalf("initplan structlit")
}
if a.Sym.IsBlank() {
continue
case OMAPLIT:
for _, a := range n.List.Slice() {
if a.Op != OKEY {
- Fatalf("initplan maplit")
+ base.Fatalf("initplan maplit")
}
s.addvalue(p, -1, a.Right)
}
func genAsStatic(as *Node) {
if as.Left.Type == nil {
- Fatalf("genAsStatic as.Left not typechecked")
+ base.Fatalf("genAsStatic as.Left not typechecked")
}
nam := stataddr(as.Left)
if nam == nil || (nam.Class() != PEXTERN && as.Left != nblank) {
- Fatalf("genAsStatic: lhs %v", as.Left)
+ base.Fatalf("genAsStatic: lhs %v", as.Left)
}
switch {
case (as.Right.Op == ONAME || as.Right.Op == OMETHEXPR) && as.Right.Class() == PFUNC:
pfuncsym(nam, as.Right)
default:
- Fatalf("genAsStatic: rhs %v", as.Right)
+ base.Fatalf("genAsStatic: rhs %v", as.Right)
}
}
"bufio"
"bytes"
+ "cmd/compile/internal/base"
"cmd/compile/internal/ssa"
"cmd/compile/internal/types"
"cmd/internal/obj"
_ = types.NewPtr(types.Types[TINT64]) // *int64
_ = types.NewPtr(types.Errortype) // *error
types.NewPtrCacheEnabled = false
- ssaConfig = ssa.NewConfig(thearch.LinkArch.Name, *types_, Ctxt, Flag.N == 0)
+ ssaConfig = ssa.NewConfig(thearch.LinkArch.Name, *types_, base.Ctxt, base.Flag.N == 0)
ssaConfig.SoftFloat = thearch.SoftFloat
- ssaConfig.Race = Flag.Race
- ssaCaches = make([]ssa.Cache, Flag.LowerC)
+ ssaConfig.Race = base.Flag.Race
+ ssaCaches = make([]ssa.Cache, base.Flag.LowerC)
// Set up some runtime functions we'll need to call.
assertE2I = sysfunc("assertE2I")
// - Size of the argument
// - Offset of where argument should be placed in the args frame when making call
func (s *state) emitOpenDeferInfo() {
- x := Ctxt.Lookup(s.curfn.Func.lsym.Name + ".opendefer")
+ x := base.Ctxt.Lookup(s.curfn.Func.lsym.Name + ".opendefer")
s.curfn.Func.lsym.Func().OpenCodedDeferInfo = x
off := 0
name := fn.funcname()
printssa := false
if ssaDump != "" { // match either a simple name e.g. "(*Reader).Reset", or a package.name e.g. "compress/gzip.(*Reader).Reset"
- printssa = name == ssaDump || Ctxt.Pkgpath+"."+name == ssaDump
+ printssa = name == ssaDump || base.Ctxt.Pkgpath+"."+name == ssaDump
}
var astBuf *bytes.Buffer
if printssa {
if printssa {
ssaDF := ssaDumpFile
if ssaDir != "" {
- ssaDF = filepath.Join(ssaDir, Ctxt.Pkgpath+"."+name+".html")
+ ssaDF = filepath.Join(ssaDir, base.Ctxt.Pkgpath+"."+name+".html")
ssaD := filepath.Dir(ssaDF)
os.MkdirAll(ssaD, 0755)
}
s.fwdVars = map[*Node]*ssa.Value{}
s.startmem = s.entryNewValue0(ssa.OpInitMem, types.TypeMem)
- s.hasOpenDefers = Flag.N == 0 && s.hasdefer && !s.curfn.Func.OpenCodedDeferDisallowed()
+ s.hasOpenDefers = base.Flag.N == 0 && s.hasdefer && !s.curfn.Func.OpenCodedDeferDisallowed()
switch {
- case s.hasOpenDefers && (Ctxt.Flag_shared || Ctxt.Flag_dynlink) && thearch.LinkArch.Name == "386":
+ case s.hasOpenDefers && (base.Ctxt.Flag_shared || base.Ctxt.Flag_dynlink) && thearch.LinkArch.Name == "386":
// Don't support open-coded defers for 386 ONLY when using shared
// libraries, because there is extra code (added by rewriteToUseGot())
// preceding the deferreturn/ret code that is generated by gencallret()
func dumpSourcesColumn(writer *ssa.HTMLWriter, fn *Node) {
// Read sources of target function fn.
- fname := Ctxt.PosTable.Pos(fn.Pos).Filename()
+ fname := base.Ctxt.PosTable.Pos(fn.Pos).Filename()
targetFn, err := readFuncLines(fname, fn.Pos.Line(), fn.Func.Endlineno.Line())
if err != nil {
writer.Logf("cannot read sources for function %v: %v", fn, err)
} else {
elno = fi.Name.Defn.Func.Endlineno
}
- fname := Ctxt.PosTable.Pos(fi.Pos).Filename()
+ fname := base.Ctxt.PosTable.Pos(fi.Pos).Filename()
fnLines, err := readFuncLines(fname, fi.Pos.Line(), elno.Line())
if err != nil {
writer.Logf("cannot read sources for inlined function %v: %v", fi, err)
// the frontend may emit node with line number missing,
// use the parent line number in this case.
line = s.peekPos()
- if Flag.K != 0 {
- Warn("buildssa: unknown position (line 0)")
+ if base.Flag.K != 0 {
+ base.Warn("buildssa: unknown position (line 0)")
}
} else {
s.lastPos = line
var fn *obj.LSym
needWidth := false
- if Flag.MSan {
+ if base.Flag.MSan {
fn = msanread
if wr {
fn = msanwrite
}
needWidth = true
- } else if Flag.Race && t.NumComponents(types.CountBlankFields) > 1 {
+ } else if base.Flag.Race && t.NumComponents(types.CountBlankFields) > 1 {
// for composite objects we have to write every address
// because a write might happen to any subobject.
// composites with only one element don't have subobjects, though.
fn = racewriterange
}
needWidth = true
- } else if Flag.Race {
+ } else if base.Flag.Race {
// for non-composite objects we can write just the start
// address, as any write must write the first byte.
fn = raceread
case OCALLMETH, OCALLINTER:
s.callResult(n, callNormal)
if n.Op == OCALLFUNC && n.Left.Op == ONAME && n.Left.Class() == PFUNC {
- if fn := n.Left.Sym.Name; Flag.CompilingRuntime && fn == "throw" ||
+ if fn := n.Left.Sym.Name; base.Flag.CompilingRuntime && fn == "throw" ||
n.Left.Sym.Pkg == Runtimepkg && (fn == "throwinit" || fn == "gopanic" || fn == "panicwrap" || fn == "block" || fn == "panicmakeslicelen" || fn == "panicmakeslicecap") {
m := s.mem()
b := s.endBlock()
}
}
case ODEFER:
- if Debug.Defer > 0 {
+ if base.Debug.Defer > 0 {
var defertype string
if s.hasOpenDefers {
defertype = "open-coded"
} else {
defertype = "heap-allocated"
}
- Warnl(n.Pos, "%s defer", defertype)
+ base.WarnfAt(n.Pos, "%s defer", defertype)
}
if s.hasOpenDefers {
s.openDeferRecord(n.Left)
// Check whether we're writing the result of an append back to the same slice.
// If so, we handle it specially to avoid write barriers on the fast
// (non-growth) path.
- if !samesafeexpr(n.Left, rhs.List.First()) || Flag.N != 0 {
+ if !samesafeexpr(n.Left, rhs.List.First()) || base.Flag.N != 0 {
break
}
// If the slice can be SSA'd, it'll be on the stack,
// so there will be no write barriers,
// so there's no need to attempt to prevent them.
if s.canSSA(n.Left) {
- if Debug.Append > 0 { // replicating old diagnostic message
- Warnl(n.Pos, "append: len-only update (in local slice)")
+ if base.Debug.Append > 0 { // replicating old diagnostic message
+ base.WarnfAt(n.Pos, "append: len-only update (in local slice)")
}
break
}
- if Debug.Append > 0 {
- Warnl(n.Pos, "append: len-only update")
+ if base.Debug.Append > 0 {
+ base.WarnfAt(n.Pos, "append: len-only update")
}
s.append(rhs, true)
return
case TCOMPLEX128:
return types.Types[TFLOAT64]
}
- Fatalf("unexpected type: %v", t)
+ base.Fatalf("unexpected type: %v", t)
return nil
}
case TFLOAT64:
return types.Types[TCOMPLEX128]
}
- Fatalf("unexpected type: %v", t)
+ base.Fatalf("unexpected type: %v", t)
return nil
}
}
pkg := sym.Pkg.Path
if sym.Pkg == localpkg {
- pkg = Ctxt.Pkgpath
+ pkg = base.Ctxt.Pkgpath
}
- if Flag.Race && pkg == "sync/atomic" {
+ if base.Flag.Race && pkg == "sync/atomic" {
// The race detector needs to be able to intercept these calls.
// We can't intrinsify them.
return nil
if x.Op == ssa.OpSelect0 || x.Op == ssa.OpSelect1 {
x = x.Args[0]
}
- Warnl(n.Pos, "intrinsic substitution for %v with %s", n.Left.Sym.Name, x.LongString())
+ base.WarnfAt(n.Pos, "intrinsic substitution for %v with %s", n.Left.Sym.Name, x.LongString())
}
return v
}
}
} else if n.Op == OCALLMETH {
if fn.Op != ODOTMETH {
- Fatalf("OCALLMETH: n.Left not an ODOTMETH: %v", fn)
+ base.Fatalf("OCALLMETH: n.Left not an ODOTMETH: %v", fn)
}
closureVal := s.getMethodClosure(fn)
// We must always store the function value in a stack slot for the
opendefer.closureNode = closure.Aux.(*Node)
} else {
if fn.Op != ODOTINTER {
- Fatalf("OCALLINTER: n.Left not an ODOTINTER: %v", fn.Op)
+ base.Fatalf("OCALLINTER: n.Left not an ODOTINTER: %v", fn.Op)
}
closure, rcvr := s.getClosureAndRcvr(fn)
opendefer.closure = s.openDeferSave(nil, closure.Type, closure)
// Generate code to call the function call of the defer, using the
// closure/receiver/args that were stored in argtmps at the point
// of the defer statement.
- argStart := Ctxt.FixedFrameSize()
+ argStart := base.Ctxt.FixedFrameSize()
fn := r.n.Left
stksize := fn.Type.ArgWidth()
var ACArgs []ssa.Param
nf := res.NumFields()
for i := 0; i < nf; i++ {
fp := res.Field(i)
- ACResults = append(ACResults, ssa.Param{Type: fp.Type, Offset: int32(fp.Offset + Ctxt.FixedFrameSize())})
+ ACResults = append(ACResults, ssa.Param{Type: fp.Type, Offset: int32(fp.Offset + base.Ctxt.FixedFrameSize())})
}
}
}
// Call runtime.deferprocStack with pointer to _defer record.
- ACArgs = append(ACArgs, ssa.Param{Type: types.Types[TUINTPTR], Offset: int32(Ctxt.FixedFrameSize())})
+ ACArgs = append(ACArgs, ssa.Param{Type: types.Types[TUINTPTR], Offset: int32(base.Ctxt.FixedFrameSize())})
aux := ssa.StaticAuxCall(deferprocStack, ACArgs, ACResults)
if testLateExpansion {
callArgs = append(callArgs, addr, s.mem())
call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
call.AddArgs(callArgs...)
} else {
- arg0 := s.constOffPtrSP(types.Types[TUINTPTR], Ctxt.FixedFrameSize())
+ arg0 := s.constOffPtrSP(types.Types[TUINTPTR], base.Ctxt.FixedFrameSize())
s.store(types.Types[TUINTPTR], arg0, addr)
call = s.newValue1A(ssa.OpStaticCall, types.TypeMem, aux, s.mem())
}
} else {
// Store arguments to stack, including defer/go arguments and receiver for method calls.
// These are written in SP-offset order.
- argStart := Ctxt.FixedFrameSize()
+ argStart := base.Ctxt.FixedFrameSize()
// Defer/go args.
if k != callNormal {
// Write argsize and closure (args to newproc/deferproc).
if testLateExpansion {
return s.newValue1I(ssa.OpSelectNAddr, pt, 0, call)
}
- return s.constOffPtrSP(pt, fp.Offset+Ctxt.FixedFrameSize())
+ return s.constOffPtrSP(pt, fp.Offset+base.Ctxt.FixedFrameSize())
}
if testLateExpansion {
return s.newValue1I(ssa.OpSelectN, fp.Type, 0, call)
}
- return s.load(n.Type, s.constOffPtrSP(types.NewPtr(fp.Type), fp.Offset+Ctxt.FixedFrameSize()))
+ return s.load(n.Type, s.constOffPtrSP(types.NewPtr(fp.Type), fp.Offset+base.Ctxt.FixedFrameSize()))
}
// maybeNilCheckClosure checks if a nil check of a closure is needed in some
// canSSA reports whether n is SSA-able.
// n must be an ONAME (or an ODOT sequence with an ONAME base).
func (s *state) canSSA(n *Node) bool {
- if Flag.N != 0 {
+ if base.Flag.N != 0 {
return false
}
for n.Op == ODOT || (n.Op == OINDEX && n.Left.Type.IsArray()) {
// Used only for automatically inserted nil checks,
// not for user code like 'x != nil'.
func (s *state) nilCheck(ptr *ssa.Value) {
- if Debug.DisableNil != 0 || s.curfn.Func.NilCheckDisabled() {
+ if base.Debug.DisableNil != 0 || s.curfn.Func.NilCheckDisabled() {
return
}
s.newValue2(ssa.OpNilCheck, types.TypeVoid, ptr, s.mem())
func (s *state) boundsCheck(idx, len *ssa.Value, kind ssa.BoundsKind, bounded bool) *ssa.Value {
idx = s.extendIndex(idx, len, kind, bounded)
- if bounded || Flag.B != 0 {
+ if bounded || base.Flag.B != 0 {
// If bounded or bounds checking is flag-disabled, then no check necessary,
// just return the extended index.
//
s.startBlock(bNext)
// In Spectre index mode, apply an appropriate mask to avoid speculative out-of-bounds accesses.
- if Flag.Cfg.SpectreIndex {
+ if base.Flag.Cfg.SpectreIndex {
op := ssa.OpSpectreIndex
if kind != ssa.BoundsIndex && kind != ssa.BoundsIndexU {
op = ssa.OpSpectreSliceIndex
b.Likely = ssa.BranchLikely
bNext := s.f.NewBlock(ssa.BlockPlain)
line := s.peekPos()
- pos := Ctxt.PosTable.Pos(line)
+ pos := base.Ctxt.PosTable.Pos(line)
fl := funcLine{f: fn, base: pos.Base(), line: pos.Line()}
bPanic := s.panics[fl]
if bPanic == nil {
func (s *state) rtcall(fn *obj.LSym, returns bool, results []*types.Type, args ...*ssa.Value) []*ssa.Value {
s.prevCall = nil
// Write args to the stack
- off := Ctxt.FixedFrameSize()
+ off := base.Ctxt.FixedFrameSize()
testLateExpansion := ssa.LateCallExpansionEnabledWithin(s.f)
var ACArgs []ssa.Param
var ACResults []ssa.Param
b := s.endBlock()
b.Kind = ssa.BlockExit
b.SetControl(call)
- call.AuxInt = off - Ctxt.FixedFrameSize()
+ call.AuxInt = off - base.Ctxt.FixedFrameSize()
if len(results) > 0 {
s.Fatalf("panic call can't have results")
}
if n.Type.IsEmptyInterface() {
// Converting to an empty interface.
// Input could be an empty or nonempty interface.
- if Debug.TypeAssert > 0 {
- Warnl(n.Pos, "type assertion inlined")
+ if base.Debug.TypeAssert > 0 {
+ base.WarnfAt(n.Pos, "type assertion inlined")
}
// Get itab/type field from input.
return
}
// converting to a nonempty interface needs a runtime call.
- if Debug.TypeAssert > 0 {
- Warnl(n.Pos, "type assertion not inlined")
+ if base.Debug.TypeAssert > 0 {
+ base.WarnfAt(n.Pos, "type assertion not inlined")
}
if n.Left.Type.IsEmptyInterface() {
if commaok {
return s.rtcall(assertI2I, true, []*types.Type{n.Type}, target, iface)[0], nil
}
- if Debug.TypeAssert > 0 {
- Warnl(n.Pos, "type assertion inlined")
+ if base.Debug.TypeAssert > 0 {
+ base.WarnfAt(n.Pos, "type assertion inlined")
}
// Converting to a concrete type.
direct := isdirectiface(n.Type)
itab := s.newValue1(ssa.OpITab, byteptr, iface) // type word of interface
- if Debug.TypeAssert > 0 {
- Warnl(n.Pos, "type assertion inlined")
+ if base.Debug.TypeAssert > 0 {
+ base.WarnfAt(n.Pos, "type assertion inlined")
}
var targetITab *ssa.Value
if n.Left.Type.IsEmptyInterface() {
p.To.Name = obj.NAME_EXTERN
p.To.Sym = x
- if Flag.Live != 0 {
+ if base.Flag.Live != 0 {
for _, v := range vars {
- Warnl(v.Pos, "stack object %v %s", v, v.Type.String())
+ base.WarnfAt(v.Pos, "stack object %v %s", v, v.Type.String())
}
}
}
s.ScratchFpMem = e.scratchFpMem
- if Ctxt.Flag_locationlists {
+ if base.Ctxt.Flag_locationlists {
if cap(f.Cache.ValueToProgAfter) < f.NumValues() {
f.Cache.ValueToProgAfter = make([]*obj.Prog, f.NumValues())
}
thearch.SSAGenValue(&s, v)
}
- if Ctxt.Flag_locationlists {
+ if base.Ctxt.Flag_locationlists {
valueToProgAfter[v.ID] = s.pp.next
}
}
// Emit control flow instructions for block
var next *ssa.Block
- if i < len(f.Blocks)-1 && Flag.N == 0 {
+ if i < len(f.Blocks)-1 && base.Flag.N == 0 {
// If -N, leave next==nil so every block with successors
// ends in a JMP (except call blocks - plive doesn't like
// select{send,recv} followed by a JMP call). Helps keep
}
}
- if Ctxt.Flag_locationlists {
- e.curfn.Func.DebugInfo = ssa.BuildFuncDebug(Ctxt, f, Debug.LocationLists > 1, stackOffset)
+ if base.Ctxt.Flag_locationlists {
+ e.curfn.Func.DebugInfo = ssa.BuildFuncDebug(base.Ctxt, f, base.Debug.LocationLists > 1, stackOffset)
bstart := s.bstart
// Note that at this moment, Prog.Pc is a sequence number; it's
// not a real PC until after assembly, so this mapping has to
} else {
lo = s.newValue1(ssa.OpInt64Lo, types.Types[TUINT], idx)
}
- if bounded || Flag.B != 0 {
+ if bounded || base.Flag.B != 0 {
return lo
}
bNext := s.f.NewBlock(ssa.BlockPlain)
func CheckLoweredGetClosurePtr(v *ssa.Value) {
entry := v.Block.Func.Entry
if entry != v.Block || entry.Values[0] != v {
- Fatalf("in %s, badly placed LoweredGetClosurePtr: %v %v", v.Block.Func.Name, v.Block, v)
+ base.Fatalf("in %s, badly placed LoweredGetClosurePtr: %v %v", v.Block.Func.Name, v.Block, v)
}
}
case sys.ARM, sys.ARM64, sys.MIPS, sys.MIPS64:
p.To.Type = obj.TYPE_MEM
default:
- Fatalf("unknown indirect call family")
+ base.Fatalf("unknown indirect call family")
}
p.To.Reg = v.Args[0].Reg()
}
if !idx.StackMapValid() {
// See Liveness.hasStackMap.
if sym, ok := v.Aux.(*ssa.AuxCall); !ok || !(sym.Fn == typedmemclr || sym.Fn == typedmemmove) {
- Fatalf("missing stack map index for %v", v.LongString())
+ base.Fatalf("missing stack map index for %v", v.LongString())
}
}
}
func (e *ssafn) Line(pos src.XPos) string {
- return linestr(pos)
+ return base.FmtPos(pos)
}
// Log logs a message from the compiler.
// Fatal reports a compiler error and exits.
func (e *ssafn) Fatalf(pos src.XPos, msg string, args ...interface{}) {
- lineno = pos
+ base.Pos = pos
nargs := append([]interface{}{e.curfn.funcname()}, args...)
- Fatalf("'%s': "+msg, nargs...)
+ base.Fatalf("'%s': "+msg, nargs...)
}
// Warnl reports a "warning", which is usually flag-triggered
// logging output for the benefit of tests.
func (e *ssafn) Warnl(pos src.XPos, fmt_ string, args ...interface{}) {
- Warnl(pos, fmt_, args...)
+ base.WarnfAt(pos, fmt_, args...)
}
func (e *ssafn) Debug_checknil() bool {
- return Debug.Nil != 0
+ return base.Debug.Nil != 0
}
func (e *ssafn) UseWriteBarrier() bool {
- return Flag.WB
+ return base.Flag.WB
}
func (e *ssafn) Syslook(name string) *obj.LSym {
}
func (e *ssafn) MyImportPath() string {
- return Ctxt.Pkgpath
+ return base.Ctxt.Pkgpath
}
func (n *Node) Typ() *types.Type {
case PAUTO:
return ssa.ClassAuto
default:
- Fatalf("untranslatable storage class for %v: %s", n, n.Class())
+ base.Fatalf("untranslatable storage class for %v: %s", n, n.Class())
return 0
}
}
package gc
import (
+ "cmd/compile/internal/base"
"cmd/compile/internal/types"
"cmd/internal/src"
"crypto/md5"
}
if !n.Pos.IsKnown() {
- if Flag.K != 0 {
- Warn("setlineno: unknown position (line 0)")
+ if base.Flag.K != 0 {
+ base.Warn("setlineno: unknown position (line 0)")
}
return false
}
}
func setlineno(n *Node) src.XPos {
- lno := lineno
+ lno := base.Pos
if n != nil && hasUniquePos(n) {
- lineno = n.Pos
+ base.Pos = n.Pos
}
return lno
}
// user labels.
func autolabel(prefix string) *types.Sym {
if prefix[0] != '.' {
- Fatalf("autolabel prefix must start with '.', have %q", prefix)
+ base.Fatalf("autolabel prefix must start with '.', have %q", prefix)
}
fn := Curfn
if Curfn == nil {
- Fatalf("autolabel outside function")
+ base.Fatalf("autolabel outside function")
}
n := fn.Func.Label
fn.Func.Label++
s1 := lookup(s.Name)
if s1.Def != nil {
pkgerror := fmt.Sprintf("during import %q", opkg.Path)
- redeclare(lineno, s1, pkgerror)
+ redeclare(base.Pos, s1, pkgerror)
continue
}
s1.Block = s.Block
if asNode(s1.Def).Name == nil {
Dump("s1def", asNode(s1.Def))
- Fatalf("missing Name")
+ base.Fatalf("missing Name")
}
asNode(s1.Def).Name.Pack = pack
s1.Origpkg = opkg
if n == 0 {
// can't possibly be used - there were no symbols
- yyerrorl(pack.Pos, "imported and not used: %q", opkg.Path)
+ base.ErrorfAt(pack.Pos, "imported and not used: %q", opkg.Path)
}
}
func nod(op Op, nleft, nright *Node) *Node {
- return nodl(lineno, op, nleft, nright)
+ return nodl(base.Pos, op, nleft, nright)
}
func nodl(pos src.XPos, op Op, nleft, nright *Node) *Node {
n.Func = &x.f
n.Func.Decl = n
case ONAME:
- Fatalf("use newname instead")
+ base.Fatalf("use newname instead")
case OLABEL, OPACK:
var x struct {
n Node
// newname returns a new ONAME Node associated with symbol s.
func newname(s *types.Sym) *Node {
- n := newnamel(lineno, s)
+ n := newnamel(base.Pos, s)
n.Name.Curfn = Curfn
return n
}
// The caller is responsible for setting n.Name.Curfn.
func newnamel(pos src.XPos, s *types.Sym) *Node {
if s == nil {
- Fatalf("newnamel nil")
+ base.Fatalf("newnamel nil")
}
var x struct {
// nodSym makes a Node with Op op and with the Left field set to left
// and the Sym field set to sym. This is for ODOT and friends.
func nodSym(op Op, left *Node, sym *types.Sym) *Node {
- return nodlSym(lineno, op, left, sym)
+ return nodlSym(base.Pos, op, left, sym)
}
// nodlSym makes a Node with position Pos, with Op op, and with the Left field set to left
}
if m.Name != nil && n.Op != ODCLFIELD {
Dump("treecopy", n)
- Fatalf("treecopy Name")
+ base.Fatalf("treecopy Name")
}
return m
}
if t.Etype == TBLANK && n.Type.Etype == TNIL {
- yyerror("use of untyped nil")
+ base.Errorf("use of untyped nil")
}
n = convlit1(n, t, false, context)
op, why := assignop(n.Type, t)
if op == OXXX {
- yyerror("cannot use %L as type %v in %s%s", n, t, context(), why)
+ base.Errorf("cannot use %L as type %v in %s%s", n, t, context(), why)
op = OCONV
}
s := n.List.Slice()
return s[0], s[1], s[2]
}
- Fatalf("SliceBounds op %v: %v", n.Op, n)
+ base.Fatalf("SliceBounds op %v: %v", n.Op, n)
return nil, nil, nil
}
switch n.Op {
case OSLICE, OSLICEARR, OSLICESTR:
if max != nil {
- Fatalf("SetSliceBounds %v given three bounds", n.Op)
+ base.Fatalf("SetSliceBounds %v given three bounds", n.Op)
}
s := n.List.Slice()
if s == nil {
s[2] = max
return
}
- Fatalf("SetSliceBounds op %v: %v", n.Op, n)
+ base.Fatalf("SetSliceBounds op %v: %v", n.Op, n)
}
// IsSlice3 reports whether o is a slice3 op (OSLICE3, OSLICE3ARR).
case OSLICE3, OSLICE3ARR:
return true
}
- Fatalf("IsSlice3 op %v", o)
+ base.Fatalf("IsSlice3 op %v", o)
return false
}
var init Nodes
c := cheapexpr(n, &init)
if c != n || init.Len() != 0 {
- Fatalf("backingArrayPtrLen not cheap: %v", n)
+ base.Fatalf("backingArrayPtrLen not cheap: %v", n)
}
ptr = nod(OSPTR, n, nil)
if n.Type.IsString() {
// associated with the label n, if any.
func (n *Node) labeledControl() *Node {
if n.Op != OLABEL {
- Fatalf("labeledControl %v", n.Op)
+ base.Fatalf("labeledControl %v", n.Op)
}
ctl := n.Name.Defn
if ctl == nil {
func syslook(name string) *Node {
s := Runtimepkg.Lookup(name)
if s == nil || s.Def == nil {
- Fatalf("syslook: can't find runtime.%s", name)
+ base.Fatalf("syslook: can't find runtime.%s", name)
}
return asNode(s.Def)
}
switch n.Op {
case OLITERAL, ONIL, ONAME, OTYPE:
if n.HasCall() {
- Fatalf("OLITERAL/ONAME/OTYPE should never have calls: %+v", n)
+ base.Fatalf("OLITERAL/ONAME/OTYPE should never have calls: %+v", n)
}
return false
case OCALL, OCALLFUNC, OCALLMETH, OCALLINTER:
}
}
- yyerror("illegal types for operand: %v%s", op, s)
+ base.Errorf("illegal types for operand: %v%s", op, s)
}
// brcom returns !(op).
case OGE:
return OLT
}
- Fatalf("brcom: no com for %v\n", op)
+ base.Fatalf("brcom: no com for %v\n", op)
return op
}
case OGE:
return OLE
}
- Fatalf("brrev: no rev for %v\n", op)
+ base.Fatalf("brrev: no rev for %v\n", op)
return op
}
// make a copy; must not be used as an lvalue
if islvalue(n) {
- Fatalf("missing lvalue case in safeexpr: %v", n)
+ base.Fatalf("missing lvalue case in safeexpr: %v", n)
}
return cheapexpr(n, init)
}
n.Left.SetImplicit(true)
}
case ambig:
- yyerror("ambiguous selector %v", n)
+ base.Errorf("ambiguous selector %v", n)
n.Left = nil
}
// method - M func (t T)(), a TFIELD type struct
// newnam - the eventual mangled name of this function
func genwrapper(rcvr *types.Type, method *types.Field, newnam *types.Sym) {
- if false && Flag.LowerR != 0 {
+ if false && base.Flag.LowerR != 0 {
fmt.Printf("genwrapper rcvrtype=%v method=%v newnam=%v\n", rcvr, method, newnam)
}
return
}
- lineno = autogeneratedPos
+ base.Pos = autogeneratedPos
dclcontext = PEXTERN
tfn := nod(OTFUNC, nil, nil)
// the TOC to the appropriate value for that module. But if it returns
// directly to the wrapper's caller, nothing will reset it to the correct
// value for that function.
- if !instrumenting && rcvr.IsPtr() && methodrcvr.IsPtr() && method.Embedded != 0 && !isifacemethod(method.Type) && !(thearch.LinkArch.Name == "ppc64le" && Ctxt.Flag_dynlink) {
+ if !instrumenting && rcvr.IsPtr() && methodrcvr.IsPtr() && method.Embedded != 0 && !isifacemethod(method.Type) && !(thearch.LinkArch.Name == "ppc64le" && base.Ctxt.Flag_dynlink) {
// generate tail call: adjust pointer receiver and jump to embedded method.
dot = dot.Left // skip final .M
// TODO(mdempsky): Remove dependency on dotlist.
fn.Nbody.Append(call)
}
- if false && Flag.LowerR != 0 {
+ if false && base.Flag.LowerR != 0 {
dumplist("genwrapper body", fn.Nbody)
}
funcbody()
- if Debug.DclStack != 0 {
+ if base.Debug.DclStack != 0 {
testdclstack()
}
path, ambig := dotpath(s, t, &m, ignorecase)
if path == nil {
if ambig {
- yyerror("%v.%v is ambiguous", t, s)
+ base.Errorf("%v.%v is ambiguous", t, s)
}
return nil, false
}
}
if !m.IsMethod() {
- yyerror("%v.%v is a field, not a method", t, s)
+ base.Errorf("%v.%v is a field, not a method", t, s)
return nil, followptr
}
// the method does not exist for value types.
rcvr := tm.Type.Recv().Type
if rcvr.IsPtr() && !t0.IsPtr() && !followptr && !isifacemethod(tm.Type) {
- if false && Flag.LowerR != 0 {
- yyerror("interface pointer mismatch")
+ if false && base.Flag.LowerR != 0 {
+ base.Errorf("interface pointer mismatch")
}
*m = im
func isbadimport(path string, allowSpace bool) bool {
if strings.Contains(path, "\x00") {
- yyerror("import path contains NUL")
+ base.Errorf("import path contains NUL")
return true
}
for _, ri := range reservedimports {
if path == ri {
- yyerror("import path %q is reserved and cannot be used", path)
+ base.Errorf("import path %q is reserved and cannot be used", path)
return true
}
}
for _, r := range path {
if r == utf8.RuneError {
- yyerror("import path contains invalid UTF-8 sequence: %q", path)
+ base.Errorf("import path contains invalid UTF-8 sequence: %q", path)
return true
}
if r < 0x20 || r == 0x7f {
- yyerror("import path contains control character: %q", path)
+ base.Errorf("import path contains control character: %q", path)
return true
}
if r == '\\' {
- yyerror("import path contains backslash; use slash: %q", path)
+ base.Errorf("import path contains backslash; use slash: %q", path)
return true
}
if !allowSpace && unicode.IsSpace(r) {
- yyerror("import path contains space character: %q", path)
+ base.Errorf("import path contains space character: %q", path)
return true
}
if strings.ContainsRune("!\"#$%&'()*,:;<=>?[]^`{|}", r) {
- yyerror("import path contains invalid character '%c': %q", r, path)
+ base.Errorf("import path contains invalid character '%c': %q", r, path)
return true
}
}
// It follows the pointer if !isdirectiface(t).
func ifaceData(pos src.XPos, n *Node, t *types.Type) *Node {
if t.IsInterface() {
- Fatalf("ifaceData interface: %v", t)
+ base.Fatalf("ifaceData interface: %v", t)
}
ptr := nodlSym(pos, OIDATA, n, nil)
if isdirectiface(t) {
func typePos(t *types.Type) src.XPos {
n := asNode(t.Nod)
if n == nil || !n.Pos.IsKnown() {
- Fatalf("bad type: %v", t)
+ base.Fatalf("bad type: %v", t)
}
return n.Pos
}
package gc
import (
+ "cmd/compile/internal/base"
"cmd/compile/internal/types"
"cmd/internal/src"
"go/constant"
n.Left.Right = typecheck(n.Left.Right, ctxExpr)
t := n.Left.Right.Type
if t != nil && !t.IsInterface() {
- yyerrorl(n.Pos, "cannot type switch on non-interface value %L", n.Left.Right)
+ base.ErrorfAt(n.Pos, "cannot type switch on non-interface value %L", n.Left.Right)
t = nil
}
// declaration itself. So if there are no cases, we won't
// notice that it went unused.
if v := n.Left.Left; v != nil && !v.isBlank() && n.List.Len() == 0 {
- yyerrorl(v.Pos, "%v declared but not used", v.Sym)
+ base.ErrorfAt(v.Pos, "%v declared but not used", v.Sym)
}
var defCase, nilCase *Node
ls := ncase.List.Slice()
if len(ls) == 0 { // default:
if defCase != nil {
- yyerrorl(ncase.Pos, "multiple defaults in switch (first at %v)", defCase.Line())
+ base.ErrorfAt(ncase.Pos, "multiple defaults in switch (first at %v)", defCase.Line())
} else {
defCase = ncase
}
switch {
case n1.isNil(): // case nil:
if nilCase != nil {
- yyerrorl(ncase.Pos, "multiple nil cases in type switch (first at %v)", nilCase.Line())
+ base.ErrorfAt(ncase.Pos, "multiple nil cases in type switch (first at %v)", nilCase.Line())
} else {
nilCase = ncase
}
case n1.Op != OTYPE:
- yyerrorl(ncase.Pos, "%L is not a type", n1)
+ base.ErrorfAt(ncase.Pos, "%L is not a type", n1)
case !n1.Type.IsInterface() && !implements(n1.Type, t, &missing, &have, &ptr) && !missing.Broke():
if have != nil && !have.Broke() {
- yyerrorl(ncase.Pos, "impossible type switch case: %L cannot have dynamic type %v"+
+ base.ErrorfAt(ncase.Pos, "impossible type switch case: %L cannot have dynamic type %v"+
" (wrong type for %v method)\n\thave %v%S\n\twant %v%S", n.Left.Right, n1.Type, missing.Sym, have.Sym, have.Type, missing.Sym, missing.Type)
} else if ptr != 0 {
- yyerrorl(ncase.Pos, "impossible type switch case: %L cannot have dynamic type %v"+
+ base.ErrorfAt(ncase.Pos, "impossible type switch case: %L cannot have dynamic type %v"+
" (%v method has pointer receiver)", n.Left.Right, n1.Type, missing.Sym)
} else {
- yyerrorl(ncase.Pos, "impossible type switch case: %L cannot have dynamic type %v"+
+ base.ErrorfAt(ncase.Pos, "impossible type switch case: %L cannot have dynamic type %v"+
" (missing %v method)", n.Left.Right, n1.Type, missing.Sym)
}
}
prevs := s.m[ls]
for _, prev := range prevs {
if types.Identical(typ, prev.typ) {
- yyerrorl(pos, "duplicate case %v in type switch\n\tprevious case at %s", typ, linestr(prev.pos))
+ base.ErrorfAt(pos, "duplicate case %v in type switch\n\tprevious case at %s", typ, base.FmtPos(prev.pos))
return
}
}
case !IsComparable(t):
if t.IsStruct() {
- yyerrorl(n.Pos, "cannot switch on %L (struct containing %v cannot be compared)", n.Left, IncomparableField(t).Type)
+ base.ErrorfAt(n.Pos, "cannot switch on %L (struct containing %v cannot be compared)", n.Left, IncomparableField(t).Type)
} else {
- yyerrorl(n.Pos, "cannot switch on %L", n.Left)
+ base.ErrorfAt(n.Pos, "cannot switch on %L", n.Left)
}
t = nil
}
ls := ncase.List.Slice()
if len(ls) == 0 { // default:
if defCase != nil {
- yyerrorl(ncase.Pos, "multiple defaults in switch (first at %v)", defCase.Line())
+ base.ErrorfAt(ncase.Pos, "multiple defaults in switch (first at %v)", defCase.Line())
} else {
defCase = ncase
}
}
if nilonly != "" && !n1.isNil() {
- yyerrorl(ncase.Pos, "invalid case %v in switch (can only compare %s %v to nil)", n1, nilonly, n.Left)
+ base.ErrorfAt(ncase.Pos, "invalid case %v in switch (can only compare %s %v to nil)", n1, nilonly, n.Left)
} else if t.IsInterface() && !n1.Type.IsInterface() && !IsComparable(n1.Type) {
- yyerrorl(ncase.Pos, "invalid case %L in switch (incomparable type)", n1)
+ base.ErrorfAt(ncase.Pos, "invalid case %L in switch (incomparable type)", n1)
} else {
op1, _ := assignop(n1.Type, t)
op2, _ := assignop(t, n1.Type)
if op1 == OXXX && op2 == OXXX {
if n.Left != nil {
- yyerrorl(ncase.Pos, "invalid case %v in switch on %v (mismatched types %v and %v)", n1, n.Left, n1.Type, t)
+ base.ErrorfAt(ncase.Pos, "invalid case %v in switch on %v (mismatched types %v and %v)", n1, n.Left, n1.Type, t)
} else {
- yyerrorl(ncase.Pos, "invalid case %v in switch (mismatched types %v and bool)", n1, n1.Type)
+ base.ErrorfAt(ncase.Pos, "invalid case %v in switch (mismatched types %v and bool)", n1, n1.Type)
}
}
}
cond = copyexpr(cond, cond.Type, &sw.Nbody)
}
- lineno = lno
+ base.Pos = lno
s := exprSwitch{
exprname: cond,
// Process case dispatch.
if ncase.List.Len() == 0 {
if defaultGoto != nil {
- Fatalf("duplicate default case not detected during typechecking")
+ base.Fatalf("duplicate default case not detected during typechecking")
}
defaultGoto = jmp
}
for _, ncase := range sw.List.Slice() {
if ncase.Op != OCASE {
- Fatalf("switch string(byteslice) bad op: %v", ncase.Op)
+ base.Fatalf("switch string(byteslice) bad op: %v", ncase.Op)
}
for _, v := range ncase.List.Slice() {
if v.Op != OLITERAL {
// Use a similar strategy for non-empty interfaces.
ifNil := nod(OIF, nil, nil)
ifNil.Left = nod(OEQ, itab, nodnil())
- lineno = lineno.WithNotStmt() // disable statement marks after the first check.
+ base.Pos = base.Pos.WithNotStmt() // disable statement marks after the first check.
ifNil.Left = typecheck(ifNil.Left, ctxExpr)
ifNil.Left = defaultlit(ifNil.Left, nil)
// ifNil.Nbody assigned at end.
if ncase.List.Len() == 0 { // default:
if defaultGoto != nil {
- Fatalf("duplicate default case not detected during typechecking")
+ base.Fatalf("duplicate default case not detected during typechecking")
}
defaultGoto = jmp
}
for _, n1 := range ncase.List.Slice() {
if n1.isNil() { // case nil:
if nilGoto != nil {
- Fatalf("duplicate nil case not detected during typechecking")
+ base.Fatalf("duplicate nil case not detected during typechecking")
}
nilGoto = jmp
continue
if singleType != nil {
// We have a single concrete type. Extract the data.
if singleType.IsInterface() {
- Fatalf("singleType interface should have been handled in Add")
+ base.Fatalf("singleType interface should have been handled in Add")
}
val = ifaceData(ncase.Pos, s.facename, singleType)
}
for i := lo; i < hi; i++ {
nif := nod(OIF, nil, nil)
leaf(i, nif)
- lineno = lineno.WithNotStmt()
+ base.Pos = base.Pos.WithNotStmt()
nif.Left = typecheck(nif.Left, ctxExpr)
nif.Left = defaultlit(nif.Left, nil)
out.Append(nif)
half := lo + n/2
nif := nod(OIF, nil, nil)
nif.Left = less(half)
- lineno = lineno.WithNotStmt()
+ base.Pos = base.Pos.WithNotStmt()
nif.Left = typecheck(nif.Left, ctxExpr)
nif.Left = defaultlit(nif.Left, nil)
do(lo, half, &nif.Nbody)
package gc
import (
+ "cmd/compile/internal/base"
"cmd/compile/internal/ssa"
"cmd/compile/internal/types"
"cmd/internal/obj"
switch n.Op {
case OASOP, ONAME:
default:
- Fatalf("unexpected op: %v", n.Op)
+ base.Fatalf("unexpected op: %v", n.Op)
}
return Op(n.aux)
}
switch n.Op {
case OASOP, ONAME:
default:
- Fatalf("unexpected op: %v", n.Op)
+ base.Fatalf("unexpected op: %v", n.Op)
}
n.aux = uint8(op)
}
func (n *Node) IndexMapLValue() bool {
if n.Op != OINDEXMAP {
- Fatalf("unexpected op: %v", n.Op)
+ base.Fatalf("unexpected op: %v", n.Op)
}
return n.aux != 0
}
func (n *Node) SetIndexMapLValue(b bool) {
if n.Op != OINDEXMAP {
- Fatalf("unexpected op: %v", n.Op)
+ base.Fatalf("unexpected op: %v", n.Op)
}
if b {
n.aux = 1
func (n *Node) TChanDir() types.ChanDir {
if n.Op != OTCHAN {
- Fatalf("unexpected op: %v", n.Op)
+ base.Fatalf("unexpected op: %v", n.Op)
}
return types.ChanDir(n.aux)
}
func (n *Node) SetTChanDir(dir types.ChanDir) {
if n.Op != OTCHAN {
- Fatalf("unexpected op: %v", n.Op)
+ base.Fatalf("unexpected op: %v", n.Op)
}
n.aux = uint8(dir)
}
// inserted before dereferencing. See state.exprPtr.
func (n *Node) MarkNonNil() {
if !n.Type.IsPtr() && !n.Type.IsUnsafePtr() {
- Fatalf("MarkNonNil(%v), type %v", n, n.Type)
+ base.Fatalf("MarkNonNil(%v), type %v", n, n.Type)
}
n.flags.set(nodeNonNil, true)
}
// No length and cap checks needed
// since new slice and copied over slice data have same length.
default:
- Fatalf("SetBounded(%v)", n)
+ base.Fatalf("SetBounded(%v)", n)
}
n.flags.set(nodeBounded, b)
}
// MarkReadonly indicates that n is an ONAME with readonly contents.
func (n *Node) MarkReadonly() {
if n.Op != ONAME {
- Fatalf("Node.MarkReadonly %v", n.Op)
+ base.Fatalf("Node.MarkReadonly %v", n.Op)
}
n.Name.SetReadonly(true)
// Mark the linksym as readonly immediately
// which must not have been used with SetOpt.
func (n *Node) SetVal(v constant.Value) {
if n.HasOpt() {
- Flag.LowerH = 1
+ base.Flag.LowerH = 1
Dump("have Opt", n)
- Fatalf("have Opt")
+ base.Fatalf("have Opt")
}
if n.Op == OLITERAL {
assertRepresents(n.Type, v)
return
}
if n.HasVal() {
- Flag.LowerH = 1
+ base.Flag.LowerH = 1
Dump("have Val", n)
- Fatalf("have Val")
+ base.Fatalf("have Val")
}
n.SetHasOpt(true)
n.E = x
}
pkg := s.Pkg
- p := Ctxt.Pkgpath
+ p := base.Ctxt.Pkgpath
if pkg != nil && pkg.Path != "" {
p = pkg.Path
}
func (f *Func) SetOpenCodedDeferDisallowed(b bool) { f.flags.set(funcOpenCodedDeferDisallowed, b) }
func (f *Func) setWBPos(pos src.XPos) {
- if Debug.WB != 0 {
- Warnl(pos, "write barrier")
+ if base.Debug.WB != 0 {
+ base.WarnfAt(pos, "write barrier")
}
if !f.WBPos.IsKnown() {
f.WBPos = pos
import (
"os"
tracepkg "runtime/trace"
+
+ "cmd/compile/internal/base"
)
func init() {
func traceHandlerGo17(traceprofile string) {
f, err := os.Create(traceprofile)
if err != nil {
- Fatalf("%v", err)
+ base.Fatalf("%v", err)
}
if err := tracepkg.Start(f); err != nil {
- Fatalf("%v", err)
+ base.Fatalf("%v", err)
}
- atExit(tracepkg.Stop)
+ base.AtExit(tracepkg.Stop)
}
package gc
import (
+ "cmd/compile/internal/base"
"cmd/compile/internal/types"
"fmt"
"go/constant"
var pos, op string
var tc uint8
if n != nil {
- pos = linestr(n.Pos)
+ pos = base.FmtPos(n.Pos)
op = n.Op.String()
tc = n.Typecheck()
}
var tc uint8
var typ *types.Type
if n != nil {
- pos = linestr(n.Pos)
+ pos = base.FmtPos(n.Pos)
op = n.Op.String()
tc = n.Typecheck()
typ = n.Type
}
// only trace if there's work to do
- if enableTrace && Flag.LowerT {
+ if enableTrace && base.Flag.LowerT {
defer tracePrint("resolve", n)(&res)
}
if n.Sym.Pkg != localpkg {
if inimport {
- Fatalf("recursive inimport")
+ base.Fatalf("recursive inimport")
}
inimport = true
expandDecl(n)
func typecheck(n *Node, top int) (res *Node) {
// cannot type check until all the source has been parsed
if !typecheckok {
- Fatalf("early typecheck")
+ base.Fatalf("early typecheck")
}
if n == nil {
}
// only trace if there's work to do
- if enableTrace && Flag.LowerT {
+ if enableTrace && base.Flag.LowerT {
defer tracePrint("typecheck", n)(&res)
}
break
default:
- lineno = lno
+ base.Pos = lno
return n
}
}
// We can already diagnose variables used as types.
case ONAME:
if top&(ctxExpr|ctxType) == ctxType {
- yyerror("%v is not a type", n)
+ base.Errorf("%v is not a type", n)
}
case OTYPE:
// with aliases that we can't handle properly yet.
// Report an error rather than crashing later.
if n.Name != nil && n.Name.Param.Alias() && n.Type == nil {
- lineno = n.Pos
- Fatalf("cannot handle alias type declaration (issue #25838): %v", n)
+ base.Pos = n.Pos
+ base.Fatalf("cannot handle alias type declaration (issue #25838): %v", n)
}
- lineno = lno
+ base.Pos = lno
return n
}
}
- yyerrorl(n.Pos, "invalid recursive type alias %v%s", n, cycleTrace(cycle))
+ base.ErrorfAt(n.Pos, "invalid recursive type alias %v%s", n, cycleTrace(cycle))
}
case OLITERAL:
if top&(ctxExpr|ctxType) == ctxType {
- yyerror("%v is not a type", n)
+ base.Errorf("%v is not a type", n)
break
}
- yyerrorl(n.Pos, "constant definition loop%s", cycleTrace(cycleFor(n)))
+ base.ErrorfAt(n.Pos, "constant definition loop%s", cycleTrace(cycleFor(n)))
}
- if Errors() == 0 {
+ if base.Errors() == 0 {
var trace string
for i := len(typecheck_tcstack) - 1; i >= 0; i-- {
x := typecheck_tcstack[i]
trace += fmt.Sprintf("\n\t%v %v", x.Line(), x)
}
- yyerror("typechecking loop involving %v%s", n, trace)
+ base.Errorf("typechecking loop involving %v%s", n, trace)
}
- lineno = lno
+ base.Pos = lno
return n
}
typecheck_tcstack[last] = nil
typecheck_tcstack = typecheck_tcstack[:last]
- lineno = lno
+ base.Pos = lno
return n
}
// The result of typecheck1 MUST be assigned back to n, e.g.
// n.Left = typecheck1(n.Left, top)
func typecheck1(n *Node, top int) (res *Node) {
- if enableTrace && Flag.LowerT {
+ if enableTrace && base.Flag.LowerT {
defer tracePrint("typecheck1", n)(&res)
}
}
if n.Op == ONAME && n.SubOp() != 0 && top&ctxCallee == 0 {
- yyerror("use of builtin %v not in function call", n.Sym)
+ base.Errorf("use of builtin %v not in function call", n.Sym)
n.Type = nil
return n
}
default:
Dump("typecheck", n)
- Fatalf("typecheck %v", n.Op)
+ base.Fatalf("typecheck %v", n.Op)
// names
case OLITERAL:
ok |= ctxExpr
if n.Type == nil && n.Val().Kind() == constant.String {
- Fatalf("string literal missing type")
+ base.Fatalf("string literal missing type")
}
case ONIL, ONONAME:
if top&ctxAssign == 0 {
// not a write to the variable
if n.isBlank() {
- yyerror("cannot use _ as value")
+ base.Errorf("cannot use _ as value")
n.Type = nil
return n
}
ok |= ctxExpr
case OPACK:
- yyerror("use of package %v without selector", n.Sym)
+ base.Errorf("use of package %v without selector", n.Sym)
n.Type = nil
return n
} else if n.Left.Op == ODDD {
if !n.Diag() {
n.SetDiag(true)
- yyerror("use of [...] array outside of array literal")
+ base.Errorf("use of [...] array outside of array literal")
}
n.Type = nil
return n
case l.Type == nil:
// Error already reported elsewhere.
case l.Type.IsInteger() && l.Op != OLITERAL:
- yyerror("non-constant array bound %v", l)
+ base.Errorf("non-constant array bound %v", l)
default:
- yyerror("invalid array bound %v", l)
+ base.Errorf("invalid array bound %v", l)
}
n.Type = nil
return n
v := l.Val()
if doesoverflow(v, types.Types[TINT]) {
- yyerror("array bound is too large")
+ base.Errorf("array bound is too large")
n.Type = nil
return n
}
if constant.Sign(v) < 0 {
- yyerror("array bound must be non-negative")
+ base.Errorf("array bound must be non-negative")
n.Type = nil
return n
}
return n
}
if l.Type.NotInHeap() {
- yyerror("incomplete (or unallocatable) map key not allowed")
+ base.Errorf("incomplete (or unallocatable) map key not allowed")
}
if r.Type.NotInHeap() {
- yyerror("incomplete (or unallocatable) map value not allowed")
+ base.Errorf("incomplete (or unallocatable) map value not allowed")
}
setTypeNode(n, types.NewMap(l.Type, r.Type))
return n
}
if l.Type.NotInHeap() {
- yyerror("chan of incomplete (or unallocatable) type not allowed")
+ base.Errorf("chan of incomplete (or unallocatable) type not allowed")
}
setTypeNode(n, types.NewChan(l.Type, n.TChanDir()))
if !t.IsPtr() {
if top&(ctxExpr|ctxStmt) != 0 {
- yyerror("invalid indirect of %L", n.Left)
+ base.Errorf("invalid indirect of %L", n.Left)
n.Type = nil
return n
}
return n
}
if n.Implicit() && !okforarith[l.Type.Etype] {
- yyerror("invalid operation: %v (non-numeric type %v)", n, l.Type)
+ base.Errorf("invalid operation: %v (non-numeric type %v)", n, l.Type)
n.Type = nil
return n
}
n.Right = r
t := r.Type
if !t.IsInteger() {
- yyerror("invalid operation: %v (shift count type %v, must be integer)", n, r.Type)
+ base.Errorf("invalid operation: %v (shift count type %v, must be integer)", n, r.Type)
n.Type = nil
return n
}
if t.IsSigned() && !langSupported(1, 13, curpkg()) {
- yyerrorv("go1.13", "invalid operation: %v (signed shift count type %v)", n, r.Type)
+ base.ErrorfVers("go1.13", "invalid operation: %v (signed shift count type %v)", n, r.Type)
n.Type = nil
return n
}
t = l.Type
if t != nil && t.Etype != TIDEAL && !t.IsInteger() {
- yyerror("invalid operation: %v (shift of type %v)", n, t)
+ base.Errorf("invalid operation: %v (shift of type %v)", n, t)
n.Type = nil
return n
}
// can't be converted to int (see issue #41500).
if n.Op == OANDAND || n.Op == OOROR {
if !n.Left.Type.IsBoolean() {
- yyerror("invalid operation: %v (operator %v not defined on %s)", n, n.Op, typekind(n.Left.Type))
+ base.Errorf("invalid operation: %v (operator %v not defined on %s)", n, n.Op, typekind(n.Left.Type))
n.Type = nil
return n
}
if !n.Right.Type.IsBoolean() {
- yyerror("invalid operation: %v (operator %v not defined on %s)", n, n.Op, typekind(n.Right.Type))
+ base.Errorf("invalid operation: %v (operator %v not defined on %s)", n, n.Op, typekind(n.Right.Type))
n.Type = nil
return n
}
aop, _ = assignop(l.Type, r.Type)
if aop != OXXX {
if r.Type.IsInterface() && !l.Type.IsInterface() && !IsComparable(l.Type) {
- yyerror("invalid operation: %v (operator %v not defined on %s)", n, op, typekind(l.Type))
+ base.Errorf("invalid operation: %v (operator %v not defined on %s)", n, op, typekind(l.Type))
n.Type = nil
return n
}
aop, _ = assignop(r.Type, l.Type)
if aop != OXXX {
if l.Type.IsInterface() && !r.Type.IsInterface() && !IsComparable(r.Type) {
- yyerror("invalid operation: %v (operator %v not defined on %s)", n, op, typekind(r.Type))
+ base.Errorf("invalid operation: %v (operator %v not defined on %s)", n, op, typekind(r.Type))
n.Type = nil
return n
}
return n
}
if l.Type.IsInterface() == r.Type.IsInterface() || aop == 0 {
- yyerror("invalid operation: %v (mismatched types %v and %v)", n, l.Type, r.Type)
+ base.Errorf("invalid operation: %v (mismatched types %v and %v)", n, l.Type, r.Type)
n.Type = nil
return n
}
t = mixUntyped(l.Type, r.Type)
}
if dt := defaultType(t); !okfor[op][dt.Etype] {
- yyerror("invalid operation: %v (operator %v not defined on %s)", n, op, typekind(t))
+ base.Errorf("invalid operation: %v (operator %v not defined on %s)", n, op, typekind(t))
n.Type = nil
return n
}
// okfor allows any array == array, map == map, func == func.
// restrict to slice/map/func == nil and nil == slice/map/func.
if l.Type.IsArray() && !IsComparable(l.Type) {
- yyerror("invalid operation: %v (%v cannot be compared)", n, l.Type)
+ base.Errorf("invalid operation: %v (%v cannot be compared)", n, l.Type)
n.Type = nil
return n
}
if l.Type.IsSlice() && !l.isNil() && !r.isNil() {
- yyerror("invalid operation: %v (slice can only be compared to nil)", n)
+ base.Errorf("invalid operation: %v (slice can only be compared to nil)", n)
n.Type = nil
return n
}
if l.Type.IsMap() && !l.isNil() && !r.isNil() {
- yyerror("invalid operation: %v (map can only be compared to nil)", n)
+ base.Errorf("invalid operation: %v (map can only be compared to nil)", n)
n.Type = nil
return n
}
if l.Type.Etype == TFUNC && !l.isNil() && !r.isNil() {
- yyerror("invalid operation: %v (func can only be compared to nil)", n)
+ base.Errorf("invalid operation: %v (func can only be compared to nil)", n)
n.Type = nil
return n
}
if l.Type.IsStruct() {
if f := IncomparableField(l.Type); f != nil {
- yyerror("invalid operation: %v (struct containing %v cannot be compared)", n, f.Type)
+ base.Errorf("invalid operation: %v (struct containing %v cannot be compared)", n, f.Type)
n.Type = nil
return n
}
if (op == ODIV || op == OMOD) && Isconst(r, constant.Int) {
if constant.Sign(r.Val()) == 0 {
- yyerror("division by zero")
+ base.Errorf("division by zero")
n.Type = nil
return n
}
return n
}
if !okfor[n.Op][defaultType(t).Etype] {
- yyerror("invalid operation: %v (operator %v not defined on %s)", n, n.Op, typekind(t))
+ base.Errorf("invalid operation: %v (operator %v not defined on %s)", n, n.Op, typekind(t))
n.Type = nil
return n
}
r := outervalue(n.Left)
if r.Op == ONAME {
if r.Orig != r {
- Fatalf("found non-orig name node %v", r) // TODO(mdempsky): What does this mean?
+ base.Fatalf("found non-orig name node %v", r) // TODO(mdempsky): What does this mean?
}
r.Name.SetAddrtaken(true)
if r.Name.IsClosureVar() && !capturevarscomplete {
t := n.Left.Type
if t == nil {
- UpdateErrorDot(n.Line(), n.Left.String(), n.String())
+ base.UpdateErrorDot(n.Line(), n.Left.String(), n.String())
n.Type = nil
return n
}
}
if n.Sym.IsBlank() {
- yyerror("cannot refer to blank field or method")
+ base.Errorf("cannot refer to blank field or method")
n.Type = nil
return n
}
// Legitimate field or method lookup failed, try to explain the error
switch {
case t.IsEmptyInterface():
- yyerror("%v undefined (type %v is interface with no methods)", n, n.Left.Type)
+ base.Errorf("%v undefined (type %v is interface with no methods)", n, n.Left.Type)
case t.IsPtr() && t.Elem().IsInterface():
// Pointer to interface is almost always a mistake.
- yyerror("%v undefined (type %v is pointer to interface, not interface)", n, n.Left.Type)
+ base.Errorf("%v undefined (type %v is pointer to interface, not interface)", n, n.Left.Type)
case lookdot(n, t, 1) != nil:
// Field or method matches by name, but it is not exported.
- yyerror("%v undefined (cannot refer to unexported field or method %v)", n, n.Sym)
+ base.Errorf("%v undefined (cannot refer to unexported field or method %v)", n, n.Sym)
default:
if mt := lookdot(n, t, 2); mt != nil && visible(mt.Sym) { // Case-insensitive lookup.
- yyerror("%v undefined (type %v has no field or method %v, but does have %v)", n, n.Left.Type, n.Sym, mt.Sym)
+ base.Errorf("%v undefined (type %v has no field or method %v, but does have %v)", n, n.Left.Type, n.Sym, mt.Sym)
} else {
- yyerror("%v undefined (type %v has no field or method %v)", n, n.Left.Type, n.Sym)
+ base.Errorf("%v undefined (type %v has no field or method %v)", n, n.Left.Type, n.Sym)
}
}
n.Type = nil
return n
}
if !t.IsInterface() {
- yyerror("invalid type assertion: %v (non-interface type %v on left)", n, t)
+ base.Errorf("invalid type assertion: %v (non-interface type %v on left)", n, t)
n.Type = nil
return n
}
var ptr int
if !implements(n.Type, t, &missing, &have, &ptr) {
if have != nil && have.Sym == missing.Sym {
- yyerror("impossible type assertion:\n\t%v does not implement %v (wrong type for %v method)\n"+
+ base.Errorf("impossible type assertion:\n\t%v does not implement %v (wrong type for %v method)\n"+
"\t\thave %v%0S\n\t\twant %v%0S", n.Type, t, missing.Sym, have.Sym, have.Type, missing.Sym, missing.Type)
} else if ptr != 0 {
- yyerror("impossible type assertion:\n\t%v does not implement %v (%v method has pointer receiver)", n.Type, t, missing.Sym)
+ base.Errorf("impossible type assertion:\n\t%v does not implement %v (%v method has pointer receiver)", n.Type, t, missing.Sym)
} else if have != nil {
- yyerror("impossible type assertion:\n\t%v does not implement %v (missing %v method)\n"+
+ base.Errorf("impossible type assertion:\n\t%v does not implement %v (missing %v method)\n"+
"\t\thave %v%0S\n\t\twant %v%0S", n.Type, t, missing.Sym, have.Sym, have.Type, missing.Sym, missing.Type)
} else {
- yyerror("impossible type assertion:\n\t%v does not implement %v (missing %v method)", n.Type, t, missing.Sym)
+ base.Errorf("impossible type assertion:\n\t%v does not implement %v (missing %v method)", n.Type, t, missing.Sym)
}
n.Type = nil
return n
}
switch t.Etype {
default:
- yyerror("invalid operation: %v (type %v does not support indexing)", n, t)
+ base.Errorf("invalid operation: %v (type %v does not support indexing)", n, t)
n.Type = nil
return n
}
if n.Right.Type != nil && !n.Right.Type.IsInteger() {
- yyerror("non-integer %s index %v", why, n.Right)
+ base.Errorf("non-integer %s index %v", why, n.Right)
break
}
if !n.Bounded() && Isconst(n.Right, constant.Int) {
x := n.Right.Val()
if constant.Sign(x) < 0 {
- yyerror("invalid %s index %v (index must be non-negative)", why, n.Right)
+ base.Errorf("invalid %s index %v (index must be non-negative)", why, n.Right)
} else if t.IsArray() && constant.Compare(x, token.GEQ, constant.MakeInt64(t.NumElem())) {
- yyerror("invalid array index %v (out of bounds for %d-element array)", n.Right, t.NumElem())
+ base.Errorf("invalid array index %v (out of bounds for %d-element array)", n.Right, t.NumElem())
} else if Isconst(n.Left, constant.String) && constant.Compare(x, token.GEQ, constant.MakeInt64(int64(len(n.Left.StringVal())))) {
- yyerror("invalid string index %v (out of bounds for %d-byte string)", n.Right, len(n.Left.StringVal()))
+ base.Errorf("invalid string index %v (out of bounds for %d-byte string)", n.Right, len(n.Left.StringVal()))
} else if doesoverflow(x, types.Types[TINT]) {
- yyerror("invalid %s index %v (index too large)", why, n.Right)
+ base.Errorf("invalid %s index %v (index too large)", why, n.Right)
}
}
return n
}
if !t.IsChan() {
- yyerror("invalid operation: %v (receive from non-chan type %v)", n, t)
+ base.Errorf("invalid operation: %v (receive from non-chan type %v)", n, t)
n.Type = nil
return n
}
if !t.ChanDir().CanRecv() {
- yyerror("invalid operation: %v (receive from send-only type %v)", n, t)
+ base.Errorf("invalid operation: %v (receive from send-only type %v)", n, t)
n.Type = nil
return n
}
return n
}
if !t.IsChan() {
- yyerror("invalid operation: %v (send to non-chan type %v)", n, t)
+ base.Errorf("invalid operation: %v (send to non-chan type %v)", n, t)
n.Type = nil
return n
}
if !t.ChanDir().CanSend() {
- yyerror("invalid operation: %v (send to receive-only type %v)", n, t)
+ base.Errorf("invalid operation: %v (send to receive-only type %v)", n, t)
n.Type = nil
return n
}
n.Type = nil
case OSLICEHEADER:
- // Errors here are Fatalf instead of yyerror because only the compiler
+ // Errors here are Fatalf instead of Errorf because only the compiler
// can construct an OSLICEHEADER node.
// Components used in OSLICEHEADER that are supplied by parsed source code
// have already been typechecked in e.g. OMAKESLICE earlier.
t := n.Type
if t == nil {
- Fatalf("no type specified for OSLICEHEADER")
+ base.Fatalf("no type specified for OSLICEHEADER")
}
if !t.IsSlice() {
- Fatalf("invalid type %v for OSLICEHEADER", n.Type)
+ base.Fatalf("invalid type %v for OSLICEHEADER", n.Type)
}
if n.Left == nil || n.Left.Type == nil || !n.Left.Type.IsUnsafePtr() {
- Fatalf("need unsafe.Pointer for OSLICEHEADER")
+ base.Fatalf("need unsafe.Pointer for OSLICEHEADER")
}
if x := n.List.Len(); x != 2 {
- Fatalf("expected 2 params (len, cap) for OSLICEHEADER, got %d", x)
+ base.Fatalf("expected 2 params (len, cap) for OSLICEHEADER, got %d", x)
}
n.Left = typecheck(n.Left, ctxExpr)
c = defaultlit(c, types.Types[TINT])
if Isconst(l, constant.Int) && l.Int64Val() < 0 {
- Fatalf("len for OSLICEHEADER must be non-negative")
+ base.Fatalf("len for OSLICEHEADER must be non-negative")
}
if Isconst(c, constant.Int) && c.Int64Val() < 0 {
- Fatalf("cap for OSLICEHEADER must be non-negative")
+ base.Fatalf("cap for OSLICEHEADER must be non-negative")
}
if Isconst(l, constant.Int) && Isconst(c, constant.Int) && constant.Compare(l.Val(), token.GTR, c.Val()) {
- Fatalf("len larger than cap for OSLICEHEADER")
+ base.Fatalf("len larger than cap for OSLICEHEADER")
}
n.List.SetFirst(l)
n.List.SetSecond(c)
case OMAKESLICECOPY:
- // Errors here are Fatalf instead of yyerror because only the compiler
+ // Errors here are Fatalf instead of Errorf because only the compiler
// can construct an OMAKESLICECOPY node.
// Components used in OMAKESLICECOPY that are supplied by parsed source code
// have already been typechecked in OMAKE and OCOPY earlier.
t := n.Type
if t == nil {
- Fatalf("no type specified for OMAKESLICECOPY")
+ base.Fatalf("no type specified for OMAKESLICECOPY")
}
if !t.IsSlice() {
- Fatalf("invalid type %v for OMAKESLICECOPY", n.Type)
+ base.Fatalf("invalid type %v for OMAKESLICECOPY", n.Type)
}
if n.Left == nil {
- Fatalf("missing len argument for OMAKESLICECOPY")
+ base.Fatalf("missing len argument for OMAKESLICECOPY")
}
if n.Right == nil {
- Fatalf("missing slice argument to copy for OMAKESLICECOPY")
+ base.Fatalf("missing slice argument to copy for OMAKESLICECOPY")
}
n.Left = typecheck(n.Left, ctxExpr)
n.Left = defaultlit(n.Left, types.Types[TINT])
if !n.Left.Type.IsInteger() && n.Type.Etype != TIDEAL {
- yyerror("non-integer len argument in OMAKESLICECOPY")
+ base.Errorf("non-integer len argument in OMAKESLICECOPY")
}
if Isconst(n.Left, constant.Int) {
if doesoverflow(n.Left.Val(), types.Types[TINT]) {
- Fatalf("len for OMAKESLICECOPY too large")
+ base.Fatalf("len for OMAKESLICECOPY too large")
}
if constant.Sign(n.Left.Val()) < 0 {
- Fatalf("len for OMAKESLICECOPY must be non-negative")
+ base.Fatalf("len for OMAKESLICECOPY must be non-negative")
}
}
}
if l.Type.IsArray() {
if !islvalue(n.Left) {
- yyerror("invalid operation %v (slice of unaddressable value)", n)
+ base.Errorf("invalid operation %v (slice of unaddressable value)", n)
n.Type = nil
return n
}
var tp *types.Type
if t.IsString() {
if hasmax {
- yyerror("invalid operation %v (3-index slice of string)", n)
+ base.Errorf("invalid operation %v (3-index slice of string)", n)
n.Type = nil
return n
}
} else if t.IsSlice() {
n.Type = t
} else {
- yyerror("cannot slice %v (type %v)", l, t)
+ base.Errorf("cannot slice %v (type %v)", l, t)
n.Type = nil
return n
}
if l.Op == ONAME && l.SubOp() != 0 {
if n.IsDDD() && l.SubOp() != OAPPEND {
- yyerror("invalid use of ... with builtin %v", l)
+ base.Errorf("invalid use of ... with builtin %v", l)
}
// builtin: OLEN, OCAP, etc.
if l.Op == OTYPE {
if n.IsDDD() {
if !l.Type.Broke() {
- yyerror("invalid use of ... in type conversion to %v", l.Type)
+ base.Errorf("invalid use of ... in type conversion to %v", l.Type)
}
n.SetDiag(true)
}
tp := t.Recv().Type
if l.Left == nil || !types.Identical(l.Left.Type, tp) {
- Fatalf("method receiver")
+ base.Fatalf("method receiver")
}
default:
if isBuiltinFuncName(name) && l.Name.Defn != nil {
// be more specific when the function
// name matches a predeclared function
- yyerror("cannot call non-function %s (type %v), declared at %s",
- name, t, linestr(l.Name.Defn.Pos))
+ base.Errorf("cannot call non-function %s (type %v), declared at %s",
+ name, t, base.FmtPos(l.Name.Defn.Pos))
} else {
- yyerror("cannot call non-function %s (type %v)", name, t)
+ base.Errorf("cannot call non-function %s (type %v)", name, t)
}
n.Type = nil
return n
// multiple return
if top&(ctxMultiOK|ctxStmt) == 0 {
- yyerror("multiple-value %v() in single-value context", l)
+ base.Errorf("multiple-value %v() in single-value context", l)
break
}
ok = okforcap[t.Etype]
}
if !ok {
- yyerror("invalid argument %L for %v", l, n.Op)
+ base.Errorf("invalid argument %L for %v", l, n.Op)
n.Type = nil
return n
}
case TCOMPLEX128:
n.Type = types.Types[TFLOAT64]
default:
- yyerror("invalid argument %L for %v", l, n.Op)
+ base.Errorf("invalid argument %L for %v", l, n.Op)
n.Type = nil
return n
}
n.Right = r
if !types.Identical(l.Type, r.Type) {
- yyerror("invalid operation: %v (mismatched types %v and %v)", n, l.Type, r.Type)
+ base.Errorf("invalid operation: %v (mismatched types %v and %v)", n, l.Type, r.Type)
n.Type = nil
return n
}
var t *types.Type
switch l.Type.Etype {
default:
- yyerror("invalid operation: %v (arguments have type %v, expected floating-point)", n, l.Type)
+ base.Errorf("invalid operation: %v (arguments have type %v, expected floating-point)", n, l.Type)
n.Type = nil
return n
return n
}
if !t.IsChan() {
- yyerror("invalid operation: %v (non-chan type %v)", n, t)
+ base.Errorf("invalid operation: %v (non-chan type %v)", n, t)
n.Type = nil
return n
}
if !t.ChanDir().CanSend() {
- yyerror("invalid operation: %v (cannot close receive-only channel)", n)
+ base.Errorf("invalid operation: %v (cannot close receive-only channel)", n)
n.Type = nil
return n
}
typecheckargs(n)
args := n.List
if args.Len() == 0 {
- yyerror("missing arguments to delete")
+ base.Errorf("missing arguments to delete")
n.Type = nil
return n
}
if args.Len() == 1 {
- yyerror("missing second (key) argument to delete")
+ base.Errorf("missing second (key) argument to delete")
n.Type = nil
return n
}
if args.Len() != 2 {
- yyerror("too many arguments to delete")
+ base.Errorf("too many arguments to delete")
n.Type = nil
return n
}
l := args.First()
r := args.Second()
if l.Type != nil && !l.Type.IsMap() {
- yyerror("first argument to delete must be map; have %L", l.Type)
+ base.Errorf("first argument to delete must be map; have %L", l.Type)
n.Type = nil
return n
}
typecheckargs(n)
args := n.List
if args.Len() == 0 {
- yyerror("missing arguments to append")
+ base.Errorf("missing arguments to append")
n.Type = nil
return n
}
n.Type = t
if !t.IsSlice() {
if args.First().isNil() {
- yyerror("first argument to append must be typed slice; have untyped nil")
+ base.Errorf("first argument to append must be typed slice; have untyped nil")
n.Type = nil
return n
}
- yyerror("first argument to append must be slice; have %L", t)
+ base.Errorf("first argument to append must be slice; have %L", t)
n.Type = nil
return n
}
if n.IsDDD() {
if args.Len() == 1 {
- yyerror("cannot use ... on first argument to append")
+ base.Errorf("cannot use ... on first argument to append")
n.Type = nil
return n
}
if args.Len() != 2 {
- yyerror("too many arguments to append")
+ base.Errorf("too many arguments to append")
n.Type = nil
return n
}
if types.Identical(n.Left.Type.Elem(), types.Bytetype) {
break
}
- yyerror("arguments to copy have different element types: %L and string", n.Left.Type)
+ base.Errorf("arguments to copy have different element types: %L and string", n.Left.Type)
n.Type = nil
return n
}
if !n.Left.Type.IsSlice() || !n.Right.Type.IsSlice() {
if !n.Left.Type.IsSlice() && !n.Right.Type.IsSlice() {
- yyerror("arguments to copy must be slices; have %L, %L", n.Left.Type, n.Right.Type)
+ base.Errorf("arguments to copy must be slices; have %L, %L", n.Left.Type, n.Right.Type)
} else if !n.Left.Type.IsSlice() {
- yyerror("first argument to copy should be slice; have %L", n.Left.Type)
+ base.Errorf("first argument to copy should be slice; have %L", n.Left.Type)
} else {
- yyerror("second argument to copy should be slice or string; have %L", n.Right.Type)
+ base.Errorf("second argument to copy should be slice or string; have %L", n.Right.Type)
}
n.Type = nil
return n
}
if !types.Identical(n.Left.Type.Elem(), n.Right.Type.Elem()) {
- yyerror("arguments to copy have different element types: %L and %L", n.Left.Type, n.Right.Type)
+ base.Errorf("arguments to copy have different element types: %L and %L", n.Left.Type, n.Right.Type)
n.Type = nil
return n
}
n.Op = op
if n.Op == OXXX {
if !n.Diag() && !n.Type.Broke() && !n.Left.Diag() {
- yyerror("cannot convert %L to type %v%s", n.Left, n.Type, why)
+ base.Errorf("cannot convert %L to type %v%s", n.Left, n.Type, why)
n.SetDiag(true)
}
n.Op = OCONV
ok |= ctxExpr
args := n.List.Slice()
if len(args) == 0 {
- yyerror("missing argument to make")
+ base.Errorf("missing argument to make")
n.Type = nil
return n
}
i := 1
switch t.Etype {
default:
- yyerror("cannot make type %v", t)
+ base.Errorf("cannot make type %v", t)
n.Type = nil
return n
case TSLICE:
if i >= len(args) {
- yyerror("missing len argument to make(%v)", t)
+ base.Errorf("missing len argument to make(%v)", t)
n.Type = nil
return n
}
return n
}
if Isconst(l, constant.Int) && r != nil && Isconst(r, constant.Int) && constant.Compare(l.Val(), token.GTR, r.Val()) {
- yyerror("len larger than cap in make(%v)", t)
+ base.Errorf("len larger than cap in make(%v)", t)
n.Type = nil
return n
}
}
if i < len(args) {
- yyerror("too many arguments to make(%v)", t)
+ base.Errorf("too many arguments to make(%v)", t)
n.Op = OMAKE
n.Type = nil
return n
ok |= ctxExpr
args := n.List
if args.Len() == 0 {
- yyerror("missing argument to new")
+ base.Errorf("missing argument to new")
n.Type = nil
return n
}
return n
}
if args.Len() > 1 {
- yyerror("too many arguments to new(%v)", t)
+ base.Errorf("too many arguments to new(%v)", t)
n.Type = nil
return n
}
case ORECOVER:
ok |= ctxExpr | ctxStmt
if n.List.Len() != 0 {
- yyerror("too many arguments to recover")
+ base.Errorf("too many arguments to recover")
n.Type = nil
return n
}
return n
}
if !t.IsInterface() {
- Fatalf("OITAB of %v", t)
+ base.Fatalf("OITAB of %v", t)
}
n.Type = types.NewPtr(types.Types[TUINTPTR])
case OIDATA:
// Whoever creates the OIDATA node must know a priori the concrete type at that moment,
// usually by just having checked the OITAB.
- Fatalf("cannot typecheck interface data %v", n)
+ base.Fatalf("cannot typecheck interface data %v", n)
case OSPTR:
ok |= ctxExpr
return n
}
if !t.IsSlice() && !t.IsString() {
- Fatalf("OSPTR of %v", t)
+ base.Fatalf("OSPTR of %v", t)
}
if t.IsString() {
n.Type = types.NewPtr(types.Types[TUINT8])
if n.Left != nil {
t := n.Left.Type
if t != nil && !t.IsBoolean() {
- yyerror("non-bool %L used as for condition", n.Left)
+ base.Errorf("non-bool %L used as for condition", n.Left)
}
}
n.Right = typecheck(n.Right, ctxStmt)
if n.Left != nil {
t := n.Left.Type
if t != nil && !t.IsBoolean() {
- yyerror("non-bool %L used as if condition", n.Left)
+ base.Errorf("non-bool %L used as if condition", n.Left)
}
}
typecheckslice(n.Nbody.Slice(), ctxStmt)
ok |= ctxStmt
typecheckargs(n)
if Curfn == nil {
- yyerror("return outside function")
+ base.Errorf("return outside function")
n.Type = nil
return n
}
typecheckrange(n)
case OTYPESW:
- yyerror("use of .(type) outside type switch")
+ base.Errorf("use of .(type) outside type switch")
n.Type = nil
return n
n = evalConst(n)
if n.Op == OTYPE && top&ctxType == 0 {
if !n.Type.Broke() {
- yyerror("type %v is not an expression", n.Type)
+ base.Errorf("type %v is not an expression", n.Type)
}
n.Type = nil
return n
}
if top&(ctxExpr|ctxType) == ctxType && n.Op != OTYPE {
- yyerror("%v is not a type", n)
+ base.Errorf("%v is not a type", n)
n.Type = nil
return n
}
// TODO(rsc): simplify
if (top&(ctxCallee|ctxExpr|ctxType) != 0) && top&ctxStmt == 0 && ok&(ctxExpr|ctxType|ctxCallee) == 0 {
- yyerror("%v used as value", n)
+ base.Errorf("%v used as value", n)
n.Type = nil
return n
}
if (top&ctxStmt != 0) && top&(ctxCallee|ctxExpr|ctxType) == 0 && ok&ctxStmt == 0 {
if !n.Diag() {
- yyerror("%v evaluated but not used", n)
+ base.Errorf("%v evaluated but not used", n)
n.SetDiag(true)
}
return false
}
if !t.IsInteger() {
- yyerror("invalid slice index %v (type %v)", r, t)
+ base.Errorf("invalid slice index %v (type %v)", r, t)
return false
}
if r.Op == OLITERAL {
x := r.Val()
if constant.Sign(x) < 0 {
- yyerror("invalid slice index %v (index must be non-negative)", r)
+ base.Errorf("invalid slice index %v (index must be non-negative)", r)
return false
} else if tp != nil && tp.NumElem() >= 0 && constant.Compare(x, token.GTR, constant.MakeInt64(tp.NumElem())) {
- yyerror("invalid slice index %v (out of bounds for %d-element array)", r, tp.NumElem())
+ base.Errorf("invalid slice index %v (out of bounds for %d-element array)", r, tp.NumElem())
return false
} else if Isconst(l, constant.String) && constant.Compare(x, token.GTR, constant.MakeInt64(int64(len(l.StringVal())))) {
- yyerror("invalid slice index %v (out of bounds for %d-byte string)", r, len(l.StringVal()))
+ base.Errorf("invalid slice index %v (out of bounds for %d-byte string)", r, len(l.StringVal()))
return false
} else if doesoverflow(x, types.Types[TINT]) {
- yyerror("invalid slice index %v (index too large)", r)
+ base.Errorf("invalid slice index %v (index too large)", r)
return false
}
}
func checksliceconst(lo *Node, hi *Node) bool {
if lo != nil && hi != nil && lo.Op == OLITERAL && hi.Op == OLITERAL && constant.Compare(lo.Val(), token.GTR, hi.Val()) {
- yyerror("invalid slice index: %v > %v", lo, hi)
+ base.Errorf("invalid slice index: %v > %v", lo, hi)
return false
}
if n.Left.Orig != nil && n.Left.Orig.Op == OCONV {
break
}
- yyerrorl(n.Pos, "%s discards result of %v", what, n.Left)
+ base.ErrorfAt(n.Pos, "%s discards result of %v", what, n.Left)
return
}
// The syntax made sure it was a call, so this must be
// a conversion.
n.SetDiag(true)
- yyerrorl(n.Pos, "%s requires function call, not conversion", what)
+ base.ErrorfAt(n.Pos, "%s requires function call, not conversion", what)
}
}
}
if n.List.Len() == 0 {
p := fmt.Sprintf(f, args...)
- yyerror("missing argument to %s: %v", p, n)
+ base.Errorf("missing argument to %s: %v", p, n)
return false
}
if n.List.Len() > 1 {
p := fmt.Sprintf(f, args...)
- yyerror("too many arguments to %s: %v", p, n)
+ base.Errorf("too many arguments to %s: %v", p, n)
n.Left = n.List.First()
n.List.Set(nil)
return false
}
if n.List.Len() != 2 {
if n.List.Len() < 2 {
- yyerror("not enough arguments in call to %v", n)
+ base.Errorf("not enough arguments in call to %v", n)
} else {
- yyerror("too many arguments in call to %v", n)
+ base.Errorf("too many arguments in call to %v", n)
}
return false
}
}
if r != nil {
if errnode != nil {
- yyerror("ambiguous selector %v", errnode)
+ base.Errorf("ambiguous selector %v", errnode)
} else if t.IsPtr() {
- yyerror("ambiguous selector (%v).%v", t, s)
+ base.Errorf("ambiguous selector (%v).%v", t, s)
} else {
- yyerror("ambiguous selector %v.%v", t, s)
+ base.Errorf("ambiguous selector %v.%v", t, s)
}
break
}
// typecheckMethodExpr checks selector expressions (ODOT) where the
// base expression is a type expression (OTYPE).
func typecheckMethodExpr(n *Node) (res *Node) {
- if enableTrace && Flag.LowerT {
+ if enableTrace && base.Flag.LowerT {
defer tracePrint("typecheckMethodExpr", n)(&res)
}
} else {
mt := methtype(t)
if mt == nil {
- yyerror("%v undefined (type %v has no method %v)", n, t, n.Sym)
+ base.Errorf("%v undefined (type %v has no method %v)", n, t, n.Sym)
n.Type = nil
return n
}
m := lookdot1(n, s, t, ms, 0)
if m == nil {
if lookdot1(n, s, t, ms, 1) != nil {
- yyerror("%v undefined (cannot refer to unexported method %v)", n, s)
+ base.Errorf("%v undefined (cannot refer to unexported method %v)", n, s)
} else if _, ambig := dotpath(s, t, nil, false); ambig {
- yyerror("%v undefined (ambiguous selector)", n) // method or field
+ base.Errorf("%v undefined (ambiguous selector)", n) // method or field
} else {
- yyerror("%v undefined (type %v has no method %v)", n, t, s)
+ base.Errorf("%v undefined (type %v has no method %v)", n, t, s)
}
n.Type = nil
return n
}
if !isMethodApplicable(t, m) {
- yyerror("invalid method expression %v (needs pointer receiver: (*%v).%S)", n, t, s)
+ base.Errorf("invalid method expression %v (needs pointer receiver: (*%v).%S)", n, t, s)
n.Type = nil
return n
}
// methodSym already marked n.Sym as a function.
// Issue 25065. Make sure that we emit the symbol for a local method.
- if Ctxt.Flag_dynlink && !inimport && (t.Sym == nil || t.Sym.Pkg == localpkg) {
+ if base.Ctxt.Flag_dynlink && !inimport && (t.Sym == nil || t.Sym.Pkg == localpkg) {
makefuncsym(n.Sym)
}
return f1
}
if f2 != nil {
- yyerror("%v is both field and method", n.Sym)
+ base.Errorf("%v is both field and method", n.Sym)
}
if f1.Offset == BADWIDTH {
- Fatalf("lookdot badwidth %v %p", f1, f1)
+ base.Fatalf("lookdot badwidth %v %p", f1, f1)
}
n.Xoffset = f1.Offset
n.Type = f1.Type
n.Left.SetImplicit(true)
n.Left = typecheck(n.Left, ctxType|ctxExpr)
} else if tt.IsPtr() && tt.Elem().IsPtr() && types.Identical(derefall(tt), derefall(rcvr)) {
- yyerror("calling method %v with receiver %L requires explicit dereference", n.Sym, n.Left)
+ base.Errorf("calling method %v with receiver %L requires explicit dereference", n.Sym, n.Left)
for tt.IsPtr() {
// Stop one level early for method with pointer receiver.
if rcvr.IsPtr() && !tt.Elem().IsPtr() {
tt = tt.Elem()
}
} else {
- Fatalf("method mismatch: %v for %v", rcvr, tt)
+ base.Fatalf("method mismatch: %v for %v", rcvr, tt)
}
}
var t *types.Type
var i int
- lno := lineno
- defer func() { lineno = lno }()
+ lno := base.Pos
+ defer func() { base.Pos = lno }()
if tstruct.Broke() {
return
}
if isddd {
if call != nil {
- yyerror("invalid use of ... in call to %v", call)
+ base.Errorf("invalid use of ... in call to %v", call)
} else {
- yyerror("invalid use of ... in %v", op)
+ base.Errorf("invalid use of ... in %v", op)
}
}
return
// Method expressions have the form T.M, and the compiler has
// rewritten those to ONAME nodes but left T in Left.
if call.Op == OMETHEXPR {
- yyerror("not enough arguments in call to method expression %v%s", call, details)
+ base.Errorf("not enough arguments in call to method expression %v%s", call, details)
} else {
- yyerror("not enough arguments in call to %v%s", call, details)
+ base.Errorf("not enough arguments in call to %v%s", call, details)
}
} else {
- yyerror("not enough arguments to %v%s", op, details)
+ base.Errorf("not enough arguments to %v%s", op, details)
}
if n != nil {
n.SetDiag(true)
toomany:
details := errorDetails(nl, tstruct, isddd)
if call != nil {
- yyerror("too many arguments in call to %v%s", call, details)
+ base.Errorf("too many arguments in call to %v%s", call, details)
} else {
- yyerror("too many arguments to %v%s", op, details)
+ base.Errorf("too many arguments to %v%s", op, details)
}
}
// Turn []T... argument to ...T for clearer error message.
if isddd {
if !t.IsSlice() {
- Fatalf("bad type for ... argument: %v", t)
+ base.Fatalf("bad type for ... argument: %v", t)
}
return "..." + t.Elem().String()
}
// type check composite
func fielddup(name string, hash map[string]bool) {
if hash[name] {
- yyerror("duplicate field name in struct literal: %s", name)
+ base.Errorf("duplicate field name in struct literal: %s", name)
return
}
hash[name] = true
// The result of typecheckcomplit MUST be assigned back to n, e.g.
// n.Left = typecheckcomplit(n.Left)
func typecheckcomplit(n *Node) (res *Node) {
- if enableTrace && Flag.LowerT {
+ if enableTrace && base.Flag.LowerT {
defer tracePrint("typecheckcomplit", n)(&res)
}
- lno := lineno
+ lno := base.Pos
defer func() {
- lineno = lno
+ base.Pos = lno
}()
if n.Right == nil {
- yyerrorl(n.Pos, "missing type in composite literal")
+ base.ErrorfAt(n.Pos, "missing type in composite literal")
n.Type = nil
return n
}
switch t.Etype {
default:
- yyerror("invalid composite literal type %v", t)
+ base.Errorf("invalid composite literal type %v", t)
n.Type = nil
case TARRAY:
setlineno(l)
if l.Op != OKEY {
n.List.SetIndex(i3, typecheck(l, ctxExpr))
- yyerror("missing key in map literal")
+ base.Errorf("missing key in map literal")
continue
}
r = pushtype(r, t.Key())
r = typecheck(r, ctxExpr)
l.Left = assignconv(r, t.Key(), "map key")
- cs.add(lineno, l.Left, "key", "map literal")
+ cs.add(base.Pos, l.Left, "key", "map literal")
r = l.Right
r = pushtype(r, t.Elem())
ls[i] = n1
if i >= t.NumFields() {
if !errored {
- yyerror("too many values in %v", n)
+ base.Errorf("too many values in %v", n)
errored = true
}
continue
f := t.Field(i)
s := f.Sym
if s != nil && !types.IsExported(s.Name) && s.Pkg != localpkg {
- yyerror("implicit assignment of unexported field '%s' in %v literal", s.Name, t)
+ base.Errorf("implicit assignment of unexported field '%s' in %v literal", s.Name, t)
}
// No pushtype allowed here. Must name fields for that.
n1 = assignconv(n1, f.Type, "field value")
ls[i] = n1
}
if len(ls) < t.NumFields() {
- yyerror("too few values in %v", n)
+ base.Errorf("too few values in %v", n)
}
} else {
hash := make(map[string]bool)
// so s will be non-nil, but an OXDOT
// is never a valid struct literal key.
if key.Sym == nil || key.Op == OXDOT || key.Sym.IsBlank() {
- yyerror("invalid field name %v in struct initializer", key)
+ base.Errorf("invalid field name %v in struct initializer", key)
l.Left = typecheck(l.Left, ctxExpr)
continue
}
if l.Op != OSTRUCTKEY {
if !errored {
- yyerror("mixture of field:value and value initializers")
+ base.Errorf("mixture of field:value and value initializers")
errored = true
}
ls[i] = typecheck(ls[i], ctxExpr)
if f == nil {
if ci := lookdot1(nil, l.Sym, t, t.Fields(), 2); ci != nil { // Case-insensitive lookup.
if visible(ci.Sym) {
- yyerror("unknown field '%v' in struct literal of type %v (but does have %v)", l.Sym, t, ci.Sym)
+ base.Errorf("unknown field '%v' in struct literal of type %v (but does have %v)", l.Sym, t, ci.Sym)
} else if nonexported(l.Sym) && l.Sym.Name == ci.Sym.Name { // Ensure exactness before the suggestion.
- yyerror("cannot refer to unexported field '%v' in struct literal of type %v", l.Sym, t)
+ base.Errorf("cannot refer to unexported field '%v' in struct literal of type %v", l.Sym, t)
} else {
- yyerror("unknown field '%v' in struct literal of type %v", l.Sym, t)
+ base.Errorf("unknown field '%v' in struct literal of type %v", l.Sym, t)
}
continue
}
var f *types.Field
p, _ := dotpath(l.Sym, t, &f, true)
if p == nil || f.IsMethod() {
- yyerror("unknown field '%v' in struct literal of type %v", l.Sym, t)
+ base.Errorf("unknown field '%v' in struct literal of type %v", l.Sym, t)
continue
}
// dotpath returns the parent embedded types in reverse order.
ep = append(ep, p[ei].field.Sym.Name)
}
ep = append(ep, l.Sym.Name)
- yyerror("cannot use promoted field %v in struct literal of type %v", strings.Join(ep, "."), t)
+ base.Errorf("cannot use promoted field %v in struct literal of type %v", strings.Join(ep, "."), t)
continue
}
fielddup(f.Sym.Name, hash)
if key < 0 {
if !elt.Left.Diag() {
if key == -2 {
- yyerror("index too large")
+ base.Errorf("index too large")
} else {
- yyerror("index must be non-negative integer constant")
+ base.Errorf("index must be non-negative integer constant")
}
elt.Left.SetDiag(true)
}
if key >= 0 {
if indices != nil {
if indices[key] {
- yyerror("duplicate index in %s: %d", ctx, key)
+ base.Errorf("duplicate index in %s: %d", ctx, key)
} else {
indices[key] = true
}
}
if bound >= 0 && key >= bound {
- yyerror("array index %d out of bounds [0:%d]", key, bound)
+ base.Errorf("array index %d out of bounds [0:%d]", key, bound)
bound = -1
}
}
func checklvalue(n *Node, verb string) {
if !islvalue(n) {
- yyerror("cannot %s %v", verb, n)
+ base.Errorf("cannot %s %v", verb, n)
}
}
switch {
case n.Op == ODOT && n.Left.Op == OINDEXMAP:
- yyerror("cannot assign to struct field %v in map", n)
+ base.Errorf("cannot assign to struct field %v in map", n)
case (n.Op == OINDEX && n.Left.Type.IsString()) || n.Op == OSLICESTR:
- yyerror("cannot assign to %v (strings are immutable)", n)
+ base.Errorf("cannot assign to %v (strings are immutable)", n)
case n.Op == OLITERAL && n.Sym != nil && n.isGoConst():
- yyerror("cannot assign to %v (declared const)", n)
+ base.Errorf("cannot assign to %v (declared const)", n)
default:
- yyerror("cannot assign to %v", n)
+ base.Errorf("cannot assign to %v", n)
}
n.Type = nil
}
// if this assignment is the definition of a var on the left side,
// fill in the var's type.
func typecheckas(n *Node) {
- if enableTrace && Flag.LowerT {
+ if enableTrace && base.Flag.LowerT {
defer tracePrint("typecheckas", n)(nil)
}
checkassign(n, n.Left)
if n.Right != nil && n.Right.Type != nil {
if n.Right.Type.IsFuncArgStruct() {
- yyerror("assignment mismatch: 1 variable but %v returns %d values", n.Right.Left, n.Right.Type.NumFields())
+ base.Errorf("assignment mismatch: 1 variable but %v returns %d values", n.Right.Left, n.Right.Type.NumFields())
// Multi-value RHS isn't actually valid for OAS; nil out
// to indicate failed typechecking.
n.Right.Type = nil
func checkassignto(src *types.Type, dst *Node) {
if op, why := assignop(src, dst.Type); op == OXXX {
- yyerror("cannot assign %v to %L in multiple assignment%s", src, dst, why)
+ base.Errorf("cannot assign %v to %L in multiple assignment%s", src, dst, why)
return
}
}
func typecheckas2(n *Node) {
- if enableTrace && Flag.LowerT {
+ if enableTrace && base.Flag.LowerT {
defer tracePrint("typecheckas2", n)(nil)
}
mismatch:
switch r.Op {
default:
- yyerror("assignment mismatch: %d variables but %d values", cl, cr)
+ base.Errorf("assignment mismatch: %d variables but %d values", cl, cr)
case OCALLFUNC, OCALLMETH, OCALLINTER:
- yyerror("assignment mismatch: %d variables but %v returns %d values", cl, r.Left, cr)
+ base.Errorf("assignment mismatch: %d variables but %v returns %d values", cl, r.Left, cr)
}
// second half of dance
// type check function definition
func typecheckfunc(n *Node) {
- if enableTrace && Flag.LowerT {
+ if enableTrace && base.Flag.LowerT {
defer tracePrint("typecheckfunc", n)(nil)
}
declare(n.Func.Nname, PFUNC)
}
- if Ctxt.Flag_dynlink && !inimport && n.Func.Nname != nil {
+ if base.Ctxt.Flag_dynlink && !inimport && n.Func.Nname != nil {
makefuncsym(n.Func.Nname.Sym)
}
}
// n.Left = stringtoruneslit(n.Left)
func stringtoruneslit(n *Node) *Node {
if n.Left.Op != OLITERAL || n.Left.Val().Kind() != constant.String {
- Fatalf("stringtoarraylit %v", n)
+ base.Fatalf("stringtoarraylit %v", n)
}
var l []*Node
for _, n := range mapqueue {
k := n.Type.MapType().Key
if !k.Broke() && !IsComparable(k) {
- yyerrorl(n.Pos, "invalid map key type %v", k)
+ base.ErrorfAt(n.Pos, "invalid map key type %v", k)
}
}
mapqueue = nil
// Double-check use of type as embedded type.
if ft.Embedlineno.IsKnown() {
if t.IsPtr() || t.IsUnsafePtr() {
- yyerrorl(ft.Embedlineno, "embedded type cannot be a pointer")
+ base.ErrorfAt(ft.Embedlineno, "embedded type cannot be a pointer")
}
}
}
func typecheckdeftype(n *Node) {
- if enableTrace && Flag.LowerT {
+ if enableTrace && base.Flag.LowerT {
defer tracePrint("typecheckdeftype", n)(nil)
}
}
func typecheckdef(n *Node) {
- if enableTrace && Flag.LowerT {
+ if enableTrace && base.Flag.LowerT {
defer tracePrint("typecheckdef", n)(nil)
}
// Note: adderrorname looks for this string and
// adds context about the outer expression
- yyerrorl(lineno, "undefined: %v", n.Sym)
+ base.ErrorfAt(base.Pos, "undefined: %v", n.Sym)
}
- lineno = lno
+ base.Pos = lno
return
}
if n.Walkdef() == 1 {
- lineno = lno
+ base.Pos = lno
return
}
typecheckdefstack = append(typecheckdefstack, n)
if n.Walkdef() == 2 {
- flusherrors()
+ base.FlushErrors()
fmt.Printf("typecheckdef loop:")
for i := len(typecheckdefstack) - 1; i >= 0; i-- {
n := typecheckdefstack[i]
fmt.Printf(" %v", n.Sym)
}
fmt.Printf("\n")
- Fatalf("typecheckdef loop")
+ base.Fatalf("typecheckdef loop")
}
n.SetWalkdef(2)
switch n.Op {
default:
- Fatalf("typecheckdef %v", n.Op)
+ base.Fatalf("typecheckdef %v", n.Op)
case OLITERAL:
if n.Name.Param.Ntype != nil {
n.Name.Defn = nil
if e == nil {
Dump("typecheckdef nil defn", n)
- yyerrorl(n.Pos, "xxx")
+ base.ErrorfAt(n.Pos, "xxx")
}
e = typecheck(e, ctxExpr)
if !e.isGoConst() {
if !e.Diag() {
if e.Op == ONIL {
- yyerrorl(n.Pos, "const initializer cannot be nil")
+ base.ErrorfAt(n.Pos, "const initializer cannot be nil")
} else {
- yyerrorl(n.Pos, "const initializer %v is not a constant", e)
+ base.ErrorfAt(n.Pos, "const initializer %v is not a constant", e)
}
e.SetDiag(true)
}
t := n.Type
if t != nil {
if !okforconst[t.Etype] {
- yyerrorl(n.Pos, "invalid constant type %v", t)
+ base.ErrorfAt(n.Pos, "invalid constant type %v", t)
goto ret
}
if !e.Type.IsUntyped() && !types.Identical(t, e.Type) {
- yyerrorl(n.Pos, "cannot use %L as type %v in const initializer", e, t)
+ base.ErrorfAt(n.Pos, "cannot use %L as type %v in const initializer", e, t)
goto ret
}
if n.SubOp() != 0 { // like OPRINTN
break
}
- if Errors() > 0 {
+ if base.Errors() > 0 {
// Can have undefined variables in x := foo
// that make x have an n.name.Defn == nil.
// If there are other errors anyway, don't
break
}
- Fatalf("var without type, init: %v", n.Sym)
+ base.Fatalf("var without type, init: %v", n.Sym)
}
if n.Name.Defn.Op == ONAME {
n.SetWalkdef(1)
setTypeNode(n, types.New(TFORW))
n.Type.Sym = n.Sym
- errorsBefore := Errors()
+ errorsBefore := base.Errors()
typecheckdeftype(n)
- if n.Type.Etype == TFORW && Errors() > errorsBefore {
+ if n.Type.Etype == TFORW && base.Errors() > errorsBefore {
// Something went wrong during type-checking,
// but it was reported. Silence future errors.
n.Type.SetBroke(true)
ret:
if n.Op != OLITERAL && n.Type != nil && n.Type.IsUntyped() {
- Fatalf("got %v for %v", n.Type, n)
+ base.Fatalf("got %v for %v", n.Type, n)
}
last := len(typecheckdefstack) - 1
if typecheckdefstack[last] != n {
- Fatalf("typecheckdefstack mismatch")
+ base.Fatalf("typecheckdefstack mismatch")
}
typecheckdefstack[last] = nil
typecheckdefstack = typecheckdefstack[:last]
- lineno = lno
+ base.Pos = lno
n.SetWalkdef(1)
}
func checkmake(t *types.Type, arg string, np **Node) bool {
n := *np
if !n.Type.IsInteger() && n.Type.Etype != TIDEAL {
- yyerror("non-integer %s argument in make(%v) - %v", arg, t, n.Type)
+ base.Errorf("non-integer %s argument in make(%v) - %v", arg, t, n.Type)
return false
}
if n.Op == OLITERAL {
v := toint(n.Val())
if constant.Sign(v) < 0 {
- yyerror("negative %s argument in make(%v)", arg, t)
+ base.Errorf("negative %s argument in make(%v)", arg, t)
return false
}
if doesoverflow(v, types.Types[TINT]) {
- yyerror("%s argument too large in make(%v)", arg, t)
+ base.Errorf("%s argument too large in make(%v)", arg, t)
return false
}
}
if fn.Type.NumResults() != 0 && fn.Nbody.Len() != 0 {
markbreaklist(fn.Nbody, nil)
if !fn.Nbody.isterminating() {
- yyerrorl(fn.Func.Endlineno, "missing return at end of function")
+ base.ErrorfAt(fn.Func.Endlineno, "missing return at end of function")
}
}
}
case OCALLPART:
return callpartMethod(n)
}
- Fatalf("unexpected node: %v (%v)", n, n.Op)
+ base.Fatalf("unexpected node: %v (%v)", n, n.Op)
panic("unreachable")
}
package gc
import (
+ "cmd/compile/internal/base"
"cmd/compile/internal/types"
"cmd/internal/src"
)
for _, s := range &basicTypes {
etype := s.etype
if int(etype) >= len(types.Types) {
- Fatalf("lexinit: %s bad etype", s.name)
+ base.Fatalf("lexinit: %s bad etype", s.name)
}
s2 := builtinpkg.Lookup(s.name)
t := types.Types[etype]
func typeinit() {
if Widthptr == 0 {
- Fatalf("typeinit before betypeinit")
+ base.Fatalf("typeinit before betypeinit")
}
for et := types.EType(0); et < NTYPE; et++ {
package gc
+import "cmd/compile/internal/base"
+
// evalunsafe evaluates a package unsafe operation and returns the result.
func evalunsafe(n *Node) int64 {
switch n.Op {
case OOFFSETOF:
// must be a selector.
if n.Left.Op != OXDOT {
- yyerror("invalid expression %v", n)
+ base.Errorf("invalid expression %v", n)
return 0
}
case ODOT, ODOTPTR:
break
case OCALLPART:
- yyerror("invalid expression %v: argument is a method value", n)
+ base.Errorf("invalid expression %v: argument is a method value", n)
return 0
default:
- yyerror("invalid expression %v", n)
+ base.Errorf("invalid expression %v", n)
return 0
}
// but accessing f must not otherwise involve
// indirection via embedded pointer types.
if r.Left != sbase {
- yyerror("invalid expression %v: selector implies indirection of embedded %v", n, r.Left)
+ base.Errorf("invalid expression %v: selector implies indirection of embedded %v", n, r.Left)
return 0
}
fallthrough
v += r.Xoffset
default:
Dump("unsafenmagic", n.Left)
- Fatalf("impossible %#v node after dot insertion", r.Op)
+ base.Fatalf("impossible %#v node after dot insertion", r.Op)
}
}
return v
}
- Fatalf("unexpected op %v", n.Op)
+ base.Fatalf("unexpected op %v", n.Op)
return 0
}
"os"
"runtime"
"runtime/pprof"
+
+ "cmd/compile/internal/base"
)
// Line returns n's position as a string. If n has been inlined,
// it uses the outermost position where n has been inlined.
func (n *Node) Line() string {
- return linestr(n.Pos)
-}
-
-var atExitFuncs []func()
-
-func atExit(f func()) {
- atExitFuncs = append(atExitFuncs, f)
-}
-
-func Exit(code int) {
- for i := len(atExitFuncs) - 1; i >= 0; i-- {
- f := atExitFuncs[i]
- atExitFuncs = atExitFuncs[:i]
- f()
- }
- os.Exit(code)
+ return base.FmtPos(n.Pos)
}
var (
)
func startProfile() {
- if Flag.CPUProfile != "" {
- f, err := os.Create(Flag.CPUProfile)
+ if base.Flag.CPUProfile != "" {
+ f, err := os.Create(base.Flag.CPUProfile)
if err != nil {
- Fatalf("%v", err)
+ base.Fatalf("%v", err)
}
if err := pprof.StartCPUProfile(f); err != nil {
- Fatalf("%v", err)
+ base.Fatalf("%v", err)
}
- atExit(pprof.StopCPUProfile)
+ base.AtExit(pprof.StopCPUProfile)
}
- if Flag.MemProfile != "" {
+ if base.Flag.MemProfile != "" {
if memprofilerate != 0 {
runtime.MemProfileRate = int(memprofilerate)
}
- f, err := os.Create(Flag.MemProfile)
+ f, err := os.Create(base.Flag.MemProfile)
if err != nil {
- Fatalf("%v", err)
+ base.Fatalf("%v", err)
}
- atExit(func() {
+ base.AtExit(func() {
// Profile all outstanding allocations.
runtime.GC()
// compilebench parses the memory profile to extract memstats,
// See golang.org/issue/18641 and runtime/pprof/pprof.go:writeHeap.
const writeLegacyFormat = 1
if err := pprof.Lookup("heap").WriteTo(f, writeLegacyFormat); err != nil {
- Fatalf("%v", err)
+ base.Fatalf("%v", err)
}
})
} else {
// Not doing memory profiling; disable it entirely.
runtime.MemProfileRate = 0
}
- if Flag.BlockProfile != "" {
- f, err := os.Create(Flag.BlockProfile)
+ if base.Flag.BlockProfile != "" {
+ f, err := os.Create(base.Flag.BlockProfile)
if err != nil {
- Fatalf("%v", err)
+ base.Fatalf("%v", err)
}
runtime.SetBlockProfileRate(1)
- atExit(func() {
+ base.AtExit(func() {
pprof.Lookup("block").WriteTo(f, 0)
f.Close()
})
}
- if Flag.MutexProfile != "" {
- f, err := os.Create(Flag.MutexProfile)
+ if base.Flag.MutexProfile != "" {
+ f, err := os.Create(base.Flag.MutexProfile)
if err != nil {
- Fatalf("%v", err)
+ base.Fatalf("%v", err)
}
startMutexProfiling()
- atExit(func() {
+ base.AtExit(func() {
pprof.Lookup("mutex").WriteTo(f, 0)
f.Close()
})
}
- if Flag.TraceProfile != "" && traceHandler != nil {
- traceHandler(Flag.TraceProfile)
+ if base.Flag.TraceProfile != "" && traceHandler != nil {
+ traceHandler(base.Flag.TraceProfile)
}
}
package gc
import (
+ "cmd/compile/internal/base"
"cmd/compile/internal/types"
"cmd/internal/obj"
"cmd/internal/objabi"
func walk(fn *Node) {
Curfn = fn
- errorsBefore := Errors()
+ errorsBefore := base.Errors()
- if Flag.W != 0 {
+ if base.Flag.W != 0 {
s := fmt.Sprintf("\nbefore walk %v", Curfn.Func.Nname.Sym)
dumplist(s, Curfn.Nbody)
}
- lno := lineno
+ lno := base.Pos
// Final typecheck for any unused variables.
for i, ln := range fn.Func.Dcl {
if defn.Left.Name.Used() {
continue
}
- yyerrorl(defn.Left.Pos, "%v declared but not used", ln.Sym)
+ base.ErrorfAt(defn.Left.Pos, "%v declared but not used", ln.Sym)
defn.Left.Name.SetUsed(true) // suppress repeats
} else {
- yyerrorl(ln.Pos, "%v declared but not used", ln.Sym)
+ base.ErrorfAt(ln.Pos, "%v declared but not used", ln.Sym)
}
}
- lineno = lno
- if Errors() > errorsBefore {
+ base.Pos = lno
+ if base.Errors() > errorsBefore {
return
}
walkstmtlist(Curfn.Nbody.Slice())
- if Flag.W != 0 {
+ if base.Flag.W != 0 {
s := fmt.Sprintf("after walk %v", Curfn.Func.Nname.Sym)
dumplist(s, Curfn.Nbody)
}
zeroResults()
heapmoves()
- if Flag.W != 0 && Curfn.Func.Enter.Len() > 0 {
+ if base.Flag.W != 0 && Curfn.Func.Enter.Len() > 0 {
s := fmt.Sprintf("enter %v", Curfn.Func.Nname.Sym)
dumplist(s, Curfn.Func.Enter)
}
switch n.Op {
default:
if n.Op == ONAME {
- yyerror("%v is not a top level statement", n.Sym)
+ base.Errorf("%v is not a top level statement", n.Sym)
} else {
- yyerror("%v is not a top level statement", n.Op)
+ base.Errorf("%v is not a top level statement", n.Op)
}
Dump("nottop", n)
ORECOVER,
OGETG:
if n.Typecheck() == 0 {
- Fatalf("missing typecheck: %+v", n)
+ base.Fatalf("missing typecheck: %+v", n)
}
wascopy := n.Op == OCOPY
init := n.Ninit
// the value received.
case ORECV:
if n.Typecheck() == 0 {
- Fatalf("missing typecheck: %+v", n)
+ base.Fatalf("missing typecheck: %+v", n)
}
init := n.Ninit
n.Ninit.Set(nil)
case ODCL:
v := n.Left
if v.Class() == PAUTOHEAP {
- if Flag.CompilingRuntime {
- yyerror("%v escapes to heap, not allowed in runtime", v)
+ if base.Flag.CompilingRuntime {
+ base.Errorf("%v escapes to heap, not allowed in runtime", v)
}
if prealloc[v] == nil {
prealloc[v] = callnew(v.Type)
walkstmtlist(n.List.Slice())
case OCASE:
- yyerror("case statement out of place")
+ base.Errorf("case statement out of place")
case ODEFER:
Curfn.Func.SetHasDefer(true)
if got, want := n.List.Len(), len(rl); got != want {
// order should have rewritten multi-value function calls
// with explicit OAS2FUNC nodes.
- Fatalf("expected %v return arguments, have %v", want, got)
+ base.Fatalf("expected %v return arguments, have %v", want, got)
}
// move function calls out, to make reorder3's job easier.
}
if n.Op == ONAME {
- Fatalf("walkstmt ended up with name: %+v", n)
+ base.Fatalf("walkstmt ended up with name: %+v", n)
}
return n
}
return "convT2I", true
}
}
- Fatalf("unknown conv func %c2%c", from.Tie(), to.Tie())
+ base.Fatalf("unknown conv func %c2%c", from.Tie(), to.Tie())
panic("unreachable")
}
// not okay to use n->ninit when walking n,
// because we might replace n with some other node
// and would lose the init list.
- Fatalf("walkexpr init == &n->ninit")
+ base.Fatalf("walkexpr init == &n->ninit")
}
if n.Ninit.Len() != 0 {
lno := setlineno(n)
- if Flag.LowerW > 1 {
+ if base.Flag.LowerW > 1 {
Dump("before walk expr", n)
}
if n.Typecheck() != 1 {
- Fatalf("missed typecheck: %+v", n)
+ base.Fatalf("missed typecheck: %+v", n)
}
if n.Type.IsUntyped() {
- Fatalf("expression has untyped type: %+v", n)
+ base.Fatalf("expression has untyped type: %+v", n)
}
if n.Op == ONAME && n.Class() == PAUTOHEAP {
switch n.Op {
default:
Dump("walk", n)
- Fatalf("walkexpr: switch 1 unknown op %+S", n)
+ base.Fatalf("walkexpr: switch 1 unknown op %+S", n)
case ONONAME, OEMPTY, OGETG, ONEWOBJ, OMETHEXPR:
// the mapassign call.
mapAppend := n.Left.Op == OINDEXMAP && n.Right.Op == OAPPEND
if mapAppend && !samesafeexpr(n.Left, n.Right.List.First()) {
- Fatalf("not same expressions: %v != %v", n.Left, n.Right.List.First())
+ base.Fatalf("not same expressions: %v != %v", n.Left, n.Right.List.First())
}
n.Left = walkexpr(n.Left, init)
// x = append(...)
r := n.Right
if r.Type.Elem().NotInHeap() {
- yyerror("%v can't be allocated in Go; it is incomplete (or unallocatable)", r.Type.Elem())
+ base.Errorf("%v can't be allocated in Go; it is incomplete (or unallocatable)", r.Type.Elem())
}
switch {
case isAppendOfMake(r):
}
if t.IsArray() {
n.SetBounded(bounded(r, t.NumElem()))
- if Flag.LowerM != 0 && n.Bounded() && !Isconst(n.Right, constant.Int) {
- Warn("index bounds check elided")
+ if base.Flag.LowerM != 0 && n.Bounded() && !Isconst(n.Right, constant.Int) {
+ base.Warn("index bounds check elided")
}
if smallintconst(n.Right) && !n.Bounded() {
- yyerror("index out of bounds")
+ base.Errorf("index out of bounds")
}
} else if Isconst(n.Left, constant.String) {
n.SetBounded(bounded(r, int64(len(n.Left.StringVal()))))
- if Flag.LowerM != 0 && n.Bounded() && !Isconst(n.Right, constant.Int) {
- Warn("index bounds check elided")
+ if base.Flag.LowerM != 0 && n.Bounded() && !Isconst(n.Right, constant.Int) {
+ base.Warn("index bounds check elided")
}
if smallintconst(n.Right) && !n.Bounded() {
- yyerror("index out of bounds")
+ base.Errorf("index out of bounds")
}
}
if Isconst(n.Right, constant.Int) {
if v := n.Right.Val(); constant.Sign(v) < 0 || doesoverflow(v, types.Types[TINT]) {
- yyerror("index out of bounds")
+ base.Errorf("index out of bounds")
}
}
n.SetTypecheck(1)
case ORECV:
- Fatalf("walkexpr ORECV") // should see inside OAS only
+ base.Fatalf("walkexpr ORECV") // should see inside OAS only
case OSLICEHEADER:
n.Left = walkexpr(n.Left, init)
case ONEW:
if n.Type.Elem().NotInHeap() {
- yyerror("%v can't be allocated in Go; it is incomplete (or unallocatable)", n.Type.Elem())
+ base.Errorf("%v can't be allocated in Go; it is incomplete (or unallocatable)", n.Type.Elem())
}
if n.Esc == EscNone {
if n.Type.Elem().Width >= maxImplicitStackVarSize {
- Fatalf("large ONEW with EscNone: %v", n)
+ base.Fatalf("large ONEW with EscNone: %v", n)
}
r := temp(n.Type.Elem())
r = nod(OAS, r, nil) // zero temp
case OAPPEND:
// order should make sure we only see OAS(node, OAPPEND), which we handle above.
- Fatalf("append outside assignment")
+ base.Fatalf("append outside assignment")
case OCOPY:
- n = copyany(n, init, instrumenting && !Flag.CompilingRuntime)
+ n = copyany(n, init, instrumenting && !base.Flag.CompilingRuntime)
// cannot use chanfn - closechan takes any, not chan any
case OCLOSE:
}
t := n.Type
if t.Elem().NotInHeap() {
- yyerror("%v can't be allocated in Go; it is incomplete (or unallocatable)", t.Elem())
+ base.Errorf("%v can't be allocated in Go; it is incomplete (or unallocatable)", t.Elem())
}
if n.Esc == EscNone {
if why := heapAllocReason(n); why != "" {
- Fatalf("%v has EscNone, but %v", n, why)
+ base.Fatalf("%v has EscNone, but %v", n, why)
}
// var arr [r]T
// n = arr[:l]
i := indexconst(r)
if i < 0 {
- Fatalf("walkexpr: invalid index %v", r)
+ base.Fatalf("walkexpr: invalid index %v", r)
}
// cap is constrained to [0,2^31) or [0,2^63) depending on whether
case OMAKESLICECOPY:
if n.Esc == EscNone {
- Fatalf("OMAKESLICECOPY with EscNone: %v", n)
+ base.Fatalf("OMAKESLICECOPY with EscNone: %v", n)
}
t := n.Type
if t.Elem().NotInHeap() {
- yyerror("%v can't be allocated in Go; it is incomplete (or unallocatable)", t.Elem())
+ base.Errorf("%v can't be allocated in Go; it is incomplete (or unallocatable)", t.Elem())
}
length := conv(n.Left, types.Types[TINT])
t := n.Type
n = evalConst(n)
if n.Type != t {
- Fatalf("evconst changed Type: %v had type %v, now %v", n, t, n.Type)
+ base.Fatalf("evconst changed Type: %v had type %v, now %v", n, t, n.Type)
}
if n.Op == OLITERAL {
n = typecheck(n, ctxExpr)
updateHasCall(n)
- if Flag.LowerW != 0 && n != nil {
+ if base.Flag.LowerW != 0 && n != nil {
Dump("after walk expr", n)
}
- lineno = lno
+ base.Pos = lno
return n
}
n.SetSliceBounds(low, high, max)
if (n.Op == OSLICE || n.Op == OSLICESTR) && low == nil && high == nil {
// Reduce x[:] to x.
- if Debug.Slice > 0 {
- Warn("slice: omit slice operation")
+ if base.Debug.Slice > 0 {
+ base.Warn("slice: omit slice operation")
}
return n.Left
}
var nln, nrn Nodes
nln.Set(nl)
nrn.Set(nr)
- Fatalf("error in shape across %+v %v %+v / %d %d [%s]", nln, op, nrn, len(nl), len(nr), Curfn.funcname())
+ base.Fatalf("error in shape across %+v %v %+v / %d %d [%s]", nln, op, nrn, len(nl), len(nr), Curfn.funcname())
}
return nn
}
// expr-list = func()
func ascompatet(nl Nodes, nr *types.Type) []*Node {
if nl.Len() != nr.NumFields() {
- Fatalf("ascompatet: assignment count mismatch: %d = %d", nl.Len(), nr.NumFields())
+ base.Fatalf("ascompatet: assignment count mismatch: %d = %d", nl.Len(), nr.NumFields())
}
var nn, mm Nodes
}
res := nod(ORESULT, nil, nil)
- res.Xoffset = Ctxt.FixedFrameSize() + r.Offset
+ res.Xoffset = base.Ctxt.FixedFrameSize() + r.Offset
res.Type = r.Type
res.SetTypecheck(1)
updateHasCall(a)
if a.HasCall() {
Dump("ascompatet ucount", a)
- Fatalf("ascompatet: too many function calls evaluating parameters")
+ base.Fatalf("ascompatet: too many function calls evaluating parameters")
}
nn.Append(a)
n = typecheck(n, ctxExpr)
if n.Type == nil {
- Fatalf("mkdotargslice: typecheck failed")
+ base.Fatalf("mkdotargslice: typecheck failed")
}
return n
}
func convas(n *Node, init *Nodes) *Node {
if n.Op != OAS {
- Fatalf("convas: not OAS %v", n.Op)
+ base.Fatalf("convas: not OAS %v", n.Op)
}
defer updateHasCall(n)
switch l.Op {
default:
- Fatalf("reorder3 unexpected lvalue %#v", l.Op)
+ base.Fatalf("reorder3 unexpected lvalue %#v", l.Op)
case ONAME:
break
for {
switch n.Op {
case OXDOT:
- Fatalf("OXDOT in walk")
+ base.Fatalf("OXDOT in walk")
case ODOT, OPAREN, OCONVNOP:
n = n.Left
continue
switch l.Class() {
default:
- Fatalf("unexpected class: %v, %v", l, l.Class())
+ base.Fatalf("unexpected class: %v, %v", l, l.Class())
case PAUTOHEAP, PEXTERN:
memwrite = true
case ODOT: // but not ODOTPTR
// Should have been handled in aliased.
- Fatalf("varexpr unexpected ODOT")
+ base.Fatalf("varexpr unexpected ODOT")
}
// Be conservative.
// between the stack and the heap. The generated code is added to Curfn's
// Enter and Exit lists.
func heapmoves() {
- lno := lineno
- lineno = Curfn.Pos
+ lno := base.Pos
+ base.Pos = Curfn.Pos
nn := paramstoheap(Curfn.Type.Recvs())
nn = append(nn, paramstoheap(Curfn.Type.Params())...)
nn = append(nn, paramstoheap(Curfn.Type.Results())...)
Curfn.Func.Enter.Append(nn...)
- lineno = Curfn.Func.Endlineno
+ base.Pos = Curfn.Func.Endlineno
Curfn.Func.Exit.Append(returnsfromheap(Curfn.Type.Results())...)
- lineno = lno
+ base.Pos = lno
}
func vmkcall(fn *Node, t *types.Type, init *Nodes, va []*Node) *Node {
if fn.Type == nil || fn.Type.Etype != TFUNC {
- Fatalf("mkcall %v %v", fn, fn.Type)
+ base.Fatalf("mkcall %v %v", fn, fn.Type)
}
n := fn.Type.NumParams()
if n != len(va) {
- Fatalf("vmkcall %v needs %v args got %v", fn, n, len(va))
+ base.Fatalf("vmkcall %v needs %v args got %v", fn, n, len(va))
}
r := nod(OCALL, fn, nil)
func chanfn(name string, n int, t *types.Type) *Node {
if !t.IsChan() {
- Fatalf("chanfn %v", t)
+ base.Fatalf("chanfn %v", t)
}
fn := syslook(name)
switch n {
default:
- Fatalf("chanfn %d", n)
+ base.Fatalf("chanfn %d", n)
case 1:
fn = substArgTypes(fn, t.Elem())
case 2:
func mapfn(name string, t *types.Type) *Node {
if !t.IsMap() {
- Fatalf("mapfn %v", t)
+ base.Fatalf("mapfn %v", t)
}
fn := syslook(name)
fn = substArgTypes(fn, t.Key(), t.Elem(), t.Key(), t.Elem())
func mapfndel(name string, t *types.Type) *Node {
if !t.IsMap() {
- Fatalf("mapfn %v", t)
+ base.Fatalf("mapfn %v", t)
}
fn := syslook(name)
fn = substArgTypes(fn, t.Key(), t.Elem(), t.Key())
if Widthptr == 4 {
return mapfast32ptr
}
- Fatalf("small pointer %v", t.Key())
+ base.Fatalf("small pointer %v", t.Key())
case AMEM64:
if !t.Key().HasPointers() {
return mapfast64
c := n.List.Len()
if c < 2 {
- Fatalf("addstr count %d too small", c)
+ base.Fatalf("addstr count %d too small", c)
}
buf := nodnil()
ptr1, len1 := nptr1.backingArrayPtrLen()
ptr2, len2 := nptr2.backingArrayPtrLen()
ncopy = mkcall1(fn, types.Types[TINT], &nodes, typename(elemtype), ptr1, len1, ptr2, len2)
- } else if instrumenting && !Flag.CompilingRuntime {
+ } else if instrumenting && !base.Flag.CompilingRuntime {
// rely on runtime to instrument:
// copy(s[len(l1):], l2)
// l2 can be a slice or string.
// isAppendOfMake reports whether n is of the form append(x , make([]T, y)...).
// isAppendOfMake assumes n has already been typechecked.
func isAppendOfMake(n *Node) bool {
- if Flag.N != 0 || instrumenting {
+ if base.Flag.N != 0 || instrumenting {
return false
}
if n.Typecheck() == 0 {
- Fatalf("missing typecheck: %+v", n)
+ base.Fatalf("missing typecheck: %+v", n)
}
if n.Op != OAPPEND || !n.IsDDD() || n.List.Len() != 2 {
// General case, with no function calls left as arguments.
// Leave for gen, except that instrumentation requires old form.
- if !instrumenting || Flag.CompilingRuntime {
+ if !instrumenting || base.Flag.CompilingRuntime {
return n
}
})
return n, false
}
- Fatalf("eqfor %v", t)
+ base.Fatalf("eqfor %v", t)
return nil, false
}
switch t.Etype {
default:
- if Debug.Libfuzzer != 0 && t.IsInteger() {
+ if base.Debug.Libfuzzer != 0 && t.IsInteger() {
n.Left = cheapexpr(n.Left, init)
n.Right = cheapexpr(n.Right, init)
}
paramType = types.Types[TUINT64]
default:
- Fatalf("unexpected integer size %d for %v", t.Size(), t)
+ base.Fatalf("unexpected integer size %d for %v", t.Size(), t)
}
init.Append(mkcall(fn, nil, init, tracecmpArg(l, paramType, init), tracecmpArg(r, paramType, init)))
}
if !inline {
// eq algs take pointers; cmpl and cmpr must be addressable
if !islvalue(cmpl) || !islvalue(cmpr) {
- Fatalf("arguments of comparison must be lvalues - %v %v", cmpl, cmpr)
+ base.Fatalf("arguments of comparison must be lvalues - %v %v", cmpl, cmpr)
}
fn, needsize := eqfor(t)
switch n.Op {
default:
- Fatalf("usefield %v", n.Op)
+ base.Fatalf("usefield %v", n.Op)
case ODOT, ODOTPTR:
break
}
field := n.Opt().(*types.Field)
if field == nil {
- Fatalf("usefield %v %v without paramfld", n.Left.Type, n.Sym)
+ base.Fatalf("usefield %v %v without paramfld", n.Left.Type, n.Sym)
}
if field.Sym != n.Sym || field.Offset != n.Xoffset {
- Fatalf("field inconsistency: %v,%v != %v,%v", field.Sym, field.Offset, n.Sym, n.Xoffset)
+ base.Fatalf("field inconsistency: %v,%v != %v,%v", field.Sym, field.Offset, n.Sym, n.Xoffset)
}
if !strings.Contains(field.Note, "go:\"track\"") {
return
outer = outer.Elem()
}
if outer.Sym == nil {
- yyerror("tracked field must be in named struct type")
+ base.Errorf("tracked field must be in named struct type")
}
if !types.IsExported(field.Sym.Name) {
- yyerror("tracked field must be exported (upper case)")
+ base.Errorf("tracked field must be exported (upper case)")
}
sym := tracksym(outer, field)
}
n.Type = types.SubstAny(n.Type, &types_)
if len(types_) > 0 {
- Fatalf("substArgTypes: too many argument types")
+ base.Fatalf("substArgTypes: too many argument types")
}
return n
}
// isRuneCount reports whether n is of the form len([]rune(string)).
// These are optimized into a call to runtime.countrunes.
func isRuneCount(n *Node) bool {
- return Flag.N == 0 && !instrumenting && n.Op == OLEN && n.Left.Op == OSTR2RUNES
+ return base.Flag.N == 0 && !instrumenting && n.Op == OLEN && n.Left.Op == OSTR2RUNES
}
func walkCheckPtrAlignment(n *Node, init *Nodes, count *Node) *Node {
if !n.Type.IsPtr() {
- Fatalf("expected pointer type: %v", n.Type)
+ base.Fatalf("expected pointer type: %v", n.Type)
}
elem := n.Type.Elem()
if count != nil {
if !elem.IsArray() {
- Fatalf("expected array type: %v", elem)
+ base.Fatalf("expected array type: %v", elem)
}
elem = elem.Elem()
}
} else if opt != nil {
// We use n.Opt() here because today it's not used for OCONVNOP. If that changes,
// there's no guarantee that temporarily replacing it is safe, so just hard fail here.
- Fatalf("unexpected Opt: %v", opt)
+ base.Fatalf("unexpected Opt: %v", opt)
}
n.SetOpt(&walkCheckPtrArithmeticMarker)
defer n.SetOpt(nil)
// function fn at a given level. See debugHelpFooter for defined
// levels.
func checkPtr(fn *Node, level int) bool {
- return Debug.Checkptr >= level && fn.Func.Pragma&NoCheckPtr == 0
+ return base.Debug.Checkptr >= level && fn.Func.Pragma&NoCheckPtr == 0
}
package mips
import (
+ "cmd/compile/internal/base"
"cmd/compile/internal/gc"
"cmd/internal/obj"
"cmd/internal/obj/mips"
}
if cnt < int64(4*gc.Widthptr) {
for i := int64(0); i < cnt; i += int64(gc.Widthptr) {
- p = pp.Appendpp(p, mips.AMOVW, obj.TYPE_REG, mips.REGZERO, 0, obj.TYPE_MEM, mips.REGSP, gc.Ctxt.FixedFrameSize()+off+i)
+ p = pp.Appendpp(p, mips.AMOVW, obj.TYPE_REG, mips.REGZERO, 0, obj.TYPE_MEM, mips.REGSP, base.Ctxt.FixedFrameSize()+off+i)
}
} else {
//fmt.Printf("zerorange frame:%v, lo: %v, hi:%v \n", frame ,lo, hi)
// MOVW R0, (Widthptr)r1
// ADD $Widthptr, r1
// BNE r1, r2, loop
- p = pp.Appendpp(p, mips.AADD, obj.TYPE_CONST, 0, gc.Ctxt.FixedFrameSize()+off-4, obj.TYPE_REG, mips.REGRT1, 0)
+ p = pp.Appendpp(p, mips.AADD, obj.TYPE_CONST, 0, base.Ctxt.FixedFrameSize()+off-4, obj.TYPE_REG, mips.REGRT1, 0)
p.Reg = mips.REGSP
p = pp.Appendpp(p, mips.AADD, obj.TYPE_CONST, 0, cnt, obj.TYPE_REG, mips.REGRT2, 0)
p.Reg = mips.REGRT1
import (
"math"
+ "cmd/compile/internal/base"
"cmd/compile/internal/gc"
"cmd/compile/internal/logopt"
"cmd/compile/internal/ssa"
if logopt.Enabled() {
logopt.LogOpt(v.Pos, "nilcheck", "genssa", v.Block.Func.Name)
}
- if gc.Debug.Nil != 0 && v.Pos.Line() > 1 { // v.Pos.Line()==1 in generated wrappers
- gc.Warnl(v.Pos, "generated nil check")
+ if base.Debug.Nil != 0 && v.Pos.Line() > 1 { // v.Pos.Line()==1 in generated wrappers
+ base.WarnfAt(v.Pos, "generated nil check")
}
case ssa.OpMIPSFPFlagTrue,
ssa.OpMIPSFPFlagFalse:
// caller's SP is FixedFrameSize below the address of the first arg
p := s.Prog(mips.AMOVW)
p.From.Type = obj.TYPE_ADDR
- p.From.Offset = -gc.Ctxt.FixedFrameSize()
+ p.From.Offset = -base.Ctxt.FixedFrameSize()
p.From.Name = obj.NAME_PARAM
p.To.Type = obj.TYPE_REG
p.To.Reg = v.Reg()
import (
"math"
+ "cmd/compile/internal/base"
"cmd/compile/internal/gc"
"cmd/compile/internal/logopt"
"cmd/compile/internal/ssa"
if logopt.Enabled() {
logopt.LogOpt(v.Pos, "nilcheck", "genssa", v.Block.Func.Name)
}
- if gc.Debug.Nil != 0 && v.Pos.Line() > 1 { // v.Pos.Line()==1 in generated wrappers
- gc.Warnl(v.Pos, "generated nil check")
+ if base.Debug.Nil != 0 && v.Pos.Line() > 1 { // v.Pos.Line()==1 in generated wrappers
+ base.WarnfAt(v.Pos, "generated nil check")
}
case ssa.OpMIPS64FPFlagTrue,
ssa.OpMIPS64FPFlagFalse:
// caller's SP is FixedFrameSize below the address of the first arg
p := s.Prog(mips.AMOVV)
p.From.Type = obj.TYPE_ADDR
- p.From.Offset = -gc.Ctxt.FixedFrameSize()
+ p.From.Offset = -base.Ctxt.FixedFrameSize()
p.From.Name = obj.NAME_PARAM
p.To.Type = obj.TYPE_REG
p.To.Reg = v.Reg()
package ppc64
import (
+ "cmd/compile/internal/base"
"cmd/compile/internal/gc"
"cmd/internal/obj"
"cmd/internal/obj/ppc64"
}
if cnt < int64(4*gc.Widthptr) {
for i := int64(0); i < cnt; i += int64(gc.Widthptr) {
- p = pp.Appendpp(p, ppc64.AMOVD, obj.TYPE_REG, ppc64.REGZERO, 0, obj.TYPE_MEM, ppc64.REGSP, gc.Ctxt.FixedFrameSize()+off+i)
+ p = pp.Appendpp(p, ppc64.AMOVD, obj.TYPE_REG, ppc64.REGZERO, 0, obj.TYPE_MEM, ppc64.REGSP, base.Ctxt.FixedFrameSize()+off+i)
}
} else if cnt <= int64(128*gc.Widthptr) {
- p = pp.Appendpp(p, ppc64.AADD, obj.TYPE_CONST, 0, gc.Ctxt.FixedFrameSize()+off-8, obj.TYPE_REG, ppc64.REGRT1, 0)
+ p = pp.Appendpp(p, ppc64.AADD, obj.TYPE_CONST, 0, base.Ctxt.FixedFrameSize()+off-8, obj.TYPE_REG, ppc64.REGRT1, 0)
p.Reg = ppc64.REGSP
p = pp.Appendpp(p, obj.ADUFFZERO, obj.TYPE_NONE, 0, 0, obj.TYPE_MEM, 0, 0)
p.To.Name = obj.NAME_EXTERN
p.To.Sym = gc.Duffzero
p.To.Offset = 4 * (128 - cnt/int64(gc.Widthptr))
} else {
- p = pp.Appendpp(p, ppc64.AMOVD, obj.TYPE_CONST, 0, gc.Ctxt.FixedFrameSize()+off-8, obj.TYPE_REG, ppc64.REGTMP, 0)
+ p = pp.Appendpp(p, ppc64.AMOVD, obj.TYPE_CONST, 0, base.Ctxt.FixedFrameSize()+off-8, obj.TYPE_REG, ppc64.REGTMP, 0)
p = pp.Appendpp(p, ppc64.AADD, obj.TYPE_REG, ppc64.REGTMP, 0, obj.TYPE_REG, ppc64.REGRT1, 0)
p.Reg = ppc64.REGSP
p = pp.Appendpp(p, ppc64.AMOVD, obj.TYPE_CONST, 0, cnt, obj.TYPE_REG, ppc64.REGTMP, 0)
// on ppc64 in both shared and non-shared modes.
ginsnop(pp)
- if gc.Ctxt.Flag_shared {
+ if base.Ctxt.Flag_shared {
p := pp.Prog(ppc64.AMOVD)
p.From.Type = obj.TYPE_MEM
p.From.Offset = 24
package ppc64
import (
+ "cmd/compile/internal/base"
"cmd/compile/internal/gc"
"cmd/compile/internal/logopt"
"cmd/compile/internal/ssa"
// caller's SP is FixedFrameSize below the address of the first arg
p := s.Prog(ppc64.AMOVD)
p.From.Type = obj.TYPE_ADDR
- p.From.Offset = -gc.Ctxt.FixedFrameSize()
+ p.From.Offset = -base.Ctxt.FixedFrameSize()
p.From.Name = obj.NAME_PARAM
p.To.Type = obj.TYPE_REG
p.To.Reg = v.Reg()
// Insert a hint this is not a subroutine return.
pp.SetFrom3(obj.Addr{Type: obj.TYPE_CONST, Offset: 1})
- if gc.Ctxt.Flag_shared {
+ if base.Ctxt.Flag_shared {
// When compiling Go into PIC, the function we just
// called via pointer might have been implemented in
// a separate module and so overwritten the TOC
if logopt.Enabled() {
logopt.LogOpt(v.Pos, "nilcheck", "genssa", v.Block.Func.Name)
}
- if gc.Debug.Nil != 0 && v.Pos.Line() > 1 { // v.Pos.Line()==1 in generated wrappers
- gc.Warnl(v.Pos, "generated nil check")
+ if base.Debug.Nil != 0 && v.Pos.Line() > 1 { // v.Pos.Line()==1 in generated wrappers
+ base.WarnfAt(v.Pos, "generated nil check")
}
// These should be resolved by rules and not make it here.
package riscv64
import (
+ "cmd/compile/internal/base"
"cmd/compile/internal/gc"
"cmd/internal/obj"
"cmd/internal/obj/riscv"
}
// Adjust the frame to account for LR.
- off += gc.Ctxt.FixedFrameSize()
+ off += base.Ctxt.FixedFrameSize()
if cnt < int64(4*gc.Widthptr) {
for i := int64(0); i < cnt; i += int64(gc.Widthptr) {
package riscv64
import (
+ "cmd/compile/internal/base"
"cmd/compile/internal/gc"
"cmd/compile/internal/ssa"
"cmd/compile/internal/types"
case 8:
return riscv.AMOVD
default:
- gc.Fatalf("unknown float width for load %d in type %v", width, t)
+ base.Fatalf("unknown float width for load %d in type %v", width, t)
return 0
}
}
case 8:
return riscv.AMOV
default:
- gc.Fatalf("unknown width for load %d in type %v", width, t)
+ base.Fatalf("unknown width for load %d in type %v", width, t)
return 0
}
}
case 8:
return riscv.AMOVD
default:
- gc.Fatalf("unknown float width for store %d in type %v", width, t)
+ base.Fatalf("unknown float width for store %d in type %v", width, t)
return 0
}
}
case 8:
return riscv.AMOV
default:
- gc.Fatalf("unknown width for store %d in type %v", width, t)
+ base.Fatalf("unknown width for store %d in type %v", width, t)
return 0
}
}
gc.AddAux(&p.From, v)
p.To.Type = obj.TYPE_REG
p.To.Reg = riscv.REG_ZERO
- if gc.Debug.Nil != 0 && v.Pos.Line() > 1 { // v.Pos == 1 in generated wrappers
- gc.Warnl(v.Pos, "generated nil check")
+ if base.Debug.Nil != 0 && v.Pos.Line() > 1 { // v.Pos.Line()==1 in generated wrappers
+ base.WarnfAt(v.Pos, "generated nil check")
}
case ssa.OpRISCV64LoweredGetClosurePtr:
// caller's SP is FixedFrameSize below the address of the first arg
p := s.Prog(riscv.AMOV)
p.From.Type = obj.TYPE_ADDR
- p.From.Offset = -gc.Ctxt.FixedFrameSize()
+ p.From.Offset = -base.Ctxt.FixedFrameSize()
p.From.Name = obj.NAME_PARAM
p.To.Type = obj.TYPE_REG
p.To.Reg = v.Reg()
package s390x
import (
+ "cmd/compile/internal/base"
"cmd/compile/internal/gc"
"cmd/internal/obj"
"cmd/internal/obj/s390x"
}
// Adjust the frame to account for LR.
- off += gc.Ctxt.FixedFrameSize()
+ off += base.Ctxt.FixedFrameSize()
reg := int16(s390x.REGSP)
// If the off cannot fit in a 12-bit unsigned displacement then we
import (
"math"
+ "cmd/compile/internal/base"
"cmd/compile/internal/gc"
"cmd/compile/internal/logopt"
"cmd/compile/internal/ssa"
// caller's SP is FixedFrameSize below the address of the first arg
p := s.Prog(s390x.AMOVD)
p.From.Type = obj.TYPE_ADDR
- p.From.Offset = -gc.Ctxt.FixedFrameSize()
+ p.From.Offset = -base.Ctxt.FixedFrameSize()
p.From.Name = obj.NAME_PARAM
p.To.Type = obj.TYPE_REG
p.To.Reg = v.Reg()
if logopt.Enabled() {
logopt.LogOpt(v.Pos, "nilcheck", "genssa", v.Block.Func.Name)
}
- if gc.Debug.Nil != 0 && v.Pos.Line() > 1 { // v.Pos.Line()==1 in generated wrappers
- gc.Warnl(v.Pos, "generated nil check")
+ if base.Debug.Nil != 0 && v.Pos.Line() > 1 { // v.Pos.Line()==1 in generated wrappers
+ base.WarnfAt(v.Pos, "generated nil check")
}
case ssa.OpS390XMVC:
vo := v.AuxValAndOff()
package wasm
import (
+ "cmd/compile/internal/base"
"cmd/compile/internal/gc"
"cmd/compile/internal/logopt"
"cmd/compile/internal/ssa"
return p
}
if cnt%8 != 0 {
- gc.Fatalf("zerorange count not a multiple of widthptr %d", cnt)
+ base.Fatalf("zerorange count not a multiple of widthptr %d", cnt)
}
for i := int64(0); i < cnt; i += 8 {
if logopt.Enabled() {
logopt.LogOpt(v.Pos, "nilcheck", "genssa", v.Block.Func.Name)
}
- if gc.Debug.Nil != 0 && v.Pos.Line() > 1 { // v.Pos.Line()==1 in generated wrappers
- gc.Warnl(v.Pos, "generated nil check")
+ if base.Debug.Nil != 0 && v.Pos.Line() > 1 { // v.Pos.Line()==1 in generated wrappers
+ base.WarnfAt(v.Pos, "generated nil check")
}
case ssa.OpWasmLoweredWB:
package x86
import (
+ "cmd/compile/internal/base"
"cmd/compile/internal/gc"
"cmd/internal/obj/x86"
"cmd/internal/objabi"
arch.SoftFloat = true
case "387":
fmt.Fprintf(os.Stderr, "unsupported setting GO386=387. Consider using GO386=softfloat instead.\n")
- gc.Exit(1)
+ base.Exit(1)
default:
fmt.Fprintf(os.Stderr, "unsupported setting GO386=%s\n", v)
- gc.Exit(1)
+ base.Exit(1)
}
"fmt"
"math"
+ "cmd/compile/internal/base"
"cmd/compile/internal/gc"
"cmd/compile/internal/logopt"
"cmd/compile/internal/ssa"
p.From.Name = obj.NAME_EXTERN
f := math.Float64frombits(uint64(v.AuxInt))
if v.Op == ssa.Op386MOVSDconst1 {
- p.From.Sym = gc.Ctxt.Float64Sym(f)
+ p.From.Sym = base.Ctxt.Float64Sym(f)
} else {
- p.From.Sym = gc.Ctxt.Float32Sym(float32(f))
+ p.From.Sym = base.Ctxt.Float32Sym(float32(f))
}
p.To.Type = obj.TYPE_REG
p.To.Reg = v.Reg()
r := v.Reg()
// See the comments in cmd/internal/obj/x86/obj6.go
// near CanUse1InsnTLS for a detailed explanation of these instructions.
- if x86.CanUse1InsnTLS(gc.Ctxt) {
+ if x86.CanUse1InsnTLS(base.Ctxt) {
// MOVL (TLS), r
p := s.Prog(x86.AMOVL)
p.From.Type = obj.TYPE_MEM
// caller's SP is the address of the first arg
p := s.Prog(x86.AMOVL)
p.From.Type = obj.TYPE_ADDR
- p.From.Offset = -gc.Ctxt.FixedFrameSize() // 0 on 386, just to be consistent with other architectures
+ p.From.Offset = -base.Ctxt.FixedFrameSize() // 0 on 386, just to be consistent with other architectures
p.From.Name = obj.NAME_PARAM
p.To.Type = obj.TYPE_REG
p.To.Reg = v.Reg()
if logopt.Enabled() {
logopt.LogOpt(v.Pos, "nilcheck", "genssa", v.Block.Func.Name)
}
- if gc.Debug.Nil != 0 && v.Pos.Line() > 1 { // v.Pos.Line()==1 in generated wrappers
- gc.Warnl(v.Pos, "generated nil check")
+ if base.Debug.Nil != 0 && v.Pos.Line() > 1 { // v.Pos.Line()==1 in generated wrappers
+ base.WarnfAt(v.Pos, "generated nil check")
}
case ssa.OpClobber:
p := s.Prog(x86.AMOVL)
"cmd/compile/internal/amd64"
"cmd/compile/internal/arm"
"cmd/compile/internal/arm64"
+ "cmd/compile/internal/base"
"cmd/compile/internal/gc"
"cmd/compile/internal/mips"
"cmd/compile/internal/mips64"
}
gc.Main(archInit)
- gc.Exit(0)
+ base.Exit(0)
}
"cmd/cgo",
"cmd/compile",
"cmd/compile/internal/amd64",
"cmd/compile/internal/arm",
"cmd/compile/internal/arm64",
+ "cmd/compile/internal/base",
"cmd/compile/internal/gc",
"cmd/internal/sys",
"cmd/link",
"cmd/link/internal/amd64",
"cmd/link/internal/arm",
"cmd/link/internal/arm64",
"cmd/link/internal/benchmark",