// An absent entry means that the format is not recognized as valid.
// An empty new format means that the format should remain unchanged.
var knownFormats = map[string]string{
- "*bytes.Buffer %s": "",
- "*cmd/compile/internal/ssa.Block %s": "",
- "*cmd/compile/internal/ssa.Func %s": "",
- "*cmd/compile/internal/ssa.Register %s": "",
- "*cmd/compile/internal/ssa.Value %s": "",
- "*cmd/compile/internal/types.Sym %+v": "",
- "*cmd/compile/internal/types.Sym %S": "",
- "*cmd/compile/internal/types.Type %+v": "",
- "*cmd/compile/internal/types.Type %-S": "",
- "*cmd/compile/internal/types.Type %L": "",
- "*cmd/compile/internal/types.Type %S": "",
- "*cmd/compile/internal/types.Type %s": "",
- "*math/big.Float %f": "",
- "*math/big.Int %s": "",
- "[]cmd/compile/internal/syntax.token %s": "",
- "cmd/compile/internal/arm.shift %d": "",
- "cmd/compile/internal/gc.RegIndex %d": "",
- "cmd/compile/internal/ir.Class %d": "",
- "cmd/compile/internal/ir.Node %+v": "",
- "cmd/compile/internal/ir.Node %L": "",
- "cmd/compile/internal/ir.Nodes %+v": "",
- "cmd/compile/internal/ir.Nodes %.v": "",
- "cmd/compile/internal/ir.Op %+v": "",
- "cmd/compile/internal/ssa.Aux %#v": "",
- "cmd/compile/internal/ssa.Aux %q": "",
- "cmd/compile/internal/ssa.Aux %s": "",
- "cmd/compile/internal/ssa.BranchPrediction %d": "",
- "cmd/compile/internal/ssa.ID %d": "",
- "cmd/compile/internal/ssa.LocalSlot %s": "",
- "cmd/compile/internal/ssa.Location %s": "",
- "cmd/compile/internal/ssa.Op %s": "",
- "cmd/compile/internal/ssa.ValAndOff %s": "",
- "cmd/compile/internal/ssa.flagConstant %s": "",
- "cmd/compile/internal/ssa.rbrank %d": "",
- "cmd/compile/internal/ssa.regMask %d": "",
- "cmd/compile/internal/ssa.register %d": "",
- "cmd/compile/internal/ssa.relation %s": "",
- "cmd/compile/internal/syntax.Error %q": "",
- "cmd/compile/internal/syntax.Expr %#v": "",
- "cmd/compile/internal/syntax.LitKind %d": "",
- "cmd/compile/internal/syntax.Operator %s": "",
- "cmd/compile/internal/syntax.Pos %s": "",
- "cmd/compile/internal/syntax.position %s": "",
- "cmd/compile/internal/syntax.token %q": "",
- "cmd/compile/internal/syntax.token %s": "",
- "cmd/compile/internal/types.Kind %d": "",
- "cmd/compile/internal/types.Kind %s": "",
- "cmd/compile/internal/walk.initKind %d": "",
- "go/constant.Value %#v": "",
- "math/big.Accuracy %s": "",
- "reflect.Type %s": "",
- "time.Duration %d": "",
+ "*bytes.Buffer %s": "",
+ "*cmd/compile/internal/ssa.Block %s": "",
+ "*cmd/compile/internal/ssa.Func %s": "",
+ "*cmd/compile/internal/ssa.Register %s": "",
+ "*cmd/compile/internal/ssa.Value %s": "",
+ "*cmd/compile/internal/syntax.CallExpr %s": "",
+ "*cmd/compile/internal/syntax.FuncLit %s": "",
+ "*cmd/compile/internal/syntax.IndexExpr %s": "",
+ "*cmd/compile/internal/types.Sym %+v": "",
+ "*cmd/compile/internal/types.Sym %S": "",
+ "*cmd/compile/internal/types.Type %+v": "",
+ "*cmd/compile/internal/types.Type %-S": "",
+ "*cmd/compile/internal/types.Type %L": "",
+ "*cmd/compile/internal/types.Type %S": "",
+ "*cmd/compile/internal/types.Type %s": "",
+ "*cmd/compile/internal/types2.Basic %s": "",
+ "*cmd/compile/internal/types2.Chan %s": "",
+ "*cmd/compile/internal/types2.Func %s": "",
+ "*cmd/compile/internal/types2.Initializer %s": "",
+ "*cmd/compile/internal/types2.Interface %s": "",
+ "*cmd/compile/internal/types2.MethodSet %s": "",
+ "*cmd/compile/internal/types2.Named %s": "",
+ "*cmd/compile/internal/types2.Package %s": "",
+ "*cmd/compile/internal/types2.Selection %s": "",
+ "*cmd/compile/internal/types2.Signature %s": "",
+ "*cmd/compile/internal/types2.TypeName %s": "",
+ "*cmd/compile/internal/types2.TypeParam %s": "",
+ "*cmd/compile/internal/types2.Var %s": "",
+ "*cmd/compile/internal/types2.operand %s": "",
+ "*cmd/compile/internal/types2.substMap %s": "",
+ "*math/big.Float %f": "",
+ "*math/big.Int %s": "",
+ "[]*cmd/compile/internal/types2.TypeName %s": "",
+ "[]cmd/compile/internal/syntax.token %s": "",
+ "[]cmd/compile/internal/types2.Type %s": "",
+ "cmd/compile/internal/arm.shift %d": "",
+ "cmd/compile/internal/gc.RegIndex %d": "",
- "cmd/compile/internal/gc.initKind %d": "",
+ "cmd/compile/internal/ir.Class %d": "",
+ "cmd/compile/internal/ir.Node %+v": "",
+ "cmd/compile/internal/ir.Node %L": "",
+ "cmd/compile/internal/ir.Nodes %+v": "",
+ "cmd/compile/internal/ir.Nodes %.v": "",
+ "cmd/compile/internal/ir.Op %+v": "",
+ "cmd/compile/internal/ssa.Aux %#v": "",
+ "cmd/compile/internal/ssa.Aux %q": "",
+ "cmd/compile/internal/ssa.Aux %s": "",
+ "cmd/compile/internal/ssa.BranchPrediction %d": "",
+ "cmd/compile/internal/ssa.ID %d": "",
+ "cmd/compile/internal/ssa.LocalSlot %s": "",
+ "cmd/compile/internal/ssa.Location %s": "",
+ "cmd/compile/internal/ssa.Op %s": "",
+ "cmd/compile/internal/ssa.ValAndOff %s": "",
+ "cmd/compile/internal/ssa.flagConstant %s": "",
+ "cmd/compile/internal/ssa.rbrank %d": "",
+ "cmd/compile/internal/ssa.regMask %d": "",
+ "cmd/compile/internal/ssa.register %d": "",
+ "cmd/compile/internal/ssa.relation %s": "",
+ "cmd/compile/internal/syntax.ChanDir %d": "",
+ "cmd/compile/internal/syntax.Error %q": "",
+ "cmd/compile/internal/syntax.Expr %#v": "",
+ "cmd/compile/internal/syntax.Expr %s": "",
+ "cmd/compile/internal/syntax.LitKind %d": "",
+ "cmd/compile/internal/syntax.Operator %s": "",
+ "cmd/compile/internal/syntax.Pos %s": "",
+ "cmd/compile/internal/syntax.position %s": "",
+ "cmd/compile/internal/syntax.token %q": "",
+ "cmd/compile/internal/syntax.token %s": "",
+ "cmd/compile/internal/types.Kind %d": "",
+ "cmd/compile/internal/types.Kind %s": "",
+ "cmd/compile/internal/types2.Object %s": "",
+ "cmd/compile/internal/types2.Type %s": "",
+ "cmd/compile/internal/types2.color %s": "",
+ 	"cmd/compile/internal/walk.initKind %d": "",
+ "go/constant.Value %#v": "",
+ "go/constant.Value %s": "",
+ "map[*cmd/compile/internal/types2.TypeParam]cmd/compile/internal/types2.Type %s": "",
+ "math/big.Accuracy %s": "",
+ "reflect.Type %s": "",
+ "time.Duration %d": "",
}
C CountFlag "help:\"disable printing of columns in error messages\""
D string "help:\"set relative `path` for local imports\""
E CountFlag "help:\"debug symbol export\""
+ G CountFlag "help:\"accept generic code\""
I func(string) "help:\"add `directory` to import search path\""
K CountFlag "help:\"debug missing line numbers\""
L CountFlag "help:\"show full file names in error messages\""
ImportMap map[string]string // set by -importmap OR -importcfg
PackageFile map[string]string // set by -importcfg; nil means not in use
SpectreIndex bool // set by -spectre=index or -spectre=all
+ // Whether we are adding any sort of code instrumentation, such as
+ // when the race detector is enabled.
+ Instrumenting bool
}
}
"bufio"
"bytes"
"cmd/compile/internal/base"
+ "cmd/compile/internal/dwarfgen"
+ "cmd/compile/internal/escape"
+ "cmd/compile/internal/inline"
"cmd/compile/internal/ir"
"cmd/compile/internal/logopt"
+ "cmd/compile/internal/noder"
+ "cmd/compile/internal/pkginit"
+ "cmd/compile/internal/reflectdata"
"cmd/compile/internal/ssa"
+ "cmd/compile/internal/ssagen"
+ "cmd/compile/internal/staticdata"
+ "cmd/compile/internal/typecheck"
"cmd/compile/internal/types"
- "cmd/internal/bio"
+ "cmd/compile/internal/walk"
"cmd/internal/dwarf"
- "cmd/internal/goobj"
"cmd/internal/obj"
"cmd/internal/objabi"
"cmd/internal/src"
"flag"
"fmt"
- "go/constant"
- "internal/goversion"
- "io"
- "io/ioutil"
"log"
"os"
- "path"
- "regexp"
"runtime"
- "sort"
- "strconv"
- "strings"
)
func hidePanic() {
}
}
- // Target is the package being compiled.
- var Target *ir.Package
-
// Main parses flags and Go source files specified in the command-line
// arguments, type-checks the parsed Go package, compiles functions to machine
// code, and finally writes the compiled package definition to disk.
- func Main(archInit func(*Arch)) {
- timings.Start("fe", "init")
+ func Main(archInit func(*ssagen.ArchInfo)) {
+ base.Timer.Start("fe", "init")
defer hidePanic()
- archInit(&thearch)
+ archInit(&ssagen.Arch)
- base.Ctxt = obj.Linknew(thearch.LinkArch)
+ base.Ctxt = obj.Linknew(ssagen.Arch.LinkArch)
base.Ctxt.DiagFunc = base.Errorf
base.Ctxt.DiagFlush = base.FlushErrors
base.Ctxt.Bso = bufio.NewWriter(os.Stdout)
types.BuiltinPkg.Prefix = "go.builtin" // not go%2ebuiltin
// pseudo-package, accessed by import "unsafe"
- unsafepkg = types.NewPkg("unsafe", "unsafe")
+ ir.Pkgs.Unsafe = types.NewPkg("unsafe", "unsafe")
// Pseudo-package that contains the compiler's builtin
// declarations for package runtime. These are declared in a
// separate package to avoid conflicts with package runtime's
// actual declarations, which may differ intentionally but
// insignificantly.
- Runtimepkg = types.NewPkg("go.runtime", "runtime")
- Runtimepkg.Prefix = "runtime"
+ ir.Pkgs.Runtime = types.NewPkg("go.runtime", "runtime")
+ ir.Pkgs.Runtime.Prefix = "runtime"
// pseudo-packages used in symbol tables
- itabpkg = types.NewPkg("go.itab", "go.itab")
- itabpkg.Prefix = "go.itab" // not go%2eitab
+ ir.Pkgs.Itab = types.NewPkg("go.itab", "go.itab")
+ ir.Pkgs.Itab.Prefix = "go.itab" // not go%2eitab
- itablinkpkg = types.NewPkg("go.itablink", "go.itablink")
- itablinkpkg.Prefix = "go.itablink" // not go%2eitablink
+ ir.Pkgs.Itablink = types.NewPkg("go.itablink", "go.itablink")
+ ir.Pkgs.Itablink.Prefix = "go.itablink" // not go%2eitablink
- trackpkg = types.NewPkg("go.track", "go.track")
- trackpkg.Prefix = "go.track" // not go%2etrack
+ ir.Pkgs.Track = types.NewPkg("go.track", "go.track")
+ ir.Pkgs.Track.Prefix = "go.track" // not go%2etrack
// pseudo-package used for map zero values
- mappkg = types.NewPkg("go.map", "go.map")
- mappkg.Prefix = "go.map"
+ ir.Pkgs.Map = types.NewPkg("go.map", "go.map")
+ ir.Pkgs.Map.Prefix = "go.map"
// pseudo-package used for methods with anonymous receivers
- gopkg = types.NewPkg("go", "")
+ ir.Pkgs.Go = types.NewPkg("go", "")
base.DebugSSA = ssa.PhaseOption
base.ParseFlags()
// Record flags that affect the build result. (And don't
// record flags that don't, since that would cause spurious
// changes in the binary.)
- recordFlags("B", "N", "l", "msan", "race", "shared", "dynlink", "dwarflocationlists", "dwarfbasentries", "smallframes", "spectre")
+ dwarfgen.RecordFlags("B", "N", "l", "msan", "race", "shared", "dynlink", "dwarflocationlists", "dwarfbasentries", "smallframes", "spectre")
- if !enableTrace && base.Flag.LowerT {
+ if !base.EnableTrace && base.Flag.LowerT {
log.Fatalf("compiler not built with support for -t")
}
}
if base.Flag.SmallFrames {
- maxStackVarSize = 128 * 1024
- maxImplicitStackVarSize = 16 * 1024
+ ir.MaxStackVarSize = 128 * 1024
+ ir.MaxImplicitStackVarSize = 16 * 1024
}
if base.Flag.Dwarf {
- base.Ctxt.DebugInfo = debuginfo
- base.Ctxt.GenAbstractFunc = genAbstractFunc
+ base.Ctxt.DebugInfo = dwarfgen.Info
+ base.Ctxt.GenAbstractFunc = dwarfgen.AbstractFunc
base.Ctxt.DwFixups = obj.NewDwarfFixupTable(base.Ctxt)
} else {
// turn off inline generation if no dwarf at all
log.Fatalf("location lists requested but register mapping not available on %v", base.Ctxt.Arch.Name)
}
- checkLang()
+ types.ParseLangFlag()
if base.Flag.SymABIs != "" {
- readSymABIs(base.Flag.SymABIs, base.Ctxt.Pkgpath)
+ ssagen.ReadSymABIs(base.Flag.SymABIs, base.Ctxt.Pkgpath)
}
- if ispkgin(omit_pkgs) {
+ if base.Compiling(base.NoInstrumentPkgs) {
base.Flag.Race = false
base.Flag.MSan = false
}
- thearch.LinkArch.Init(base.Ctxt)
+ ssagen.Arch.LinkArch.Init(base.Ctxt)
startProfile()
if base.Flag.Race {
- racepkg = types.NewPkg("runtime/race", "")
+ ir.Pkgs.Race = types.NewPkg("runtime/race", "")
}
if base.Flag.MSan {
- msanpkg = types.NewPkg("runtime/msan", "")
+ ir.Pkgs.Msan = types.NewPkg("runtime/msan", "")
}
if base.Flag.Race || base.Flag.MSan {
- instrumenting = true
+ base.Flag.Cfg.Instrumenting = true
}
if base.Flag.Dwarf {
dwarf.EnableLogging(base.Debug.DwarfInl != 0)
}
if base.Debug.SoftFloat != 0 {
- thearch.SoftFloat = true
+ ssagen.Arch.SoftFloat = true
}
if base.Flag.JSON != "" { // parse version,destination from json logging optimization.
logopt.LogJsonOption(base.Flag.JSON)
}
- ir.EscFmt = escFmt
- IsIntrinsicCall = isIntrinsicCall
- SSADumpInline = ssaDumpInline
- initSSAEnv()
- initSSATables()
+ ir.EscFmt = escape.Fmt
+ ir.IsIntrinsicCall = ssagen.IsIntrinsicCall
+ inline.SSADumpInline = ssagen.DumpInline
+ ssagen.InitEnv()
+ ssagen.InitTables()
- Widthptr = thearch.LinkArch.PtrSize
- Widthreg = thearch.LinkArch.RegSize
- MaxWidth = thearch.MAXWIDTH
+ types.PtrSize = ssagen.Arch.LinkArch.PtrSize
+ types.RegSize = ssagen.Arch.LinkArch.RegSize
+ types.MaxWidth = ssagen.Arch.MAXWIDTH
types.TypeLinkSym = func(t *types.Type) *obj.LSym {
- return typenamesym(t).Linksym()
+ return reflectdata.TypeSym(t).Linksym()
}
- Target = new(ir.Package)
+ typecheck.Target = new(ir.Package)
- NeedFuncSym = makefuncsym
- NeedITab = func(t, iface *types.Type) { itabname(t, iface) }
- NeedRuntimeType = addsignat // TODO(rsc): typenamesym for lock?
+ typecheck.NeedFuncSym = staticdata.NeedFuncSym
+ typecheck.NeedITab = func(t, iface *types.Type) { reflectdata.ITabAddr(t, iface) }
+ typecheck.NeedRuntimeType = reflectdata.NeedRuntimeType // TODO(rsc): typenamesym for lock?
- autogeneratedPos = makePos(src.NewFileBase("<autogenerated>", "<autogenerated>"), 1, 0)
+ base.AutogeneratedPos = makePos(src.NewFileBase("<autogenerated>", "<autogenerated>"), 1, 0)
types.TypeLinkSym = func(t *types.Type) *obj.LSym {
- return typenamesym(t).Linksym()
+ return reflectdata.TypeSym(t).Linksym()
}
- TypecheckInit()
+ typecheck.Init()
// Parse input.
- timings.Start("fe", "parse")
- lines := parseFiles(flag.Args())
- cgoSymABIs()
- timings.Stop()
- timings.AddEvent(int64(lines), "lines")
+ base.Timer.Start("fe", "parse")
+ lines := noder.ParseFiles(flag.Args())
+ ssagen.CgoSymABIs()
+ base.Timer.Stop()
+ base.Timer.AddEvent(int64(lines), "lines")
+ if base.Flag.G != 0 && base.Flag.G < 3 {
+ // can only parse generic code for now
+ base.ExitIfErrors()
+ return
+ }
+
- recordPackageName()
+ dwarfgen.RecordPackageName()
// Typecheck.
- TypecheckPackage()
+ typecheck.Package()
// With all user code typechecked, it's now safe to verify unused dot imports.
- checkDotImports()
+ noder.CheckDotImports()
base.ExitIfErrors()
// Build init task.
- if initTask := fninit(); initTask != nil {
- exportsym(initTask)
+ if initTask := pkginit.Task(); initTask != nil {
+ typecheck.Export(initTask)
}
// Inlining
- timings.Start("fe", "inlining")
+ base.Timer.Start("fe", "inlining")
if base.Flag.LowerL != 0 {
- InlinePackage()
+ inline.InlinePackage()
}
// Devirtualize.
- for _, n := range Target.Decls {
+ for _, n := range typecheck.Target.Decls {
if n.Op() == ir.ODCLFUNC {
- devirtualize(n.(*ir.Func))
+ inline.Devirtualize(n.(*ir.Func))
}
}
- Curfn = nil
+ ir.CurFunc = nil
// Escape analysis.
// Required for moving heap allocations onto stack,
// or else the stack copier will not update it.
// Large values are also moved off stack in escape analysis;
// because large values may contain pointers, it must happen early.
- timings.Start("fe", "escapes")
- escapes(Target.Decls)
+ base.Timer.Start("fe", "escapes")
+ escape.Funcs(typecheck.Target.Decls)
// Collect information for go:nowritebarrierrec
// checking. This must happen before transformclosure.
// We'll do the final check after write barriers are
// inserted.
if base.Flag.CompilingRuntime {
- EnableNoWriteBarrierRecCheck()
+ ssagen.EnableNoWriteBarrierRecCheck()
}
// Transform closure bodies to properly reference captured variables.
// This needs to happen before walk, because closures must be transformed
// before walk reaches a call of a closure.
- timings.Start("fe", "xclosures")
- for _, n := range Target.Decls {
+ base.Timer.Start("fe", "xclosures")
+ for _, n := range typecheck.Target.Decls {
if n.Op() == ir.ODCLFUNC {
n := n.(*ir.Func)
- if n.Func().OClosure != nil {
- Curfn = n
- transformclosure(n)
+ if n.OClosure != nil {
+ ir.CurFunc = n
+ walk.Closure(n)
}
}
}
// Prepare for SSA compilation.
// This must be before peekitabs, because peekitabs
// can trigger function compilation.
- initssaconfig()
+ ssagen.InitConfig()
// Just before compilation, compile itabs found on
// the right side of OCONVIFACE so that methods
// can be de-virtualized during compilation.
- Curfn = nil
- peekitabs()
+ ir.CurFunc = nil
+ reflectdata.CompileITabs()
// Compile top level functions.
// Don't use range--walk can add functions to Target.Decls.
- timings.Start("be", "compilefuncs")
+ base.Timer.Start("be", "compilefuncs")
fcount := int64(0)
- for i := 0; i < len(Target.Decls); i++ {
- n := Target.Decls[i]
+ for i := 0; i < len(typecheck.Target.Decls); i++ {
+ n := typecheck.Target.Decls[i]
if n.Op() == ir.ODCLFUNC {
funccompile(n.(*ir.Func))
fcount++
}
}
- timings.AddEvent(fcount, "funcs")
+ base.Timer.AddEvent(fcount, "funcs")
compileFunctions()
if base.Flag.CompilingRuntime {
// Write barriers are now known. Check the call graph.
- NoWriteBarrierRecCheck()
+ ssagen.NoWriteBarrierRecCheck()
}
// Finalize DWARF inline routine DIEs, then explicitly turn off
}
// Write object data to disk.
- timings.Start("be", "dumpobj")
+ base.Timer.Start("be", "dumpobj")
dumpdata()
base.Ctxt.NumberSyms()
dumpobj()
dumpasmhdr()
}
- CheckLargeStacks()
- CheckFuncStack()
+ ssagen.CheckLargeStacks()
+ typecheck.CheckFuncStack()
if len(compilequeue) != 0 {
base.Fatalf("%d uncompiled functions", len(compilequeue))
base.ExitIfErrors()
base.FlushErrors()
- timings.Stop()
+ base.Timer.Stop()
if base.Flag.Bench != "" {
if err := writebench(base.Flag.Bench); err != nil {
}
}
- func CheckLargeStacks() {
- // Check whether any of the functions we have compiled have gigantic stack frames.
- sort.Slice(largeStackFrames, func(i, j int) bool {
- return largeStackFrames[i].pos.Before(largeStackFrames[j].pos)
- })
- for _, large := range largeStackFrames {
- if large.callee != 0 {
- base.ErrorfAt(large.pos, "stack frame too large (>1GB): %d MB locals + %d MB args + %d MB callee", large.locals>>20, large.args>>20, large.callee>>20)
- } else {
- base.ErrorfAt(large.pos, "stack frame too large (>1GB): %d MB locals + %d MB args", large.locals>>20, large.args>>20)
- }
- }
- }
-
- func cgoSymABIs() {
- // The linker expects an ABI0 wrapper for all cgo-exported
- // functions.
- for _, prag := range Target.CgoPragmas {
- switch prag[0] {
- case "cgo_export_static", "cgo_export_dynamic":
- if symabiRefs == nil {
- symabiRefs = make(map[string]obj.ABI)
- }
- symabiRefs[prag[1]] = obj.ABI0
- }
- }
- }
-
- // numNonClosures returns the number of functions in list which are not closures.
- func numNonClosures(list []*ir.Func) int {
- count := 0
- for _, fn := range list {
- if fn.OClosure == nil {
- count++
- }
- }
- return count
- }
-
func writebench(filename string) error {
f, err := os.OpenFile(filename, os.O_WRONLY|os.O_CREATE|os.O_APPEND, 0666)
if err != nil {
fmt.Fprintln(&buf, "commit:", objabi.Version)
fmt.Fprintln(&buf, "goos:", runtime.GOOS)
fmt.Fprintln(&buf, "goarch:", runtime.GOARCH)
- timings.Write(&buf, "BenchmarkCompile:"+base.Ctxt.Pkgpath+":")
+ base.Timer.Write(&buf, "BenchmarkCompile:"+base.Ctxt.Pkgpath+":")
n, err := f.Write(buf.Bytes())
if err != nil {
return f.Close()
}
- // symabiDefs and symabiRefs record the defined and referenced ABIs of
- // symbols required by non-Go code. These are keyed by link symbol
- // name, where the local package prefix is always `"".`
- var symabiDefs, symabiRefs map[string]obj.ABI
-
- // readSymABIs reads a symabis file that specifies definitions and
- // references of text symbols by ABI.
- //
- // The symabis format is a set of lines, where each line is a sequence
- // of whitespace-separated fields. The first field is a verb and is
- // either "def" for defining a symbol ABI or "ref" for referencing a
- // symbol using an ABI. For both "def" and "ref", the second field is
- // the symbol name and the third field is the ABI name, as one of the
- // named cmd/internal/obj.ABI constants.
- func readSymABIs(file, myimportpath string) {
- data, err := ioutil.ReadFile(file)
- if err != nil {
- log.Fatalf("-symabis: %v", err)
- }
-
- symabiDefs = make(map[string]obj.ABI)
- symabiRefs = make(map[string]obj.ABI)
-
- localPrefix := ""
- if myimportpath != "" {
- // Symbols in this package may be written either as
- // "".X or with the package's import path already in
- // the symbol.
- localPrefix = objabi.PathToPrefix(myimportpath) + "."
- }
-
- for lineNum, line := range strings.Split(string(data), "\n") {
- lineNum++ // 1-based
- line = strings.TrimSpace(line)
- if line == "" || strings.HasPrefix(line, "#") {
- continue
- }
-
- parts := strings.Fields(line)
- switch parts[0] {
- case "def", "ref":
- // Parse line.
- if len(parts) != 3 {
- log.Fatalf(`%s:%d: invalid symabi: syntax is "%s sym abi"`, file, lineNum, parts[0])
- }
- sym, abistr := parts[1], parts[2]
- abi, valid := obj.ParseABI(abistr)
- if !valid {
- log.Fatalf(`%s:%d: invalid symabi: unknown abi "%s"`, file, lineNum, abistr)
- }
-
- // If the symbol is already prefixed with
- // myimportpath, rewrite it to start with ""
- // so it matches the compiler's internal
- // symbol names.
- if localPrefix != "" && strings.HasPrefix(sym, localPrefix) {
- sym = `"".` + sym[len(localPrefix):]
- }
-
- // Record for later.
- if parts[0] == "def" {
- symabiDefs[sym] = abi
- } else {
- symabiRefs[sym] = abi
- }
- default:
- log.Fatalf(`%s:%d: invalid symabi type "%s"`, file, lineNum, parts[0])
- }
- }
- }
-
- func arsize(b *bufio.Reader, name string) int {
- var buf [ArhdrSize]byte
- if _, err := io.ReadFull(b, buf[:]); err != nil {
- return -1
- }
- aname := strings.Trim(string(buf[0:16]), " ")
- if !strings.HasPrefix(aname, name) {
- return -1
- }
- asize := strings.Trim(string(buf[48:58]), " ")
- i, _ := strconv.Atoi(asize)
- return i
- }
-
- func isDriveLetter(b byte) bool {
- return 'a' <= b && b <= 'z' || 'A' <= b && b <= 'Z'
- }
-
- // is this path a local name? begins with ./ or ../ or /
- func islocalname(name string) bool {
- return strings.HasPrefix(name, "/") ||
- runtime.GOOS == "windows" && len(name) >= 3 && isDriveLetter(name[0]) && name[1] == ':' && name[2] == '/' ||
- strings.HasPrefix(name, "./") || name == "." ||
- strings.HasPrefix(name, "../") || name == ".."
- }
-
- func findpkg(name string) (file string, ok bool) {
- if islocalname(name) {
- if base.Flag.NoLocalImports {
- return "", false
- }
-
- if base.Flag.Cfg.PackageFile != nil {
- file, ok = base.Flag.Cfg.PackageFile[name]
- return file, ok
- }
-
- // try .a before .6. important for building libraries:
- // if there is an array.6 in the array.a library,
- // want to find all of array.a, not just array.6.
- file = fmt.Sprintf("%s.a", name)
- if _, err := os.Stat(file); err == nil {
- return file, true
- }
- file = fmt.Sprintf("%s.o", name)
- if _, err := os.Stat(file); err == nil {
- return file, true
- }
- return "", false
- }
-
- // local imports should be canonicalized already.
- // don't want to see "encoding/../encoding/base64"
- // as different from "encoding/base64".
- if q := path.Clean(name); q != name {
- base.Errorf("non-canonical import path %q (should be %q)", name, q)
- return "", false
- }
-
- if base.Flag.Cfg.PackageFile != nil {
- file, ok = base.Flag.Cfg.PackageFile[name]
- return file, ok
- }
-
- for _, dir := range base.Flag.Cfg.ImportDirs {
- file = fmt.Sprintf("%s/%s.a", dir, name)
- if _, err := os.Stat(file); err == nil {
- return file, true
- }
- file = fmt.Sprintf("%s/%s.o", dir, name)
- if _, err := os.Stat(file); err == nil {
- return file, true
- }
- }
-
- if objabi.GOROOT != "" {
- suffix := ""
- suffixsep := ""
- if base.Flag.InstallSuffix != "" {
- suffixsep = "_"
- suffix = base.Flag.InstallSuffix
- } else if base.Flag.Race {
- suffixsep = "_"
- suffix = "race"
- } else if base.Flag.MSan {
- suffixsep = "_"
- suffix = "msan"
- }
-
- file = fmt.Sprintf("%s/pkg/%s_%s%s%s/%s.a", objabi.GOROOT, objabi.GOOS, objabi.GOARCH, suffixsep, suffix, name)
- if _, err := os.Stat(file); err == nil {
- return file, true
- }
- file = fmt.Sprintf("%s/pkg/%s_%s%s%s/%s.o", objabi.GOROOT, objabi.GOOS, objabi.GOARCH, suffixsep, suffix, name)
- if _, err := os.Stat(file); err == nil {
- return file, true
- }
- }
-
- return "", false
- }
-
- // loadsys loads the definitions for the low-level runtime functions,
- // so that the compiler can generate calls to them,
- // but does not make them visible to user code.
- func loadsys() {
- types.Block = 1
-
- inimport = true
- typecheckok = true
-
- typs := runtimeTypes()
- for _, d := range &runtimeDecls {
- sym := Runtimepkg.Lookup(d.name)
- typ := typs[d.typ]
- switch d.tag {
- case funcTag:
- importfunc(Runtimepkg, src.NoXPos, sym, typ)
- case varTag:
- importvar(Runtimepkg, src.NoXPos, sym, typ)
- default:
- base.Fatalf("unhandled declaration tag %v", d.tag)
- }
- }
-
- typecheckok = false
- inimport = false
- }
-
- // myheight tracks the local package's height based on packages
- // imported so far.
- var myheight int
-
- func importfile(f constant.Value) *types.Pkg {
- if f.Kind() != constant.String {
- base.Errorf("import path must be a string")
- return nil
- }
-
- path_ := constant.StringVal(f)
- if len(path_) == 0 {
- base.Errorf("import path is empty")
- return nil
- }
-
- if isbadimport(path_, false) {
- return nil
- }
-
- // The package name main is no longer reserved,
- // but we reserve the import path "main" to identify
- // the main package, just as we reserve the import
- // path "math" to identify the standard math package.
- if path_ == "main" {
- base.Errorf("cannot import \"main\"")
- base.ErrorExit()
- }
-
- if base.Ctxt.Pkgpath != "" && path_ == base.Ctxt.Pkgpath {
- base.Errorf("import %q while compiling that package (import cycle)", path_)
- base.ErrorExit()
- }
-
- if mapped, ok := base.Flag.Cfg.ImportMap[path_]; ok {
- path_ = mapped
- }
-
- if path_ == "unsafe" {
- return unsafepkg
- }
-
- if islocalname(path_) {
- if path_[0] == '/' {
- base.Errorf("import path cannot be absolute path")
- return nil
- }
-
- prefix := base.Ctxt.Pathname
- if base.Flag.D != "" {
- prefix = base.Flag.D
- }
- path_ = path.Join(prefix, path_)
-
- if isbadimport(path_, true) {
- return nil
- }
- }
-
- file, found := findpkg(path_)
- if !found {
- base.Errorf("can't find import: %q", path_)
- base.ErrorExit()
- }
-
- importpkg := types.NewPkg(path_, "")
- if importpkg.Imported {
- return importpkg
- }
-
- importpkg.Imported = true
-
- imp, err := bio.Open(file)
- if err != nil {
- base.Errorf("can't open import: %q: %v", path_, err)
- base.ErrorExit()
- }
- defer imp.Close()
-
- // check object header
- p, err := imp.ReadString('\n')
- if err != nil {
- base.Errorf("import %s: reading input: %v", file, err)
- base.ErrorExit()
- }
-
- if p == "!<arch>\n" { // package archive
- // package export block should be first
- sz := arsize(imp.Reader, "__.PKGDEF")
- if sz <= 0 {
- base.Errorf("import %s: not a package file", file)
- base.ErrorExit()
- }
- p, err = imp.ReadString('\n')
- if err != nil {
- base.Errorf("import %s: reading input: %v", file, err)
- base.ErrorExit()
- }
- }
-
- if !strings.HasPrefix(p, "go object ") {
- base.Errorf("import %s: not a go object file: %s", file, p)
- base.ErrorExit()
- }
- q := fmt.Sprintf("%s %s %s %s\n", objabi.GOOS, objabi.GOARCH, objabi.Version, objabi.Expstring())
- if p[10:] != q {
- base.Errorf("import %s: object is [%s] expected [%s]", file, p[10:], q)
- base.ErrorExit()
- }
-
- // process header lines
- for {
- p, err = imp.ReadString('\n')
- if err != nil {
- base.Errorf("import %s: reading input: %v", file, err)
- base.ErrorExit()
- }
- if p == "\n" {
- break // header ends with blank line
- }
- }
-
- // Expect $$B\n to signal binary import format.
-
- // look for $$
- var c byte
- for {
- c, err = imp.ReadByte()
- if err != nil {
- break
- }
- if c == '$' {
- c, err = imp.ReadByte()
- if c == '$' || err != nil {
- break
- }
- }
- }
-
- // get character after $$
- if err == nil {
- c, _ = imp.ReadByte()
- }
-
- var fingerprint goobj.FingerprintType
- switch c {
- case '\n':
- base.Errorf("cannot import %s: old export format no longer supported (recompile library)", path_)
- return nil
-
- case 'B':
- if base.Debug.Export != 0 {
- fmt.Printf("importing %s (%s)\n", path_, file)
- }
- imp.ReadByte() // skip \n after $$B
-
- c, err = imp.ReadByte()
- if err != nil {
- base.Errorf("import %s: reading input: %v", file, err)
- base.ErrorExit()
- }
-
- // Indexed format is distinguished by an 'i' byte,
- // whereas previous export formats started with 'c', 'd', or 'v'.
- if c != 'i' {
- base.Errorf("import %s: unexpected package format byte: %v", file, c)
- base.ErrorExit()
- }
- fingerprint = iimport(importpkg, imp)
-
- default:
- base.Errorf("no import in %q", path_)
- base.ErrorExit()
- }
-
- // assume files move (get installed) so don't record the full path
- if base.Flag.Cfg.PackageFile != nil {
- // If using a packageFile map, assume path_ can be recorded directly.
- base.Ctxt.AddImport(path_, fingerprint)
- } else {
- // For file "/Users/foo/go/pkg/darwin_amd64/math.a" record "math.a".
- base.Ctxt.AddImport(file[len(file)-len(path_)-len(".a"):], fingerprint)
- }
-
- if importpkg.Height >= myheight {
- myheight = importpkg.Height + 1
- }
-
- return importpkg
- }
-
- func pkgnotused(lineno src.XPos, path string, name string) {
- // If the package was imported with a name other than the final
- // import path element, show it explicitly in the error message.
- // Note that this handles both renamed imports and imports of
- // packages containing unconventional package declarations.
- // Note that this uses / always, even on Windows, because Go import
- // paths always use forward slashes.
- elem := path
- if i := strings.LastIndex(elem, "/"); i >= 0 {
- elem = elem[i+1:]
- }
- if name == "" || elem == name {
- base.ErrorfAt(lineno, "imported and not used: %q", path)
- } else {
- base.ErrorfAt(lineno, "imported and not used: %q as %s", path, name)
- }
- }
-
- func mkpackage(pkgname string) {
- if types.LocalPkg.Name == "" {
- if pkgname == "_" {
- base.Errorf("invalid package name _")
- }
- types.LocalPkg.Name = pkgname
- } else {
- if pkgname != types.LocalPkg.Name {
- base.Errorf("package %s; expected %s", pkgname, types.LocalPkg.Name)
- }
- }
- }
-
- func clearImports() {
- type importedPkg struct {
- pos src.XPos
- path string
- name string
- }
- var unused []importedPkg
-
- for _, s := range types.LocalPkg.Syms {
- n := ir.AsNode(s.Def)
- if n == nil {
- continue
- }
- if n.Op() == ir.OPACK {
- // throw away top-level package name left over
- // from previous file.
- // leave s->block set to cause redeclaration
- // errors if a conflicting top-level name is
- // introduced by a different file.
- p := n.(*ir.PkgName)
- if !p.Used && base.SyntaxErrors() == 0 {
- unused = append(unused, importedPkg{p.Pos(), p.Pkg.Path, s.Name})
- }
- s.Def = nil
- continue
- }
- if IsAlias(s) {
- // throw away top-level name left over
- // from previous import . "x"
- // We'll report errors after type checking in checkDotImports.
- s.Def = nil
- continue
- }
- }
-
- sort.Slice(unused, func(i, j int) bool { return unused[i].pos.Before(unused[j].pos) })
- for _, pkg := range unused {
- pkgnotused(pkg.pos, pkg.path, pkg.name)
- }
- }
-
- func IsAlias(sym *types.Sym) bool {
- return sym.Def != nil && sym.Def.Sym() != sym
- }
-
- // recordFlags records the specified command-line flags to be placed
- // in the DWARF info.
- func recordFlags(flags ...string) {
- if base.Ctxt.Pkgpath == "" {
- // We can't record the flags if we don't know what the
- // package name is.
- return
- }
-
- type BoolFlag interface {
- IsBoolFlag() bool
- }
- type CountFlag interface {
- IsCountFlag() bool
- }
- var cmd bytes.Buffer
- for _, name := range flags {
- f := flag.Lookup(name)
- if f == nil {
- continue
- }
- getter := f.Value.(flag.Getter)
- if getter.String() == f.DefValue {
- // Flag has default value, so omit it.
- continue
- }
- if bf, ok := f.Value.(BoolFlag); ok && bf.IsBoolFlag() {
- val, ok := getter.Get().(bool)
- if ok && val {
- fmt.Fprintf(&cmd, " -%s", f.Name)
- continue
- }
- }
- if cf, ok := f.Value.(CountFlag); ok && cf.IsCountFlag() {
- val, ok := getter.Get().(int)
- if ok && val == 1 {
- fmt.Fprintf(&cmd, " -%s", f.Name)
- continue
- }
- }
- fmt.Fprintf(&cmd, " -%s=%v", f.Name, getter.Get())
- }
-
- if cmd.Len() == 0 {
- return
- }
- s := base.Ctxt.Lookup(dwarf.CUInfoPrefix + "producer." + base.Ctxt.Pkgpath)
- s.Type = objabi.SDWARFCUINFO
- // Sometimes (for example when building tests) we can link
- // together two package main archives. So allow dups.
- s.Set(obj.AttrDuplicateOK, true)
- base.Ctxt.Data = append(base.Ctxt.Data, s)
- s.P = cmd.Bytes()[1:]
- }
-
- // recordPackageName records the name of the package being
- // compiled, so that the linker can save it in the compile unit's DIE.
- func recordPackageName() {
- s := base.Ctxt.Lookup(dwarf.CUInfoPrefix + "packagename." + base.Ctxt.Pkgpath)
- s.Type = objabi.SDWARFCUINFO
- // Sometimes (for example when building tests) we can link
- // together two package main archives. So allow dups.
- s.Set(obj.AttrDuplicateOK, true)
- base.Ctxt.Data = append(base.Ctxt.Data, s)
- s.P = []byte(types.LocalPkg.Name)
- }
-
- // currentLang returns the current language version.
- func currentLang() string {
- return fmt.Sprintf("go1.%d", goversion.Version)
- }
-
- // goVersionRE is a regular expression that matches the valid
- // arguments to the -lang flag.
- var goVersionRE = regexp.MustCompile(`^go([1-9][0-9]*)\.(0|[1-9][0-9]*)$`)
-
- // A lang is a language version broken into major and minor numbers.
- type lang struct {
- major, minor int
- }
-
- // langWant is the desired language version set by the -lang flag.
- // If the -lang flag is not set, this is the zero value, meaning that
- // any language version is supported.
- var langWant lang
-
- // AllowsGoVersion reports whether a particular package
- // is allowed to use Go version major.minor.
- // We assume the imported packages have all been checked,
- // so we only have to check the local package against the -lang flag.
- func AllowsGoVersion(pkg *types.Pkg, major, minor int) bool {
- if pkg == nil {
- // TODO(mdempsky): Set Pkg for local types earlier.
- pkg = types.LocalPkg
- }
- if pkg != types.LocalPkg {
- // Assume imported packages passed type-checking.
- return true
- }
- if langWant.major == 0 && langWant.minor == 0 {
- return true
- }
- return langWant.major > major || (langWant.major == major && langWant.minor >= minor)
- }
-
- func langSupported(major, minor int, pkg *types.Pkg) bool {
- return AllowsGoVersion(pkg, major, minor)
- }
-
- // checkLang verifies that the -lang flag holds a valid value, and
- // exits if not. It initializes data used by langSupported.
- func checkLang() {
- if base.Flag.Lang == "" {
- return
- }
-
- var err error
- langWant, err = parseLang(base.Flag.Lang)
- if err != nil {
- log.Fatalf("invalid value %q for -lang: %v", base.Flag.Lang, err)
- }
-
- if def := currentLang(); base.Flag.Lang != def {
- defVers, err := parseLang(def)
- if err != nil {
- log.Fatalf("internal error parsing default lang %q: %v", def, err)
- }
- if langWant.major > defVers.major || (langWant.major == defVers.major && langWant.minor > defVers.minor) {
- log.Fatalf("invalid value %q for -lang: max known version is %q", base.Flag.Lang, def)
- }
- }
- }
-
- // parseLang parses a -lang option into a langVer.
- func parseLang(s string) (lang, error) {
- matches := goVersionRE.FindStringSubmatch(s)
- if matches == nil {
- return lang{}, fmt.Errorf(`should be something like "go1.12"`)
- }
- major, err := strconv.Atoi(matches[1])
- if err != nil {
- return lang{}, err
- }
- minor, err := strconv.Atoi(matches[2])
- if err != nil {
- return lang{}, err
- }
- return lang{major: major, minor: minor}, nil
- }
-
- // useNewABIWrapGen returns TRUE if the compiler should generate an
- // ABI wrapper for the function 'f'.
- func useABIWrapGen(f *ir.Func) bool {
- if !base.Flag.ABIWrap {
- return false
- }
-
- // Support limit option for bisecting.
- if base.Flag.ABIWrapLimit == 1 {
- return false
- }
- if base.Flag.ABIWrapLimit < 1 {
- return true
- }
- base.Flag.ABIWrapLimit--
- if base.Debug.ABIWrap != 0 && base.Flag.ABIWrapLimit == 1 {
- fmt.Fprintf(os.Stderr, "=-= limit reached after new wrapper for %s\n",
- f.LSym.Name)
- }
-
- return true
+ func makePos(b *src.PosBase, line, col uint) src.XPos {
+ return base.Ctxt.PosTable.XPos(src.MakePos(b, line, col))
}
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
- package gc
+ package noder
import (
"fmt"
"go/constant"
"go/token"
+ "io"
"os"
"path/filepath"
"runtime"
"unicode/utf8"
"cmd/compile/internal/base"
+ "cmd/compile/internal/importer"
"cmd/compile/internal/ir"
"cmd/compile/internal/syntax"
+ "cmd/compile/internal/typecheck"
"cmd/compile/internal/types"
+ "cmd/compile/internal/types2"
"cmd/internal/objabi"
"cmd/internal/src"
)
- // parseFiles concurrently parses files into *syntax.File structures.
+ // ParseFiles concurrently parses files into *syntax.File structures.
// Each declaration in every *syntax.File is converted to a syntax tree
// and its root represented by *Node is appended to Target.Decls.
// Returns the total count of parsed lines.
- func parseFiles(filenames []string) (lines uint) {
-func ParseFiles(filenames []string) uint {
++func ParseFiles(filenames []string) (lines uint) {
noders := make([]*noder, 0, len(filenames))
// Limit the number of simultaneously open files.
sem := make(chan struct{}, runtime.GOMAXPROCS(0)+10)
sem <- struct{}{}
defer func() { <-sem }()
defer close(p.err)
- base := syntax.NewFileBase(filename)
+ fbase := syntax.NewFileBase(filename)
f, err := os.Open(filename)
if err != nil {
}
defer f.Close()
- p.file, _ = syntax.Parse(base, f, p.error, p.pragma, syntax.CheckBranches) // errors are tracked via p.error
+ mode := syntax.CheckBranches
+ if base.Flag.G != 0 {
+ mode |= syntax.AllowGenerics
+ }
+ p.file, _ = syntax.Parse(fbase, f, p.error, p.pragma, mode) // errors are tracked via p.error
}(filename)
}
- var lines uint
+ // generic noding phase (using new typechecker)
+ if base.Flag.G != 0 {
+ // setup and syntax error reporting
+ nodersmap := make(map[string]*noder)
+ var files []*syntax.File
+ for _, p := range noders {
+ for e := range p.err {
+ p.errorAt(e.Pos, "%s", e.Msg)
+ }
+
+ nodersmap[p.file.Pos().RelFilename()] = p
+ files = append(files, p.file)
+ lines += p.file.EOF.Line()
+
+ }
+ if base.SyntaxErrors() != 0 {
+ base.ErrorExit()
+ }
+
+ // typechecking
+ conf := types2.Config{
+ InferFromConstraints: true,
+ IgnoreBranches: true, // parser already checked via syntax.CheckBranches mode
+ CompilerErrorMessages: true, // use error strings matching existing compiler errors
+ Error: func(err error) {
+ terr := err.(types2.Error)
+ if len(terr.Msg) > 0 && terr.Msg[0] == '\t' {
+ // types2 reports error clarifications via separate
+ // error messages which are indented with a tab.
+ // Ignore them to satisfy tools and tests that expect
+ // only one error in such cases.
+ // TODO(gri) Need to adjust error reporting in types2.
+ return
+ }
+ p := nodersmap[terr.Pos.RelFilename()]
+ base.ErrorfAt(p.makeXPos(terr.Pos), "%s", terr.Msg)
+ },
+ Importer: &gcimports{
+ packages: make(map[string]*types2.Package),
+ lookup: func(path string) (io.ReadCloser, error) {
+ file, ok := findpkg(path)
+ if !ok {
+ return nil, fmt.Errorf("can't find import: %q", path)
+ }
+ return os.Open(file)
+ },
+ },
+ }
+ info := types2.Info{
+ Types: make(map[syntax.Expr]types2.TypeAndValue),
+ Defs: make(map[*syntax.Name]types2.Object),
+ Uses: make(map[*syntax.Name]types2.Object),
+ Selections: make(map[*syntax.SelectorExpr]*types2.Selection),
+ // expand as needed
+ }
+ conf.Check(base.Ctxt.Pkgpath, files, &info)
+ base.ExitIfErrors()
+ if base.Flag.G < 2 {
+ return
+ }
+
+ // noding
+ for _, p := range noders {
+ // errors have already been reported
+
+ p.typeInfo = &info
+ p.node()
+ lines += p.file.EOF.Line()
+ p.file = nil // release memory
+ base.ExitIfErrors()
+
+ // Always run testdclstack here, even when debug_dclstack is not set, as a sanity measure.
- testdclstack()
++ types.CheckDclstack()
+ }
+
+ types.LocalPkg.Height = myheight
+ return
+ }
+
+ // traditional (non-generic) noding phase
for _, p := range noders {
for e := range p.err {
p.errorAt(e.Pos, "%s", e.Msg)
}
p.node()
- lines += p.file.Lines
+ lines += p.file.EOF.Line()
p.file = nil // release memory
-
if base.SyntaxErrors() != 0 {
base.ErrorExit()
}
+
// Always run testdclstack here, even when debug_dclstack is not set, as a sanity measure.
- testdclstack()
+ types.CheckDclstack()
}
for _, p := range noders {
}
types.LocalPkg.Height = myheight
+ return
+}
- return lines
+// Temporary import helper to get type2-based type-checking going.
+type gcimports struct {
+ packages map[string]*types2.Package
+ lookup func(path string) (io.ReadCloser, error)
+}
+
+func (m *gcimports) Import(path string) (*types2.Package, error) {
+ return m.ImportFrom(path, "" /* no vendoring */, 0)
+}
+
+func (m *gcimports) ImportFrom(path, srcDir string, mode types2.ImportMode) (*types2.Package, error) {
+ if mode != 0 {
+ panic("mode must be 0")
+ }
+ return importer.Import(m.packages, path, srcDir, m.lookup)
}
// makeSrcPosBase translates from a *syntax.PosBase to a *src.PosBase.
trackScopes bool
scopeVars []int
+ // typeInfo provides access to the type information computed by the new
+ // typechecker. It is only present if -G is set, and all noders point to
+ // the same types.Info. For now this is a local field, if need be we can
+ // make it global.
+ typeInfo *types2.Info
+
lastCloseScopePos syntax.Pos
}
+// For now we provide these basic accessors to get to type and object
+// information of expression nodes during noding. Eventually we will
+// attach this information directly to the syntax tree which should
+// simplify access and make it more efficient as well.
+
+// typ returns the type and value information for the given expression.
+func (p *noder) typ(x syntax.Expr) types2.TypeAndValue {
+ return p.typeInfo.Types[x]
+}
+
+// def returns the object for the given name in its declaration.
+func (p *noder) def(x *syntax.Name) types2.Object {
+ return p.typeInfo.Defs[x]
+}
+
+// use returns the object for the given name outside its declaration.
+func (p *noder) use(x *syntax.Name) types2.Object {
+ return p.typeInfo.Uses[x]
+}
+
+// sel returns the selection information for the given selector expression.
+func (p *noder) sel(x *syntax.SelectorExpr) *types2.Selection {
+ return p.typeInfo.Selections[x]
+}
+
func (p *noder) funcBody(fn *ir.Func, block *syntax.BlockStmt) {
oldScope := p.scope
p.scope = 0
- funchdr(fn)
+ typecheck.StartFuncBody(fn)
if block != nil {
body := p.stmts(block.List)
if body == nil {
- body = []ir.Node{ir.Nod(ir.OBLOCK, nil, nil)}
+ body = []ir.Node{ir.NewBlockStmt(base.Pos, nil)}
}
- fn.PtrBody().Set(body)
+ fn.Body.Set(body)
base.Pos = p.makeXPos(block.Rbrace)
fn.Endlineno = base.Pos
}
- funcbody()
+ typecheck.FinishFuncBody()
p.scope = oldScope
}
types.Markdcl()
if p.trackScopes {
- Curfn.Parents = append(Curfn.Parents, p.scope)
- p.scopeVars = append(p.scopeVars, len(Curfn.Dcl))
- p.scope = ir.ScopeID(len(Curfn.Parents))
+ ir.CurFunc.Parents = append(ir.CurFunc.Parents, p.scope)
+ p.scopeVars = append(p.scopeVars, len(ir.CurFunc.Dcl))
+ p.scope = ir.ScopeID(len(ir.CurFunc.Parents))
p.markScope(pos)
}
if p.trackScopes {
scopeVars := p.scopeVars[len(p.scopeVars)-1]
p.scopeVars = p.scopeVars[:len(p.scopeVars)-1]
- if scopeVars == len(Curfn.Dcl) {
+ if scopeVars == len(ir.CurFunc.Dcl) {
// no variables were declared in this scope, so we can retract it.
- if int(p.scope) != len(Curfn.Parents) {
+ if int(p.scope) != len(ir.CurFunc.Parents) {
base.Fatalf("scope tracking inconsistency, no variables declared but scopes were not retracted")
}
- p.scope = Curfn.Parents[p.scope-1]
- Curfn.Parents = Curfn.Parents[:len(Curfn.Parents)-1]
+ p.scope = ir.CurFunc.Parents[p.scope-1]
+ ir.CurFunc.Parents = ir.CurFunc.Parents[:len(ir.CurFunc.Parents)-1]
- nmarks := len(Curfn.Marks)
- Curfn.Marks[nmarks-1].Scope = p.scope
+ nmarks := len(ir.CurFunc.Marks)
+ ir.CurFunc.Marks[nmarks-1].Scope = p.scope
prevScope := ir.ScopeID(0)
if nmarks >= 2 {
- prevScope = Curfn.Marks[nmarks-2].Scope
+ prevScope = ir.CurFunc.Marks[nmarks-2].Scope
}
- if Curfn.Marks[nmarks-1].Scope == prevScope {
- Curfn.Marks = Curfn.Marks[:nmarks-1]
+ if ir.CurFunc.Marks[nmarks-1].Scope == prevScope {
+ ir.CurFunc.Marks = ir.CurFunc.Marks[:nmarks-1]
}
return
}
- p.scope = Curfn.Parents[p.scope-1]
+ p.scope = ir.CurFunc.Parents[p.scope-1]
p.markScope(pos)
}
func (p *noder) markScope(pos syntax.Pos) {
xpos := p.makeXPos(pos)
- if i := len(Curfn.Marks); i > 0 && Curfn.Marks[i-1].Pos == xpos {
- Curfn.Marks[i-1].Scope = p.scope
+ if i := len(ir.CurFunc.Marks); i > 0 && ir.CurFunc.Marks[i-1].Pos == xpos {
+ ir.CurFunc.Marks[i-1].Scope = p.scope
} else {
- Curfn.Marks = append(Curfn.Marks, ir.Mark{Pos: xpos, Scope: p.scope})
+ ir.CurFunc.Marks = append(ir.CurFunc.Marks, ir.Mark{Pos: xpos, Scope: p.scope})
}
}
p.setlineno(p.file.PkgName)
mkpackage(p.file.PkgName.Value)
- if pragma, ok := p.file.Pragma.(*Pragma); ok {
+ if pragma, ok := p.file.Pragma.(*pragmas); ok {
pragma.Flag &^= ir.GoBuildPragma
p.checkUnused(pragma)
}
- Target.Decls = append(Target.Decls, p.decls(p.file.DeclList)...)
+ typecheck.Target.Decls = append(typecheck.Target.Decls, p.decls(p.file.DeclList)...)
base.Pos = src.NoXPos
clearImports()
p.errorAt(l.pos, "//go:linkname only allowed in Go files that import \"unsafe\"")
continue
}
- n := ir.AsNode(lookup(l.local).Def)
+ n := ir.AsNode(typecheck.Lookup(l.local).Def)
if n == nil || n.Op() != ir.ONAME {
// TODO(mdempsky): Change to p.errorAt before Go 1.17 release.
// base.WarnfAt(p.makeXPos(l.pos), "//go:linkname must refer to declared function or variable (will be an error in Go 1.17)")
}
n.Sym().Linkname = l.remote
}
- Target.CgoPragmas = append(Target.CgoPragmas, p.pragcgobuf...)
+ typecheck.Target.CgoPragmas = append(typecheck.Target.CgoPragmas, p.pragcgobuf...)
}
func (p *noder) decls(decls []syntax.Decl) (l []ir.Node) {
}
func (p *noder) importDecl(imp *syntax.ImportDecl) {
- if imp.Path.Bad {
+ if imp.Path == nil || imp.Path.Bad {
return // avoid follow-on errors if there was a syntax error
}
- if pragma, ok := imp.Pragma.(*Pragma); ok {
+ if pragma, ok := imp.Pragma.(*pragmas); ok {
p.checkUnused(pragma)
}
return
}
- if ipkg == unsafepkg {
+ if ipkg == ir.Pkgs.Unsafe {
p.importedUnsafe = true
}
if ipkg.Path == "embed" {
}
if !ipkg.Direct {
- Target.Imports = append(Target.Imports, ipkg)
+ typecheck.Target.Imports = append(typecheck.Target.Imports, ipkg)
}
ipkg.Direct = true
if imp.LocalPkgName != nil {
my = p.name(imp.LocalPkgName)
} else {
- my = lookup(ipkg.Name)
+ my = typecheck.Lookup(ipkg.Name)
}
pack := ir.NewPkgName(p.pos(imp), my, ipkg)
return
}
if my.Def != nil {
- redeclare(pack.Pos(), my, "as imported package name")
+ typecheck.Redeclared(pack.Pos(), my, "as imported package name")
}
my.Def = pack
my.Lastlineno = pack.Pos()
exprs = p.exprList(decl.Values)
}
- if pragma, ok := decl.Pragma.(*Pragma); ok {
+ if pragma, ok := decl.Pragma.(*pragmas); ok {
if len(pragma.Embeds) > 0 {
if !p.importedEmbed {
// This check can't be done when building the list pragma.Embeds
}
p.setlineno(decl)
- return variter(names, typ, exprs)
+ return typecheck.DeclVars(names, typ, exprs)
}
// constState tracks state between constant specifiers within a
}
}
- if pragma, ok := decl.Pragma.(*Pragma); ok {
+ if pragma, ok := decl.Pragma.(*pragmas); ok {
p.checkUnused(pragma)
}
if decl.Values == nil {
v = ir.DeepCopy(n.Pos(), v)
}
- declare(n, dclcontext)
+ typecheck.Declare(n, typecheck.DeclContext)
n.Ntype = typ
n.Defn = v
n.SetIota(cs.iota)
- nn = append(nn, p.nod(decl, ir.ODCLCONST, n, nil))
+ nn = append(nn, ir.NewDecl(p.pos(decl), ir.ODCLCONST, n))
}
if len(values) > len(names) {
func (p *noder) typeDecl(decl *syntax.TypeDecl) ir.Node {
n := p.declName(ir.OTYPE, decl.Name)
- declare(n, dclcontext)
+ typecheck.Declare(n, typecheck.DeclContext)
// decl.Type may be nil but in that case we got a syntax error during parsing
typ := p.typeExprOrNil(decl.Type)
n.Ntype = typ
n.SetAlias(decl.Alias)
- if pragma, ok := decl.Pragma.(*Pragma); ok {
+ if pragma, ok := decl.Pragma.(*pragmas); ok {
if !decl.Alias {
- n.SetPragma(pragma.Flag & TypePragmas)
- pragma.Flag &^= TypePragmas
+ n.SetPragma(pragma.Flag & typePragmas)
+ pragma.Flag &^= typePragmas
}
p.checkUnused(pragma)
}
- nod := p.nod(decl, ir.ODCLTYPE, n, nil)
- if n.Alias() && !langSupported(1, 9, types.LocalPkg) {
+ nod := ir.NewDecl(p.pos(decl), ir.ODCLTYPE, n)
+ if n.Alias() && !types.AllowsGoVersion(types.LocalPkg, 1, 9) {
base.ErrorfAt(nod.Pos(), "type aliases only supported as of -lang=go1.9")
}
return nod
if len(t.Params) > 0 || len(t.Results) > 0 {
base.ErrorfAt(f.Pos(), "func init must have no arguments and no return values")
}
- Target.Inits = append(Target.Inits, f)
+ typecheck.Target.Inits = append(typecheck.Target.Inits, f)
}
if types.LocalPkg.Name == "main" && name.Name == "main" {
name = ir.BlankNode.Sym() // filled in by typecheckfunc
}
- f.Nname = newFuncNameAt(p.pos(fun.Name), name, f)
+ f.Nname = ir.NewFuncNameAt(p.pos(fun.Name), name, f)
f.Nname.Defn = f
f.Nname.Ntype = t
- if pragma, ok := fun.Pragma.(*Pragma); ok {
- f.Pragma = pragma.Flag & FuncPragmas
+ if pragma, ok := fun.Pragma.(*pragmas); ok {
+ f.Pragma = pragma.Flag & funcPragmas
if pragma.Flag&ir.Systemstack != 0 && pragma.Flag&ir.Nosplit != 0 {
base.ErrorfAt(f.Pos(), "go:nosplit and go:systemstack cannot be combined")
}
- pragma.Flag &^= FuncPragmas
+ pragma.Flag &^= funcPragmas
p.checkUnused(pragma)
}
if fun.Recv == nil {
- declare(f.Nname, ir.PFUNC)
+ typecheck.Declare(f.Nname, ir.PFUNC)
}
p.funcBody(f, fun.Body)
n.SetDiag(expr.Bad) // avoid follow-on errors if there was a syntax error
return n
case *syntax.CompositeLit:
- n := p.nod(expr, ir.OCOMPLIT, nil, nil)
+ n := ir.NewCompLitExpr(p.pos(expr), ir.OCOMPLIT, nil, nil)
if expr.Type != nil {
- n.SetRight(p.expr(expr.Type))
+ n.Ntype = ir.Node(p.expr(expr.Type)).(ir.Ntype)
}
l := p.exprs(expr.ElemList)
for i, e := range l {
l[i] = p.wrapname(expr.ElemList[i], e)
}
- n.PtrList().Set(l)
+ n.List.Set(l)
base.Pos = p.makeXPos(expr.Rbrace)
return n
case *syntax.KeyValueExpr:
// use position of expr.Key rather than of expr (which has position of ':')
- return p.nod(expr.Key, ir.OKEY, p.expr(expr.Key), p.wrapname(expr.Value, p.expr(expr.Value)))
+ return ir.NewKeyExpr(p.pos(expr.Key), p.expr(expr.Key), p.wrapname(expr.Value, p.expr(expr.Value)))
case *syntax.FuncLit:
return p.funcLit(expr)
case *syntax.ParenExpr:
- return p.nod(expr, ir.OPAREN, p.expr(expr.X), nil)
+ return ir.NewParenExpr(p.pos(expr), p.expr(expr.X))
case *syntax.SelectorExpr:
// parser.new_dotname
obj := p.expr(expr.X)
pack.Used = true
return importName(pack.Pkg.Lookup(expr.Sel.Value))
}
- n := nodSym(ir.OXDOT, obj, p.name(expr.Sel))
+ n := ir.NewSelectorExpr(base.Pos, ir.OXDOT, obj, p.name(expr.Sel))
n.SetPos(p.pos(expr)) // lineno may have been changed by p.expr(expr.X)
return n
case *syntax.IndexExpr:
- return p.nod(expr, ir.OINDEX, p.expr(expr.X), p.expr(expr.Index))
+ return ir.NewIndexExpr(p.pos(expr), p.expr(expr.X), p.expr(expr.Index))
case *syntax.SliceExpr:
op := ir.OSLICE
if expr.Full {
n.SetSliceBounds(index[0], index[1], index[2])
return n
case *syntax.AssertExpr:
- return p.nod(expr, ir.ODOTTYPE, p.expr(expr.X), p.typeExpr(expr.Type))
+ return ir.NewTypeAssertExpr(p.pos(expr), p.expr(expr.X), p.typeExpr(expr.Type).(ir.Ntype))
case *syntax.Operation:
if expr.Op == syntax.Add && expr.Y != nil {
return p.sum(expr)
pos, op := p.pos(expr), p.unOp(expr.Op)
switch op {
case ir.OADDR:
- return nodAddrAt(pos, x)
+ return typecheck.NodAddrAt(pos, x)
case ir.ODEREF:
return ir.NewStarExpr(pos, x)
}
}
return ir.NewBinaryExpr(pos, op, x, y)
case *syntax.CallExpr:
- n := p.nod(expr, ir.OCALL, p.expr(expr.Fun), nil)
- n.PtrList().Set(p.exprs(expr.ArgList))
- n.SetIsDDD(expr.HasDots)
+ n := ir.NewCallExpr(p.pos(expr), ir.OCALL, p.expr(expr.Fun), nil)
+ n.Args.Set(p.exprs(expr.ArgList))
+ n.IsDDD = expr.HasDots
return n
case *syntax.ArrayType:
nstr = nil
chunks = chunks[:0]
}
- n = p.nod(add, ir.OADD, n, r)
+ n = ir.NewBinaryExpr(p.pos(add), ir.OADD, n, r)
}
if len(chunks) > 1 {
nstr.SetVal(constant.MakeString(strings.Join(chunks, "")))
}
sym := p.packname(typ)
- n := ir.NewField(p.pos(typ), lookup(sym.Name), importName(sym).(ir.Ntype), nil)
+ n := ir.NewField(p.pos(typ), typecheck.Lookup(sym.Name), importName(sym).(ir.Ntype), nil)
n.Embedded = true
if isStar {
for i, stmt := range stmts {
s := p.stmtFall(stmt, fallOK && i+1 == len(stmts))
if s == nil {
- } else if s.Op() == ir.OBLOCK && s.(*ir.BlockStmt).List().Len() > 0 {
+ } else if s.Op() == ir.OBLOCK && len(s.(*ir.BlockStmt).List) > 0 {
// Inline non-empty block.
// Empty blocks must be preserved for checkreturn.
- nodes = append(nodes, s.(*ir.BlockStmt).List().Slice()...)
+ nodes = append(nodes, s.(*ir.BlockStmt).List...)
} else {
nodes = append(nodes, s)
}
l := p.blockStmt(stmt)
if len(l) == 0 {
// TODO(mdempsky): Line number?
- return ir.Nod(ir.OBLOCK, nil, nil)
+ return ir.NewBlockStmt(base.Pos, nil)
}
- return liststmt(l)
+ return ir.NewBlockStmt(src.NoXPos, l)
case *syntax.ExprStmt:
return p.wrapname(stmt, p.expr(stmt.X))
case *syntax.SendStmt:
- return p.nod(stmt, ir.OSEND, p.expr(stmt.Chan), p.expr(stmt.Value))
+ return ir.NewSendStmt(p.pos(stmt), p.expr(stmt.Chan), p.expr(stmt.Value))
case *syntax.DeclStmt:
- return liststmt(p.decls(stmt.DeclList))
+ return ir.NewBlockStmt(src.NoXPos, p.decls(stmt.DeclList))
case *syntax.AssignStmt:
if stmt.Op != 0 && stmt.Op != syntax.Def {
n := ir.NewAssignOpStmt(p.pos(stmt), p.binOp(stmt.Op), p.expr(stmt.Lhs), p.expr(stmt.Rhs))
- n.SetImplicit(stmt.Rhs == syntax.ImplicitOne)
+ n.IncDec = stmt.Rhs == syntax.ImplicitOne
return n
}
rhs := p.exprList(stmt.Rhs)
if list, ok := stmt.Lhs.(*syntax.ListExpr); ok && len(list.ElemList) != 1 || len(rhs) != 1 {
- n := p.nod(stmt, ir.OAS2, nil, nil)
- n.SetColas(stmt.Op == syntax.Def)
- n.PtrList().Set(p.assignList(stmt.Lhs, n, n.Colas()))
- n.PtrRlist().Set(rhs)
+ n := ir.NewAssignListStmt(p.pos(stmt), ir.OAS2, nil, nil)
+ n.Def = stmt.Op == syntax.Def
+ n.Lhs.Set(p.assignList(stmt.Lhs, n, n.Def))
+ n.Rhs.Set(rhs)
return n
}
- n := p.nod(stmt, ir.OAS, nil, nil)
- n.SetColas(stmt.Op == syntax.Def)
- n.SetLeft(p.assignList(stmt.Lhs, n, n.Colas())[0])
- n.SetRight(rhs[0])
+ n := ir.NewAssignStmt(p.pos(stmt), nil, nil)
+ n.Def = stmt.Op == syntax.Def
+ n.X = p.assignList(stmt.Lhs, n, n.Def)[0]
+ n.Y = rhs[0]
return n
case *syntax.BranchStmt:
if stmt.Results != nil {
results = p.exprList(stmt.Results)
}
- n := p.nod(stmt, ir.ORETURN, nil, nil)
- n.PtrList().Set(results)
- if n.List().Len() == 0 && Curfn != nil {
- for _, ln := range Curfn.Dcl {
- if ln.Class() == ir.PPARAM {
+ n := ir.NewReturnStmt(p.pos(stmt), nil)
+ n.Results.Set(results)
+ if len(n.Results) == 0 && ir.CurFunc != nil {
+ for _, ln := range ir.CurFunc.Dcl {
+ if ln.Class_ == ir.PPARAM {
continue
}
- if ln.Class() != ir.PPARAMOUT {
+ if ln.Class_ != ir.PPARAMOUT {
break
}
if ln.Sym().Def != ln {
}
newOrErr = true
- n := NewName(sym)
- declare(n, dclcontext)
+ n := typecheck.NewName(sym)
+ typecheck.Declare(n, typecheck.DeclContext)
n.Defn = defn
- defn.PtrInit().Append(ir.Nod(ir.ODCL, n, nil))
+ defn.PtrInit().Append(ir.NewDecl(base.Pos, ir.ODCL, n))
res[i] = n
}
func (p *noder) ifStmt(stmt *syntax.IfStmt) ir.Node {
p.openScope(stmt.Pos())
- n := p.nod(stmt, ir.OIF, nil, nil)
+ n := ir.NewIfStmt(p.pos(stmt), nil, nil, nil)
if stmt.Init != nil {
- n.PtrInit().Set1(p.stmt(stmt.Init))
+ *n.PtrInit() = []ir.Node{p.stmt(stmt.Init)}
}
if stmt.Cond != nil {
- n.SetLeft(p.expr(stmt.Cond))
+ n.Cond = p.expr(stmt.Cond)
}
- n.PtrBody().Set(p.blockStmt(stmt.Then))
+ n.Body.Set(p.blockStmt(stmt.Then))
if stmt.Else != nil {
e := p.stmt(stmt.Else)
if e.Op() == ir.OBLOCK {
- n.PtrRlist().Set(e.List().Slice())
+ e := e.(*ir.BlockStmt)
+ n.Else.Set(e.List)
} else {
- n.PtrRlist().Set1(e)
+ n.Else = []ir.Node{e}
}
}
p.closeAnotherScope()
panic("unexpected RangeClause")
}
- n := p.nod(r, ir.ORANGE, nil, p.expr(r.X))
+ n := ir.NewRangeStmt(p.pos(r), nil, p.expr(r.X), nil)
if r.Lhs != nil {
- n.SetColas(r.Def)
- n.PtrList().Set(p.assignList(r.Lhs, n, n.Colas()))
+ n.Def = r.Def
+ n.Vars.Set(p.assignList(r.Lhs, n, n.Def))
}
- n.PtrBody().Set(p.blockStmt(stmt.Body))
+ n.Body.Set(p.blockStmt(stmt.Body))
p.closeAnotherScope()
return n
}
- n := p.nod(stmt, ir.OFOR, nil, nil)
+ n := ir.NewForStmt(p.pos(stmt), nil, nil, nil, nil)
if stmt.Init != nil {
- n.PtrInit().Set1(p.stmt(stmt.Init))
+ *n.PtrInit() = []ir.Node{p.stmt(stmt.Init)}
}
if stmt.Cond != nil {
- n.SetLeft(p.expr(stmt.Cond))
+ n.Cond = p.expr(stmt.Cond)
}
if stmt.Post != nil {
- n.SetRight(p.stmt(stmt.Post))
+ n.Post = p.stmt(stmt.Post)
}
- n.PtrBody().Set(p.blockStmt(stmt.Body))
+ n.Body.Set(p.blockStmt(stmt.Body))
p.closeAnotherScope()
return n
}
func (p *noder) switchStmt(stmt *syntax.SwitchStmt) ir.Node {
p.openScope(stmt.Pos())
- n := p.nod(stmt, ir.OSWITCH, nil, nil)
+ n := ir.NewSwitchStmt(p.pos(stmt), nil, nil)
if stmt.Init != nil {
- n.PtrInit().Set1(p.stmt(stmt.Init))
+ *n.PtrInit() = []ir.Node{p.stmt(stmt.Init)}
}
if stmt.Tag != nil {
- n.SetLeft(p.expr(stmt.Tag))
+ n.Tag = p.expr(stmt.Tag)
}
var tswitch *ir.TypeSwitchGuard
- if l := n.Left(); l != nil && l.Op() == ir.OTYPESW {
+ if l := n.Tag; l != nil && l.Op() == ir.OTYPESW {
tswitch = l.(*ir.TypeSwitchGuard)
}
- n.PtrList().Set(p.caseClauses(stmt.Body, tswitch, stmt.Rbrace))
+ n.Cases.Set(p.caseClauses(stmt.Body, tswitch, stmt.Rbrace))
p.closeScope(stmt.Rbrace)
return n
}
p.openScope(clause.Pos())
- n := p.nod(clause, ir.OCASE, nil, nil)
+ n := ir.NewCaseStmt(p.pos(clause), nil, nil)
if clause.Cases != nil {
- n.PtrList().Set(p.exprList(clause.Cases))
+ n.List.Set(p.exprList(clause.Cases))
}
- if tswitch != nil && tswitch.Left() != nil {
- nn := NewName(tswitch.Left().Sym())
- declare(nn, dclcontext)
- n.PtrRlist().Set1(nn)
+ if tswitch != nil && tswitch.Tag != nil {
+ nn := typecheck.NewName(tswitch.Tag.Sym())
+ typecheck.Declare(nn, typecheck.DeclContext)
+ n.Vars = []ir.Node{nn}
// keep track of the instances for reporting unused
nn.Defn = tswitch
}
body = body[:len(body)-1]
}
- n.PtrBody().Set(p.stmtsFall(body, true))
- if l := n.Body().Len(); l > 0 && n.Body().Index(l-1).Op() == ir.OFALL {
+ n.Body.Set(p.stmtsFall(body, true))
+ if l := len(n.Body); l > 0 && n.Body[l-1].Op() == ir.OFALL {
if tswitch != nil {
base.Errorf("cannot fallthrough in type switch")
}
}
func (p *noder) selectStmt(stmt *syntax.SelectStmt) ir.Node {
- n := p.nod(stmt, ir.OSELECT, nil, nil)
- n.PtrList().Set(p.commClauses(stmt.Body, stmt.Rbrace))
+ n := ir.NewSelectStmt(p.pos(stmt), nil)
+ n.Cases.Set(p.commClauses(stmt.Body, stmt.Rbrace))
return n
}
}
p.openScope(clause.Pos())
- n := p.nod(clause, ir.OCASE, nil, nil)
+ n := ir.NewCaseStmt(p.pos(clause), nil, nil)
if clause.Comm != nil {
- n.PtrList().Set1(p.stmt(clause.Comm))
+ n.List = []ir.Node{p.stmt(clause.Comm)}
}
- n.PtrBody().Set(p.stmts(clause.Body))
+ n.Body.Set(p.stmts(clause.Body))
nodes = append(nodes, n)
}
if len(clauses) > 0 {
func (p *noder) labeledStmt(label *syntax.LabeledStmt, fallOK bool) ir.Node {
sym := p.name(label.Label)
- lhs := p.nodSym(label, ir.OLABEL, nil, sym)
+ lhs := ir.NewLabelStmt(p.pos(label), sym)
var ls ir.Node
if label.Stmt != nil { // TODO(mdempsky): Should always be present.
if ls != nil {
switch ls.Op() {
case ir.OFOR:
- ls.SetSym(sym)
+ ls := ls.(*ir.ForStmt)
+ ls.Label = sym
case ir.ORANGE:
- ls.SetSym(sym)
+ ls := ls.(*ir.RangeStmt)
+ ls.Label = sym
case ir.OSWITCH:
- ls.SetSym(sym)
+ ls := ls.(*ir.SwitchStmt)
+ ls.Label = sym
case ir.OSELECT:
- ls.SetSym(sym)
+ ls := ls.(*ir.SelectStmt)
+ ls.Label = sym
}
}
}
l := []ir.Node{lhs}
if ls != nil {
if ls.Op() == ir.OBLOCK {
- l = append(l, ls.List().Slice()...)
+ ls := ls.(*ir.BlockStmt)
+ l = append(l, ls.List...)
} else {
l = append(l, ls)
}
}
- return liststmt(l)
+ return ir.NewBlockStmt(src.NoXPos, l)
}
var unOps = [...]ir.Op{
// literal is not compatible with the current language version.
func checkLangCompat(lit *syntax.BasicLit) {
s := lit.Value
- if len(s) <= 2 || langSupported(1, 13, types.LocalPkg) {
+ if len(s) <= 2 || types.AllowsGoVersion(types.LocalPkg, 1, 13) {
return
}
// len(s) > 2
// to big.Float to match cmd/compile's historical precision.
// TODO(mdempsky): Remove.
if v.Kind() == constant.Float {
- v = constant.Make(bigFloatVal(v))
+ v = constant.Make(ir.BigFloat(v))
}
return v
}
func (p *noder) name(name *syntax.Name) *types.Sym {
- return lookup(name.Value)
+ return typecheck.Lookup(name.Value)
}
func (p *noder) mkname(name *syntax.Name) ir.Node {
}
fallthrough
case ir.ONAME, ir.ONONAME, ir.OPACK:
- p := p.nod(n, ir.OPAREN, x, nil)
+ p := ir.NewParenExpr(p.pos(n), x)
p.SetImplicit(true)
return p
}
return x
}
- func (p *noder) nod(orig syntax.Node, op ir.Op, left, right ir.Node) ir.Node {
- return ir.NodAt(p.pos(orig), op, left, right)
- }
-
- func (p *noder) nodSym(orig syntax.Node, op ir.Op, left ir.Node, sym *types.Sym) ir.Node {
- n := nodSym(op, left, sym)
- n.SetPos(p.pos(orig))
- return n
- }
-
func (p *noder) pos(n syntax.Node) src.XPos {
// TODO(gri): orig.Pos() should always be known - fix package syntax
xpos := base.Pos
"go:generate": true,
}
- // *Pragma is the value stored in a syntax.Pragma during parsing.
- type Pragma struct {
+ // *pragmas is the value stored in a syntax.pragmas during parsing.
+ type pragmas struct {
Flag ir.PragmaFlag // collected bits
- Pos []PragmaPos // position of each individual flag
- Embeds []PragmaEmbed
+ Pos []pragmaPos // position of each individual flag
+ Embeds []pragmaEmbed
}
- type PragmaPos struct {
+ type pragmaPos struct {
Flag ir.PragmaFlag
Pos syntax.Pos
}
- type PragmaEmbed struct {
+ type pragmaEmbed struct {
Pos syntax.Pos
Patterns []string
}
- func (p *noder) checkUnused(pragma *Pragma) {
+ func (p *noder) checkUnused(pragma *pragmas) {
for _, pos := range pragma.Pos {
if pos.Flag&pragma.Flag != 0 {
p.errorAt(pos.Pos, "misplaced compiler directive")
}
}
- func (p *noder) checkUnusedDuringParse(pragma *Pragma) {
+ func (p *noder) checkUnusedDuringParse(pragma *pragmas) {
for _, pos := range pragma.Pos {
if pos.Flag&pragma.Flag != 0 {
p.error(syntax.Error{Pos: pos.Pos, Msg: "misplaced compiler directive"})
// pragma is called concurrently if files are parsed concurrently.
func (p *noder) pragma(pos syntax.Pos, blankLine bool, text string, old syntax.Pragma) syntax.Pragma {
- pragma, _ := old.(*Pragma)
+ pragma, _ := old.(*pragmas)
if pragma == nil {
- pragma = new(Pragma)
+ pragma = new(pragmas)
}
if text == "" {
p.error(syntax.Error{Pos: pos, Msg: "usage: //go:embed pattern..."})
break
}
- pragma.Embeds = append(pragma.Embeds, PragmaEmbed{pos, args})
+ pragma.Embeds = append(pragma.Embeds, pragmaEmbed{pos, args})
case strings.HasPrefix(text, "go:cgo_import_dynamic "):
// This is permitted for general use because Solaris
p.error(syntax.Error{Pos: pos, Msg: fmt.Sprintf("//%s is not allowed in the standard library", verb)})
}
pragma.Flag |= flag
- pragma.Pos = append(pragma.Pos, PragmaPos{flag, pos})
+ pragma.Pos = append(pragma.Pos, pragmaPos{flag, pos})
}
return pragma
}
return list, nil
}
+
+ // fakeRecv returns a new Field to stand in for a method's receiver
+ // slot, using the placeholder types.FakeRecvType() and no name or
+ // declared type expression.
+ func fakeRecv() *ir.Field {
+ return ir.NewField(base.Pos, nil, nil, types.FakeRecvType())
+ }
+
+ // funcLit lowers a syntax function literal to IR: it builds the
+ // ir.Func and its OCLOSURE expression, nodes the body, and rewires
+ // the closure-variable chains left behind by parsing the literal.
+ // It returns the closure expression node.
+ func (p *noder) funcLit(expr *syntax.FuncLit) ir.Node {
+ // The literal's type expression is noded twice: one copy is attached
+ // to the function name (Nname.Ntype), the other to the closure
+ // (ClosureType). Presumably they must be distinct nodes so each can
+ // be typechecked independently — confirm against typecheckclosure.
+ xtype := p.typeExpr(expr.Type)
+ ntype := p.typeExpr(expr.Type)
+
+ fn := ir.NewFunc(p.pos(expr))
+ // A literal inside another function is a hidden closure.
+ fn.SetIsHiddenClosure(ir.CurFunc != nil)
+ fn.Nname = ir.NewFuncNameAt(p.pos(expr), ir.BlankNode.Sym(), fn) // filled in by typecheckclosure
+ fn.Nname.Ntype = xtype
+ fn.Nname.Defn = fn
+
+ clo := ir.NewClosureExpr(p.pos(expr), fn)
+ fn.ClosureType = ntype
+ fn.OClosure = clo
+
+ p.funcBody(fn, expr.Body)
+
+ // closure-specific variables are hanging off the
+ // ordinary ones in the symbol table; see oldname.
+ // unhook them.
+ // make the list of pointers for the closure call.
+ for _, v := range fn.ClosureVars {
+ // Unlink from v1; see comment in syntax.go type Param for these fields.
+ v1 := v.Defn
+ v1.Name().Innermost = v.Outer
+
+ // If the closure usage of v is not dense,
+ // we need to make it dense; now that we're out
+ // of the function in which v appeared,
+ // look up v.Sym in the enclosing function
+ // and keep it around for use in the compiled code.
+ //
+ // That is, suppose we just finished parsing the innermost
+ // closure f4 in this code:
+ //
+ // func f() {
+ // v := 1
+ // func() { // f2
+ // use(v)
+ // func() { // f3
+ // func() { // f4
+ // use(v)
+ // }()
+ // }()
+ // }()
+ // }
+ //
+ // At this point v.Outer is f2's v; there is no f3's v.
+ // To construct the closure f4 from within f3,
+ // we need to use f3's v and in this case we need to create f3's v.
+ // We are now in the context of f3, so calling oldname(v.Sym)
+ // obtains f3's v, creating it if necessary (as it is in the example).
+ //
+ // capturevars will decide whether to use v directly or &v.
+ v.Outer = oldname(v.Sym()).(*ir.Name)
+ }
+
+ return clo
+ }
+
+ // A function named init is a special case.
+ // It is called by the initialization before main is run.
+ // To make it unique within a package and also uncallable,
+ // the name, normally "pkg.init", is altered to "pkg.init.0".
+ var renameinitgen int // count of init functions seen so far
+
+ // renameinit returns the next unique symbol for a user-written init
+ // function: "init.0", "init.1", and so on.
+ func renameinit() *types.Sym {
+ s := typecheck.LookupNum("init.", renameinitgen)
+ renameinitgen++
+ return s
+ }
+
+ // oldname returns the Node that declares symbol s in the current scope.
+ // If no such Node currently exists, an ONONAME Node is returned instead.
+ // Automatically creates a new closure variable if the referenced symbol was
+ // declared in a different (containing) function.
+ func oldname(s *types.Sym) ir.Node {
+ // Non-local symbols resolve later; just reference them by identifier.
+ if s.Pkg != types.LocalPkg {
+ return ir.NewIdent(base.Pos, s)
+ }
+
+ n := ir.AsNode(s.Def)
+ if n == nil {
+ // Maybe a top-level declaration will come along later to
+ // define s. resolve will check s.Def again once all input
+ // source has been processed.
+ return ir.NewIdent(base.Pos, s)
+ }
+
+ if ir.CurFunc != nil && n.Op() == ir.ONAME && n.Name().Curfn != nil && n.Name().Curfn != ir.CurFunc {
+ // Inner func is referring to var in outer func.
+ //
+ // TODO(rsc): If there is an outer variable x and we
+ // are parsing x := 5 inside the closure, until we get to
+ // the := it looks like a reference to the outer x so we'll
+ // make x a closure variable unnecessarily.
+ n := n.(*ir.Name)
+ // Innermost points at the most recent closure variable created
+ // for this name; reuse it if it belongs to the current function.
+ c := n.Name().Innermost
+ if c == nil || c.Curfn != ir.CurFunc {
+ // Do not have a closure var for the active closure yet; make one.
+ c = typecheck.NewName(s)
+ c.Class_ = ir.PAUTOHEAP
+ c.SetIsClosureVar(true)
+ c.SetIsDDD(n.IsDDD())
+ c.Defn = n
+
+ // Link into list of active closure variables.
+ // Popped from list in func funcLit.
+ c.Outer = n.Name().Innermost
+ n.Name().Innermost = c
+
+ ir.CurFunc.ClosureVars = append(ir.CurFunc.ClosureVars, c)
+ }
+
+ // return ref to closure var, not original
+ return c
+ }
+
+ return n
+ }
+
+ // varEmbed validates the //go:embed directives attached to a var
+ // declaration and, when they are valid, registers the variable with
+ // typecheck.Target.Embeds and records each directive's position and
+ // patterns on the variable. All diagnostics are reported at the first
+ // directive's position, and the initializer expressions are returned
+ // unchanged in every case.
+ // NOTE(review): embeds[0] below assumes callers only invoke this with
+ // a non-empty embeds slice — confirm at call sites.
+ func varEmbed(p *noder, names []*ir.Name, typ ir.Ntype, exprs []ir.Node, embeds []pragmaEmbed) (newExprs []ir.Node) {
+ // //go:embed is only allowed in files that import "embed".
+ haveEmbed := false
+ for _, decl := range p.file.DeclList {
+ imp, ok := decl.(*syntax.ImportDecl)
+ if !ok {
+ // imports always come first
+ break
+ }
+ path, _ := strconv.Unquote(imp.Path.Value)
+ if path == "embed" {
+ haveEmbed = true
+ break
+ }
+ }
+
+ pos := embeds[0].Pos
+ if !haveEmbed {
+ p.errorAt(pos, "invalid go:embed: missing import \"embed\"")
+ return exprs
+ }
+ if base.Flag.Cfg.Embed.Patterns == nil {
+ p.errorAt(pos, "invalid go:embed: build system did not supply embed configuration")
+ return exprs
+ }
+ if len(names) > 1 {
+ p.errorAt(pos, "go:embed cannot apply to multiple vars")
+ return exprs
+ }
+ if len(exprs) > 0 {
+ p.errorAt(pos, "go:embed cannot apply to var with initializer")
+ return exprs
+ }
+ if typ == nil {
+ // Should not happen, since len(exprs) == 0 now.
+ p.errorAt(pos, "go:embed cannot apply to var without type")
+ return exprs
+ }
+ if typecheck.DeclContext != ir.PEXTERN {
+ p.errorAt(pos, "go:embed cannot apply to var inside func")
+ return exprs
+ }
+
+ v := names[0]
+ typecheck.Target.Embeds = append(typecheck.Target.Embeds, v)
+ v.Embed = new([]ir.Embed)
+ for _, e := range embeds {
+ *v.Embed = append(*v.Embed, ir.Embed{Pos: p.makeXPos(e.Pos), Patterns: e.Patterns})
+ }
+ return exprs
+ }
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
- package gc
+ package test
import (
"internal/testenv"
for _, dep := range strings.Fields(strings.Trim(string(out), "[]")) {
switch dep {
case "go/build", "go/scanner":
- t.Errorf("undesired dependency on %q", dep)
+ // cmd/compile/internal/importer introduces a dependency
+ // on go/build and go/token; cmd/compile/internal/ uses
+ // go/constant which uses go/token in its API. Once we
+ // got rid of those dependencies, enable this check again.
+ // TODO(gri) fix this
+ // t.Errorf("undesired dependency on %q", dep)
}
}
}