-// UNREVIEWED
// Copyright 2011 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
"errors"
"fmt"
"go/constant"
+ "internal/godebug"
+ "internal/goversion"
+ . "internal/types/errors"
)
+// nopos indicates an unknown position
var nopos syntax.Pos
// debugging/development support
-const debug = true // leave on during development
-
-// If forceStrict is set, the type-checker enforces additional
-// rules not specified by the Go 1 spec, but which will
-// catch guaranteed run-time errors if the respective
-// code is executed. In other words, programs passing in
-// strict mode are Go 1 compliant, but not all Go 1 programs
-// will pass in strict mode. The additional rules are:
-//
-// - A type assertion x.(T) where T is an interface type
-// is invalid if any (statically known) method that exists
-// for both x and T have different signatures.
-//
-const forceStrict = false
+const debug = false // leave on during development
-// If methodTypeParamsOk is set, type parameters are
-// permitted in method declarations (in interfaces, too).
-// Generalization and experimental feature.
-const methodTypeParamsOk = true
+// gotypesalias controls the use of Alias types.
+var gotypesalias = godebug.New("#gotypesalias")
// exprInfo stores information about an untyped expression.
type exprInfo struct {
	// NOTE(review): recordUntyped below also reads info.mode and info.typ;
	// those fields are not visible in this view — verify against the full file.
	val constant.Value // constant value; or nil (if not a constant)
}
-// A context represents the context within which an object is type-checked.
-type context struct {
+// An environment represents the environment within which an object is
+// type-checked.
+type environment struct {
decl *declInfo // package-level declaration whose init expression/function body is checked
scope *Scope // top-most scope for lookups
pos syntax.Pos // if valid, identifiers are looked up as if at position pos (used by Eval)
iota constant.Value // value of iota in a constant declaration; nil otherwise
errpos syntax.Pos // if valid, identifier position of a constant with inherited initializer
+ inTParamList bool // set if inside a type parameter list
sig *Signature // function signature if inside a function; nil otherwise
isPanic map[*syntax.CallExpr]bool // set of panic call expressions (used for termination check)
hasLabel bool // set if a function makes use of labels (only ~1% of functions); unused outside functions
hasCallOrRecv bool // set if an expression contains a function call or channel receive operation
}
-// lookup looks up name in the current context and returns the matching object, or nil.
-func (ctxt *context) lookup(name string) Object {
- _, obj := ctxt.scope.LookupParent(name, ctxt.pos)
+// lookup looks up name in the current environment and returns the matching object, or nil.
+func (env *environment) lookup(name string) Object {
+ _, obj := env.scope.LookupParent(name, env.pos)
return obj
}
path, dir string
}
+// A dotImportKey describes a dot-imported object in the given scope.
+type dotImportKey struct {
+ scope *Scope
+ name string
+}
+
+// An action describes a (delayed) action.
+type action struct {
+ f func() // action to be executed
+ desc *actionDesc // action description; may be nil, requires debug to be set
+}
+
+// If debug is set, describef sets a printf-formatted description for action a.
+// Otherwise, it is a no-op.
+func (a *action) describef(pos poser, format string, args ...interface{}) {
+ if debug {
+ a.desc = &actionDesc{pos, format, args}
+ }
+}
+
+// An actionDesc provides information on an action.
+// For debugging only.
+type actionDesc struct {
+ pos poser
+ format string
+ args []interface{}
+}
+
// A Checker maintains the state of the type checker.
// It must be created with NewChecker.
type Checker struct {
// package information
// (initialized by NewChecker, valid for the life-time of checker)
+
+ // If enableAlias is set, alias declarations produce an Alias type.
+ // Otherwise the alias information is only in the type name, which
+ // points directly to the actual (aliased) type.
+ enableAlias bool
+
conf *Config
+ ctxt *Context // context for de-duplicating instances
pkg *Package
*Info
- nextId uint64 // unique Id for type parameters (first valid Id is 1)
- objMap map[Object]*declInfo // maps package-level objects and (non-interface) methods to declaration info
- impMap map[importKey]*Package // maps (import path, source directory) to (complete or fake) package
- posMap map[*Interface][]syntax.Pos // maps interface types to lists of embedded interface positions
- typMap map[string]*Named // maps an instantiated named type hash to a *Named type
- pkgCnt map[string]int // counts number of imported packages with a given name (for better error messages)
+ version version // accepted language version
+ posVers map[*syntax.PosBase]version // maps file PosBases to versions (may be nil)
+ nextID uint64 // unique Id for type parameters (first valid Id is 1)
+ objMap map[Object]*declInfo // maps package-level objects and (non-interface) methods to declaration info
+ impMap map[importKey]*Package // maps (import path, source directory) to (complete or fake) package
+ valids instanceLookup // valid *Named (incl. instantiated) types per the validType check
+
+ // pkgPathMap maps package names to the set of distinct import paths we've
+ // seen for that name, anywhere in the import graph. It is used for
+ // disambiguating package names in error messages.
+ //
+ // pkgPathMap is allocated lazily, so that we don't pay the price of building
+ // it on the happy path. seenPkgMap tracks the packages that we've already
+ // walked.
+ pkgPathMap map[string]map[string]bool
+ seenPkgMap map[*Package]bool
// information collected during type-checking of a set of package files
// (initialized by Files, valid only for the duration of check.Files;
// maps and lists are allocated on demand)
- files []*syntax.File // package files
- unusedDotImports map[*Scope]map[*Package]syntax.Pos // positions of unused dot-imported packages for each file scope
+ files []*syntax.File // list of package files
+ imports []*PkgName // list of imported packages
+ dotImportMap map[dotImportKey]*PkgName // maps dot-imported objects to the package they were dot-imported through
+ recvTParamMap map[*syntax.Name]*TypeParam // maps blank receiver type parameters to their type
+ brokenAliases map[*TypeName]bool // set of aliases with broken (not yet determined) types
+ unionTypeSets map[*Union]*_TypeSet // computed type sets for union types
+ mono monoGraph // graph for detecting non-monomorphizable instantiation loops
firstErr error // first error encountered
methods map[*TypeName][]*Func // maps package scope type names to associated non-blank (non-interface) methods
untyped map[syntax.Expr]exprInfo // map of expressions without final type
- delayed []func() // stack of delayed action segments; segments are processed in FIFO order
- finals []func() // list of final actions; processed at the end of type-checking the current set of files
+ delayed []action // stack of delayed action segments; segments are processed in FIFO order
objPath []Object // path of object dependencies during type inference (for cycle reporting)
+ cleaners []cleaner // list of types that may need a final cleanup at the end of type-checking
- // context within which the current object is type-checked
- // (valid only for the duration of type-checking a specific object)
- context
+ // environment within which the current object is type-checked (valid only
+ // for the duration of type-checking a specific object)
+ environment
// debugging
indent int // indentation for tracing
}
-// addUnusedImport adds the position of a dot-imported package
-// pkg to the map of dot imports for the given file scope.
-func (check *Checker) addUnusedDotImport(scope *Scope, pkg *Package, pos syntax.Pos) {
- mm := check.unusedDotImports
- if mm == nil {
- mm = make(map[*Scope]map[*Package]syntax.Pos)
- check.unusedDotImports = mm
- }
- m := mm[scope]
- if m == nil {
- m = make(map[*Package]syntax.Pos)
- mm[scope] = m
- }
- m[pkg] = pos
-}
-
// addDeclDep adds the dependency edge (check.decl -> to) if check.decl exists
func (check *Checker) addDeclDep(to Object) {
from := check.decl
from.addDep(to)
}
+// Note: The following three alias-related functions are only used
+// when Alias types are not enabled.
+
+// brokenAlias records that alias doesn't have a determined type yet.
+// It also sets alias.typ to Typ[Invalid].
+// Not used if check.enableAlias is set.
+func (check *Checker) brokenAlias(alias *TypeName) {
+ assert(!check.enableAlias)
+ if check.brokenAliases == nil {
+ check.brokenAliases = make(map[*TypeName]bool)
+ }
+ check.brokenAliases[alias] = true
+ alias.typ = Typ[Invalid]
+}
+
+// validAlias records that alias has the valid type typ (possibly Typ[Invalid]).
+func (check *Checker) validAlias(alias *TypeName, typ Type) {
+ assert(!check.enableAlias)
+ delete(check.brokenAliases, alias)
+ alias.typ = typ
+}
+
+// isBrokenAlias reports whether alias doesn't have a determined type yet.
+func (check *Checker) isBrokenAlias(alias *TypeName) bool {
+ assert(!check.enableAlias)
+ return check.brokenAliases[alias]
+}
+
func (check *Checker) rememberUntyped(e syntax.Expr, lhs bool, mode operandMode, typ *Basic, val constant.Value) {
m := check.untyped
if m == nil {
// either at the end of the current statement, or in case of a local constant
// or variable declaration, before the constant or variable is in scope
// (so that f still sees the scope before any new declarations).
-func (check *Checker) later(f func()) {
- check.delayed = append(check.delayed, f)
-}
-
-// atEnd adds f to the list of actions processed at the end
-// of type-checking, before initialization order computation.
-// Actions added by atEnd are processed after any actions
-// added by later.
-func (check *Checker) atEnd(f func()) {
- check.finals = append(check.finals, f)
+// later returns the pushed action so one can provide a description
+// via action.describef for debugging, if desired.
+func (check *Checker) later(f func()) *action {
+ i := len(check.delayed)
+ check.delayed = append(check.delayed, action{f: f})
+ return &check.delayed[i]
}
// push pushes obj onto the object path and returns its index in the path.
return obj
}
// A cleaner is an object/type that requires a final cleanup
// at the end of type-checking.
type cleaner interface {
	cleanup()
}
+
+// needsCleanup records objects/types that implement the cleanup method
+// which will be called at the end of type-checking.
+func (check *Checker) needsCleanup(c cleaner) {
+ check.cleaners = append(check.cleaners, c)
+}
+
// NewChecker returns a new Checker instance for a given package.
// Package files may be added incrementally via checker.Files.
func NewChecker(conf *Config, pkg *Package, info *Info) *Checker {
info = new(Info)
}
+ // Note: clients may call NewChecker with the Unsafe package, which is
+ // globally shared and must not be mutated. Therefore NewChecker must not
+ // mutate *pkg.
+ //
+ // (previously, pkg.goVersion was mutated here: go.dev/issue/61212)
+
return &Checker{
- conf: conf,
- pkg: pkg,
- Info: info,
- nextId: 1,
- objMap: make(map[Object]*declInfo),
- impMap: make(map[importKey]*Package),
- posMap: make(map[*Interface][]syntax.Pos),
- typMap: make(map[string]*Named),
- pkgCnt: make(map[string]int),
+ enableAlias: gotypesalias.Value() == "1",
+ conf: conf,
+ ctxt: conf.Context,
+ pkg: pkg,
+ Info: info,
+ objMap: make(map[Object]*declInfo),
+ impMap: make(map[importKey]*Package),
}
}
func (check *Checker) initFiles(files []*syntax.File) {
// start with a clean slate (check.Files may be called multiple times)
check.files = nil
- check.unusedDotImports = nil
+ check.imports = nil
+ check.dotImportMap = nil
check.firstErr = nil
check.methods = nil
check.untyped = nil
check.delayed = nil
- check.finals = nil
+ check.objPath = nil
+ check.cleaners = nil
// determine package name and collect valid files
pkg := check.pkg
if name != "_" {
pkg.name = name
} else {
- check.errorf(file.PkgName, "invalid package name _")
+ check.error(file.PkgName, BlankPkgName, "invalid package name _")
}
fallthrough
check.files = append(check.files, file)
default:
- check.errorf(file, "package %s; expected %s", name, pkg.name)
+ check.errorf(file, MismatchedPkgName, "package %s; expected %s", name, pkg.name)
// ignore this file
}
}
+
+ for _, file := range check.files {
+ fbase := base(file.Pos()) // fbase may be nil for tests
+ check.recordFileVersion(fbase, check.conf.GoVersion) // record package version (possibly zero version)
+ v, _ := parseGoVersion(file.GoVersion)
+ if v.major > 0 {
+ if v.equal(check.version) {
+ continue
+ }
+ // Go 1.21 introduced the feature of setting the go.mod
+ // go line to an early version of Go and allowing //go:build lines
+ // to “upgrade” the Go version in a given file.
+ // We can do that backwards compatibly.
+ // Go 1.21 also introduced the feature of allowing //go:build lines
+ // to “downgrade” the Go version in a given file.
+ // That can't be done compatibly in general, since before the
+ // build lines were ignored and code got the module's Go version.
+ // To work around this, downgrades are only allowed when the
+ // module's Go version is Go 1.21 or later.
+ // If there is no check.version, then we don't really know what Go version to apply.
+ // Legacy tools may do this, and they historically have accepted everything.
+ // Preserve that behavior by ignoring //go:build constraints entirely in that case.
+ if (v.before(check.version) && check.version.before(go1_21)) || check.version.equal(go0_0) {
+ continue
+ }
+ if check.posVers == nil {
+ check.posVers = make(map[*syntax.PosBase]version)
+ }
+ check.posVers[fbase] = v
+ check.recordFileVersion(fbase, file.GoVersion) // overwrite package version
+ }
+ }
}
// A bailout panic is used for early termination.
// errBadCgo is returned by checkFiles when the mutually exclusive
// FakeImportC and go115UsesCgo configuration flags are both set.
var errBadCgo = errors.New("cannot use FakeImportC and go115UsesCgo together")
func (check *Checker) checkFiles(files []*syntax.File) (err error) {
+ if check.pkg == Unsafe {
+ // Defensive handling for Unsafe, which cannot be type checked, and must
+ // not be mutated. See https://go.dev/issue/61212 for an example of where
+ // Unsafe is passed to NewChecker.
+ return nil
+ }
+
+ // Note: parseGoVersion and the subsequent checks should happen once,
+ // when we create a new Checker, not for each batch of files.
+ // We can't change it at this point because NewChecker doesn't
+ // return an error.
+ check.version, err = parseGoVersion(check.conf.GoVersion)
+ if err != nil {
+ return err
+ }
+ if check.version.after(version{1, goversion.Version}) {
+ return fmt.Errorf("package requires newer Go version %v", check.version)
+ }
if check.conf.FakeImportC && check.conf.go115UsesCgo {
return errBadCgo
}
print := func(msg string) {
if check.conf.Trace {
+ fmt.Println()
fmt.Println(msg)
}
}
print("== processDelayed ==")
check.processDelayed(0) // incl. all functions
- check.processFinals()
+
+ print("== cleanup ==")
+ check.cleanup()
print("== initOrder ==")
check.initOrder()
print("== recordUntyped ==")
check.recordUntyped()
- if check.Info != nil {
- print("== sanitizeInfo ==")
- sanitizeInfo(check.Info)
+ if check.firstErr == nil {
+ // TODO(mdempsky): Ensure monomorph is safe when errors exist.
+ check.monomorph()
}
+ check.pkg.goVersion = check.conf.GoVersion
check.pkg.complete = true
+
+ // no longer needed - release memory
+ check.imports = nil
+ check.dotImportMap = nil
+ check.pkgPathMap = nil
+ check.seenPkgMap = nil
+ check.recvTParamMap = nil
+ check.brokenAliases = nil
+ check.unionTypeSets = nil
+ check.ctxt = nil
+
+ // TODO(gri) There's more memory we should release at this point.
+
return
}
// add more actions (such as nested functions), so
// this is a sufficiently bounded process.
for i := top; i < len(check.delayed); i++ {
- check.delayed[i]() // may append to check.delayed
+ a := &check.delayed[i]
+ if check.conf.Trace {
+ if a.desc != nil {
+ check.trace(a.desc.pos.Pos(), "-- "+a.desc.format, a.desc.args...)
+ } else {
+ check.trace(nopos, "-- delayed %p", a.f)
+ }
+ }
+ a.f() // may append to check.delayed
+ if check.conf.Trace {
+ fmt.Println()
+ }
}
assert(top <= len(check.delayed)) // stack must not have shrunk
check.delayed = check.delayed[:top]
}
-func (check *Checker) processFinals() {
- n := len(check.finals)
- for _, f := range check.finals {
- f() // must not append to check.finals
+// cleanup runs cleanup for all collected cleaners.
+func (check *Checker) cleanup() {
+ // Don't use a range clause since Named.cleanup may add more cleaners.
+ for i := 0; i < len(check.cleaners); i++ {
+ check.cleaners[i].cleanup()
+ }
+ check.cleaners = nil
+}
+
+func (check *Checker) record(x *operand) {
+ // convert x into a user-friendly set of values
+ // TODO(gri) this code can be simplified
+ var typ Type
+ var val constant.Value
+ switch x.mode {
+ case invalid:
+ typ = Typ[Invalid]
+ case novalue:
+ typ = (*Tuple)(nil)
+ case constant_:
+ typ = x.typ
+ val = x.val
+ default:
+ typ = x.typ
}
- if len(check.finals) != n {
- panic("internal error: final action list grew")
+ assert(x.expr != nil && typ != nil)
+
+ if isUntyped(typ) {
+ // delay type and value recording until we know the type
+ // or until the end of type checking
+ check.rememberUntyped(x.expr, false, x.mode, typ.(*Basic), val)
+ } else {
+ check.recordTypeAndValue(x.expr, x.mode, typ, val)
}
}
func (check *Checker) recordUntyped() {
- if !debug && check.Types == nil {
+ if !debug && !check.recordTypes() {
return // nothing to do
}
for x, info := range check.untyped {
if debug && isTyped(info.typ) {
- check.dump("%v: %s (type %s) is typed", posFor(x), x, info.typ)
+ check.dump("%v: %s (type %s) is typed", atPos(x), x, info.typ)
unreachable()
}
check.recordTypeAndValue(x, info.mode, info.typ, info.val)
}
if mode == constant_ {
assert(val != nil)
- assert(typ == Typ[Invalid] || isConstType(typ))
+ // We check allBasic(typ, IsConstType) here as constant expressions may be
+ // recorded as type parameters.
+ assert(!isValid(typ) || allBasic(typ, IsConstType))
}
if m := check.Types; m != nil {
m[x] = TypeAndValue{mode, typ, val}
}
+ if check.StoreTypesInSyntax {
+ tv := TypeAndValue{mode, typ, val}
+ stv := syntax.TypeAndValue{Type: typ, Value: val}
+ if tv.IsVoid() {
+ stv.SetIsVoid()
+ }
+ if tv.IsType() {
+ stv.SetIsType()
+ }
+ if tv.IsBuiltin() {
+ stv.SetIsBuiltin()
+ }
+ if tv.IsValue() {
+ stv.SetIsValue()
+ }
+ if tv.IsNil() {
+ stv.SetIsNil()
+ }
+ if tv.Addressable() {
+ stv.SetAddressable()
+ }
+ if tv.Assignable() {
+ stv.SetAssignable()
+ }
+ if tv.HasOk() {
+ stv.SetHasOk()
+ }
+ x.SetTypeInfo(stv)
+ }
}
func (check *Checker) recordBuiltinType(f syntax.Expr, sig *Signature) {
- // f must be a (possibly parenthesized) identifier denoting a built-in
- // (built-ins in package unsafe always produce a constant result and
- // we don't record their signatures, so we don't see qualified idents
- // here): record the signature for f and possible children.
+ // f must be a (possibly parenthesized, possibly qualified)
+ // identifier denoting a built-in (including unsafe's non-constant
+ // functions Add and Slice): record the signature for f and possible
+ // children.
for {
check.recordTypeAndValue(f, builtin, sig, nil)
switch p := f.(type) {
- case *syntax.Name:
+ case *syntax.Name, *syntax.SelectorExpr:
return // we're done
case *syntax.ParenExpr:
f = p.X
}
}
-func (check *Checker) recordCommaOkTypes(x syntax.Expr, a [2]Type) {
+// recordCommaOkTypes updates recorded types to reflect that x is used in a commaOk context
+// (and therefore has tuple type).
+func (check *Checker) recordCommaOkTypes(x syntax.Expr, a []*operand) {
assert(x != nil)
- if a[0] == nil || a[1] == nil {
+ assert(len(a) == 2)
+ if a[0].mode == invalid {
return
}
- assert(isTyped(a[0]) && isTyped(a[1]) && (isBoolean(a[1]) || a[1] == universeError))
+ t0, t1 := a[0].typ, a[1].typ
+ assert(isTyped(t0) && isTyped(t1) && (isBoolean(t1) || t1 == universeError))
if m := check.Types; m != nil {
for {
tv := m[x]
assert(tv.Type != nil) // should have been recorded already
pos := x.Pos()
tv.Type = NewTuple(
- NewVar(pos, check.pkg, "", a[0]),
- NewVar(pos, check.pkg, "", a[1]),
+ NewVar(pos, check.pkg, "", t0),
+ NewVar(pos, check.pkg, "", t1),
)
m[x] = tv
// if x is a parenthesized expression (p.X), update p.X
x = p.X
}
}
+ if check.StoreTypesInSyntax {
+ // Note: this loop is duplicated because the type of tv is different.
+ // Above it is types2.TypeAndValue, here it is syntax.TypeAndValue.
+ for {
+ tv := x.GetTypeInfo()
+ assert(tv.Type != nil) // should have been recorded already
+ pos := x.Pos()
+ tv.Type = NewTuple(
+ NewVar(pos, check.pkg, "", t0),
+ NewVar(pos, check.pkg, "", t1),
+ )
+ x.SetTypeInfo(tv)
+ p, _ := x.(*syntax.ParenExpr)
+ if p == nil {
+ break
+ }
+ x = p.X
+ }
+ }
+}
+
+// recordInstance records instantiation information into check.Info, if the
+// Instances map is non-nil. The given expr must be an ident, selector, or
+// index (list) expr with ident or selector operand.
+//
+// TODO(rfindley): the expr parameter is fragile. See if we can access the
+// instantiated identifier in some other way.
+func (check *Checker) recordInstance(expr syntax.Expr, targs []Type, typ Type) {
+ ident := instantiatedIdent(expr)
+ assert(ident != nil)
+ assert(typ != nil)
+ if m := check.Instances; m != nil {
+ m[ident] = Instance{newTypeList(targs), typ}
+ }
}
-func (check *Checker) recordInferred(call syntax.Expr, targs []Type, sig *Signature) {
- assert(call != nil)
- assert(sig != nil)
- if m := check.Inferred; m != nil {
- m[call] = Inferred{targs, sig}
+func instantiatedIdent(expr syntax.Expr) *syntax.Name {
+ var selOrIdent syntax.Expr
+ switch e := expr.(type) {
+ case *syntax.IndexExpr:
+ selOrIdent = e.X
+ case *syntax.SelectorExpr, *syntax.Name:
+ selOrIdent = e
+ }
+ switch x := selOrIdent.(type) {
+ case *syntax.Name:
+ return x
+ case *syntax.SelectorExpr:
+ return x.Sel
}
+ panic("instantiated ident not found")
}
func (check *Checker) recordDef(id *syntax.Name, obj Object) {
m[node] = scope
}
}
+
+func (check *Checker) recordFileVersion(fbase *syntax.PosBase, version string) {
+ if m := check.FileVersions; m != nil {
+ m[fbase] = version
+ }
+}