}
}
- for _, n := range typecheck.Target.Externs {
- nm, ok := n.(*ir.Name)
- if !ok {
- continue
- }
+ for _, nm := range typecheck.Target.Externs {
s := nm.Sym()
switch s.Name {
case metaVarName:
// fixup. It adds calls to the pkg init function as appropriate to
// register coverage-related variables with the runtime.
func FixupInit(cnames Names) {
- for _, n := range typecheck.Target.Decls {
- if fn, ok := n.(*ir.Func); ok && ir.FuncName(fn) == "init" {
+ for _, fn := range typecheck.Target.Funcs {
+ if ir.FuncName(fn) == "init" {
cnames.InitFn = fn
break
}
loopDepth int
}
-func Funcs(all []ir.Node) {
+func Funcs(all []*ir.Func) {
ir.VisitFuncsBottomUp(all, Batch)
}
// We compute Addrtaken in bulk here.
// After this phase, we maintain Addrtaken incrementally.
if typecheck.DirtyAddrtaken {
- typecheck.ComputeAddrtaken(typecheck.Target.Decls)
+ typecheck.ComputeAddrtaken(typecheck.Target.Funcs)
typecheck.DirtyAddrtaken = false
}
typecheck.IncrementalAddrtaken = true
// TODO(prattmic): No need to use bottom-up visit order. This
// is mirroring the PGO IRGraph visit order, which also need
// not be bottom-up.
- ir.VisitFuncsBottomUp(typecheck.Target.Decls, func(list []*ir.Func, recursive bool) {
+ ir.VisitFuncsBottomUp(typecheck.Target.Funcs, func(list []*ir.Func, recursive bool) {
for _, fn := range list {
devirtualize.ProfileGuided(fn, profile)
}
// Devirtualize and get variable capture right in for loops
var transformed []loopvar.VarAndLoop
- for _, n := range typecheck.Target.Decls {
- if n.Op() == ir.ODCLFUNC {
- devirtualize.Static(n.(*ir.Func))
- transformed = append(transformed, loopvar.ForCapture(n.(*ir.Func))...)
- }
+ for _, n := range typecheck.Target.Funcs {
+ devirtualize.Static(n)
+ transformed = append(transformed, loopvar.ForCapture(n)...)
}
ir.CurFunc = nil
// Large values are also moved off stack in escape analysis;
// because large values may contain pointers, it must happen early.
base.Timer.Start("fe", "escapes")
- escape.Funcs(typecheck.Target.Decls)
+ escape.Funcs(typecheck.Target.Funcs)
loopvar.LogTransformations(transformed)
-// Don't use range--walk can add functions to Target.Decls.
+// Don't use range--walk can add functions to Target.Funcs.
base.Timer.Start("be", "compilefuncs")
fcount := int64(0)
- for i := 0; i < len(typecheck.Target.Decls); i++ {
- if fn, ok := typecheck.Target.Decls[i].(*ir.Func); ok {
- // Don't try compiling dead hidden closure.
- if fn.IsDeadcodeClosure() {
- continue
- }
- enqueueFunc(fn)
- fcount++
+ for i := 0; i < len(typecheck.Target.Funcs); i++ {
+ fn := typecheck.Target.Funcs[i]
+ // Don't try compiling dead hidden closure.
+ if fn.IsDeadcodeClosure() {
+ continue
}
+ enqueueFunc(fn)
+ fcount++
}
base.Timer.AddEvent(fcount, "funcs")
func dumpdata() {
numExterns := len(typecheck.Target.Externs)
- numDecls := len(typecheck.Target.Decls)
+ numDecls := len(typecheck.Target.Funcs)
dumpglobls(typecheck.Target.Externs)
reflectdata.CollectPTabs()
numExports := len(typecheck.Target.Exports)
// In the typical case, we loop 0 or 1 times.
// It was not until issue 24761 that we found any code that required a loop at all.
for {
- for i := numDecls; i < len(typecheck.Target.Decls); i++ {
- if n, ok := typecheck.Target.Decls[i].(*ir.Func); ok {
- enqueueFunc(n)
- }
+ for i := numDecls; i < len(typecheck.Target.Funcs); i++ {
+ fn := typecheck.Target.Funcs[i]
+ enqueueFunc(fn)
}
- numDecls = len(typecheck.Target.Decls)
+ numDecls = len(typecheck.Target.Funcs)
compileFunctions()
reflectdata.WriteRuntimeTypes()
- if numDecls == len(typecheck.Target.Decls) {
+ if numDecls == len(typecheck.Target.Funcs) {
break
}
}
base.Ctxt.DwarfGlobal(base.Ctxt.Pkgpath, types.TypeSymName(n.Type()), n.Linksym())
}
-func dumpGlobalConst(n ir.Node) {
+func dumpGlobalConst(n *ir.Name) {
// only export typed constants
t := n.Type()
if t == nil {
base.Ctxt.DwarfIntConst(base.Ctxt.Pkgpath, n.Sym().Name, types.TypeSymName(t), ir.IntVal(t, v))
}
-func dumpglobls(externs []ir.Node) {
+func dumpglobls(externs []*ir.Name) {
// add globals
for _, n := range externs {
switch n.Op() {
case ir.ONAME:
- dumpGlobal(n.(*ir.Name))
+ dumpGlobal(n)
case ir.OLITERAL:
dumpGlobalConst(n)
}
}
}
-func addsignats(dcls []ir.Node) {
+func addsignats(dcls []*ir.Name) {
// copy types from dcl list to signatset
for _, n := range dcls {
if n.Op() == ir.OTYPE {
)
// pgoInlinePrologue records the hot callsites from ir-graph.
-func pgoInlinePrologue(p *pgo.Profile, decls []ir.Node) {
+func pgoInlinePrologue(p *pgo.Profile, funcs []*ir.Func) {
if base.Debug.PGOInlineCDFThreshold != "" {
if s, err := strconv.ParseFloat(base.Debug.PGOInlineCDFThreshold, 64); err == nil && s >= 0 && s <= 100 {
inlineCDFHotCallSiteThresholdPercent = s
p = nil
}
- InlineDecls(p, typecheck.Target.Decls, true)
+ InlineDecls(p, typecheck.Target.Funcs, true)
// Perform a garbage collection of hidden closures functions that
// are no longer reachable from top-level functions following
}
// InlineDecls applies inlining to the given batch of declarations.
-func InlineDecls(p *pgo.Profile, decls []ir.Node, doInline bool) {
+func InlineDecls(p *pgo.Profile, funcs []*ir.Func, doInline bool) {
if p != nil {
- pgoInlinePrologue(p, decls)
+ pgoInlinePrologue(p, funcs)
}
doCanInline := func(n *ir.Func, recursive bool, numfns int) {
}
}
- ir.VisitFuncsBottomUp(decls, func(list []*ir.Func, recursive bool) {
+ ir.VisitFuncsBottomUp(funcs, func(list []*ir.Func, recursive bool) {
numfns := numNonClosures(list)
// We visit functions within an SCC in fairly arbitrary order,
// so by computing inlinability for all functions in the SCC
})
}
- for i := 0; i < len(typecheck.Target.Decls); i++ {
- if fn, ok := typecheck.Target.Decls[i].(*ir.Func); ok {
- if fn.IsHiddenClosure() {
- continue
- }
- markLiveFuncs(fn)
+ for i := 0; i < len(typecheck.Target.Funcs); i++ {
+ fn := typecheck.Target.Funcs[i]
+ if fn.IsHiddenClosure() {
+ continue
}
+ markLiveFuncs(fn)
}
- for i := 0; i < len(typecheck.Target.Decls); i++ {
- if fn, ok := typecheck.Target.Decls[i].(*ir.Func); ok {
- if !fn.IsHiddenClosure() {
- continue
- }
- if fn.IsDeadcodeClosure() {
- continue
- }
- if liveFuncs[fn] {
- continue
- }
- fn.SetIsDeadcodeClosure(true)
- if base.Flag.LowerM > 2 {
- fmt.Printf("%v: unreferenced closure %v marked as dead\n", ir.Line(fn), fn)
- }
- if fn.Inl != nil && fn.LSym == nil {
- ir.InitLSym(fn, true)
- }
+ for i := 0; i < len(typecheck.Target.Funcs); i++ {
+ fn := typecheck.Target.Funcs[i]
+ if !fn.IsHiddenClosure() {
+ continue
+ }
+ if fn.IsDeadcodeClosure() {
+ continue
+ }
+ if liveFuncs[fn] {
+ continue
+ }
+ fn.SetIsDeadcodeClosure(true)
+ if base.Flag.LowerM > 2 {
+ fmt.Printf("%v: unreferenced closure %v marked as dead\n", ir.Line(fn), fn)
+ }
+ if fn.Inl != nil && fn.LSym == nil {
+ ir.InitLSym(fn, true)
}
}
}
}
if pkg != nil {
- pkg.Decls = append(pkg.Decls, fn)
+ pkg.Funcs = append(pkg.Funcs, fn)
}
if false && IsTrivialClosure(clo) {
// Init functions, listed in source order.
Inits []*Func
- // Top-level declarations.
- Decls []Node
+ // Funcs contains all (instantiated) functions, methods, and
+ // function literals to be compiled.
+ Funcs []*Func
- // Extern (package global) declarations.
- Externs []Node
+ // Externs holds constants, (non-generic) types, and variables
+ // declared at package scope.
+ Externs []*Name
// Assembly function declarations.
Asms []*Name
// If recursive is false, the list consists of only a single function and its closures.
// If recursive is true, the list may still contain only a single function,
// if that function is itself recursive.
-func VisitFuncsBottomUp(list []Node, analyze func(list []*Func, recursive bool)) {
+func VisitFuncsBottomUp(list []*Func, analyze func(list []*Func, recursive bool)) {
var v bottomUpVisitor
v.analyze = analyze
v.nodeID = make(map[*Func]uint32)
for _, n := range list {
- if n.Op() == ODCLFUNC {
- n := n.(*Func)
- if !n.IsHiddenClosure() {
- v.visit(n)
- }
+ if !n.IsHiddenClosure() {
+ v.visit(n)
}
}
}
assign.Def = true
tmp.Defn = assign
- typecheck.Target.Decls = append(typecheck.Target.Decls, typecheck.Stmt(assign))
+ // TODO(mdempsky): This code doesn't work anymore, because we now
+ // rely on types2 to compute InitOrder. If it's going to be used
+ // for testing again, the assignment here probably needs to be
+ // added to typecheck.Target.InitOrder somewhere.
+ //
+ // Probably just easier to address the escape analysis limitation.
+ //
+ // typecheck.Target.Decls = append(typecheck.Target.Decls, typecheck.Stmt(assign))
return tmp
}
case declFunc:
names := r.pkgObjs(target)
assert(len(names) == 1)
- target.Decls = append(target.Decls, names[0].Func)
+ target.Funcs = append(target.Funcs, names[0].Func)
case declMethod:
typ := r.typ()
_, sym := r.selector()
method := typecheck.Lookdot1(nil, sym, typ, typ.Methods(), 0)
- target.Decls = append(target.Decls, method.Nname.(*ir.Name).Func)
+ target.Funcs = append(target.Funcs, method.Nname.(*ir.Name).Func)
case declVar:
names := r.pkgObjs(target)
// with the same information some other way.
fndcls := len(fn.Dcl)
- topdcls := len(typecheck.Target.Decls)
+ topdcls := len(typecheck.Target.Funcs)
tmpfn := ir.NewFunc(fn.Pos())
tmpfn.Nname = ir.NewNameAt(fn.Nname.Pos(), fn.Sym())
-// typecheck.Stmts may have added function literals to
-// typecheck.Target.Decls. Remove them again so we don't risk trying
-// to compile them multiple times.
+// typecheck.Stmts may have added function literals to
+// typecheck.Target.Funcs. Remove them again so we don't risk trying
+// to compile them multiple times.
- typecheck.Target.Decls = typecheck.Target.Decls[:topdcls]
+ typecheck.Target.Funcs = typecheck.Target.Funcs[:topdcls]
}
// usedLocals returns a set of local variables that are used within body.
}
})
- target.Decls = append(target.Decls, fn)
+ target.Funcs = append(target.Funcs, fn)
}
// newWrapperType returns a copy of the given signature type, but with
r := localPkgReader.newReader(pkgbits.RelocMeta, pkgbits.PrivateRootIdx, pkgbits.SyncPrivate)
r.pkgInit(types.LocalPkg, target)
- // Type-check any top-level assignments. We ignore non-assignments
- // here because other declarations are typechecked as they're
- // constructed.
- for i, ndecls := 0, len(target.Decls); i < ndecls; i++ {
- switch n := target.Decls[i]; n.Op() {
- case ir.OAS, ir.OAS2:
- target.Decls[i] = typecheck.Stmt(n)
- }
- }
-
readBodies(target, false)
// Check that nothing snuck past typechecking.
- for _, n := range target.Decls {
- if n.Typecheck() == 0 {
- base.FatalfAt(n.Pos(), "missed typecheck: %v", n)
+ for _, fn := range target.Funcs {
+ if fn.Typecheck() == 0 {
+ base.FatalfAt(fn.Pos(), "missed typecheck: %v", fn)
}
// For functions, check that at least their first statement (if
// any) was typechecked too.
- if fn, ok := n.(*ir.Func); ok && len(fn.Body) != 0 {
+ if len(fn.Body) != 0 {
if stmt := fn.Body[0]; stmt.Typecheck() == 0 {
base.FatalfAt(stmt.Pos(), "missed typecheck: %v", stmt)
}
// For functions originally came from package runtime,
// mark as norace to prevent instrumenting, see issue #60439.
- for _, n := range target.Decls {
- if fn, ok := n.(*ir.Func); ok {
- if !base.Flag.CompilingRuntime && types.IsRuntimePkg(fn.Sym().Pkg) {
- fn.Pragma |= ir.Norace
- }
+ for _, fn := range target.Funcs {
+ if !base.Flag.CompilingRuntime && types.IsRuntimePkg(fn.Sym().Pkg) {
+ fn.Pragma |= ir.Norace
}
}
// necessary on instantiations of imported generic functions, so their
// inlining costs can be computed.
func readBodies(target *ir.Package, duringInlining bool) {
- var inlDecls []ir.Node
+ var inlDecls []*ir.Func
// Don't use range--bodyIdx can add closures to todoBodies.
for {
if duringInlining && canSkipNonGenericMethod {
inlDecls = append(inlDecls, fn)
} else {
- target.Decls = append(target.Decls, fn)
+ target.Funcs = append(target.Funcs, fn)
}
}
base.Flag.LowerM = oldLowerM
for _, fn := range inlDecls {
- fn.(*ir.Func).Body = nil // free memory
+ fn.Body = nil // free memory
}
}
}
// of a package.
func (p *Profile) initializeIRGraph() {
// Bottomup walk over the function to create IRGraph.
- ir.VisitFuncsBottomUp(typecheck.Target.Decls, func(list []*ir.Func, recursive bool) {
+ ir.VisitFuncsBottomUp(typecheck.Target.Funcs, func(list []*ir.Func, recursive bool) {
for _, fn := range list {
p.VisitIR(fn)
}
// List of functions in this package.
funcs := make(map[string]struct{})
- ir.VisitFuncsBottomUp(typecheck.Target.Decls, func(list []*ir.Func, recursive bool) {
+ ir.VisitFuncsBottomUp(typecheck.Target.Funcs, func(list []*ir.Func, recursive bool) {
for _, f := range list {
name := ir.LinkFuncName(f)
funcs[name] = struct{}{}
}
}
// Print edges.
- ir.VisitFuncsBottomUp(typecheck.Target.Decls, func(list []*ir.Func, recursive bool) {
+ ir.VisitFuncsBottomUp(typecheck.Target.Funcs, func(list []*ir.Func, recursive bool) {
for _, f := range list {
name := ir.LinkFuncName(f)
if n, ok := p.WeightedCG.IRNodes[name]; ok {
ir.WithFunc(fn, func() {
typecheck.Stmts(nf)
})
- typecheck.Target.Decls = append(typecheck.Target.Decls, fn)
+ typecheck.Target.Funcs = append(typecheck.Target.Funcs, fn)
if base.Debug.WrapGlobalMapDbg > 1 {
fmt.Fprintf(os.Stderr, "=-= len(newfuncs) is %d for %v\n",
len(newfuncs), fn)
if base.Debug.WrapGlobalMapDbg > 1 {
fmt.Fprintf(os.Stderr, "=-= add to target.decls %v\n", nfn)
}
- typecheck.Target.Decls = append(typecheck.Target.Decls, ir.Node(nfn))
+ typecheck.Target.Funcs = append(typecheck.Target.Funcs, nfn)
}
// Prepend to Inits, so it runs first, before any user-declared init
typecheck.Stmts(fnInit.Body)
ir.CurFunc = nil
- typecheck.Target.Decls = append(typecheck.Target.Decls, fnInit)
+ typecheck.Target.Funcs = append(typecheck.Target.Funcs, fnInit)
typecheck.Target.Inits = append(typecheck.Target.Inits, fnInit)
}
}
})
fn.SetNilCheckDisabled(true)
- typecheck.Target.Decls = append(typecheck.Target.Decls, fn)
+ typecheck.Target.Funcs = append(typecheck.Target.Funcs, fn)
return fn
}
// neither of which can be nil, and our comparisons
// are shallow.
fn.SetNilCheckDisabled(true)
- typecheck.Target.Decls = append(typecheck.Target.Decls, fn)
+ typecheck.Target.Funcs = append(typecheck.Target.Funcs, fn)
return fn
}
// This may generate new decls for the wrappers, but we
// specifically *don't* want to visit those, lest we create
// wrappers for wrappers.
- for _, fn := range typecheck.Target.Decls {
- if fn.Op() != ir.ODCLFUNC {
- continue
- }
- fn := fn.(*ir.Func)
+ for _, fn := range typecheck.Target.Funcs {
nam := fn.Nname
if ir.IsBlank(nam) {
continue
ir.CurFunc = fn
typecheck.Stmts(fn.Body)
- typecheck.Target.Decls = append(typecheck.Target.Decls, fn)
+ typecheck.Target.Funcs = append(typecheck.Target.Funcs, fn)
// Restore previous context.
base.Pos = savepos
// important to handle it for this check, so we model it
// directly. This has to happen before transforming closures in walk since
// it's a lot harder to work out the argument after.
- for _, n := range typecheck.Target.Decls {
- if n.Op() != ir.ODCLFUNC {
- continue
- }
- c.curfn = n.(*ir.Func)
+ for _, n := range typecheck.Target.Funcs {
+ c.curfn = n
if c.curfn.ABIWrapper() {
// We only want "real" calls to these
// functions, not the generated ones within
// q is the queue of ODCLFUNC Nodes to visit in BFS order.
var q ir.NameQueue
- for _, n := range typecheck.Target.Decls {
- if n.Op() != ir.ODCLFUNC {
- continue
- }
- fn := n.(*ir.Func)
-
+ for _, fn := range typecheck.Target.Funcs {
symToFunc[fn.LSym] = fn
// Make nowritebarrierrec functions BFS roots.
// have not yet been marked as Addrtaken.
var DirtyAddrtaken = false
-func ComputeAddrtaken(top []ir.Node) {
- for _, n := range top {
- var doVisit func(n ir.Node)
- doVisit = func(n ir.Node) {
- if n.Op() == ir.OADDR {
- if x := ir.OuterValue(n.(*ir.AddrExpr).X); x.Op() == ir.ONAME {
- x.Name().SetAddrtaken(true)
- if x.Name().IsClosureVar() {
- // Mark the original variable as Addrtaken so that capturevars
- // knows not to pass it by value.
- x.Name().Defn.Name().SetAddrtaken(true)
- }
+func ComputeAddrtaken(funcs []*ir.Func) {
+ var doVisit func(n ir.Node)
+ doVisit = func(n ir.Node) {
+ if n.Op() == ir.OADDR {
+ if x := ir.OuterValue(n.(*ir.AddrExpr).X); x.Op() == ir.ONAME {
+ x.Name().SetAddrtaken(true)
+ if x.Name().IsClosureVar() {
+ // Mark the original variable as Addrtaken so that capturevars
+ // knows not to pass it by value.
+ x.Name().Defn.Name().SetAddrtaken(true)
}
}
- if n.Op() == ir.OCLOSURE {
- ir.VisitList(n.(*ir.ClosureExpr).Func.Body, doVisit)
- }
}
- ir.Visit(n, doVisit)
+ if n.Op() == ir.OCLOSURE {
+ ir.VisitList(n.(*ir.ClosureExpr).Func.Body, doVisit)
+ }
+ }
+
+ for _, fn := range funcs {
+ ir.Visit(fn, doVisit)
}
}