Diffstat (limited to 'vendor/golang.org/x/tools/go/ssa')
28 files changed, 12887 insertions, 0 deletions
diff --git a/vendor/golang.org/x/tools/go/ssa/TODO b/vendor/golang.org/x/tools/go/ssa/TODO new file mode 100644 index 0000000..6c35253 --- /dev/null +++ b/vendor/golang.org/x/tools/go/ssa/TODO @@ -0,0 +1,16 @@ +-*- text -*- + +SSA Generics to-do list +=========================== + +DOCUMENTATION: +- Read me for internals + +TYPE PARAMETERIZED GENERIC FUNCTIONS: +- sanity.go updates. +- Check source functions going to generics. +- Tests, tests, tests... + +USAGE: +- Back fill users for handling ssa.InstantiateGenerics being off. + diff --git a/vendor/golang.org/x/tools/go/ssa/block.go b/vendor/golang.org/x/tools/go/ssa/block.go new file mode 100644 index 0000000..28170c7 --- /dev/null +++ b/vendor/golang.org/x/tools/go/ssa/block.go @@ -0,0 +1,113 @@ +// Copyright 2022 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package ssa + +import "fmt" + +// This file implements the BasicBlock type. + +// addEdge adds a control-flow graph edge from from to to. +func addEdge(from, to *BasicBlock) { + from.Succs = append(from.Succs, to) + to.Preds = append(to.Preds, from) +} + +// Parent returns the function that contains block b. +func (b *BasicBlock) Parent() *Function { return b.parent } + +// String returns a human-readable label of this block. +// It is not guaranteed unique within the function. +func (b *BasicBlock) String() string { + return fmt.Sprintf("%d", b.Index) +} + +// emit appends an instruction to the current basic block. +// If the instruction defines a Value, it is returned. +func (b *BasicBlock) emit(i Instruction) Value { + i.setBlock(b) + b.Instrs = append(b.Instrs, i) + v, _ := i.(Value) + return v +} + +// predIndex returns the i such that b.Preds[i] == c or panics if +// there is none. +func (b *BasicBlock) predIndex(c *BasicBlock) int { + for i, pred := range b.Preds { + if pred == c { + return i + } + } + panic(fmt.Sprintf("no edge %s -> %s", c, b)) +} + +// hasPhi returns true if b.Instrs contains φ-nodes. +func (b *BasicBlock) hasPhi() bool { + _, ok := b.Instrs[0].(*Phi) + return ok +} + +// phis returns the prefix of b.Instrs containing all the block's φ-nodes. +func (b *BasicBlock) phis() []Instruction { + for i, instr := range b.Instrs { + if _, ok := instr.(*Phi); !ok { + return b.Instrs[:i] + } + } + return nil // unreachable in well-formed blocks +} + +// replacePred replaces all occurrences of p in b's predecessor list with q. +// Ordinarily there should be at most one. +func (b *BasicBlock) replacePred(p, q *BasicBlock) { + for i, pred := range b.Preds { + if pred == p { + b.Preds[i] = q + } + } +} + +// replaceSucc replaces all occurrences of p in b's successor list with q. +// Ordinarily there should be at most one. +func (b *BasicBlock) replaceSucc(p, q *BasicBlock) { + for i, succ := range b.Succs { + if succ == p { + b.Succs[i] = q + } + } +} + +// removePred removes all occurrences of p in b's +// predecessor list and φ-nodes. +// Ordinarily there should be at most one. +func (b *BasicBlock) removePred(p *BasicBlock) { + phis := b.phis() + + // We must preserve edge order for φ-nodes. + j := 0 + for i, pred := range b.Preds { + if pred != p { + b.Preds[j] = b.Preds[i] + // Strike out φ-edge too. + for _, instr := range phis { + phi := instr.(*Phi) + phi.Edges[j] = phi.Edges[i] + } + j++ + } + } + // Nil out b.Preds[j:] and φ-edges[j:] to aid GC. 
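// (Editorial note, not part of the vendored source: the compaction above
// relies on the invariant that the i'th operand of every φ-node in b
// corresponds to b.Preds[i]. For example, with Preds = [p, q], a node
// φ(x, y) yields x when control arrives from p and y when it arrives from
// q, so Preds and each phi.Edges must be filtered with the same index j to
// stay in sync.)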
+ for i := j; i < len(b.Preds); i++ { + b.Preds[i] = nil + for _, instr := range phis { + instr.(*Phi).Edges[i] = nil + } + } + b.Preds = b.Preds[:j] + for _, instr := range phis { + phi := instr.(*Phi) + phi.Edges = phi.Edges[:j] + } +} diff --git a/vendor/golang.org/x/tools/go/ssa/blockopt.go b/vendor/golang.org/x/tools/go/ssa/blockopt.go new file mode 100644 index 0000000..7dabce8 --- /dev/null +++ b/vendor/golang.org/x/tools/go/ssa/blockopt.go @@ -0,0 +1,183 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package ssa + +// Simple block optimizations to simplify the control flow graph. + +// TODO(adonovan): opt: instead of creating several "unreachable" blocks +// per function in the Builder, reuse a single one (e.g. at Blocks[1]) +// to reduce garbage. + +import ( + "fmt" + "os" +) + +// If true, perform sanity checking and show progress at each +// successive iteration of optimizeBlocks. Very verbose. +const debugBlockOpt = false + +// markReachable sets Index=-1 for all blocks reachable from b. +func markReachable(b *BasicBlock) { + b.Index = -1 + for _, succ := range b.Succs { + if succ.Index == 0 { + markReachable(succ) + } + } +} + +// deleteUnreachableBlocks marks all reachable blocks of f and +// eliminates (nils) all others, including possibly cyclic subgraphs. +func deleteUnreachableBlocks(f *Function) { + const white, black = 0, -1 + // We borrow b.Index temporarily as the mark bit. + for _, b := range f.Blocks { + b.Index = white + } + markReachable(f.Blocks[0]) + if f.Recover != nil { + markReachable(f.Recover) + } + for i, b := range f.Blocks { + if b.Index == white { + for _, c := range b.Succs { + if c.Index == black { + c.removePred(b) // delete white->black edge + } + } + if debugBlockOpt { + fmt.Fprintln(os.Stderr, "unreachable", b) + } + f.Blocks[i] = nil // delete b + } + } + f.removeNilBlocks() +} + +// jumpThreading attempts to apply simple jump-threading to block b, +// in which a->b->c become a->c if b is just a Jump. +// The result is true if the optimization was applied. +func jumpThreading(f *Function, b *BasicBlock) bool { + if b.Index == 0 { + return false // don't apply to entry block + } + if b.Instrs == nil { + return false + } + if _, ok := b.Instrs[0].(*Jump); !ok { + return false // not just a jump + } + c := b.Succs[0] + if c == b { + return false // don't apply to degenerate jump-to-self. + } + if c.hasPhi() { + return false // not sound without more effort + } + for j, a := range b.Preds { + a.replaceSucc(b, c) + + // If a now has two edges to c, replace its degenerate If by Jump. + if len(a.Succs) == 2 && a.Succs[0] == c && a.Succs[1] == c { + jump := new(Jump) + jump.setBlock(a) + a.Instrs[len(a.Instrs)-1] = jump + a.Succs = a.Succs[:1] + c.removePred(b) + } else { + if j == 0 { + c.replacePred(b, a) + } else { + c.Preds = append(c.Preds, a) + } + } + + if debugBlockOpt { + fmt.Fprintln(os.Stderr, "jumpThreading", a, b, c) + } + } + f.Blocks[b.Index] = nil // delete b + return true +} + +// fuseBlocks attempts to apply the block fusion optimization to block +// a, in which a->b becomes ab if len(a.Succs)==len(b.Preds)==1. +// The result is true if the optimization was applied. +func fuseBlocks(f *Function, a *BasicBlock) bool { + if len(a.Succs) != 1 { + return false + } + b := a.Succs[0] + if len(b.Preds) != 1 { + return false + } + + // Degenerate &&/|| ops may result in a straight-line CFG + // containing φ-nodes. 
(Ideally we'd replace such them with + // their sole operand but that requires Referrers, built later.) + if b.hasPhi() { + return false // not sound without further effort + } + + // Eliminate jump at end of A, then copy all of B across. + a.Instrs = append(a.Instrs[:len(a.Instrs)-1], b.Instrs...) + for _, instr := range b.Instrs { + instr.setBlock(a) + } + + // A inherits B's successors + a.Succs = append(a.succs2[:0], b.Succs...) + + // Fix up Preds links of all successors of B. + for _, c := range b.Succs { + c.replacePred(b, a) + } + + if debugBlockOpt { + fmt.Fprintln(os.Stderr, "fuseBlocks", a, b) + } + + f.Blocks[b.Index] = nil // delete b + return true +} + +// optimizeBlocks() performs some simple block optimizations on a +// completed function: dead block elimination, block fusion, jump +// threading. +func optimizeBlocks(f *Function) { + deleteUnreachableBlocks(f) + + // Loop until no further progress. + changed := true + for changed { + changed = false + + if debugBlockOpt { + f.WriteTo(os.Stderr) + mustSanityCheck(f, nil) + } + + for _, b := range f.Blocks { + // f.Blocks will temporarily contain nils to indicate + // deleted blocks; we remove them at the end. + if b == nil { + continue + } + + // Fuse blocks. b->c becomes bc. + if fuseBlocks(f, b) { + changed = true + } + + // a->b->c becomes a->c if b contains only a Jump. + if jumpThreading(f, b) { + changed = true + continue // (b was disconnected) + } + } + } + f.removeNilBlocks() +} diff --git a/vendor/golang.org/x/tools/go/ssa/builder.go b/vendor/golang.org/x/tools/go/ssa/builder.go new file mode 100644 index 0000000..55943e4 --- /dev/null +++ b/vendor/golang.org/x/tools/go/ssa/builder.go @@ -0,0 +1,3276 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package ssa + +// This file defines the builder, which builds SSA-form IR for function bodies. +// +// SSA construction has two phases, "create" and "build". First, one +// or more packages are created in any order by a sequence of calls to +// CreatePackage, either from syntax or from mere type information. +// Each created package has a complete set of Members (const, var, +// type, func) that can be accessed through methods like +// Program.FuncValue. +// +// It is not necessary to call CreatePackage for all dependencies of +// each syntax package, only for its direct imports. (In future +// perhaps even this restriction may be lifted.) +// +// Second, packages created from syntax are built, by one or more +// calls to Package.Build, which may be concurrent; or by a call to +// Program.Build, which builds all packages in parallel. Building +// traverses the type-annotated syntax tree of each function body and +// creates SSA-form IR, a control-flow graph of instructions, +// populating fields such as Function.Body, .Params, and others. +// +// Building may create additional methods, including: +// - wrapper methods (e.g. for embeddding, or implicit &recv) +// - bound method closures (e.g. for use(recv.f)) +// - thunks (e.g. for use(I.f) or use(T.f)) +// - generic instances (e.g. to produce f[int] from f[any]). +// As these methods are created, they are added to the build queue, +// and then processed in turn, until a fixed point is reached, +// Since these methods might belong to packages that were not +// created (by a call to CreatePackage), their Pkg field is unset. 
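An editorial aside on the create/build split described in the comment above: the sketch below is not part of this diff. It assumes the golang.org/x/tools/go/packages loader and the go/ssa/ssautil helper, and shows one plausible driver that creates SSA packages and then builds every function body, instantiating generics.

	package main

	import (
		"golang.org/x/tools/go/packages"
		"golang.org/x/tools/go/ssa"
		"golang.org/x/tools/go/ssa/ssautil"
	)

	func main() {
		// Load syntax, type information, and dependencies of the target packages.
		cfg := &packages.Config{Mode: packages.LoadAllSyntax}
		pkgs, err := packages.Load(cfg, "./...")
		if err != nil {
			panic(err)
		}
		// "Create" phase: one ssa.Package per well-typed loaded package.
		prog, _ := ssautil.AllPackages(pkgs, ssa.InstantiateGenerics)
		// "Build" phase: construct SSA-form IR for every function body.
		prog.Build()
	}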
+// +// Instances of generic functions may be either instantiated (f[int] +// is a copy of f[T] with substitutions) or wrapped (f[int] delegates +// to f[T]), depending on the availability of generic syntax and the +// InstantiateGenerics mode flag. +// +// Each package has an initializer function named "init" that calls +// the initializer functions of each direct import, computes and +// assigns the initial value of each global variable, and calls each +// source-level function named "init". (These generate SSA functions +// named "init#1", "init#2", etc.) +// +// Runtime types +// +// Each MakeInterface operation is a conversion from a non-interface +// type to an interface type. The semantics of this operation requires +// a runtime type descriptor, which is the type portion of an +// interface, and the value abstracted by reflect.Type. +// +// The program accumulates all non-parameterized types that are +// encountered as MakeInterface operands, along with all types that +// may be derived from them using reflection. This set is available as +// Program.RuntimeTypes, and the methods of these types may be +// reachable via interface calls or reflection even if they are never +// referenced from the SSA IR. (In practice, algorithms such as RTA +// that compute reachability from package main perform their own +// tracking of runtime types at a finer grain, so this feature is not +// very useful.) +// +// Function literals +// +// Anonymous functions must be built as soon as they are encountered, +// as it may affect locals of the enclosing function, but they are not +// marked 'built' until the end of the outermost enclosing function. +// (Among other things, this causes them to be logged in top-down order.) +// +// The Function.build fields determines the algorithm for building the +// function body. It is cleared to mark that building is complete. + +import ( + "fmt" + "go/ast" + "go/constant" + "go/token" + "go/types" + "os" + "runtime" + "sync" + + "golang.org/x/tools/internal/aliases" + "golang.org/x/tools/internal/typeparams" + "golang.org/x/tools/internal/versions" +) + +type opaqueType struct{ name string } + +func (t *opaqueType) String() string { return t.name } +func (t *opaqueType) Underlying() types.Type { return t } + +var ( + varOk = newVar("ok", tBool) + varIndex = newVar("index", tInt) + + // Type constants. + tBool = types.Typ[types.Bool] + tByte = types.Typ[types.Byte] + tInt = types.Typ[types.Int] + tInvalid = types.Typ[types.Invalid] + tString = types.Typ[types.String] + tUntypedNil = types.Typ[types.UntypedNil] + + tRangeIter = &opaqueType{"iter"} // the type of all "range" iterators + tDeferStack = types.NewPointer(&opaqueType{"deferStack"}) // the type of a "deferStack" from ssa:deferstack() + tEface = types.NewInterfaceType(nil, nil).Complete() + + // SSA Value constants. + vZero = intConst(0) + vOne = intConst(1) + vTrue = NewConst(constant.MakeBool(true), tBool) + vFalse = NewConst(constant.MakeBool(false), tBool) + + jReady = intConst(0) // range-over-func jump is READY + jBusy = intConst(-1) // range-over-func jump is BUSY + jDone = intConst(-2) // range-over-func jump is DONE + + // The ssa:deferstack intrinsic returns the current function's defer stack. + vDeferStack = &Builtin{ + name: "ssa:deferstack", + sig: types.NewSignatureType(nil, nil, nil, nil, types.NewTuple(anonVar(tDeferStack)), false), + } +) + +// builder holds state associated with the package currently being built. +// Its methods contain all the logic for AST-to-SSA conversion. 
+// +// All Functions belong to the same Program. +// +// builders are not thread-safe. +type builder struct { + fns []*Function // Functions that have finished their CREATE phases. + + finished int // finished is the length of the prefix of fns containing built functions. + + // The task of building shared functions within the builder. + // Shared functions are ones the the builder may either create or lookup. + // These may be built by other builders in parallel. + // The task is done when the builder has finished iterating, and it + // waits for all shared functions to finish building. + // nil implies there are no hared functions to wait on. + buildshared *task +} + +// shared is done when the builder has built all of the +// enqueued functions to a fixed-point. +func (b *builder) shared() *task { + if b.buildshared == nil { // lazily-initialize + b.buildshared = &task{done: make(chan unit)} + } + return b.buildshared +} + +// enqueue fn to be built by the builder. +func (b *builder) enqueue(fn *Function) { + b.fns = append(b.fns, fn) +} + +// waitForSharedFunction indicates that the builder should wait until +// the potentially shared function fn has finished building. +// +// This should include any functions that may be built by other +// builders. +func (b *builder) waitForSharedFunction(fn *Function) { + if fn.buildshared != nil { // maybe need to wait? + s := b.shared() + s.addEdge(fn.buildshared) + } +} + +// cond emits to fn code to evaluate boolean condition e and jump +// to t or f depending on its value, performing various simplifications. +// +// Postcondition: fn.currentBlock is nil. +func (b *builder) cond(fn *Function, e ast.Expr, t, f *BasicBlock) { + switch e := e.(type) { + case *ast.ParenExpr: + b.cond(fn, e.X, t, f) + return + + case *ast.BinaryExpr: + switch e.Op { + case token.LAND: + ltrue := fn.newBasicBlock("cond.true") + b.cond(fn, e.X, ltrue, f) + fn.currentBlock = ltrue + b.cond(fn, e.Y, t, f) + return + + case token.LOR: + lfalse := fn.newBasicBlock("cond.false") + b.cond(fn, e.X, t, lfalse) + fn.currentBlock = lfalse + b.cond(fn, e.Y, t, f) + return + } + + case *ast.UnaryExpr: + if e.Op == token.NOT { + b.cond(fn, e.X, f, t) + return + } + } + + // A traditional compiler would simplify "if false" (etc) here + // but we do not, for better fidelity to the source code. + // + // The value of a constant condition may be platform-specific, + // and may cause blocks that are reachable in some configuration + // to be hidden from subsequent analyses such as bug-finding tools. + emitIf(fn, b.expr(fn, e), t, f) +} + +// logicalBinop emits code to fn to evaluate e, a &&- or +// ||-expression whose reified boolean value is wanted. +// The value is returned. +func (b *builder) logicalBinop(fn *Function, e *ast.BinaryExpr) Value { + rhs := fn.newBasicBlock("binop.rhs") + done := fn.newBasicBlock("binop.done") + + // T(e) = T(e.X) = T(e.Y) after untyped constants have been + // eliminated. + // TODO(adonovan): not true; MyBool==MyBool yields UntypedBool. + t := fn.typeOf(e) + + var short Value // value of the short-circuit path + switch e.Op { + case token.LAND: + b.cond(fn, e.X, rhs, done) + short = NewConst(constant.MakeBool(false), t) + + case token.LOR: + b.cond(fn, e.X, done, rhs) + short = NewConst(constant.MakeBool(true), t) + } + + // Is rhs unreachable? + if rhs.Preds == nil { + // Simplify false&&y to false, true||y to true. + fn.currentBlock = done + return short + } + + // Is done unreachable? 
+ if done.Preds == nil { + // Simplify true&&y (or false||y) to y. + fn.currentBlock = rhs + return b.expr(fn, e.Y) + } + + // All edges from e.X to done carry the short-circuit value. + var edges []Value + for range done.Preds { + edges = append(edges, short) + } + + // The edge from e.Y to done carries the value of e.Y. + fn.currentBlock = rhs + edges = append(edges, b.expr(fn, e.Y)) + emitJump(fn, done) + fn.currentBlock = done + + phi := &Phi{Edges: edges, Comment: e.Op.String()} + phi.pos = e.OpPos + phi.typ = t + return done.emit(phi) +} + +// exprN lowers a multi-result expression e to SSA form, emitting code +// to fn and returning a single Value whose type is a *types.Tuple. +// The caller must access the components via Extract. +// +// Multi-result expressions include CallExprs in a multi-value +// assignment or return statement, and "value,ok" uses of +// TypeAssertExpr, IndexExpr (when X is a map), and UnaryExpr (when Op +// is token.ARROW). +func (b *builder) exprN(fn *Function, e ast.Expr) Value { + typ := fn.typeOf(e).(*types.Tuple) + switch e := e.(type) { + case *ast.ParenExpr: + return b.exprN(fn, e.X) + + case *ast.CallExpr: + // Currently, no built-in function nor type conversion + // has multiple results, so we can avoid some of the + // cases for single-valued CallExpr. + var c Call + b.setCall(fn, e, &c.Call) + c.typ = typ + return fn.emit(&c) + + case *ast.IndexExpr: + mapt := typeparams.CoreType(fn.typeOf(e.X)).(*types.Map) // ,ok must be a map. + lookup := &Lookup{ + X: b.expr(fn, e.X), + Index: emitConv(fn, b.expr(fn, e.Index), mapt.Key()), + CommaOk: true, + } + lookup.setType(typ) + lookup.setPos(e.Lbrack) + return fn.emit(lookup) + + case *ast.TypeAssertExpr: + return emitTypeTest(fn, b.expr(fn, e.X), typ.At(0).Type(), e.Lparen) + + case *ast.UnaryExpr: // must be receive <- + unop := &UnOp{ + Op: token.ARROW, + X: b.expr(fn, e.X), + CommaOk: true, + } + unop.setType(typ) + unop.setPos(e.OpPos) + return fn.emit(unop) + } + panic(fmt.Sprintf("exprN(%T) in %s", e, fn)) +} + +// builtin emits to fn SSA instructions to implement a call to the +// built-in function obj with the specified arguments +// and return type. It returns the value defined by the result. +// +// The result is nil if no special handling was required; in this case +// the caller should treat this like an ordinary library function +// call. 
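// (Editorial illustration, not part of the vendored source: for a call such
// as make([]int, n, 8) with a constant capacity, the code below emits
// new([8]int) followed by a Slice of length n rather than a MakeSlice;
// make(map[string]int, hint) becomes a MakeMap with a Reserve operand; and
// panic(x) becomes a Panic instruction after which the current block is
// unreachable. A nil result tells the caller to emit an ordinary Call.)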
+func (b *builder) builtin(fn *Function, obj *types.Builtin, args []ast.Expr, typ types.Type, pos token.Pos) Value { + typ = fn.typ(typ) + switch obj.Name() { + case "make": + switch ct := typeparams.CoreType(typ).(type) { + case *types.Slice: + n := b.expr(fn, args[1]) + m := n + if len(args) == 3 { + m = b.expr(fn, args[2]) + } + if m, ok := m.(*Const); ok { + // treat make([]T, n, m) as new([m]T)[:n] + cap := m.Int64() + at := types.NewArray(ct.Elem(), cap) + v := &Slice{ + X: emitNew(fn, at, pos, "makeslice"), + High: n, + } + v.setPos(pos) + v.setType(typ) + return fn.emit(v) + } + v := &MakeSlice{ + Len: n, + Cap: m, + } + v.setPos(pos) + v.setType(typ) + return fn.emit(v) + + case *types.Map: + var res Value + if len(args) == 2 { + res = b.expr(fn, args[1]) + } + v := &MakeMap{Reserve: res} + v.setPos(pos) + v.setType(typ) + return fn.emit(v) + + case *types.Chan: + var sz Value = vZero + if len(args) == 2 { + sz = b.expr(fn, args[1]) + } + v := &MakeChan{Size: sz} + v.setPos(pos) + v.setType(typ) + return fn.emit(v) + } + + case "new": + return emitNew(fn, typeparams.MustDeref(typ), pos, "new") + + case "len", "cap": + // Special case: len or cap of an array or *array is + // based on the type, not the value which may be nil. + // We must still evaluate the value, though. (If it + // was side-effect free, the whole call would have + // been constant-folded.) + t := typeparams.Deref(fn.typeOf(args[0])) + if at, ok := typeparams.CoreType(t).(*types.Array); ok { + b.expr(fn, args[0]) // for effects only + return intConst(at.Len()) + } + // Otherwise treat as normal. + + case "panic": + fn.emit(&Panic{ + X: emitConv(fn, b.expr(fn, args[0]), tEface), + pos: pos, + }) + fn.currentBlock = fn.newBasicBlock("unreachable") + return vTrue // any non-nil Value will do + } + return nil // treat all others as a regular function call +} + +// addr lowers a single-result addressable expression e to SSA form, +// emitting code to fn and returning the location (an lvalue) defined +// by the expression. +// +// If escaping is true, addr marks the base variable of the +// addressable expression e as being a potentially escaping pointer +// value. For example, in this code: +// +// a := A{ +// b: [1]B{B{c: 1}} +// } +// return &a.b[0].c +// +// the application of & causes a.b[0].c to have its address taken, +// which means that ultimately the local variable a must be +// heap-allocated. This is a simple but very conservative escape +// analysis. +// +// Operations forming potentially escaping pointers include: +// - &x, including when implicit in method call or composite literals. +// - a[:] iff a is an array (not *array) +// - references to variables in lexically enclosing functions. 
+func (b *builder) addr(fn *Function, e ast.Expr, escaping bool) lvalue { + switch e := e.(type) { + case *ast.Ident: + if isBlankIdent(e) { + return blank{} + } + obj := fn.objectOf(e).(*types.Var) + var v Value + if g := fn.Prog.packageLevelMember(obj); g != nil { + v = g.(*Global) // var (address) + } else { + v = fn.lookup(obj, escaping) + } + return &address{addr: v, pos: e.Pos(), expr: e} + + case *ast.CompositeLit: + typ := typeparams.Deref(fn.typeOf(e)) + var v *Alloc + if escaping { + v = emitNew(fn, typ, e.Lbrace, "complit") + } else { + v = emitLocal(fn, typ, e.Lbrace, "complit") + } + var sb storebuf + b.compLit(fn, v, e, true, &sb) + sb.emit(fn) + return &address{addr: v, pos: e.Lbrace, expr: e} + + case *ast.ParenExpr: + return b.addr(fn, e.X, escaping) + + case *ast.SelectorExpr: + sel := fn.selection(e) + if sel == nil { + // qualified identifier + return b.addr(fn, e.Sel, escaping) + } + if sel.kind != types.FieldVal { + panic(sel) + } + wantAddr := true + v := b.receiver(fn, e.X, wantAddr, escaping, sel) + index := sel.index[len(sel.index)-1] + fld := fieldOf(typeparams.MustDeref(v.Type()), index) // v is an addr. + + // Due to the two phases of resolving AssignStmt, a panic from x.f = p() + // when x is nil is required to come after the side-effects of + // evaluating x and p(). + emit := func(fn *Function) Value { + return emitFieldSelection(fn, v, index, true, e.Sel) + } + return &lazyAddress{addr: emit, t: fld.Type(), pos: e.Sel.Pos(), expr: e.Sel} + + case *ast.IndexExpr: + xt := fn.typeOf(e.X) + elem, mode := indexType(xt) + var x Value + var et types.Type + switch mode { + case ixArrVar: // array, array|slice, array|*array, or array|*array|slice. + x = b.addr(fn, e.X, escaping).address(fn) + et = types.NewPointer(elem) + case ixVar: // *array, slice, *array|slice + x = b.expr(fn, e.X) + et = types.NewPointer(elem) + case ixMap: + mt := typeparams.CoreType(xt).(*types.Map) + return &element{ + m: b.expr(fn, e.X), + k: emitConv(fn, b.expr(fn, e.Index), mt.Key()), + t: mt.Elem(), + pos: e.Lbrack, + } + default: + panic("unexpected container type in IndexExpr: " + xt.String()) + } + index := b.expr(fn, e.Index) + if isUntyped(index.Type()) { + index = emitConv(fn, index, tInt) + } + // Due to the two phases of resolving AssignStmt, a panic from x[i] = p() + // when x is nil or i is out-of-bounds is required to come after the + // side-effects of evaluating x, i and p(). + emit := func(fn *Function) Value { + v := &IndexAddr{ + X: x, + Index: index, + } + v.setPos(e.Lbrack) + v.setType(et) + return fn.emit(v) + } + return &lazyAddress{addr: emit, t: typeparams.MustDeref(et), pos: e.Lbrack, expr: e} + + case *ast.StarExpr: + return &address{addr: b.expr(fn, e.X), pos: e.Star, expr: e} + } + + panic(fmt.Sprintf("unexpected address expression: %T", e)) +} + +type store struct { + lhs lvalue + rhs Value +} + +type storebuf struct{ stores []store } + +func (sb *storebuf) store(lhs lvalue, rhs Value) { + sb.stores = append(sb.stores, store{lhs, rhs}) +} + +func (sb *storebuf) emit(fn *Function) { + for _, s := range sb.stores { + s.lhs.store(fn, s.rhs) + } +} + +// assign emits to fn code to initialize the lvalue loc with the value +// of expression e. If isZero is true, assign assumes that loc holds +// the zero value for its type. +// +// This is equivalent to loc.store(fn, b.expr(fn, e)), but may generate +// better code in some cases, e.g., for composite literals in an +// addressable location. 
+// +// If sb is not nil, assign generates code to evaluate expression e, but +// not to update loc. Instead, the necessary stores are appended to the +// storebuf sb so that they can be executed later. This allows correct +// in-place update of existing variables when the RHS is a composite +// literal that may reference parts of the LHS. +func (b *builder) assign(fn *Function, loc lvalue, e ast.Expr, isZero bool, sb *storebuf) { + // Can we initialize it in place? + if e, ok := unparen(e).(*ast.CompositeLit); ok { + // A CompositeLit never evaluates to a pointer, + // so if the type of the location is a pointer, + // an &-operation is implied. + if !is[blank](loc) && isPointerCore(loc.typ()) { // avoid calling blank.typ() + ptr := b.addr(fn, e, true).address(fn) + // copy address + if sb != nil { + sb.store(loc, ptr) + } else { + loc.store(fn, ptr) + } + return + } + + if _, ok := loc.(*address); ok { + if isNonTypeParamInterface(loc.typ()) { + // e.g. var x interface{} = T{...} + // Can't in-place initialize an interface value. + // Fall back to copying. + } else { + // x = T{...} or x := T{...} + addr := loc.address(fn) + if sb != nil { + b.compLit(fn, addr, e, isZero, sb) + } else { + var sb storebuf + b.compLit(fn, addr, e, isZero, &sb) + sb.emit(fn) + } + + // Subtle: emit debug ref for aggregate types only; + // slice and map are handled by store ops in compLit. + switch typeparams.CoreType(loc.typ()).(type) { + case *types.Struct, *types.Array: + emitDebugRef(fn, e, addr, true) + } + + return + } + } + } + + // simple case: just copy + rhs := b.expr(fn, e) + if sb != nil { + sb.store(loc, rhs) + } else { + loc.store(fn, rhs) + } +} + +// expr lowers a single-result expression e to SSA form, emitting code +// to fn and returning the Value defined by the expression. +func (b *builder) expr(fn *Function, e ast.Expr) Value { + e = unparen(e) + + tv := fn.info.Types[e] + + // Is expression a constant? + if tv.Value != nil { + return NewConst(tv.Value, fn.typ(tv.Type)) + } + + var v Value + if tv.Addressable() { + // Prefer pointer arithmetic ({Index,Field}Addr) followed + // by Load over subelement extraction (e.g. Index, Field), + // to avoid large copies. + v = b.addr(fn, e, false).load(fn) + } else { + v = b.expr0(fn, e, tv) + } + if fn.debugInfo() { + emitDebugRef(fn, e, v, false) + } + return v +} + +func (b *builder) expr0(fn *Function, e ast.Expr, tv types.TypeAndValue) Value { + switch e := e.(type) { + case *ast.BasicLit: + panic("non-constant BasicLit") // unreachable + + case *ast.FuncLit: + /* function literal */ + anon := &Function{ + name: fmt.Sprintf("%s$%d", fn.Name(), 1+len(fn.AnonFuncs)), + Signature: fn.typeOf(e.Type).(*types.Signature), + pos: e.Type.Func, + parent: fn, + anonIdx: int32(len(fn.AnonFuncs)), + Pkg: fn.Pkg, + Prog: fn.Prog, + syntax: e, + info: fn.info, + goversion: fn.goversion, + build: (*builder).buildFromSyntax, + topLevelOrigin: nil, // use anonIdx to lookup an anon instance's origin. + typeparams: fn.typeparams, // share the parent's type parameters. + typeargs: fn.typeargs, // share the parent's type arguments. + subst: fn.subst, // share the parent's type substitutions. + uniq: fn.uniq, // start from parent's unique values + } + fn.AnonFuncs = append(fn.AnonFuncs, anon) + // Build anon immediately, as it may cause fn's locals to escape. + // (It is not marked 'built' until the end of the enclosing FuncDecl.) 
+ anon.build(b, anon) + fn.uniq = anon.uniq // resume after anon's unique values + if anon.FreeVars == nil { + return anon + } + v := &MakeClosure{Fn: anon} + v.setType(fn.typ(tv.Type)) + for _, fv := range anon.FreeVars { + v.Bindings = append(v.Bindings, fv.outer) + fv.outer = nil + } + return fn.emit(v) + + case *ast.TypeAssertExpr: // single-result form only + return emitTypeAssert(fn, b.expr(fn, e.X), fn.typ(tv.Type), e.Lparen) + + case *ast.CallExpr: + if fn.info.Types[e.Fun].IsType() { + // Explicit type conversion, e.g. string(x) or big.Int(x) + x := b.expr(fn, e.Args[0]) + y := emitConv(fn, x, fn.typ(tv.Type)) + if y != x { + switch y := y.(type) { + case *Convert: + y.pos = e.Lparen + case *ChangeType: + y.pos = e.Lparen + case *MakeInterface: + y.pos = e.Lparen + case *SliceToArrayPointer: + y.pos = e.Lparen + case *UnOp: // conversion from slice to array. + y.pos = e.Lparen + } + } + return y + } + // Call to "intrinsic" built-ins, e.g. new, make, panic. + if id, ok := unparen(e.Fun).(*ast.Ident); ok { + if obj, ok := fn.info.Uses[id].(*types.Builtin); ok { + if v := b.builtin(fn, obj, e.Args, fn.typ(tv.Type), e.Lparen); v != nil { + return v + } + } + } + // Regular function call. + var v Call + b.setCall(fn, e, &v.Call) + v.setType(fn.typ(tv.Type)) + return fn.emit(&v) + + case *ast.UnaryExpr: + switch e.Op { + case token.AND: // &X --- potentially escaping. + addr := b.addr(fn, e.X, true) + if _, ok := unparen(e.X).(*ast.StarExpr); ok { + // &*p must panic if p is nil (http://golang.org/s/go12nil). + // For simplicity, we'll just (suboptimally) rely + // on the side effects of a load. + // TODO(adonovan): emit dedicated nilcheck. + addr.load(fn) + } + return addr.address(fn) + case token.ADD: + return b.expr(fn, e.X) + case token.NOT, token.ARROW, token.SUB, token.XOR: // ! <- - ^ + v := &UnOp{ + Op: e.Op, + X: b.expr(fn, e.X), + } + v.setPos(e.OpPos) + v.setType(fn.typ(tv.Type)) + return fn.emit(v) + default: + panic(e.Op) + } + + case *ast.BinaryExpr: + switch e.Op { + case token.LAND, token.LOR: + return b.logicalBinop(fn, e) + case token.SHL, token.SHR: + fallthrough + case token.ADD, token.SUB, token.MUL, token.QUO, token.REM, token.AND, token.OR, token.XOR, token.AND_NOT: + return emitArith(fn, e.Op, b.expr(fn, e.X), b.expr(fn, e.Y), fn.typ(tv.Type), e.OpPos) + + case token.EQL, token.NEQ, token.GTR, token.LSS, token.LEQ, token.GEQ: + cmp := emitCompare(fn, e.Op, b.expr(fn, e.X), b.expr(fn, e.Y), e.OpPos) + // The type of x==y may be UntypedBool. + return emitConv(fn, cmp, types.Default(fn.typ(tv.Type))) + default: + panic("illegal op in BinaryExpr: " + e.Op.String()) + } + + case *ast.SliceExpr: + var low, high, max Value + var x Value + xtyp := fn.typeOf(e.X) + switch typeparams.CoreType(xtyp).(type) { + case *types.Array: + // Potentially escaping. + x = b.addr(fn, e.X, true).address(fn) + case *types.Basic, *types.Slice, *types.Pointer: // *array + x = b.expr(fn, e.X) + default: + // core type exception? + if isBytestring(xtyp) { + x = b.expr(fn, e.X) // bytestring is handled as string and []byte. + } else { + panic("unexpected sequence type in SliceExpr") + } + } + if e.Low != nil { + low = b.expr(fn, e.Low) + } + if e.High != nil { + high = b.expr(fn, e.High) + } + if e.Slice3 { + max = b.expr(fn, e.Max) + } + v := &Slice{ + X: x, + Low: low, + High: high, + Max: max, + } + v.setPos(e.Lbrack) + v.setType(fn.typ(tv.Type)) + return fn.emit(v) + + case *ast.Ident: + obj := fn.info.Uses[e] + // Universal built-in or nil? 
+ switch obj := obj.(type) { + case *types.Builtin: + return &Builtin{name: obj.Name(), sig: fn.instanceType(e).(*types.Signature)} + case *types.Nil: + return zeroConst(fn.instanceType(e)) + } + + // Package-level func or var? + // (obj must belong to same package or a direct import.) + if v := fn.Prog.packageLevelMember(obj); v != nil { + if g, ok := v.(*Global); ok { + return emitLoad(fn, g) // var (address) + } + callee := v.(*Function) // (func) + if callee.typeparams.Len() > 0 { + targs := fn.subst.types(instanceArgs(fn.info, e)) + callee = callee.instance(targs, b) + } + return callee + } + // Local var. + return emitLoad(fn, fn.lookup(obj.(*types.Var), false)) // var (address) + + case *ast.SelectorExpr: + sel := fn.selection(e) + if sel == nil { + // builtin unsafe.{Add,Slice} + if obj, ok := fn.info.Uses[e.Sel].(*types.Builtin); ok { + return &Builtin{name: obj.Name(), sig: fn.typ(tv.Type).(*types.Signature)} + } + // qualified identifier + return b.expr(fn, e.Sel) + } + switch sel.kind { + case types.MethodExpr: + // (*T).f or T.f, the method f from the method-set of type T. + // The result is a "thunk". + thunk := createThunk(fn.Prog, sel) + b.enqueue(thunk) + return emitConv(fn, thunk, fn.typ(tv.Type)) + + case types.MethodVal: + // e.f where e is an expression and f is a method. + // The result is a "bound". + obj := sel.obj.(*types.Func) + rt := fn.typ(recvType(obj)) + wantAddr := isPointer(rt) + escaping := true + v := b.receiver(fn, e.X, wantAddr, escaping, sel) + + if types.IsInterface(rt) { + // If v may be an interface type I (after instantiating), + // we must emit a check that v is non-nil. + if recv, ok := aliases.Unalias(sel.recv).(*types.TypeParam); ok { + // Emit a nil check if any possible instantiation of the + // type parameter is an interface type. + if typeSetOf(recv).Len() > 0 { + // recv has a concrete term its typeset. + // So it cannot be instantiated as an interface. + // + // Example: + // func _[T interface{~int; Foo()}] () { + // var v T + // _ = v.Foo // <-- MethodVal + // } + } else { + // rt may be instantiated as an interface. + // Emit nil check: typeassert (any(v)).(any). + emitTypeAssert(fn, emitConv(fn, v, tEface), tEface, token.NoPos) + } + } else { + // non-type param interface + // Emit nil check: typeassert v.(I). + emitTypeAssert(fn, v, rt, e.Sel.Pos()) + } + } + if targs := receiverTypeArgs(obj); len(targs) > 0 { + // obj is generic. + obj = fn.Prog.canon.instantiateMethod(obj, fn.subst.types(targs), fn.Prog.ctxt) + } + bound := createBound(fn.Prog, obj) + b.enqueue(bound) + + c := &MakeClosure{ + Fn: bound, + Bindings: []Value{v}, + } + c.setPos(e.Sel.Pos()) + c.setType(fn.typ(tv.Type)) + return fn.emit(c) + + case types.FieldVal: + indices := sel.index + last := len(indices) - 1 + v := b.expr(fn, e.X) + v = emitImplicitSelections(fn, v, indices[:last], e.Pos()) + v = emitFieldSelection(fn, v, indices[last], false, e.Sel) + return v + } + + panic("unexpected expression-relative selector") + + case *ast.IndexListExpr: + // f[X, Y] must be a generic function + if !instance(fn.info, e.X) { + panic("unexpected expression-could not match index list to instantiation") + } + return b.expr(fn, e.X) // Handle instantiation within the *Ident or *SelectorExpr cases. + + case *ast.IndexExpr: + if instance(fn.info, e.X) { + return b.expr(fn, e.X) // Handle instantiation within the *Ident or *SelectorExpr cases. + } + // not a generic instantiation. 
+ xt := fn.typeOf(e.X) + switch et, mode := indexType(xt); mode { + case ixVar: + // Addressable slice/array; use IndexAddr and Load. + return b.addr(fn, e, false).load(fn) + + case ixArrVar, ixValue: + // An array in a register, a string or a combined type that contains + // either an [_]array (ixArrVar) or string (ixValue). + + // Note: for ixArrVar and CoreType(xt)==nil can be IndexAddr and Load. + index := b.expr(fn, e.Index) + if isUntyped(index.Type()) { + index = emitConv(fn, index, tInt) + } + v := &Index{ + X: b.expr(fn, e.X), + Index: index, + } + v.setPos(e.Lbrack) + v.setType(et) + return fn.emit(v) + + case ixMap: + ct := typeparams.CoreType(xt).(*types.Map) + v := &Lookup{ + X: b.expr(fn, e.X), + Index: emitConv(fn, b.expr(fn, e.Index), ct.Key()), + } + v.setPos(e.Lbrack) + v.setType(ct.Elem()) + return fn.emit(v) + default: + panic("unexpected container type in IndexExpr: " + xt.String()) + } + + case *ast.CompositeLit, *ast.StarExpr: + // Addressable types (lvalues) + return b.addr(fn, e, false).load(fn) + } + + panic(fmt.Sprintf("unexpected expr: %T", e)) +} + +// stmtList emits to fn code for all statements in list. +func (b *builder) stmtList(fn *Function, list []ast.Stmt) { + for _, s := range list { + b.stmt(fn, s) + } +} + +// receiver emits to fn code for expression e in the "receiver" +// position of selection e.f (where f may be a field or a method) and +// returns the effective receiver after applying the implicit field +// selections of sel. +// +// wantAddr requests that the result is an address. If +// !sel.indirect, this may require that e be built in addr() mode; it +// must thus be addressable. +// +// escaping is defined as per builder.addr(). +func (b *builder) receiver(fn *Function, e ast.Expr, wantAddr, escaping bool, sel *selection) Value { + var v Value + if wantAddr && !sel.indirect && !isPointerCore(fn.typeOf(e)) { + v = b.addr(fn, e, escaping).address(fn) + } else { + v = b.expr(fn, e) + } + + last := len(sel.index) - 1 + // The position of implicit selection is the position of the inducing receiver expression. + v = emitImplicitSelections(fn, v, sel.index[:last], e.Pos()) + if types.IsInterface(v.Type()) { + // When v is an interface, sel.Kind()==MethodValue and v.f is invoked. + // So v is not loaded, even if v has a pointer core type. + } else if !wantAddr && isPointerCore(v.Type()) { + v = emitLoad(fn, v) + } + return v +} + +// setCallFunc populates the function parts of a CallCommon structure +// (Func, Method, Recv, Args[0]) based on the kind of invocation +// occurring in e. +func (b *builder) setCallFunc(fn *Function, e *ast.CallExpr, c *CallCommon) { + c.pos = e.Lparen + + // Is this a method call? + if selector, ok := unparen(e.Fun).(*ast.SelectorExpr); ok { + sel := fn.selection(selector) + if sel != nil && sel.kind == types.MethodVal { + obj := sel.obj.(*types.Func) + recv := recvType(obj) + + wantAddr := isPointer(recv) + escaping := true + v := b.receiver(fn, selector.X, wantAddr, escaping, sel) + if types.IsInterface(recv) { + // Invoke-mode call. + c.Value = v // possibly type param + c.Method = obj + } else { + // "Call"-mode call. + c.Value = fn.Prog.objectMethod(obj, b) + c.Args = append(c.Args, v) + } + return + } + + // sel.kind==MethodExpr indicates T.f() or (*T).f(): + // a statically dispatched call to the method f in the + // method-set of T or *T. T may be an interface. + // + // e.Fun would evaluate to a concrete method, interface + // wrapper function, or promotion wrapper. 
+ // + // For now, we evaluate it in the usual way. + // + // TODO(adonovan): opt: inline expr() here, to make the + // call static and to avoid generation of wrappers. + // It's somewhat tricky as it may consume the first + // actual parameter if the call is "invoke" mode. + // + // Examples: + // type T struct{}; func (T) f() {} // "call" mode + // type T interface { f() } // "invoke" mode + // + // type S struct{ T } + // + // var s S + // S.f(s) + // (*S).f(&s) + // + // Suggested approach: + // - consume the first actual parameter expression + // and build it with b.expr(). + // - apply implicit field selections. + // - use MethodVal logic to populate fields of c. + } + + // Evaluate the function operand in the usual way. + c.Value = b.expr(fn, e.Fun) +} + +// emitCallArgs emits to f code for the actual parameters of call e to +// a (possibly built-in) function of effective type sig. +// The argument values are appended to args, which is then returned. +func (b *builder) emitCallArgs(fn *Function, sig *types.Signature, e *ast.CallExpr, args []Value) []Value { + // f(x, y, z...): pass slice z straight through. + if e.Ellipsis != 0 { + for i, arg := range e.Args { + v := emitConv(fn, b.expr(fn, arg), sig.Params().At(i).Type()) + args = append(args, v) + } + return args + } + + offset := len(args) // 1 if call has receiver, 0 otherwise + + // Evaluate actual parameter expressions. + // + // If this is a chained call of the form f(g()) where g has + // multiple return values (MRV), they are flattened out into + // args; a suffix of them may end up in a varargs slice. + for _, arg := range e.Args { + v := b.expr(fn, arg) + if ttuple, ok := v.Type().(*types.Tuple); ok { // MRV chain + for i, n := 0, ttuple.Len(); i < n; i++ { + args = append(args, emitExtract(fn, v, i)) + } + } else { + args = append(args, v) + } + } + + // Actual->formal assignability conversions for normal parameters. + np := sig.Params().Len() // number of normal parameters + if sig.Variadic() { + np-- + } + for i := 0; i < np; i++ { + args[offset+i] = emitConv(fn, args[offset+i], sig.Params().At(i).Type()) + } + + // Actual->formal assignability conversions for variadic parameter, + // and construction of slice. + if sig.Variadic() { + varargs := args[offset+np:] + st := sig.Params().At(np).Type().(*types.Slice) + vt := st.Elem() + if len(varargs) == 0 { + args = append(args, zeroConst(st)) + } else { + // Replace a suffix of args with a slice containing it. + at := types.NewArray(vt, int64(len(varargs))) + a := emitNew(fn, at, token.NoPos, "varargs") + a.setPos(e.Rparen) + for i, arg := range varargs { + iaddr := &IndexAddr{ + X: a, + Index: intConst(int64(i)), + } + iaddr.setType(types.NewPointer(vt)) + fn.emit(iaddr) + emitStore(fn, iaddr, arg, arg.Pos()) + } + s := &Slice{X: a} + s.setType(st) + args[offset+np] = fn.emit(s) + args = args[:offset+np+1] + } + } + return args +} + +// setCall emits to fn code to evaluate all the parameters of a function +// call e, and populates *c with those values. +func (b *builder) setCall(fn *Function, e *ast.CallExpr, c *CallCommon) { + // First deal with the f(...) part and optional receiver. + b.setCallFunc(fn, e, c) + + // Then append the other actual parameters. + sig, _ := typeparams.CoreType(fn.typeOf(e.Fun)).(*types.Signature) + if sig == nil { + panic(fmt.Sprintf("no signature for call of %s", e.Fun)) + } + c.Args = b.emitCallArgs(fn, sig, e, c.Args) +} + +// assignOp emits to fn code to perform loc <op>= val. 
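// (Editorial illustration: for "x += 1" the caller computes the lvalue for x
// exactly once; assignOp then loads it, emits BinOp(ADD, load, 1), and
// stores the result back through the same lvalue.)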
+func (b *builder) assignOp(fn *Function, loc lvalue, val Value, op token.Token, pos token.Pos) { + loc.store(fn, emitArith(fn, op, loc.load(fn), val, loc.typ(), pos)) +} + +// localValueSpec emits to fn code to define all of the vars in the +// function-local ValueSpec, spec. +func (b *builder) localValueSpec(fn *Function, spec *ast.ValueSpec) { + switch { + case len(spec.Values) == len(spec.Names): + // e.g. var x, y = 0, 1 + // 1:1 assignment + for i, id := range spec.Names { + if !isBlankIdent(id) { + emitLocalVar(fn, identVar(fn, id)) + } + lval := b.addr(fn, id, false) // non-escaping + b.assign(fn, lval, spec.Values[i], true, nil) + } + + case len(spec.Values) == 0: + // e.g. var x, y int + // Locals are implicitly zero-initialized. + for _, id := range spec.Names { + if !isBlankIdent(id) { + lhs := emitLocalVar(fn, identVar(fn, id)) + if fn.debugInfo() { + emitDebugRef(fn, id, lhs, true) + } + } + } + + default: + // e.g. var x, y = pos() + tuple := b.exprN(fn, spec.Values[0]) + for i, id := range spec.Names { + if !isBlankIdent(id) { + emitLocalVar(fn, identVar(fn, id)) + lhs := b.addr(fn, id, false) // non-escaping + lhs.store(fn, emitExtract(fn, tuple, i)) + } + } + } +} + +// assignStmt emits code to fn for a parallel assignment of rhss to lhss. +// isDef is true if this is a short variable declaration (:=). +// +// Note the similarity with localValueSpec. +func (b *builder) assignStmt(fn *Function, lhss, rhss []ast.Expr, isDef bool) { + // Side effects of all LHSs and RHSs must occur in left-to-right order. + lvals := make([]lvalue, len(lhss)) + isZero := make([]bool, len(lhss)) + for i, lhs := range lhss { + var lval lvalue = blank{} + if !isBlankIdent(lhs) { + if isDef { + if obj, ok := fn.info.Defs[lhs.(*ast.Ident)].(*types.Var); ok { + emitLocalVar(fn, obj) + isZero[i] = true + } + } + lval = b.addr(fn, lhs, false) // non-escaping + } + lvals[i] = lval + } + if len(lhss) == len(rhss) { + // Simple assignment: x = f() (!isDef) + // Parallel assignment: x, y = f(), g() (!isDef) + // or short var decl: x, y := f(), g() (isDef) + // + // In all cases, the RHSs may refer to the LHSs, + // so we need a storebuf. + var sb storebuf + for i := range rhss { + b.assign(fn, lvals[i], rhss[i], isZero[i], &sb) + } + sb.emit(fn) + } else { + // e.g. x, y = pos() + tuple := b.exprN(fn, rhss[0]) + emitDebugRef(fn, rhss[0], tuple, false) + for i, lval := range lvals { + lval.store(fn, emitExtract(fn, tuple, i)) + } + } +} + +// arrayLen returns the length of the array whose composite literal elements are elts. +func (b *builder) arrayLen(fn *Function, elts []ast.Expr) int64 { + var max int64 = -1 + var i int64 = -1 + for _, e := range elts { + if kv, ok := e.(*ast.KeyValueExpr); ok { + i = b.expr(fn, kv.Key).(*Const).Int64() + } else { + i++ + } + if i > max { + max = i + } + } + return max + 1 +} + +// compLit emits to fn code to initialize a composite literal e at +// address addr with type typ. +// +// Nested composite literals are recursively initialized in place +// where possible. If isZero is true, compLit assumes that addr +// holds the zero value for typ. +// +// Because the elements of a composite literal may refer to the +// variables being updated, as in the second line below, +// +// x := T{a: 1} +// x = T{a: x.a} +// +// all the reads must occur before all the writes. Thus all stores to +// loc are emitted to the storebuf sb for later execution. +// +// A CompositeLit may have pointer type only in the recursive (nested) +// case when the type name is implicit. e.g. 
in []*T{{}}, the inner +// literal has type *T behaves like &T{}. +// In that case, addr must hold a T, not a *T. +func (b *builder) compLit(fn *Function, addr Value, e *ast.CompositeLit, isZero bool, sb *storebuf) { + typ := typeparams.Deref(fn.typeOf(e)) // retain the named/alias/param type, if any + switch t := typeparams.CoreType(typ).(type) { + case *types.Struct: + if !isZero && len(e.Elts) != t.NumFields() { + // memclear + zt := typeparams.MustDeref(addr.Type()) + sb.store(&address{addr, e.Lbrace, nil}, zeroConst(zt)) + isZero = true + } + for i, e := range e.Elts { + fieldIndex := i + pos := e.Pos() + if kv, ok := e.(*ast.KeyValueExpr); ok { + fname := kv.Key.(*ast.Ident).Name + for i, n := 0, t.NumFields(); i < n; i++ { + sf := t.Field(i) + if sf.Name() == fname { + fieldIndex = i + pos = kv.Colon + e = kv.Value + break + } + } + } + sf := t.Field(fieldIndex) + faddr := &FieldAddr{ + X: addr, + Field: fieldIndex, + } + faddr.setPos(pos) + faddr.setType(types.NewPointer(sf.Type())) + fn.emit(faddr) + b.assign(fn, &address{addr: faddr, pos: pos, expr: e}, e, isZero, sb) + } + + case *types.Array, *types.Slice: + var at *types.Array + var array Value + switch t := t.(type) { + case *types.Slice: + at = types.NewArray(t.Elem(), b.arrayLen(fn, e.Elts)) + array = emitNew(fn, at, e.Lbrace, "slicelit") + case *types.Array: + at = t + array = addr + + if !isZero && int64(len(e.Elts)) != at.Len() { + // memclear + zt := typeparams.MustDeref(array.Type()) + sb.store(&address{array, e.Lbrace, nil}, zeroConst(zt)) + } + } + + var idx *Const + for _, e := range e.Elts { + pos := e.Pos() + if kv, ok := e.(*ast.KeyValueExpr); ok { + idx = b.expr(fn, kv.Key).(*Const) + pos = kv.Colon + e = kv.Value + } else { + var idxval int64 + if idx != nil { + idxval = idx.Int64() + 1 + } + idx = intConst(idxval) + } + iaddr := &IndexAddr{ + X: array, + Index: idx, + } + iaddr.setType(types.NewPointer(at.Elem())) + fn.emit(iaddr) + if t != at { // slice + // backing array is unaliased => storebuf not needed. + b.assign(fn, &address{addr: iaddr, pos: pos, expr: e}, e, true, nil) + } else { + b.assign(fn, &address{addr: iaddr, pos: pos, expr: e}, e, true, sb) + } + } + + if t != at { // slice + s := &Slice{X: array} + s.setPos(e.Lbrace) + s.setType(typ) + sb.store(&address{addr: addr, pos: e.Lbrace, expr: e}, fn.emit(s)) + } + + case *types.Map: + m := &MakeMap{Reserve: intConst(int64(len(e.Elts)))} + m.setPos(e.Lbrace) + m.setType(typ) + fn.emit(m) + for _, e := range e.Elts { + e := e.(*ast.KeyValueExpr) + + // If a key expression in a map literal is itself a + // composite literal, the type may be omitted. + // For example: + // map[*struct{}]bool{{}: true} + // An &-operation may be implied: + // map[*struct{}]bool{&struct{}{}: true} + wantAddr := false + if _, ok := unparen(e.Key).(*ast.CompositeLit); ok { + wantAddr = isPointerCore(t.Key()) + } + + var key Value + if wantAddr { + // A CompositeLit never evaluates to a pointer, + // so if the type of the location is a pointer, + // an &-operation is implied. + key = b.addr(fn, e.Key, true).address(fn) + } else { + key = b.expr(fn, e.Key) + } + + loc := element{ + m: m, + k: emitConv(fn, key, t.Key()), + t: t.Elem(), + pos: e.Colon, + } + + // We call assign() only because it takes care + // of any &-operation required in the recursive + // case, e.g., + // map[int]*struct{}{0: {}} implies &struct{}{}. + // In-place update is of course impossible, + // and no storebuf is needed. 
+ b.assign(fn, &loc, e.Value, true, nil) + } + sb.store(&address{addr: addr, pos: e.Lbrace, expr: e}, m) + + default: + panic("unexpected CompositeLit type: " + typ.String()) + } +} + +// switchStmt emits to fn code for the switch statement s, optionally +// labelled by label. +func (b *builder) switchStmt(fn *Function, s *ast.SwitchStmt, label *lblock) { + // We treat SwitchStmt like a sequential if-else chain. + // Multiway dispatch can be recovered later by ssautil.Switches() + // to those cases that are free of side effects. + if s.Init != nil { + b.stmt(fn, s.Init) + } + var tag Value = vTrue + if s.Tag != nil { + tag = b.expr(fn, s.Tag) + } + done := fn.newBasicBlock("switch.done") + if label != nil { + label._break = done + } + // We pull the default case (if present) down to the end. + // But each fallthrough label must point to the next + // body block in source order, so we preallocate a + // body block (fallthru) for the next case. + // Unfortunately this makes for a confusing block order. + var dfltBody *[]ast.Stmt + var dfltFallthrough *BasicBlock + var fallthru, dfltBlock *BasicBlock + ncases := len(s.Body.List) + for i, clause := range s.Body.List { + body := fallthru + if body == nil { + body = fn.newBasicBlock("switch.body") // first case only + } + + // Preallocate body block for the next case. + fallthru = done + if i+1 < ncases { + fallthru = fn.newBasicBlock("switch.body") + } + + cc := clause.(*ast.CaseClause) + if cc.List == nil { + // Default case. + dfltBody = &cc.Body + dfltFallthrough = fallthru + dfltBlock = body + continue + } + + var nextCond *BasicBlock + for _, cond := range cc.List { + nextCond = fn.newBasicBlock("switch.next") + // TODO(adonovan): opt: when tag==vTrue, we'd + // get better code if we use b.cond(cond) + // instead of BinOp(EQL, tag, b.expr(cond)) + // followed by If. Don't forget conversions + // though. + cond := emitCompare(fn, token.EQL, tag, b.expr(fn, cond), cond.Pos()) + emitIf(fn, cond, body, nextCond) + fn.currentBlock = nextCond + } + fn.currentBlock = body + fn.targets = &targets{ + tail: fn.targets, + _break: done, + _fallthrough: fallthru, + } + b.stmtList(fn, cc.Body) + fn.targets = fn.targets.tail + emitJump(fn, done) + fn.currentBlock = nextCond + } + if dfltBlock != nil { + emitJump(fn, dfltBlock) + fn.currentBlock = dfltBlock + fn.targets = &targets{ + tail: fn.targets, + _break: done, + _fallthrough: dfltFallthrough, + } + b.stmtList(fn, *dfltBody) + fn.targets = fn.targets.tail + } + emitJump(fn, done) + fn.currentBlock = done +} + +// typeSwitchStmt emits to fn code for the type switch statement s, optionally +// labelled by label. +func (b *builder) typeSwitchStmt(fn *Function, s *ast.TypeSwitchStmt, label *lblock) { + // We treat TypeSwitchStmt like a sequential if-else chain. + // Multiway dispatch can be recovered later by ssautil.Switches(). + + // Typeswitch lowering: + // + // var x X + // switch y := x.(type) { + // case T1, T2: S1 // >1 (y := x) + // case nil: SN // nil (y := x) + // default: SD // 0 types (y := x) + // case T3: S3 // 1 type (y := x.(T3)) + // } + // + // ...s.Init... + // x := eval x + // .caseT1: + // t1, ok1 := typeswitch,ok x <T1> + // if ok1 then goto S1 else goto .caseT2 + // .caseT2: + // t2, ok2 := typeswitch,ok x <T2> + // if ok2 then goto S1 else goto .caseNil + // .S1: + // y := x + // ...S1... + // goto done + // .caseNil: + // if t2, ok2 := typeswitch,ok x <T2> + // if x == nil then goto SN else goto .caseT3 + // .SN: + // y := x + // ...SN... 
+ // goto done + // .caseT3: + // t3, ok3 := typeswitch,ok x <T3> + // if ok3 then goto S3 else goto default + // .S3: + // y := t3 + // ...S3... + // goto done + // .default: + // y := x + // ...SD... + // goto done + // .done: + if s.Init != nil { + b.stmt(fn, s.Init) + } + + var x Value + switch ass := s.Assign.(type) { + case *ast.ExprStmt: // x.(type) + x = b.expr(fn, unparen(ass.X).(*ast.TypeAssertExpr).X) + case *ast.AssignStmt: // y := x.(type) + x = b.expr(fn, unparen(ass.Rhs[0]).(*ast.TypeAssertExpr).X) + } + + done := fn.newBasicBlock("typeswitch.done") + if label != nil { + label._break = done + } + var default_ *ast.CaseClause + for _, clause := range s.Body.List { + cc := clause.(*ast.CaseClause) + if cc.List == nil { + default_ = cc + continue + } + body := fn.newBasicBlock("typeswitch.body") + var next *BasicBlock + var casetype types.Type + var ti Value // ti, ok := typeassert,ok x <Ti> + for _, cond := range cc.List { + next = fn.newBasicBlock("typeswitch.next") + casetype = fn.typeOf(cond) + var condv Value + if casetype == tUntypedNil { + condv = emitCompare(fn, token.EQL, x, zeroConst(x.Type()), cond.Pos()) + ti = x + } else { + yok := emitTypeTest(fn, x, casetype, cc.Case) + ti = emitExtract(fn, yok, 0) + condv = emitExtract(fn, yok, 1) + } + emitIf(fn, condv, body, next) + fn.currentBlock = next + } + if len(cc.List) != 1 { + ti = x + } + fn.currentBlock = body + b.typeCaseBody(fn, cc, ti, done) + fn.currentBlock = next + } + if default_ != nil { + b.typeCaseBody(fn, default_, x, done) + } else { + emitJump(fn, done) + } + fn.currentBlock = done +} + +func (b *builder) typeCaseBody(fn *Function, cc *ast.CaseClause, x Value, done *BasicBlock) { + if obj, ok := fn.info.Implicits[cc].(*types.Var); ok { + // In a switch y := x.(type), each case clause + // implicitly declares a distinct object y. + // In a single-type case, y has that type. + // In multi-type cases, 'case nil' and default, + // y has the same type as the interface operand. + emitStore(fn, emitLocalVar(fn, obj), x, obj.Pos()) + } + fn.targets = &targets{ + tail: fn.targets, + _break: done, + } + b.stmtList(fn, cc.Body) + fn.targets = fn.targets.tail + emitJump(fn, done) +} + +// selectStmt emits to fn code for the select statement s, optionally +// labelled by label. +func (b *builder) selectStmt(fn *Function, s *ast.SelectStmt, label *lblock) { + // A blocking select of a single case degenerates to a + // simple send or receive. + // TODO(adonovan): opt: is this optimization worth its weight? + if len(s.Body.List) == 1 { + clause := s.Body.List[0].(*ast.CommClause) + if clause.Comm != nil { + b.stmt(fn, clause.Comm) + done := fn.newBasicBlock("select.done") + if label != nil { + label._break = done + } + fn.targets = &targets{ + tail: fn.targets, + _break: done, + } + b.stmtList(fn, clause.Body) + fn.targets = fn.targets.tail + emitJump(fn, done) + fn.currentBlock = done + return + } + } + + // First evaluate all channels in all cases, and find + // the directions of each state. 
+ var states []*SelectState + blocking := true + debugInfo := fn.debugInfo() + for _, clause := range s.Body.List { + var st *SelectState + switch comm := clause.(*ast.CommClause).Comm.(type) { + case nil: // default case + blocking = false + continue + + case *ast.SendStmt: // ch<- i + ch := b.expr(fn, comm.Chan) + chtyp := typeparams.CoreType(fn.typ(ch.Type())).(*types.Chan) + st = &SelectState{ + Dir: types.SendOnly, + Chan: ch, + Send: emitConv(fn, b.expr(fn, comm.Value), chtyp.Elem()), + Pos: comm.Arrow, + } + if debugInfo { + st.DebugNode = comm + } + + case *ast.AssignStmt: // x := <-ch + recv := unparen(comm.Rhs[0]).(*ast.UnaryExpr) + st = &SelectState{ + Dir: types.RecvOnly, + Chan: b.expr(fn, recv.X), + Pos: recv.OpPos, + } + if debugInfo { + st.DebugNode = recv + } + + case *ast.ExprStmt: // <-ch + recv := unparen(comm.X).(*ast.UnaryExpr) + st = &SelectState{ + Dir: types.RecvOnly, + Chan: b.expr(fn, recv.X), + Pos: recv.OpPos, + } + if debugInfo { + st.DebugNode = recv + } + } + states = append(states, st) + } + + // We dispatch on the (fair) result of Select using a + // sequential if-else chain, in effect: + // + // idx, recvOk, r0...r_n-1 := select(...) + // if idx == 0 { // receive on channel 0 (first receive => r0) + // x, ok := r0, recvOk + // ...state0... + // } else if v == 1 { // send on channel 1 + // ...state1... + // } else { + // ...default... + // } + sel := &Select{ + States: states, + Blocking: blocking, + } + sel.setPos(s.Select) + var vars []*types.Var + vars = append(vars, varIndex, varOk) + for _, st := range states { + if st.Dir == types.RecvOnly { + chtyp := typeparams.CoreType(fn.typ(st.Chan.Type())).(*types.Chan) + vars = append(vars, anonVar(chtyp.Elem())) + } + } + sel.setType(types.NewTuple(vars...)) + + fn.emit(sel) + idx := emitExtract(fn, sel, 0) + + done := fn.newBasicBlock("select.done") + if label != nil { + label._break = done + } + + var defaultBody *[]ast.Stmt + state := 0 + r := 2 // index in 'sel' tuple of value; increments if st.Dir==RECV + for _, cc := range s.Body.List { + clause := cc.(*ast.CommClause) + if clause.Comm == nil { + defaultBody = &clause.Body + continue + } + body := fn.newBasicBlock("select.body") + next := fn.newBasicBlock("select.next") + emitIf(fn, emitCompare(fn, token.EQL, idx, intConst(int64(state)), token.NoPos), body, next) + fn.currentBlock = body + fn.targets = &targets{ + tail: fn.targets, + _break: done, + } + switch comm := clause.Comm.(type) { + case *ast.ExprStmt: // <-ch + if debugInfo { + v := emitExtract(fn, sel, r) + emitDebugRef(fn, states[state].DebugNode.(ast.Expr), v, false) + } + r++ + + case *ast.AssignStmt: // x := <-states[state].Chan + if comm.Tok == token.DEFINE { + emitLocalVar(fn, identVar(fn, comm.Lhs[0].(*ast.Ident))) + } + x := b.addr(fn, comm.Lhs[0], false) // non-escaping + v := emitExtract(fn, sel, r) + if debugInfo { + emitDebugRef(fn, states[state].DebugNode.(ast.Expr), v, false) + } + x.store(fn, v) + + if len(comm.Lhs) == 2 { // x, ok := ... + if comm.Tok == token.DEFINE { + emitLocalVar(fn, identVar(fn, comm.Lhs[1].(*ast.Ident))) + } + ok := b.addr(fn, comm.Lhs[1], false) // non-escaping + ok.store(fn, emitExtract(fn, sel, 1)) + } + r++ + } + b.stmtList(fn, clause.Body) + fn.targets = fn.targets.tail + emitJump(fn, done) + fn.currentBlock = next + state++ + } + if defaultBody != nil { + fn.targets = &targets{ + tail: fn.targets, + _break: done, + } + b.stmtList(fn, *defaultBody) + fn.targets = fn.targets.tail + } else { + // A blocking select must match some case. 
+ // (This should really be a runtime.errorString, not a string.) + fn.emit(&Panic{ + X: emitConv(fn, stringConst("blocking select matched no case"), tEface), + }) + fn.currentBlock = fn.newBasicBlock("unreachable") + } + emitJump(fn, done) + fn.currentBlock = done +} + +// forStmt emits to fn code for the for statement s, optionally +// labelled by label. +func (b *builder) forStmt(fn *Function, s *ast.ForStmt, label *lblock) { + // Use forStmtGo122 instead if it applies. + if s.Init != nil { + if assign, ok := s.Init.(*ast.AssignStmt); ok && assign.Tok == token.DEFINE { + if versions.AtLeast(fn.goversion, versions.Go1_22) { + b.forStmtGo122(fn, s, label) + return + } + } + } + + // ...init... + // jump loop + // loop: + // if cond goto body else done + // body: + // ...body... + // jump post + // post: (target of continue) + // ...post... + // jump loop + // done: (target of break) + if s.Init != nil { + b.stmt(fn, s.Init) + } + + body := fn.newBasicBlock("for.body") + done := fn.newBasicBlock("for.done") // target of 'break' + loop := body // target of back-edge + if s.Cond != nil { + loop = fn.newBasicBlock("for.loop") + } + cont := loop // target of 'continue' + if s.Post != nil { + cont = fn.newBasicBlock("for.post") + } + if label != nil { + label._break = done + label._continue = cont + } + emitJump(fn, loop) + fn.currentBlock = loop + if loop != body { + b.cond(fn, s.Cond, body, done) + fn.currentBlock = body + } + fn.targets = &targets{ + tail: fn.targets, + _break: done, + _continue: cont, + } + b.stmt(fn, s.Body) + fn.targets = fn.targets.tail + emitJump(fn, cont) + + if s.Post != nil { + fn.currentBlock = cont + b.stmt(fn, s.Post) + emitJump(fn, loop) // back-edge + } + fn.currentBlock = done +} + +// forStmtGo122 emits to fn code for the for statement s, optionally +// labelled by label. s must define its variables. +// +// This allocates once per loop iteration. This is only correct in +// GoVersions >= go1.22. +func (b *builder) forStmtGo122(fn *Function, s *ast.ForStmt, label *lblock) { + // i_outer = alloc[T] + // *i_outer = ...init... // under objects[i] = i_outer + // jump loop + // loop: + // i = phi [head: i_outer, loop: i_next] + // ...cond... // under objects[i] = i + // if cond goto body else done + // body: + // ...body... // under objects[i] = i (same as loop) + // jump post + // post: + // tmp = *i + // i_next = alloc[T] + // *i_next = tmp + // ...post... // under objects[i] = i_next + // goto loop + // done: + + init := s.Init.(*ast.AssignStmt) + startingBlocks := len(fn.Blocks) + + pre := fn.currentBlock // current block before starting + loop := fn.newBasicBlock("for.loop") // target of back-edge + body := fn.newBasicBlock("for.body") + post := fn.newBasicBlock("for.post") // target of 'continue' + done := fn.newBasicBlock("for.done") // target of 'break' + + // For each of the n loop variables, we create five SSA values, + // outer, phi, next, load, and store in pre, loop, and post. + // There is no limit on n. 
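The per-iteration allocation sketched above implements the Go 1.22 change that gives each iteration of a for statement its own copy of the declared loop variables. A small sketch of the observable difference (output depends on the file's language version):

package main

import "fmt"

func main() {
	var fns []func()
	for i := 0; i < 3; i++ {
		fns = append(fns, func() { fmt.Println(i) })
	}
	for _, f := range fns {
		f() // prints 0, 1, 2 under go1.22 semantics; 3, 3, 3 under earlier versions
	}
}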
+ type loopVar struct { + obj *types.Var + outer *Alloc + phi *Phi + load *UnOp + next *Alloc + store *Store + } + vars := make([]loopVar, len(init.Lhs)) + for i, lhs := range init.Lhs { + v := identVar(fn, lhs.(*ast.Ident)) + typ := fn.typ(v.Type()) + + fn.currentBlock = pre + outer := emitLocal(fn, typ, v.Pos(), v.Name()) + + fn.currentBlock = loop + phi := &Phi{Comment: v.Name()} + phi.pos = v.Pos() + phi.typ = outer.Type() + fn.emit(phi) + + fn.currentBlock = post + // If next is local, it reuses the address and zeroes the old value so + // load before allocating next. + load := emitLoad(fn, phi) + next := emitLocal(fn, typ, v.Pos(), v.Name()) + store := emitStore(fn, next, load, token.NoPos) + + phi.Edges = []Value{outer, next} // pre edge is emitted before post edge. + + vars[i] = loopVar{v, outer, phi, load, next, store} + } + + // ...init... under fn.objects[v] = i_outer + fn.currentBlock = pre + for _, v := range vars { + fn.vars[v.obj] = v.outer + } + const isDef = false // assign to already-allocated outers + b.assignStmt(fn, init.Lhs, init.Rhs, isDef) + if label != nil { + label._break = done + label._continue = post + } + emitJump(fn, loop) + + // ...cond... under fn.objects[v] = i + fn.currentBlock = loop + for _, v := range vars { + fn.vars[v.obj] = v.phi + } + if s.Cond != nil { + b.cond(fn, s.Cond, body, done) + } else { + emitJump(fn, body) + } + + // ...body... under fn.objects[v] = i + fn.currentBlock = body + fn.targets = &targets{ + tail: fn.targets, + _break: done, + _continue: post, + } + b.stmt(fn, s.Body) + fn.targets = fn.targets.tail + emitJump(fn, post) + + // ...post... under fn.objects[v] = i_next + for _, v := range vars { + fn.vars[v.obj] = v.next + } + fn.currentBlock = post + if s.Post != nil { + b.stmt(fn, s.Post) + } + emitJump(fn, loop) // back-edge + fn.currentBlock = done + + // For each loop variable that does not escape, + // (the common case), fuse its next cells into its + // (local) outer cell as they have disjoint live ranges. + // + // It is sufficient to test whether i_next escapes, + // because its Heap flag will be marked true if either + // the cond or post expression causes i to escape + // (because escape distributes over phi). + var nlocals int + for _, v := range vars { + if !v.next.Heap { + nlocals++ + } + } + if nlocals > 0 { + replace := make(map[Value]Value, 2*nlocals) + dead := make(map[Instruction]bool, 4*nlocals) + for _, v := range vars { + if !v.next.Heap { + replace[v.next] = v.outer + replace[v.phi] = v.outer + dead[v.phi], dead[v.next], dead[v.load], dead[v.store] = true, true, true, true + } + } + + // Replace all uses of i_next and phi with i_outer. + // Referrers have not been built for fn yet so only update Instruction operands. + // We need only look within the blocks added by the loop. + var operands []*Value // recycle storage + for _, b := range fn.Blocks[startingBlocks:] { + for _, instr := range b.Instrs { + operands = instr.Operands(operands[:0]) + for _, ptr := range operands { + k := *ptr + if v := replace[k]; v != nil { + *ptr = v + } + } + } + } + + // Remove instructions for phi, load, and store. + // lift() will remove the unused i_next *Alloc. + isDead := func(i Instruction) bool { return dead[i] } + loop.Instrs = removeInstrsIf(loop.Instrs, isDead) + post.Instrs = removeInstrsIf(post.Instrs, isDead) + } +} + +// rangeIndexed emits to fn the header for an integer-indexed loop +// over array, *array or slice value x. +// The v result is defined only if tv is non-nil. 
+// forPos is the position of the "for" token. +func (b *builder) rangeIndexed(fn *Function, x Value, tv types.Type, pos token.Pos) (k, v Value, loop, done *BasicBlock) { + // + // length = len(x) + // index = -1 + // loop: (target of continue) + // index++ + // if index < length goto body else done + // body: + // k = index + // v = x[index] + // ...body... + // jump loop + // done: (target of break) + + // Determine number of iterations. + var length Value + dt := typeparams.Deref(x.Type()) + if arr, ok := typeparams.CoreType(dt).(*types.Array); ok { + // For array or *array, the number of iterations is + // known statically thanks to the type. We avoid a + // data dependence upon x, permitting later dead-code + // elimination if x is pure, static unrolling, etc. + // Ranging over a nil *array may have >0 iterations. + // We still generate code for x, in case it has effects. + length = intConst(arr.Len()) + } else { + // length = len(x). + var c Call + c.Call.Value = makeLen(x.Type()) + c.Call.Args = []Value{x} + c.setType(tInt) + length = fn.emit(&c) + } + + index := emitLocal(fn, tInt, token.NoPos, "rangeindex") + emitStore(fn, index, intConst(-1), pos) + + loop = fn.newBasicBlock("rangeindex.loop") + emitJump(fn, loop) + fn.currentBlock = loop + + incr := &BinOp{ + Op: token.ADD, + X: emitLoad(fn, index), + Y: vOne, + } + incr.setType(tInt) + emitStore(fn, index, fn.emit(incr), pos) + + body := fn.newBasicBlock("rangeindex.body") + done = fn.newBasicBlock("rangeindex.done") + emitIf(fn, emitCompare(fn, token.LSS, incr, length, token.NoPos), body, done) + fn.currentBlock = body + + k = emitLoad(fn, index) + if tv != nil { + switch t := typeparams.CoreType(x.Type()).(type) { + case *types.Array: + instr := &Index{ + X: x, + Index: k, + } + instr.setType(t.Elem()) + instr.setPos(x.Pos()) + v = fn.emit(instr) + + case *types.Pointer: // *array + instr := &IndexAddr{ + X: x, + Index: k, + } + instr.setType(types.NewPointer(t.Elem().Underlying().(*types.Array).Elem())) + instr.setPos(x.Pos()) + v = emitLoad(fn, fn.emit(instr)) + + case *types.Slice: + instr := &IndexAddr{ + X: x, + Index: k, + } + instr.setType(types.NewPointer(t.Elem())) + instr.setPos(x.Pos()) + v = emitLoad(fn, fn.emit(instr)) + + default: + panic("rangeIndexed x:" + t.String()) + } + } + return +} + +// rangeIter emits to fn the header for a loop using +// Range/Next/Extract to iterate over map or string value x. +// tk and tv are the types of the key/value results k and v, or nil +// if the respective component is not wanted. +func (b *builder) rangeIter(fn *Function, x Value, tk, tv types.Type, pos token.Pos) (k, v Value, loop, done *BasicBlock) { + // + // it = range x + // loop: (target of continue) + // okv = next it (ok, key, value) + // ok = extract okv #0 + // if ok goto body else done + // body: + // k = extract okv #1 + // v = extract okv #2 + // ...body... 
+ // jump loop + // done: (target of break) + // + + if tk == nil { + tk = tInvalid + } + if tv == nil { + tv = tInvalid + } + + rng := &Range{X: x} + rng.setPos(pos) + rng.setType(tRangeIter) + it := fn.emit(rng) + + loop = fn.newBasicBlock("rangeiter.loop") + emitJump(fn, loop) + fn.currentBlock = loop + + okv := &Next{ + Iter: it, + IsString: isBasic(typeparams.CoreType(x.Type())), + } + okv.setType(types.NewTuple( + varOk, + newVar("k", tk), + newVar("v", tv), + )) + fn.emit(okv) + + body := fn.newBasicBlock("rangeiter.body") + done = fn.newBasicBlock("rangeiter.done") + emitIf(fn, emitExtract(fn, okv, 0), body, done) + fn.currentBlock = body + + if tk != tInvalid { + k = emitExtract(fn, okv, 1) + } + if tv != tInvalid { + v = emitExtract(fn, okv, 2) + } + return +} + +// rangeChan emits to fn the header for a loop that receives from +// channel x until it fails. +// tk is the channel's element type, or nil if the k result is +// not wanted +// pos is the position of the '=' or ':=' token. +func (b *builder) rangeChan(fn *Function, x Value, tk types.Type, pos token.Pos) (k Value, loop, done *BasicBlock) { + // + // loop: (target of continue) + // ko = <-x (key, ok) + // ok = extract ko #1 + // if ok goto body else done + // body: + // k = extract ko #0 + // ...body... + // goto loop + // done: (target of break) + + loop = fn.newBasicBlock("rangechan.loop") + emitJump(fn, loop) + fn.currentBlock = loop + recv := &UnOp{ + Op: token.ARROW, + X: x, + CommaOk: true, + } + recv.setPos(pos) + recv.setType(types.NewTuple( + newVar("k", typeparams.CoreType(x.Type()).(*types.Chan).Elem()), + varOk, + )) + ko := fn.emit(recv) + body := fn.newBasicBlock("rangechan.body") + done = fn.newBasicBlock("rangechan.done") + emitIf(fn, emitExtract(fn, ko, 1), body, done) + fn.currentBlock = body + if tk != nil { + k = emitExtract(fn, ko, 0) + } + return +} + +// rangeInt emits to fn the header for a range loop with an integer operand. +// tk is the key value's type, or nil if the k result is not wanted. +// pos is the position of the "for" token. +func (b *builder) rangeInt(fn *Function, x Value, tk types.Type, pos token.Pos) (k Value, loop, done *BasicBlock) { + // + // iter = 0 + // if 0 < x goto body else done + // loop: (target of continue) + // iter++ + // if iter < x goto body else done + // body: + // k = x + // ...body... + // jump loop + // done: (target of break) + + if isUntyped(x.Type()) { + x = emitConv(fn, x, tInt) + } + + T := x.Type() + iter := emitLocal(fn, T, token.NoPos, "rangeint.iter") + // x may be unsigned. Avoid initializing x to -1. + + body := fn.newBasicBlock("rangeint.body") + done = fn.newBasicBlock("rangeint.done") + emitIf(fn, emitCompare(fn, token.LSS, zeroConst(T), x, token.NoPos), body, done) + + loop = fn.newBasicBlock("rangeint.loop") + fn.currentBlock = loop + + incr := &BinOp{ + Op: token.ADD, + X: emitLoad(fn, iter), + Y: emitConv(fn, vOne, T), + } + incr.setType(T) + emitStore(fn, iter, fn.emit(incr), pos) + emitIf(fn, emitCompare(fn, token.LSS, incr, x, token.NoPos), body, done) + fn.currentBlock = body + + if tk != nil { + // Integer types (int, uint8, etc.) are named and + // we know that k is assignable to x when tk != nil. + // This implies tk and T are identical so no conversion is needed. + k = emitLoad(fn, iter) + } + + return +} + +// rangeStmt emits to fn code for the range statement s, optionally +// labelled by label. 
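For reference, the loop headers emitted by the helpers above correspond to the following range forms (range over a function value is handled separately by rangeFunc). A minimal sketch with illustrative values; the integer form requires go1.22 or later:

package main

import "fmt"

func main() {
	for i, v := range []string{"a", "b"} { // rangeIndexed: array, *array, slice
		fmt.Println(i, v)
	}
	for k, v := range map[string]int{"x": 1} { // rangeIter: map (strings use the same helper)
		fmt.Println(k, v)
	}
	ch := make(chan int, 1)
	ch <- 7
	close(ch)
	for v := range ch { // rangeChan: receive until the channel is closed
		fmt.Println(v)
	}
	for i := range 3 { // rangeInt: go1.22+ integer range
		fmt.Println(i)
	}
}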
+func (b *builder) rangeStmt(fn *Function, s *ast.RangeStmt, label *lblock) { + var tk, tv types.Type + if s.Key != nil && !isBlankIdent(s.Key) { + tk = fn.typeOf(s.Key) + } + if s.Value != nil && !isBlankIdent(s.Value) { + tv = fn.typeOf(s.Value) + } + + // create locals for s.Key and s.Value. + createVars := func() { + // Unlike a short variable declaration, a RangeStmt + // using := never redeclares an existing variable; it + // always creates a new one. + if tk != nil { + emitLocalVar(fn, identVar(fn, s.Key.(*ast.Ident))) + } + if tv != nil { + emitLocalVar(fn, identVar(fn, s.Value.(*ast.Ident))) + } + } + + afterGo122 := versions.AtLeast(fn.goversion, versions.Go1_22) + if s.Tok == token.DEFINE && !afterGo122 { + // pre-go1.22: If iteration variables are defined (:=), this + // occurs once outside the loop. + createVars() + } + + x := b.expr(fn, s.X) + + var k, v Value + var loop, done *BasicBlock + switch rt := typeparams.CoreType(x.Type()).(type) { + case *types.Slice, *types.Array, *types.Pointer: // *array + k, v, loop, done = b.rangeIndexed(fn, x, tv, s.For) + + case *types.Chan: + k, loop, done = b.rangeChan(fn, x, tk, s.For) + + case *types.Map: + k, v, loop, done = b.rangeIter(fn, x, tk, tv, s.For) + + case *types.Basic: + switch { + case rt.Info()&types.IsString != 0: + k, v, loop, done = b.rangeIter(fn, x, tk, tv, s.For) + + case rt.Info()&types.IsInteger != 0: + k, loop, done = b.rangeInt(fn, x, tk, s.For) + + default: + panic("Cannot range over basic type: " + rt.String()) + } + + case *types.Signature: + // Special case rewrite (fn.goversion >= go1.23): + // for x := range f { ... } + // into + // f(func(x T) bool { ... }) + b.rangeFunc(fn, x, tk, tv, s, label) + return + + default: + panic("Cannot range over: " + rt.String()) + } + + if s.Tok == token.DEFINE && afterGo122 { + // go1.22: If iteration variables are defined (:=), this occurs inside the loop. + createVars() + } + + // Evaluate both LHS expressions before we update either. + var kl, vl lvalue + if tk != nil { + kl = b.addr(fn, s.Key, false) // non-escaping + } + if tv != nil { + vl = b.addr(fn, s.Value, false) // non-escaping + } + if tk != nil { + kl.store(fn, k) + } + if tv != nil { + vl.store(fn, v) + } + + if label != nil { + label._break = done + label._continue = loop + } + + fn.targets = &targets{ + tail: fn.targets, + _break: done, + _continue: loop, + } + b.stmt(fn, s.Body) + fn.targets = fn.targets.tail + emitJump(fn, loop) // back-edge + fn.currentBlock = done +} + +// rangeFunc emits to fn code for the range-over-func rng.Body of the iterator +// function x, optionally labelled by label. It creates a new anonymous function +// yield for rng and builds the function. +func (b *builder) rangeFunc(fn *Function, x Value, tk, tv types.Type, rng *ast.RangeStmt, label *lblock) { + // Consider the SSA code for the outermost range-over-func in fn: + // + // func fn(...) (ret R) { + // ... + // for k, v = range x { + // ... + // } + // ... + // } + // + // The code emitted into fn will look something like this. + // + // loop: + // jump := READY + // y := make closure yield [ret, deferstack, jump, k, v] + // x(y) + // switch jump { + // [see resuming execution] + // } + // goto done + // done: + // ... 
+ // + // where yield is a new synthetic yield function: + // + // func yield(_k tk, _v tv) bool + // free variables: [ret, stack, jump, k, v] + // { + // entry: + // if jump != READY then goto invalid else valid + // invalid: + // panic("iterator called when it is not in a ready state") + // valid: + // jump = BUSY + // k = _k + // v = _v + // ... + // cont: + // jump = READY + // return true + // } + // + // Yield state: + // + // Each range loop has an associated jump variable that records + // the state of the iterator. A yield function is initially + // in a READY (0) and callable state. If the yield function is called + // and is not in READY state, it panics. When it is called in a callable + // state, it becomes BUSY. When execution reaches the end of the body + // of the loop (or a continue statement targeting the loop is executed), + // the yield function returns true and resumes being in a READY state. + // After the iterator function x(y) returns, then if the yield function + // is in a READY state, the yield enters the DONE state. + // + // Each lowered control statement (break X, continue X, goto Z, or return) + // that exits the loop sets the variable to a unique positive EXIT value, + // before returning false from the yield function. + // + // If the yield function returns abruptly due to a panic or GoExit, + // it remains in a BUSY state. The generated code asserts that, after + // the iterator call x(y) returns normally, the jump variable state + // is DONE. + // + // Resuming execution: + // + // The code generated for the range statement checks the jump + // variable to determine how to resume execution. + // + // switch jump { + // case BUSY: panic("...") + // case DONE: goto done + // case READY: state = DONE; goto done + // case 123: ... // action for exit 123. + // case 456: ... // action for exit 456. + // ... + // } + // + // Forward goto statements within a yield are jumps to labels that + // have not yet been traversed in fn. They may be in the Body of the + // function. What we emit for these is: + // + // goto target + // target: + // ... + // + // We leave an unresolved exit in yield.exits to check at the end + // of building yield if it encountered target in the body. If it + // encountered target, no additional work is required. Otherwise, + // the yield emits a new early exit in the basic block for target. + // We expect that blockopt will fuse the early exit into the case + // block later. The unresolved exit is then added to yield.parent.exits. + + loop := fn.newBasicBlock("rangefunc.loop") + done := fn.newBasicBlock("rangefunc.done") + + // These are targets within y. + fn.targets = &targets{ + tail: fn.targets, + _break: done, + // _continue is within y. 
+ } + if label != nil { + label._break = done + // _continue is within y + } + + emitJump(fn, loop) + fn.currentBlock = loop + + // loop: + // jump := READY + + anonIdx := len(fn.AnonFuncs) + + jump := newVar(fmt.Sprintf("jump$%d", anonIdx+1), tInt) + emitLocalVar(fn, jump) // zero value is READY + + xsig := typeparams.CoreType(x.Type()).(*types.Signature) + ysig := typeparams.CoreType(xsig.Params().At(0).Type()).(*types.Signature) + + /* synthetic yield function for body of range-over-func loop */ + y := &Function{ + name: fmt.Sprintf("%s$%d", fn.Name(), anonIdx+1), + Signature: ysig, + Synthetic: "range-over-func yield", + pos: rangePosition(rng), + parent: fn, + anonIdx: int32(len(fn.AnonFuncs)), + Pkg: fn.Pkg, + Prog: fn.Prog, + syntax: rng, + info: fn.info, + goversion: fn.goversion, + build: (*builder).buildYieldFunc, + topLevelOrigin: nil, + typeparams: fn.typeparams, + typeargs: fn.typeargs, + subst: fn.subst, + jump: jump, + deferstack: fn.deferstack, + returnVars: fn.returnVars, // use the parent's return variables + uniq: fn.uniq, // start from parent's unique values + } + + // If the RangeStmt has a label, this is how it is passed to buildYieldFunc. + if label != nil { + y.lblocks = map[*types.Label]*lblock{label.label: nil} + } + fn.AnonFuncs = append(fn.AnonFuncs, y) + + // Build y immediately. It may: + // * cause fn's locals to escape, and + // * create new exit nodes in exits. + // (y is not marked 'built' until the end of the enclosing FuncDecl.) + unresolved := len(fn.exits) + y.build(b, y) + fn.uniq = y.uniq // resume after y's unique values + + // Emit the call of y. + // c := MakeClosure y + // x(c) + c := &MakeClosure{Fn: y} + c.setType(ysig) + for _, fv := range y.FreeVars { + c.Bindings = append(c.Bindings, fv.outer) + fv.outer = nil + } + fn.emit(c) + call := Call{ + Call: CallCommon{ + Value: x, + Args: []Value{c}, + pos: token.NoPos, + }, + } + call.setType(xsig.Results()) + fn.emit(&call) + + exits := fn.exits[unresolved:] + b.buildYieldResume(fn, jump, exits, done) + + emitJump(fn, done) + fn.currentBlock = done +} + +// buildYieldResume emits to fn code for how to resume execution once a call to +// the iterator function over the yield function returns x(y). It does this by building +// a switch over the value of jump for when it is READY, BUSY, or EXIT(id). +func (b *builder) buildYieldResume(fn *Function, jump *types.Var, exits []*exit, done *BasicBlock) { + // v := *jump + // switch v { + // case BUSY: panic("...") + // case READY: jump = DONE; goto done + // case EXIT(a): ... + // case EXIT(b): ... + // ... 
+ // } + v := emitLoad(fn, fn.lookup(jump, false)) + + // case BUSY: panic("...") + isbusy := fn.newBasicBlock("rangefunc.resume.busy") + ifready := fn.newBasicBlock("rangefunc.resume.ready.check") + emitIf(fn, emitCompare(fn, token.EQL, v, jBusy, token.NoPos), isbusy, ifready) + fn.currentBlock = isbusy + fn.emit(&Panic{ + X: emitConv(fn, stringConst("iterator call did not preserve panic"), tEface), + }) + fn.currentBlock = ifready + + // case READY: jump = DONE; goto done + isready := fn.newBasicBlock("rangefunc.resume.ready") + ifexit := fn.newBasicBlock("rangefunc.resume.exits") + emitIf(fn, emitCompare(fn, token.EQL, v, jReady, token.NoPos), isready, ifexit) + fn.currentBlock = isready + storeVar(fn, jump, jDone, token.NoPos) + emitJump(fn, done) + fn.currentBlock = ifexit + + for _, e := range exits { + id := intConst(e.id) + + // case EXIT(id): { /* do e */ } + cond := emitCompare(fn, token.EQL, v, id, e.pos) + matchb := fn.newBasicBlock("rangefunc.resume.match") + cndb := fn.newBasicBlock("rangefunc.resume.cnd") + emitIf(fn, cond, matchb, cndb) + fn.currentBlock = matchb + + // Cases to fill in the { /* do e */ } bit. + switch { + case e.label != nil: // forward goto? + // case EXIT(id): goto lb // label + lb := fn.lblockOf(e.label) + // Do not mark lb as resolved. + // If fn does not contain label, lb remains unresolved and + // fn must itself be a range-over-func function. lb will be: + // lb: + // fn.jump = id + // return false + emitJump(fn, lb._goto) + + case e.to != fn: // e jumps to an ancestor of fn? + // case EXIT(id): { fn.jump = id; return false } + // fn is a range-over-func function. + storeVar(fn, fn.jump, id, token.NoPos) + fn.emit(&Return{Results: []Value{vFalse}, pos: e.pos}) + + case e.block == nil && e.label == nil: // return from fn? + // case EXIT(id): { return ... } + fn.emit(new(RunDefers)) + results := make([]Value, len(fn.results)) + for i, r := range fn.results { + results[i] = emitLoad(fn, r) + } + fn.emit(&Return{Results: results, pos: e.pos}) + + case e.block != nil: + // case EXIT(id): goto block + emitJump(fn, e.block) + + default: + panic("unreachable") + } + fn.currentBlock = cndb + } +} + +// stmt lowers statement s to SSA form, emitting code to fn. +func (b *builder) stmt(fn *Function, _s ast.Stmt) { + // The label of the current statement. If non-nil, its _goto + // target is always set; its _break and _continue are set only + // within the body of switch/typeswitch/select/for/range. + // It is effectively an additional default-nil parameter of stmt(). + var label *lblock +start: + switch s := _s.(type) { + case *ast.EmptyStmt: + // ignore. (Usually removed by gofmt.) + + case *ast.DeclStmt: // Con, Var or Typ + d := s.Decl.(*ast.GenDecl) + if d.Tok == token.VAR { + for _, spec := range d.Specs { + if vs, ok := spec.(*ast.ValueSpec); ok { + b.localValueSpec(fn, vs) + } + } + } + + case *ast.LabeledStmt: + if s.Label.Name == "_" { + // Blank labels can't be the target of a goto, break, + // or continue statement, so we don't need a new block. 
+ _s = s.Stmt + goto start + } + label = fn.lblockOf(fn.label(s.Label)) + label.resolved = true + emitJump(fn, label._goto) + fn.currentBlock = label._goto + _s = s.Stmt + goto start // effectively: tailcall stmt(fn, s.Stmt, label) + + case *ast.ExprStmt: + b.expr(fn, s.X) + + case *ast.SendStmt: + chtyp := typeparams.CoreType(fn.typeOf(s.Chan)).(*types.Chan) + fn.emit(&Send{ + Chan: b.expr(fn, s.Chan), + X: emitConv(fn, b.expr(fn, s.Value), chtyp.Elem()), + pos: s.Arrow, + }) + + case *ast.IncDecStmt: + op := token.ADD + if s.Tok == token.DEC { + op = token.SUB + } + loc := b.addr(fn, s.X, false) + b.assignOp(fn, loc, NewConst(constant.MakeInt64(1), loc.typ()), op, s.Pos()) + + case *ast.AssignStmt: + switch s.Tok { + case token.ASSIGN, token.DEFINE: + b.assignStmt(fn, s.Lhs, s.Rhs, s.Tok == token.DEFINE) + + default: // +=, etc. + op := s.Tok + token.ADD - token.ADD_ASSIGN + b.assignOp(fn, b.addr(fn, s.Lhs[0], false), b.expr(fn, s.Rhs[0]), op, s.Pos()) + } + + case *ast.GoStmt: + // The "intrinsics" new/make/len/cap are forbidden here. + // panic is treated like an ordinary function call. + v := Go{pos: s.Go} + b.setCall(fn, s.Call, &v.Call) + fn.emit(&v) + + case *ast.DeferStmt: + // The "intrinsics" new/make/len/cap are forbidden here. + // panic is treated like an ordinary function call. + deferstack := emitLoad(fn, fn.lookup(fn.deferstack, false)) + v := Defer{pos: s.Defer, DeferStack: deferstack} + b.setCall(fn, s.Call, &v.Call) + fn.emit(&v) + + // A deferred call can cause recovery from panic, + // and control resumes at the Recover block. + createRecoverBlock(fn.source) + + case *ast.ReturnStmt: + b.returnStmt(fn, s) + + case *ast.BranchStmt: + b.branchStmt(fn, s) + + case *ast.BlockStmt: + b.stmtList(fn, s.List) + + case *ast.IfStmt: + if s.Init != nil { + b.stmt(fn, s.Init) + } + then := fn.newBasicBlock("if.then") + done := fn.newBasicBlock("if.done") + els := done + if s.Else != nil { + els = fn.newBasicBlock("if.else") + } + b.cond(fn, s.Cond, then, els) + fn.currentBlock = then + b.stmt(fn, s.Body) + emitJump(fn, done) + + if s.Else != nil { + fn.currentBlock = els + b.stmt(fn, s.Else) + emitJump(fn, done) + } + + fn.currentBlock = done + + case *ast.SwitchStmt: + b.switchStmt(fn, s, label) + + case *ast.TypeSwitchStmt: + b.typeSwitchStmt(fn, s, label) + + case *ast.SelectStmt: + b.selectStmt(fn, s, label) + + case *ast.ForStmt: + b.forStmt(fn, s, label) + + case *ast.RangeStmt: + b.rangeStmt(fn, s, label) + + default: + panic(fmt.Sprintf("unexpected statement kind: %T", s)) + } +} + +func (b *builder) branchStmt(fn *Function, s *ast.BranchStmt) { + var block *BasicBlock + if s.Label == nil { + block = targetedBlock(fn, s.Tok) + } else { + target := fn.label(s.Label) + block = labelledBlock(fn, target, s.Tok) + if block == nil { // forward goto + lb := fn.lblockOf(target) + block = lb._goto // jump to lb._goto + if fn.jump != nil { + // fn is a range-over-func and the goto may exit fn. + // Create an exit and resolve it at the end of + // builder.buildYieldFunc. + labelExit(fn, target, s.Pos()) + } + } + } + to := block.parent + + if to == fn { + emitJump(fn, block) + } else { // break outside of fn. 
+ // fn must be a range-over-func + e := blockExit(fn, block, s.Pos()) + storeVar(fn, fn.jump, intConst(e.id), e.pos) + fn.emit(&Return{Results: []Value{vFalse}, pos: e.pos}) + } + fn.currentBlock = fn.newBasicBlock("unreachable") +} + +func (b *builder) returnStmt(fn *Function, s *ast.ReturnStmt) { + var results []Value + + sig := fn.source.Signature // signature of the enclosing source function + + // Convert return operands to result type. + if len(s.Results) == 1 && sig.Results().Len() > 1 { + // Return of one expression in a multi-valued function. + tuple := b.exprN(fn, s.Results[0]) + ttuple := tuple.Type().(*types.Tuple) + for i, n := 0, ttuple.Len(); i < n; i++ { + results = append(results, + emitConv(fn, emitExtract(fn, tuple, i), + sig.Results().At(i).Type())) + } + } else { + // 1:1 return, or no-arg return in non-void function. + for i, r := range s.Results { + v := emitConv(fn, b.expr(fn, r), sig.Results().At(i).Type()) + results = append(results, v) + } + } + + // Store the results. + for i, r := range results { + var result Value // fn.source.result[i] conceptually + if fn == fn.source { + result = fn.results[i] + } else { // lookup needed? + result = fn.lookup(fn.returnVars[i], false) + } + emitStore(fn, result, r, s.Return) + } + + if fn.jump != nil { + // Return from body of a range-over-func. + // The return statement is syntactically within the loop, + // but the generated code is in the 'switch jump {...}' after it. + e := returnExit(fn, s.Pos()) + storeVar(fn, fn.jump, intConst(e.id), e.pos) + fn.emit(&Return{Results: []Value{vFalse}, pos: e.pos}) + fn.currentBlock = fn.newBasicBlock("unreachable") + return + } + + // Run function calls deferred in this + // function when explicitly returning from it. + fn.emit(new(RunDefers)) + // Reload (potentially) named result variables to form the result tuple. + results = results[:0] + for _, nr := range fn.results { + results = append(results, emitLoad(fn, nr)) + } + fn.emit(&Return{Results: results, pos: s.Return}) + fn.currentBlock = fn.newBasicBlock("unreachable") +} + +// A buildFunc is a strategy for building the SSA body for a function. +type buildFunc = func(*builder, *Function) + +// iterate causes all created but unbuilt functions to be built. As +// this may create new methods, the process is iterated until it +// converges. +// +// Waits for any dependencies to finish building. +func (b *builder) iterate() { + for ; b.finished < len(b.fns); b.finished++ { + fn := b.fns[b.finished] + b.buildFunction(fn) + } + + b.buildshared.markDone() + b.buildshared.wait() +} + +// buildFunction builds SSA code for the body of function fn. Idempotent. +func (b *builder) buildFunction(fn *Function) { + if fn.build != nil { + assert(fn.parent == nil, "anonymous functions should not be built by buildFunction()") + + if fn.Prog.mode&LogSource != 0 { + defer logStack("build %s @ %s", fn, fn.Prog.Fset.Position(fn.pos))() + } + fn.build(b, fn) + fn.done() + } +} + +// buildParamsOnly builds fn.Params from fn.Signature, but does not build fn.Body. +func (b *builder) buildParamsOnly(fn *Function) { + // For external (C, asm) functions or functions loaded from + // export data, we must set fn.Params even though there is no + // body code to reference them. + if recv := fn.Signature.Recv(); recv != nil { + fn.addParamVar(recv) + } + params := fn.Signature.Params() + for i, n := 0, params.Len(); i < n; i++ { + fn.addParamVar(params.At(i)) + } +} + +// buildFromSyntax builds fn.Body from fn.syntax, which must be non-nil. 
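The return lowering above turns a single multi-valued call returned from a multi-result function into per-component extractions and conversions. The source form that takes this path looks roughly like the following (names are illustrative):

package main

import "fmt"

func pair() (int, error) { return 3, nil }

// One expression returned for two declared results: the builder
// extracts each component of the call's tuple and converts it to
// the corresponding result type before storing it.
func wrap() (int, error) { return pair() }

func main() {
	n, err := wrap()
	fmt.Println(n, err)
}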
+func (b *builder) buildFromSyntax(fn *Function) { + var ( + recvField *ast.FieldList + body *ast.BlockStmt + functype *ast.FuncType + ) + switch syntax := fn.syntax.(type) { + case *ast.FuncDecl: + functype = syntax.Type + recvField = syntax.Recv + body = syntax.Body + if body == nil { + b.buildParamsOnly(fn) // no body (non-Go function) + return + } + case *ast.FuncLit: + functype = syntax.Type + body = syntax.Body + case nil: + panic("no syntax") + default: + panic(syntax) // unexpected syntax + } + fn.source = fn + fn.startBody() + fn.createSyntacticParams(recvField, functype) + fn.createDeferStack() + b.stmt(fn, body) + if cb := fn.currentBlock; cb != nil && (cb == fn.Blocks[0] || cb == fn.Recover || cb.Preds != nil) { + // Control fell off the end of the function's body block. + // + // Block optimizations eliminate the current block, if + // unreachable. It is a builder invariant that + // if this no-arg return is ill-typed for + // fn.Signature.Results, this block must be + // unreachable. The sanity checker checks this. + fn.emit(new(RunDefers)) + fn.emit(new(Return)) + } + fn.finishBody() +} + +// buildYieldFunc builds the body of the yield function created +// from a range-over-func *ast.RangeStmt. +func (b *builder) buildYieldFunc(fn *Function) { + // See builder.rangeFunc for detailed documentation on how fn is set up. + // + // In psuedo-Go this roughly builds: + // func yield(_k tk, _v tv) bool { + // if jump != READY { panic("yield function called after range loop exit") } + // jump = BUSY + // k, v = _k, _v // assign the iterator variable (if needed) + // ... // rng.Body + // continue: + // jump = READY + // return true + // } + s := fn.syntax.(*ast.RangeStmt) + fn.source = fn.parent.source + fn.startBody() + params := fn.Signature.Params() + for i := 0; i < params.Len(); i++ { + fn.addParamVar(params.At(i)) + } + + // Initial targets + ycont := fn.newBasicBlock("yield-continue") + // lblocks is either {} or is {label: nil} where label is the label of syntax. + for label := range fn.lblocks { + fn.lblocks[label] = &lblock{ + label: label, + resolved: true, + _goto: ycont, + _continue: ycont, + // `break label` statement targets fn.parent.targets._break + } + } + fn.targets = &targets{ + _continue: ycont, + // `break` statement targets fn.parent.targets._break. + } + + // continue: + // jump = READY + // return true + saved := fn.currentBlock + fn.currentBlock = ycont + storeVar(fn, fn.jump, jReady, s.Body.Rbrace) + // A yield function's own deferstack is always empty, so rundefers is not needed. + fn.emit(&Return{Results: []Value{vTrue}, pos: token.NoPos}) + + // Emit header: + // + // if jump != READY { panic("yield iterator accessed after exit") } + // jump = BUSY + // k, v = _k, _v + fn.currentBlock = saved + yloop := fn.newBasicBlock("yield-loop") + invalid := fn.newBasicBlock("yield-invalid") + + jumpVal := emitLoad(fn, fn.lookup(fn.jump, true)) + emitIf(fn, emitCompare(fn, token.EQL, jumpVal, jReady, token.NoPos), yloop, invalid) + fn.currentBlock = invalid + fn.emit(&Panic{ + X: emitConv(fn, stringConst("yield function called after range loop exit"), tEface), + }) + + fn.currentBlock = yloop + storeVar(fn, fn.jump, jBusy, s.Body.Rbrace) + + // Initialize k and v from params. 
+ var tk, tv types.Type + if s.Key != nil && !isBlankIdent(s.Key) { + tk = fn.typeOf(s.Key) // fn.parent.typeOf is identical + } + if s.Value != nil && !isBlankIdent(s.Value) { + tv = fn.typeOf(s.Value) + } + if s.Tok == token.DEFINE { + if tk != nil { + emitLocalVar(fn, identVar(fn, s.Key.(*ast.Ident))) + } + if tv != nil { + emitLocalVar(fn, identVar(fn, s.Value.(*ast.Ident))) + } + } + var k, v Value + if len(fn.Params) > 0 { + k = fn.Params[0] + } + if len(fn.Params) > 1 { + v = fn.Params[1] + } + var kl, vl lvalue + if tk != nil { + kl = b.addr(fn, s.Key, false) // non-escaping + } + if tv != nil { + vl = b.addr(fn, s.Value, false) // non-escaping + } + if tk != nil { + kl.store(fn, k) + } + if tv != nil { + vl.store(fn, v) + } + + // Build the body of the range loop. + b.stmt(fn, s.Body) + if cb := fn.currentBlock; cb != nil && (cb == fn.Blocks[0] || cb == fn.Recover || cb.Preds != nil) { + // Control fell off the end of the function's body block. + // Block optimizations eliminate the current block, if + // unreachable. + emitJump(fn, ycont) + } + + // Clean up exits and promote any unresolved exits to fn.parent. + for _, e := range fn.exits { + if e.label != nil { + lb := fn.lblocks[e.label] + if lb.resolved { + // label was resolved. Do not turn lb into an exit. + // e does not need to be handled by the parent. + continue + } + + // _goto becomes an exit. + // _goto: + // jump = id + // return false + fn.currentBlock = lb._goto + id := intConst(e.id) + storeVar(fn, fn.jump, id, e.pos) + fn.emit(&Return{Results: []Value{vFalse}, pos: e.pos}) + } + + if e.to != fn { // e needs to be handled by the parent too. + fn.parent.exits = append(fn.parent.exits, e) + } + } + + fn.finishBody() +} + +// addRuntimeType records t as a runtime type, +// along with all types derivable from it using reflection. +// +// Acquires prog.runtimeTypesMu. +func addRuntimeType(prog *Program, t types.Type) { + prog.runtimeTypesMu.Lock() + defer prog.runtimeTypesMu.Unlock() + forEachReachable(&prog.MethodSets, t, func(t types.Type) bool { + prev, _ := prog.runtimeTypes.Set(t, true).(bool) + return !prev // already seen? + }) +} + +// Build calls Package.Build for each package in prog. +// Building occurs in parallel unless the BuildSerially mode flag was set. +// +// Build is intended for whole-program analysis; a typical compiler +// need only build a single package. +// +// Build is idempotent and thread-safe. +func (prog *Program) Build() { + var wg sync.WaitGroup + for _, p := range prog.packages { + if prog.mode&BuildSerially != 0 { + p.Build() + } else { + wg.Add(1) + cpuLimit <- unit{} // acquire a token + go func(p *Package) { + p.Build() + wg.Done() + <-cpuLimit // release a token + }(p) + } + } + wg.Wait() +} + +// cpuLimit is a counting semaphore to limit CPU parallelism. +var cpuLimit = make(chan unit, runtime.GOMAXPROCS(0)) + +// Build builds SSA code for all functions and vars in package p. +// +// CreatePackage must have been called for all of p's direct imports +// (and hence its direct imports must have been error-free). It is not +// necessary to call CreatePackage for indirect dependencies. +// Functions will be created for all necessary methods in those +// packages on demand. +// +// Build is idempotent and thread-safe. +func (p *Package) Build() { p.buildOnce.Do(p.build) } + +func (p *Package) build() { + if p.info == nil { + return // synthetic package, e.g. 
"testmain" + } + if p.Prog.mode&LogSource != 0 { + defer logStack("build %s", p)() + } + + b := builder{fns: p.created} + b.iterate() + + // We no longer need transient information: ASTs or go/types deductions. + p.info = nil + p.created = nil + p.files = nil + p.initVersion = nil + + if p.Prog.mode&SanityCheckFunctions != 0 { + sanityCheckPackage(p) + } +} + +// buildPackageInit builds fn.Body for the synthetic package initializer. +func (b *builder) buildPackageInit(fn *Function) { + p := fn.Pkg + fn.startBody() + + var done *BasicBlock + + if p.Prog.mode&BareInits == 0 { + // Make init() skip if package is already initialized. + initguard := p.Var("init$guard") + doinit := fn.newBasicBlock("init.start") + done = fn.newBasicBlock("init.done") + emitIf(fn, emitLoad(fn, initguard), done, doinit) + fn.currentBlock = doinit + emitStore(fn, initguard, vTrue, token.NoPos) + + // Call the init() function of each package we import. + for _, pkg := range p.Pkg.Imports() { + prereq := p.Prog.packages[pkg] + if prereq == nil { + panic(fmt.Sprintf("Package(%q).Build(): unsatisfied import: Program.CreatePackage(%q) was not called", p.Pkg.Path(), pkg.Path())) + } + var v Call + v.Call.Value = prereq.init + v.Call.pos = fn.pos + v.setType(types.NewTuple()) + fn.emit(&v) + } + } + + // Initialize package-level vars in correct order. + if len(p.info.InitOrder) > 0 && len(p.files) == 0 { + panic("no source files provided for package. cannot initialize globals") + } + + for _, varinit := range p.info.InitOrder { + if fn.Prog.mode&LogSource != 0 { + fmt.Fprintf(os.Stderr, "build global initializer %v @ %s\n", + varinit.Lhs, p.Prog.Fset.Position(varinit.Rhs.Pos())) + } + // Initializers for global vars are evaluated in dependency + // order, but may come from arbitrary files of the package + // with different versions, so we transiently update + // fn.goversion for each one. (Since init is a synthetic + // function it has no syntax of its own that needs a version.) + fn.goversion = p.initVersion[varinit.Rhs] + if len(varinit.Lhs) == 1 { + // 1:1 initialization: var x, y = a(), b() + var lval lvalue + if v := varinit.Lhs[0]; v.Name() != "_" { + lval = &address{addr: p.objects[v].(*Global), pos: v.Pos()} + } else { + lval = blank{} + } + b.assign(fn, lval, varinit.Rhs, true, nil) + } else { + // n:1 initialization: var x, y := f() + tuple := b.exprN(fn, varinit.Rhs) + for i, v := range varinit.Lhs { + if v.Name() == "_" { + continue + } + emitStore(fn, p.objects[v].(*Global), emitExtract(fn, tuple, i), v.Pos()) + } + } + } + + // The rest of the init function is synthetic: + // no syntax, info, goversion. + fn.info = nil + fn.goversion = "" + + // Call all of the declared init() functions in source order. + for _, file := range p.files { + for _, decl := range file.Decls { + if decl, ok := decl.(*ast.FuncDecl); ok { + id := decl.Name + if !isBlankIdent(id) && id.Name == "init" && decl.Recv == nil { + declaredInit := p.objects[p.info.Defs[id]].(*Function) + var v Call + v.Call.Value = declaredInit + v.setType(types.NewTuple()) + p.init.emit(&v) + } + } + } + } + + // Finish up init(). + if p.Prog.mode&BareInits == 0 { + emitJump(fn, done) + fn.currentBlock = done + } + fn.emit(new(Return)) + fn.finishBody() +} diff --git a/vendor/golang.org/x/tools/go/ssa/const.go b/vendor/golang.org/x/tools/go/ssa/const.go new file mode 100644 index 0000000..2a4e0dd --- /dev/null +++ b/vendor/golang.org/x/tools/go/ssa/const.go @@ -0,0 +1,232 @@ +// Copyright 2013 The Go Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package ssa + +// This file defines the Const SSA value type. + +import ( + "fmt" + "go/constant" + "go/token" + "go/types" + "strconv" + "strings" + + "golang.org/x/tools/internal/aliases" + "golang.org/x/tools/internal/typeparams" +) + +// NewConst returns a new constant of the specified value and type. +// val must be valid according to the specification of Const.Value. +func NewConst(val constant.Value, typ types.Type) *Const { + if val == nil { + switch soleTypeKind(typ) { + case types.IsBoolean: + val = constant.MakeBool(false) + case types.IsInteger: + val = constant.MakeInt64(0) + case types.IsString: + val = constant.MakeString("") + } + } + return &Const{typ, val} +} + +// soleTypeKind returns a BasicInfo for which constant.Value can +// represent all zero values for the types in the type set. +// +// types.IsBoolean for false is a representative. +// types.IsInteger for 0 +// types.IsString for "" +// 0 otherwise. +func soleTypeKind(typ types.Type) types.BasicInfo { + // State records the set of possible zero values (false, 0, ""). + // Candidates (perhaps all) are eliminated during the type-set + // iteration, which executes at least once. + state := types.IsBoolean | types.IsInteger | types.IsString + underIs(typeSetOf(typ), func(ut types.Type) bool { + var c types.BasicInfo + if t, ok := ut.(*types.Basic); ok { + c = t.Info() + } + if c&types.IsNumeric != 0 { // int/float/complex + c = types.IsInteger + } + state = state & c + return state != 0 + }) + return state +} + +// intConst returns an 'int' constant that evaluates to i. +// (i is an int64 in case the host is narrower than the target.) +func intConst(i int64) *Const { + return NewConst(constant.MakeInt64(i), tInt) +} + +// stringConst returns a 'string' constant that evaluates to s. +func stringConst(s string) *Const { + return NewConst(constant.MakeString(s), tString) +} + +// zeroConst returns a new "zero" constant of the specified type. +func zeroConst(t types.Type) *Const { + return NewConst(nil, t) +} + +func (c *Const) RelString(from *types.Package) string { + var s string + if c.Value == nil { + s = zeroString(c.typ, from) + } else if c.Value.Kind() == constant.String { + s = constant.StringVal(c.Value) + const max = 20 + // TODO(adonovan): don't cut a rune in half. + if len(s) > max { + s = s[:max-3] + "..." // abbreviate + } + s = strconv.Quote(s) + } else { + s = c.Value.String() + } + return s + ":" + relType(c.Type(), from) +} + +// zeroString returns the string representation of the "zero" value of the type t. +func zeroString(t types.Type, from *types.Package) string { + switch t := t.(type) { + case *types.Basic: + switch { + case t.Info()&types.IsBoolean != 0: + return "false" + case t.Info()&types.IsNumeric != 0: + return "0" + case t.Info()&types.IsString != 0: + return `""` + case t.Kind() == types.UnsafePointer: + fallthrough + case t.Kind() == types.UntypedNil: + return "nil" + default: + panic(fmt.Sprint("zeroString for unexpected type:", t)) + } + case *types.Pointer, *types.Slice, *types.Interface, *types.Chan, *types.Map, *types.Signature: + return "nil" + case *types.Named, *aliases.Alias: + return zeroString(t.Underlying(), from) + case *types.Array, *types.Struct: + return relType(t, from) + "{}" + case *types.Tuple: + // Tuples are not normal values. + // We are currently format as "(t[0], ..., t[n])". Could be something else. 
+ components := make([]string, t.Len()) + for i := 0; i < t.Len(); i++ { + components[i] = zeroString(t.At(i).Type(), from) + } + return "(" + strings.Join(components, ", ") + ")" + case *types.TypeParam: + return "*new(" + relType(t, from) + ")" + } + panic(fmt.Sprint("zeroString: unexpected ", t)) +} + +func (c *Const) Name() string { + return c.RelString(nil) +} + +func (c *Const) String() string { + return c.Name() +} + +func (c *Const) Type() types.Type { + return c.typ +} + +func (c *Const) Referrers() *[]Instruction { + return nil +} + +func (c *Const) Parent() *Function { return nil } + +func (c *Const) Pos() token.Pos { + return token.NoPos +} + +// IsNil returns true if this constant is a nil value of +// a nillable reference type (pointer, slice, channel, map, or function), +// a basic interface type, or +// a type parameter all of whose possible instantiations are themselves nillable. +func (c *Const) IsNil() bool { + return c.Value == nil && nillable(c.typ) +} + +// nillable reports whether *new(T) == nil is legal for type T. +func nillable(t types.Type) bool { + if typeparams.IsTypeParam(t) { + return underIs(typeSetOf(t), func(u types.Type) bool { + // empty type set (u==nil) => any underlying types => not nillable + return u != nil && nillable(u) + }) + } + switch t.Underlying().(type) { + case *types.Pointer, *types.Slice, *types.Chan, *types.Map, *types.Signature: + return true + case *types.Interface: + return true // basic interface. + default: + return false + } +} + +// TODO(adonovan): move everything below into golang.org/x/tools/go/ssa/interp. + +// Int64 returns the numeric value of this constant truncated to fit +// a signed 64-bit integer. +func (c *Const) Int64() int64 { + switch x := constant.ToInt(c.Value); x.Kind() { + case constant.Int: + if i, ok := constant.Int64Val(x); ok { + return i + } + return 0 + case constant.Float: + f, _ := constant.Float64Val(x) + return int64(f) + } + panic(fmt.Sprintf("unexpected constant value: %T", c.Value)) +} + +// Uint64 returns the numeric value of this constant truncated to fit +// an unsigned 64-bit integer. +func (c *Const) Uint64() uint64 { + switch x := constant.ToInt(c.Value); x.Kind() { + case constant.Int: + if u, ok := constant.Uint64Val(x); ok { + return u + } + return 0 + case constant.Float: + f, _ := constant.Float64Val(x) + return uint64(f) + } + panic(fmt.Sprintf("unexpected constant value: %T", c.Value)) +} + +// Float64 returns the numeric value of this constant truncated to fit +// a float64. +func (c *Const) Float64() float64 { + x := constant.ToFloat(c.Value) // (c.Value == nil) => x.Kind() == Unknown + f, _ := constant.Float64Val(x) + return f +} + +// Complex128 returns the complex value of this constant truncated to +// fit a complex128. +func (c *Const) Complex128() complex128 { + x := constant.ToComplex(c.Value) // (c.Value == nil) => x.Kind() == Unknown + re, _ := constant.Float64Val(constant.Real(x)) + im, _ := constant.Float64Val(constant.Imag(x)) + return complex(re, im) +} diff --git a/vendor/golang.org/x/tools/go/ssa/coretype.go b/vendor/golang.org/x/tools/go/ssa/coretype.go new file mode 100644 index 0000000..8c218f9 --- /dev/null +++ b/vendor/golang.org/x/tools/go/ssa/coretype.go @@ -0,0 +1,161 @@ +// Copyright 2022 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
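As a rough usage sketch of the Const API defined above (a client constructing constants directly; assumes the golang.org/x/tools module is available):

package main

import (
	"fmt"
	"go/constant"
	"go/types"

	"golang.org/x/tools/go/ssa"
)

func main() {
	// A typed integer constant: prints "42:int 42".
	c := ssa.NewConst(constant.MakeInt64(42), types.Typ[types.Int])
	fmt.Println(c.Name(), c.Int64())

	// A zero value of a pointer type keeps a nil Value and reports IsNil.
	p := ssa.NewConst(nil, types.NewPointer(types.Typ[types.Int]))
	fmt.Println(p.IsNil()) // true
}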
+ +package ssa + +import ( + "go/types" + + "golang.org/x/tools/internal/aliases" + "golang.org/x/tools/internal/typeparams" +) + +// Utilities for dealing with core types. + +// isBytestring returns true if T has the same terms as interface{[]byte | string}. +// These act like a core type for some operations: slice expressions, append and copy. +// +// See https://go.dev/ref/spec#Core_types for the details on bytestring. +func isBytestring(T types.Type) bool { + U := T.Underlying() + if _, ok := U.(*types.Interface); !ok { + return false + } + + tset := typeSetOf(U) + if tset.Len() != 2 { + return false + } + hasBytes, hasString := false, false + underIs(tset, func(t types.Type) bool { + switch { + case isString(t): + hasString = true + case isByteSlice(t): + hasBytes = true + } + return hasBytes || hasString + }) + return hasBytes && hasString +} + +// termList is a list of types. +type termList []*types.Term // type terms of the type set +func (s termList) Len() int { return len(s) } +func (s termList) At(i int) types.Type { return s[i].Type() } + +// typeSetOf returns the type set of typ. Returns an empty typeset on an error. +func typeSetOf(typ types.Type) termList { + // This is a adaptation of x/exp/typeparams.NormalTerms which x/tools cannot depend on. + var terms []*types.Term + var err error + // typeSetOf(t) == typeSetOf(Unalias(t)) + switch typ := aliases.Unalias(typ).(type) { + case *types.TypeParam: + terms, err = typeparams.StructuralTerms(typ) + case *types.Union: + terms, err = typeparams.UnionTermSet(typ) + case *types.Interface: + terms, err = typeparams.InterfaceTermSet(typ) + default: + // Common case. + // Specializing the len=1 case to avoid a slice + // had no measurable space/time benefit. + terms = []*types.Term{types.NewTerm(false, typ)} + } + + if err != nil { + return termList(nil) + } + return termList(terms) +} + +// underIs calls f with the underlying types of the specific type terms +// of s and reports whether all calls to f returned true. If there are +// no specific terms, underIs returns the result of f(nil). +func underIs(s termList, f func(types.Type) bool) bool { + if s.Len() == 0 { + return f(nil) + } + for i := 0; i < s.Len(); i++ { + u := s.At(i).Underlying() + if !f(u) { + return false + } + } + return true +} + +// indexType returns the element type and index mode of a IndexExpr over a type. +// It returns (nil, invalid) if the type is not indexable; this should never occur in a well-typed program. +func indexType(typ types.Type) (types.Type, indexMode) { + switch U := typ.Underlying().(type) { + case *types.Array: + return U.Elem(), ixArrVar + case *types.Pointer: + if arr, ok := U.Elem().Underlying().(*types.Array); ok { + return arr.Elem(), ixVar + } + case *types.Slice: + return U.Elem(), ixVar + case *types.Map: + return U.Elem(), ixMap + case *types.Basic: + return tByte, ixValue // must be a string + case *types.Interface: + tset := typeSetOf(U) + if tset.Len() == 0 { + return nil, ixInvalid // no underlying terms or error is empty. + } + + elem, mode := indexType(tset.At(0)) + for i := 1; i < tset.Len() && mode != ixInvalid; i++ { + e, m := indexType(tset.At(i)) + if !types.Identical(elem, e) { // if type checked, just a sanity check + return nil, ixInvalid + } + // Update the mode to the most constrained address type. + mode = mode.meet(m) + } + if mode != ixInvalid { + return elem, mode + } + } + return nil, ixInvalid +} + +// An indexMode specifies the (addressing) mode of an index operand. 
+// +// Addressing mode of an index operation is based on the set of +// underlying types. +// Hasse diagram of the indexMode meet semi-lattice: +// +// ixVar ixMap +// | | +// ixArrVar | +// | | +// ixValue | +// \ / +// ixInvalid +type indexMode byte + +const ( + ixInvalid indexMode = iota // index is invalid + ixValue // index is a computed value (not addressable) + ixArrVar // like ixVar, but index operand contains an array + ixVar // index is an addressable variable + ixMap // index is a map index expression (acts like a variable on lhs, commaok on rhs of an assignment) +) + +// meet is the address type that is constrained by both x and y. +func (x indexMode) meet(y indexMode) indexMode { + if (x == ixMap || y == ixMap) && x != y { + return ixInvalid + } + // Use int representation and return min. + if x < y { + return y + } + return x +} diff --git a/vendor/golang.org/x/tools/go/ssa/create.go b/vendor/golang.org/x/tools/go/ssa/create.go new file mode 100644 index 0000000..423bce8 --- /dev/null +++ b/vendor/golang.org/x/tools/go/ssa/create.go @@ -0,0 +1,318 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package ssa + +// This file implements the CREATE phase of SSA construction. +// See builder.go for explanation. + +import ( + "fmt" + "go/ast" + "go/token" + "go/types" + "os" + "sync" + + "golang.org/x/tools/internal/versions" +) + +// NewProgram returns a new SSA Program. +// +// mode controls diagnostics and checking during SSA construction. +// +// To construct an SSA program: +// +// - Call NewProgram to create an empty Program. +// - Call CreatePackage providing typed syntax for each package +// you want to build, and call it with types but not +// syntax for each of those package's direct dependencies. +// - Call [Package.Build] on each syntax package you wish to build, +// or [Program.Build] to build all of them. +// +// See the Example tests for simple examples. +func NewProgram(fset *token.FileSet, mode BuilderMode) *Program { + return &Program{ + Fset: fset, + imported: make(map[string]*Package), + packages: make(map[*types.Package]*Package), + mode: mode, + canon: newCanonizer(), + ctxt: types.NewContext(), + } +} + +// memberFromObject populates package pkg with a member for the +// typechecker object obj. +// +// For objects from Go source code, syntax is the associated syntax +// tree (for funcs and vars only) and goversion defines the +// appropriate interpretation; they will be used during the build +// phase. 
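The construction sequence documented for NewProgram above (create a program, create packages, then build) is typically driven through go/packages together with the ssautil helper package. A rough sketch, with loading details and error handling kept minimal:

package main

import (
	"fmt"
	"log"

	"golang.org/x/tools/go/packages"
	"golang.org/x/tools/go/ssa"
	"golang.org/x/tools/go/ssa/ssautil"
)

func main() {
	cfg := &packages.Config{Mode: packages.LoadAllSyntax}
	initial, err := packages.Load(cfg, "fmt")
	if err != nil {
		log.Fatal(err)
	}
	// AllPackages calls CreatePackage for every loaded package;
	// Build then builds SSA code for all of their functions.
	prog, pkgs := ssautil.AllPackages(initial, ssa.SanityCheckFunctions)
	prog.Build()
	for _, p := range pkgs {
		fmt.Println(p.Pkg.Path())
	}
}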
+func memberFromObject(pkg *Package, obj types.Object, syntax ast.Node, goversion string) { + name := obj.Name() + switch obj := obj.(type) { + case *types.Builtin: + if pkg.Pkg != types.Unsafe { + panic("unexpected builtin object: " + obj.String()) + } + + case *types.TypeName: + if name != "_" { + pkg.Members[name] = &Type{ + object: obj, + pkg: pkg, + } + } + + case *types.Const: + c := &NamedConst{ + object: obj, + Value: NewConst(obj.Val(), obj.Type()), + pkg: pkg, + } + pkg.objects[obj] = c + if name != "_" { + pkg.Members[name] = c + } + + case *types.Var: + g := &Global{ + Pkg: pkg, + name: name, + object: obj, + typ: types.NewPointer(obj.Type()), // address + pos: obj.Pos(), + } + pkg.objects[obj] = g + if name != "_" { + pkg.Members[name] = g + } + + case *types.Func: + sig := obj.Type().(*types.Signature) + if sig.Recv() == nil && name == "init" { + pkg.ninit++ + name = fmt.Sprintf("init#%d", pkg.ninit) + } + fn := createFunction(pkg.Prog, obj, name, syntax, pkg.info, goversion) + fn.Pkg = pkg + pkg.created = append(pkg.created, fn) + pkg.objects[obj] = fn + if name != "_" && sig.Recv() == nil { + pkg.Members[name] = fn // package-level function + } + + default: // (incl. *types.Package) + panic("unexpected Object type: " + obj.String()) + } +} + +// createFunction creates a function or method. It supports both +// CreatePackage (with or without syntax) and the on-demand creation +// of methods in non-created packages based on their types.Func. +func createFunction(prog *Program, obj *types.Func, name string, syntax ast.Node, info *types.Info, goversion string) *Function { + sig := obj.Type().(*types.Signature) + + // Collect type parameters. + var tparams *types.TypeParamList + if rtparams := sig.RecvTypeParams(); rtparams.Len() > 0 { + tparams = rtparams // method of generic type + } else if sigparams := sig.TypeParams(); sigparams.Len() > 0 { + tparams = sigparams // generic function + } + + /* declared function/method (from syntax or export data) */ + fn := &Function{ + name: name, + object: obj, + Signature: sig, + build: (*builder).buildFromSyntax, + syntax: syntax, + info: info, + goversion: goversion, + pos: obj.Pos(), + Pkg: nil, // may be set by caller + Prog: prog, + typeparams: tparams, + } + if fn.syntax == nil { + fn.Synthetic = "from type information" + fn.build = (*builder).buildParamsOnly + } + if tparams.Len() > 0 { + fn.generic = new(generic) + } + return fn +} + +// membersFromDecl populates package pkg with members for each +// typechecker object (var, func, const or type) associated with the +// specified decl. 
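+//
+// For example, the declaration
+//
+//	var a, b = f()
+//
+// creates one member per name on the left-hand side and records
+// goversion for the right-hand expression in pkg.initVersion.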
+func membersFromDecl(pkg *Package, decl ast.Decl, goversion string) { + switch decl := decl.(type) { + case *ast.GenDecl: // import, const, type or var + switch decl.Tok { + case token.CONST: + for _, spec := range decl.Specs { + for _, id := range spec.(*ast.ValueSpec).Names { + memberFromObject(pkg, pkg.info.Defs[id], nil, "") + } + } + + case token.VAR: + for _, spec := range decl.Specs { + for _, rhs := range spec.(*ast.ValueSpec).Values { + pkg.initVersion[rhs] = goversion + } + for _, id := range spec.(*ast.ValueSpec).Names { + memberFromObject(pkg, pkg.info.Defs[id], spec, goversion) + } + } + + case token.TYPE: + for _, spec := range decl.Specs { + id := spec.(*ast.TypeSpec).Name + memberFromObject(pkg, pkg.info.Defs[id], nil, "") + } + } + + case *ast.FuncDecl: + id := decl.Name + memberFromObject(pkg, pkg.info.Defs[id], decl, goversion) + } +} + +// CreatePackage creates and returns an SSA Package from the +// specified type-checked, error-free file ASTs, and populates its +// Members mapping. +// +// importable determines whether this package should be returned by a +// subsequent call to ImportedPackage(pkg.Path()). +// +// The real work of building SSA form for each function is not done +// until a subsequent call to Package.Build. +// +// CreatePackage should not be called after building any package in +// the program. +func (prog *Program) CreatePackage(pkg *types.Package, files []*ast.File, info *types.Info, importable bool) *Package { + // TODO(adonovan): assert that no package has yet been built. + if pkg == nil { + panic("nil pkg") // otherwise pkg.Scope below returns types.Universe! + } + p := &Package{ + Prog: prog, + Members: make(map[string]Member), + objects: make(map[types.Object]Member), + Pkg: pkg, + syntax: info != nil, + // transient values (cleared after Package.Build) + info: info, + files: files, + initVersion: make(map[ast.Expr]string), + } + + /* synthesized package initializer */ + p.init = &Function{ + name: "init", + Signature: new(types.Signature), + Synthetic: "package initializer", + Pkg: p, + Prog: prog, + build: (*builder).buildPackageInit, + info: p.info, + goversion: "", // See Package.build for details. + } + p.Members[p.init.name] = p.init + p.created = append(p.created, p.init) + + // Allocate all package members: vars, funcs, consts and types. + if len(files) > 0 { + // Go source package. + for _, file := range files { + goversion := versions.Lang(versions.FileVersion(p.info, file)) + for _, decl := range file.Decls { + membersFromDecl(p, decl, goversion) + } + } + } else { + // GC-compiled binary package (or "unsafe") + // No code. + // No position information. + scope := p.Pkg.Scope() + for _, name := range scope.Names() { + obj := scope.Lookup(name) + memberFromObject(p, obj, nil, "") + if obj, ok := obj.(*types.TypeName); ok { + // No Unalias: aliases should not duplicate methods. + if named, ok := obj.Type().(*types.Named); ok { + for i, n := 0, named.NumMethods(); i < n; i++ { + memberFromObject(p, named.Method(i), nil, "") + } + } + } + } + } + + if prog.mode&BareInits == 0 { + // Add initializer guard variable. 
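+		// init$guard is a package-level *bool tested and set by the
+		// synthesized init function so that the package body is
+		// initialized at most once.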
+ initguard := &Global{ + Pkg: p, + name: "init$guard", + typ: types.NewPointer(tBool), + } + p.Members[initguard.Name()] = initguard + } + + if prog.mode&GlobalDebug != 0 { + p.SetDebugMode(true) + } + + if prog.mode&PrintPackages != 0 { + printMu.Lock() + p.WriteTo(os.Stdout) + printMu.Unlock() + } + + if importable { + prog.imported[p.Pkg.Path()] = p + } + prog.packages[p.Pkg] = p + + return p +} + +// printMu serializes printing of Packages/Functions to stdout. +var printMu sync.Mutex + +// AllPackages returns a new slice containing all packages created by +// prog.CreatePackage in unspecified order. +func (prog *Program) AllPackages() []*Package { + pkgs := make([]*Package, 0, len(prog.packages)) + for _, pkg := range prog.packages { + pkgs = append(pkgs, pkg) + } + return pkgs +} + +// ImportedPackage returns the importable Package whose PkgPath +// is path, or nil if no such Package has been created. +// +// A parameter to CreatePackage determines whether a package should be +// considered importable. For example, no import declaration can resolve +// to the ad-hoc main package created by 'go build foo.go'. +// +// TODO(adonovan): rethink this function and the "importable" concept; +// most packages are importable. This function assumes that all +// types.Package.Path values are unique within the ssa.Program, which is +// false---yet this function remains very convenient. +// Clients should use (*Program).Package instead where possible. +// SSA doesn't really need a string-keyed map of packages. +// +// Furthermore, the graph of packages may contain multiple variants +// (e.g. "p" vs "p as compiled for q.test"), and each has a different +// view of its dependencies. +func (prog *Program) ImportedPackage(path string) *Package { + return prog.imported[path] +} diff --git a/vendor/golang.org/x/tools/go/ssa/doc.go b/vendor/golang.org/x/tools/go/ssa/doc.go new file mode 100644 index 0000000..3310b55 --- /dev/null +++ b/vendor/golang.org/x/tools/go/ssa/doc.go @@ -0,0 +1,122 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package ssa defines a representation of the elements of Go programs +// (packages, types, functions, variables and constants) using a +// static single-assignment (SSA) form intermediate representation +// (IR) for the bodies of functions. +// +// For an introduction to SSA form, see +// http://en.wikipedia.org/wiki/Static_single_assignment_form. +// This page provides a broader reading list: +// http://www.dcs.gla.ac.uk/~jsinger/ssa.html. +// +// The level of abstraction of the SSA form is intentionally close to +// the source language to facilitate construction of source analysis +// tools. It is not intended for machine code generation. +// +// All looping, branching and switching constructs are replaced with +// unstructured control flow. Higher-level control flow constructs +// such as multi-way branch can be reconstructed as needed; see +// [golang.org/x/tools/go/ssa/ssautil.Switches] for an example. +// +// The simplest way to create the SSA representation of a package is +// to load typed syntax trees using [golang.org/x/tools/go/packages], then +// invoke the [golang.org/x/tools/go/ssa/ssautil.Packages] helper function. +// (See the package-level Examples named LoadPackages and LoadWholeProgram.) 
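+//
+// A typical sequence, sketched here with error handling omitted, is:
+//
+//	cfg := &packages.Config{Mode: packages.LoadAllSyntax}
+//	pkgs, _ := packages.Load(cfg, "./...")
+//	prog, _ := ssautil.Packages(pkgs, ssa.SanityCheckFunctions)
+//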
+// The resulting [ssa.Program] contains all the packages and their +// members, but SSA code is not created for function bodies until a +// subsequent call to [Package.Build] or [Program.Build]. +// +// The builder initially builds a naive SSA form in which all local +// variables are addresses of stack locations with explicit loads and +// stores. Registerisation of eligible locals and φ-node insertion +// using dominance and dataflow are then performed as a second pass +// called "lifting" to improve the accuracy and performance of +// subsequent analyses; this pass can be skipped by setting the +// NaiveForm builder flag. +// +// The primary interfaces of this package are: +// +// - [Member]: a named member of a Go package. +// - [Value]: an expression that yields a value. +// - [Instruction]: a statement that consumes values and performs computation. +// - [Node]: a [Value] or [Instruction] (emphasizing its membership in the SSA value graph) +// +// A computation that yields a result implements both the [Value] and +// [Instruction] interfaces. The following table shows for each +// concrete type which of these interfaces it implements. +// +// Value? Instruction? Member? +// *Alloc ✔ ✔ +// *BinOp ✔ ✔ +// *Builtin ✔ +// *Call ✔ ✔ +// *ChangeInterface ✔ ✔ +// *ChangeType ✔ ✔ +// *Const ✔ +// *Convert ✔ ✔ +// *DebugRef ✔ +// *Defer ✔ +// *Extract ✔ ✔ +// *Field ✔ ✔ +// *FieldAddr ✔ ✔ +// *FreeVar ✔ +// *Function ✔ ✔ (func) +// *Global ✔ ✔ (var) +// *Go ✔ +// *If ✔ +// *Index ✔ ✔ +// *IndexAddr ✔ ✔ +// *Jump ✔ +// *Lookup ✔ ✔ +// *MakeChan ✔ ✔ +// *MakeClosure ✔ ✔ +// *MakeInterface ✔ ✔ +// *MakeMap ✔ ✔ +// *MakeSlice ✔ ✔ +// *MapUpdate ✔ +// *MultiConvert ✔ ✔ +// *NamedConst ✔ (const) +// *Next ✔ ✔ +// *Panic ✔ +// *Parameter ✔ +// *Phi ✔ ✔ +// *Range ✔ ✔ +// *Return ✔ +// *RunDefers ✔ +// *Select ✔ ✔ +// *Send ✔ +// *Slice ✔ ✔ +// *SliceToArrayPointer ✔ ✔ +// *Store ✔ +// *Type ✔ (type) +// *TypeAssert ✔ ✔ +// *UnOp ✔ ✔ +// +// Other key types in this package include: [Program], [Package], [Function] +// and [BasicBlock]. +// +// The program representation constructed by this package is fully +// resolved internally, i.e. it does not rely on the names of Values, +// Packages, Functions, Types or BasicBlocks for the correct +// interpretation of the program. Only the identities of objects and +// the topology of the SSA and type graphs are semantically +// significant. (There is one exception: [types.Id] values, which identify field +// and method names, contain strings.) Avoidance of name-based +// operations simplifies the implementation of subsequent passes and +// can make them very efficient. Many objects are nonetheless named +// to aid in debugging, but it is not essential that the names be +// either accurate or unambiguous. The public API exposes a number of +// name-based maps for client convenience. +// +// The [golang.org/x/tools/go/ssa/ssautil] package provides various +// helper functions, for example to simplify loading a Go program into +// SSA form. +// +// TODO(adonovan): write a how-to document for all the various cases +// of trying to determine corresponding elements across the four +// domains of source locations, ast.Nodes, types.Objects, +// ssa.Values/Instructions. +package ssa // import "golang.org/x/tools/go/ssa" diff --git a/vendor/golang.org/x/tools/go/ssa/dom.go b/vendor/golang.org/x/tools/go/ssa/dom.go new file mode 100644 index 0000000..02c1ae8 --- /dev/null +++ b/vendor/golang.org/x/tools/go/ssa/dom.go @@ -0,0 +1,340 @@ +// Copyright 2013 The Go Authors. 
All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package ssa + +// This file defines algorithms related to dominance. + +// Dominator tree construction ---------------------------------------- +// +// We use the algorithm described in Lengauer & Tarjan. 1979. A fast +// algorithm for finding dominators in a flowgraph. +// http://doi.acm.org/10.1145/357062.357071 +// +// We also apply the optimizations to SLT described in Georgiadis et +// al, Finding Dominators in Practice, JGAA 2006, +// http://jgaa.info/accepted/2006/GeorgiadisTarjanWerneck2006.10.1.pdf +// to avoid the need for buckets of size > 1. + +import ( + "bytes" + "fmt" + "math/big" + "os" + "sort" +) + +// Idom returns the block that immediately dominates b: +// its parent in the dominator tree, if any. +// Neither the entry node (b.Index==0) nor recover node +// (b==b.Parent().Recover()) have a parent. +func (b *BasicBlock) Idom() *BasicBlock { return b.dom.idom } + +// Dominees returns the list of blocks that b immediately dominates: +// its children in the dominator tree. +func (b *BasicBlock) Dominees() []*BasicBlock { return b.dom.children } + +// Dominates reports whether b dominates c. +func (b *BasicBlock) Dominates(c *BasicBlock) bool { + return b.dom.pre <= c.dom.pre && c.dom.post <= b.dom.post +} + +// DomPreorder returns a new slice containing the blocks of f +// in a preorder traversal of the dominator tree. +func (f *Function) DomPreorder() []*BasicBlock { + slice := append([]*BasicBlock(nil), f.Blocks...) + sort.Slice(slice, func(i, j int) bool { + return slice[i].dom.pre < slice[j].dom.pre + }) + return slice +} + +// DomPostorder returns a new slice containing the blocks of f +// in a postorder traversal of the dominator tree. +// (This is not the same as a postdominance order.) +func (f *Function) DomPostorder() []*BasicBlock { + slice := append([]*BasicBlock(nil), f.Blocks...) + sort.Slice(slice, func(i, j int) bool { + return slice[i].dom.post < slice[j].dom.post + }) + return slice +} + +// domInfo contains a BasicBlock's dominance information. +type domInfo struct { + idom *BasicBlock // immediate dominator (parent in domtree) + children []*BasicBlock // nodes immediately dominated by this one + pre, post int32 // pre- and post-order numbering within domtree +} + +// ltState holds the working state for Lengauer-Tarjan algorithm +// (during which domInfo.pre is repurposed for CFG DFS preorder number). +type ltState struct { + // Each slice is indexed by b.Index. + sdom []*BasicBlock // b's semidominator + parent []*BasicBlock // b's parent in DFS traversal of CFG + ancestor []*BasicBlock // b's ancestor with least sdom +} + +// dfs implements the depth-first search part of the LT algorithm. +func (lt *ltState) dfs(v *BasicBlock, i int32, preorder []*BasicBlock) int32 { + preorder[i] = v + v.dom.pre = i // For now: DFS preorder of spanning tree of CFG + i++ + lt.sdom[v.Index] = v + lt.link(nil, v) + for _, w := range v.Succs { + if lt.sdom[w.Index] == nil { + lt.parent[w.Index] = v + i = lt.dfs(w, i, preorder) + } + } + return i +} + +// eval implements the EVAL part of the LT algorithm. +func (lt *ltState) eval(v *BasicBlock) *BasicBlock { + // TODO(adonovan): opt: do path compression per simple LT. + u := v + for ; lt.ancestor[v.Index] != nil; v = lt.ancestor[v.Index] { + if lt.sdom[v.Index].dom.pre < lt.sdom[u.Index].dom.pre { + u = v + } + } + return u +} + +// link implements the LINK part of the LT algorithm. 
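+// It records v as the forest ancestor of w; eval walks these
+// ancestor links (path compression is left as a TODO in eval).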
+func (lt *ltState) link(v, w *BasicBlock) { + lt.ancestor[w.Index] = v +} + +// buildDomTree computes the dominator tree of f using the LT algorithm. +// Precondition: all blocks are reachable (e.g. optimizeBlocks has been run). +func buildDomTree(f *Function) { + // The step numbers refer to the original LT paper; the + // reordering is due to Georgiadis. + + // Clear any previous domInfo. + for _, b := range f.Blocks { + b.dom = domInfo{} + } + + n := len(f.Blocks) + // Allocate space for 5 contiguous [n]*BasicBlock arrays: + // sdom, parent, ancestor, preorder, buckets. + space := make([]*BasicBlock, 5*n) + lt := ltState{ + sdom: space[0:n], + parent: space[n : 2*n], + ancestor: space[2*n : 3*n], + } + + // Step 1. Number vertices by depth-first preorder. + preorder := space[3*n : 4*n] + root := f.Blocks[0] + prenum := lt.dfs(root, 0, preorder) + recover := f.Recover + if recover != nil { + lt.dfs(recover, prenum, preorder) + } + + buckets := space[4*n : 5*n] + copy(buckets, preorder) + + // In reverse preorder... + for i := int32(n) - 1; i > 0; i-- { + w := preorder[i] + + // Step 3. Implicitly define the immediate dominator of each node. + for v := buckets[i]; v != w; v = buckets[v.dom.pre] { + u := lt.eval(v) + if lt.sdom[u.Index].dom.pre < i { + v.dom.idom = u + } else { + v.dom.idom = w + } + } + + // Step 2. Compute the semidominators of all nodes. + lt.sdom[w.Index] = lt.parent[w.Index] + for _, v := range w.Preds { + u := lt.eval(v) + if lt.sdom[u.Index].dom.pre < lt.sdom[w.Index].dom.pre { + lt.sdom[w.Index] = lt.sdom[u.Index] + } + } + + lt.link(lt.parent[w.Index], w) + + if lt.parent[w.Index] == lt.sdom[w.Index] { + w.dom.idom = lt.parent[w.Index] + } else { + buckets[i] = buckets[lt.sdom[w.Index].dom.pre] + buckets[lt.sdom[w.Index].dom.pre] = w + } + } + + // The final 'Step 3' is now outside the loop. + for v := buckets[0]; v != root; v = buckets[v.dom.pre] { + v.dom.idom = root + } + + // Step 4. Explicitly define the immediate dominator of each + // node, in preorder. + for _, w := range preorder[1:] { + if w == root || w == recover { + w.dom.idom = nil + } else { + if w.dom.idom != lt.sdom[w.Index] { + w.dom.idom = w.dom.idom.dom.idom + } + // Calculate Children relation as inverse of Idom. + w.dom.idom.dom.children = append(w.dom.idom.dom.children, w) + } + } + + pre, post := numberDomTree(root, 0, 0) + if recover != nil { + numberDomTree(recover, pre, post) + } + + // printDomTreeDot(os.Stderr, f) // debugging + // printDomTreeText(os.Stderr, root, 0) // debugging + + if f.Prog.mode&SanityCheckFunctions != 0 { + sanityCheckDomTree(f) + } +} + +// numberDomTree sets the pre- and post-order numbers of a depth-first +// traversal of the dominator tree rooted at v. These are used to +// answer dominance queries in constant time. +func numberDomTree(v *BasicBlock, pre, post int32) (int32, int32) { + v.dom.pre = pre + pre++ + for _, child := range v.dom.children { + pre, post = numberDomTree(child, pre, post) + } + v.dom.post = post + post++ + return pre, post +} + +// Testing utilities ---------------------------------------- + +// sanityCheckDomTree checks the correctness of the dominator tree +// computed by the LT algorithm by comparing against the dominance +// relation computed by a naive Kildall-style forward dataflow +// analysis (Algorithm 10.16 from the "Dragon" book). +func sanityCheckDomTree(f *Function) { + n := len(f.Blocks) + + // D[i] is the set of blocks that dominate f.Blocks[i], + // represented as a bit-set of block indices. 
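+	// That is, bit j of D[i] is set iff f.Blocks[j] dominates
+	// f.Blocks[i]; for example, the entry block's set contains
+	// only the entry block itself.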
+ D := make([]big.Int, n) + + one := big.NewInt(1) + + // all is the set of all blocks; constant. + var all big.Int + all.Set(one).Lsh(&all, uint(n)).Sub(&all, one) + + // Initialization. + for i, b := range f.Blocks { + if i == 0 || b == f.Recover { + // A root is dominated only by itself. + D[i].SetBit(&D[0], 0, 1) + } else { + // All other blocks are (initially) dominated + // by every block. + D[i].Set(&all) + } + } + + // Iteration until fixed point. + for changed := true; changed; { + changed = false + for i, b := range f.Blocks { + if i == 0 || b == f.Recover { + continue + } + // Compute intersection across predecessors. + var x big.Int + x.Set(&all) + for _, pred := range b.Preds { + x.And(&x, &D[pred.Index]) + } + x.SetBit(&x, i, 1) // a block always dominates itself. + if D[i].Cmp(&x) != 0 { + D[i].Set(&x) + changed = true + } + } + } + + // Check the entire relation. O(n^2). + // The Recover block (if any) must be treated specially so we skip it. + ok := true + for i := 0; i < n; i++ { + for j := 0; j < n; j++ { + b, c := f.Blocks[i], f.Blocks[j] + if c == f.Recover { + continue + } + actual := b.Dominates(c) + expected := D[j].Bit(i) == 1 + if actual != expected { + fmt.Fprintf(os.Stderr, "dominates(%s, %s)==%t, want %t\n", b, c, actual, expected) + ok = false + } + } + } + + preorder := f.DomPreorder() + for _, b := range f.Blocks { + if got := preorder[b.dom.pre]; got != b { + fmt.Fprintf(os.Stderr, "preorder[%d]==%s, want %s\n", b.dom.pre, got, b) + ok = false + } + } + + if !ok { + panic("sanityCheckDomTree failed for " + f.String()) + } + +} + +// Printing functions ---------------------------------------- + +// printDomTreeText prints the dominator tree as text, using indentation. +func printDomTreeText(buf *bytes.Buffer, v *BasicBlock, indent int) { + fmt.Fprintf(buf, "%*s%s\n", 4*indent, "", v) + for _, child := range v.dom.children { + printDomTreeText(buf, child, indent+1) + } +} + +// printDomTreeDot prints the dominator tree of f in AT&T GraphViz +// (.dot) format. +func printDomTreeDot(buf *bytes.Buffer, f *Function) { + fmt.Fprintln(buf, "//", f) + fmt.Fprintln(buf, "digraph domtree {") + for i, b := range f.Blocks { + v := b.dom + fmt.Fprintf(buf, "\tn%d [label=\"%s (%d, %d)\",shape=\"rectangle\"];\n", v.pre, b, v.pre, v.post) + // TODO(adonovan): improve appearance of edges + // belonging to both dominator tree and CFG. + + // Dominator tree edge. + if i != 0 { + fmt.Fprintf(buf, "\tn%d -> n%d [style=\"solid\",weight=100];\n", v.idom.dom.pre, v.pre) + } + // CFG edges. + for _, pred := range b.Preds { + fmt.Fprintf(buf, "\tn%d -> n%d [style=\"dotted\",weight=0];\n", pred.dom.pre, v.pre) + } + } + fmt.Fprintln(buf, "}") +} diff --git a/vendor/golang.org/x/tools/go/ssa/emit.go b/vendor/golang.org/x/tools/go/ssa/emit.go new file mode 100644 index 0000000..c664ff8 --- /dev/null +++ b/vendor/golang.org/x/tools/go/ssa/emit.go @@ -0,0 +1,614 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package ssa + +// Helpers for emitting SSA instructions. + +import ( + "fmt" + "go/ast" + "go/token" + "go/types" + + "golang.org/x/tools/internal/typeparams" +) + +// emitAlloc emits to f a new Alloc instruction allocating a variable +// of type typ. +// +// The caller must set Alloc.Heap=true (for an heap-allocated variable) +// or add the Alloc to f.Locals (for a frame-allocated variable). 
+// +// During building, a variable in f.Locals may have its Heap flag +// set when it is discovered that its address is taken. +// These Allocs are removed from f.Locals at the end. +// +// The builder should generally call one of the emit{New,Local,LocalVar} wrappers instead. +func emitAlloc(f *Function, typ types.Type, pos token.Pos, comment string) *Alloc { + v := &Alloc{Comment: comment} + v.setType(types.NewPointer(typ)) + v.setPos(pos) + f.emit(v) + return v +} + +// emitNew emits to f a new Alloc instruction heap-allocating a +// variable of type typ. pos is the optional source location. +func emitNew(f *Function, typ types.Type, pos token.Pos, comment string) *Alloc { + alloc := emitAlloc(f, typ, pos, comment) + alloc.Heap = true + return alloc +} + +// emitLocal creates a local var for (t, pos, comment) and +// emits an Alloc instruction for it. +// +// (Use this function or emitNew for synthetic variables; +// for source-level variables in the same function, use emitLocalVar.) +func emitLocal(f *Function, t types.Type, pos token.Pos, comment string) *Alloc { + local := emitAlloc(f, t, pos, comment) + f.Locals = append(f.Locals, local) + return local +} + +// emitLocalVar creates a local var for v and emits an Alloc instruction for it. +// Subsequent calls to f.lookup(v) return it. +// It applies the appropriate generic instantiation to the type. +func emitLocalVar(f *Function, v *types.Var) *Alloc { + alloc := emitLocal(f, f.typ(v.Type()), v.Pos(), v.Name()) + f.vars[v] = alloc + return alloc +} + +// emitLoad emits to f an instruction to load the address addr into a +// new temporary, and returns the value so defined. +func emitLoad(f *Function, addr Value) *UnOp { + v := &UnOp{Op: token.MUL, X: addr} + v.setType(typeparams.MustDeref(addr.Type())) + f.emit(v) + return v +} + +// emitDebugRef emits to f a DebugRef pseudo-instruction associating +// expression e with value v. +func emitDebugRef(f *Function, e ast.Expr, v Value, isAddr bool) { + if !f.debugInfo() { + return // debugging not enabled + } + if v == nil || e == nil { + panic("nil") + } + var obj types.Object + e = unparen(e) + if id, ok := e.(*ast.Ident); ok { + if isBlankIdent(id) { + return + } + obj = f.objectOf(id) + switch obj.(type) { + case *types.Nil, *types.Const, *types.Builtin: + return + } + } + f.emit(&DebugRef{ + X: v, + Expr: e, + IsAddr: isAddr, + object: obj, + }) +} + +// emitArith emits to f code to compute the binary operation op(x, y) +// where op is an eager shift, logical or arithmetic operation. +// (Use emitCompare() for comparisons and Builder.logicalBinop() for +// non-eager operations.) +func emitArith(f *Function, op token.Token, x, y Value, t types.Type, pos token.Pos) Value { + switch op { + case token.SHL, token.SHR: + x = emitConv(f, x, t) + // y may be signed or an 'untyped' constant. + + // There is a runtime panic if y is signed and <0. Instead of inserting a check for y<0 + // and converting to an unsigned value (like the compiler) leave y as is. + + if isUntyped(y.Type().Underlying()) { + // Untyped conversion: + // Spec https://go.dev/ref/spec#Operators: + // The right operand in a shift expression must have integer type or be an untyped constant + // representable by a value of type uint. 
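+			// For example, in x << 2 the untyped constant 2 is
+			// converted to uint here.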
+ y = emitConv(f, y, types.Typ[types.Uint]) + } + + case token.ADD, token.SUB, token.MUL, token.QUO, token.REM, token.AND, token.OR, token.XOR, token.AND_NOT: + x = emitConv(f, x, t) + y = emitConv(f, y, t) + + default: + panic("illegal op in emitArith: " + op.String()) + + } + v := &BinOp{ + Op: op, + X: x, + Y: y, + } + v.setPos(pos) + v.setType(t) + return f.emit(v) +} + +// emitCompare emits to f code compute the boolean result of +// comparison 'x op y'. +func emitCompare(f *Function, op token.Token, x, y Value, pos token.Pos) Value { + xt := x.Type().Underlying() + yt := y.Type().Underlying() + + // Special case to optimise a tagless SwitchStmt so that + // these are equivalent + // switch { case e: ...} + // switch true { case e: ... } + // if e==true { ... } + // even in the case when e's type is an interface. + // TODO(adonovan): opt: generalise to x==true, false!=y, etc. + if x == vTrue && op == token.EQL { + if yt, ok := yt.(*types.Basic); ok && yt.Info()&types.IsBoolean != 0 { + return y + } + } + + if types.Identical(xt, yt) { + // no conversion necessary + } else if isNonTypeParamInterface(x.Type()) { + y = emitConv(f, y, x.Type()) + } else if isNonTypeParamInterface(y.Type()) { + x = emitConv(f, x, y.Type()) + } else if _, ok := x.(*Const); ok { + x = emitConv(f, x, y.Type()) + } else if _, ok := y.(*Const); ok { + y = emitConv(f, y, x.Type()) + } else { + // other cases, e.g. channels. No-op. + } + + v := &BinOp{ + Op: op, + X: x, + Y: y, + } + v.setPos(pos) + v.setType(tBool) + return f.emit(v) +} + +// isValuePreserving returns true if a conversion from ut_src to +// ut_dst is value-preserving, i.e. just a change of type. +// Precondition: neither argument is a named or alias type. +func isValuePreserving(ut_src, ut_dst types.Type) bool { + // Identical underlying types? + if types.IdenticalIgnoreTags(ut_dst, ut_src) { + return true + } + + switch ut_dst.(type) { + case *types.Chan: + // Conversion between channel types? + _, ok := ut_src.(*types.Chan) + return ok + + case *types.Pointer: + // Conversion between pointers with identical base types? + _, ok := ut_src.(*types.Pointer) + return ok + } + return false +} + +// emitConv emits to f code to convert Value val to exactly type typ, +// and returns the converted value. Implicit conversions are required +// by language assignability rules in assignments, parameter passing, +// etc. +func emitConv(f *Function, val Value, typ types.Type) Value { + t_src := val.Type() + + // Identical types? Conversion is a no-op. + if types.Identical(t_src, typ) { + return val + } + ut_dst := typ.Underlying() + ut_src := t_src.Underlying() + + // Conversion to, or construction of a value of, an interface type? + if isNonTypeParamInterface(typ) { + // Interface name change? + if isValuePreserving(ut_src, ut_dst) { + c := &ChangeType{X: val} + c.setType(typ) + return f.emit(c) + } + + // Assignment from one interface type to another? + if isNonTypeParamInterface(t_src) { + c := &ChangeInterface{X: val} + c.setType(typ) + return f.emit(c) + } + + // Untyped nil constant? Return interface-typed nil constant. + if ut_src == tUntypedNil { + return zeroConst(typ) + } + + // Convert (non-nil) "untyped" literals to their default type. + if t, ok := ut_src.(*types.Basic); ok && t.Info()&types.IsUntyped != 0 { + val = emitConv(f, val, types.Default(ut_src)) + } + + // Record the types of operands to MakeInterface, if + // non-parameterized, as they are the set of runtime types. 
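+		// (This is the set later reported by Program.RuntimeTypes.)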
+ t := val.Type() + if f.typeparams.Len() == 0 || !f.Prog.isParameterized(t) { + addRuntimeType(f.Prog, t) + } + + mi := &MakeInterface{X: val} + mi.setType(typ) + return f.emit(mi) + } + + // In the common case, the typesets of src and dst are singletons + // and we emit an appropriate conversion. But if either contains + // a type parameter, the conversion may represent a cross product, + // in which case which we emit a MultiConvert. + dst_terms := typeSetOf(ut_dst) + src_terms := typeSetOf(ut_src) + + // conversionCase describes an instruction pattern that maybe emitted to + // model d <- s for d in dst_terms and s in src_terms. + // Multiple conversions can match the same pattern. + type conversionCase uint8 + const ( + changeType conversionCase = 1 << iota + sliceToArray + sliceToArrayPtr + sliceTo0Array + sliceTo0ArrayPtr + convert + ) + // classify the conversion case of a source type us to a destination type ud. + // us and ud are underlying types (not *Named or *Alias) + classify := func(us, ud types.Type) conversionCase { + // Just a change of type, but not value or representation? + if isValuePreserving(us, ud) { + return changeType + } + + // Conversion from slice to array or slice to array pointer? + if slice, ok := us.(*types.Slice); ok { + var arr *types.Array + var ptr bool + // Conversion from slice to array pointer? + switch d := ud.(type) { + case *types.Array: + arr = d + case *types.Pointer: + arr, _ = d.Elem().Underlying().(*types.Array) + ptr = true + } + if arr != nil && types.Identical(slice.Elem(), arr.Elem()) { + if arr.Len() == 0 { + if ptr { + return sliceTo0ArrayPtr + } else { + return sliceTo0Array + } + } + if ptr { + return sliceToArrayPtr + } else { + return sliceToArray + } + } + } + + // The only remaining case in well-typed code is a representation- + // changing conversion of basic types (possibly with []byte/[]rune). + if !isBasic(us) && !isBasic(ud) { + panic(fmt.Sprintf("in %s: cannot convert term %s (%s [within %s]) to type %s [within %s]", f, val, val.Type(), us, typ, ud)) + } + return convert + } + + var classifications conversionCase + for _, s := range src_terms { + us := s.Type().Underlying() + for _, d := range dst_terms { + ud := d.Type().Underlying() + classifications |= classify(us, ud) + } + } + if classifications == 0 { + panic(fmt.Sprintf("in %s: cannot convert %s (%s) to %s", f, val, val.Type(), typ)) + } + + // Conversion of a compile-time constant value? + if c, ok := val.(*Const); ok { + // Conversion to a basic type? + if isBasic(ut_dst) { + // Conversion of a compile-time constant to + // another constant type results in a new + // constant of the destination type and + // (initially) the same abstract value. + // We don't truncate the value yet. + return NewConst(c.Value, typ) + } + // Can we always convert from zero value without panicking? + const mayPanic = sliceToArray | sliceToArrayPtr + if c.Value == nil && classifications&mayPanic == 0 { + return NewConst(nil, typ) + } + + // We're converting from constant to non-constant type, + // e.g. string -> []byte/[]rune. 
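+		// Fall through to the ordinary conversion cases below.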
+ } + + switch classifications { + case changeType: // representation-preserving change + c := &ChangeType{X: val} + c.setType(typ) + return f.emit(c) + + case sliceToArrayPtr, sliceTo0ArrayPtr: // slice to array pointer + c := &SliceToArrayPointer{X: val} + c.setType(typ) + return f.emit(c) + + case sliceToArray: // slice to arrays (not zero-length) + ptype := types.NewPointer(typ) + p := &SliceToArrayPointer{X: val} + p.setType(ptype) + x := f.emit(p) + unOp := &UnOp{Op: token.MUL, X: x} + unOp.setType(typ) + return f.emit(unOp) + + case sliceTo0Array: // slice to zero-length arrays (constant) + return zeroConst(typ) + + case convert: // representation-changing conversion + c := &Convert{X: val} + c.setType(typ) + return f.emit(c) + + default: // multiple conversion + c := &MultiConvert{X: val, from: src_terms, to: dst_terms} + c.setType(typ) + return f.emit(c) + } +} + +// emitTypeCoercion emits to f code to coerce the type of a +// Value v to exactly type typ, and returns the coerced value. +// +// Requires that coercing v.Typ() to typ is a value preserving change. +// +// Currently used only when v.Type() is a type instance of typ or vice versa. +// A type v is a type instance of a type t if there exists a +// type parameter substitution σ s.t. σ(v) == t. Example: +// +// σ(func(T) T) == func(int) int for σ == [T ↦ int] +// +// This happens in instantiation wrappers for conversion +// from an instantiation to a parameterized type (and vice versa) +// with σ substituting f.typeparams by f.typeargs. +func emitTypeCoercion(f *Function, v Value, typ types.Type) Value { + if types.Identical(v.Type(), typ) { + return v // no coercion needed + } + // TODO(taking): for instances should we record which side is the instance? + c := &ChangeType{ + X: v, + } + c.setType(typ) + f.emit(c) + return c +} + +// emitStore emits to f an instruction to store value val at location +// addr, applying implicit conversions as required by assignability rules. +func emitStore(f *Function, addr, val Value, pos token.Pos) *Store { + typ := typeparams.MustDeref(addr.Type()) + s := &Store{ + Addr: addr, + Val: emitConv(f, val, typ), + pos: pos, + } + f.emit(s) + return s +} + +// emitJump emits to f a jump to target, and updates the control-flow graph. +// Postcondition: f.currentBlock is nil. +func emitJump(f *Function, target *BasicBlock) { + b := f.currentBlock + b.emit(new(Jump)) + addEdge(b, target) + f.currentBlock = nil +} + +// emitIf emits to f a conditional jump to tblock or fblock based on +// cond, and updates the control-flow graph. +// Postcondition: f.currentBlock is nil. +func emitIf(f *Function, cond Value, tblock, fblock *BasicBlock) { + b := f.currentBlock + b.emit(&If{Cond: cond}) + addEdge(b, tblock) + addEdge(b, fblock) + f.currentBlock = nil +} + +// emitExtract emits to f an instruction to extract the index'th +// component of tuple. It returns the extracted value. +func emitExtract(f *Function, tuple Value, index int) Value { + e := &Extract{Tuple: tuple, Index: index} + e.setType(tuple.Type().(*types.Tuple).At(index).Type()) + return f.emit(e) +} + +// emitTypeAssert emits to f a type assertion value := x.(t) and +// returns the value. x.Type() must be an interface. +func emitTypeAssert(f *Function, x Value, t types.Type, pos token.Pos) Value { + a := &TypeAssert{X: x, AssertedType: t} + a.setPos(pos) + a.setType(t) + return f.emit(a) +} + +// emitTypeTest emits to f a type test value,ok := x.(t) and returns +// a (value, ok) tuple. x.Type() must be an interface. 
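+//
+// For example, the statement
+//
+//	v, ok := x.(T)
+//
+// becomes a CommaOk TypeAssert whose tuple components are later
+// retrieved with emitExtract.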
+func emitTypeTest(f *Function, x Value, t types.Type, pos token.Pos) Value { + a := &TypeAssert{ + X: x, + AssertedType: t, + CommaOk: true, + } + a.setPos(pos) + a.setType(types.NewTuple( + newVar("value", t), + varOk, + )) + return f.emit(a) +} + +// emitTailCall emits to f a function call in tail position. The +// caller is responsible for all fields of 'call' except its type. +// Intended for wrapper methods. +// Precondition: f does/will not use deferred procedure calls. +// Postcondition: f.currentBlock is nil. +func emitTailCall(f *Function, call *Call) { + tresults := f.Signature.Results() + nr := tresults.Len() + if nr == 1 { + call.typ = tresults.At(0).Type() + } else { + call.typ = tresults + } + tuple := f.emit(call) + var ret Return + switch nr { + case 0: + // no-op + case 1: + ret.Results = []Value{tuple} + default: + for i := 0; i < nr; i++ { + v := emitExtract(f, tuple, i) + // TODO(adonovan): in principle, this is required: + // v = emitConv(f, o.Type, f.Signature.Results[i].Type) + // but in practice emitTailCall is only used when + // the types exactly match. + ret.Results = append(ret.Results, v) + } + } + f.emit(&ret) + f.currentBlock = nil +} + +// emitImplicitSelections emits to f code to apply the sequence of +// implicit field selections specified by indices to base value v, and +// returns the selected value. +// +// If v is the address of a struct, the result will be the address of +// a field; if it is the value of a struct, the result will be the +// value of a field. +func emitImplicitSelections(f *Function, v Value, indices []int, pos token.Pos) Value { + for _, index := range indices { + if isPointerCore(v.Type()) { + fld := fieldOf(typeparams.MustDeref(v.Type()), index) + instr := &FieldAddr{ + X: v, + Field: index, + } + instr.setPos(pos) + instr.setType(types.NewPointer(fld.Type())) + v = f.emit(instr) + // Load the field's value iff indirectly embedded. + if isPointerCore(fld.Type()) { + v = emitLoad(f, v) + } + } else { + fld := fieldOf(v.Type(), index) + instr := &Field{ + X: v, + Field: index, + } + instr.setPos(pos) + instr.setType(fld.Type()) + v = f.emit(instr) + } + } + return v +} + +// emitFieldSelection emits to f code to select the index'th field of v. +// +// If wantAddr, the input must be a pointer-to-struct and the result +// will be the field's address; otherwise the result will be the +// field's value. +// Ident id is used for position and debug info. +func emitFieldSelection(f *Function, v Value, index int, wantAddr bool, id *ast.Ident) Value { + if isPointerCore(v.Type()) { + fld := fieldOf(typeparams.MustDeref(v.Type()), index) + instr := &FieldAddr{ + X: v, + Field: index, + } + instr.setPos(id.Pos()) + instr.setType(types.NewPointer(fld.Type())) + v = f.emit(instr) + // Load the field's value iff we don't want its address. + if !wantAddr { + v = emitLoad(f, v) + } + } else { + fld := fieldOf(v.Type(), index) + instr := &Field{ + X: v, + Field: index, + } + instr.setPos(id.Pos()) + instr.setType(fld.Type()) + v = f.emit(instr) + } + emitDebugRef(f, id, v, wantAddr) + return v +} + +// createRecoverBlock emits to f a block of code to return after a +// recovered panic, and sets f.Recover to it. +// +// If f's result parameters are named, the code loads and returns +// their current values, otherwise it returns the zero values of their +// type. +// +// Idempotent. 
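+//
+// For example, for
+//
+//	func f() (x int, err error)
+//
+// the recover block reloads x and err and returns their current values.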
+func createRecoverBlock(f *Function) { + if f.Recover != nil { + return // already created + } + saved := f.currentBlock + + f.Recover = f.newBasicBlock("recover") + f.currentBlock = f.Recover + + var results []Value + // Reload NRPs to form value tuple. + for _, nr := range f.results { + results = append(results, emitLoad(f, nr)) + } + + f.emit(&Return{Results: results}) + + f.currentBlock = saved +} diff --git a/vendor/golang.org/x/tools/go/ssa/func.go b/vendor/golang.org/x/tools/go/ssa/func.go new file mode 100644 index 0000000..2ed63bf --- /dev/null +++ b/vendor/golang.org/x/tools/go/ssa/func.go @@ -0,0 +1,816 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package ssa + +// This file implements the Function type. + +import ( + "bytes" + "fmt" + "go/ast" + "go/token" + "go/types" + "io" + "os" + "strings" + + "golang.org/x/tools/internal/typeparams" +) + +// Like ObjectOf, but panics instead of returning nil. +// Only valid during f's create and build phases. +func (f *Function) objectOf(id *ast.Ident) types.Object { + if o := f.info.ObjectOf(id); o != nil { + return o + } + panic(fmt.Sprintf("no types.Object for ast.Ident %s @ %s", + id.Name, f.Prog.Fset.Position(id.Pos()))) +} + +// Like TypeOf, but panics instead of returning nil. +// Only valid during f's create and build phases. +func (f *Function) typeOf(e ast.Expr) types.Type { + if T := f.info.TypeOf(e); T != nil { + return f.typ(T) + } + panic(fmt.Sprintf("no type for %T @ %s", e, f.Prog.Fset.Position(e.Pos()))) +} + +// typ is the locally instantiated type of T. +// If f is not an instantiation, then f.typ(T)==T. +func (f *Function) typ(T types.Type) types.Type { + return f.subst.typ(T) +} + +// If id is an Instance, returns info.Instances[id].Type. +// Otherwise returns f.typeOf(id). +func (f *Function) instanceType(id *ast.Ident) types.Type { + if t, ok := f.info.Instances[id]; ok { + return t.Type + } + return f.typeOf(id) +} + +// selection returns a *selection corresponding to f.info.Selections[selector] +// with potential updates for type substitution. +func (f *Function) selection(selector *ast.SelectorExpr) *selection { + sel := f.info.Selections[selector] + if sel == nil { + return nil + } + + switch sel.Kind() { + case types.MethodExpr, types.MethodVal: + if recv := f.typ(sel.Recv()); recv != sel.Recv() { + // recv changed during type substitution. + pkg := f.declaredPackage().Pkg + obj, index, indirect := types.LookupFieldOrMethod(recv, true, pkg, sel.Obj().Name()) + + // sig replaces sel.Type(). See (types.Selection).Typ() for details. + sig := obj.Type().(*types.Signature) + sig = changeRecv(sig, newVar(sig.Recv().Name(), recv)) + if sel.Kind() == types.MethodExpr { + sig = recvAsFirstArg(sig) + } + return &selection{ + kind: sel.Kind(), + recv: recv, + typ: sig, + obj: obj, + index: index, + indirect: indirect, + } + } + } + return toSelection(sel) +} + +// Destinations associated with unlabelled for/switch/select stmts. +// We push/pop one of these as we enter/leave each construct and for +// each BranchStmt we scan for the innermost target of the right type. +type targets struct { + tail *targets // rest of stack + _break *BasicBlock + _continue *BasicBlock + _fallthrough *BasicBlock +} + +// Destinations associated with a labelled block. +// We populate these as labels are encountered in forward gotos or +// labelled statements. 
+// Forward gotos are resolved once it is known which statement they +// are associated with inside the Function. +type lblock struct { + label *types.Label // Label targeted by the blocks. + resolved bool // _goto block encountered (back jump or resolved fwd jump) + _goto *BasicBlock + _break *BasicBlock + _continue *BasicBlock +} + +// label returns the symbol denoted by a label identifier. +// +// label should be a non-blank identifier (label.Name != "_"). +func (f *Function) label(label *ast.Ident) *types.Label { + return f.objectOf(label).(*types.Label) +} + +// lblockOf returns the branch target associated with the +// specified label, creating it if needed. +func (f *Function) lblockOf(label *types.Label) *lblock { + lb := f.lblocks[label] + if lb == nil { + lb = &lblock{ + label: label, + _goto: f.newBasicBlock(label.Name()), + } + if f.lblocks == nil { + f.lblocks = make(map[*types.Label]*lblock) + } + f.lblocks[label] = lb + } + return lb +} + +// labelledBlock searches f for the block of the specified label. +// +// If f is a yield function, it additionally searches ancestor Functions +// corresponding to enclosing range-over-func statements within the +// same source function, so the returned block may belong to a different Function. +func labelledBlock(f *Function, label *types.Label, tok token.Token) *BasicBlock { + if lb := f.lblocks[label]; lb != nil { + var block *BasicBlock + switch tok { + case token.BREAK: + block = lb._break + case token.CONTINUE: + block = lb._continue + case token.GOTO: + block = lb._goto + } + if block != nil { + return block + } + } + // Search ancestors if this is a yield function. + if f.jump != nil { + return labelledBlock(f.parent, label, tok) + } + return nil +} + +// targetedBlock looks for the nearest block in f.targets +// (and f's ancestors) that matches tok's type, and returns +// the block and function it was found in. +func targetedBlock(f *Function, tok token.Token) *BasicBlock { + if f == nil { + return nil + } + for t := f.targets; t != nil; t = t.tail { + var block *BasicBlock + switch tok { + case token.BREAK: + block = t._break + case token.CONTINUE: + block = t._continue + case token.FALLTHROUGH: + block = t._fallthrough + } + if block != nil { + return block + } + } + // Search f's ancestors (in case f is a yield function). + return targetedBlock(f.parent, tok) +} + +// addResultVar adds a result for a variable v to f.results and v to f.returnVars. +func (f *Function) addResultVar(v *types.Var) { + result := emitLocalVar(f, v) + f.results = append(f.results, result) + f.returnVars = append(f.returnVars, v) +} + +// addParamVar adds a parameter to f.Params. +func (f *Function) addParamVar(v *types.Var) *Parameter { + name := v.Name() + if name == "" { + name = fmt.Sprintf("arg%d", len(f.Params)) + } + param := &Parameter{ + name: name, + object: v, + typ: f.typ(v.Type()), + parent: f, + } + f.Params = append(f.Params, param) + return param +} + +// addSpilledParam declares a parameter that is pre-spilled to the +// stack; the function body will load/store the spilled location. +// Subsequent lifting will eliminate spills where possible. +func (f *Function) addSpilledParam(obj *types.Var) { + param := f.addParamVar(obj) + spill := emitLocalVar(f, obj) + f.emit(&Store{Addr: spill, Val: param}) +} + +// startBody initializes the function prior to generating SSA code for its body. +// Precondition: f.Type() already set. 
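+// Postcondition: f.currentBlock is the new "entry" block, so
+// subsequent calls to f.emit append to it.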
+func (f *Function) startBody() { + f.currentBlock = f.newBasicBlock("entry") + f.vars = make(map[*types.Var]Value) // needed for some synthetics, e.g. init +} + +// createSyntacticParams populates f.Params and generates code (spills +// and named result locals) for all the parameters declared in the +// syntax. In addition it populates the f.objects mapping. +// +// Preconditions: +// f.startBody() was called. f.info != nil. +// Postcondition: +// len(f.Params) == len(f.Signature.Params) + (f.Signature.Recv() ? 1 : 0) +func (f *Function) createSyntacticParams(recv *ast.FieldList, functype *ast.FuncType) { + // Receiver (at most one inner iteration). + if recv != nil { + for _, field := range recv.List { + for _, n := range field.Names { + f.addSpilledParam(identVar(f, n)) + } + // Anonymous receiver? No need to spill. + if field.Names == nil { + f.addParamVar(f.Signature.Recv()) + } + } + } + + // Parameters. + if functype.Params != nil { + n := len(f.Params) // 1 if has recv, 0 otherwise + for _, field := range functype.Params.List { + for _, n := range field.Names { + f.addSpilledParam(identVar(f, n)) + } + // Anonymous parameter? No need to spill. + if field.Names == nil { + f.addParamVar(f.Signature.Params().At(len(f.Params) - n)) + } + } + } + + // Results. + if functype.Results != nil { + for _, field := range functype.Results.List { + // Implicit "var" decl of locals for named results. + for _, n := range field.Names { + v := identVar(f, n) + f.addResultVar(v) + } + // Implicit "var" decl of local for an unnamed result. + if field.Names == nil { + v := f.Signature.Results().At(len(f.results)) + f.addResultVar(v) + } + } + } +} + +// createDeferStack initializes fn.deferstack to local variable +// initialized to a ssa:deferstack() call. +func (fn *Function) createDeferStack() { + // Each syntactic function makes a call to ssa:deferstack, + // which is spilled to a local. Unused ones are later removed. + fn.deferstack = newVar("defer$stack", tDeferStack) + call := &Call{Call: CallCommon{Value: vDeferStack}} + call.setType(tDeferStack) + deferstack := fn.emit(call) + spill := emitLocalVar(fn, fn.deferstack) + emitStore(fn, spill, deferstack, token.NoPos) +} + +type setNumable interface { + setNum(int) +} + +// numberRegisters assigns numbers to all SSA registers +// (value-defining Instructions) in f, to aid debugging. +// (Non-Instruction Values are named at construction.) +func numberRegisters(f *Function) { + v := 0 + for _, b := range f.Blocks { + for _, instr := range b.Instrs { + switch instr.(type) { + case Value: + instr.(setNumable).setNum(v) + v++ + } + } + } +} + +// buildReferrers populates the def/use information in all non-nil +// Value.Referrers slice. +// Precondition: all such slices are initially empty. +func buildReferrers(f *Function) { + var rands []*Value + for _, b := range f.Blocks { + for _, instr := range b.Instrs { + rands = instr.Operands(rands[:0]) // recycle storage + for _, rand := range rands { + if r := *rand; r != nil { + if ref := r.Referrers(); ref != nil { + *ref = append(*ref, instr) + } + } + } + } + } +} + +// finishBody() finalizes the contents of the function after SSA code generation of its body. +// +// The function is not done being built until done() is called. +func (f *Function) finishBody() { + f.currentBlock = nil + f.lblocks = nil + f.returnVars = nil + f.jump = nil + f.source = nil + f.exits = nil + + // Remove from f.Locals any Allocs that escape to the heap. 
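+	// (Escaping Allocs remain reachable from the instruction stream;
+	// they are simply no longer frame-allocated.)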
+ j := 0 + for _, l := range f.Locals { + if !l.Heap { + f.Locals[j] = l + j++ + } + } + // Nil out f.Locals[j:] to aid GC. + for i := j; i < len(f.Locals); i++ { + f.Locals[i] = nil + } + f.Locals = f.Locals[:j] + + optimizeBlocks(f) + + buildReferrers(f) + + buildDomTree(f) + + if f.Prog.mode&NaiveForm == 0 { + // For debugging pre-state of lifting pass: + // numberRegisters(f) + // f.WriteTo(os.Stderr) + lift(f) + } + + // clear remaining builder state + f.results = nil // (used by lifting) + f.deferstack = nil // (used by lifting) + f.vars = nil // (used by lifting) + f.subst = nil + + numberRegisters(f) // uses f.namedRegisters +} + +// done marks the building of f's SSA body complete, +// along with any nested functions, and optionally prints them. +func (f *Function) done() { + assert(f.parent == nil, "done called on an anonymous function") + + var visit func(*Function) + visit = func(f *Function) { + for _, anon := range f.AnonFuncs { + visit(anon) // anon is done building before f. + } + + f.uniq = 0 // done with uniq + f.build = nil // function is built + + if f.Prog.mode&PrintFunctions != 0 { + printMu.Lock() + f.WriteTo(os.Stdout) + printMu.Unlock() + } + + if f.Prog.mode&SanityCheckFunctions != 0 { + mustSanityCheck(f, nil) + } + } + visit(f) +} + +// removeNilBlocks eliminates nils from f.Blocks and updates each +// BasicBlock.Index. Use this after any pass that may delete blocks. +func (f *Function) removeNilBlocks() { + j := 0 + for _, b := range f.Blocks { + if b != nil { + b.Index = j + f.Blocks[j] = b + j++ + } + } + // Nil out f.Blocks[j:] to aid GC. + for i := j; i < len(f.Blocks); i++ { + f.Blocks[i] = nil + } + f.Blocks = f.Blocks[:j] +} + +// SetDebugMode sets the debug mode for package pkg. If true, all its +// functions will include full debug info. This greatly increases the +// size of the instruction stream, and causes Functions to depend upon +// the ASTs, potentially keeping them live in memory for longer. +func (pkg *Package) SetDebugMode(debug bool) { + pkg.debug = debug +} + +// debugInfo reports whether debug info is wanted for this function. +func (f *Function) debugInfo() bool { + // debug info for instantiations follows the debug info of their origin. + p := f.declaredPackage() + return p != nil && p.debug +} + +// lookup returns the address of the named variable identified by obj +// that is local to function f or one of its enclosing functions. +// If escaping, the reference comes from a potentially escaping pointer +// expression and the referent must be heap-allocated. +// We assume the referent is a *Alloc or *Phi. +// (The only Phis at this stage are those created directly by go1.22 "for" loops.) +func (f *Function) lookup(obj *types.Var, escaping bool) Value { + if v, ok := f.vars[obj]; ok { + if escaping { + switch v := v.(type) { + case *Alloc: + v.Heap = true + case *Phi: + for _, edge := range v.Edges { + if alloc, ok := edge.(*Alloc); ok { + alloc.Heap = true + } + } + } + } + return v // function-local var (address) + } + + // Definition must be in an enclosing function; + // plumb it through intervening closures. + if f.parent == nil { + panic("no ssa.Value for " + obj.String()) + } + outer := f.parent.lookup(obj, true) // escaping + v := &FreeVar{ + name: obj.Name(), + typ: outer.Type(), + pos: outer.Pos(), + outer: outer, + parent: f, + } + f.vars[obj] = v + f.FreeVars = append(f.FreeVars, v) + return v +} + +// emit emits the specified instruction to function f. 
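+// It appends instr to the current basic block and returns the defined
+// Value, or nil if instr is not a Value (see BasicBlock.emit).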
+func (f *Function) emit(instr Instruction) Value { + return f.currentBlock.emit(instr) +} + +// RelString returns the full name of this function, qualified by +// package name, receiver type, etc. +// +// The specific formatting rules are not guaranteed and may change. +// +// Examples: +// +// "math.IsNaN" // a package-level function +// "(*bytes.Buffer).Bytes" // a declared method or a wrapper +// "(*bytes.Buffer).Bytes$thunk" // thunk (func wrapping method; receiver is param 0) +// "(*bytes.Buffer).Bytes$bound" // bound (func wrapping method; receiver supplied by closure) +// "main.main$1" // an anonymous function in main +// "main.init#1" // a declared init function +// "main.init" // the synthesized package initializer +// +// When these functions are referred to from within the same package +// (i.e. from == f.Pkg.Object), they are rendered without the package path. +// For example: "IsNaN", "(*Buffer).Bytes", etc. +// +// All non-synthetic functions have distinct package-qualified names. +// (But two methods may have the same name "(T).f" if one is a synthetic +// wrapper promoting a non-exported method "f" from another package; in +// that case, the strings are equal but the identifiers "f" are distinct.) +func (f *Function) RelString(from *types.Package) string { + // Anonymous? + if f.parent != nil { + // An anonymous function's Name() looks like "parentName$1", + // but its String() should include the type/package/etc. + parent := f.parent.RelString(from) + for i, anon := range f.parent.AnonFuncs { + if anon == f { + return fmt.Sprintf("%s$%d", parent, 1+i) + } + } + + return f.name // should never happen + } + + // Method (declared or wrapper)? + if recv := f.Signature.Recv(); recv != nil { + return f.relMethod(from, recv.Type()) + } + + // Thunk? + if f.method != nil { + return f.relMethod(from, f.method.recv) + } + + // Bound? + if len(f.FreeVars) == 1 && strings.HasSuffix(f.name, "$bound") { + return f.relMethod(from, f.FreeVars[0].Type()) + } + + // Package-level function? + // Prefix with package name for cross-package references only. + if p := f.relPkg(); p != nil && p != from { + return fmt.Sprintf("%s.%s", p.Path(), f.name) + } + + // Unknown. + return f.name +} + +func (f *Function) relMethod(from *types.Package, recv types.Type) string { + return fmt.Sprintf("(%s).%s", relType(recv, from), f.name) +} + +// writeSignature writes to buf the signature sig in declaration syntax. +func writeSignature(buf *bytes.Buffer, from *types.Package, name string, sig *types.Signature) { + buf.WriteString("func ") + if recv := sig.Recv(); recv != nil { + buf.WriteString("(") + if name := recv.Name(); name != "" { + buf.WriteString(name) + buf.WriteString(" ") + } + types.WriteType(buf, recv.Type(), types.RelativeTo(from)) + buf.WriteString(") ") + } + buf.WriteString(name) + types.WriteSignature(buf, sig, types.RelativeTo(from)) +} + +// declaredPackage returns the package fn is declared in or nil if the +// function is not declared in a package. +func (fn *Function) declaredPackage() *Package { + switch { + case fn.Pkg != nil: + return fn.Pkg // non-generic function (does that follow??) + case fn.topLevelOrigin != nil: + return fn.topLevelOrigin.Pkg // instance of a named generic function + case fn.parent != nil: + return fn.parent.declaredPackage() // instance of an anonymous [generic] function + default: + return nil // function is not declared in a package, e.g. a wrapper. + } +} + +// relPkg returns types.Package fn is printed in relationship to. 
+func (fn *Function) relPkg() *types.Package { + if p := fn.declaredPackage(); p != nil { + return p.Pkg + } + return nil +} + +var _ io.WriterTo = (*Function)(nil) // *Function implements io.Writer + +func (f *Function) WriteTo(w io.Writer) (int64, error) { + var buf bytes.Buffer + WriteFunction(&buf, f) + n, err := w.Write(buf.Bytes()) + return int64(n), err +} + +// WriteFunction writes to buf a human-readable "disassembly" of f. +func WriteFunction(buf *bytes.Buffer, f *Function) { + fmt.Fprintf(buf, "# Name: %s\n", f.String()) + if f.Pkg != nil { + fmt.Fprintf(buf, "# Package: %s\n", f.Pkg.Pkg.Path()) + } + if syn := f.Synthetic; syn != "" { + fmt.Fprintln(buf, "# Synthetic:", syn) + } + if pos := f.Pos(); pos.IsValid() { + fmt.Fprintf(buf, "# Location: %s\n", f.Prog.Fset.Position(pos)) + } + + if f.parent != nil { + fmt.Fprintf(buf, "# Parent: %s\n", f.parent.Name()) + } + + if f.Recover != nil { + fmt.Fprintf(buf, "# Recover: %s\n", f.Recover) + } + + from := f.relPkg() + + if f.FreeVars != nil { + buf.WriteString("# Free variables:\n") + for i, fv := range f.FreeVars { + fmt.Fprintf(buf, "# % 3d:\t%s %s\n", i, fv.Name(), relType(fv.Type(), from)) + } + } + + if len(f.Locals) > 0 { + buf.WriteString("# Locals:\n") + for i, l := range f.Locals { + fmt.Fprintf(buf, "# % 3d:\t%s %s\n", i, l.Name(), relType(typeparams.MustDeref(l.Type()), from)) + } + } + writeSignature(buf, from, f.Name(), f.Signature) + buf.WriteString(":\n") + + if f.Blocks == nil { + buf.WriteString("\t(external)\n") + } + + // NB. column calculations are confused by non-ASCII + // characters and assume 8-space tabs. + const punchcard = 80 // for old time's sake. + const tabwidth = 8 + for _, b := range f.Blocks { + if b == nil { + // Corrupt CFG. + fmt.Fprintf(buf, ".nil:\n") + continue + } + n, _ := fmt.Fprintf(buf, "%d:", b.Index) + bmsg := fmt.Sprintf("%s P:%d S:%d", b.Comment, len(b.Preds), len(b.Succs)) + fmt.Fprintf(buf, "%*s%s\n", punchcard-1-n-len(bmsg), "", bmsg) + + if false { // CFG debugging + fmt.Fprintf(buf, "\t# CFG: %s --> %s --> %s\n", b.Preds, b, b.Succs) + } + for _, instr := range b.Instrs { + buf.WriteString("\t") + switch v := instr.(type) { + case Value: + l := punchcard - tabwidth + // Left-align the instruction. + if name := v.Name(); name != "" { + n, _ := fmt.Fprintf(buf, "%s = ", name) + l -= n + } + n, _ := buf.WriteString(instr.String()) + l -= n + // Right-align the type if there's space. + if t := v.Type(); t != nil { + buf.WriteByte(' ') + ts := relType(t, from) + l -= len(ts) + len(" ") // (spaces before and after type) + if l > 0 { + fmt.Fprintf(buf, "%*s", l, "") + } + buf.WriteString(ts) + } + case nil: + // Be robust against bad transforms. + buf.WriteString("<deleted>") + default: + buf.WriteString(instr.String()) + } + // -mode=S: show line numbers + if f.Prog.mode&LogSource != 0 { + if pos := instr.Pos(); pos.IsValid() { + fmt.Fprintf(buf, " L%d", f.Prog.Fset.Position(pos).Line) + } + } + buf.WriteString("\n") + } + } + fmt.Fprintf(buf, "\n") +} + +// newBasicBlock adds to f a new basic block and returns it. It does +// not automatically become the current block for subsequent calls to emit. +// comment is an optional string for more readable debugging output. 
+func (f *Function) newBasicBlock(comment string) *BasicBlock { + b := &BasicBlock{ + Index: len(f.Blocks), + Comment: comment, + parent: f, + } + b.Succs = b.succs2[:0] + f.Blocks = append(f.Blocks, b) + return b +} + +// NewFunction returns a new synthetic Function instance belonging to +// prog, with its name and signature fields set as specified. +// +// The caller is responsible for initializing the remaining fields of +// the function object, e.g. Pkg, Params, Blocks. +// +// It is practically impossible for clients to construct well-formed +// SSA functions/packages/programs directly, so we assume this is the +// job of the Builder alone. NewFunction exists to provide clients a +// little flexibility. For example, analysis tools may wish to +// construct fake Functions for the root of the callgraph, a fake +// "reflect" package, etc. +// +// TODO(adonovan): think harder about the API here. +func (prog *Program) NewFunction(name string, sig *types.Signature, provenance string) *Function { + return &Function{Prog: prog, name: name, Signature: sig, Synthetic: provenance} +} + +// Syntax returns the function's syntax (*ast.Func{Decl,Lit}) +// if it was produced from syntax or an *ast.RangeStmt if +// it is a range-over-func yield function. +func (f *Function) Syntax() ast.Node { return f.syntax } + +// identVar returns the variable defined by id. +func identVar(fn *Function, id *ast.Ident) *types.Var { + return fn.info.Defs[id].(*types.Var) +} + +// unique returns a unique positive int within the source tree of f. +// The source tree of f includes all of f's ancestors by parent and all +// of the AnonFuncs contained within these. +func unique(f *Function) int64 { + f.uniq++ + return f.uniq +} + +// exit is a change of control flow going from a range-over-func +// yield function to an ancestor function caused by a break, continue, +// goto, or return statement. +// +// There are 3 types of exits: +// * return from the source function (from ReturnStmt), +// * jump to a block (from break and continue statements [labelled/unlabelled]), +// * go to a label (from goto statements). +// +// As the builder does one pass over the ast, it is unclear whether +// a forward goto statement will leave a range-over-func body. +// The function being exited to is unresolved until the end +// of building the range-over-func body. +type exit struct { + id int64 // unique value for exit within from and to + from *Function // the function the exit starts from + to *Function // the function being exited to (nil if unresolved) + pos token.Pos + + block *BasicBlock // basic block within to being jumped to. + label *types.Label // forward label being jumped to via goto. + // block == nil && label == nil => return +} + +// storeVar emits to function f code to store a value v to a *types.Var x. +func storeVar(f *Function, x *types.Var, v Value, pos token.Pos) { + emitStore(f, f.lookup(x, true), v, pos) +} + +// labelExit creates a new exit to a yield fn to exit the function using a label. +func labelExit(fn *Function, label *types.Label, pos token.Pos) *exit { + e := &exit{ + id: unique(fn), + from: fn, + to: nil, + pos: pos, + label: label, + } + fn.exits = append(fn.exits, e) + return e +} + +// blockExit creates a new exit to a yield fn that jumps to a basic block. 
+func blockExit(fn *Function, block *BasicBlock, pos token.Pos) *exit { + e := &exit{ + id: unique(fn), + from: fn, + to: block.parent, + pos: pos, + block: block, + } + fn.exits = append(fn.exits, e) + return e +} + +// blockExit creates a new exit to a yield fn that returns the source function. +func returnExit(fn *Function, pos token.Pos) *exit { + e := &exit{ + id: unique(fn), + from: fn, + to: fn.source, + pos: pos, + } + fn.exits = append(fn.exits, e) + return e +} diff --git a/vendor/golang.org/x/tools/go/ssa/instantiate.go b/vendor/golang.org/x/tools/go/ssa/instantiate.go new file mode 100644 index 0000000..2512f32 --- /dev/null +++ b/vendor/golang.org/x/tools/go/ssa/instantiate.go @@ -0,0 +1,131 @@ +// Copyright 2022 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package ssa + +import ( + "fmt" + "go/types" + "sync" +) + +// A generic records information about a generic origin function, +// including a cache of existing instantiations. +type generic struct { + instancesMu sync.Mutex + instances map[*typeList]*Function // canonical type arguments to an instance. +} + +// instance returns a Function that is the instantiation of generic +// origin function fn with the type arguments targs. +// +// Any created instance is added to cr. +// +// Acquires fn.generic.instancesMu. +func (fn *Function) instance(targs []types.Type, b *builder) *Function { + key := fn.Prog.canon.List(targs) + + gen := fn.generic + + gen.instancesMu.Lock() + defer gen.instancesMu.Unlock() + inst, ok := gen.instances[key] + if !ok { + inst = createInstance(fn, targs) + inst.buildshared = b.shared() + b.enqueue(inst) + + if gen.instances == nil { + gen.instances = make(map[*typeList]*Function) + } + gen.instances[key] = inst + } else { + b.waitForSharedFunction(inst) + } + return inst +} + +// createInstance returns the instantiation of generic function fn using targs. +// +// Requires fn.generic.instancesMu. +func createInstance(fn *Function, targs []types.Type) *Function { + prog := fn.Prog + + // Compute signature. + var sig *types.Signature + var obj *types.Func + if recv := fn.Signature.Recv(); recv != nil { + // method + obj = prog.canon.instantiateMethod(fn.object, targs, prog.ctxt) + sig = obj.Type().(*types.Signature) + } else { + // function + instSig, err := types.Instantiate(prog.ctxt, fn.Signature, targs, false) + if err != nil { + panic(err) + } + instance, ok := instSig.(*types.Signature) + if !ok { + panic("Instantiate of a Signature returned a non-signature") + } + obj = fn.object // instantiation does not exist yet + sig = prog.canon.Type(instance).(*types.Signature) + } + + // Choose strategy (instance or wrapper). + var ( + synthetic string + subst *subster + build buildFunc + ) + if prog.mode&InstantiateGenerics != 0 && !prog.isParameterized(targs...) 
{ + synthetic = fmt.Sprintf("instance of %s", fn.Name()) + if fn.syntax != nil { + subst = makeSubster(prog.ctxt, obj, fn.typeparams, targs, false) + build = (*builder).buildFromSyntax + } else { + build = (*builder).buildParamsOnly + } + } else { + synthetic = fmt.Sprintf("instantiation wrapper of %s", fn.Name()) + build = (*builder).buildInstantiationWrapper + } + + /* generic instance or instantiation wrapper */ + return &Function{ + name: fmt.Sprintf("%s%s", fn.Name(), targs), // may not be unique + object: obj, + Signature: sig, + Synthetic: synthetic, + syntax: fn.syntax, // \ + info: fn.info, // } empty for non-created packages + goversion: fn.goversion, // / + build: build, + topLevelOrigin: fn, + pos: obj.Pos(), + Pkg: nil, + Prog: fn.Prog, + typeparams: fn.typeparams, // share with origin + typeargs: targs, + subst: subst, + } +} + +// isParameterized reports whether any of the specified types contains +// a free type parameter. It is safe to call concurrently. +func (prog *Program) isParameterized(ts ...types.Type) bool { + prog.hasParamsMu.Lock() + defer prog.hasParamsMu.Unlock() + + // TODO(adonovan): profile. If this operation is expensive, + // handle the most common but shallow cases such as T, pkg.T, + // *T without consulting the cache under the lock. + + for _, t := range ts { + if prog.hasParams.Has(t) { + return true + } + } + return false +} diff --git a/vendor/golang.org/x/tools/go/ssa/lift.go b/vendor/golang.org/x/tools/go/ssa/lift.go new file mode 100644 index 0000000..aada3dc --- /dev/null +++ b/vendor/golang.org/x/tools/go/ssa/lift.go @@ -0,0 +1,688 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package ssa + +// This file defines the lifting pass which tries to "lift" Alloc +// cells (new/local variables) into SSA registers, replacing loads +// with the dominating stored value, eliminating loads and stores, and +// inserting φ-nodes as needed. + +// Cited papers and resources: +// +// Ron Cytron et al. 1991. Efficiently computing SSA form... +// http://doi.acm.org/10.1145/115372.115320 +// +// Cooper, Harvey, Kennedy. 2001. A Simple, Fast Dominance Algorithm. +// Software Practice and Experience 2001, 4:1-10. +// http://www.hipersoft.rice.edu/grads/publications/dom14.pdf +// +// Daniel Berlin, llvmdev mailing list, 2012. +// http://lists.cs.uiuc.edu/pipermail/llvmdev/2012-January/046638.html +// (Be sure to expand the whole thread.) + +// TODO(adonovan): opt: there are many optimizations worth evaluating, and +// the conventional wisdom for SSA construction is that a simple +// algorithm well engineered often beats those of better asymptotic +// complexity on all but the most egregious inputs. +// +// Danny Berlin suggests that the Cooper et al. algorithm for +// computing the dominance frontier is superior to Cytron et al. +// Furthermore he recommends that rather than computing the DF for the +// whole function then renaming all alloc cells, it may be cheaper to +// compute the DF for each alloc cell separately and throw it away. +// +// Consider exploiting liveness information to avoid creating dead +// φ-nodes which we then immediately remove. +// +// Also see many other "TODO: opt" suggestions in the code. + +import ( + "fmt" + "go/token" + "math/big" + "os" + + "golang.org/x/tools/internal/typeparams" +) + +// If true, show diagnostic information at each step of lifting. +// Very verbose. 
+const debugLifting = false + +// domFrontier maps each block to the set of blocks in its dominance +// frontier. The outer slice is conceptually a map keyed by +// Block.Index. The inner slice is conceptually a set, possibly +// containing duplicates. +// +// TODO(adonovan): opt: measure impact of dups; consider a packed bit +// representation, e.g. big.Int, and bitwise parallel operations for +// the union step in the Children loop. +// +// domFrontier's methods mutate the slice's elements but not its +// length, so their receivers needn't be pointers. +type domFrontier [][]*BasicBlock + +func (df domFrontier) add(u, v *BasicBlock) { + p := &df[u.Index] + *p = append(*p, v) +} + +// build builds the dominance frontier df for the dominator (sub)tree +// rooted at u, using the Cytron et al. algorithm. +// +// TODO(adonovan): opt: consider Berlin approach, computing pruned SSA +// by pruning the entire IDF computation, rather than merely pruning +// the DF -> IDF step. +func (df domFrontier) build(u *BasicBlock) { + // Encounter each node u in postorder of dom tree. + for _, child := range u.dom.children { + df.build(child) + } + for _, vb := range u.Succs { + if v := vb.dom; v.idom != u { + df.add(u, vb) + } + } + for _, w := range u.dom.children { + for _, vb := range df[w.Index] { + // TODO(adonovan): opt: use word-parallel bitwise union. + if v := vb.dom; v.idom != u { + df.add(u, vb) + } + } + } +} + +func buildDomFrontier(fn *Function) domFrontier { + df := make(domFrontier, len(fn.Blocks)) + df.build(fn.Blocks[0]) + if fn.Recover != nil { + df.build(fn.Recover) + } + return df +} + +func removeInstr(refs []Instruction, instr Instruction) []Instruction { + return removeInstrsIf(refs, func(i Instruction) bool { return i == instr }) +} + +func removeInstrsIf(refs []Instruction, p func(Instruction) bool) []Instruction { + // TODO(taking): replace with go1.22 slices.DeleteFunc. + i := 0 + for _, ref := range refs { + if p(ref) { + continue + } + refs[i] = ref + i++ + } + for j := i; j != len(refs); j++ { + refs[j] = nil // aid GC + } + return refs[:i] +} + +// lift replaces local and new Allocs accessed only with +// load/store by SSA registers, inserting φ-nodes where necessary. +// The result is a program in classical pruned SSA form. +// +// Preconditions: +// - fn has no dead blocks (blockopt has run). +// - Def/use info (Operands and Referrers) is up-to-date. +// - The dominator tree is up-to-date. +func lift(fn *Function) { + // TODO(adonovan): opt: lots of little optimizations may be + // worthwhile here, especially if they cause us to avoid + // buildDomFrontier. For example: + // + // - Alloc never loaded? Eliminate. + // - Alloc never stored? Replace all loads with a zero constant. + // - Alloc stored once? Replace loads with dominating store; + // don't forget that an Alloc is itself an effective store + // of zero. + // - Alloc used only within a single block? + // Use degenerate algorithm avoiding φ-nodes. + // - Consider synergy with scalar replacement of aggregates (SRA). + // e.g. *(&x.f) where x is an Alloc. + // Perhaps we'd get better results if we generated this as x.f + // i.e. Field(x, .f) instead of Load(FieldIndex(x, .f)). + // Unclear. + // + // But we will start with the simplest correct code. 
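To make the effect of this pass concrete, consider a tiny source function (illustrative only; register names and block order in the resulting SSA are not guaranteed):

	func max(a, b int) int {
		m := a
		if b > a {
			m = b
		}
		return m
	}

Under NaiveForm the builder keeps an Alloc cell for m together with explicit Store instructions and a dereferencing UnOp load before the return. After lifting, those instructions disappear and the return block instead begins with a φ-node merging a and b (its Comment records the variable name m), whose value is returned directly; this is the pruned SSA form produced by the renaming pass below.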
+ df := buildDomFrontier(fn) + + if debugLifting { + title := false + for i, blocks := range df { + if blocks != nil { + if !title { + fmt.Fprintf(os.Stderr, "Dominance frontier of %s:\n", fn) + title = true + } + fmt.Fprintf(os.Stderr, "\t%s: %s\n", fn.Blocks[i], blocks) + } + } + } + + newPhis := make(newPhiMap) + + // During this pass we will replace some BasicBlock.Instrs + // (allocs, loads and stores) with nil, keeping a count in + // BasicBlock.gaps. At the end we will reset Instrs to the + // concatenation of all non-dead newPhis and non-nil Instrs + // for the block, reusing the original array if space permits. + + // While we're here, we also eliminate 'rundefers' + // instructions and ssa:deferstack() in functions that contain no + // 'defer' instructions. For now, we also eliminate + // 's = ssa:deferstack()' calls if s doesn't escape, replacing s + // with nil in Defer{DeferStack: s}. This has the same meaning, + // but allows eliminating the intrinsic function `ssa:deferstack()` + // (unless it is needed due to range-over-func instances). This gives + // ssa users more time to support range-over-func. + usesDefer := false + deferstackAlloc, deferstackCall := deferstackPreamble(fn) + eliminateDeferStack := deferstackAlloc != nil && !deferstackAlloc.Heap + + // A counter used to generate ~unique ids for Phi nodes, as an + // aid to debugging. We use large numbers to make them highly + // visible. All nodes are renumbered later. + fresh := 1000 + + // Determine which allocs we can lift and number them densely. + // The renaming phase uses this numbering for compact maps. + numAllocs := 0 + for _, b := range fn.Blocks { + b.gaps = 0 + b.rundefers = 0 + for _, instr := range b.Instrs { + switch instr := instr.(type) { + case *Alloc: + index := -1 + if liftAlloc(df, instr, newPhis, &fresh) { + index = numAllocs + numAllocs++ + } + instr.index = index + case *Defer: + usesDefer = true + if eliminateDeferStack { + // Clear DeferStack and remove references to loads + if instr.DeferStack != nil { + if refs := instr.DeferStack.Referrers(); refs != nil { + *refs = removeInstr(*refs, instr) + } + instr.DeferStack = nil + } + } + case *RunDefers: + b.rundefers++ + } + } + } + + // renaming maps an alloc (keyed by index) to its replacement + // value. Initially the renaming contains nil, signifying the + // zero constant of the appropriate type; we construct the + // Const lazily at most once on each path through the domtree. + // TODO(adonovan): opt: cache per-function not per subtree. + renaming := make([]Value, numAllocs) + + // Renaming. + rename(fn.Blocks[0], renaming, newPhis) + + // Eliminate dead φ-nodes. + removeDeadPhis(fn.Blocks, newPhis) + + // Eliminate ssa:deferstack() call. + if eliminateDeferStack { + b := deferstackCall.block + for i, instr := range b.Instrs { + if instr == deferstackCall { + b.Instrs[i] = nil + b.gaps++ + break + } + } + } + + // Prepend remaining live φ-nodes to each block. + for _, b := range fn.Blocks { + nps := newPhis[b] + j := len(nps) + + rundefersToKill := b.rundefers + if usesDefer { + rundefersToKill = 0 + } + + if j+b.gaps+rundefersToKill == 0 { + continue // fast path: no new phis or gaps + } + + // Compact nps + non-nil Instrs into a new slice. + // TODO(adonovan): opt: compact in situ (rightwards) + // if Instrs has sufficient space or slack. 
+ dst := make([]Instruction, len(b.Instrs)+j-b.gaps-rundefersToKill) + for i, np := range nps { + dst[i] = np.phi + } + for _, instr := range b.Instrs { + if instr == nil { + continue + } + if !usesDefer { + if _, ok := instr.(*RunDefers); ok { + continue + } + } + dst[j] = instr + j++ + } + b.Instrs = dst + } + + // Remove any fn.Locals that were lifted. + j := 0 + for _, l := range fn.Locals { + if l.index < 0 { + fn.Locals[j] = l + j++ + } + } + // Nil out fn.Locals[j:] to aid GC. + for i := j; i < len(fn.Locals); i++ { + fn.Locals[i] = nil + } + fn.Locals = fn.Locals[:j] +} + +// removeDeadPhis removes φ-nodes not transitively needed by a +// non-Phi, non-DebugRef instruction. +func removeDeadPhis(blocks []*BasicBlock, newPhis newPhiMap) { + // First pass: find the set of "live" φ-nodes: those reachable + // from some non-Phi instruction. + // + // We compute reachability in reverse, starting from each φ, + // rather than forwards, starting from each live non-Phi + // instruction, because this way visits much less of the + // Value graph. + livePhis := make(map[*Phi]bool) + for _, npList := range newPhis { + for _, np := range npList { + phi := np.phi + if !livePhis[phi] && phiHasDirectReferrer(phi) { + markLivePhi(livePhis, phi) + } + } + } + + // Existing φ-nodes due to && and || operators + // are all considered live (see Go issue 19622). + for _, b := range blocks { + for _, phi := range b.phis() { + markLivePhi(livePhis, phi.(*Phi)) + } + } + + // Second pass: eliminate unused phis from newPhis. + for block, npList := range newPhis { + j := 0 + for _, np := range npList { + if livePhis[np.phi] { + npList[j] = np + j++ + } else { + // discard it, first removing it from referrers + for _, val := range np.phi.Edges { + if refs := val.Referrers(); refs != nil { + *refs = removeInstr(*refs, np.phi) + } + } + np.phi.block = nil + } + } + newPhis[block] = npList[:j] + } +} + +// markLivePhi marks phi, and all φ-nodes transitively reachable via +// its Operands, live. +func markLivePhi(livePhis map[*Phi]bool, phi *Phi) { + livePhis[phi] = true + for _, rand := range phi.Operands(nil) { + if q, ok := (*rand).(*Phi); ok { + if !livePhis[q] { + markLivePhi(livePhis, q) + } + } + } +} + +// phiHasDirectReferrer reports whether phi is directly referred to by +// a non-Phi instruction. Such instructions are the +// roots of the liveness traversal. +func phiHasDirectReferrer(phi *Phi) bool { + for _, instr := range *phi.Referrers() { + if _, ok := instr.(*Phi); !ok { + return true + } + } + return false +} + +type blockSet struct{ big.Int } // (inherit methods from Int) + +// add adds b to the set and returns true if the set changed. +func (s *blockSet) add(b *BasicBlock) bool { + i := b.Index + if s.Bit(i) != 0 { + return false + } + s.SetBit(&s.Int, i, 1) + return true +} + +// take removes an arbitrary element from a set s and +// returns its index, or returns -1 if empty. +func (s *blockSet) take() int { + l := s.BitLen() + for i := 0; i < l; i++ { + if s.Bit(i) == 1 { + s.SetBit(&s.Int, i, 0) + return i + } + } + return -1 +} + +// newPhi is a pair of a newly introduced φ-node and the lifted Alloc +// it replaces. +type newPhi struct { + phi *Phi + alloc *Alloc +} + +// newPhiMap records for each basic block, the set of newPhis that +// must be prepended to the block. +type newPhiMap map[*BasicBlock][]newPhi + +// liftAlloc determines whether alloc can be lifted into registers, +// and if so, it populates newPhis with all the φ-nodes it may require +// and returns true. 
+// +// fresh is a source of fresh ids for phi nodes. +func liftAlloc(df domFrontier, alloc *Alloc, newPhis newPhiMap, fresh *int) bool { + // Don't lift result values in functions that defer + // calls that may recover from panic. + if fn := alloc.Parent(); fn.Recover != nil { + for _, nr := range fn.results { + if nr == alloc { + return false + } + } + } + + // Compute defblocks, the set of blocks containing a + // definition of the alloc cell. + var defblocks blockSet + for _, instr := range *alloc.Referrers() { + // Bail out if we discover the alloc is not liftable; + // the only operations permitted to use the alloc are + // loads/stores into the cell, and DebugRef. + switch instr := instr.(type) { + case *Store: + if instr.Val == alloc { + return false // address used as value + } + if instr.Addr != alloc { + panic("Alloc.Referrers is inconsistent") + } + defblocks.add(instr.Block()) + case *UnOp: + if instr.Op != token.MUL { + return false // not a load + } + if instr.X != alloc { + panic("Alloc.Referrers is inconsistent") + } + case *DebugRef: + // ok + default: + return false // some other instruction + } + } + // The Alloc itself counts as a (zero) definition of the cell. + defblocks.add(alloc.Block()) + + if debugLifting { + fmt.Fprintln(os.Stderr, "\tlifting ", alloc, alloc.Name()) + } + + fn := alloc.Parent() + + // Φ-insertion. + // + // What follows is the body of the main loop of the insert-φ + // function described by Cytron et al, but instead of using + // counter tricks, we just reset the 'hasAlready' and 'work' + // sets each iteration. These are bitmaps so it's pretty cheap. + // + // TODO(adonovan): opt: recycle slice storage for W, + // hasAlready, defBlocks across liftAlloc calls. + var hasAlready blockSet + + // Initialize W and work to defblocks. + var work blockSet = defblocks // blocks seen + var W blockSet // blocks to do + W.Set(&defblocks.Int) + + // Traverse iterated dominance frontier, inserting φ-nodes. + for i := W.take(); i != -1; i = W.take() { + u := fn.Blocks[i] + for _, v := range df[u.Index] { + if hasAlready.add(v) { + // Create φ-node. + // It will be prepended to v.Instrs later, if needed. + phi := &Phi{ + Edges: make([]Value, len(v.Preds)), + Comment: alloc.Comment, + } + // This is merely a debugging aid: + phi.setNum(*fresh) + *fresh++ + + phi.pos = alloc.Pos() + phi.setType(typeparams.MustDeref(alloc.Type())) + phi.block = v + if debugLifting { + fmt.Fprintf(os.Stderr, "\tplace %s = %s at block %s\n", phi.Name(), phi, v) + } + newPhis[v] = append(newPhis[v], newPhi{phi, alloc}) + + if work.add(v) { + W.add(v) + } + } + } + } + + return true +} + +// replaceAll replaces all intraprocedural uses of x with y, +// updating x.Referrers and y.Referrers. +// Precondition: x.Referrers() != nil, i.e. x must be local to some function. +func replaceAll(x, y Value) { + var rands []*Value + pxrefs := x.Referrers() + pyrefs := y.Referrers() + for _, instr := range *pxrefs { + rands = instr.Operands(rands[:0]) // recycle storage + for _, rand := range rands { + if *rand != nil { + if *rand == x { + *rand = y + } + } + } + if pyrefs != nil { + *pyrefs = append(*pyrefs, instr) // dups ok + } + } + *pxrefs = nil // x is now unreferenced +} + +// renamed returns the value to which alloc is being renamed, +// constructing it lazily if it's the implicit zero initialization. 
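The blockSet type above is simply math/big pressed into service as a growable bit-set worklist. A freestanding sketch of the same idiom, independent of this package (the bitset name is illustrative):

	package main

	import (
		"fmt"
		"math/big"
	)

	type bitset struct{ big.Int }

	// add sets bit i and reports whether the set changed.
	func (s *bitset) add(i int) bool {
		if s.Bit(i) != 0 {
			return false
		}
		s.SetBit(&s.Int, i, 1)
		return true
	}

	// take removes and returns the lowest set bit, or -1 if the set is empty.
	func (s *bitset) take() int {
		for i := 0; i < s.BitLen(); i++ {
			if s.Bit(i) == 1 {
				s.SetBit(&s.Int, i, 0)
				return i
			}
		}
		return -1
	}

	func main() {
		var w bitset
		w.add(3)
		w.add(7)
		for i := w.take(); i != -1; i = w.take() {
			fmt.Println(i) // prints 3, then 7
		}
	}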
+func renamed(renaming []Value, alloc *Alloc) Value { + v := renaming[alloc.index] + if v == nil { + v = zeroConst(typeparams.MustDeref(alloc.Type())) + renaming[alloc.index] = v + } + return v +} + +// rename implements the (Cytron et al) SSA renaming algorithm, a +// preorder traversal of the dominator tree replacing all loads of +// Alloc cells with the value stored to that cell by the dominating +// store instruction. For lifting, we need only consider loads, +// stores and φ-nodes. +// +// renaming is a map from *Alloc (keyed by index number) to its +// dominating stored value; newPhis[x] is the set of new φ-nodes to be +// prepended to block x. +func rename(u *BasicBlock, renaming []Value, newPhis newPhiMap) { + // Each φ-node becomes the new name for its associated Alloc. + for _, np := range newPhis[u] { + phi := np.phi + alloc := np.alloc + renaming[alloc.index] = phi + } + + // Rename loads and stores of allocs. + for i, instr := range u.Instrs { + switch instr := instr.(type) { + case *Alloc: + if instr.index >= 0 { // store of zero to Alloc cell + // Replace dominated loads by the zero value. + renaming[instr.index] = nil + if debugLifting { + fmt.Fprintf(os.Stderr, "\tkill alloc %s\n", instr) + } + // Delete the Alloc. + u.Instrs[i] = nil + u.gaps++ + } + + case *Store: + if alloc, ok := instr.Addr.(*Alloc); ok && alloc.index >= 0 { // store to Alloc cell + // Replace dominated loads by the stored value. + renaming[alloc.index] = instr.Val + if debugLifting { + fmt.Fprintf(os.Stderr, "\tkill store %s; new value: %s\n", + instr, instr.Val.Name()) + } + // Remove the store from the referrer list of the stored value. + if refs := instr.Val.Referrers(); refs != nil { + *refs = removeInstr(*refs, instr) + } + // Delete the Store. + u.Instrs[i] = nil + u.gaps++ + } + + case *UnOp: + if instr.Op == token.MUL { + if alloc, ok := instr.X.(*Alloc); ok && alloc.index >= 0 { // load of Alloc cell + newval := renamed(renaming, alloc) + if debugLifting { + fmt.Fprintf(os.Stderr, "\tupdate load %s = %s with %s\n", + instr.Name(), instr, newval.Name()) + } + // Replace all references to + // the loaded value by the + // dominating stored value. + replaceAll(instr, newval) + // Delete the Load. + u.Instrs[i] = nil + u.gaps++ + } + } + + case *DebugRef: + if alloc, ok := instr.X.(*Alloc); ok && alloc.index >= 0 { // ref of Alloc cell + if instr.IsAddr { + instr.X = renamed(renaming, alloc) + instr.IsAddr = false + + // Add DebugRef to instr.X's referrers. + if refs := instr.X.Referrers(); refs != nil { + *refs = append(*refs, instr) + } + } else { + // A source expression denotes the address + // of an Alloc that was optimized away. + instr.X = nil + + // Delete the DebugRef. + u.Instrs[i] = nil + u.gaps++ + } + } + } + } + + // For each φ-node in a CFG successor, rename the edge. + for _, v := range u.Succs { + phis := newPhis[v] + if len(phis) == 0 { + continue + } + i := v.predIndex(u) + for _, np := range phis { + phi := np.phi + alloc := np.alloc + newval := renamed(renaming, alloc) + if debugLifting { + fmt.Fprintf(os.Stderr, "\tsetphi %s edge %s -> %s (#%d) (alloc=%s) := %s\n", + phi.Name(), u, v, i, alloc.Name(), newval.Name()) + } + phi.Edges[i] = newval + if prefs := newval.Referrers(); prefs != nil { + *prefs = append(*prefs, phi) + } + } + } + + // Continue depth-first recursion over domtree, pushing a + // fresh copy of the renaming map for each subtree. 
+ for i, v := range u.dom.children { + r := renaming + if i < len(u.dom.children)-1 { + // On all but the final iteration, we must make + // a copy to avoid destructive update. + r = make([]Value, len(renaming)) + copy(r, renaming) + } + rename(v, r, newPhis) + } + +} + +// deferstackPreamble returns the *Alloc and ssa:deferstack() call for fn.deferstack. +func deferstackPreamble(fn *Function) (*Alloc, *Call) { + if alloc, _ := fn.vars[fn.deferstack].(*Alloc); alloc != nil { + for _, ref := range *alloc.Referrers() { + if ref, _ := ref.(*Store); ref != nil && ref.Addr == alloc { + if call, _ := ref.Val.(*Call); call != nil { + return alloc, call + } + } + } + } + return nil, nil +} diff --git a/vendor/golang.org/x/tools/go/ssa/lvalue.go b/vendor/golang.org/x/tools/go/ssa/lvalue.go new file mode 100644 index 0000000..eede307 --- /dev/null +++ b/vendor/golang.org/x/tools/go/ssa/lvalue.go @@ -0,0 +1,155 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package ssa + +// lvalues are the union of addressable expressions and map-index +// expressions. + +import ( + "go/ast" + "go/token" + "go/types" + + "golang.org/x/tools/internal/typeparams" +) + +// An lvalue represents an assignable location that may appear on the +// left-hand side of an assignment. This is a generalization of a +// pointer to permit updates to elements of maps. +type lvalue interface { + store(fn *Function, v Value) // stores v into the location + load(fn *Function) Value // loads the contents of the location + address(fn *Function) Value // address of the location + typ() types.Type // returns the type of the location +} + +// An address is an lvalue represented by a true pointer. +type address struct { + addr Value // must have a pointer core type. + pos token.Pos // source position + expr ast.Expr // source syntax of the value (not address) [debug mode] +} + +func (a *address) load(fn *Function) Value { + load := emitLoad(fn, a.addr) + load.pos = a.pos + return load +} + +func (a *address) store(fn *Function, v Value) { + store := emitStore(fn, a.addr, v, a.pos) + if a.expr != nil { + // store.Val is v, converted for assignability. + emitDebugRef(fn, a.expr, store.Val, false) + } +} + +func (a *address) address(fn *Function) Value { + if a.expr != nil { + emitDebugRef(fn, a.expr, a.addr, true) + } + return a.addr +} + +func (a *address) typ() types.Type { + return typeparams.MustDeref(a.addr.Type()) +} + +// An element is an lvalue represented by m[k], the location of an +// element of a map. These locations are not addressable +// since pointers cannot be formed from them, but they do support +// load() and store(). +type element struct { + m, k Value // map + t types.Type // map element type + pos token.Pos // source position of colon ({k:v}) or lbrack (m[k]=v) +} + +func (e *element) load(fn *Function) Value { + l := &Lookup{ + X: e.m, + Index: e.k, + } + l.setPos(e.pos) + l.setType(e.t) + return fn.emit(l) +} + +func (e *element) store(fn *Function, v Value) { + up := &MapUpdate{ + Map: e.m, + Key: e.k, + Value: emitConv(fn, v, e.t), + } + up.pos = e.pos + fn.emit(up) +} + +func (e *element) address(fn *Function) Value { + panic("map elements are not addressable") +} + +func (e *element) typ() types.Type { + return e.t +} + +// A lazyAddress is an lvalue whose address is the result of an instruction. 
+// These work like an *address except a new address.address() Value +// is created on each load, store and address call. +// A lazyAddress can be used to control when a side effect (nil pointer +// dereference, index out of bounds) of using a location happens. +type lazyAddress struct { + addr func(fn *Function) Value // emit to fn the computation of the address + t types.Type // type of the location + pos token.Pos // source position + expr ast.Expr // source syntax of the value (not address) [debug mode] +} + +func (l *lazyAddress) load(fn *Function) Value { + load := emitLoad(fn, l.addr(fn)) + load.pos = l.pos + return load +} + +func (l *lazyAddress) store(fn *Function, v Value) { + store := emitStore(fn, l.addr(fn), v, l.pos) + if l.expr != nil { + // store.Val is v, converted for assignability. + emitDebugRef(fn, l.expr, store.Val, false) + } +} + +func (l *lazyAddress) address(fn *Function) Value { + addr := l.addr(fn) + if l.expr != nil { + emitDebugRef(fn, l.expr, addr, true) + } + return addr +} + +func (l *lazyAddress) typ() types.Type { return l.t } + +// A blank is a dummy variable whose name is "_". +// It is not reified: loads are illegal and stores are ignored. +type blank struct{} + +func (bl blank) load(fn *Function) Value { + panic("blank.load is illegal") +} + +func (bl blank) store(fn *Function, v Value) { + // no-op +} + +func (bl blank) address(fn *Function) Value { + panic("blank var is not addressable") +} + +func (bl blank) typ() types.Type { + // This should be the type of the blank Ident; the typechecker + // doesn't provide this yet, but fortunately, we don't need it + // yet either. + panic("blank.typ is unimplemented") +} diff --git a/vendor/golang.org/x/tools/go/ssa/methods.go b/vendor/golang.org/x/tools/go/ssa/methods.go new file mode 100644 index 0000000..b956018 --- /dev/null +++ b/vendor/golang.org/x/tools/go/ssa/methods.go @@ -0,0 +1,281 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package ssa + +// This file defines utilities for population of method sets. + +import ( + "fmt" + "go/types" + + "golang.org/x/tools/go/types/typeutil" + "golang.org/x/tools/internal/aliases" +) + +// MethodValue returns the Function implementing method sel, building +// wrapper methods on demand. It returns nil if sel denotes an +// interface or generic method. +// +// Precondition: sel.Kind() == MethodVal. +// +// Thread-safe. +// +// Acquires prog.methodsMu. +func (prog *Program) MethodValue(sel *types.Selection) *Function { + if sel.Kind() != types.MethodVal { + panic(fmt.Sprintf("MethodValue(%s) kind != MethodVal", sel)) + } + T := sel.Recv() + if types.IsInterface(T) { + return nil // interface method or type parameter + } + + if prog.isParameterized(T) { + return nil // generic method + } + + if prog.mode&LogSource != 0 { + defer logStack("MethodValue %s %v", T, sel)() + } + + var b builder + + m := func() *Function { + prog.methodsMu.Lock() + defer prog.methodsMu.Unlock() + + // Get or create SSA method set. + mset, ok := prog.methodSets.At(T).(*methodSet) + if !ok { + mset = &methodSet{mapping: make(map[string]*Function)} + prog.methodSets.Set(T, mset) + } + + // Get or create SSA method. 
+ id := sel.Obj().Id() + fn, ok := mset.mapping[id] + if !ok { + obj := sel.Obj().(*types.Func) + needsPromotion := len(sel.Index()) > 1 + needsIndirection := !isPointer(recvType(obj)) && isPointer(T) + if needsPromotion || needsIndirection { + fn = createWrapper(prog, toSelection(sel)) + fn.buildshared = b.shared() + b.enqueue(fn) + } else { + fn = prog.objectMethod(obj, &b) + } + if fn.Signature.Recv() == nil { + panic(fn) + } + mset.mapping[id] = fn + } else { + b.waitForSharedFunction(fn) + } + + return fn + }() + + b.iterate() + + return m +} + +// objectMethod returns the Function for a given method symbol. +// The symbol may be an instance of a generic function. It need not +// belong to an existing SSA package created by a call to +// prog.CreatePackage. +// +// objectMethod panics if the function is not a method. +// +// Acquires prog.objectMethodsMu. +func (prog *Program) objectMethod(obj *types.Func, b *builder) *Function { + sig := obj.Type().(*types.Signature) + if sig.Recv() == nil { + panic("not a method: " + obj.String()) + } + + // Belongs to a created package? + if fn := prog.FuncValue(obj); fn != nil { + return fn + } + + // Instantiation of generic? + if originObj := obj.Origin(); originObj != obj { + origin := prog.objectMethod(originObj, b) + assert(origin.typeparams.Len() > 0, "origin is not generic") + targs := receiverTypeArgs(obj) + return origin.instance(targs, b) + } + + // Consult/update cache of methods created from types.Func. + prog.objectMethodsMu.Lock() + defer prog.objectMethodsMu.Unlock() + fn, ok := prog.objectMethods[obj] + if !ok { + fn = createFunction(prog, obj, obj.Name(), nil, nil, "") + fn.Synthetic = "from type information (on demand)" + fn.buildshared = b.shared() + b.enqueue(fn) + + if prog.objectMethods == nil { + prog.objectMethods = make(map[*types.Func]*Function) + } + prog.objectMethods[obj] = fn + } else { + b.waitForSharedFunction(fn) + } + return fn +} + +// LookupMethod returns the implementation of the method of type T +// identified by (pkg, name). It returns nil if the method exists but +// is an interface method or generic method, and panics if T has no such method. +func (prog *Program) LookupMethod(T types.Type, pkg *types.Package, name string) *Function { + sel := prog.MethodSets.MethodSet(T).Lookup(pkg, name) + if sel == nil { + panic(fmt.Sprintf("%s has no method %s", T, types.Id(pkg, name))) + } + return prog.MethodValue(sel) +} + +// methodSet contains the (concrete) methods of a concrete type (non-interface, non-parameterized). +type methodSet struct { + mapping map[string]*Function // populated lazily +} + +// RuntimeTypes returns a new unordered slice containing all types in +// the program for which a runtime type is required. +// +// A runtime type is required for any non-parameterized, non-interface +// type that is converted to an interface, or for any type (including +// interface types) derivable from one through reflection. +// +// The methods of such types may be reachable through reflection or +// interface calls even if they are never called directly. +// +// Thread-safe. +// +// Acquires prog.runtimeTypesMu. +func (prog *Program) RuntimeTypes() []types.Type { + prog.runtimeTypesMu.Lock() + defer prog.runtimeTypesMu.Unlock() + return prog.runtimeTypes.Keys() +} + +// forEachReachable calls f for type T and each type reachable from +// its type through reflection. +// +// The function f must use memoization to break cycles and +// return false when the type has already been visited. 
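A client-level sketch of the lookup path just described; prog, T and pkg stand for an already-built *ssa.Program, a concrete non-parameterized type, and its defining *types.Package (all assumed, not defined here):

	// Two equivalent ways to obtain the ssa.Function for T's String method.
	fn := prog.LookupMethod(T, pkg, "String") // panics if T has no such method

	if sel := prog.MethodSets.MethodSet(T).Lookup(pkg, "String"); sel != nil {
		fn = prog.MethodValue(sel) // nil if it is an interface or generic method
	}
	_ = fn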
+// +// TODO(adonovan): publish in typeutil and share with go/callgraph/rta. +func forEachReachable(msets *typeutil.MethodSetCache, T types.Type, f func(types.Type) bool) { + var visit func(T types.Type, skip bool) + visit = func(T types.Type, skip bool) { + if !skip { + if !f(T) { + return + } + } + + // Recursion over signatures of each method. + tmset := msets.MethodSet(T) + for i := 0; i < tmset.Len(); i++ { + sig := tmset.At(i).Type().(*types.Signature) + // It is tempting to call visit(sig, false) + // but, as noted in golang.org/cl/65450043, + // the Signature.Recv field is ignored by + // types.Identical and typeutil.Map, which + // is confusing at best. + // + // More importantly, the true signature rtype + // reachable from a method using reflection + // has no receiver but an extra ordinary parameter. + // For the Read method of io.Reader we want: + // func(Reader, []byte) (int, error) + // but here sig is: + // func([]byte) (int, error) + // with .Recv = Reader (though it is hard to + // notice because it doesn't affect Signature.String + // or types.Identical). + // + // TODO(adonovan): construct and visit the correct + // non-method signature with an extra parameter + // (though since unnamed func types have no methods + // there is essentially no actual demand for this). + // + // TODO(adonovan): document whether or not it is + // safe to skip non-exported methods (as RTA does). + visit(sig.Params(), true) // skip the Tuple + visit(sig.Results(), true) // skip the Tuple + } + + switch T := T.(type) { + case *aliases.Alias: + visit(aliases.Unalias(T), skip) // emulates the pre-Alias behavior + + case *types.Basic: + // nop + + case *types.Interface: + // nop---handled by recursion over method set. + + case *types.Pointer: + visit(T.Elem(), false) + + case *types.Slice: + visit(T.Elem(), false) + + case *types.Chan: + visit(T.Elem(), false) + + case *types.Map: + visit(T.Key(), false) + visit(T.Elem(), false) + + case *types.Signature: + if T.Recv() != nil { + panic(fmt.Sprintf("Signature %s has Recv %s", T, T.Recv())) + } + visit(T.Params(), true) // skip the Tuple + visit(T.Results(), true) // skip the Tuple + + case *types.Named: + // A pointer-to-named type can be derived from a named + // type via reflection. It may have methods too. + visit(types.NewPointer(T), false) + + // Consider 'type T struct{S}' where S has methods. + // Reflection provides no way to get from T to struct{S}, + // only to S, so the method set of struct{S} is unwanted, + // so set 'skip' flag during recursion. + visit(T.Underlying(), true) // skip the unnamed type + + case *types.Array: + visit(T.Elem(), false) + + case *types.Struct: + for i, n := 0, T.NumFields(); i < n; i++ { + // TODO(adonovan): document whether or not + // it is safe to skip non-exported fields. + visit(T.Field(i).Type(), false) + } + + case *types.Tuple: + for i, n := 0, T.Len(); i < n; i++ { + visit(T.At(i).Type(), false) + } + + case *types.TypeParam, *types.Union: + // forEachReachable must not be called on parameterized types. + panic(T) + + default: + panic(T) + } + } + visit(T, false) +} diff --git a/vendor/golang.org/x/tools/go/ssa/mode.go b/vendor/golang.org/x/tools/go/ssa/mode.go new file mode 100644 index 0000000..8381639 --- /dev/null +++ b/vendor/golang.org/x/tools/go/ssa/mode.go @@ -0,0 +1,111 @@ +// Copyright 2015 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package ssa + +// This file defines the BuilderMode type and its command-line flag. + +import ( + "bytes" + "fmt" +) + +// BuilderMode is a bitmask of options for diagnostics and checking. +// +// *BuilderMode satisfies the flag.Value interface. Example: +// +// var mode = ssa.BuilderMode(0) +// func init() { flag.Var(&mode, "build", ssa.BuilderModeDoc) } +type BuilderMode uint + +const ( + PrintPackages BuilderMode = 1 << iota // Print package inventory to stdout + PrintFunctions // Print function SSA code to stdout + LogSource // Log source locations as SSA builder progresses + SanityCheckFunctions // Perform sanity checking of function bodies + NaiveForm // Build naïve SSA form: don't replace local loads/stores with registers + BuildSerially // Build packages serially, not in parallel. + GlobalDebug // Enable debug info for all packages + BareInits // Build init functions without guards or calls to dependent inits + InstantiateGenerics // Instantiate generics functions (monomorphize) while building +) + +const BuilderModeDoc = `Options controlling the SSA builder. +The value is a sequence of zero or more of these letters: +C perform sanity [C]hecking of the SSA form. +D include [D]ebug info for every function. +P print [P]ackage inventory. +F print [F]unction SSA code. +S log [S]ource locations as SSA builder progresses. +L build distinct packages seria[L]ly instead of in parallel. +N build [N]aive SSA form: don't replace local loads/stores with registers. +I build bare [I]nit functions: no init guards or calls to dependent inits. +G instantiate [G]eneric function bodies via monomorphization +` + +func (m BuilderMode) String() string { + var buf bytes.Buffer + if m&GlobalDebug != 0 { + buf.WriteByte('D') + } + if m&PrintPackages != 0 { + buf.WriteByte('P') + } + if m&PrintFunctions != 0 { + buf.WriteByte('F') + } + if m&LogSource != 0 { + buf.WriteByte('S') + } + if m&SanityCheckFunctions != 0 { + buf.WriteByte('C') + } + if m&NaiveForm != 0 { + buf.WriteByte('N') + } + if m&BuildSerially != 0 { + buf.WriteByte('L') + } + if m&BareInits != 0 { + buf.WriteByte('I') + } + if m&InstantiateGenerics != 0 { + buf.WriteByte('G') + } + return buf.String() +} + +// Set parses the flag characters in s and updates *m. +func (m *BuilderMode) Set(s string) error { + var mode BuilderMode + for _, c := range s { + switch c { + case 'D': + mode |= GlobalDebug + case 'P': + mode |= PrintPackages + case 'F': + mode |= PrintFunctions + case 'S': + mode |= LogSource | BuildSerially + case 'C': + mode |= SanityCheckFunctions + case 'N': + mode |= NaiveForm + case 'L': + mode |= BuildSerially + case 'I': + mode |= BareInits + case 'G': + mode |= InstantiateGenerics + default: + return fmt.Errorf("unknown BuilderMode option: %q", c) + } + } + *m = mode + return nil +} + +// Get returns m. +func (m BuilderMode) Get() interface{} { return m } diff --git a/vendor/golang.org/x/tools/go/ssa/print.go b/vendor/golang.org/x/tools/go/ssa/print.go new file mode 100644 index 0000000..c890d7e --- /dev/null +++ b/vendor/golang.org/x/tools/go/ssa/print.go @@ -0,0 +1,470 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package ssa + +// This file implements the String() methods for all Value and +// Instruction types. 
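A small, self-contained round trip through the flag syntax defined above:

	package main

	import (
		"fmt"
		"log"

		"golang.org/x/tools/go/ssa"
	)

	func main() {
		var mode ssa.BuilderMode
		if err := mode.Set("NC"); err != nil {
			log.Fatal(err)
		}
		fmt.Println(mode&ssa.NaiveForm != 0) // true
		fmt.Println(mode)                    // "CN": String emits the letters in a fixed order
		// Note that Set("S") turns on BuildSerially in addition to LogSource,
		// and that per-package debug info can be requested with Package.SetDebugMode
		// as a finer-grained alternative to the global 'D' flag.
	}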
+ +import ( + "bytes" + "fmt" + "go/types" + "io" + "reflect" + "sort" + "strings" + + "golang.org/x/tools/go/types/typeutil" + "golang.org/x/tools/internal/typeparams" +) + +// relName returns the name of v relative to i. +// In most cases, this is identical to v.Name(), but references to +// Functions (including methods) and Globals use RelString and +// all types are displayed with relType, so that only cross-package +// references are package-qualified. +func relName(v Value, i Instruction) string { + var from *types.Package + if i != nil { + from = i.Parent().relPkg() + } + switch v := v.(type) { + case Member: // *Function or *Global + return v.RelString(from) + case *Const: + return v.RelString(from) + } + return v.Name() +} + +// normalizeAnyForTesting controls whether we replace occurrences of +// interface{} with any. It is only used for normalizing test output. +var normalizeAnyForTesting bool + +func relType(t types.Type, from *types.Package) string { + s := types.TypeString(t, types.RelativeTo(from)) + if normalizeAnyForTesting { + s = strings.ReplaceAll(s, "interface{}", "any") + } + return s +} + +func relTerm(term *types.Term, from *types.Package) string { + s := relType(term.Type(), from) + if term.Tilde() { + return "~" + s + } + return s +} + +func relString(m Member, from *types.Package) string { + // NB: not all globals have an Object (e.g. init$guard), + // so use Package().Object not Object.Package(). + if pkg := m.Package().Pkg; pkg != nil && pkg != from { + return fmt.Sprintf("%s.%s", pkg.Path(), m.Name()) + } + return m.Name() +} + +// Value.String() +// +// This method is provided only for debugging. +// It never appears in disassembly, which uses Value.Name(). + +func (v *Parameter) String() string { + from := v.Parent().relPkg() + return fmt.Sprintf("parameter %s : %s", v.Name(), relType(v.Type(), from)) +} + +func (v *FreeVar) String() string { + from := v.Parent().relPkg() + return fmt.Sprintf("freevar %s : %s", v.Name(), relType(v.Type(), from)) +} + +func (v *Builtin) String() string { + return fmt.Sprintf("builtin %s", v.Name()) +} + +// Instruction.String() + +func (v *Alloc) String() string { + op := "local" + if v.Heap { + op = "new" + } + from := v.Parent().relPkg() + return fmt.Sprintf("%s %s (%s)", op, relType(typeparams.MustDeref(v.Type()), from), v.Comment) +} + +func (v *Phi) String() string { + var b bytes.Buffer + b.WriteString("phi [") + for i, edge := range v.Edges { + if i > 0 { + b.WriteString(", ") + } + // Be robust against malformed CFG. 
+ if v.block == nil { + b.WriteString("??") + continue + } + block := -1 + if i < len(v.block.Preds) { + block = v.block.Preds[i].Index + } + fmt.Fprintf(&b, "%d: ", block) + edgeVal := "<nil>" // be robust + if edge != nil { + edgeVal = relName(edge, v) + } + b.WriteString(edgeVal) + } + b.WriteString("]") + if v.Comment != "" { + b.WriteString(" #") + b.WriteString(v.Comment) + } + return b.String() +} + +func printCall(v *CallCommon, prefix string, instr Instruction) string { + var b bytes.Buffer + b.WriteString(prefix) + if !v.IsInvoke() { + b.WriteString(relName(v.Value, instr)) + } else { + fmt.Fprintf(&b, "invoke %s.%s", relName(v.Value, instr), v.Method.Name()) + } + b.WriteString("(") + for i, arg := range v.Args { + if i > 0 { + b.WriteString(", ") + } + b.WriteString(relName(arg, instr)) + } + if v.Signature().Variadic() { + b.WriteString("...") + } + b.WriteString(")") + return b.String() +} + +func (c *CallCommon) String() string { + return printCall(c, "", nil) +} + +func (v *Call) String() string { + return printCall(&v.Call, "", v) +} + +func (v *BinOp) String() string { + return fmt.Sprintf("%s %s %s", relName(v.X, v), v.Op.String(), relName(v.Y, v)) +} + +func (v *UnOp) String() string { + return fmt.Sprintf("%s%s%s", v.Op, relName(v.X, v), commaOk(v.CommaOk)) +} + +func printConv(prefix string, v, x Value) string { + from := v.Parent().relPkg() + return fmt.Sprintf("%s %s <- %s (%s)", + prefix, + relType(v.Type(), from), + relType(x.Type(), from), + relName(x, v.(Instruction))) +} + +func (v *ChangeType) String() string { return printConv("changetype", v, v.X) } +func (v *Convert) String() string { return printConv("convert", v, v.X) } +func (v *ChangeInterface) String() string { return printConv("change interface", v, v.X) } +func (v *SliceToArrayPointer) String() string { return printConv("slice to array pointer", v, v.X) } +func (v *MakeInterface) String() string { return printConv("make", v, v.X) } + +func (v *MultiConvert) String() string { + from := v.Parent().relPkg() + + var b strings.Builder + b.WriteString(printConv("multiconvert", v, v.X)) + b.WriteString(" [") + for i, s := range v.from { + for j, d := range v.to { + if i != 0 || j != 0 { + b.WriteString(" | ") + } + fmt.Fprintf(&b, "%s <- %s", relTerm(d, from), relTerm(s, from)) + } + } + b.WriteString("]") + return b.String() +} + +func (v *MakeClosure) String() string { + var b bytes.Buffer + fmt.Fprintf(&b, "make closure %s", relName(v.Fn, v)) + if v.Bindings != nil { + b.WriteString(" [") + for i, c := range v.Bindings { + if i > 0 { + b.WriteString(", ") + } + b.WriteString(relName(c, v)) + } + b.WriteString("]") + } + return b.String() +} + +func (v *MakeSlice) String() string { + from := v.Parent().relPkg() + return fmt.Sprintf("make %s %s %s", + relType(v.Type(), from), + relName(v.Len, v), + relName(v.Cap, v)) +} + +func (v *Slice) String() string { + var b bytes.Buffer + b.WriteString("slice ") + b.WriteString(relName(v.X, v)) + b.WriteString("[") + if v.Low != nil { + b.WriteString(relName(v.Low, v)) + } + b.WriteString(":") + if v.High != nil { + b.WriteString(relName(v.High, v)) + } + if v.Max != nil { + b.WriteString(":") + b.WriteString(relName(v.Max, v)) + } + b.WriteString("]") + return b.String() +} + +func (v *MakeMap) String() string { + res := "" + if v.Reserve != nil { + res = relName(v.Reserve, v) + } + from := v.Parent().relPkg() + return fmt.Sprintf("make %s %s", relType(v.Type(), from), res) +} + +func (v *MakeChan) String() string { + from := v.Parent().relPkg() + return 
fmt.Sprintf("make %s %s", relType(v.Type(), from), relName(v.Size, v)) +} + +func (v *FieldAddr) String() string { + // Be robust against a bad index. + name := "?" + if fld := fieldOf(typeparams.MustDeref(v.X.Type()), v.Field); fld != nil { + name = fld.Name() + } + return fmt.Sprintf("&%s.%s [#%d]", relName(v.X, v), name, v.Field) +} + +func (v *Field) String() string { + // Be robust against a bad index. + name := "?" + if fld := fieldOf(v.X.Type(), v.Field); fld != nil { + name = fld.Name() + } + return fmt.Sprintf("%s.%s [#%d]", relName(v.X, v), name, v.Field) +} + +func (v *IndexAddr) String() string { + return fmt.Sprintf("&%s[%s]", relName(v.X, v), relName(v.Index, v)) +} + +func (v *Index) String() string { + return fmt.Sprintf("%s[%s]", relName(v.X, v), relName(v.Index, v)) +} + +func (v *Lookup) String() string { + return fmt.Sprintf("%s[%s]%s", relName(v.X, v), relName(v.Index, v), commaOk(v.CommaOk)) +} + +func (v *Range) String() string { + return "range " + relName(v.X, v) +} + +func (v *Next) String() string { + return "next " + relName(v.Iter, v) +} + +func (v *TypeAssert) String() string { + from := v.Parent().relPkg() + return fmt.Sprintf("typeassert%s %s.(%s)", commaOk(v.CommaOk), relName(v.X, v), relType(v.AssertedType, from)) +} + +func (v *Extract) String() string { + return fmt.Sprintf("extract %s #%d", relName(v.Tuple, v), v.Index) +} + +func (s *Jump) String() string { + // Be robust against malformed CFG. + block := -1 + if s.block != nil && len(s.block.Succs) == 1 { + block = s.block.Succs[0].Index + } + return fmt.Sprintf("jump %d", block) +} + +func (s *If) String() string { + // Be robust against malformed CFG. + tblock, fblock := -1, -1 + if s.block != nil && len(s.block.Succs) == 2 { + tblock = s.block.Succs[0].Index + fblock = s.block.Succs[1].Index + } + return fmt.Sprintf("if %s goto %d else %d", relName(s.Cond, s), tblock, fblock) +} + +func (s *Go) String() string { + return printCall(&s.Call, "go ", s) +} + +func (s *Panic) String() string { + return "panic " + relName(s.X, s) +} + +func (s *Return) String() string { + var b bytes.Buffer + b.WriteString("return") + for i, r := range s.Results { + if i == 0 { + b.WriteString(" ") + } else { + b.WriteString(", ") + } + b.WriteString(relName(r, s)) + } + return b.String() +} + +func (*RunDefers) String() string { + return "rundefers" +} + +func (s *Send) String() string { + return fmt.Sprintf("send %s <- %s", relName(s.Chan, s), relName(s.X, s)) +} + +func (s *Defer) String() string { + prefix := "defer " + if s.DeferStack != nil { + prefix += "[" + relName(s.DeferStack, s) + "] " + } + c := printCall(&s.Call, prefix, s) + return c +} + +func (s *Select) String() string { + var b bytes.Buffer + for i, st := range s.States { + if i > 0 { + b.WriteString(", ") + } + if st.Dir == types.RecvOnly { + b.WriteString("<-") + b.WriteString(relName(st.Chan, s)) + } else { + b.WriteString(relName(st.Chan, s)) + b.WriteString("<-") + b.WriteString(relName(st.Send, s)) + } + } + non := "" + if !s.Blocking { + non = "non" + } + return fmt.Sprintf("select %sblocking [%s]", non, b.String()) +} + +func (s *Store) String() string { + return fmt.Sprintf("*%s = %s", relName(s.Addr, s), relName(s.Val, s)) +} + +func (s *MapUpdate) String() string { + return fmt.Sprintf("%s[%s] = %s", relName(s.Map, s), relName(s.Key, s), relName(s.Value, s)) +} + +func (s *DebugRef) String() string { + p := s.Parent().Prog.Fset.Position(s.Pos()) + var descr interface{} + if s.object != nil { + descr = s.object // e.g. 
"var x int" + } else { + descr = reflect.TypeOf(s.Expr) // e.g. "*ast.CallExpr" + } + var addr string + if s.IsAddr { + addr = "address of " + } + return fmt.Sprintf("; %s%s @ %d:%d is %s", addr, descr, p.Line, p.Column, s.X.Name()) +} + +func (p *Package) String() string { + return "package " + p.Pkg.Path() +} + +var _ io.WriterTo = (*Package)(nil) // *Package implements io.Writer + +func (p *Package) WriteTo(w io.Writer) (int64, error) { + var buf bytes.Buffer + WritePackage(&buf, p) + n, err := w.Write(buf.Bytes()) + return int64(n), err +} + +// WritePackage writes to buf a human-readable summary of p. +func WritePackage(buf *bytes.Buffer, p *Package) { + fmt.Fprintf(buf, "%s:\n", p) + + var names []string + maxname := 0 + for name := range p.Members { + if l := len(name); l > maxname { + maxname = l + } + names = append(names, name) + } + + from := p.Pkg + sort.Strings(names) + for _, name := range names { + switch mem := p.Members[name].(type) { + case *NamedConst: + fmt.Fprintf(buf, " const %-*s %s = %s\n", + maxname, name, mem.Name(), mem.Value.RelString(from)) + + case *Function: + fmt.Fprintf(buf, " func %-*s %s\n", + maxname, name, relType(mem.Type(), from)) + + case *Type: + fmt.Fprintf(buf, " type %-*s %s\n", + maxname, name, relType(mem.Type().Underlying(), from)) + for _, meth := range typeutil.IntuitiveMethodSet(mem.Type(), &p.Prog.MethodSets) { + fmt.Fprintf(buf, " %s\n", types.SelectionString(meth, types.RelativeTo(from))) + } + + case *Global: + fmt.Fprintf(buf, " var %-*s %s\n", + maxname, name, relType(typeparams.MustDeref(mem.Type()), from)) + } + } + + fmt.Fprintf(buf, "\n") +} + +func commaOk(x bool) string { + if x { + return ",ok" + } + return "" +} diff --git a/vendor/golang.org/x/tools/go/ssa/sanity.go b/vendor/golang.org/x/tools/go/ssa/sanity.go new file mode 100644 index 0000000..285cba0 --- /dev/null +++ b/vendor/golang.org/x/tools/go/ssa/sanity.go @@ -0,0 +1,560 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package ssa + +// An optional pass for sanity-checking invariants of the SSA representation. +// Currently it checks CFG invariants but little at the instruction level. + +import ( + "bytes" + "fmt" + "go/ast" + "go/types" + "io" + "os" + "strings" +) + +type sanity struct { + reporter io.Writer + fn *Function + block *BasicBlock + instrs map[Instruction]unit + insane bool +} + +// sanityCheck performs integrity checking of the SSA representation +// of the function fn and returns true if it was valid. Diagnostics +// are written to reporter if non-nil, os.Stderr otherwise. Some +// diagnostics are only warnings and do not imply a negative result. +// +// Sanity-checking is intended to facilitate the debugging of code +// transformation passes. +func sanityCheck(fn *Function, reporter io.Writer) bool { + if reporter == nil { + reporter = os.Stderr + } + return (&sanity{reporter: reporter}).checkFunction(fn) +} + +// mustSanityCheck is like sanityCheck but panics instead of returning +// a negative result. 
+func mustSanityCheck(fn *Function, reporter io.Writer) { + if !sanityCheck(fn, reporter) { + fn.WriteTo(os.Stderr) + panic("SanityCheck failed") + } +} + +func (s *sanity) diagnostic(prefix, format string, args ...interface{}) { + fmt.Fprintf(s.reporter, "%s: function %s", prefix, s.fn) + if s.block != nil { + fmt.Fprintf(s.reporter, ", block %s", s.block) + } + io.WriteString(s.reporter, ": ") + fmt.Fprintf(s.reporter, format, args...) + io.WriteString(s.reporter, "\n") +} + +func (s *sanity) errorf(format string, args ...interface{}) { + s.insane = true + s.diagnostic("Error", format, args...) +} + +func (s *sanity) warnf(format string, args ...interface{}) { + s.diagnostic("Warning", format, args...) +} + +// findDuplicate returns an arbitrary basic block that appeared more +// than once in blocks, or nil if all were unique. +func findDuplicate(blocks []*BasicBlock) *BasicBlock { + if len(blocks) < 2 { + return nil + } + if blocks[0] == blocks[1] { + return blocks[0] + } + // Slow path: + m := make(map[*BasicBlock]bool) + for _, b := range blocks { + if m[b] { + return b + } + m[b] = true + } + return nil +} + +func (s *sanity) checkInstr(idx int, instr Instruction) { + switch instr := instr.(type) { + case *If, *Jump, *Return, *Panic: + s.errorf("control flow instruction not at end of block") + case *Phi: + if idx == 0 { + // It suffices to apply this check to just the first phi node. + if dup := findDuplicate(s.block.Preds); dup != nil { + s.errorf("phi node in block with duplicate predecessor %s", dup) + } + } else { + prev := s.block.Instrs[idx-1] + if _, ok := prev.(*Phi); !ok { + s.errorf("Phi instruction follows a non-Phi: %T", prev) + } + } + if ne, np := len(instr.Edges), len(s.block.Preds); ne != np { + s.errorf("phi node has %d edges but %d predecessors", ne, np) + + } else { + for i, e := range instr.Edges { + if e == nil { + s.errorf("phi node '%s' has no value for edge #%d from %s", instr.Comment, i, s.block.Preds[i]) + } else if !types.Identical(instr.typ, e.Type()) { + s.errorf("phi node '%s' has a different type (%s) for edge #%d from %s (%s)", + instr.Comment, instr.Type(), i, s.block.Preds[i], e.Type()) + } + } + } + + case *Alloc: + if !instr.Heap { + found := false + for _, l := range s.fn.Locals { + if l == instr { + found = true + break + } + } + if !found { + s.errorf("local alloc %s = %s does not appear in Function.Locals", instr.Name(), instr) + } + } + + case *BinOp: + case *Call: + if common := instr.Call; common.IsInvoke() { + if !types.IsInterface(common.Value.Type()) { + s.errorf("invoke on %s (%s) which is not an interface type (or type param)", common.Value, common.Value.Type()) + } + } + case *ChangeInterface: + case *ChangeType: + case *SliceToArrayPointer: + case *Convert: + if from := instr.X.Type(); !isBasicConvTypes(typeSetOf(from)) { + if to := instr.Type(); !isBasicConvTypes(typeSetOf(to)) { + s.errorf("convert %s -> %s: at least one type must be basic (or all basic, []byte, or []rune)", from, to) + } + } + case *MultiConvert: + case *Defer: + case *Extract: + case *Field: + case *FieldAddr: + case *Go: + case *Index: + case *IndexAddr: + case *Lookup: + case *MakeChan: + case *MakeClosure: + numFree := len(instr.Fn.(*Function).FreeVars) + numBind := len(instr.Bindings) + if numFree != numBind { + s.errorf("MakeClosure has %d Bindings for function %s with %d free vars", + numBind, instr.Fn, numFree) + + } + if recv := instr.Type().(*types.Signature).Recv(); recv != nil { + s.errorf("MakeClosure's type includes receiver %s", recv.Type()) + } + + 
case *MakeInterface: + case *MakeMap: + case *MakeSlice: + case *MapUpdate: + case *Next: + case *Range: + case *RunDefers: + case *Select: + case *Send: + case *Slice: + case *Store: + case *TypeAssert: + case *UnOp: + case *DebugRef: + // TODO(adonovan): implement checks. + default: + panic(fmt.Sprintf("Unknown instruction type: %T", instr)) + } + + if call, ok := instr.(CallInstruction); ok { + if call.Common().Signature() == nil { + s.errorf("nil signature: %s", call) + } + } + + // Check that value-defining instructions have valid types + // and a valid referrer list. + if v, ok := instr.(Value); ok { + t := v.Type() + if t == nil { + s.errorf("no type: %s = %s", v.Name(), v) + } else if t == tRangeIter || t == tDeferStack { + // not a proper type; ignore. + } else if b, ok := t.Underlying().(*types.Basic); ok && b.Info()&types.IsUntyped != 0 { + s.errorf("instruction has 'untyped' result: %s = %s : %s", v.Name(), v, t) + } + s.checkReferrerList(v) + } + + // Untyped constants are legal as instruction Operands(), + // for example: + // _ = "foo"[0] + // or: + // if wordsize==64 {...} + + // All other non-Instruction Values can be found via their + // enclosing Function or Package. +} + +func (s *sanity) checkFinalInstr(instr Instruction) { + switch instr := instr.(type) { + case *If: + if nsuccs := len(s.block.Succs); nsuccs != 2 { + s.errorf("If-terminated block has %d successors; expected 2", nsuccs) + return + } + if s.block.Succs[0] == s.block.Succs[1] { + s.errorf("If-instruction has same True, False target blocks: %s", s.block.Succs[0]) + return + } + + case *Jump: + if nsuccs := len(s.block.Succs); nsuccs != 1 { + s.errorf("Jump-terminated block has %d successors; expected 1", nsuccs) + return + } + + case *Return: + if nsuccs := len(s.block.Succs); nsuccs != 0 { + s.errorf("Return-terminated block has %d successors; expected none", nsuccs) + return + } + if na, nf := len(instr.Results), s.fn.Signature.Results().Len(); nf != na { + s.errorf("%d-ary return in %d-ary function", na, nf) + } + + case *Panic: + if nsuccs := len(s.block.Succs); nsuccs != 0 { + s.errorf("Panic-terminated block has %d successors; expected none", nsuccs) + return + } + + default: + s.errorf("non-control flow instruction at end of block") + } +} + +func (s *sanity) checkBlock(b *BasicBlock, index int) { + s.block = b + + if b.Index != index { + s.errorf("block has incorrect Index %d", b.Index) + } + if b.parent != s.fn { + s.errorf("block has incorrect parent %s", b.parent) + } + + // Check all blocks are reachable. + // (The entry block is always implicitly reachable, + // as is the Recover block, if any.) + if (index > 0 && b != b.parent.Recover) && len(b.Preds) == 0 { + s.warnf("unreachable block") + if b.Instrs == nil { + // Since this block is about to be pruned, + // tolerating transient problems in it + // simplifies other optimizations. + return + } + } + + // Check predecessor and successor relations are dual, + // and that all blocks in CFG belong to same function. 
+ for _, a := range b.Preds { + found := false + for _, bb := range a.Succs { + if bb == b { + found = true + break + } + } + if !found { + s.errorf("expected successor edge in predecessor %s; found only: %s", a, a.Succs) + } + if a.parent != s.fn { + s.errorf("predecessor %s belongs to different function %s", a, a.parent) + } + } + for _, c := range b.Succs { + found := false + for _, bb := range c.Preds { + if bb == b { + found = true + break + } + } + if !found { + s.errorf("expected predecessor edge in successor %s; found only: %s", c, c.Preds) + } + if c.parent != s.fn { + s.errorf("successor %s belongs to different function %s", c, c.parent) + } + } + + // Check each instruction is sane. + n := len(b.Instrs) + if n == 0 { + s.errorf("basic block contains no instructions") + } + var rands [10]*Value // reuse storage + for j, instr := range b.Instrs { + if instr == nil { + s.errorf("nil instruction at index %d", j) + continue + } + if b2 := instr.Block(); b2 == nil { + s.errorf("nil Block() for instruction at index %d", j) + continue + } else if b2 != b { + s.errorf("wrong Block() (%s) for instruction at index %d ", b2, j) + continue + } + if j < n-1 { + s.checkInstr(j, instr) + } else { + s.checkFinalInstr(instr) + } + + // Check Instruction.Operands. + operands: + for i, op := range instr.Operands(rands[:0]) { + if op == nil { + s.errorf("nil operand pointer %d of %s", i, instr) + continue + } + val := *op + if val == nil { + continue // a nil operand is ok + } + + // Check that "untyped" types only appear on constant operands. + if _, ok := (*op).(*Const); !ok { + if basic, ok := (*op).Type().Underlying().(*types.Basic); ok { + if basic.Info()&types.IsUntyped != 0 { + s.errorf("operand #%d of %s is untyped: %s", i, instr, basic) + } + } + } + + // Check that Operands that are also Instructions belong to same function. + // TODO(adonovan): also check their block dominates block b. + if val, ok := val.(Instruction); ok { + if val.Block() == nil { + s.errorf("operand %d of %s is an instruction (%s) that belongs to no block", i, instr, val) + } else if val.Parent() != s.fn { + s.errorf("operand %d of %s is an instruction (%s) from function %s", i, instr, val, val.Parent()) + } + } + + // Check that each function-local operand of + // instr refers back to instr. (NB: quadratic) + switch val := val.(type) { + case *Const, *Global, *Builtin: + continue // not local + case *Function: + if val.parent == nil { + continue // only anon functions are local + } + } + + // TODO(adonovan): check val.Parent() != nil <=> val.Referrers() is defined. + + if refs := val.Referrers(); refs != nil { + for _, ref := range *refs { + if ref == instr { + continue operands + } + } + s.errorf("operand %d of %s (%s) does not refer to us", i, instr, val) + } else { + s.errorf("operand %d of %s (%s) has no referrers", i, instr, val) + } + } + } +} + +func (s *sanity) checkReferrerList(v Value) { + refs := v.Referrers() + if refs == nil { + s.errorf("%s has missing referrer list", v.Name()) + return + } + for i, ref := range *refs { + if _, ok := s.instrs[ref]; !ok { + s.errorf("%s.Referrers()[%d] = %s is not an instruction belonging to this function", v.Name(), i, ref) + } + } +} + +func (s *sanity) checkFunction(fn *Function) bool { + // TODO(adonovan): check Function invariants: + // - check params match signature + // - check transient fields are nil + // - warn if any fn.Locals do not appear among block instructions. + + // TODO(taking): Sanity check origin, typeparams, and typeargs. 
+ s.fn = fn + if fn.Prog == nil { + s.errorf("nil Prog") + } + + var buf bytes.Buffer + _ = fn.String() // must not crash + _ = fn.RelString(fn.relPkg()) // must not crash + WriteFunction(&buf, fn) // must not crash + + // All functions have a package, except delegates (which are + // shared across packages, or duplicated as weak symbols in a + // separate-compilation model), and error.Error. + if fn.Pkg == nil { + if strings.HasPrefix(fn.Synthetic, "from type information (on demand)") || + strings.HasPrefix(fn.Synthetic, "wrapper ") || + strings.HasPrefix(fn.Synthetic, "bound ") || + strings.HasPrefix(fn.Synthetic, "thunk ") || + strings.HasSuffix(fn.name, "Error") || + strings.HasPrefix(fn.Synthetic, "instance ") || + strings.HasPrefix(fn.Synthetic, "instantiation ") || + (fn.parent != nil && len(fn.typeargs) > 0) /* anon fun in instance */ { + // ok + } else { + s.errorf("nil Pkg") + } + } + if src, syn := fn.Synthetic == "", fn.Syntax() != nil; src != syn { + if len(fn.typeargs) > 0 && fn.Prog.mode&InstantiateGenerics != 0 { + // ok (instantiation with InstantiateGenerics on) + } else if fn.topLevelOrigin != nil && len(fn.typeargs) > 0 { + // ok (we always have the syntax set for instantiation) + } else if _, rng := fn.syntax.(*ast.RangeStmt); rng && fn.Synthetic == "range-over-func yield" { + // ok (range-func-yields are both synthetic and keep syntax) + } else { + s.errorf("got fromSource=%t, hasSyntax=%t; want same values", src, syn) + } + } + for i, l := range fn.Locals { + if l.Parent() != fn { + s.errorf("Local %s at index %d has wrong parent", l.Name(), i) + } + if l.Heap { + s.errorf("Local %s at index %d has Heap flag set", l.Name(), i) + } + } + // Build the set of valid referrers. + s.instrs = make(map[Instruction]unit) + for _, b := range fn.Blocks { + for _, instr := range b.Instrs { + s.instrs[instr] = unit{} + } + } + for i, p := range fn.Params { + if p.Parent() != fn { + s.errorf("Param %s at index %d has wrong parent", p.Name(), i) + } + // Check common suffix of Signature and Params match type. + if sig := fn.Signature; sig != nil { + j := i - len(fn.Params) + sig.Params().Len() // index within sig.Params + if j < 0 { + continue + } + if !types.Identical(p.Type(), sig.Params().At(j).Type()) { + s.errorf("Param %s at index %d has wrong type (%s, versus %s in Signature)", p.Name(), i, p.Type(), sig.Params().At(j).Type()) + + } + } + s.checkReferrerList(p) + } + for i, fv := range fn.FreeVars { + if fv.Parent() != fn { + s.errorf("FreeVar %s at index %d has wrong parent", fv.Name(), i) + } + s.checkReferrerList(fv) + } + + if fn.Blocks != nil && len(fn.Blocks) == 0 { + // Function _had_ blocks (so it's not external) but + // they were "optimized" away, even the entry block. + s.errorf("Blocks slice is non-nil but empty") + } + for i, b := range fn.Blocks { + if b == nil { + s.warnf("nil *BasicBlock at f.Blocks[%d]", i) + continue + } + s.checkBlock(b, i) + } + if fn.Recover != nil && fn.Blocks[fn.Recover.Index] != fn.Recover { + s.errorf("Recover block is not in Blocks slice") + } + + s.block = nil + for i, anon := range fn.AnonFuncs { + if anon.Parent() != fn { + s.errorf("AnonFuncs[%d]=%s but %s.Parent()=%s", i, anon, anon, anon.Parent()) + } + if i != int(anon.anonIdx) { + s.errorf("AnonFuncs[%d]=%s but %s.anonIdx=%d", i, anon, anon, anon.anonIdx) + } + } + s.fn = nil + return !s.insane +} + +// sanityCheckPackage checks invariants of packages upon creation. +// It does not require that the package is built. 
+// Unlike sanityCheck (for functions), it just panics at the first error. +func sanityCheckPackage(pkg *Package) { + if pkg.Pkg == nil { + panic(fmt.Sprintf("Package %s has no Object", pkg)) + } + _ = pkg.String() // must not crash + + for name, mem := range pkg.Members { + if name != mem.Name() { + panic(fmt.Sprintf("%s: %T.Name() = %s, want %s", + pkg.Pkg.Path(), mem, mem.Name(), name)) + } + obj := mem.Object() + if obj == nil { + // This check is sound because fields + // {Global,Function}.object have type + // types.Object. (If they were declared as + // *types.{Var,Func}, we'd have a non-empty + // interface containing a nil pointer.) + + continue // not all members have typechecker objects + } + if obj.Name() != name { + if obj.Name() == "init" && strings.HasPrefix(mem.Name(), "init#") { + // Ok. The name of a declared init function varies between + // its types.Func ("init") and its ssa.Function ("init#%d"). + } else { + panic(fmt.Sprintf("%s: %T.Object().Name() = %s, want %s", + pkg.Pkg.Path(), mem, obj.Name(), name)) + } + } + if obj.Pos() != mem.Pos() { + panic(fmt.Sprintf("%s Pos=%d obj.Pos=%d", mem, mem.Pos(), obj.Pos())) + } + } +} diff --git a/vendor/golang.org/x/tools/go/ssa/source.go b/vendor/golang.org/x/tools/go/ssa/source.go new file mode 100644 index 0000000..7b71c88 --- /dev/null +++ b/vendor/golang.org/x/tools/go/ssa/source.go @@ -0,0 +1,288 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package ssa + +// This file defines utilities for working with source positions +// or source-level named entities ("objects"). + +// TODO(adonovan): test that {Value,Instruction}.Pos() positions match +// the originating syntax, as specified. + +import ( + "go/ast" + "go/token" + "go/types" +) + +// EnclosingFunction returns the function that contains the syntax +// node denoted by path. +// +// Syntax associated with package-level variable specifications is +// enclosed by the package's init() function. +// +// Returns nil if not found; reasons might include: +// - the node is not enclosed by any function. +// - the node is within an anonymous function (FuncLit) and +// its SSA function has not been created yet +// (pkg.Build() has not yet been called). +func EnclosingFunction(pkg *Package, path []ast.Node) *Function { + // Start with package-level function... + fn := findEnclosingPackageLevelFunction(pkg, path) + if fn == nil { + return nil // not in any function + } + + // ...then walk down the nested anonymous functions. + n := len(path) +outer: + for i := range path { + if lit, ok := path[n-1-i].(*ast.FuncLit); ok { + for _, anon := range fn.AnonFuncs { + if anon.Pos() == lit.Type.Func { + fn = anon + continue outer + } + } + // SSA function not found: + // - package not yet built, or maybe + // - builder skipped FuncLit in dead block + // (in principle; but currently the Builder + // generates even dead FuncLits). + return nil + } + } + return fn +} + +// HasEnclosingFunction returns true if the AST node denoted by path +// is contained within the declaration of some function or +// package-level variable. +// +// Unlike EnclosingFunction, the behaviour of this function does not +// depend on whether SSA code for pkg has been built, so it can be +// used to quickly reject check inputs that will cause +// EnclosingFunction to fail, prior to SSA building. 
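//
// An illustrative sketch of using the two functions together, assuming file,
// start and end describe the caller's selection and pkg is the corresponding
// *ssa.Package:
//
//	path, exact := astutil.PathEnclosingInterval(file, start, end)
//	if !exact || !ssa.HasEnclosingFunction(pkg, path) {
//		return // selection is not within any function
//	}
//	pkg.Build() // EnclosingFunction needs SSA code for anonymous functions
//	fn := ssa.EnclosingFunction(pkg, path)
//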
+func HasEnclosingFunction(pkg *Package, path []ast.Node) bool { + return findEnclosingPackageLevelFunction(pkg, path) != nil +} + +// findEnclosingPackageLevelFunction returns the Function +// corresponding to the package-level function enclosing path. +func findEnclosingPackageLevelFunction(pkg *Package, path []ast.Node) *Function { + if n := len(path); n >= 2 { // [... {Gen,Func}Decl File] + switch decl := path[n-2].(type) { + case *ast.GenDecl: + if decl.Tok == token.VAR && n >= 3 { + // Package-level 'var' initializer. + return pkg.init + } + + case *ast.FuncDecl: + if decl.Recv == nil && decl.Name.Name == "init" { + // Explicit init() function. + for _, b := range pkg.init.Blocks { + for _, instr := range b.Instrs { + if instr, ok := instr.(*Call); ok { + if callee, ok := instr.Call.Value.(*Function); ok && callee.Pkg == pkg && callee.Pos() == decl.Name.NamePos { + return callee + } + } + } + } + // Hack: return non-nil when SSA is not yet + // built so that HasEnclosingFunction works. + return pkg.init + } + // Declared function/method. + return findNamedFunc(pkg, decl.Name.NamePos) + } + } + return nil // not in any function +} + +// findNamedFunc returns the named function whose FuncDecl.Ident is at +// position pos. +func findNamedFunc(pkg *Package, pos token.Pos) *Function { + // Look at all package members and method sets of named types. + // Not very efficient. + for _, mem := range pkg.Members { + switch mem := mem.(type) { + case *Function: + if mem.Pos() == pos { + return mem + } + case *Type: + mset := pkg.Prog.MethodSets.MethodSet(types.NewPointer(mem.Type())) + for i, n := 0, mset.Len(); i < n; i++ { + // Don't call Program.Method: avoid creating wrappers. + obj := mset.At(i).Obj().(*types.Func) + if obj.Pos() == pos { + // obj from MethodSet may not be the origin type. + m := obj.Origin() + return pkg.objects[m].(*Function) + } + } + } + } + return nil +} + +// ValueForExpr returns the SSA Value that corresponds to non-constant +// expression e. +// +// It returns nil if no value was found, e.g. +// - the expression is not lexically contained within f; +// - f was not built with debug information; or +// - e is a constant expression. (For efficiency, no debug +// information is stored for constants. Use +// go/types.Info.Types[e].Value instead.) +// - e is a reference to nil or a built-in function. +// - the value was optimised away. +// +// If e is an addressable expression used in an lvalue context, +// value is the address denoted by e, and isAddr is true. +// +// The types of e (or &e, if isAddr) and the result are equal +// (modulo "untyped" bools resulting from comparisons). +// +// (Tip: to find the ssa.Value given a source position, use +// astutil.PathEnclosingInterval to locate the ast.Node, then +// EnclosingFunction to locate the Function, then ValueForExpr to find +// the ssa.Value.) +func (f *Function) ValueForExpr(e ast.Expr) (value Value, isAddr bool) { + if f.debugInfo() { // (opt) + e = unparen(e) + for _, b := range f.Blocks { + for _, instr := range b.Instrs { + if ref, ok := instr.(*DebugRef); ok { + if ref.Expr == e { + return ref.X, ref.IsAddr + } + } + } + } + } + return +} + +// --- Lookup functions for source-level named entities (types.Objects) --- + +// Package returns the SSA Package corresponding to the specified +// type-checker package. It returns nil if no such Package was +// created by a prior call to prog.CreatePackage. 
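//
// A hypothetical helper (valueAt is not part of this package) combining
// PathEnclosingInterval, EnclosingFunction and ValueForExpr, assuming the
// program was built with the GlobalDebug builder mode so that DebugRef
// instructions are present:
//
//	func valueAt(pkg *ssa.Package, file *ast.File, start, end token.Pos) (ssa.Value, bool) {
//		path, exact := astutil.PathEnclosingInterval(file, start, end)
//		if !exact {
//			return nil, false
//		}
//		e, ok := path[0].(ast.Expr)
//		if !ok {
//			return nil, false
//		}
//		fn := ssa.EnclosingFunction(pkg, path)
//		if fn == nil {
//			return nil, false
//		}
//		v, _ := fn.ValueForExpr(e) // second result reports whether v is the address of e
//		return v, v != nil
//	}
//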
+func (prog *Program) Package(pkg *types.Package) *Package { + return prog.packages[pkg] +} + +// packageLevelMember returns the package-level member corresponding +// to the specified symbol, which may be a package-level const +// (*NamedConst), var (*Global) or func/method (*Function) of some +// package in prog. +// +// It returns nil if the object belongs to a package that has not been +// created by prog.CreatePackage. +func (prog *Program) packageLevelMember(obj types.Object) Member { + if pkg, ok := prog.packages[obj.Pkg()]; ok { + return pkg.objects[obj] + } + return nil +} + +// FuncValue returns the SSA function or (non-interface) method +// denoted by the specified func symbol. It returns nil id the symbol +// denotes an interface method, or belongs to a package that was not +// created by prog.CreatePackage. +func (prog *Program) FuncValue(obj *types.Func) *Function { + fn, _ := prog.packageLevelMember(obj).(*Function) + return fn +} + +// ConstValue returns the SSA constant denoted by the specified const symbol. +func (prog *Program) ConstValue(obj *types.Const) *Const { + // TODO(adonovan): opt: share (don't reallocate) + // Consts for const objects and constant ast.Exprs. + + // Universal constant? {true,false,nil} + if obj.Parent() == types.Universe { + return NewConst(obj.Val(), obj.Type()) + } + // Package-level named constant? + if v := prog.packageLevelMember(obj); v != nil { + return v.(*NamedConst).Value + } + return NewConst(obj.Val(), obj.Type()) +} + +// VarValue returns the SSA Value that corresponds to a specific +// identifier denoting the specified var symbol. +// +// VarValue returns nil if a local variable was not found, perhaps +// because its package was not built, the debug information was not +// requested during SSA construction, or the value was optimized away. +// +// ref is the path to an ast.Ident (e.g. from PathEnclosingInterval), +// and that ident must resolve to obj. +// +// pkg is the package enclosing the reference. (A reference to a var +// always occurs within a function, so we need to know where to find it.) +// +// If the identifier is a field selector and its base expression is +// non-addressable, then VarValue returns the value of that field. +// For example: +// +// func f() struct {x int} +// f().x // VarValue(x) returns a *Field instruction of type int +// +// All other identifiers denote addressable locations (variables). +// For them, VarValue may return either the variable's address or its +// value, even when the expression is evaluated only for its value; the +// situation is reported by isAddr, the second component of the result. +// +// If !isAddr, the returned value is the one associated with the +// specific identifier. For example, +// +// var x int // VarValue(x) returns Const 0 here +// x = 1 // VarValue(x) returns Const 1 here +// +// It is not specified whether the value or the address is returned in +// any particular case, as it may depend upon optimizations performed +// during SSA code generation, such as registerization, constant +// folding, avoidance of materialization of subexpressions, etc. +func (prog *Program) VarValue(obj *types.Var, pkg *Package, ref []ast.Node) (value Value, isAddr bool) { + // All references to a var are local to some function, possibly init. + fn := EnclosingFunction(pkg, ref) + if fn == nil { + return // e.g. def of struct field; SSA not built? + } + + id := ref[0].(*ast.Ident) + + // Defining ident of a parameter? 
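	// (The declaring identifier of a parameter shares its position with the
	// parameter's types.Var, so it is resolved below by comparing the
	// parameter's Object() against obj rather than by scanning DebugRefs.)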
+ if id.Pos() == obj.Pos() { + for _, param := range fn.Params { + if param.Object() == obj { + return param, false + } + } + } + + // Other ident? + for _, b := range fn.Blocks { + for _, instr := range b.Instrs { + if dr, ok := instr.(*DebugRef); ok { + if dr.Pos() == id.Pos() { + return dr.X, dr.IsAddr + } + } + } + } + + // Defining ident of package-level var? + if v := prog.packageLevelMember(obj); v != nil { + return v.(*Global), true + } + + return // e.g. debug info not requested, or var optimized away +} diff --git a/vendor/golang.org/x/tools/go/ssa/ssa.go b/vendor/golang.org/x/tools/go/ssa/ssa.go new file mode 100644 index 0000000..1231afd --- /dev/null +++ b/vendor/golang.org/x/tools/go/ssa/ssa.go @@ -0,0 +1,1871 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package ssa + +// This package defines a high-level intermediate representation for +// Go programs using static single-assignment (SSA) form. + +import ( + "fmt" + "go/ast" + "go/constant" + "go/token" + "go/types" + "sync" + + "golang.org/x/tools/go/types/typeutil" + "golang.org/x/tools/internal/typeparams" +) + +// A Program is a partial or complete Go program converted to SSA form. +type Program struct { + Fset *token.FileSet // position information for the files of this Program + imported map[string]*Package // all importable Packages, keyed by import path + packages map[*types.Package]*Package // all created Packages + mode BuilderMode // set of mode bits for SSA construction + MethodSets typeutil.MethodSetCache // cache of type-checker's method-sets + + canon *canonizer // type canonicalization map + ctxt *types.Context // cache for type checking instantiations + + methodsMu sync.Mutex + methodSets typeutil.Map // maps type to its concrete *methodSet + + // memoization of whether a type refers to type parameters + hasParamsMu sync.Mutex + hasParams typeparams.Free + + runtimeTypesMu sync.Mutex + runtimeTypes typeutil.Map // set of runtime types (from MakeInterface) + + // objectMethods is a memoization of objectMethod + // to avoid creation of duplicate methods from type information. + objectMethodsMu sync.Mutex + objectMethods map[*types.Func]*Function +} + +// A Package is a single analyzed Go package containing Members for +// all package-level functions, variables, constants and types it +// declares. These may be accessed directly via Members, or via the +// type-specific accessor methods Func, Type, Var and Const. +// +// Members also contains entries for "init" (the synthetic package +// initializer) and "init#%d", the nth declared init function, +// and unspecified other things too. +type Package struct { + Prog *Program // the owning program + Pkg *types.Package // the corresponding go/types.Package + Members map[string]Member // all package members keyed by name (incl. init and init#%d) + objects map[types.Object]Member // mapping of package objects to members (incl. methods). Contains *NamedConst, *Global, *Function (values but not types) + init *Function // Func("init"); the package's init function + debug bool // include full debug info in this package + syntax bool // package was loaded from syntax + + // The following fields are set transiently, then cleared + // after building. 
+ buildOnce sync.Once // ensures package building occurs once + ninit int32 // number of init functions + info *types.Info // package type information + files []*ast.File // package ASTs + created []*Function // members created as a result of building this package (includes declared functions, wrappers) + initVersion map[ast.Expr]string // goversion to use for each global var init expr +} + +// A Member is a member of a Go package, implemented by *NamedConst, +// *Global, *Function, or *Type; they are created by package-level +// const, var, func and type declarations respectively. +type Member interface { + Name() string // declared name of the package member + String() string // package-qualified name of the package member + RelString(*types.Package) string // like String, but relative refs are unqualified + Object() types.Object // typechecker's object for this member, if any + Pos() token.Pos // position of member's declaration, if known + Type() types.Type // type of the package member + Token() token.Token // token.{VAR,FUNC,CONST,TYPE} + Package() *Package // the containing package +} + +// A Type is a Member of a Package representing a package-level named type. +type Type struct { + object *types.TypeName + pkg *Package +} + +// A NamedConst is a Member of a Package representing a package-level +// named constant. +// +// Pos() returns the position of the declaring ast.ValueSpec.Names[*] +// identifier. +// +// NB: a NamedConst is not a Value; it contains a constant Value, which +// it augments with the name and position of its 'const' declaration. +type NamedConst struct { + object *types.Const + Value *Const + pkg *Package +} + +// A Value is an SSA value that can be referenced by an instruction. +type Value interface { + // Name returns the name of this value, and determines how + // this Value appears when used as an operand of an + // Instruction. + // + // This is the same as the source name for Parameters, + // Builtins, Functions, FreeVars, Globals. + // For constants, it is a representation of the constant's value + // and type. For all other Values this is the name of the + // virtual register defined by the instruction. + // + // The name of an SSA Value is not semantically significant, + // and may not even be unique within a function. + Name() string + + // If this value is an Instruction, String returns its + // disassembled form; otherwise it returns unspecified + // human-readable information about the Value, such as its + // kind, name and type. + String() string + + // Type returns the type of this value. Many instructions + // (e.g. IndexAddr) change their behaviour depending on the + // types of their operands. + Type() types.Type + + // Parent returns the function to which this Value belongs. + // It returns nil for named Functions, Builtin, Const and Global. + Parent() *Function + + // Referrers returns the list of instructions that have this + // value as one of their operands; it may contain duplicates + // if an instruction has a repeated operand. + // + // Referrers actually returns a pointer through which the + // caller may perform mutations to the object's state. + // + // Referrers is currently only defined if Parent()!=nil, + // i.e. for the function-local values FreeVar, Parameter, + // Functions (iff anonymous) and all value-defining instructions. + // It returns nil for named Functions, Builtin, Const and Global. + // + // Instruction.Operands contains the inverse of this relation. 
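	//
	// An illustrative sketch of walking a value's uses, where v is assumed
	// to be some function-local Value:
	//
	//	if refs := v.Referrers(); refs != nil {
	//		for _, instr := range *refs {
	//			fmt.Println(instr.Parent(), instr)
	//		}
	//	}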
+ Referrers() *[]Instruction + + // Pos returns the location of the AST token most closely + // associated with the operation that gave rise to this value, + // or token.NoPos if it was not explicit in the source. + // + // For each ast.Node type, a particular token is designated as + // the closest location for the expression, e.g. the Lparen + // for an *ast.CallExpr. This permits a compact but + // approximate mapping from Values to source positions for use + // in diagnostic messages, for example. + // + // (Do not use this position to determine which Value + // corresponds to an ast.Expr; use Function.ValueForExpr + // instead. NB: it requires that the function was built with + // debug information.) + Pos() token.Pos +} + +// An Instruction is an SSA instruction that computes a new Value or +// has some effect. +// +// An Instruction that defines a value (e.g. BinOp) also implements +// the Value interface; an Instruction that only has an effect (e.g. Store) +// does not. +type Instruction interface { + // String returns the disassembled form of this value. + // + // Examples of Instructions that are Values: + // "x + y" (BinOp) + // "len([])" (Call) + // Note that the name of the Value is not printed. + // + // Examples of Instructions that are not Values: + // "return x" (Return) + // "*y = x" (Store) + // + // (The separation Value.Name() from Value.String() is useful + // for some analyses which distinguish the operation from the + // value it defines, e.g., 'y = local int' is both an allocation + // of memory 'local int' and a definition of a pointer y.) + String() string + + // Parent returns the function to which this instruction + // belongs. + Parent() *Function + + // Block returns the basic block to which this instruction + // belongs. + Block() *BasicBlock + + // setBlock sets the basic block to which this instruction belongs. + setBlock(*BasicBlock) + + // Operands returns the operands of this instruction: the + // set of Values it references. + // + // Specifically, it appends their addresses to rands, a + // user-provided slice, and returns the resulting slice, + // permitting avoidance of memory allocation. + // + // The operands are appended in undefined order, but the order + // is consistent for a given Instruction; the addresses are + // always non-nil but may point to a nil Value. Clients may + // store through the pointers, e.g. to effect a value + // renaming. + // + // Value.Referrers is a subset of the inverse of this + // relation. (Referrers are not tracked for all types of + // Values.) + Operands(rands []*Value) []*Value + + // Pos returns the location of the AST token most closely + // associated with the operation that gave rise to this + // instruction, or token.NoPos if it was not explicit in the + // source. + // + // For each ast.Node type, a particular token is designated as + // the closest location for the expression, e.g. the Go token + // for an *ast.GoStmt. This permits a compact but approximate + // mapping from Instructions to source positions for use in + // diagnostic messages, for example. + // + // (Do not use this position to determine which Instruction + // corresponds to an ast.Expr; see the notes for Value.Pos. + // This position may be used to determine which non-Value + // Instruction corresponds to some ast.Stmts, but not all: If + // and Jump instructions have no Pos(), for example.) + Pos() token.Pos +} + +// A Node is a node in the SSA value graph. 
Every concrete type that +// implements Node is also either a Value, an Instruction, or both. +// +// Node contains the methods common to Value and Instruction, plus the +// Operands and Referrers methods generalized to return nil for +// non-Instructions and non-Values, respectively. +// +// Node is provided to simplify SSA graph algorithms. Clients should +// use the more specific and informative Value or Instruction +// interfaces where appropriate. +type Node interface { + // Common methods: + String() string + Pos() token.Pos + Parent() *Function + + // Partial methods: + Operands(rands []*Value) []*Value // nil for non-Instructions + Referrers() *[]Instruction // nil for non-Values +} + +// Function represents the parameters, results, and code of a function +// or method. +// +// If Blocks is nil, this indicates an external function for which no +// Go source code is available. In this case, FreeVars, Locals, and +// Params are nil too. Clients performing whole-program analysis must +// handle external functions specially. +// +// Blocks contains the function's control-flow graph (CFG). +// Blocks[0] is the function entry point; block order is not otherwise +// semantically significant, though it may affect the readability of +// the disassembly. +// To iterate over the blocks in dominance order, use DomPreorder(). +// +// Recover is an optional second entry point to which control resumes +// after a recovered panic. The Recover block may contain only a return +// statement, preceded by a load of the function's named return +// parameters, if any. +// +// A nested function (Parent()!=nil) that refers to one or more +// lexically enclosing local variables ("free variables") has FreeVars. +// Such functions cannot be called directly but require a +// value created by MakeClosure which, via its Bindings, supplies +// values for these parameters. +// +// If the function is a method (Signature.Recv() != nil) then the first +// element of Params is the receiver parameter. +// +// A Go package may declare many functions called "init". +// For each one, Object().Name() returns "init" but Name() returns +// "init#1", etc, in declaration order. +// +// Pos() returns the declaring ast.FuncLit.Type.Func or the position +// of the ast.FuncDecl.Name, if the function was explicit in the +// source. Synthetic wrappers, for which Synthetic != "", may share +// the same position as the function they wrap. +// Syntax.Pos() always returns the position of the declaring "func" token. +// +// When the operand of a range statement is an iterator function, +// the loop body is transformed into a synthetic anonymous function +// that is passed as the yield argument in a call to the iterator. +// In that case, Function.Pos is the position of the "range" token, +// and Function.Syntax is the ast.RangeStmt. +// +// Synthetic functions, for which Synthetic != "", are functions +// that do not appear in the source AST. These include: +// - method wrappers, +// - thunks, +// - bound functions, +// - empty functions built from loaded type information, +// - yield functions created from range-over-func loops, +// - package init functions, and +// - instantiations of generic functions. +// +// Synthetic wrapper functions may share the same position +// as the function they wrap. +// +// Type() returns the function's Signature. +// +// A generic function is a function or method that has uninstantiated type +// parameters (TypeParams() != nil). Consider a hypothetical generic +// method, (*Map[K,V]).Get. 
It may be instantiated with all +// non-parameterized types as (*Map[string,int]).Get or with +// parameterized types as (*Map[string,U]).Get, where U is a type parameter. +// In both instantiations, Origin() refers to the instantiated generic +// method, (*Map[K,V]).Get, TypeParams() refers to the parameters [K,V] of +// the generic method. TypeArgs() refers to [string,U] or [string,int], +// respectively, and is nil in the generic method. +type Function struct { + name string + object *types.Func // symbol for declared function (nil for FuncLit or synthetic init) + method *selection // info about provenance of synthetic methods; thunk => non-nil + Signature *types.Signature + pos token.Pos + + // source information + Synthetic string // provenance of synthetic function; "" for true source functions + syntax ast.Node // *ast.Func{Decl,Lit}, if from syntax (incl. generic instances) or (*ast.RangeStmt if a yield function) + info *types.Info // type annotations (iff syntax != nil) + goversion string // Go version of syntax (NB: init is special) + + parent *Function // enclosing function if anon; nil if global + Pkg *Package // enclosing package; nil for shared funcs (wrappers and error.Error) + Prog *Program // enclosing program + + buildshared *task // wait for a shared function to be done building (may be nil if <=1 builder ever needs to wait) + + // These fields are populated only when the function body is built: + + Params []*Parameter // function parameters; for methods, includes receiver + FreeVars []*FreeVar // free variables whose values must be supplied by closure + Locals []*Alloc // frame-allocated variables of this function + Blocks []*BasicBlock // basic blocks of the function; nil => external + Recover *BasicBlock // optional; control transfers here after recovered panic + AnonFuncs []*Function // anonymous functions (from FuncLit,RangeStmt) directly beneath this one + referrers []Instruction // referring instructions (iff Parent() != nil) + anonIdx int32 // position of a nested function in parent's AnonFuncs. fn.Parent()!=nil => fn.Parent().AnonFunc[fn.anonIdx] == fn. + + typeparams *types.TypeParamList // type parameters of this function. typeparams.Len() > 0 => generic or instance of generic function + typeargs []types.Type // type arguments that instantiated typeparams. len(typeargs) > 0 => instance of generic function + topLevelOrigin *Function // the origin function if this is an instance of a source function. nil if Parent()!=nil. + generic *generic // instances of this function, if generic + + // The following fields are cleared after building. + build buildFunc // algorithm to build function body (nil => built) + currentBlock *BasicBlock // where to emit code + vars map[*types.Var]Value // addresses of local variables + results []*Alloc // result allocations of the current function + returnVars []*types.Var // variables for a return statement. 
Either results or for range-over-func a parent's results + targets *targets // linked stack of branch targets + lblocks map[*types.Label]*lblock // labelled blocks + subst *subster // type parameter substitutions (if non-nil) + jump *types.Var // synthetic variable for the yield state (non-nil => range-over-func) + deferstack *types.Var // synthetic variable holding enclosing ssa:deferstack() + source *Function // nearest enclosing source function + exits []*exit // exits of the function that need to be resolved + uniq int64 // source of unique ints within the source tree while building +} + +// BasicBlock represents an SSA basic block. +// +// The final element of Instrs is always an explicit transfer of +// control (If, Jump, Return, or Panic). +// +// A block may contain no Instructions only if it is unreachable, +// i.e., Preds is nil. Empty blocks are typically pruned. +// +// BasicBlocks and their Preds/Succs relation form a (possibly cyclic) +// graph independent of the SSA Value graph: the control-flow graph or +// CFG. It is illegal for multiple edges to exist between the same +// pair of blocks. +// +// Each BasicBlock is also a node in the dominator tree of the CFG. +// The tree may be navigated using Idom()/Dominees() and queried using +// Dominates(). +// +// The order of Preds and Succs is significant (to Phi and If +// instructions, respectively). +type BasicBlock struct { + Index int // index of this block within Parent().Blocks + Comment string // optional label; no semantic significance + parent *Function // parent function + Instrs []Instruction // instructions in order + Preds, Succs []*BasicBlock // predecessors and successors + succs2 [2]*BasicBlock // initial space for Succs + dom domInfo // dominator tree info + gaps int // number of nil Instrs (transient) + rundefers int // number of rundefers (transient) +} + +// Pure values ---------------------------------------- + +// A FreeVar represents a free variable of the function to which it +// belongs. +// +// FreeVars are used to implement anonymous functions, whose free +// variables are lexically captured in a closure formed by +// MakeClosure. The value of such a free var is an Alloc or another +// FreeVar and is considered a potentially escaping heap address, with +// pointer type. +// +// FreeVars are also used to implement bound method closures. Such a +// free var represents the receiver value and may be of any type that +// has concrete methods. +// +// Pos() returns the position of the value that was captured, which +// belongs to an enclosing function. +type FreeVar struct { + name string + typ types.Type + pos token.Pos + parent *Function + referrers []Instruction + + // Transiently needed during building. + outer Value // the Value captured from the enclosing context. +} + +// A Parameter represents an input parameter of a function. +type Parameter struct { + name string + object *types.Var // non-nil + typ types.Type + parent *Function + referrers []Instruction +} + +// A Const represents a value known at build time. +// +// Consts include true constants of boolean, numeric, and string types, as +// defined by the Go spec; these are represented by a non-nil Value field. +// +// Consts also include the "zero" value of any type, of which the nil values +// of various pointer-like types are a special case; these are represented +// by a nil Value field. +// +// Pos() returns token.NoPos. 
+// +// Example printed forms: +// +// 42:int +// "hello":untyped string +// 3+4i:MyComplex +// nil:*int +// nil:[]string +// [3]int{}:[3]int +// struct{x string}{}:struct{x string} +// 0:interface{int|int64} +// nil:interface{bool|int} // no go/constant representation +type Const struct { + typ types.Type + Value constant.Value +} + +// A Global is a named Value holding the address of a package-level +// variable. +// +// Pos() returns the position of the ast.ValueSpec.Names[*] +// identifier. +type Global struct { + name string + object types.Object // a *types.Var; may be nil for synthetics e.g. init$guard + typ types.Type + pos token.Pos + + Pkg *Package +} + +// A Builtin represents a specific use of a built-in function, e.g. len. +// +// Builtins are immutable values. Builtins do not have addresses. +// Builtins can only appear in CallCommon.Value. +// +// Name() indicates the function: one of the built-in functions from the +// Go spec (excluding "make" and "new") or one of these ssa-defined +// intrinsics: +// +// // wrapnilchk returns ptr if non-nil, panics otherwise. +// // (For use in indirection wrappers.) +// func ssa:wrapnilchk(ptr *T, recvType, methodName string) *T +// +// Object() returns a *types.Builtin for built-ins defined by the spec, +// nil for others. +// +// Type() returns a *types.Signature representing the effective +// signature of the built-in for this call. +type Builtin struct { + name string + sig *types.Signature +} + +// Value-defining instructions ---------------------------------------- + +// The Alloc instruction reserves space for a variable of the given type, +// zero-initializes it, and yields its address. +// +// Alloc values are always addresses, and have pointer types, so the +// type of the allocated variable is actually +// Type().Underlying().(*types.Pointer).Elem(). +// +// If Heap is false, Alloc zero-initializes the same local variable in +// the call frame and returns its address; in this case the Alloc must +// be present in Function.Locals. We call this a "local" alloc. +// +// If Heap is true, Alloc allocates a new zero-initialized variable +// each time the instruction is executed. We call this a "new" alloc. +// +// When Alloc is applied to a channel, map or slice type, it returns +// the address of an uninitialized (nil) reference of that kind; store +// the result of MakeSlice, MakeMap or MakeChan in that location to +// instantiate these types. +// +// Pos() returns the ast.CompositeLit.Lbrace for a composite literal, +// or the ast.CallExpr.Rparen for a call to new() or for a call that +// allocates a varargs slice. +// +// Example printed form: +// +// t0 = local int +// t1 = new int +type Alloc struct { + register + Comment string + Heap bool + index int // dense numbering; for lifting +} + +// The Phi instruction represents an SSA φ-node, which combines values +// that differ across incoming control-flow edges and yields a new +// value. Within a block, all φ-nodes must appear before all non-φ +// nodes. +// +// Pos() returns the position of the && or || for short-circuit +// control-flow joins, or that of the *Alloc for φ-nodes inserted +// during SSA renaming. +// +// Example printed form: +// +// t2 = phi [0: t0, 1: t1] +type Phi struct { + register + Comment string // a hint as to its purpose + Edges []Value // Edges[i] is value for Block().Preds[i] +} + +// The Call instruction represents a function or method call. +// +// The Call instruction yields the function result if there is exactly +// one. 
Otherwise it returns a tuple, the components of which are +// accessed via Extract. +// +// See CallCommon for generic function call documentation. +// +// Pos() returns the ast.CallExpr.Lparen, if explicit in the source. +// +// Example printed form: +// +// t2 = println(t0, t1) +// t4 = t3() +// t7 = invoke t5.Println(...t6) +type Call struct { + register + Call CallCommon +} + +// The BinOp instruction yields the result of binary operation X Op Y. +// +// Pos() returns the ast.BinaryExpr.OpPos, if explicit in the source. +// +// Example printed form: +// +// t1 = t0 + 1:int +type BinOp struct { + register + // One of: + // ADD SUB MUL QUO REM + - * / % + // AND OR XOR SHL SHR AND_NOT & | ^ << >> &^ + // EQL NEQ LSS LEQ GTR GEQ == != < <= < >= + Op token.Token + X, Y Value +} + +// The UnOp instruction yields the result of Op X. +// ARROW is channel receive. +// MUL is pointer indirection (load). +// XOR is bitwise complement. +// SUB is negation. +// NOT is logical negation. +// +// If CommaOk and Op=ARROW, the result is a 2-tuple of the value above +// and a boolean indicating the success of the receive. The +// components of the tuple are accessed using Extract. +// +// Pos() returns the ast.UnaryExpr.OpPos, if explicit in the source. +// For receive operations (ARROW) implicit in ranging over a channel, +// Pos() returns the ast.RangeStmt.For. +// For implicit memory loads (STAR), Pos() returns the position of the +// most closely associated source-level construct; the details are not +// specified. +// +// Example printed form: +// +// t0 = *x +// t2 = <-t1,ok +type UnOp struct { + register + Op token.Token // One of: NOT SUB ARROW MUL XOR ! - <- * ^ + X Value + CommaOk bool +} + +// The ChangeType instruction applies to X a value-preserving type +// change to Type(). +// +// Type changes are permitted: +// - between a named type and its underlying type. +// - between two named types of the same underlying type. +// - between (possibly named) pointers to identical base types. +// - from a bidirectional channel to a read- or write-channel, +// optionally adding/removing a name. +// - between a type (t) and an instance of the type (tσ), i.e. +// Type() == σ(X.Type()) (or X.Type()== σ(Type())) where +// σ is the type substitution of Parent().TypeParams by +// Parent().TypeArgs. +// +// This operation cannot fail dynamically. +// +// Type changes may to be to or from a type parameter (or both). All +// types in the type set of X.Type() have a value-preserving type +// change to all types in the type set of Type(). +// +// Pos() returns the ast.CallExpr.Lparen, if the instruction arose +// from an explicit conversion in the source. +// +// Example printed form: +// +// t1 = changetype *int <- IntPtr (t0) +type ChangeType struct { + register + X Value +} + +// The Convert instruction yields the conversion of value X to type +// Type(). One or both of those types is basic (but possibly named). +// +// A conversion may change the value and representation of its operand. +// Conversions are permitted: +// - between real numeric types. +// - between complex numeric types. +// - between string and []byte or []rune. +// - between pointers and unsafe.Pointer. +// - between unsafe.Pointer and uintptr. +// - from (Unicode) integer to (UTF-8) string. +// +// A conversion may imply a type name change also. +// +// Conversions may to be to or from a type parameter. All types in +// the type set of X.Type() can be converted to all types in the type +// set of Type(). 
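//
// For example (an illustrative case), conversions such as string(b) or
// []byte(s) change the representation of their operand and are therefore
// represented by Convert rather than ChangeType:
//
//	bs := []byte(s) // yields a Convert instruction
//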
+// +// This operation cannot fail dynamically. +// +// Conversions of untyped string/number/bool constants to a specific +// representation are eliminated during SSA construction. +// +// Pos() returns the ast.CallExpr.Lparen, if the instruction arose +// from an explicit conversion in the source. +// +// Example printed form: +// +// t1 = convert []byte <- string (t0) +type Convert struct { + register + X Value +} + +// The MultiConvert instruction yields the conversion of value X to type +// Type(). Either X.Type() or Type() must be a type parameter. Each +// type in the type set of X.Type() can be converted to each type in the +// type set of Type(). +// +// See the documentation for Convert, ChangeType, and SliceToArrayPointer +// for the conversions that are permitted. Additionally conversions of +// slices to arrays are permitted. +// +// This operation can fail dynamically (see SliceToArrayPointer). +// +// Pos() returns the ast.CallExpr.Lparen, if the instruction arose +// from an explicit conversion in the source. +// +// Example printed form: +// +// t1 = multiconvert D <- S (t0) [*[2]rune <- []rune | string <- []rune] +type MultiConvert struct { + register + X Value + from []*types.Term + to []*types.Term +} + +// ChangeInterface constructs a value of one interface type from a +// value of another interface type known to be assignable to it. +// This operation cannot fail. +// +// Pos() returns the ast.CallExpr.Lparen if the instruction arose from +// an explicit T(e) conversion; the ast.TypeAssertExpr.Lparen if the +// instruction arose from an explicit e.(T) operation; or token.NoPos +// otherwise. +// +// Example printed form: +// +// t1 = change interface interface{} <- I (t0) +type ChangeInterface struct { + register + X Value +} + +// The SliceToArrayPointer instruction yields the conversion of slice X to +// array pointer. +// +// Pos() returns the ast.CallExpr.Lparen, if the instruction arose +// from an explicit conversion in the source. +// +// Conversion may to be to or from a type parameter. All types in +// the type set of X.Type() must be a slice types that can be converted to +// all types in the type set of Type() which must all be pointer to array +// types. +// +// This operation can fail dynamically if the length of the slice is less +// than the length of the array. +// +// Example printed form: +// +// t1 = slice to array pointer *[4]byte <- []byte (t0) +type SliceToArrayPointer struct { + register + X Value +} + +// MakeInterface constructs an instance of an interface type from a +// value of a concrete type. +// +// Use Program.MethodSets.MethodSet(X.Type()) to find the method-set +// of X, and Program.MethodValue(m) to find the implementation of a method. +// +// To construct the zero value of an interface type T, use: +// +// NewConst(constant.MakeNil(), T, pos) +// +// Pos() returns the ast.CallExpr.Lparen, if the instruction arose +// from an explicit conversion in the source. +// +// Example printed form: +// +// t1 = make interface{} <- int (42:int) +// t2 = make Stringer <- t0 +type MakeInterface struct { + register + X Value +} + +// The MakeClosure instruction yields a closure value whose code is +// Fn and whose free variables' values are supplied by Bindings. +// +// Type() returns a (possibly named) *types.Signature. +// +// Pos() returns the ast.FuncLit.Type.Func for a function literal +// closure or the ast.SelectorExpr.Sel for a bound method closure. 
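//
// For example (a small illustration, not from this file), in
//
//	x := 1
//	f := func() int { return x }
//
// the function literal becomes an anonymous *Function with a FreeVar for x,
// and f is the result of a MakeClosure whose single Binding supplies the
// address of the captured variable x.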
+// +// Example printed form: +// +// t0 = make closure anon@1.2 [x y z] +// t1 = make closure bound$(main.I).add [i] +type MakeClosure struct { + register + Fn Value // always a *Function + Bindings []Value // values for each free variable in Fn.FreeVars +} + +// The MakeMap instruction creates a new hash-table-based map object +// and yields a value of kind map. +// +// Type() returns a (possibly named) *types.Map. +// +// Pos() returns the ast.CallExpr.Lparen, if created by make(map), or +// the ast.CompositeLit.Lbrack if created by a literal. +// +// Example printed form: +// +// t1 = make map[string]int t0 +// t1 = make StringIntMap t0 +type MakeMap struct { + register + Reserve Value // initial space reservation; nil => default +} + +// The MakeChan instruction creates a new channel object and yields a +// value of kind chan. +// +// Type() returns a (possibly named) *types.Chan. +// +// Pos() returns the ast.CallExpr.Lparen for the make(chan) that +// created it. +// +// Example printed form: +// +// t0 = make chan int 0 +// t0 = make IntChan 0 +type MakeChan struct { + register + Size Value // int; size of buffer; zero => synchronous. +} + +// The MakeSlice instruction yields a slice of length Len backed by a +// newly allocated array of length Cap. +// +// Both Len and Cap must be non-nil Values of integer type. +// +// (Alloc(types.Array) followed by Slice will not suffice because +// Alloc can only create arrays of constant length.) +// +// Type() returns a (possibly named) *types.Slice. +// +// Pos() returns the ast.CallExpr.Lparen for the make([]T) that +// created it. +// +// Example printed form: +// +// t1 = make []string 1:int t0 +// t1 = make StringSlice 1:int t0 +type MakeSlice struct { + register + Len Value + Cap Value +} + +// The Slice instruction yields a slice of an existing string, slice +// or *array X between optional integer bounds Low and High. +// +// Dynamically, this instruction panics if X evaluates to a nil *array +// pointer. +// +// Type() returns string if the type of X was string, otherwise a +// *types.Slice with the same element type as X. +// +// Pos() returns the ast.SliceExpr.Lbrack if created by a x[:] slice +// operation, the ast.CompositeLit.Lbrace if created by a literal, or +// NoPos if not explicit in the source (e.g. a variadic argument slice). +// +// Example printed form: +// +// t1 = slice t0[1:] +type Slice struct { + register + X Value // slice, string, or *array + Low, High, Max Value // each may be nil +} + +// The FieldAddr instruction yields the address of Field of *struct X. +// +// The field is identified by its index within the field list of the +// struct type of X. +// +// Dynamically, this instruction panics if X evaluates to a nil +// pointer. +// +// Type() returns a (possibly named) *types.Pointer. +// +// Pos() returns the position of the ast.SelectorExpr.Sel for the +// field, if explicit in the source. For implicit selections, returns +// the position of the inducing explicit selection. If produced for a +// struct literal S{f: e}, it returns the position of the colon; for +// S{e} it returns the start of expression e. +// +// Example printed form: +// +// t1 = &t0.name [#1] +type FieldAddr struct { + register + X Value // *struct + Field int // index into CoreType(CoreType(X.Type()).(*types.Pointer).Elem()).(*types.Struct).Fields +} + +// The Field instruction yields the Field of struct X. 
+
+// The field is identified by its index within the field list of the
+// struct type of X; by using numeric indices we avoid ambiguity of
+// package-local identifiers and permit compact representations.
+//
+// Pos() returns the position of the ast.SelectorExpr.Sel for the
+// field, if explicit in the source. For implicit selections, returns
+// the position of the inducing explicit selection.
+//
+// Example printed form:
+//
+//	t1 = t0.name [#1]
+type Field struct {
+	register
+	X     Value // struct
+	Field int   // index into CoreType(X.Type()).(*types.Struct).Fields
+}
+
+// The IndexAddr instruction yields the address of the element at
+// index Index of collection X. Index is an integer expression.
+//
+// The elements of maps and strings are not addressable; use Lookup (map),
+// Index (string), or MapUpdate instead.
+//
+// Dynamically, this instruction panics if X evaluates to a nil *array
+// pointer.
+//
+// Type() returns a (possibly named) *types.Pointer.
+//
+// Pos() returns the ast.IndexExpr.Lbrack for the index operation, if
+// explicit in the source.
+//
+// Example printed form:
+//
+//	t2 = &t0[t1]
+type IndexAddr struct {
+	register
+	X     Value // *array, slice or type parameter with types array, *array, or slice.
+	Index Value // numeric index
+}
+
+// The Index instruction yields element Index of collection X, an array,
+// string or type parameter containing an array, a string, a pointer to an
+// array, or a slice.
+//
+// Pos() returns the ast.IndexExpr.Lbrack for the index operation, if
+// explicit in the source.
+//
+// Example printed form:
+//
+//	t2 = t0[t1]
+type Index struct {
+	register
+	X     Value // array, string or type parameter with types array, *array, slice, or string.
+	Index Value // integer index
+}
+
+// The Lookup instruction yields element Index of collection map X.
+// Index is a value of the appropriate key type.
+//
+// If CommaOk, the result is a 2-tuple of the value above and a
+// boolean indicating the result of a map membership test for the key.
+// The components of the tuple are accessed using Extract.
+//
+// Pos() returns the ast.IndexExpr.Lbrack, if explicit in the source.
+//
+// Example printed form:
+//
+//	t2 = t0[t1]
+//	t5 = t3[t4],ok
+type Lookup struct {
+	register
+	X       Value // map
+	Index   Value // key-typed index
+	CommaOk bool  // return a value,ok pair
+}
+
+// SelectState is a helper for Select.
+// It represents one goal state and its corresponding communication.
+type SelectState struct {
+	Dir       types.ChanDir // direction of case (SendOnly or RecvOnly)
+	Chan      Value         // channel to use (for send or receive)
+	Send      Value         // value to send (for send)
+	Pos       token.Pos     // position of token.ARROW
+	DebugNode ast.Node      // ast.SendStmt or ast.UnaryExpr(<-) [debug mode]
+}
+
+// The Select instruction tests whether (or blocks until) one
+// of the specified sent or received states is entered.
+//
+// Let n be the number of States for which Dir==RECV and T_i (0<=i<n)
+// be the element type of each such state's Chan.
+// Select returns an n+2-tuple
+//
+//	(index int, recvOk bool, r_0 T_0, ... r_n-1 T_n-1)
+//
+// The tuple's components, described below, must be accessed via the
+// Extract instruction.
+//
+// If Blocking, select waits until exactly one state holds, i.e. a
+// channel becomes ready for the designated operation of sending or
+// receiving; select chooses one among the ready states
+// pseudorandomly, performs the send or receive operation, and sets
+// 'index' to the index of the chosen channel.
+// +// If !Blocking, select doesn't block if no states hold; instead it +// returns immediately with index equal to -1. +// +// If the chosen channel was used for a receive, the r_i component is +// set to the received value, where i is the index of that state among +// all n receive states; otherwise r_i has the zero value of type T_i. +// Note that the receive index i is not the same as the state +// index index. +// +// The second component of the triple, recvOk, is a boolean whose value +// is true iff the selected operation was a receive and the receive +// successfully yielded a value. +// +// Pos() returns the ast.SelectStmt.Select. +// +// Example printed form: +// +// t3 = select nonblocking [<-t0, t1<-t2] +// t4 = select blocking [] +type Select struct { + register + States []*SelectState + Blocking bool +} + +// The Range instruction yields an iterator over the domain and range +// of X, which must be a string or map. +// +// Elements are accessed via Next. +// +// Type() returns an opaque and degenerate "rangeIter" type. +// +// Pos() returns the ast.RangeStmt.For. +// +// Example printed form: +// +// t0 = range "hello":string +type Range struct { + register + X Value // string or map +} + +// The Next instruction reads and advances the (map or string) +// iterator Iter and returns a 3-tuple value (ok, k, v). If the +// iterator is not exhausted, ok is true and k and v are the next +// elements of the domain and range, respectively. Otherwise ok is +// false and k and v are undefined. +// +// Components of the tuple are accessed using Extract. +// +// The IsString field distinguishes iterators over strings from those +// over maps, as the Type() alone is insufficient: consider +// map[int]rune. +// +// Type() returns a *types.Tuple for the triple (ok, k, v). +// The types of k and/or v may be types.Invalid. +// +// Example printed form: +// +// t1 = next t0 +type Next struct { + register + Iter Value + IsString bool // true => string iterator; false => map iterator. +} + +// The TypeAssert instruction tests whether interface value X has type +// AssertedType. +// +// If !CommaOk, on success it returns v, the result of the conversion +// (defined below); on failure it panics. +// +// If CommaOk: on success it returns a pair (v, true) where v is the +// result of the conversion; on failure it returns (z, false) where z +// is AssertedType's zero value. The components of the pair must be +// accessed using the Extract instruction. +// +// If Underlying: tests whether interface value X has the underlying +// type AssertedType. +// +// If AssertedType is a concrete type, TypeAssert checks whether the +// dynamic type in interface X is equal to it, and if so, the result +// of the conversion is a copy of the value in the interface. +// +// If AssertedType is an interface, TypeAssert checks whether the +// dynamic type of the interface is assignable to it, and if so, the +// result of the conversion is a copy of the interface value X. +// If AssertedType is a superinterface of X.Type(), the operation will +// fail iff the operand is nil. (Contrast with ChangeInterface, which +// performs no nil-check.) +// +// Type() reflects the actual type of the result, possibly a +// 2-types.Tuple; AssertedType is the asserted type. 
+// +// Depending on the TypeAssert's purpose, Pos may return: +// - the ast.CallExpr.Lparen of an explicit T(e) conversion; +// - the ast.TypeAssertExpr.Lparen of an explicit e.(T) operation; +// - the ast.CaseClause.Case of a case of a type-switch statement; +// - the Ident(m).NamePos of an interface method value i.m +// (for which TypeAssert may be used to effect the nil check). +// +// Example printed form: +// +// t1 = typeassert t0.(int) +// t3 = typeassert,ok t2.(T) +type TypeAssert struct { + register + X Value + AssertedType types.Type + CommaOk bool +} + +// The Extract instruction yields component Index of Tuple. +// +// This is used to access the results of instructions with multiple +// return values, such as Call, TypeAssert, Next, UnOp(ARROW) and +// IndexExpr(Map). +// +// Example printed form: +// +// t1 = extract t0 #1 +type Extract struct { + register + Tuple Value + Index int +} + +// Instructions executed for effect. They do not yield a value. -------------------- + +// The Jump instruction transfers control to the sole successor of its +// owning block. +// +// A Jump must be the last instruction of its containing BasicBlock. +// +// Pos() returns NoPos. +// +// Example printed form: +// +// jump done +type Jump struct { + anInstruction +} + +// The If instruction transfers control to one of the two successors +// of its owning block, depending on the boolean Cond: the first if +// true, the second if false. +// +// An If instruction must be the last instruction of its containing +// BasicBlock. +// +// Pos() returns NoPos. +// +// Example printed form: +// +// if t0 goto done else body +type If struct { + anInstruction + Cond Value +} + +// The Return instruction returns values and control back to the calling +// function. +// +// len(Results) is always equal to the number of results in the +// function's signature. +// +// If len(Results) > 1, Return returns a tuple value with the specified +// components which the caller must access using Extract instructions. +// +// There is no instruction to return a ready-made tuple like those +// returned by a "value,ok"-mode TypeAssert, Lookup or UnOp(ARROW) or +// a tail-call to a function with multiple result parameters. +// +// Return must be the last instruction of its containing BasicBlock. +// Such a block has no successors. +// +// Pos() returns the ast.ReturnStmt.Return, if explicit in the source. +// +// Example printed form: +// +// return +// return nil:I, 2:int +type Return struct { + anInstruction + Results []Value + pos token.Pos +} + +// The RunDefers instruction pops and invokes the entire stack of +// procedure calls pushed by Defer instructions in this function. +// +// It is legal to encounter multiple 'rundefers' instructions in a +// single control-flow path through a function; this is useful in +// the combined init() function, for example. +// +// Pos() returns NoPos. +// +// Example printed form: +// +// rundefers +type RunDefers struct { + anInstruction +} + +// The Panic instruction initiates a panic with value X. +// +// A Panic instruction must be the last instruction of its containing +// BasicBlock, which must have no successors. +// +// NB: 'go panic(x)' and 'defer panic(x)' do not use this instruction; +// they are treated as calls to a built-in function. +// +// Pos() returns the ast.CallExpr.Lparen if this panic was explicit +// in the source. 
+
+// Example printed form:
+//
+//	panic t0
+type Panic struct {
+	anInstruction
+	X   Value // an interface{}
+	pos token.Pos
+}
+
+// The Go instruction creates a new goroutine and calls the specified
+// function within it.
+//
+// See CallCommon for generic function call documentation.
+//
+// Pos() returns the ast.GoStmt.Go.
+//
+// Example printed form:
+//
+//	go println(t0, t1)
+//	go t3()
+//	go invoke t5.Println(...t6)
+type Go struct {
+	anInstruction
+	Call CallCommon
+	pos  token.Pos
+}
+
+// The Defer instruction pushes the specified call onto a stack of
+// functions to be called by a RunDefers instruction or by a panic.
+//
+// If DeferStack != nil, it indicates the defer list that the defer is
+// added to. Defer list values come from the Builtin function
+// ssa:deferstack. Calls to ssa:deferstack() produce the defer stack
+// of the current function frame. DeferStack allows deferring onto a
+// function stack other than the current function's.
+//
+// See CallCommon for generic function call documentation.
+//
+// Pos() returns the ast.DeferStmt.Defer.
+//
+// Example printed form:
+//
+//	defer println(t0, t1)
+//	defer t3()
+//	defer invoke t5.Println(...t6)
+type Defer struct {
+	anInstruction
+	Call       CallCommon
+	DeferStack Value // stack of deferred functions (from ssa:deferstack() intrinsic) onto which this function is pushed
+	pos        token.Pos
+}
+
+// The Send instruction sends X on channel Chan.
+//
+// Pos() returns the ast.SendStmt.Arrow, if explicit in the source.
+//
+// Example printed form:
+//
+//	send t0 <- t1
+type Send struct {
+	anInstruction
+	Chan, X Value
+	pos     token.Pos
+}
+
+// The Store instruction stores Val at address Addr.
+// Stores can be of arbitrary types.
+//
+// Pos() returns the position of the source-level construct most closely
+// associated with the memory store operation.
+// Since implicit memory stores are numerous and varied and depend upon
+// implementation choices, the details are not specified.
+//
+// Example printed form:
+//
+//	*x = y
+type Store struct {
+	anInstruction
+	Addr Value
+	Val  Value
+	pos  token.Pos
+}
+
+// The MapUpdate instruction updates the association of Map[Key] to
+// Value.
+//
+// Pos() returns the ast.KeyValueExpr.Colon or ast.IndexExpr.Lbrack,
+// if explicit in the source.
+//
+// Example printed form:
+//
+//	t0[t1] = t2
+type MapUpdate struct {
+	anInstruction
+	Map   Value
+	Key   Value
+	Value Value
+	pos   token.Pos
+}
+
+// A DebugRef instruction maps a source-level expression Expr to the
+// SSA value X that represents the value (!IsAddr) or address (IsAddr)
+// of that expression.
+//
+// DebugRef is a pseudo-instruction: it has no dynamic effect.
+//
+// Pos() returns Expr.Pos(), the start position of the source-level
+// expression. This is not the same as the "designated" token as
+// documented at Value.Pos(). For example, CallExpr.Pos() does not return
+// the position of the ("designated") Lparen token.
+//
+// If Expr is an *ast.Ident denoting a var or func, Object() returns
+// the object; though this information can be obtained from the type
+// checker, including it here greatly facilitates debugging.
+// For non-Ident expressions, Object() returns nil.
+//
+// DebugRefs are generated only for functions built with debugging
+// enabled; see Package.SetDebugMode() and the GlobalDebug builder
+// mode flag.
+//
+// DebugRefs are not emitted for ast.Idents referring to constants or
+// predeclared identifiers, since they are trivial and numerous.
+// Nor are they emitted for ast.ParenExprs. +// +// (By representing these as instructions, rather than out-of-band, +// consistency is maintained during transformation passes by the +// ordinary SSA renaming machinery.) +// +// Example printed form: +// +// ; *ast.CallExpr @ 102:9 is t5 +// ; var x float64 @ 109:72 is x +// ; address of *ast.CompositeLit @ 216:10 is t0 +type DebugRef struct { + // TODO(generics): Reconsider what DebugRefs are for generics. + anInstruction + Expr ast.Expr // the referring expression (never *ast.ParenExpr) + object types.Object // the identity of the source var/func + IsAddr bool // Expr is addressable and X is the address it denotes + X Value // the value or address of Expr +} + +// Embeddable mix-ins and helpers for common parts of other structs. ----------- + +// register is a mix-in embedded by all SSA values that are also +// instructions, i.e. virtual registers, and provides a uniform +// implementation of most of the Value interface: Value.Name() is a +// numbered register (e.g. "t0"); the other methods are field accessors. +// +// Temporary names are automatically assigned to each register on +// completion of building a function in SSA form. +// +// Clients must not assume that the 'id' value (and the Name() derived +// from it) is unique within a function. As always in this API, +// semantics are determined only by identity; names exist only to +// facilitate debugging. +type register struct { + anInstruction + num int // "name" of virtual register, e.g. "t0". Not guaranteed unique. + typ types.Type // type of virtual register + pos token.Pos // position of source expression, or NoPos + referrers []Instruction +} + +// anInstruction is a mix-in embedded by all Instructions. +// It provides the implementations of the Block and setBlock methods. +type anInstruction struct { + block *BasicBlock // the basic block of this instruction +} + +// CallCommon is contained by Go, Defer and Call to hold the +// common parts of a function or method call. +// +// Each CallCommon exists in one of two modes, function call and +// interface method invocation, or "call" and "invoke" for short. +// +// 1. "call" mode: when Method is nil (!IsInvoke), a CallCommon +// represents an ordinary function call of the value in Value, +// which may be a *Builtin, a *Function or any other value of kind +// 'func'. +// +// Value may be one of: +// +// (a) a *Function, indicating a statically dispatched call +// to a package-level function, an anonymous function, or +// a method of a named type. +// (b) a *MakeClosure, indicating an immediately applied +// function literal with free variables. +// (c) a *Builtin, indicating a statically dispatched call +// to a built-in function. +// (d) any other value, indicating a dynamically dispatched +// function call. +// +// StaticCallee returns the identity of the callee in cases +// (a) and (b), nil otherwise. +// +// Args contains the arguments to the call. If Value is a method, +// Args[0] contains the receiver parameter. +// +// Example printed form: +// +// t2 = println(t0, t1) +// go t3() +// defer t5(...t6) +// +// 2. "invoke" mode: when Method is non-nil (IsInvoke), a CallCommon +// represents a dynamically dispatched call to an interface method. +// In this mode, Value is the interface value and Method is the +// interface's abstract method. The interface value may be a type +// parameter. 
Note: an interface method may be shared by multiple +// interfaces due to embedding; Value.Type() provides the specific +// interface used for this call. +// +// Value is implicitly supplied to the concrete method implementation +// as the receiver parameter; in other words, Args[0] holds not the +// receiver but the first true argument. +// +// Example printed form: +// +// t1 = invoke t0.String() +// go invoke t3.Run(t2) +// defer invoke t4.Handle(...t5) +// +// For all calls to variadic functions (Signature().Variadic()), +// the last element of Args is a slice. +type CallCommon struct { + Value Value // receiver (invoke mode) or func value (call mode) + Method *types.Func // interface method (invoke mode) + Args []Value // actual parameters (in static method call, includes receiver) + pos token.Pos // position of CallExpr.Lparen, iff explicit in source +} + +// IsInvoke returns true if this call has "invoke" (not "call") mode. +func (c *CallCommon) IsInvoke() bool { + return c.Method != nil +} + +func (c *CallCommon) Pos() token.Pos { return c.pos } + +// Signature returns the signature of the called function. +// +// For an "invoke"-mode call, the signature of the interface method is +// returned. +// +// In either "call" or "invoke" mode, if the callee is a method, its +// receiver is represented by sig.Recv, not sig.Params().At(0). +func (c *CallCommon) Signature() *types.Signature { + if c.Method != nil { + return c.Method.Type().(*types.Signature) + } + return typeparams.CoreType(c.Value.Type()).(*types.Signature) +} + +// StaticCallee returns the callee if this is a trivially static +// "call"-mode call to a function. +func (c *CallCommon) StaticCallee() *Function { + switch fn := c.Value.(type) { + case *Function: + return fn + case *MakeClosure: + return fn.Fn.(*Function) + } + return nil +} + +// Description returns a description of the mode of this call suitable +// for a user interface, e.g., "static method call". +func (c *CallCommon) Description() string { + switch fn := c.Value.(type) { + case *Builtin: + return "built-in function call" + case *MakeClosure: + return "static function closure call" + case *Function: + if fn.Signature.Recv() != nil { + return "static method call" + } + return "static function call" + } + if c.IsInvoke() { + return "dynamic method call" // ("invoke" mode) + } + return "dynamic function call" +} + +// The CallInstruction interface, implemented by *Go, *Defer and *Call, +// exposes the common parts of function-calling instructions, +// yet provides a way back to the Value defined by *Call alone. 
+type CallInstruction interface { + Instruction + Common() *CallCommon // returns the common parts of the call + Value() *Call // returns the result value of the call (*Call) or nil (*Go, *Defer) +} + +func (s *Call) Common() *CallCommon { return &s.Call } +func (s *Defer) Common() *CallCommon { return &s.Call } +func (s *Go) Common() *CallCommon { return &s.Call } + +func (s *Call) Value() *Call { return s } +func (s *Defer) Value() *Call { return nil } +func (s *Go) Value() *Call { return nil } + +func (v *Builtin) Type() types.Type { return v.sig } +func (v *Builtin) Name() string { return v.name } +func (*Builtin) Referrers() *[]Instruction { return nil } +func (v *Builtin) Pos() token.Pos { return token.NoPos } +func (v *Builtin) Object() types.Object { return types.Universe.Lookup(v.name) } +func (v *Builtin) Parent() *Function { return nil } + +func (v *FreeVar) Type() types.Type { return v.typ } +func (v *FreeVar) Name() string { return v.name } +func (v *FreeVar) Referrers() *[]Instruction { return &v.referrers } +func (v *FreeVar) Pos() token.Pos { return v.pos } +func (v *FreeVar) Parent() *Function { return v.parent } + +func (v *Global) Type() types.Type { return v.typ } +func (v *Global) Name() string { return v.name } +func (v *Global) Parent() *Function { return nil } +func (v *Global) Pos() token.Pos { return v.pos } +func (v *Global) Referrers() *[]Instruction { return nil } +func (v *Global) Token() token.Token { return token.VAR } +func (v *Global) Object() types.Object { return v.object } +func (v *Global) String() string { return v.RelString(nil) } +func (v *Global) Package() *Package { return v.Pkg } +func (v *Global) RelString(from *types.Package) string { return relString(v, from) } + +func (v *Function) Name() string { return v.name } +func (v *Function) Type() types.Type { return v.Signature } +func (v *Function) Pos() token.Pos { return v.pos } +func (v *Function) Token() token.Token { return token.FUNC } +func (v *Function) Object() types.Object { + if v.object != nil { + return types.Object(v.object) + } + return nil +} +func (v *Function) String() string { return v.RelString(nil) } +func (v *Function) Package() *Package { return v.Pkg } +func (v *Function) Parent() *Function { return v.parent } +func (v *Function) Referrers() *[]Instruction { + if v.parent != nil { + return &v.referrers + } + return nil +} + +// TypeParams are the function's type parameters if generic or the +// type parameters that were instantiated if fn is an instantiation. +func (fn *Function) TypeParams() *types.TypeParamList { + return fn.typeparams +} + +// TypeArgs are the types that TypeParams() were instantiated by to create fn +// from fn.Origin(). +func (fn *Function) TypeArgs() []types.Type { return fn.typeargs } + +// Origin returns the generic function from which fn was instantiated, +// or nil if fn is not an instantiation. +func (fn *Function) Origin() *Function { + if fn.parent != nil && len(fn.typeargs) > 0 { + // Nested functions are BUILT at a different time than their instances. + // Build declared package if not yet BUILT. This is not an expected use + // case, but is simple and robust. + fn.declaredPackage().Build() + } + return origin(fn) +} + +// origin is the function that fn is an instantiation of. Returns nil if fn is +// not an instantiation. +// +// Precondition: fn and the origin function are done building. 
+func origin(fn *Function) *Function { + if fn.parent != nil && len(fn.typeargs) > 0 { + return origin(fn.parent).AnonFuncs[fn.anonIdx] + } + return fn.topLevelOrigin +} + +func (v *Parameter) Type() types.Type { return v.typ } +func (v *Parameter) Name() string { return v.name } +func (v *Parameter) Object() types.Object { return v.object } +func (v *Parameter) Referrers() *[]Instruction { return &v.referrers } +func (v *Parameter) Pos() token.Pos { return v.object.Pos() } +func (v *Parameter) Parent() *Function { return v.parent } + +func (v *Alloc) Type() types.Type { return v.typ } +func (v *Alloc) Referrers() *[]Instruction { return &v.referrers } +func (v *Alloc) Pos() token.Pos { return v.pos } + +func (v *register) Type() types.Type { return v.typ } +func (v *register) setType(typ types.Type) { v.typ = typ } +func (v *register) Name() string { return fmt.Sprintf("t%d", v.num) } +func (v *register) setNum(num int) { v.num = num } +func (v *register) Referrers() *[]Instruction { return &v.referrers } +func (v *register) Pos() token.Pos { return v.pos } +func (v *register) setPos(pos token.Pos) { v.pos = pos } + +func (v *anInstruction) Parent() *Function { return v.block.parent } +func (v *anInstruction) Block() *BasicBlock { return v.block } +func (v *anInstruction) setBlock(block *BasicBlock) { v.block = block } +func (v *anInstruction) Referrers() *[]Instruction { return nil } + +func (t *Type) Name() string { return t.object.Name() } +func (t *Type) Pos() token.Pos { return t.object.Pos() } +func (t *Type) Type() types.Type { return t.object.Type() } +func (t *Type) Token() token.Token { return token.TYPE } +func (t *Type) Object() types.Object { return t.object } +func (t *Type) String() string { return t.RelString(nil) } +func (t *Type) Package() *Package { return t.pkg } +func (t *Type) RelString(from *types.Package) string { return relString(t, from) } + +func (c *NamedConst) Name() string { return c.object.Name() } +func (c *NamedConst) Pos() token.Pos { return c.object.Pos() } +func (c *NamedConst) String() string { return c.RelString(nil) } +func (c *NamedConst) Type() types.Type { return c.object.Type() } +func (c *NamedConst) Token() token.Token { return token.CONST } +func (c *NamedConst) Object() types.Object { return c.object } +func (c *NamedConst) Package() *Package { return c.pkg } +func (c *NamedConst) RelString(from *types.Package) string { return relString(c, from) } + +func (d *DebugRef) Object() types.Object { return d.object } + +// Func returns the package-level function of the specified name, +// or nil if not found. +func (p *Package) Func(name string) (f *Function) { + f, _ = p.Members[name].(*Function) + return +} + +// Var returns the package-level variable of the specified name, +// or nil if not found. +func (p *Package) Var(name string) (g *Global) { + g, _ = p.Members[name].(*Global) + return +} + +// Const returns the package-level constant of the specified name, +// or nil if not found. +func (p *Package) Const(name string) (c *NamedConst) { + c, _ = p.Members[name].(*NamedConst) + return +} + +// Type returns the package-level type of the specified name, +// or nil if not found. 
+func (p *Package) Type(name string) (t *Type) { + t, _ = p.Members[name].(*Type) + return +} + +func (v *Call) Pos() token.Pos { return v.Call.pos } +func (s *Defer) Pos() token.Pos { return s.pos } +func (s *Go) Pos() token.Pos { return s.pos } +func (s *MapUpdate) Pos() token.Pos { return s.pos } +func (s *Panic) Pos() token.Pos { return s.pos } +func (s *Return) Pos() token.Pos { return s.pos } +func (s *Send) Pos() token.Pos { return s.pos } +func (s *Store) Pos() token.Pos { return s.pos } +func (s *If) Pos() token.Pos { return token.NoPos } +func (s *Jump) Pos() token.Pos { return token.NoPos } +func (s *RunDefers) Pos() token.Pos { return token.NoPos } +func (s *DebugRef) Pos() token.Pos { return s.Expr.Pos() } + +// Operands. + +func (v *Alloc) Operands(rands []*Value) []*Value { + return rands +} + +func (v *BinOp) Operands(rands []*Value) []*Value { + return append(rands, &v.X, &v.Y) +} + +func (c *CallCommon) Operands(rands []*Value) []*Value { + rands = append(rands, &c.Value) + for i := range c.Args { + rands = append(rands, &c.Args[i]) + } + return rands +} + +func (s *Go) Operands(rands []*Value) []*Value { + return s.Call.Operands(rands) +} + +func (s *Call) Operands(rands []*Value) []*Value { + return s.Call.Operands(rands) +} + +func (s *Defer) Operands(rands []*Value) []*Value { + return append(s.Call.Operands(rands), &s.DeferStack) +} + +func (v *ChangeInterface) Operands(rands []*Value) []*Value { + return append(rands, &v.X) +} + +func (v *ChangeType) Operands(rands []*Value) []*Value { + return append(rands, &v.X) +} + +func (v *Convert) Operands(rands []*Value) []*Value { + return append(rands, &v.X) +} + +func (v *MultiConvert) Operands(rands []*Value) []*Value { + return append(rands, &v.X) +} + +func (v *SliceToArrayPointer) Operands(rands []*Value) []*Value { + return append(rands, &v.X) +} + +func (s *DebugRef) Operands(rands []*Value) []*Value { + return append(rands, &s.X) +} + +func (v *Extract) Operands(rands []*Value) []*Value { + return append(rands, &v.Tuple) +} + +func (v *Field) Operands(rands []*Value) []*Value { + return append(rands, &v.X) +} + +func (v *FieldAddr) Operands(rands []*Value) []*Value { + return append(rands, &v.X) +} + +func (s *If) Operands(rands []*Value) []*Value { + return append(rands, &s.Cond) +} + +func (v *Index) Operands(rands []*Value) []*Value { + return append(rands, &v.X, &v.Index) +} + +func (v *IndexAddr) Operands(rands []*Value) []*Value { + return append(rands, &v.X, &v.Index) +} + +func (*Jump) Operands(rands []*Value) []*Value { + return rands +} + +func (v *Lookup) Operands(rands []*Value) []*Value { + return append(rands, &v.X, &v.Index) +} + +func (v *MakeChan) Operands(rands []*Value) []*Value { + return append(rands, &v.Size) +} + +func (v *MakeClosure) Operands(rands []*Value) []*Value { + rands = append(rands, &v.Fn) + for i := range v.Bindings { + rands = append(rands, &v.Bindings[i]) + } + return rands +} + +func (v *MakeInterface) Operands(rands []*Value) []*Value { + return append(rands, &v.X) +} + +func (v *MakeMap) Operands(rands []*Value) []*Value { + return append(rands, &v.Reserve) +} + +func (v *MakeSlice) Operands(rands []*Value) []*Value { + return append(rands, &v.Len, &v.Cap) +} + +func (v *MapUpdate) Operands(rands []*Value) []*Value { + return append(rands, &v.Map, &v.Key, &v.Value) +} + +func (v *Next) Operands(rands []*Value) []*Value { + return append(rands, &v.Iter) +} + +func (s *Panic) Operands(rands []*Value) []*Value { + return append(rands, &s.X) +} + +func (v *Phi) Operands(rands 
[]*Value) []*Value { + for i := range v.Edges { + rands = append(rands, &v.Edges[i]) + } + return rands +} + +func (v *Range) Operands(rands []*Value) []*Value { + return append(rands, &v.X) +} + +func (s *Return) Operands(rands []*Value) []*Value { + for i := range s.Results { + rands = append(rands, &s.Results[i]) + } + return rands +} + +func (*RunDefers) Operands(rands []*Value) []*Value { + return rands +} + +func (v *Select) Operands(rands []*Value) []*Value { + for i := range v.States { + rands = append(rands, &v.States[i].Chan, &v.States[i].Send) + } + return rands +} + +func (s *Send) Operands(rands []*Value) []*Value { + return append(rands, &s.Chan, &s.X) +} + +func (v *Slice) Operands(rands []*Value) []*Value { + return append(rands, &v.X, &v.Low, &v.High, &v.Max) +} + +func (s *Store) Operands(rands []*Value) []*Value { + return append(rands, &s.Addr, &s.Val) +} + +func (v *TypeAssert) Operands(rands []*Value) []*Value { + return append(rands, &v.X) +} + +func (v *UnOp) Operands(rands []*Value) []*Value { + return append(rands, &v.X) +} + +// Non-Instruction Values: +func (v *Builtin) Operands(rands []*Value) []*Value { return rands } +func (v *FreeVar) Operands(rands []*Value) []*Value { return rands } +func (v *Const) Operands(rands []*Value) []*Value { return rands } +func (v *Function) Operands(rands []*Value) []*Value { return rands } +func (v *Global) Operands(rands []*Value) []*Value { return rands } +func (v *Parameter) Operands(rands []*Value) []*Value { return rands } diff --git a/vendor/golang.org/x/tools/go/ssa/ssautil/load.go b/vendor/golang.org/x/tools/go/ssa/ssautil/load.go new file mode 100644 index 0000000..3daa67a --- /dev/null +++ b/vendor/golang.org/x/tools/go/ssa/ssautil/load.go @@ -0,0 +1,214 @@ +// Copyright 2015 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package ssautil + +// This file defines utility functions for constructing programs in SSA form. + +import ( + "go/ast" + "go/token" + "go/types" + + "golang.org/x/tools/go/loader" + "golang.org/x/tools/go/packages" + "golang.org/x/tools/go/ssa" + "golang.org/x/tools/internal/versions" +) + +// Packages creates an SSA program for a set of packages. +// +// The packages must have been loaded from source syntax using the +// [packages.Load] function in [packages.LoadSyntax] or +// [packages.LoadAllSyntax] mode. +// +// Packages creates an SSA package for each well-typed package in the +// initial list, plus all their dependencies. The resulting list of +// packages corresponds to the list of initial packages, and may contain +// a nil if SSA code could not be constructed for the corresponding initial +// package due to type errors. +// +// Code for bodies of functions is not built until [Program.Build] is +// called on the resulting Program. SSA code is constructed only for +// the initial packages with well-typed syntax trees. +// +// The mode parameter controls diagnostics and checking during SSA construction. +func Packages(initial []*packages.Package, mode ssa.BuilderMode) (*ssa.Program, []*ssa.Package) { + // TODO(adonovan): opt: this calls CreatePackage far more than + // necessary: for all dependencies, not just the (non-initial) + // direct dependencies of the initial packages. + // + // But can it reasonably be changed without breaking the + // spirit and/or letter of the law above? Clients may notice + // if we call CreatePackage less, as methods like + // Program.FuncValue will return nil. 
Or must we provide a new + // function (and perhaps deprecate this one)? Is it worth it? + // + // Tim King makes the interesting point that it would be + // possible to entirely alleviate the client from the burden + // of calling CreatePackage for non-syntax packages, if we + // were to treat vars and funcs lazily in the same way we now + // treat methods. (In essence, try to move away from the + // notion of ssa.Packages, and make the Program answer + // all reasonable questions about any types.Object.) + + return doPackages(initial, mode, false) +} + +// AllPackages creates an SSA program for a set of packages plus all +// their dependencies. +// +// The packages must have been loaded from source syntax using the +// [packages.Load] function in [packages.LoadAllSyntax] mode. +// +// AllPackages creates an SSA package for each well-typed package in the +// initial list, plus all their dependencies. The resulting list of +// packages corresponds to the list of initial packages, and may contain +// a nil if SSA code could not be constructed for the corresponding +// initial package due to type errors. +// +// Code for bodies of functions is not built until Build is called on +// the resulting Program. SSA code is constructed for all packages with +// well-typed syntax trees. +// +// The mode parameter controls diagnostics and checking during SSA construction. +func AllPackages(initial []*packages.Package, mode ssa.BuilderMode) (*ssa.Program, []*ssa.Package) { + return doPackages(initial, mode, true) +} + +func doPackages(initial []*packages.Package, mode ssa.BuilderMode, deps bool) (*ssa.Program, []*ssa.Package) { + + var fset *token.FileSet + if len(initial) > 0 { + fset = initial[0].Fset + } + + prog := ssa.NewProgram(fset, mode) + + isInitial := make(map[*packages.Package]bool, len(initial)) + for _, p := range initial { + isInitial[p] = true + } + + ssamap := make(map[*packages.Package]*ssa.Package) + packages.Visit(initial, nil, func(p *packages.Package) { + if p.Types != nil && !p.IllTyped { + var files []*ast.File + var info *types.Info + if deps || isInitial[p] { + files = p.Syntax + info = p.TypesInfo + } + ssamap[p] = prog.CreatePackage(p.Types, files, info, true) + } + }) + + var ssapkgs []*ssa.Package + for _, p := range initial { + ssapkgs = append(ssapkgs, ssamap[p]) // may be nil + } + return prog, ssapkgs +} + +// CreateProgram returns a new program in SSA form, given a program +// loaded from source. An SSA package is created for each transitively +// error-free package of lprog. +// +// Code for bodies of functions is not built until Build is called +// on the result. +// +// The mode parameter controls diagnostics and checking during SSA construction. +// +// Deprecated: Use [golang.org/x/tools/go/packages] and the [Packages] +// function instead; see ssa.Example_loadPackages. +func CreateProgram(lprog *loader.Program, mode ssa.BuilderMode) *ssa.Program { + prog := ssa.NewProgram(lprog.Fset, mode) + + for _, info := range lprog.AllPackages { + if info.TransitivelyErrorFree { + prog.CreatePackage(info.Pkg, info.Files, &info.Info, info.Importable) + } + } + + return prog +} + +// BuildPackage builds an SSA program with SSA intermediate +// representation (IR) for all functions of a single package. +// +// It populates pkg by type-checking the specified file syntax trees. All +// dependencies are loaded using the importer specified by tc, which +// typically loads compiler export data; SSA code cannot be built for +// those packages. 
BuildPackage then constructs an [ssa.Program] with all
+// dependency packages created, and builds and returns the SSA package
+// corresponding to pkg.
+//
+// The caller must have set pkg.Path to the import path.
+//
+// The operation fails if there were any type-checking or import errors.
+//
+// See ../example_test.go for an example.
+func BuildPackage(tc *types.Config, fset *token.FileSet, pkg *types.Package, files []*ast.File, mode ssa.BuilderMode) (*ssa.Package, *types.Info, error) {
+	if fset == nil {
+		panic("no token.FileSet")
+	}
+	if pkg.Path() == "" {
+		panic("package has no import path")
+	}
+
+	info := &types.Info{
+		Types:      make(map[ast.Expr]types.TypeAndValue),
+		Defs:       make(map[*ast.Ident]types.Object),
+		Uses:       make(map[*ast.Ident]types.Object),
+		Implicits:  make(map[ast.Node]types.Object),
+		Instances:  make(map[*ast.Ident]types.Instance),
+		Scopes:     make(map[ast.Node]*types.Scope),
+		Selections: make(map[*ast.SelectorExpr]*types.Selection),
+	}
+	versions.InitFileVersions(info)
+	if err := types.NewChecker(tc, fset, pkg, info).Files(files); err != nil {
+		return nil, nil, err
+	}
+
+	prog := ssa.NewProgram(fset, mode)
+
+	// Create SSA packages for all imports.
+	// Order is not significant.
+	created := make(map[*types.Package]bool)
+	var createAll func(pkgs []*types.Package)
+	createAll = func(pkgs []*types.Package) {
+		for _, p := range pkgs {
+			if !created[p] {
+				created[p] = true
+				prog.CreatePackage(p, nil, nil, true)
+				createAll(p.Imports())
+			}
+		}
+	}
+	createAll(pkg.Imports())
+
+	// TODO(adonovan): we could replace createAll with just:
+	//
+	//	// Create SSA packages for all imports.
+	//	for _, p := range pkg.Imports() {
+	//		prog.CreatePackage(p, nil, nil, true)
+	//	}
+	//
+	// (with minor changes to ../builder_test.go as
+	// shown in CL 511715 PS 10.) But this would strictly violate
+	// the letter of the doc comment above, which says "all
+	// dependencies created".
+	//
+	// Tim makes the good point that with some extra work we could
+	// remove the need for any CreatePackage calls except the
+	// ones with syntax (i.e. primary packages). Of course
+	// you wouldn't have ssa.Packages and Members for as
+	// many things, but no one really uses that anyway.
+	// I wish I had done this from the outset.
+
+	// Create and build the primary package.
+	ssapkg := prog.CreatePackage(pkg, files, info, false)
+	ssapkg.Build()
+	return ssapkg, info, nil
+}
diff --git a/vendor/golang.org/x/tools/go/ssa/ssautil/switch.go b/vendor/golang.org/x/tools/go/ssa/ssautil/switch.go
new file mode 100644
index 0000000..dd4b04e
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/ssa/ssautil/switch.go
@@ -0,0 +1,230 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssautil
+
+// This file implements discovery of switch and type-switch constructs
+// from low-level control flow.
+//
+// Many techniques exist for compiling a high-level switch with
+// constant cases to efficient machine code. The optimal choice will
+// depend on the data type, the specific case values, the code in the
+// body of each case, and the hardware.
+// Some examples:
+// - a lookup table (for a switch that maps constants to constants)
+// - a computed goto
+// - a binary tree
+// - a perfect hash
+// - a two-level switch (to partition constant strings by their first byte).
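Editorial aside, not part of the vendored diff: the sketch below shows one plausible way to drive the Switches function defined later in this file over a whole program built with ssautil.Packages (shown above) and ssautil.AllFunctions (defined in visit.go below). The "./..." pattern, the choice of ssa.SanityCheckFunctions, and the skipping of bodiless functions are illustrative assumptions, not part of this package.

package main

import (
	"fmt"
	"log"

	"golang.org/x/tools/go/packages"
	"golang.org/x/tools/go/ssa"
	"golang.org/x/tools/go/ssa/ssautil"
)

func main() {
	// Load and type-check the packages of the current module (assumed layout).
	pkgs, err := packages.Load(&packages.Config{Mode: packages.LoadSyntax}, "./...")
	if err != nil {
		log.Fatal(err)
	}

	// Build SSA bodies for the initial packages.
	prog, _ := ssautil.Packages(pkgs, ssa.SanityCheckFunctions)
	prog.Build()

	// Print every multiway branch recoverable from each function's CFG.
	for fn := range ssautil.AllFunctions(prog) {
		if fn.Blocks == nil {
			continue // external function: no CFG to inspect
		}
		for _, sw := range ssautil.Switches(fn) {
			fmt.Printf("%s:\n%s\n", fn, sw.String())
		}
	}
}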
+ +import ( + "bytes" + "fmt" + "go/token" + "go/types" + + "golang.org/x/tools/go/ssa" +) + +// A ConstCase represents a single constant comparison. +// It is part of a Switch. +type ConstCase struct { + Block *ssa.BasicBlock // block performing the comparison + Body *ssa.BasicBlock // body of the case + Value *ssa.Const // case comparand +} + +// A TypeCase represents a single type assertion. +// It is part of a Switch. +type TypeCase struct { + Block *ssa.BasicBlock // block performing the type assert + Body *ssa.BasicBlock // body of the case + Type types.Type // case type + Binding ssa.Value // value bound by this case +} + +// A Switch is a logical high-level control flow operation +// (a multiway branch) discovered by analysis of a CFG containing +// only if/else chains. It is not part of the ssa.Instruction set. +// +// One of ConstCases and TypeCases has length >= 2; +// the other is nil. +// +// In a value switch, the list of cases may contain duplicate constants. +// A type switch may contain duplicate types, or types assignable +// to an interface type also in the list. +// TODO(adonovan): eliminate such duplicates. +type Switch struct { + Start *ssa.BasicBlock // block containing start of if/else chain + X ssa.Value // the switch operand + ConstCases []ConstCase // ordered list of constant comparisons + TypeCases []TypeCase // ordered list of type assertions + Default *ssa.BasicBlock // successor if all comparisons fail +} + +func (sw *Switch) String() string { + // We represent each block by the String() of its + // first Instruction, e.g. "print(42:int)". + var buf bytes.Buffer + if sw.ConstCases != nil { + fmt.Fprintf(&buf, "switch %s {\n", sw.X.Name()) + for _, c := range sw.ConstCases { + fmt.Fprintf(&buf, "case %s: %s\n", c.Value, c.Body.Instrs[0]) + } + } else { + fmt.Fprintf(&buf, "switch %s.(type) {\n", sw.X.Name()) + for _, c := range sw.TypeCases { + fmt.Fprintf(&buf, "case %s %s: %s\n", + c.Binding.Name(), c.Type, c.Body.Instrs[0]) + } + } + if sw.Default != nil { + fmt.Fprintf(&buf, "default: %s\n", sw.Default.Instrs[0]) + } + fmt.Fprintf(&buf, "}") + return buf.String() +} + +// Switches examines the control-flow graph of fn and returns the +// set of inferred value and type switches. A value switch tests an +// ssa.Value for equality against two or more compile-time constant +// values. Switches involving link-time constants (addresses) are +// ignored. A type switch type-asserts an ssa.Value against two or +// more types. +// +// The switches are returned in dominance order. +// +// The resulting switches do not necessarily correspond to uses of the +// 'switch' keyword in the source: for example, a single source-level +// switch statement with non-constant cases may result in zero, one or +// many Switches, one per plural sequence of constant cases. +// Switches may even be inferred from if/else- or goto-based control flow. +// (In general, the control flow constructs of the source program +// cannot be faithfully reproduced from the SSA representation.) +func Switches(fn *ssa.Function) []Switch { + // Traverse the CFG in dominance order, so we don't + // enter an if/else-chain in the middle. + var switches []Switch + seen := make(map[*ssa.BasicBlock]bool) // TODO(adonovan): opt: use ssa.blockSet + for _, b := range fn.DomPreorder() { + if x, k := isComparisonBlock(b); x != nil { + // Block b starts a switch. 
+ sw := Switch{Start: b, X: x} + valueSwitch(&sw, k, seen) + if len(sw.ConstCases) > 1 { + switches = append(switches, sw) + } + } + + if y, x, T := isTypeAssertBlock(b); y != nil { + // Block b starts a type switch. + sw := Switch{Start: b, X: x} + typeSwitch(&sw, y, T, seen) + if len(sw.TypeCases) > 1 { + switches = append(switches, sw) + } + } + } + return switches +} + +func valueSwitch(sw *Switch, k *ssa.Const, seen map[*ssa.BasicBlock]bool) { + b := sw.Start + x := sw.X + for x == sw.X { + if seen[b] { + break + } + seen[b] = true + + sw.ConstCases = append(sw.ConstCases, ConstCase{ + Block: b, + Body: b.Succs[0], + Value: k, + }) + b = b.Succs[1] + if len(b.Instrs) > 2 { + // Block b contains not just 'if x == k', + // so it may have side effects that + // make it unsafe to elide. + break + } + if len(b.Preds) != 1 { + // Block b has multiple predecessors, + // so it cannot be treated as a case. + break + } + x, k = isComparisonBlock(b) + } + sw.Default = b +} + +func typeSwitch(sw *Switch, y ssa.Value, T types.Type, seen map[*ssa.BasicBlock]bool) { + b := sw.Start + x := sw.X + for x == sw.X { + if seen[b] { + break + } + seen[b] = true + + sw.TypeCases = append(sw.TypeCases, TypeCase{ + Block: b, + Body: b.Succs[0], + Type: T, + Binding: y, + }) + b = b.Succs[1] + if len(b.Instrs) > 4 { + // Block b contains not just + // {TypeAssert; Extract #0; Extract #1; If} + // so it may have side effects that + // make it unsafe to elide. + break + } + if len(b.Preds) != 1 { + // Block b has multiple predecessors, + // so it cannot be treated as a case. + break + } + y, x, T = isTypeAssertBlock(b) + } + sw.Default = b +} + +// isComparisonBlock returns the operands (v, k) if a block ends with +// a comparison v==k, where k is a compile-time constant. +func isComparisonBlock(b *ssa.BasicBlock) (v ssa.Value, k *ssa.Const) { + if n := len(b.Instrs); n >= 2 { + if i, ok := b.Instrs[n-1].(*ssa.If); ok { + if binop, ok := i.Cond.(*ssa.BinOp); ok && binop.Block() == b && binop.Op == token.EQL { + if k, ok := binop.Y.(*ssa.Const); ok { + return binop.X, k + } + if k, ok := binop.X.(*ssa.Const); ok { + return binop.Y, k + } + } + } + } + return +} + +// isTypeAssertBlock returns the operands (y, x, T) if a block ends with +// a type assertion "if y, ok := x.(T); ok {". +func isTypeAssertBlock(b *ssa.BasicBlock) (y, x ssa.Value, T types.Type) { + if n := len(b.Instrs); n >= 4 { + if i, ok := b.Instrs[n-1].(*ssa.If); ok { + if ext1, ok := i.Cond.(*ssa.Extract); ok && ext1.Block() == b && ext1.Index == 1 { + if ta, ok := ext1.Tuple.(*ssa.TypeAssert); ok && ta.Block() == b { + // hack: relies upon instruction ordering. + if ext0, ok := b.Instrs[n-3].(*ssa.Extract); ok { + return ext0, ta.X, ta.AssertedType + } + } + } + } + } + return +} diff --git a/vendor/golang.org/x/tools/go/ssa/ssautil/visit.go b/vendor/golang.org/x/tools/go/ssa/ssautil/visit.go new file mode 100644 index 0000000..b4feb42 --- /dev/null +++ b/vendor/golang.org/x/tools/go/ssa/ssautil/visit.go @@ -0,0 +1,157 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package ssautil // import "golang.org/x/tools/go/ssa/ssautil" + +import ( + "go/ast" + "go/types" + + "golang.org/x/tools/go/ssa" + + _ "unsafe" // for linkname hack +) + +// This file defines utilities for visiting the SSA representation of +// a Program. +// +// TODO(adonovan): test coverage. 
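Editorial aside, not part of the vendored diff: a minimal sketch, under the same assumed "./..." layout as the previous example, of how AllFunctions and MainPackages (documented below) are typically combined once a program has been built. LoadAllSyntax is used because AllPackages constructs SSA for dependencies as well; the sketch also assumes the initial packages are well typed, so the returned package slice contains no nils.

package main

import (
	"fmt"
	"log"

	"golang.org/x/tools/go/packages"
	"golang.org/x/tools/go/ssa"
	"golang.org/x/tools/go/ssa/ssautil"
)

func main() {
	pkgs, err := packages.Load(&packages.Config{Mode: packages.LoadAllSyntax}, "./...")
	if err != nil {
		log.Fatal(err)
	}

	prog, ssapkgs := ssautil.AllPackages(pkgs, ssa.InstantiateGenerics)
	prog.Build()

	// Entry points: packages named "main" that define a main function.
	for _, p := range ssautil.MainPackages(ssapkgs) {
		fmt.Println("main package:", p.Pkg.Path())
	}

	// A conservative over-approximation of the functions the program may need.
	fmt.Println("functions (including wrappers):", len(ssautil.AllFunctions(prog)))
}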
+ +// AllFunctions finds and returns the set of functions potentially +// needed by program prog, as determined by a simple linker-style +// reachability algorithm starting from the members and method-sets of +// each package. The result may include anonymous functions and +// synthetic wrappers. +// +// Precondition: all packages are built. +// +// TODO(adonovan): this function is underspecified. It doesn't +// actually work like a linker, which computes reachability from main +// using something like go/callgraph/cha (without materializing the +// call graph). In fact, it treats all public functions and all +// methods of public non-parameterized types as roots, even though +// they may be unreachable--but only in packages created from syntax. +// +// I think we should deprecate AllFunctions function in favor of two +// clearly defined ones: +// +// 1. The first would efficiently compute CHA reachability from a set +// of main packages, making it suitable for a whole-program +// analysis context with InstantiateGenerics, in conjunction with +// Program.Build. +// +// 2. The second would return only the set of functions corresponding +// to source Func{Decl,Lit} syntax, like SrcFunctions in +// go/analysis/passes/buildssa; this is suitable for +// package-at-a-time (or handful of packages) context. +// ssa.Package could easily expose it as a field. +// +// We could add them unexported for now and use them via the linkname hack. +func AllFunctions(prog *ssa.Program) map[*ssa.Function]bool { + seen := make(map[*ssa.Function]bool) + + var function func(fn *ssa.Function) + function = func(fn *ssa.Function) { + if !seen[fn] { + seen[fn] = true + var buf [10]*ssa.Value // avoid alloc in common case + for _, b := range fn.Blocks { + for _, instr := range b.Instrs { + for _, op := range instr.Operands(buf[:0]) { + if fn, ok := (*op).(*ssa.Function); ok { + function(fn) + } + } + } + } + } + } + + // TODO(adonovan): opt: provide a way to share a builder + // across a sequence of MethodValue calls. + + methodsOf := func(T types.Type) { + if !types.IsInterface(T) { + mset := prog.MethodSets.MethodSet(T) + for i := 0; i < mset.Len(); i++ { + function(prog.MethodValue(mset.At(i))) + } + } + } + + // Historically, Program.RuntimeTypes used to include the type + // of any exported member of a package loaded from syntax that + // has a non-parameterized type, plus all types + // reachable from that type using reflection, even though + // these runtime types may not be required for them. + // + // Rather than break existing programs that rely on + // AllFunctions visiting extra methods that are unreferenced + // by IR and unreachable via reflection, we moved the logic + // here, unprincipled though it is. + // (See doc comment for better ideas.) + // + // Nonetheless, after the move, we no longer visit every + // method of any type recursively reachable from T, only the + // methods of T and *T themselves, and we only apply this to + // named types T, and not to the type of every exported + // package member. + exportedTypeHack := func(t *ssa.Type) { + if isSyntactic(t.Package()) && + ast.IsExported(t.Name()) && + !types.IsInterface(t.Type()) { + // Consider only named types. + // (Ignore aliases and unsafe.Pointer.) 
+ if named, ok := t.Type().(*types.Named); ok { + if named.TypeParams() == nil { + methodsOf(named) // T + methodsOf(types.NewPointer(named)) // *T + } + } + } + } + + for _, pkg := range prog.AllPackages() { + for _, mem := range pkg.Members { + switch mem := mem.(type) { + case *ssa.Function: + // Visit all package-level declared functions. + function(mem) + + case *ssa.Type: + exportedTypeHack(mem) + } + } + } + + // Visit all methods of types for which runtime types were + // materialized, as they are reachable through reflection. + for _, T := range prog.RuntimeTypes() { + methodsOf(T) + } + + return seen +} + +// MainPackages returns the subset of the specified packages +// named "main" that define a main function. +// The result may include synthetic "testmain" packages. +func MainPackages(pkgs []*ssa.Package) []*ssa.Package { + var mains []*ssa.Package + for _, pkg := range pkgs { + if pkg.Pkg.Name() == "main" && pkg.Func("main") != nil { + mains = append(mains, pkg) + } + } + return mains +} + +// TODO(adonovan): propose a principled API for this. One possibility +// is a new field, Package.SrcFunctions []*Function, which would +// contain the list of SrcFunctions described in point 2 of the +// AllFunctions doc comment, or nil if the package is not from syntax. +// But perhaps overloading nil vs empty slice is too subtle. +// +//go:linkname isSyntactic golang.org/x/tools/go/ssa.isSyntactic +func isSyntactic(pkg *ssa.Package) bool diff --git a/vendor/golang.org/x/tools/go/ssa/subst.go b/vendor/golang.org/x/tools/go/ssa/subst.go new file mode 100644 index 0000000..4dcb871 --- /dev/null +++ b/vendor/golang.org/x/tools/go/ssa/subst.go @@ -0,0 +1,642 @@ +// Copyright 2022 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package ssa + +import ( + "go/types" + + "golang.org/x/tools/go/types/typeutil" + "golang.org/x/tools/internal/aliases" +) + +// subster defines a type substitution operation of a set of type parameters +// to type parameter free replacement types. Substitution is done within +// the context of a package-level function instantiation. *Named types +// declared in the function are unique to the instantiation. +// +// For example, given a parameterized function F +// +// func F[S, T any]() any { +// type X struct{ s S; next *X } +// var p *X +// return p +// } +// +// calling the instantiation F[string, int]() returns an interface +// value (*X[string,int], nil) where the underlying value of +// X[string,int] is a struct{s string; next *X[string,int]}. +// +// A nil *subster is a valid, empty substitution map. It always acts as +// the identity function. This allows for treating parameterized and +// non-parameterized functions identically while compiling to ssa. +// +// Not concurrency-safe. +// +// Note: Some may find it helpful to think through some of the most +// complex substitution cases using lambda calculus inspired notation. +// subst.typ() solves evaluating a type expression E +// within the body of a function Fn[m] with the type parameters m +// once we have applied the type arguments N. +// We can succinctly write this as a function application: +// +// ((λm. E) N) +// +// go/types does not provide this interface directly. 
+// So what subster provides is a type substitution operation +// +// E[m:=N] +type subster struct { + replacements map[*types.TypeParam]types.Type // values should contain no type params + cache map[types.Type]types.Type // cache of subst results + origin *types.Func // types.Objects declared within this origin function are unique within this context + ctxt *types.Context // speeds up repeated instantiations + uniqueness typeutil.Map // determines the uniqueness of the instantiations within the function + // TODO(taking): consider adding Pos +} + +// Returns a subster that replaces tparams[i] with targs[i]. Uses ctxt as a cache. +// targs should not contain any types in tparams. +// fn is the generic function for which we are substituting. +func makeSubster(ctxt *types.Context, fn *types.Func, tparams *types.TypeParamList, targs []types.Type, debug bool) *subster { + assert(tparams.Len() == len(targs), "makeSubster argument count must match") + + subst := &subster{ + replacements: make(map[*types.TypeParam]types.Type, tparams.Len()), + cache: make(map[types.Type]types.Type), + origin: fn.Origin(), + ctxt: ctxt, + } + for i := 0; i < tparams.Len(); i++ { + subst.replacements[tparams.At(i)] = targs[i] + } + return subst +} + +// typ returns the type of t with the type parameter tparams[i] substituted +// for the type targs[i] where subst was created using tparams and targs. +func (subst *subster) typ(t types.Type) (res types.Type) { + if subst == nil { + return t // A nil subst is type preserving. + } + if r, ok := subst.cache[t]; ok { + return r + } + defer func() { + subst.cache[t] = res + }() + + switch t := t.(type) { + case *types.TypeParam: + if r := subst.replacements[t]; r != nil { + return r + } + return t + + case *types.Basic: + return t + + case *types.Array: + if r := subst.typ(t.Elem()); r != t.Elem() { + return types.NewArray(r, t.Len()) + } + return t + + case *types.Slice: + if r := subst.typ(t.Elem()); r != t.Elem() { + return types.NewSlice(r) + } + return t + + case *types.Pointer: + if r := subst.typ(t.Elem()); r != t.Elem() { + return types.NewPointer(r) + } + return t + + case *types.Tuple: + return subst.tuple(t) + + case *types.Struct: + return subst.struct_(t) + + case *types.Map: + key := subst.typ(t.Key()) + elem := subst.typ(t.Elem()) + if key != t.Key() || elem != t.Elem() { + return types.NewMap(key, elem) + } + return t + + case *types.Chan: + if elem := subst.typ(t.Elem()); elem != t.Elem() { + return types.NewChan(t.Dir(), elem) + } + return t + + case *types.Signature: + return subst.signature(t) + + case *types.Union: + return subst.union(t) + + case *types.Interface: + return subst.interface_(t) + + case *aliases.Alias: + return subst.alias(t) + + case *types.Named: + return subst.named(t) + + case *opaqueType: + return t // opaque types are never substituted + + default: + panic("unreachable") + } +} + +// types returns the result of {subst.typ(ts[i])}. +func (subst *subster) types(ts []types.Type) []types.Type { + res := make([]types.Type, len(ts)) + for i := range ts { + res[i] = subst.typ(ts[i]) + } + return res +} + +func (subst *subster) tuple(t *types.Tuple) *types.Tuple { + if t != nil { + if vars := subst.varlist(t); vars != nil { + return types.NewTuple(vars...) + } + } + return t +} + +type varlist interface { + At(i int) *types.Var + Len() int +} + +// fieldlist is an adapter for structs for the varlist interface. 
+type fieldlist struct { + str *types.Struct +} + +func (fl fieldlist) At(i int) *types.Var { return fl.str.Field(i) } +func (fl fieldlist) Len() int { return fl.str.NumFields() } + +func (subst *subster) struct_(t *types.Struct) *types.Struct { + if t != nil { + if fields := subst.varlist(fieldlist{t}); fields != nil { + tags := make([]string, t.NumFields()) + for i, n := 0, t.NumFields(); i < n; i++ { + tags[i] = t.Tag(i) + } + return types.NewStruct(fields, tags) + } + } + return t +} + +// varlist returns subst(in[i]) or return nils if subst(v[i]) == v[i] for all i. +func (subst *subster) varlist(in varlist) []*types.Var { + var out []*types.Var // nil => no updates + for i, n := 0, in.Len(); i < n; i++ { + v := in.At(i) + w := subst.var_(v) + if v != w && out == nil { + out = make([]*types.Var, n) + for j := 0; j < i; j++ { + out[j] = in.At(j) + } + } + if out != nil { + out[i] = w + } + } + return out +} + +func (subst *subster) var_(v *types.Var) *types.Var { + if v != nil { + if typ := subst.typ(v.Type()); typ != v.Type() { + if v.IsField() { + return types.NewField(v.Pos(), v.Pkg(), v.Name(), typ, v.Embedded()) + } + return types.NewVar(v.Pos(), v.Pkg(), v.Name(), typ) + } + } + return v +} + +func (subst *subster) union(u *types.Union) *types.Union { + var out []*types.Term // nil => no updates + + for i, n := 0, u.Len(); i < n; i++ { + t := u.Term(i) + r := subst.typ(t.Type()) + if r != t.Type() && out == nil { + out = make([]*types.Term, n) + for j := 0; j < i; j++ { + out[j] = u.Term(j) + } + } + if out != nil { + out[i] = types.NewTerm(t.Tilde(), r) + } + } + + if out != nil { + return types.NewUnion(out) + } + return u +} + +func (subst *subster) interface_(iface *types.Interface) *types.Interface { + if iface == nil { + return nil + } + + // methods for the interface. Initially nil if there is no known change needed. + // Signatures for the method where recv is nil. NewInterfaceType fills in the receivers. + var methods []*types.Func + initMethods := func(n int) { // copy first n explicit methods + methods = make([]*types.Func, iface.NumExplicitMethods()) + for i := 0; i < n; i++ { + f := iface.ExplicitMethod(i) + norecv := changeRecv(f.Type().(*types.Signature), nil) + methods[i] = types.NewFunc(f.Pos(), f.Pkg(), f.Name(), norecv) + } + } + for i := 0; i < iface.NumExplicitMethods(); i++ { + f := iface.ExplicitMethod(i) + // On interfaces, we need to cycle break on anonymous interface types + // being in a cycle with their signatures being in cycles with their receivers + // that do not go through a Named. 
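+ // Substituting the receiver-less signature, and letting NewInterfaceType
+ // fill the receivers back in afterwards, is what breaks that cycle.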
+ norecv := changeRecv(f.Type().(*types.Signature), nil) + sig := subst.typ(norecv) + if sig != norecv && methods == nil { + initMethods(i) + } + if methods != nil { + methods[i] = types.NewFunc(f.Pos(), f.Pkg(), f.Name(), sig.(*types.Signature)) + } + } + + var embeds []types.Type + initEmbeds := func(n int) { // copy first n embedded types + embeds = make([]types.Type, iface.NumEmbeddeds()) + for i := 0; i < n; i++ { + embeds[i] = iface.EmbeddedType(i) + } + } + for i := 0; i < iface.NumEmbeddeds(); i++ { + e := iface.EmbeddedType(i) + r := subst.typ(e) + if e != r && embeds == nil { + initEmbeds(i) + } + if embeds != nil { + embeds[i] = r + } + } + + if methods == nil && embeds == nil { + return iface + } + if methods == nil { + initMethods(iface.NumExplicitMethods()) + } + if embeds == nil { + initEmbeds(iface.NumEmbeddeds()) + } + return types.NewInterfaceType(methods, embeds).Complete() +} + +func (subst *subster) alias(t *aliases.Alias) types.Type { + // See subster.named. This follows the same strategy. + tparams := aliases.TypeParams(t) + targs := aliases.TypeArgs(t) + tname := t.Obj() + torigin := aliases.Origin(t) + + if !declaredWithin(tname, subst.origin) { + // t is declared outside of the function origin. So t is a package level type alias. + if targs.Len() == 0 { + // No type arguments so no instantiation needed. + return t + } + + // Instantiate with the substituted type arguments. + newTArgs := subst.typelist(targs) + return subst.instantiate(torigin, newTArgs) + } + + if targs.Len() == 0 { + // t is declared within the function origin and has no type arguments. + // + // Example: This corresponds to A or B in F, but not A[int]: + // + // func F[T any]() { + // type A[S any] = struct{t T, s S} + // type B = T + // var x A[int] + // ... + // } + // + // This is somewhat different than *Named as *Alias cannot be created recursively. + + // Copy and substitute type params. + var newTParams []*types.TypeParam + for i := 0; i < tparams.Len(); i++ { + cur := tparams.At(i) + cobj := cur.Obj() + cname := types.NewTypeName(cobj.Pos(), cobj.Pkg(), cobj.Name(), nil) + ntp := types.NewTypeParam(cname, nil) + subst.cache[cur] = ntp // See the comment "Note: Subtle" in subster.named. + newTParams = append(newTParams, ntp) + } + + // Substitute rhs. + rhs := subst.typ(aliases.Rhs(t)) + + // Create the fresh alias. + obj := aliases.NewAlias(true, tname.Pos(), tname.Pkg(), tname.Name(), rhs) + fresh := obj.Type() + if fresh, ok := fresh.(*aliases.Alias); ok { + // TODO: assume ok when aliases are always materialized (go1.27). + aliases.SetTypeParams(fresh, newTParams) + } + + // Substitute into all of the constraints after they are created. + for i, ntp := range newTParams { + bound := tparams.At(i).Constraint() + ntp.SetConstraint(subst.typ(bound)) + } + return fresh + } + + // t is declared within the function origin and has type arguments. + // + // Example: This corresponds to A[int] in F. Cases A and B are handled above. + // func F[T any]() { + // type A[S any] = struct{t T, s S} + // type B = T + // var x A[int] + // ... + // } + subOrigin := subst.typ(torigin) + subTArgs := subst.typelist(targs) + return subst.instantiate(subOrigin, subTArgs) +} + +func (subst *subster) named(t *types.Named) types.Type { + // A Named type is a user defined type. + // Ignoring generics, Named types are canonical: they are identical if + // and only if they have the same defining symbol. 
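+ // For example, given a package-level declaration "type T int", every
+ // reference to T denotes the same type, wherever it appears.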
+ // Generics complicate things, both if the type definition itself is + // parameterized, and if the type is defined within the scope of a + // parameterized function. In this case, two named types are identical if + // and only if their identifying symbols are identical, and all type + // arguments bindings in scope of the named type definition (including the + // type parameters of the definition itself) are equivalent. + // + // Notably: + // 1. For type definition type T[P1 any] struct{}, T[A] and T[B] are identical + // only if A and B are identical. + // 2. Inside the generic func Fn[m any]() any { type T struct{}; return T{} }, + // the result of Fn[A] and Fn[B] have identical type if and only if A and + // B are identical. + // 3. Both 1 and 2 could apply, such as in + // func F[m any]() any { type T[x any] struct{}; return T{} } + // + // A subster replaces type parameters within a function scope, and therefore must + // also replace free type parameters in the definitions of local types. + // + // Note: There are some detailed notes sprinkled throughout that borrow from + // lambda calculus notation. These contain some over simplifying math. + // + // LC: One way to think about subster is that it is a way of evaluating + // ((λm. E) N) as E[m:=N]. + // Each Named type t has an object *TypeName within a scope S that binds an + // underlying type expression U. U can refer to symbols within S (+ S's ancestors). + // Let x = t.TypeParams() and A = t.TypeArgs(). + // Each Named type t is then either: + // U where len(x) == 0 && len(A) == 0 + // λx. U where len(x) != 0 && len(A) == 0 + // ((λx. U) A) where len(x) == len(A) + // In each case, we will evaluate t[m:=N]. + tparams := t.TypeParams() // x + targs := t.TypeArgs() // A + + if !declaredWithin(t.Obj(), subst.origin) { + // t is declared outside of Fn[m]. + // + // In this case, we can skip substituting t.Underlying(). + // The underlying type cannot refer to the type parameters. + // + // LC: Let free(E) be the set of free type parameters in an expression E. + // Then whenever m ∉ free(E), then E = E[m:=N]. + // t ∉ Scope(fn) so therefore m ∉ free(U) and m ∩ x = ∅. + if targs.Len() == 0 { + // t has no type arguments. So it does not need to be instantiated. + // + // This is the normal case in real Go code, where t is not parameterized, + // declared at some package scope, and m is a TypeParam from a parameterized + // function F[m] or method. + // + // LC: m ∉ free(A) lets us conclude m ∉ free(t). So t=t[m:=N]. + return t + } + + // t is declared outside of Fn[m] and has type arguments. + // The type arguments may contain type parameters m so + // substitute the type arguments, and instantiate the substituted + // type arguments. + // + // LC: Evaluate this as ((λx. U) A') where A' = A[m := N]. + newTArgs := subst.typelist(targs) + return subst.instantiate(t.Origin(), newTArgs) + } + + // t is declared within Fn[m]. + + if targs.Len() == 0 { // no type arguments? + assert(t == t.Origin(), "local parameterized type abstraction must be an origin type") + + // t has no type arguments. + // The underlying type of t may contain the function's type parameters, + // replace these, and create a new type. + // + // Subtle: We short circuit substitution and use a newly created type in + // subst, i.e. cache[t]=fresh, to preemptively replace t with fresh + // in recursive types during traversal. This both breaks infinite cycles + // and allows for constructing types with the replacement applied in + // subst.typ(U). 
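+ // For example (an illustrative case), a local recursive type
+ //
+ //	type L struct{ t T; next *L }
+ //
+ // must have its *L field resolve to the fresh copy rather than to the
+ // original L; the cache entry makes that happen while the underlying
+ // type is being substituted.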
+ // + // A new copy of the Named and Typename (and constraints) per function + // instantiation matches the semantics of Go, which treats all function + // instantiations F[N] as having distinct local types. + // + // LC: x.Len()=0 can be thought of as a special case of λx. U. + // LC: Evaluate (λx. U)[m:=N] as (λx'. U') where U'=U[x:=x',m:=N]. + tname := t.Obj() + obj := types.NewTypeName(tname.Pos(), tname.Pkg(), tname.Name(), nil) + fresh := types.NewNamed(obj, nil, nil) + var newTParams []*types.TypeParam + for i := 0; i < tparams.Len(); i++ { + cur := tparams.At(i) + cobj := cur.Obj() + cname := types.NewTypeName(cobj.Pos(), cobj.Pkg(), cobj.Name(), nil) + ntp := types.NewTypeParam(cname, nil) + subst.cache[cur] = ntp + newTParams = append(newTParams, ntp) + } + fresh.SetTypeParams(newTParams) + subst.cache[t] = fresh + subst.cache[fresh] = fresh + fresh.SetUnderlying(subst.typ(t.Underlying())) + // Substitute into all of the constraints after they are created. + for i, ntp := range newTParams { + bound := tparams.At(i).Constraint() + ntp.SetConstraint(subst.typ(bound)) + } + return fresh + } + + // t is defined within Fn[m] and t has type arguments (an instantiation). + // We reduce this to the two cases above: + // (1) substitute the function's type parameters into t.Origin(). + // (2) substitute t's type arguments A and instantiate the updated t.Origin() with these. + // + // LC: Evaluate ((λx. U) A)[m:=N] as (t' A') where t' = (λx. U)[m:=N] and A'=A [m:=N] + subOrigin := subst.typ(t.Origin()) + subTArgs := subst.typelist(targs) + return subst.instantiate(subOrigin, subTArgs) +} + +func (subst *subster) instantiate(orig types.Type, targs []types.Type) types.Type { + i, err := types.Instantiate(subst.ctxt, orig, targs, false) + assert(err == nil, "failed to Instantiate named (Named or Alias) type") + if c, _ := subst.uniqueness.At(i).(types.Type); c != nil { + return c.(types.Type) + } + subst.uniqueness.Set(i, i) + return i +} + +func (subst *subster) typelist(l *types.TypeList) []types.Type { + res := make([]types.Type, l.Len()) + for i := 0; i < l.Len(); i++ { + res[i] = subst.typ(l.At(i)) + } + return res +} + +func (subst *subster) signature(t *types.Signature) types.Type { + tparams := t.TypeParams() + + // We are choosing not to support tparams.Len() > 0 until a need has been observed in practice. + // + // There are some known usages for types.Types coming from types.{Eval,CheckExpr}. + // To support tparams.Len() > 0, we just need to do the following [psuedocode]: + // targs := {subst.replacements[tparams[i]]]}; Instantiate(ctxt, t, targs, false) + + assert(tparams.Len() == 0, "Substituting types.Signatures with generic functions are currently unsupported.") + + // Either: + // (1)non-generic function. + // no type params to substitute + // (2)generic method and recv needs to be substituted. + + // Receivers can be either: + // named + // pointer to named + // interface + // nil + // interface is the problematic case. We need to cycle break there! + recv := subst.var_(t.Recv()) + params := subst.tuple(t.Params()) + results := subst.tuple(t.Results()) + if recv != t.Recv() || params != t.Params() || results != t.Results() { + return types.NewSignatureType(recv, nil, nil, params, results, t.Variadic()) + } + return t +} + +// reaches returns true if a type t reaches any type t' s.t. c[t'] == true. +// It updates c to cache results. +// +// reaches is currently only part of the wellFormed debug logic, and +// in practice c is initially only type parameters. 
It is not currently +// relied on in production. +func reaches(t types.Type, c map[types.Type]bool) (res bool) { + if c, ok := c[t]; ok { + return c + } + + // c is populated with temporary false entries as types are visited. + // This avoids repeat visits and break cycles. + c[t] = false + defer func() { + c[t] = res + }() + + switch t := t.(type) { + case *types.TypeParam, *types.Basic: + return false + case *types.Array: + return reaches(t.Elem(), c) + case *types.Slice: + return reaches(t.Elem(), c) + case *types.Pointer: + return reaches(t.Elem(), c) + case *types.Tuple: + for i := 0; i < t.Len(); i++ { + if reaches(t.At(i).Type(), c) { + return true + } + } + case *types.Struct: + for i := 0; i < t.NumFields(); i++ { + if reaches(t.Field(i).Type(), c) { + return true + } + } + case *types.Map: + return reaches(t.Key(), c) || reaches(t.Elem(), c) + case *types.Chan: + return reaches(t.Elem(), c) + case *types.Signature: + if t.Recv() != nil && reaches(t.Recv().Type(), c) { + return true + } + return reaches(t.Params(), c) || reaches(t.Results(), c) + case *types.Union: + for i := 0; i < t.Len(); i++ { + if reaches(t.Term(i).Type(), c) { + return true + } + } + case *types.Interface: + for i := 0; i < t.NumEmbeddeds(); i++ { + if reaches(t.Embedded(i), c) { + return true + } + } + for i := 0; i < t.NumExplicitMethods(); i++ { + if reaches(t.ExplicitMethod(i).Type(), c) { + return true + } + } + case *types.Named, *aliases.Alias: + return reaches(t.Underlying(), c) + default: + panic("unreachable") + } + return false +} diff --git a/vendor/golang.org/x/tools/go/ssa/task.go b/vendor/golang.org/x/tools/go/ssa/task.go new file mode 100644 index 0000000..5024985 --- /dev/null +++ b/vendor/golang.org/x/tools/go/ssa/task.go @@ -0,0 +1,103 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package ssa + +import ( + "sync/atomic" +) + +// Each task has two states: it is initially "active", +// and transitions to "done". +// +// tasks form a directed graph. An edge from x to y (with y in x.edges) +// indicates that the task x waits on the task y to be done. +// Cycles are permitted. +// +// Calling x.wait() blocks the calling goroutine until task x, +// and all the tasks transitively reachable from x are done. +// +// The nil *task is always considered done. +type task struct { + done chan unit // close when the task is done. + edges map[*task]unit // set of predecessors of this task. + transitive atomic.Bool // true once it is known all predecessors are done. +} + +func (x *task) isTransitivelyDone() bool { return x == nil || x.transitive.Load() } + +// addEdge creates an edge from x to y, indicating that +// x.wait() will not return before y is done. +// All calls to x.addEdge(...) should happen before x.markDone(). +func (x *task) addEdge(y *task) { + if x == y || y.isTransitivelyDone() { + return // no work remaining + } + + // heuristic done check + select { + case <-x.done: + panic("cannot add an edge to a done task") + default: + } + + if x.edges == nil { + x.edges = make(map[*task]unit) + } + x.edges[y] = unit{} +} + +// markDone changes the task's state to markDone. +func (x *task) markDone() { + if x != nil { + close(x.done) + } +} + +// wait blocks until x and all the tasks it can reach through edges are done. +func (x *task) wait() { + if x.isTransitivelyDone() { + return // already known to be done. Skip allocations. 
+	}
+
+	// Use BFS to wait on u.done to be closed, for all u transitively
+	// reachable from x via edges.
+	//
+	// This work can be repeated by multiple workers doing wait().
+	//
+	// Note: Tarjan's SCC algorithm is able to mark SCCs as transitively done
+	// as soon as the SCC has been visited. This is theoretically faster, but is
+	// a more complex algorithm. Until we have evidence that we need the more
+	// complex algorithm, the simpler BFS algorithm is implemented.
+	//
+	// In Go 1.23, ssa/TestStdlib reaches <=3 *tasks per wait() in most schedules.
+	// On some schedules, there is a cycle building net/http and internal/trace/testtrace
+	// due to slices functions.
+	work := []*task{x}
+	enqueued := map[*task]unit{x: {}}
+	for i := 0; i < len(work); i++ {
+		u := work[i]
+		if u.isTransitivelyDone() { // already transitively done
+			work[i] = nil
+			continue
+		}
+		<-u.done // wait for u to be marked done.
+
+		for v := range u.edges {
+			if _, ok := enqueued[v]; !ok {
+				enqueued[v] = unit{}
+				work = append(work, v)
+			}
+		}
+	}
+
+	// work is transitively closed over dependencies:
+	// every u in work is done (or transitively done and was skipped),
+	// so every u is transitively done.
+	for _, u := range work {
+		if u != nil {
+			u.transitive.Store(true)
+		}
+	}
+}
diff --git a/vendor/golang.org/x/tools/go/ssa/util.go b/vendor/golang.org/x/tools/go/ssa/util.go
new file mode 100644
index 0000000..549c9c8
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/ssa/util.go
@@ -0,0 +1,430 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssa
+
+// This file defines a number of miscellaneous utility functions.
+
+import (
+	"fmt"
+	"go/ast"
+	"go/token"
+	"go/types"
+	"io"
+	"os"
+	"sync"
+
+	"golang.org/x/tools/go/ast/astutil"
+	"golang.org/x/tools/go/types/typeutil"
+	"golang.org/x/tools/internal/aliases"
+	"golang.org/x/tools/internal/typeparams"
+	"golang.org/x/tools/internal/typesinternal"
+)
+
+type unit struct{}
+
+//// Sanity checking utilities
+
+// assert panics with the message msg if p is false.
+// Avoid combining with expensive string formatting.
+func assert(p bool, msg string) {
+	if !p {
+		panic(msg)
+	}
+}
+
+//// AST utilities
+
+func unparen(e ast.Expr) ast.Expr { return astutil.Unparen(e) }
+
+// isBlankIdent returns true iff e is an Ident with name "_".
+// They have no associated types.Object, and thus no type.
+func isBlankIdent(e ast.Expr) bool {
+	id, ok := e.(*ast.Ident)
+	return ok && id.Name == "_"
+}
+
+// rangePosition is the position to give for the `range` token in a RangeStmt.
+var rangePosition = func(rng *ast.RangeStmt) token.Pos {
+	// Before 1.20, this is unreachable.
+	// rng.For is a close, but incorrect position.
+	return rng.For
+}
+
+//// Type utilities. Some of these belong in go/types.
+
+// isNonTypeParamInterface reports whether t is an interface type but not a type parameter.
+func isNonTypeParamInterface(t types.Type) bool {
+	return !typeparams.IsTypeParam(t) && types.IsInterface(t)
+}
+
+// isBasic reports whether t is a basic type.
+// t is assumed to be an Underlying type (not Named or Alias).
+func isBasic(t types.Type) bool {
+	_, ok := t.(*types.Basic)
+	return ok
+}
+
+// isString reports whether t is exactly a string type.
+// t is assumed to be an Underlying type (not Named or Alias).
+func isString(t types.Type) bool {
+	basic, ok := t.(*types.Basic)
+	return ok && basic.Info()&types.IsString != 0
+}
+
+// isByteSlice reports whether t is of the form []~bytes.
+// t is assumed to be an Underlying type (not Named or Alias).
+func isByteSlice(t types.Type) bool {
+	if b, ok := t.(*types.Slice); ok {
+		e, _ := b.Elem().Underlying().(*types.Basic)
+		return e != nil && e.Kind() == types.Byte
+	}
+	return false
+}
+
+// isRuneSlice reports whether t is of the form []~runes.
+// t is assumed to be an Underlying type (not Named or Alias).
+func isRuneSlice(t types.Type) bool {
+	if b, ok := t.(*types.Slice); ok {
+		e, _ := b.Elem().Underlying().(*types.Basic)
+		return e != nil && e.Kind() == types.Rune
+	}
+	return false
+}
+
+// isBasicConvTypes returns true when a type set can be
+// one side of a Convert operation. This is when:
+// - All are basic, []byte, or []rune.
+// - At least 1 is basic.
+// - At most 1 is []byte or []rune.
+func isBasicConvTypes(tset termList) bool {
+	basics := 0
+	all := underIs(tset, func(t types.Type) bool {
+		if isBasic(t) {
+			basics++
+			return true
+		}
+		return isByteSlice(t) || isRuneSlice(t)
+	})
+	return all && basics >= 1 && tset.Len()-basics <= 1
+}
+
+// isPointer reports whether t's underlying type is a pointer.
+func isPointer(t types.Type) bool {
+	return is[*types.Pointer](t.Underlying())
+}
+
+// isPointerCore reports whether t's core type is a pointer.
+//
+// (Most pointer manipulation is related to receivers, in which case
+// isPointer is appropriate. Those callers can use isPointer(t).)
+func isPointerCore(t types.Type) bool {
+	return is[*types.Pointer](typeparams.CoreType(t))
+}
+
+func is[T any](x any) bool {
+	_, ok := x.(T)
+	return ok
+}
+
+// recvType returns the receiver type of method obj.
+func recvType(obj *types.Func) types.Type {
+	return obj.Type().(*types.Signature).Recv().Type()
+}
+
+// fieldOf returns the index'th field of the (core type of) a struct type;
+// otherwise returns nil.
+func fieldOf(typ types.Type, index int) *types.Var {
+	if st, ok := typeparams.CoreType(typ).(*types.Struct); ok {
+		if 0 <= index && index < st.NumFields() {
+			return st.Field(index)
+		}
+	}
+	return nil
+}
+
+// isUntyped reports whether typ is the type of an untyped constant.
+func isUntyped(typ types.Type) bool {
+	// No Underlying/Unalias: untyped constant types cannot be Named or Alias.
+	b, ok := typ.(*types.Basic)
+	return ok && b.Info()&types.IsUntyped != 0
+}
+
+// declaredWithin reports whether an object is declared within a function.
+//
+// obj must not be a method or a field.
+func declaredWithin(obj types.Object, fn *types.Func) bool {
+	if obj.Pos() != token.NoPos {
+		return fn.Scope().Contains(obj.Pos()) // trust the positions if they exist.
+	}
+	if fn.Pkg() != obj.Pkg() {
+		return false // fast path for different packages
+	}
+
+	// Traverse Parent() scopes for fn.Scope().
+	for p := obj.Parent(); p != nil; p = p.Parent() {
+		if p == fn.Scope() {
+			return true
+		}
+	}
+	return false
+}
+
+// logStack prints the formatted "start" message to stderr and
+// returns a closure that prints the corresponding "end" message.
+// Call using 'defer logStack(...)()' to show builder stack on panic.
+// Don't forget trailing parens!
+func logStack(format string, args ...interface{}) func() {
+	msg := fmt.Sprintf(format, args...)
+ io.WriteString(os.Stderr, msg) + io.WriteString(os.Stderr, "\n") + return func() { + io.WriteString(os.Stderr, msg) + io.WriteString(os.Stderr, " end\n") + } +} + +// newVar creates a 'var' for use in a types.Tuple. +func newVar(name string, typ types.Type) *types.Var { + return types.NewParam(token.NoPos, nil, name, typ) +} + +// anonVar creates an anonymous 'var' for use in a types.Tuple. +func anonVar(typ types.Type) *types.Var { + return newVar("", typ) +} + +var lenResults = types.NewTuple(anonVar(tInt)) + +// makeLen returns the len builtin specialized to type func(T)int. +func makeLen(T types.Type) *Builtin { + lenParams := types.NewTuple(anonVar(T)) + return &Builtin{ + name: "len", + sig: types.NewSignature(nil, lenParams, lenResults, false), + } +} + +// receiverTypeArgs returns the type arguments to a method's receiver. +// Returns an empty list if the receiver does not have type arguments. +func receiverTypeArgs(method *types.Func) []types.Type { + recv := method.Type().(*types.Signature).Recv() + _, named := typesinternal.ReceiverNamed(recv) + if named == nil { + return nil // recv is anonymous struct/interface + } + ts := named.TypeArgs() + if ts.Len() == 0 { + return nil + } + targs := make([]types.Type, ts.Len()) + for i := 0; i < ts.Len(); i++ { + targs[i] = ts.At(i) + } + return targs +} + +// recvAsFirstArg takes a method signature and returns a function +// signature with receiver as the first parameter. +func recvAsFirstArg(sig *types.Signature) *types.Signature { + params := make([]*types.Var, 0, 1+sig.Params().Len()) + params = append(params, sig.Recv()) + for i := 0; i < sig.Params().Len(); i++ { + params = append(params, sig.Params().At(i)) + } + return types.NewSignatureType(nil, nil, nil, types.NewTuple(params...), sig.Results(), sig.Variadic()) +} + +// instance returns whether an expression is a simple or qualified identifier +// that is a generic instantiation. +func instance(info *types.Info, expr ast.Expr) bool { + // Compare the logic here against go/types.instantiatedIdent, + // which also handles *IndexExpr and *IndexListExpr. + var id *ast.Ident + switch x := expr.(type) { + case *ast.Ident: + id = x + case *ast.SelectorExpr: + id = x.Sel + default: + return false + } + _, ok := info.Instances[id] + return ok +} + +// instanceArgs returns the Instance[id].TypeArgs as a slice. +func instanceArgs(info *types.Info, id *ast.Ident) []types.Type { + targList := info.Instances[id].TypeArgs + if targList == nil { + return nil + } + + targs := make([]types.Type, targList.Len()) + for i, n := 0, targList.Len(); i < n; i++ { + targs[i] = targList.At(i) + } + return targs +} + +// Mapping of a type T to a canonical instance C s.t. types.Indentical(T, C). +// Thread-safe. +type canonizer struct { + mu sync.Mutex + types typeutil.Map // map from type to a canonical instance + lists typeListMap // map from a list of types to a canonical instance +} + +func newCanonizer() *canonizer { + c := &canonizer{} + h := typeutil.MakeHasher() + c.types.SetHasher(h) + c.lists.hasher = h + return c +} + +// List returns a canonical representative of a list of types. +// Representative of the empty list is nil. +func (c *canonizer) List(ts []types.Type) *typeList { + if len(ts) == 0 { + return nil + } + + unaliasAll := func(ts []types.Type) []types.Type { + // Is there some top level alias? 
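+ // Check before copying so that the common alias-free case allocates
+ // nothing.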
+ var found bool + for _, t := range ts { + if _, ok := t.(*aliases.Alias); ok { + found = true + break + } + } + if !found { + return ts // no top level alias + } + + cp := make([]types.Type, len(ts)) // copy with top level aliases removed. + for i, t := range ts { + cp[i] = aliases.Unalias(t) + } + return cp + } + l := unaliasAll(ts) + + c.mu.Lock() + defer c.mu.Unlock() + return c.lists.rep(l) +} + +// Type returns a canonical representative of type T. +// Removes top-level aliases. +// +// For performance, reasons the canonical instance is order-dependent, +// and may contain deeply nested aliases. +func (c *canonizer) Type(T types.Type) types.Type { + T = aliases.Unalias(T) // remove the top level alias. + + c.mu.Lock() + defer c.mu.Unlock() + + if r := c.types.At(T); r != nil { + return r.(types.Type) + } + c.types.Set(T, T) + return T +} + +// A type for representing a canonized list of types. +type typeList []types.Type + +func (l *typeList) identical(ts []types.Type) bool { + if l == nil { + return len(ts) == 0 + } + n := len(*l) + if len(ts) != n { + return false + } + for i, left := range *l { + right := ts[i] + if !types.Identical(left, right) { + return false + } + } + return true +} + +type typeListMap struct { + hasher typeutil.Hasher + buckets map[uint32][]*typeList +} + +// rep returns a canonical representative of a slice of types. +func (m *typeListMap) rep(ts []types.Type) *typeList { + if m == nil || len(ts) == 0 { + return nil + } + + if m.buckets == nil { + m.buckets = make(map[uint32][]*typeList) + } + + h := m.hash(ts) + bucket := m.buckets[h] + for _, l := range bucket { + if l.identical(ts) { + return l + } + } + + // not present. create a representative. + cp := make(typeList, len(ts)) + copy(cp, ts) + rep := &cp + + m.buckets[h] = append(bucket, rep) + return rep +} + +func (m *typeListMap) hash(ts []types.Type) uint32 { + if m == nil { + return 0 + } + // Some smallish prime far away from typeutil.Hash. + n := len(ts) + h := uint32(13619) + 2*uint32(n) + for i := 0; i < n; i++ { + h += 3 * m.hasher.Hash(ts[i]) + } + return h +} + +// instantiateMethod instantiates m with targs and returns a canonical representative for this method. +func (canon *canonizer) instantiateMethod(m *types.Func, targs []types.Type, ctxt *types.Context) *types.Func { + recv := recvType(m) + if p, ok := aliases.Unalias(recv).(*types.Pointer); ok { + recv = p.Elem() + } + named := aliases.Unalias(recv).(*types.Named) + inst, err := types.Instantiate(ctxt, named.Origin(), targs, false) + if err != nil { + panic(err) + } + rep := canon.Type(inst) + obj, _, _ := types.LookupFieldOrMethod(rep, true, m.Pkg(), m.Name()) + return obj.(*types.Func) +} + +// Exposed to ssautil using the linkname hack. +func isSyntactic(pkg *Package) bool { return pkg.syntax } + +// mapValues returns a new unordered array of map values. +func mapValues[K comparable, V any](m map[K]V) []V { + vals := make([]V, 0, len(m)) + for _, fn := range m { + vals = append(vals, fn) + } + return vals + +} diff --git a/vendor/golang.org/x/tools/go/ssa/util_go120.go b/vendor/golang.org/x/tools/go/ssa/util_go120.go new file mode 100644 index 0000000..9e8ea87 --- /dev/null +++ b/vendor/golang.org/x/tools/go/ssa/util_go120.go @@ -0,0 +1,17 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +//go:build go1.20 +// +build go1.20 + +package ssa + +import ( + "go/ast" + "go/token" +) + +func init() { + rangePosition = func(rng *ast.RangeStmt) token.Pos { return rng.Range } +} diff --git a/vendor/golang.org/x/tools/go/ssa/wrappers.go b/vendor/golang.org/x/tools/go/ssa/wrappers.go new file mode 100644 index 0000000..d09b4f2 --- /dev/null +++ b/vendor/golang.org/x/tools/go/ssa/wrappers.go @@ -0,0 +1,348 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package ssa + +// This file defines synthesis of Functions that delegate to declared +// methods; they come in three kinds: +// +// (1) wrappers: methods that wrap declared methods, performing +// implicit pointer indirections and embedded field selections. +// +// (2) thunks: funcs that wrap declared methods. Like wrappers, +// thunks perform indirections and field selections. The thunk's +// first parameter is used as the receiver for the method call. +// +// (3) bounds: funcs that wrap declared methods. The bound's sole +// free variable, supplied by a closure, is used as the receiver +// for the method call. No indirections or field selections are +// performed since they can be done before the call. + +import ( + "fmt" + + "go/token" + "go/types" + + "golang.org/x/tools/internal/typeparams" +) + +// -- wrappers ----------------------------------------------------------- + +// createWrapper returns a synthetic method that delegates to the +// declared method denoted by meth.Obj(), first performing any +// necessary pointer indirections or field selections implied by meth. +// +// The resulting method's receiver type is meth.Recv(). +// +// This function is versatile but quite subtle! Consider the +// following axes of variation when making changes: +// - optional receiver indirection +// - optional implicit field selections +// - meth.Obj() may denote a concrete or an interface method +// - the result may be a thunk or a wrapper. +func createWrapper(prog *Program, sel *selection) *Function { + obj := sel.obj.(*types.Func) // the declared function + sig := sel.typ.(*types.Signature) // type of this wrapper + + var recv *types.Var // wrapper's receiver or thunk's params[0] + name := obj.Name() + var description string + if sel.kind == types.MethodExpr { + name += "$thunk" + description = "thunk" + recv = sig.Params().At(0) + } else { + description = "wrapper" + recv = sig.Recv() + } + + description = fmt.Sprintf("%s for %s", description, sel.obj) + if prog.mode&LogSource != 0 { + defer logStack("create %s to (%s)", description, recv.Type())() + } + /* method wrapper */ + return &Function{ + name: name, + method: sel, + object: obj, + Signature: sig, + Synthetic: description, + Prog: prog, + pos: obj.Pos(), + // wrappers have no syntax + build: (*builder).buildWrapper, + syntax: nil, + info: nil, + goversion: "", + } +} + +// buildWrapper builds fn.Body for a method wrapper. 
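+//
+// For example (hypothetical), if a struct S embeds a type E with a value
+// method (E) String() string, the wrapper for (S) String loads the spilled
+// receiver, selects the embedded E field, and tail-calls (E).String.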
+func (b *builder) buildWrapper(fn *Function) { + var recv *types.Var // wrapper's receiver or thunk's params[0] + var start int // first regular param + if fn.method.kind == types.MethodExpr { + recv = fn.Signature.Params().At(0) + start = 1 + } else { + recv = fn.Signature.Recv() + } + + fn.startBody() + fn.addSpilledParam(recv) + createParams(fn, start) + + indices := fn.method.index + + var v Value = fn.Locals[0] // spilled receiver + if isPointer(fn.method.recv) { + v = emitLoad(fn, v) + + // For simple indirection wrappers, perform an informative nil-check: + // "value method (T).f called using nil *T pointer" + if len(indices) == 1 && !isPointer(recvType(fn.object)) { + var c Call + c.Call.Value = &Builtin{ + name: "ssa:wrapnilchk", + sig: types.NewSignature(nil, + types.NewTuple(anonVar(fn.method.recv), anonVar(tString), anonVar(tString)), + types.NewTuple(anonVar(fn.method.recv)), false), + } + c.Call.Args = []Value{ + v, + stringConst(typeparams.MustDeref(fn.method.recv).String()), + stringConst(fn.method.obj.Name()), + } + c.setType(v.Type()) + v = fn.emit(&c) + } + } + + // Invariant: v is a pointer, either + // value of *A receiver param, or + // address of A spilled receiver. + + // We use pointer arithmetic (FieldAddr possibly followed by + // Load) in preference to value extraction (Field possibly + // preceded by Load). + + v = emitImplicitSelections(fn, v, indices[:len(indices)-1], token.NoPos) + + // Invariant: v is a pointer, either + // value of implicit *C field, or + // address of implicit C field. + + var c Call + if r := recvType(fn.object); !types.IsInterface(r) { // concrete method + if !isPointer(r) { + v = emitLoad(fn, v) + } + c.Call.Value = fn.Prog.objectMethod(fn.object, b) + c.Call.Args = append(c.Call.Args, v) + } else { + c.Call.Method = fn.object + c.Call.Value = emitLoad(fn, v) // interface (possibly a typeparam) + } + for _, arg := range fn.Params[1:] { + c.Call.Args = append(c.Call.Args, arg) + } + emitTailCall(fn, &c) + fn.finishBody() +} + +// createParams creates parameters for wrapper method fn based on its +// Signature.Params, which do not include the receiver. +// start is the index of the first regular parameter to use. +func createParams(fn *Function, start int) { + tparams := fn.Signature.Params() + for i, n := start, tparams.Len(); i < n; i++ { + fn.addParamVar(tparams.At(i)) + } +} + +// -- bounds ----------------------------------------------------------- + +// createBound returns a bound method wrapper (or "bound"), a synthetic +// function that delegates to a concrete or interface method denoted +// by obj. The resulting function has no receiver, but has one free +// variable which will be used as the method's receiver in the +// tail-call. +// +// Use MakeClosure with such a wrapper to construct a bound method +// closure. e.g.: +// +// type T int or: type T interface { meth() } +// func (t T) meth() +// var t T +// f := t.meth +// f() // calls t.meth() +// +// f is a closure of a synthetic wrapper defined as if by: +// +// f := func() { return t.meth() } +// +// Unlike createWrapper, createBound need perform no indirection or field +// selections because that can be done before the closure is +// constructed. 
+func createBound(prog *Program, obj *types.Func) *Function { + description := fmt.Sprintf("bound method wrapper for %s", obj) + if prog.mode&LogSource != 0 { + defer logStack("%s", description)() + } + /* bound method wrapper */ + fn := &Function{ + name: obj.Name() + "$bound", + object: obj, + Signature: changeRecv(obj.Type().(*types.Signature), nil), // drop receiver + Synthetic: description, + Prog: prog, + pos: obj.Pos(), + // wrappers have no syntax + build: (*builder).buildBound, + syntax: nil, + info: nil, + goversion: "", + } + fn.FreeVars = []*FreeVar{{name: "recv", typ: recvType(obj), parent: fn}} // (cyclic) + return fn +} + +// buildBound builds fn.Body for a bound method closure. +func (b *builder) buildBound(fn *Function) { + fn.startBody() + createParams(fn, 0) + var c Call + + recv := fn.FreeVars[0] + if !types.IsInterface(recvType(fn.object)) { // concrete + c.Call.Value = fn.Prog.objectMethod(fn.object, b) + c.Call.Args = []Value{recv} + } else { + c.Call.Method = fn.object + c.Call.Value = recv // interface (possibly a typeparam) + } + for _, arg := range fn.Params { + c.Call.Args = append(c.Call.Args, arg) + } + emitTailCall(fn, &c) + fn.finishBody() +} + +// -- thunks ----------------------------------------------------------- + +// createThunk returns a thunk, a synthetic function that delegates to a +// concrete or interface method denoted by sel.obj. The resulting +// function has no receiver, but has an additional (first) regular +// parameter. +// +// Precondition: sel.kind == types.MethodExpr. +// +// type T int or: type T interface { meth() } +// func (t T) meth() +// f := T.meth +// var t T +// f(t) // calls t.meth() +// +// f is a synthetic wrapper defined as if by: +// +// f := func(t T) { return t.meth() } +func createThunk(prog *Program, sel *selection) *Function { + if sel.kind != types.MethodExpr { + panic(sel) + } + + fn := createWrapper(prog, sel) + if fn.Signature.Recv() != nil { + panic(fn) // unexpected receiver + } + + return fn +} + +func changeRecv(s *types.Signature, recv *types.Var) *types.Signature { + return types.NewSignature(recv, s.Params(), s.Results(), s.Variadic()) +} + +// A local version of *types.Selection. +// Needed for some additional control, such as creating a MethodExpr for an instantiation. +type selection struct { + kind types.SelectionKind + recv types.Type + typ types.Type + obj types.Object + index []int + indirect bool +} + +func toSelection(sel *types.Selection) *selection { + return &selection{ + kind: sel.Kind(), + recv: sel.Recv(), + typ: sel.Type(), + obj: sel.Obj(), + index: sel.Index(), + indirect: sel.Indirect(), + } +} + +// -- instantiations -------------------------------------------------- + +// buildInstantiationWrapper builds the body of an instantiation +// wrapper fn. The body calls the original generic function, +// bracketed by ChangeType conversions on its arguments and results. +func (b *builder) buildInstantiationWrapper(fn *Function) { + orig := fn.topLevelOrigin + sig := fn.Signature + + fn.startBody() + if sig.Recv() != nil { + fn.addParamVar(sig.Recv()) + } + createParams(fn, 0) + + // Create body. Add a call to origin generic function + // and make type changes between argument and parameters, + // as well as return values. + var c Call + c.Call.Value = orig + if res := orig.Signature.Results(); res.Len() == 1 { + c.typ = res.At(0).Type() + } else { + c.typ = res + } + + // parameter of instance becomes an argument to the call + // to the original generic function. 
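+ // The receiver, if any, is fn.Params[0]; argOffset shifts the remaining
+ // parameters so that they line up with orig's parameter list.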
+ argOffset := 0 + for i, arg := range fn.Params { + var typ types.Type + if i == 0 && sig.Recv() != nil { + typ = orig.Signature.Recv().Type() + argOffset = 1 + } else { + typ = orig.Signature.Params().At(i - argOffset).Type() + } + c.Call.Args = append(c.Call.Args, emitTypeCoercion(fn, arg, typ)) + } + + results := fn.emit(&c) + var ret Return + switch res := sig.Results(); res.Len() { + case 0: + // no results, do nothing. + case 1: + ret.Results = []Value{emitTypeCoercion(fn, results, res.At(0).Type())} + default: + for i := 0; i < sig.Results().Len(); i++ { + v := emitExtract(fn, results, i) + ret.Results = append(ret.Results, emitTypeCoercion(fn, v, res.At(i).Type())) + } + } + + fn.emit(&ret) + fn.currentBlock = nil + + fn.finishBody() +} |