author     Thomas Voss <mail@thomasvoss.com>  2024-09-13 13:01:48 +0200
committer  Thomas Voss <mail@thomasvoss.com>  2024-09-13 13:01:48 +0200
commit     548090e67f66acf84385c4152ca464e52d3e3319 (patch)
tree       9b6de528bd7b0aa63362fa83f5c8e6a97f68a5d8
parent     a1d809960bee74df19c7e5fc34ffd1e4757cfdcb (diff)
Migrate away from templ and towards html/template
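For context, the change replaces templ components (which are compiled to Go by `templ generate`) with the standard library's html/template package. A minimal, self-contained sketch of the html/template pattern being adopted; the type name Page and its T method are illustrative assumptions, not the site's actual API:

```go
package main

import (
	"html/template"
	"os"
)

// Page is a hypothetical view-model; T stands in for the site's translation
// helper and simply echoes the source string in this sketch.
type Page struct {
	Title string
}

func (Page) T(msg string) string { return msg }

func main() {
	const src = `<h1>{{.T "Welcome"}}</h1><p>{{.Title}}</p>`
	tmpl := template.Must(template.New("index").Parse(src))
	// Execute renders the template with automatic, contextual HTML escaping.
	if err := tmpl.Execute(os.Stdout, Page{Title: "euro-cash.eu"}); err != nil {
		panic(err)
	}
}
```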
-rw-r--r--  .exrc  2
-rw-r--r--  .gitignore  7
-rw-r--r--  Makefile  26
-rw-r--r--  cmd/exttmpl/main.go  200
-rw-r--r--  go.mod  7
-rw-r--r--  go.sum  10
-rw-r--r--  lib/countries.go  46
-rw-r--r--  lib/locales/bg/messages.gotext.json  570
-rw-r--r--  lib/locales/el/messages.gotext.json  570
-rw-r--r--  lib/locales/en/messages.gotext.json  748
-rw-r--r--  lib/locales/nl/messages.gotext.json  570
-rw-r--r--  main.go  188
-rw-r--r--  rosetta/bg/messages.gotext.json  285
-rw-r--r--  rosetta/el/messages.gotext.json  285
-rw-r--r--  rosetta/en/messages.gotext.json  397
-rw-r--r--  rosetta/nl/messages.gotext.json  285
-rw-r--r--  src/countries.go  46
-rw-r--r--  src/email/email.go (renamed from lib/email/email.go)  5
-rw-r--r--  src/http.go  190
-rw-r--r--  src/i18n.go (renamed from lib/i18n.go)  27
-rw-r--r--  src/mintage/parser.go (renamed from lib/mintage/parser.go)  0
-rw-r--r--  src/mintage/parser_test.go (renamed from lib/mintage/parser_test.go)  0
-rw-r--r--  src/rosetta/bg/messages.gotext.json  315
-rw-r--r--  src/rosetta/el/messages.gotext.json  315
-rw-r--r--  src/rosetta/en/messages.gotext.json  439
-rw-r--r--  src/rosetta/nl/messages.gotext.json  315
-rw-r--r--  src/templates.go  56
-rw-r--r--  src/templates/404.html.tmpl  11
-rw-r--r--  src/templates/about.html.tmpl  41
-rw-r--r--  src/templates/base.html.tmpl  43
-rw-r--r--  src/templates/error.html.tmpl  14
-rw-r--r--  src/templates/index.html.tmpl  18
-rw-r--r--  src/templates/language.html.tmpl  48
-rw-r--r--  src/templates/navbar.html.tmpl  228
-rw-r--r--  template.old/404.templ (renamed from template/404.templ)  0
-rw-r--r--  template.old/about.templ (renamed from template/about.templ)  0
-rw-r--r--  template.old/base.templ (renamed from template/base.templ)  0
-rw-r--r--  template.old/coins.templ (renamed from template/coins.templ)  0
-rw-r--r--  template.old/coins_designs.templ (renamed from template/coins_designs.templ)  0
-rw-r--r--  template.old/coins_designs_nl.templ (renamed from template/coins_designs_nl.templ)  2
-rw-r--r--  template.old/coins_mintages.templ (renamed from template/coins_mintages.templ)  0
-rw-r--r--  template.old/error.templ (renamed from template/error.templ)  0
-rw-r--r--  template.old/jargon.templ (renamed from template/jargon.templ)  0
-rw-r--r--  template.old/language.templ (renamed from template/language.templ)  0
-rw-r--r--  template.old/navbar.templ (renamed from template/navbar.templ)  0
-rw-r--r--  template.old/root.templ (renamed from template/root.templ)  0
-rw-r--r--  template/base.go  3
-rw-r--r--  vendor/github.com/a-h/templ/.dockerignore  3
-rw-r--r--  vendor/github.com/a-h/templ/.gitignore  28
-rw-r--r--  vendor/github.com/a-h/templ/.goreleaser.yaml  72
-rw-r--r--  vendor/github.com/a-h/templ/.ignore  7
-rw-r--r--  vendor/github.com/a-h/templ/.version  1
-rw-r--r--  vendor/github.com/a-h/templ/CODE_OF_CONDUCT.md  128
-rw-r--r--  vendor/github.com/a-h/templ/CONTRIBUTING.md  244
-rw-r--r--  vendor/github.com/a-h/templ/LICENSE  21
-rw-r--r--  vendor/github.com/a-h/templ/README.md  171
-rw-r--r--  vendor/github.com/a-h/templ/SECURITY.md  9
-rw-r--r--  vendor/github.com/a-h/templ/cosign.pub  4
-rw-r--r--  vendor/github.com/a-h/templ/flake.lock  140
-rw-r--r--  vendor/github.com/a-h/templ/flake.nix  93
-rw-r--r--  vendor/github.com/a-h/templ/flush.go  36
-rw-r--r--  vendor/github.com/a-h/templ/gomod2nix.toml  90
-rw-r--r--  vendor/github.com/a-h/templ/handler.go  102
-rw-r--r--  vendor/github.com/a-h/templ/ide-demo.gif  bin 544148 -> 0 bytes
-rw-r--r--  vendor/github.com/a-h/templ/jsonscript.go  85
-rw-r--r--  vendor/github.com/a-h/templ/jsonstring.go  14
-rw-r--r--  vendor/github.com/a-h/templ/once.go  64
-rw-r--r--  vendor/github.com/a-h/templ/push-tag.sh  14
-rw-r--r--  vendor/github.com/a-h/templ/runtime.go  855
-rw-r--r--  vendor/github.com/a-h/templ/runtime/buffer.go  62
-rw-r--r--  vendor/github.com/a-h/templ/runtime/bufferpool.go  38
-rw-r--r--  vendor/github.com/a-h/templ/runtime/builder.go  8
-rw-r--r--  vendor/github.com/a-h/templ/runtime/runtime.go  21
-rw-r--r--  vendor/github.com/a-h/templ/safehtml/style.go  168
-rw-r--r--  vendor/github.com/a-h/templ/templ.png  bin 15528 -> 0 bytes
-rw-r--r--  vendor/github.com/a-h/templ/url.go  20
-rw-r--r--  vendor/github.com/a-h/templ/version.go  10
-rw-r--r--  vendor/golang.org/x/mod/LICENSE  27
-rw-r--r--  vendor/golang.org/x/mod/PATENTS  22
-rw-r--r--  vendor/golang.org/x/mod/semver/semver.go  401
-rw-r--r--  vendor/golang.org/x/sync/LICENSE  27
-rw-r--r--  vendor/golang.org/x/sync/PATENTS  22
-rw-r--r--  vendor/golang.org/x/sync/errgroup/errgroup.go  135
-rw-r--r--  vendor/golang.org/x/sync/errgroup/go120.go  13
-rw-r--r--  vendor/golang.org/x/sync/errgroup/pre_go120.go  14
-rw-r--r--  vendor/golang.org/x/text/internal/gen/code.go  375
-rw-r--r--  vendor/golang.org/x/text/internal/gen/gen.go  354
-rw-r--r--  vendor/golang.org/x/text/message/pipeline/extract.go  821
-rw-r--r--  vendor/golang.org/x/text/message/pipeline/generate.go  329
-rw-r--r--  vendor/golang.org/x/text/message/pipeline/message.go  241
-rw-r--r--  vendor/golang.org/x/text/message/pipeline/pipeline.go  422
-rw-r--r--  vendor/golang.org/x/text/message/pipeline/rewrite.go  268
-rw-r--r--  vendor/golang.org/x/text/runes/cond.go  187
-rw-r--r--  vendor/golang.org/x/text/runes/runes.go  355
-rw-r--r--  vendor/golang.org/x/text/unicode/cldr/base.go  105
-rw-r--r--  vendor/golang.org/x/text/unicode/cldr/cldr.go  137
-rw-r--r--  vendor/golang.org/x/text/unicode/cldr/collate.go  363
-rw-r--r--  vendor/golang.org/x/text/unicode/cldr/decode.go  171
-rw-r--r--  vendor/golang.org/x/text/unicode/cldr/resolve.go  602
-rw-r--r--  vendor/golang.org/x/text/unicode/cldr/slice.go  144
-rw-r--r--  vendor/golang.org/x/text/unicode/cldr/xml.go  1494
-rw-r--r--  vendor/golang.org/x/tools/LICENSE  27
-rw-r--r--  vendor/golang.org/x/tools/PATENTS  22
-rw-r--r--  vendor/golang.org/x/tools/go/ast/astutil/enclosing.go  654
-rw-r--r--  vendor/golang.org/x/tools/go/ast/astutil/imports.go  485
-rw-r--r--  vendor/golang.org/x/tools/go/ast/astutil/rewrite.go  486
-rw-r--r--  vendor/golang.org/x/tools/go/ast/astutil/util.go  19
-rw-r--r--  vendor/golang.org/x/tools/go/buildutil/allpackages.go  195
-rw-r--r--  vendor/golang.org/x/tools/go/buildutil/fakecontext.go  111
-rw-r--r--  vendor/golang.org/x/tools/go/buildutil/overlay.go  101
-rw-r--r--  vendor/golang.org/x/tools/go/buildutil/tags.go  100
-rw-r--r--  vendor/golang.org/x/tools/go/buildutil/util.go  209
-rw-r--r--  vendor/golang.org/x/tools/go/callgraph/callgraph.go  129
-rw-r--r--  vendor/golang.org/x/tools/go/callgraph/cha/cha.go  164
-rw-r--r--  vendor/golang.org/x/tools/go/callgraph/util.go  180
-rw-r--r--  vendor/golang.org/x/tools/go/gcexportdata/gcexportdata.go  186
-rw-r--r--  vendor/golang.org/x/tools/go/gcexportdata/importer.go  75
-rw-r--r--  vendor/golang.org/x/tools/go/internal/cgo/cgo.go  219
-rw-r--r--  vendor/golang.org/x/tools/go/internal/cgo/cgo_pkgconfig.go  42
-rw-r--r--  vendor/golang.org/x/tools/go/loader/doc.go  202
-rw-r--r--  vendor/golang.org/x/tools/go/loader/loader.go  1066
-rw-r--r--  vendor/golang.org/x/tools/go/loader/util.go  123
-rw-r--r--  vendor/golang.org/x/tools/go/packages/doc.go  242
-rw-r--r--  vendor/golang.org/x/tools/go/packages/external.go  156
-rw-r--r--  vendor/golang.org/x/tools/go/packages/golist.go  1066
-rw-r--r--  vendor/golang.org/x/tools/go/packages/golist_overlay.go  83
-rw-r--r--  vendor/golang.org/x/tools/go/packages/loadmode_string.go  57
-rw-r--r--  vendor/golang.org/x/tools/go/packages/packages.go  1515
-rw-r--r--  vendor/golang.org/x/tools/go/packages/visit.go  68
-rw-r--r--  vendor/golang.org/x/tools/go/ssa/TODO  16
-rw-r--r--  vendor/golang.org/x/tools/go/ssa/block.go  113
-rw-r--r--  vendor/golang.org/x/tools/go/ssa/blockopt.go  183
-rw-r--r--  vendor/golang.org/x/tools/go/ssa/builder.go  3276
-rw-r--r--  vendor/golang.org/x/tools/go/ssa/const.go  232
-rw-r--r--  vendor/golang.org/x/tools/go/ssa/coretype.go  161
-rw-r--r--  vendor/golang.org/x/tools/go/ssa/create.go  318
-rw-r--r--  vendor/golang.org/x/tools/go/ssa/doc.go  122
-rw-r--r--  vendor/golang.org/x/tools/go/ssa/dom.go  340
-rw-r--r--  vendor/golang.org/x/tools/go/ssa/emit.go  614
-rw-r--r--  vendor/golang.org/x/tools/go/ssa/func.go  816
-rw-r--r--  vendor/golang.org/x/tools/go/ssa/instantiate.go  131
-rw-r--r--  vendor/golang.org/x/tools/go/ssa/lift.go  688
-rw-r--r--  vendor/golang.org/x/tools/go/ssa/lvalue.go  155
-rw-r--r--  vendor/golang.org/x/tools/go/ssa/methods.go  281
-rw-r--r--  vendor/golang.org/x/tools/go/ssa/mode.go  111
-rw-r--r--  vendor/golang.org/x/tools/go/ssa/print.go  470
-rw-r--r--  vendor/golang.org/x/tools/go/ssa/sanity.go  560
-rw-r--r--  vendor/golang.org/x/tools/go/ssa/source.go  288
-rw-r--r--  vendor/golang.org/x/tools/go/ssa/ssa.go  1871
-rw-r--r--  vendor/golang.org/x/tools/go/ssa/ssautil/load.go  214
-rw-r--r--  vendor/golang.org/x/tools/go/ssa/ssautil/switch.go  230
-rw-r--r--  vendor/golang.org/x/tools/go/ssa/ssautil/visit.go  157
-rw-r--r--  vendor/golang.org/x/tools/go/ssa/subst.go  642
-rw-r--r--  vendor/golang.org/x/tools/go/ssa/task.go  103
-rw-r--r--  vendor/golang.org/x/tools/go/ssa/util.go  430
-rw-r--r--  vendor/golang.org/x/tools/go/ssa/util_go120.go  17
-rw-r--r--  vendor/golang.org/x/tools/go/ssa/wrappers.go  348
-rw-r--r--  vendor/golang.org/x/tools/go/types/objectpath/objectpath.go  788
-rw-r--r--  vendor/golang.org/x/tools/go/types/typeutil/callee.go  69
-rw-r--r--  vendor/golang.org/x/tools/go/types/typeutil/imports.go  30
-rw-r--r--  vendor/golang.org/x/tools/go/types/typeutil/map.go  518
-rw-r--r--  vendor/golang.org/x/tools/go/types/typeutil/methodsetcache.go  73
-rw-r--r--  vendor/golang.org/x/tools/go/types/typeutil/ui.go  55
-rw-r--r--  vendor/golang.org/x/tools/internal/aliases/aliases.go  32
-rw-r--r--  vendor/golang.org/x/tools/internal/aliases/aliases_go121.go  35
-rw-r--r--  vendor/golang.org/x/tools/internal/aliases/aliases_go122.go  99
-rw-r--r--  vendor/golang.org/x/tools/internal/event/core/event.go  85
-rw-r--r--  vendor/golang.org/x/tools/internal/event/core/export.go  70
-rw-r--r--  vendor/golang.org/x/tools/internal/event/core/fast.go  77
-rw-r--r--  vendor/golang.org/x/tools/internal/event/doc.go  7
-rw-r--r--  vendor/golang.org/x/tools/internal/event/event.go  127
-rw-r--r--  vendor/golang.org/x/tools/internal/event/keys/keys.go  564
-rw-r--r--  vendor/golang.org/x/tools/internal/event/keys/standard.go  22
-rw-r--r--  vendor/golang.org/x/tools/internal/event/keys/util.go  21
-rw-r--r--  vendor/golang.org/x/tools/internal/event/label/label.go  215
-rw-r--r--  vendor/golang.org/x/tools/internal/gcimporter/bimport.go  150
-rw-r--r--  vendor/golang.org/x/tools/internal/gcimporter/exportdata.go  99
-rw-r--r--  vendor/golang.org/x/tools/internal/gcimporter/gcimporter.go  266
-rw-r--r--  vendor/golang.org/x/tools/internal/gcimporter/iexport.go  1332
-rw-r--r--  vendor/golang.org/x/tools/internal/gcimporter/iimport.go  1100
-rw-r--r--  vendor/golang.org/x/tools/internal/gcimporter/newInterface10.go  22
-rw-r--r--  vendor/golang.org/x/tools/internal/gcimporter/newInterface11.go  14
-rw-r--r--  vendor/golang.org/x/tools/internal/gcimporter/support_go118.go  34
-rw-r--r--  vendor/golang.org/x/tools/internal/gcimporter/unified_no.go  10
-rw-r--r--  vendor/golang.org/x/tools/internal/gcimporter/unified_yes.go  10
-rw-r--r--  vendor/golang.org/x/tools/internal/gcimporter/ureader_yes.go  728
-rw-r--r--  vendor/golang.org/x/tools/internal/gocommand/invoke.go  555
-rw-r--r--  vendor/golang.org/x/tools/internal/gocommand/vendor.go  163
-rw-r--r--  vendor/golang.org/x/tools/internal/gocommand/version.go  71
-rw-r--r--  vendor/golang.org/x/tools/internal/packagesinternal/packages.go  22
-rw-r--r--  vendor/golang.org/x/tools/internal/pkgbits/codes.go  77
-rw-r--r--  vendor/golang.org/x/tools/internal/pkgbits/decoder.go  517
-rw-r--r--  vendor/golang.org/x/tools/internal/pkgbits/doc.go  32
-rw-r--r--  vendor/golang.org/x/tools/internal/pkgbits/encoder.go  383
-rw-r--r--  vendor/golang.org/x/tools/internal/pkgbits/flags.go  9
-rw-r--r--  vendor/golang.org/x/tools/internal/pkgbits/frames_go1.go  21
-rw-r--r--  vendor/golang.org/x/tools/internal/pkgbits/frames_go17.go  28
-rw-r--r--  vendor/golang.org/x/tools/internal/pkgbits/reloc.go  42
-rw-r--r--  vendor/golang.org/x/tools/internal/pkgbits/support.go  17
-rw-r--r--  vendor/golang.org/x/tools/internal/pkgbits/sync.go  113
-rw-r--r--  vendor/golang.org/x/tools/internal/pkgbits/syncmarker_string.go  89
-rw-r--r--  vendor/golang.org/x/tools/internal/stdlib/manifest.go  17431
-rw-r--r--  vendor/golang.org/x/tools/internal/stdlib/stdlib.go  97
-rw-r--r--  vendor/golang.org/x/tools/internal/tokeninternal/tokeninternal.go  137
-rw-r--r--  vendor/golang.org/x/tools/internal/typeparams/common.go  142
-rw-r--r--  vendor/golang.org/x/tools/internal/typeparams/coretype.go  150
-rw-r--r--  vendor/golang.org/x/tools/internal/typeparams/free.go  120
-rw-r--r--  vendor/golang.org/x/tools/internal/typeparams/normalize.go  218
-rw-r--r--  vendor/golang.org/x/tools/internal/typeparams/termlist.go  163
-rw-r--r--  vendor/golang.org/x/tools/internal/typeparams/typeterm.go  169
-rw-r--r--  vendor/golang.org/x/tools/internal/typesinternal/errorcode.go  1560
-rw-r--r--  vendor/golang.org/x/tools/internal/typesinternal/errorcode_string.go  179
-rw-r--r--  vendor/golang.org/x/tools/internal/typesinternal/recv.go  43
-rw-r--r--  vendor/golang.org/x/tools/internal/typesinternal/toonew.go  89
-rw-r--r--  vendor/golang.org/x/tools/internal/typesinternal/types.go  65
-rw-r--r--  vendor/golang.org/x/tools/internal/versions/constraint.go  13
-rw-r--r--  vendor/golang.org/x/tools/internal/versions/constraint_go121.go  14
-rw-r--r--  vendor/golang.org/x/tools/internal/versions/features.go  43
-rw-r--r--  vendor/golang.org/x/tools/internal/versions/gover.go  172
-rw-r--r--  vendor/golang.org/x/tools/internal/versions/toolchain.go  14
-rw-r--r--  vendor/golang.org/x/tools/internal/versions/toolchain_go119.go  14
-rw-r--r--  vendor/golang.org/x/tools/internal/versions/toolchain_go120.go  14
-rw-r--r--  vendor/golang.org/x/tools/internal/versions/toolchain_go121.go  14
-rw-r--r--  vendor/golang.org/x/tools/internal/versions/types.go  19
-rw-r--r--  vendor/golang.org/x/tools/internal/versions/types_go121.go  30
-rw-r--r--  vendor/golang.org/x/tools/internal/versions/types_go122.go  41
-rw-r--r--  vendor/golang.org/x/tools/internal/versions/versions.go  57
-rw-r--r--  vendor/modules.txt  43
228 files changed, 61338 insertions, 5222 deletions
diff --git a/.exrc b/.exrc
index 011938b..252d632 100644
--- a/.exrc
+++ b/.exrc
@@ -13,6 +13,8 @@ endfunction
autocmd BufNewFile,BufRead */data/mintages/*
\ setfiletype mintage
\ | setlocal nowrap
+autocmd BufNewFile,BufRead */cmd/exttmpl/*
+ \ setlocal makeprg=go\ build\ ./cmd/exttmpl
autocmd BufNewFile,BufRead */cmd/mfmt/*
\ setlocal makeprg=go\ build\ ./cmd/mfmt
diff --git a/.gitignore b/.gitignore
index e58bfc9..36f48ee 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,6 +1,11 @@
+# Binaries
euro-cash.eu
+exttmpl
mfmt
-!cmd/mfmt
+
+!cmd/*
+
+# Autogenerated files
out.gotext.json
*.gen.*
*_templ.go
diff --git a/Makefile b/Makefile
index b8a4aea..f867cfa 100644
--- a/Makefile
+++ b/Makefile
@@ -1,13 +1,26 @@
-# Generating translations is rather slow; so don’t do that by default
-all:
- TEMPL_EXPERIMENT=rawgo go generate ./template
+templates = $(shell find src/templates -name '*.tmpl')
+gofiles = $(shell find main.go src -name '*.go')
+
+exttmpl = $(wildcard cmd/exttmpl/*.go)
+mfmt = $(wildcard cmd/mfmt/*.go)
+
+all: euro-cash.eu exttmpl mfmt
+
+euro-cash.eu: $(templates) $(gofiles)
go build
-all-i18n:
- TEMPL_EXPERIMENT=rawgo go generate ./template ./lib
+# Generating translations is rather slow; so don’t do that by default
+all-i18n: exttmpl
+ go generate ./src
find . -name out.gotext.json | mcp -b sed s/out/messages/
go build
+exttmpl: $(exttmpl)
+ go build ./cmd/exttmpl
+
+mfmt: $(mfmt)
+ go build ./cmd/mfmt
+
watch:
ls euro-cash.eu | entr -r ./euro-cash.eu -no-email -port $${PORT:-8080}
@@ -16,3 +29,6 @@ watch:
release: all-i18n
[ -n "$$GOOS" -a -n "$$GOARCH" ]
tar -cf euro-cash.eu-$$GOOS-$$GOARCH.tar.gz euro-cash.eu data/ static/
+
+clean:
+ rm -f euro-cash.eu
diff --git a/cmd/exttmpl/main.go b/cmd/exttmpl/main.go
new file mode 100644
index 0000000..ca7c15f
--- /dev/null
+++ b/cmd/exttmpl/main.go
@@ -0,0 +1,200 @@
+package main
+
+import (
+ "fmt"
+ "os"
+ "path/filepath"
+ "slices"
+ "text/template/parse"
+
+ "golang.org/x/text/language"
+ "golang.org/x/text/message/pipeline"
+ "golang.org/x/tools/go/packages"
+)
+
+const (
+ pkgbase = "git.thomasvoss.com/euro-cash.eu"
+ srclang = "en"
+ srcdir = "./src"
+ transdir = srcdir + "/rosetta"
+ outfile = "catalog.gen.go"
+ transfn = "T"
+)
+
+func main() {
+ /* cd to the project root directory */
+ try(os.Chdir(filepath.Dir(os.Args[0])))
+
+ pkgnames := packageList(".")
+
+ var paths []string
+ pkgs := try2(packages.Load(&packages.Config{
+ Mode: packages.NeedFiles | packages.NeedEmbedFiles,
+ }, pkgnames...))
+
+ for _, pkg := range pkgs {
+ if len(pkg.Errors) != 0 {
+ for _, err := range pkg.Errors {
+ warn(err.Msg)
+ }
+ os.Exit(1)
+ }
+ for _, f := range pkg.EmbedFiles {
+ if filepath.Ext(f) == ".tmpl" {
+ paths = append(paths, f)
+ }
+ }
+ }
+
+ msgs := make([]pipeline.Message, 0, 1024)
+ for _, path := range paths {
+ f := try2(os.ReadFile(path))
+ trees := make(map[string]*parse.Tree)
+ t := parse.New("name")
+ t.Mode |= parse.SkipFuncCheck
+ try2(t.Parse(string(f), "", "", trees))
+ for _, t := range trees {
+ process(&msgs, t.Root)
+ }
+ }
+
+ pconf := &pipeline.Config{
+ Supported: languages(),
+ SourceLanguage: language.Make(srclang),
+ Packages: pkgnames,
+ Dir: transdir,
+ GenFile: outfile,
+ GenPackage: srcdir,
+ }
+
+ state := try2(pipeline.Extract(pconf))
+ state.Extracted.Messages = append(state.Extracted.Messages, msgs...)
+
+ try(state.Import())
+ try(state.Merge())
+ try(state.Export())
+ try(state.Generate())
+}
+
+func process(tmplMsgs *[]pipeline.Message, node parse.Node) {
+ switch node.Type() {
+ case parse.NodeList:
+ if ln, ok := node.(*parse.ListNode); ok {
+ for _, n := range ln.Nodes {
+ process(tmplMsgs, n)
+ }
+ }
+ case parse.NodeIf:
+ if in, ok := node.(*parse.IfNode); ok {
+ process(tmplMsgs, in.List)
+ if in.ElseList != nil {
+ process(tmplMsgs, in.ElseList)
+ }
+ }
+ case parse.NodeWith:
+ if wn, ok := node.(*parse.WithNode); ok {
+ process(tmplMsgs, wn.List)
+ if wn.ElseList != nil {
+ process(tmplMsgs, wn.ElseList)
+ }
+ }
+ case parse.NodeRange:
+ if rn, ok := node.(*parse.RangeNode); ok {
+ process(tmplMsgs, rn.List)
+ if rn.ElseList != nil {
+ process(tmplMsgs, rn.ElseList)
+ }
+ }
+ case parse.NodeAction:
+ an, ok := node.(*parse.ActionNode)
+ if !ok {
+ break
+ }
+
+ for _, cmd := range an.Pipe.Cmds {
+ if !hasIndent(cmd, transfn) {
+ continue
+ }
+ for _, arg := range cmd.Args {
+ if arg.Type() != parse.NodeString {
+ continue
+ }
+ if sn, ok := arg.(*parse.StringNode); ok {
+ *tmplMsgs = append(*tmplMsgs, pipeline.Message{
+ ID: pipeline.IDList{sn.Text},
+ Key: sn.Text,
+ Message: pipeline.Text{
+ Msg: sn.Text,
+ },
+ })
+ }
+ }
+ }
+ }
+}
+
+func hasIndent(cmd *parse.CommandNode, s string) bool {
+ if len(cmd.Args) == 0 {
+ return false
+ }
+ arg := cmd.Args[0]
+ var idents []string
+ switch arg.Type() {
+ case parse.NodeField:
+ idents = arg.(*parse.FieldNode).Ident
+ case parse.NodeVariable:
+ idents = arg.(*parse.VariableNode).Ident
+ }
+ return slices.Contains(idents, s)
+}
+
+func packageList(path string) []string {
+ ents := try2(os.ReadDir(path))
+ xs := make([]string, 0, len(ents))
+ foundOne := false
+ for _, ent := range ents {
+ switch {
+ case filepath.Ext(ent.Name()) == ".go":
+ if !foundOne {
+ xs = append(xs, pkgbase+"/"+path)
+ foundOne = true
+ }
+ case !ent.IsDir(), ent.Name() == "cmd", ent.Name() == "vendor":
+ continue
+ default:
+ xs = append(xs, packageList(path+"/"+ent.Name())...)
+ }
+ }
+ return xs
+}
+
+func languages() []language.Tag {
+ ents := try2(os.ReadDir(transdir))
+ tags := make([]language.Tag, len(ents))
+ for i, e := range ents {
+ tags[i] = language.MustParse(e.Name())
+ }
+ return tags
+}
+
+func try(err error) {
+ if err != nil {
+ die(err)
+ }
+}
+
+func try2[T any](val T, err error) T {
+ if err != nil {
+ die(err)
+ }
+ return val
+}
+
+func warn(err any) {
+ fmt.Fprintf(os.Stderr, "%s: %s\n", filepath.Base(os.Args[0]), err)
+}
+
+func die(err any) {
+ warn(err)
+ os.Exit(1)
+}
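The new cmd/exttmpl above walks the parsed template trees, collects string literals passed to the translation helper T, and feeds them into golang.org/x/text/message/pipeline, which writes the per-locale catalogs and a generated catalog.gen.go. A hedged sketch of how such a generated catalog is typically consumed at runtime; the blank import path is a placeholder for whichever package holds the generated file, and the message ID "Andorra" is taken from the locale files in this commit:

```go
package main

import (
	"fmt"

	"golang.org/x/text/language"
	"golang.org/x/text/message"
	// _ "git.thomasvoss.com/euro-cash.eu/src" // hypothetical: package containing catalog.gen.go,
	// whose init registers the translations with message.DefaultCatalog
)

func main() {
	// With the Bulgarian catalog registered, "Andorra" renders as "Андора";
	// without it, the source-language string is returned unchanged.
	p := message.NewPrinter(language.MustParse("bg"))
	fmt.Println(p.Sprintf("Andorra"))
}
```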
diff --git a/go.mod b/go.mod
index 11b833c..53d5d8c 100644
--- a/go.mod
+++ b/go.mod
@@ -4,4 +4,9 @@ go 1.23
require golang.org/x/text v0.17.0
-require github.com/a-h/templ v0.2.747
+require golang.org/x/tools v0.24.0
+
+require (
+ golang.org/x/mod v0.20.0 // indirect
+ golang.org/x/sync v0.8.0 // indirect
+)
diff --git a/go.sum b/go.sum
index 336985b..410c3e6 100644
--- a/go.sum
+++ b/go.sum
@@ -1,6 +1,8 @@
-github.com/a-h/templ v0.2.747 h1:D0dQ2lxC3W7Dxl6fxQ/1zZHBQslSkTSvl5FxP/CfdKg=
-github.com/a-h/templ v0.2.747/go.mod h1:69ObQIbrcuwPCU32ohNaWce3Cb7qM5GMiqN1K+2yop4=
-github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI=
-github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
+golang.org/x/mod v0.20.0 h1:utOm6MM3R3dnawAiJgn0y+xvuYRsm1RKM/4giyfDgV0=
+golang.org/x/mod v0.20.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
+golang.org/x/sync v0.8.0 h1:3NFvSEYkUoMifnESzZl15y791HH1qU2xm6eCJU5ZPXQ=
+golang.org/x/sync v0.8.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/text v0.17.0 h1:XtiM5bkSOt+ewxlOE/aE/AKEHibwj/6gvWMl9Rsh0Qc=
golang.org/x/text v0.17.0/go.mod h1:BuEKDfySbSR4drPmRPG/7iBdf8hvFMuRexcpahXilzY=
+golang.org/x/tools v0.24.0 h1:J1shsA93PJUEVaUSaay7UXAyE8aimq3GW0pjlolpa24=
+golang.org/x/tools v0.24.0/go.mod h1:YhNqVBIfWHdzvTLs0d8LCuMhkKUgSUKldakyV7W/WDQ=
diff --git a/lib/countries.go b/lib/countries.go
deleted file mode 100644
index 565f61c..0000000
--- a/lib/countries.go
+++ /dev/null
@@ -1,46 +0,0 @@
-package lib
-
-import (
- "slices"
-
- "golang.org/x/text/collate"
- "golang.org/x/text/language"
-)
-
-type Country struct {
- Code, Name string
-}
-
-func SortedCountries(p Printer) []Country {
- xs := []Country{
- {Code: "ad", Name: p.T("Andorra")},
- {Code: "at", Name: p.T("Austria")},
- {Code: "be", Name: p.T("Belgium")},
- {Code: "cy", Name: p.T("Cyprus")},
- {Code: "de", Name: p.T("Germany")},
- {Code: "ee", Name: p.T("Estonia")},
- {Code: "es", Name: p.T("Spain")},
- {Code: "fi", Name: p.T("Finland")},
- {Code: "fr", Name: p.T("France")},
- {Code: "gr", Name: p.T("Greece")},
- {Code: "hr", Name: p.T("Croatia")},
- {Code: "ie", Name: p.T("Ireland")},
- {Code: "it", Name: p.T("Italy")},
- {Code: "lt", Name: p.T("Lithuania")},
- {Code: "lu", Name: p.T("Luxembourg")},
- {Code: "lv", Name: p.T("Latvia")},
- {Code: "mc", Name: p.T("Monaco")},
- {Code: "mt", Name: p.T("Malta")},
- {Code: "nl", Name: p.T("Netherlands")},
- {Code: "pt", Name: p.T("Portugal")},
- {Code: "si", Name: p.T("Slovenia")},
- {Code: "sk", Name: p.T("Slovakia")},
- {Code: "sm", Name: p.T("San Marino")},
- {Code: "va", Name: p.T("Vatican City")},
- }
- c := collate.New(language.MustParse(p.Locale.Bcp))
- slices.SortFunc(xs, func(x, y Country) int {
- return c.CompareString(x.Name, y.Name)
- })
- return xs
-}
diff --git a/lib/locales/bg/messages.gotext.json b/lib/locales/bg/messages.gotext.json
deleted file mode 100644
index 103b1a2..0000000
--- a/lib/locales/bg/messages.gotext.json
+++ /dev/null
@@ -1,570 +0,0 @@
-{
- "language": "bg",
- "messages": [
- {
- "id": "Andorra",
- "message": "Andorra",
- "translation": "Андора"
- },
- {
- "id": "Austria",
- "message": "Austria",
- "translation": "Австрия"
- },
- {
- "id": "Belgium",
- "message": "Belgium",
- "translation": "Белгия"
- },
- {
- "id": "Cyprus",
- "message": "Cyprus",
- "translation": "Кипър"
- },
- {
- "id": "Germany",
- "message": "Germany",
- "translation": "Германия"
- },
- {
- "id": "Estonia",
- "message": "Estonia",
- "translation": "Естония"
- },
- {
- "id": "Spain",
- "message": "Spain",
- "translation": "Испания"
- },
- {
- "id": "Finland",
- "message": "Finland",
- "translation": "Финландия"
- },
- {
- "id": "France",
- "message": "France",
- "translation": "Франция"
- },
- {
- "id": "Greece",
- "message": "Greece",
- "translation": "Гърция"
- },
- {
- "id": "Croatia",
- "message": "Croatia",
- "translation": "Хърватия"
- },
- {
- "id": "Ireland",
- "message": "Ireland",
- "translation": "Ирландия"
- },
- {
- "id": "Italy",
- "message": "Italy",
- "translation": "Италия"
- },
- {
- "id": "Lithuania",
- "message": "Lithuania",
- "translation": "Литва"
- },
- {
- "id": "Luxembourg",
- "message": "Luxembourg",
- "translation": "Люксембург"
- },
- {
- "id": "Latvia",
- "message": "Latvia",
- "translation": "Латвия"
- },
- {
- "id": "Monaco",
- "message": "Monaco",
- "translation": "Монако"
- },
- {
- "id": "Malta",
- "message": "Malta",
- "translation": "Малта"
- },
- {
- "id": "Netherlands",
- "message": "Netherlands",
- "translation": "Нидерландия"
- },
- {
- "id": "Portugal",
- "message": "Portugal",
- "translation": "Португалия"
- },
- {
- "id": "Slovenia",
- "message": "Slovenia",
- "translation": "Словения"
- },
- {
- "id": "Slovakia",
- "message": "Slovakia",
- "translation": "Словакия"
- },
- {
- "id": "San Marino",
- "message": "San Marino",
- "translation": "Сан Марино"
- },
- {
- "id": "Vatican City",
- "message": "Vatican City",
- "translation": "Ватикана"
- },
- {
- "id": "Page Not Found",
- "message": "Page Not Found",
- "translation": ""
- },
- {
- "id": "The page you were looking for does not exist. If you believe this is a mistake then don’t hesitate to contact @onetruemangoman on Discord or to email us at {ContactEmail}.",
- "message": "The page you were looking for does not exist. If you believe this is a mistake then don’t hesitate to contact @onetruemangoman on Discord or to email us at {ContactEmail}.",
- "translation": "",
- "placeholders": [
- {
- "id": "ContactEmail",
- "string": "%[1]s",
- "type": "string",
- "underlyingType": "string",
- "argNum": 1,
- "expr": "contactEmail"
- }
- ]
- },
- {
- "id": "About Us",
- "message": "About Us",
- "translation": ""
- },
- {
- "id": "Open Source",
- "message": "Open Source",
- "translation": ""
- },
- {
- "id": "This website is an open project, and a collaboration between developers, translators, and researchers. All source code, data, images, and more for the website are open source and can be found {RepoLinkStart}here{LinkEnd}. This site is licensed under the BSD 0-Clause license giving you the full freedom to do whatever you would like with any of the content on this site.",
- "message": "This website is an open project, and a collaboration between developers, translators, and researchers. All source code, data, images, and more for the website are open source and can be found {RepoLinkStart}here{LinkEnd}. This site is licensed under the BSD 0-Clause license giving you the full freedom to do whatever you would like with any of the content on this site.",
- "translation": "",
- "placeholders": [
- {
- "id": "RepoLinkStart",
- "string": "%[1]s",
- "type": "string",
- "underlyingType": "string",
- "argNum": 1,
- "expr": "repoLinkStart"
- },
- {
- "id": "LinkEnd",
- "string": "%[2]s",
- "type": "string",
- "underlyingType": "string",
- "argNum": 2,
- "expr": "linkEnd"
- }
- ]
- },
- {
- "id": "Contact Us",
- "message": "Contact Us",
- "translation": ""
- },
- {
- "id": "While we try to stay as up-to-date as possible and to fact check our information, it is always possible that we get something wrong, lack a translation, or are missing some piece of data you may have. In such a case don’t hesitate to contact us; we’ll try to get the site updated or fixed as soon as possible. You are always free to contribute via a git patch if you are more technically included, but if not you can always send an email to {ContactEmail} or contact ‘@onetruemangoman’ on Discord.",
- "message": "While we try to stay as up-to-date as possible and to fact check our information, it is always possible that we get something wrong, lack a translation, or are missing some piece of data you may have. In such a case don’t hesitate to contact us; we’ll try to get the site updated or fixed as soon as possible. You are always free to contribute via a git patch if you are more technically included, but if not you can always send an email to {ContactEmail} or contact ‘@onetruemangoman’ on Discord.",
- "translation": "",
- "placeholders": [
- {
- "id": "ContactEmail",
- "string": "%[1]s",
- "type": "string",
- "underlyingType": "string",
- "argNum": 1,
- "expr": "contactEmail"
- }
- ]
- },
- {
- "id": "Special Thanks",
- "message": "Special Thanks",
- "translation": ""
- },
- {
- "id": "Development",
- "message": "Development",
- "translation": ""
- },
- {
- "id": "Research",
- "message": "Research",
- "translation": ""
- },
- {
- "id": "Translations",
- "message": "Translations",
- "translation": ""
- },
- {
- "id": "British- \u0026 American English",
- "message": "British- \u0026 American English",
- "translation": ""
- },
- {
- "id": "Icelandic",
- "message": "Icelandic",
- "translation": ""
- },
- {
- "id": "Found a mistake or want to contribute missing information?",
- "message": "Found a mistake or want to contribute missing information?",
- "translation": ""
- },
- {
- "id": "Feel free to contact us!",
- "message": "Feel free to contact us!",
- "translation": ""
- },
- {
- "id": "Dutch Euro Coin Designs",
- "message": "Dutch Euro Coin Designs",
- "translation": ""
- },
- {
- "id": "From the years 1999–2013 all Dutch euro coins featured the portrait of Queen Beatrix of the Netherlands. After her abdication from the throne in 2013 the designs of all denominations were changed to feature the portrait of the new King Willem-Alexander. After her abdication the direction in which the monarchs portrait faced was flipped; a tradition dating back to the earliest coins of the Kingdom of the Netherlands.",
- "message": "From the years 1999–2013 all Dutch euro coins featured the portrait of Queen Beatrix of the Netherlands. After her abdication from the throne in 2013 the designs of all denominations were changed to feature the portrait of the new King Willem-Alexander. After her abdication the direction in which the monarchs portrait faced was flipped; a tradition dating back to the earliest coins of the Kingdom of the Netherlands.",
- "translation": ""
- },
- {
- "id": "Coins featuring both monarchs contain text reading ‘BEATRIX KONINGIN DER NEDERLANDEN’ (‘BEATRIX QUEEN OF THE NETHERLANDS’) and ‘Willem-Alexander Koning der Nederlanden’ (‘Willem-Alexander King of the Netherlands’) respectively.",
- "message": "Coins featuring both monarchs contain text reading ‘BEATRIX KONINGIN DER NEDERLANDEN’ (‘BEATRIX QUEEN OF THE NETHERLANDS’) and ‘Willem-Alexander Koning der Nederlanden’ (‘Willem-Alexander King of the Netherlands’) respectively.",
- "translation": ""
- },
- {
- "id": "The €1 and €2 coins featuring King Willem-Alexander were minted with a much lower relief than most euro coins of the same denomination. As a result it is not uncommon for these coins to appear worn after little use in circulation.",
- "message": "The €1 and €2 coins featuring King Willem-Alexander were minted with a much lower relief than most euro coins of the same denomination. As a result it is not uncommon for these coins to appear worn after little use in circulation.",
- "translation": ""
- },
- {
- "id": "Euro Coin Designs",
- "message": "Euro Coin Designs",
- "translation": ""
- },
- {
- "id": "Here you’ll be able to view all the coin designs for each country in the Eurozone. This section of the site doesn’t include minor varieties such as different mintmarks or errors; those are on the {VarietiesLinkStart}varieties{LinkEnd} page.",
- "message": "Here you’ll be able to view all the coin designs for each country in the Eurozone. This section of the site doesn’t include minor varieties such as different mintmarks or errors; those are on the {VarietiesLinkStart}varieties{LinkEnd} page.",
- "translation": "",
- "placeholders": [
- {
- "id": "VarietiesLinkStart",
- "string": "%[1]s",
- "type": "string",
- "underlyingType": "string",
- "argNum": 1,
- "expr": "varietiesLinkStart"
- },
- {
- "id": "LinkEnd",
- "string": "%[2]s",
- "type": "string",
- "underlyingType": "string",
- "argNum": 2,
- "expr": "linkEnd"
- }
- ]
- },
- {
- "id": "Euro Coin Mintages",
- "message": "Euro Coin Mintages",
- "translation": ""
- },
- {
- "id": "Here you’ll be able to view all the known mintages for all coins. You’ll also be able to filter on country, denomination, etc. If you have any mintage data that’s missing from our site, feel free to contact us.",
- "message": "Here you’ll be able to view all the known mintages for all coins. You’ll also be able to filter on country, denomination, etc. If you have any mintage data that’s missing from our site, feel free to contact us.",
- "translation": ""
- },
- {
- "id": "Additional Notes",
- "message": "Additional Notes",
- "translation": ""
- },
- {
- "id": "Most coins from the years 2003–2016 are listed as NIFC coins while other popular sources such as Numista claim they were minted for circulation. For more information on why others are wrong, {MuntrolpakketLinkStart}click here{LinkEnd}.",
- "message": "Most coins from the years 2003–2016 are listed as NIFC coins while other popular sources such as Numista claim they were minted for circulation. For more information on why others are wrong, {MuntrolpakketLinkStart}click here{LinkEnd}.",
- "translation": "",
- "placeholders": [
- {
- "id": "MuntrolpakketLinkStart",
- "string": "%[1]s",
- "type": "string",
- "underlyingType": "string",
- "argNum": 1,
- "expr": "muntrolpakketLinkStart"
- },
- {
- "id": "LinkEnd",
- "string": "%[2]s",
- "type": "string",
- "underlyingType": "string",
- "argNum": 2,
- "expr": "linkEnd"
- }
- ]
- },
- {
- "id": "In 2003 Numista calculated a total of {217503} coins issued for coin sets per denomination. Our own calculations found only {177003}. Numista also forgot to include the many hundred thousand coins from the coin roll sets that were produced.",
- "message": "In 2003 Numista calculated a total of {217503} coins issued for coin sets per denomination. Our own calculations found only {177003}. Numista also forgot to include the many hundred thousand coins from the coin roll sets that were produced.",
- "translation": "",
- "placeholders": [
- {
- "id": "217503",
- "string": "%[1]d",
- "type": "int",
- "underlyingType": "int",
- "argNum": 1,
- "expr": "217503"
- },
- {
- "id": "177003",
- "string": "%[2]d",
- "type": "int",
- "underlyingType": "int",
- "argNum": 2,
- "expr": "177003"
- }
- ]
- },
- {
- "id": "Country",
- "message": "Country",
- "translation": ""
- },
- {
- "id": "Circulation Coins",
- "message": "Circulation Coins",
- "translation": ""
- },
- {
- "id": "NIFC / BU Sets",
- "message": "NIFC / BU Sets",
- "translation": ""
- },
- {
- "id": "Proof Coins",
- "message": "Proof Coins",
- "translation": ""
- },
- {
- "id": "Filter",
- "message": "Filter",
- "translation": ""
- },
- {
- "id": "Standard Issue Coins",
- "message": "Standard Issue Coins",
- "translation": ""
- },
- {
- "id": "Year",
- "message": "Year",
- "translation": ""
- },
- {
- "id": "Unknown",
- "message": "Unknown",
- "translation": ""
- },
- {
- "id": "Commemorative Coins",
- "message": "Commemorative Coins",
- "translation": ""
- },
- {
- "id": "Commemorated Issue",
- "message": "Commemorated Issue",
- "translation": ""
- },
- {
- "id": "Mintage",
- "message": "Mintage",
- "translation": ""
- },
- {
- "id": "Euro Coins",
- "message": "Euro Coins",
- "translation": ""
- },
- {
- "id": "On this section of the site you can find everything there is to know about the coins of the Eurozone. For the latest news on coin- and design releases, check out the {NewsLinkStart}news{LinkEnd} tab!",
- "message": "On this section of the site you can find everything there is to know about the coins of the Eurozone. For the latest news on coin- and design releases, check out the {NewsLinkStart}news{LinkEnd} tab!",
- "translation": "",
- "placeholders": [
- {
- "id": "NewsLinkStart",
- "string": "%[1]s",
- "type": "string",
- "underlyingType": "string",
- "argNum": 1,
- "expr": "newsLinkStart"
- },
- {
- "id": "LinkEnd",
- "string": "%[2]s",
- "type": "string",
- "underlyingType": "string",
- "argNum": 2,
- "expr": "linkEnd"
- }
- ]
- },
- {
- "id": "Designs",
- "message": "Designs",
- "translation": ""
- },
- {
- "id": "View the 600+ different Euro-coin designs!",
- "message": "View the 600+ different Euro-coin designs!",
- "translation": ""
- },
- {
- "id": "Mintages",
- "message": "Mintages",
- "translation": ""
- },
- {
- "id": "View the mintage figures of all the Euro coins!",
- "message": "View the mintage figures of all the Euro coins!",
- "translation": ""
- },
- {
- "id": "Varieties",
- "message": "Varieties",
- "translation": ""
- },
- {
- "id": "View all the known Euro varieties!",
- "message": "View all the known Euro varieties!",
- "translation": ""
- },
- {
- "id": "If you’re seeing this page, it means that something went wrong on our end that we need to fix. Our team has been notified of this error, and we apologise for the inconvenience.",
- "message": "If you’re seeing this page, it means that something went wrong on our end that we need to fix. Our team has been notified of this error, and we apologise for the inconvenience.",
- "translation": ""
- },
- {
- "id": "If this issue persists, don’t hesitate to contact @onetruemangoman on Discord or to email us at {ContactEmail}.",
- "message": "If this issue persists, don’t hesitate to contact @onetruemangoman on Discord or to email us at {ContactEmail}.",
- "translation": "",
- "placeholders": [
- {
- "id": "ContactEmail",
- "string": "%[1]s",
- "type": "string",
- "underlyingType": "string",
- "argNum": 1,
- "expr": "contactEmail"
- }
- ]
- },
- {
- "id": "Select Your Language",
- "message": "Select Your Language",
- "translation": ""
- },
- {
- "id": "Select your preferred language to use on the site.",
- "message": "Select your preferred language to use on the site.",
- "translation": ""
- },
- {
- "id": "Eurozone Languages",
- "message": "Eurozone Languages",
- "translation": ""
- },
- {
- "id": "Other Languages",
- "message": "Other Languages",
- "translation": ""
- },
- {
- "id": "Home",
- "message": "Home",
- "translation": ""
- },
- {
- "id": "News",
- "message": "News",
- "translation": ""
- },
- {
- "id": "Coin Collecting",
- "message": "Coin Collecting",
- "translation": ""
- },
- {
- "id": "Coins",
- "message": "Coins",
- "translation": ""
- },
- {
- "id": "Banknotes",
- "message": "Banknotes",
- "translation": ""
- },
- {
- "id": "Jargon",
- "message": "Jargon",
- "translation": ""
- },
- {
- "id": "Discord",
- "message": "Discord",
- "translation": ""
- },
- {
- "id": "About",
- "message": "About",
- "translation": ""
- },
- {
- "id": "Language",
- "message": "Language",
- "translation": ""
- },
- {
- "id": "The Euro Cash Compendium",
- "message": "The Euro Cash Compendium",
- "translation": ""
- },
- {
- "id": "United in",
- "message": "United in",
- "translation": ""
- },
- {
- "id": "diversity",
- "message": "diversity",
- "translation": ""
- },
- {
- "id": "cash",
- "message": "cash",
- "translation": ""
- },
- {
- "id": "Welcome to the Euro Cash Compendium. This sites aims to be a resource for you to discover everything there is to know about the coins and banknotes of the Euro, a currency that spans 26 countries and 350 million people. We also have dedicated sections of the site for collectors.",
- "message": "Welcome to the Euro Cash Compendium. This sites aims to be a resource for you to discover everything there is to know about the coins and banknotes of the Euro, a currency that spans 26 countries and 350 million people. We also have dedicated sections of the site for collectors.",
- "translation": ""
- }
- ]
-} \ No newline at end of file
diff --git a/lib/locales/el/messages.gotext.json b/lib/locales/el/messages.gotext.json
deleted file mode 100644
index 1827a66..0000000
--- a/lib/locales/el/messages.gotext.json
+++ /dev/null
@@ -1,570 +0,0 @@
-{
- "language": "el",
- "messages": [
- {
- "id": "Andorra",
- "message": "Andorra",
- "translation": "Ανδόρα"
- },
- {
- "id": "Austria",
- "message": "Austria",
- "translation": "Αυστρία"
- },
- {
- "id": "Belgium",
- "message": "Belgium",
- "translation": "Βέλγιο"
- },
- {
- "id": "Cyprus",
- "message": "Cyprus",
- "translation": "Κύπρος"
- },
- {
- "id": "Germany",
- "message": "Germany",
- "translation": "Γερμανία"
- },
- {
- "id": "Estonia",
- "message": "Estonia",
- "translation": "Εσθονία"
- },
- {
- "id": "Spain",
- "message": "Spain",
- "translation": "Ισπανία"
- },
- {
- "id": "Finland",
- "message": "Finland",
- "translation": "Φινλανδία"
- },
- {
- "id": "France",
- "message": "France",
- "translation": "Γαλλία"
- },
- {
- "id": "Greece",
- "message": "Greece",
- "translation": "Ελλάδα"
- },
- {
- "id": "Croatia",
- "message": "Croatia",
- "translation": "Κροατία"
- },
- {
- "id": "Ireland",
- "message": "Ireland",
- "translation": "Ιρλανδία"
- },
- {
- "id": "Italy",
- "message": "Italy",
- "translation": "Ιταλία"
- },
- {
- "id": "Lithuania",
- "message": "Lithuania",
- "translation": "Λιθουανία"
- },
- {
- "id": "Luxembourg",
- "message": "Luxembourg",
- "translation": "Λουξεμβούργο"
- },
- {
- "id": "Latvia",
- "message": "Latvia",
- "translation": "Λετονία"
- },
- {
- "id": "Monaco",
- "message": "Monaco",
- "translation": "Μονακό"
- },
- {
- "id": "Malta",
- "message": "Malta",
- "translation": "Μάλτα"
- },
- {
- "id": "Netherlands",
- "message": "Netherlands",
- "translation": "Ολλανδία"
- },
- {
- "id": "Portugal",
- "message": "Portugal",
- "translation": "Πορτογαλία"
- },
- {
- "id": "Slovenia",
- "message": "Slovenia",
- "translation": "Σλοβενία"
- },
- {
- "id": "Slovakia",
- "message": "Slovakia",
- "translation": "Σλοβακία"
- },
- {
- "id": "San Marino",
- "message": "San Marino",
- "translation": "Σαν Μαρίνο"
- },
- {
- "id": "Vatican City",
- "message": "Vatican City",
- "translation": "Βατικανό"
- },
- {
- "id": "Page Not Found",
- "message": "Page Not Found",
- "translation": ""
- },
- {
- "id": "The page you were looking for does not exist. If you believe this is a mistake then don’t hesitate to contact @onetruemangoman on Discord or to email us at {ContactEmail}.",
- "message": "The page you were looking for does not exist. If you believe this is a mistake then don’t hesitate to contact @onetruemangoman on Discord or to email us at {ContactEmail}.",
- "translation": "",
- "placeholders": [
- {
- "id": "ContactEmail",
- "string": "%[1]s",
- "type": "string",
- "underlyingType": "string",
- "argNum": 1,
- "expr": "contactEmail"
- }
- ]
- },
- {
- "id": "About Us",
- "message": "About Us",
- "translation": ""
- },
- {
- "id": "Open Source",
- "message": "Open Source",
- "translation": ""
- },
- {
- "id": "This website is an open project, and a collaboration between developers, translators, and researchers. All source code, data, images, and more for the website are open source and can be found {RepoLinkStart}here{LinkEnd}. This site is licensed under the BSD 0-Clause license giving you the full freedom to do whatever you would like with any of the content on this site.",
- "message": "This website is an open project, and a collaboration between developers, translators, and researchers. All source code, data, images, and more for the website are open source and can be found {RepoLinkStart}here{LinkEnd}. This site is licensed under the BSD 0-Clause license giving you the full freedom to do whatever you would like with any of the content on this site.",
- "translation": "",
- "placeholders": [
- {
- "id": "RepoLinkStart",
- "string": "%[1]s",
- "type": "string",
- "underlyingType": "string",
- "argNum": 1,
- "expr": "repoLinkStart"
- },
- {
- "id": "LinkEnd",
- "string": "%[2]s",
- "type": "string",
- "underlyingType": "string",
- "argNum": 2,
- "expr": "linkEnd"
- }
- ]
- },
- {
- "id": "Contact Us",
- "message": "Contact Us",
- "translation": ""
- },
- {
- "id": "While we try to stay as up-to-date as possible and to fact check our information, it is always possible that we get something wrong, lack a translation, or are missing some piece of data you may have. In such a case don’t hesitate to contact us; we’ll try to get the site updated or fixed as soon as possible. You are always free to contribute via a git patch if you are more technically included, but if not you can always send an email to {ContactEmail} or contact ‘@onetruemangoman’ on Discord.",
- "message": "While we try to stay as up-to-date as possible and to fact check our information, it is always possible that we get something wrong, lack a translation, or are missing some piece of data you may have. In such a case don’t hesitate to contact us; we’ll try to get the site updated or fixed as soon as possible. You are always free to contribute via a git patch if you are more technically included, but if not you can always send an email to {ContactEmail} or contact ‘@onetruemangoman’ on Discord.",
- "translation": "",
- "placeholders": [
- {
- "id": "ContactEmail",
- "string": "%[1]s",
- "type": "string",
- "underlyingType": "string",
- "argNum": 1,
- "expr": "contactEmail"
- }
- ]
- },
- {
- "id": "Special Thanks",
- "message": "Special Thanks",
- "translation": ""
- },
- {
- "id": "Development",
- "message": "Development",
- "translation": ""
- },
- {
- "id": "Research",
- "message": "Research",
- "translation": ""
- },
- {
- "id": "Translations",
- "message": "Translations",
- "translation": ""
- },
- {
- "id": "British- \u0026 American English",
- "message": "British- \u0026 American English",
- "translation": ""
- },
- {
- "id": "Icelandic",
- "message": "Icelandic",
- "translation": ""
- },
- {
- "id": "Found a mistake or want to contribute missing information?",
- "message": "Found a mistake or want to contribute missing information?",
- "translation": ""
- },
- {
- "id": "Feel free to contact us!",
- "message": "Feel free to contact us!",
- "translation": ""
- },
- {
- "id": "Dutch Euro Coin Designs",
- "message": "Dutch Euro Coin Designs",
- "translation": ""
- },
- {
- "id": "From the years 1999–2013 all Dutch euro coins featured the portrait of Queen Beatrix of the Netherlands. After her abdication from the throne in 2013 the designs of all denominations were changed to feature the portrait of the new King Willem-Alexander. After her abdication the direction in which the monarchs portrait faced was flipped; a tradition dating back to the earliest coins of the Kingdom of the Netherlands.",
- "message": "From the years 1999–2013 all Dutch euro coins featured the portrait of Queen Beatrix of the Netherlands. After her abdication from the throne in 2013 the designs of all denominations were changed to feature the portrait of the new King Willem-Alexander. After her abdication the direction in which the monarchs portrait faced was flipped; a tradition dating back to the earliest coins of the Kingdom of the Netherlands.",
- "translation": ""
- },
- {
- "id": "Coins featuring both monarchs contain text reading ‘BEATRIX KONINGIN DER NEDERLANDEN’ (‘BEATRIX QUEEN OF THE NETHERLANDS’) and ‘Willem-Alexander Koning der Nederlanden’ (‘Willem-Alexander King of the Netherlands’) respectively.",
- "message": "Coins featuring both monarchs contain text reading ‘BEATRIX KONINGIN DER NEDERLANDEN’ (‘BEATRIX QUEEN OF THE NETHERLANDS’) and ‘Willem-Alexander Koning der Nederlanden’ (‘Willem-Alexander King of the Netherlands’) respectively.",
- "translation": ""
- },
- {
- "id": "The €1 and €2 coins featuring King Willem-Alexander were minted with a much lower relief than most euro coins of the same denomination. As a result it is not uncommon for these coins to appear worn after little use in circulation.",
- "message": "The €1 and €2 coins featuring King Willem-Alexander were minted with a much lower relief than most euro coins of the same denomination. As a result it is not uncommon for these coins to appear worn after little use in circulation.",
- "translation": ""
- },
- {
- "id": "Euro Coin Designs",
- "message": "Euro Coin Designs",
- "translation": ""
- },
- {
- "id": "Here you’ll be able to view all the coin designs for each country in the Eurozone. This section of the site doesn’t include minor varieties such as different mintmarks or errors; those are on the {VarietiesLinkStart}varieties{LinkEnd} page.",
- "message": "Here you’ll be able to view all the coin designs for each country in the Eurozone. This section of the site doesn’t include minor varieties such as different mintmarks or errors; those are on the {VarietiesLinkStart}varieties{LinkEnd} page.",
- "translation": "",
- "placeholders": [
- {
- "id": "VarietiesLinkStart",
- "string": "%[1]s",
- "type": "string",
- "underlyingType": "string",
- "argNum": 1,
- "expr": "varietiesLinkStart"
- },
- {
- "id": "LinkEnd",
- "string": "%[2]s",
- "type": "string",
- "underlyingType": "string",
- "argNum": 2,
- "expr": "linkEnd"
- }
- ]
- },
- {
- "id": "Euro Coin Mintages",
- "message": "Euro Coin Mintages",
- "translation": ""
- },
- {
- "id": "Here you’ll be able to view all the known mintages for all coins. You’ll also be able to filter on country, denomination, etc. If you have any mintage data that’s missing from our site, feel free to contact us.",
- "message": "Here you’ll be able to view all the known mintages for all coins. You’ll also be able to filter on country, denomination, etc. If you have any mintage data that’s missing from our site, feel free to contact us.",
- "translation": ""
- },
- {
- "id": "Additional Notes",
- "message": "Additional Notes",
- "translation": ""
- },
- {
- "id": "Most coins from the years 2003–2016 are listed as NIFC coins while other popular sources such as Numista claim they were minted for circulation. For more information on why others are wrong, {MuntrolpakketLinkStart}click here{LinkEnd}.",
- "message": "Most coins from the years 2003–2016 are listed as NIFC coins while other popular sources such as Numista claim they were minted for circulation. For more information on why others are wrong, {MuntrolpakketLinkStart}click here{LinkEnd}.",
- "translation": "",
- "placeholders": [
- {
- "id": "MuntrolpakketLinkStart",
- "string": "%[1]s",
- "type": "string",
- "underlyingType": "string",
- "argNum": 1,
- "expr": "muntrolpakketLinkStart"
- },
- {
- "id": "LinkEnd",
- "string": "%[2]s",
- "type": "string",
- "underlyingType": "string",
- "argNum": 2,
- "expr": "linkEnd"
- }
- ]
- },
- {
- "id": "In 2003 Numista calculated a total of {217503} coins issued for coin sets per denomination. Our own calculations found only {177003}. Numista also forgot to include the many hundred thousand coins from the coin roll sets that were produced.",
- "message": "In 2003 Numista calculated a total of {217503} coins issued for coin sets per denomination. Our own calculations found only {177003}. Numista also forgot to include the many hundred thousand coins from the coin roll sets that were produced.",
- "translation": "",
- "placeholders": [
- {
- "id": "217503",
- "string": "%[1]d",
- "type": "int",
- "underlyingType": "int",
- "argNum": 1,
- "expr": "217503"
- },
- {
- "id": "177003",
- "string": "%[2]d",
- "type": "int",
- "underlyingType": "int",
- "argNum": 2,
- "expr": "177003"
- }
- ]
- },
- {
- "id": "Country",
- "message": "Country",
- "translation": ""
- },
- {
- "id": "Circulation Coins",
- "message": "Circulation Coins",
- "translation": ""
- },
- {
- "id": "NIFC / BU Sets",
- "message": "NIFC / BU Sets",
- "translation": ""
- },
- {
- "id": "Proof Coins",
- "message": "Proof Coins",
- "translation": ""
- },
- {
- "id": "Filter",
- "message": "Filter",
- "translation": ""
- },
- {
- "id": "Standard Issue Coins",
- "message": "Standard Issue Coins",
- "translation": ""
- },
- {
- "id": "Year",
- "message": "Year",
- "translation": ""
- },
- {
- "id": "Unknown",
- "message": "Unknown",
- "translation": ""
- },
- {
- "id": "Commemorative Coins",
- "message": "Commemorative Coins",
- "translation": ""
- },
- {
- "id": "Commemorated Issue",
- "message": "Commemorated Issue",
- "translation": ""
- },
- {
- "id": "Mintage",
- "message": "Mintage",
- "translation": ""
- },
- {
- "id": "Euro Coins",
- "message": "Euro Coins",
- "translation": ""
- },
- {
- "id": "On this section of the site you can find everything there is to know about the coins of the Eurozone. For the latest news on coin- and design releases, check out the {NewsLinkStart}news{LinkEnd} tab!",
- "message": "On this section of the site you can find everything there is to know about the coins of the Eurozone. For the latest news on coin- and design releases, check out the {NewsLinkStart}news{LinkEnd} tab!",
- "translation": "",
- "placeholders": [
- {
- "id": "NewsLinkStart",
- "string": "%[1]s",
- "type": "string",
- "underlyingType": "string",
- "argNum": 1,
- "expr": "newsLinkStart"
- },
- {
- "id": "LinkEnd",
- "string": "%[2]s",
- "type": "string",
- "underlyingType": "string",
- "argNum": 2,
- "expr": "linkEnd"
- }
- ]
- },
- {
- "id": "Designs",
- "message": "Designs",
- "translation": ""
- },
- {
- "id": "View the 600+ different Euro-coin designs!",
- "message": "View the 600+ different Euro-coin designs!",
- "translation": ""
- },
- {
- "id": "Mintages",
- "message": "Mintages",
- "translation": ""
- },
- {
- "id": "View the mintage figures of all the Euro coins!",
- "message": "View the mintage figures of all the Euro coins!",
- "translation": ""
- },
- {
- "id": "Varieties",
- "message": "Varieties",
- "translation": ""
- },
- {
- "id": "View all the known Euro varieties!",
- "message": "View all the known Euro varieties!",
- "translation": ""
- },
- {
- "id": "If you’re seeing this page, it means that something went wrong on our end that we need to fix. Our team has been notified of this error, and we apologise for the inconvenience.",
- "message": "If you’re seeing this page, it means that something went wrong on our end that we need to fix. Our team has been notified of this error, and we apologise for the inconvenience.",
- "translation": ""
- },
- {
- "id": "If this issue persists, don’t hesitate to contact @onetruemangoman on Discord or to email us at {ContactEmail}.",
- "message": "If this issue persists, don’t hesitate to contact @onetruemangoman on Discord or to email us at {ContactEmail}.",
- "translation": "",
- "placeholders": [
- {
- "id": "ContactEmail",
- "string": "%[1]s",
- "type": "string",
- "underlyingType": "string",
- "argNum": 1,
- "expr": "contactEmail"
- }
- ]
- },
- {
- "id": "Select Your Language",
- "message": "Select Your Language",
- "translation": ""
- },
- {
- "id": "Select your preferred language to use on the site.",
- "message": "Select your preferred language to use on the site.",
- "translation": ""
- },
- {
- "id": "Eurozone Languages",
- "message": "Eurozone Languages",
- "translation": ""
- },
- {
- "id": "Other Languages",
- "message": "Other Languages",
- "translation": ""
- },
- {
- "id": "Home",
- "message": "Home",
- "translation": ""
- },
- {
- "id": "News",
- "message": "News",
- "translation": ""
- },
- {
- "id": "Coin Collecting",
- "message": "Coin Collecting",
- "translation": ""
- },
- {
- "id": "Coins",
- "message": "Coins",
- "translation": ""
- },
- {
- "id": "Banknotes",
- "message": "Banknotes",
- "translation": ""
- },
- {
- "id": "Jargon",
- "message": "Jargon",
- "translation": ""
- },
- {
- "id": "Discord",
- "message": "Discord",
- "translation": ""
- },
- {
- "id": "About",
- "message": "About",
- "translation": ""
- },
- {
- "id": "Language",
- "message": "Language",
- "translation": ""
- },
- {
- "id": "The Euro Cash Compendium",
- "message": "The Euro Cash Compendium",
- "translation": ""
- },
- {
- "id": "United in",
- "message": "United in",
- "translation": ""
- },
- {
- "id": "diversity",
- "message": "diversity",
- "translation": ""
- },
- {
- "id": "cash",
- "message": "cash",
- "translation": ""
- },
- {
- "id": "Welcome to the Euro Cash Compendium. This sites aims to be a resource for you to discover everything there is to know about the coins and banknotes of the Euro, a currency that spans 26 countries and 350 million people. We also have dedicated sections of the site for collectors.",
- "message": "Welcome to the Euro Cash Compendium. This sites aims to be a resource for you to discover everything there is to know about the coins and banknotes of the Euro, a currency that spans 26 countries and 350 million people. We also have dedicated sections of the site for collectors.",
- "translation": ""
- }
- ]
-} \ No newline at end of file
diff --git a/lib/locales/en/messages.gotext.json b/lib/locales/en/messages.gotext.json
deleted file mode 100644
index 45d33b0..0000000
--- a/lib/locales/en/messages.gotext.json
+++ /dev/null
@@ -1,748 +0,0 @@
-{
- "language": "en",
- "messages": [
- {
- "id": "Andorra",
- "message": "Andorra",
- "translation": "Andorra",
- "translatorComment": "Copied from source.",
- "fuzzy": true
- },
- {
- "id": "Austria",
- "message": "Austria",
- "translation": "Austria",
- "translatorComment": "Copied from source.",
- "fuzzy": true
- },
- {
- "id": "Belgium",
- "message": "Belgium",
- "translation": "Belgium",
- "translatorComment": "Copied from source.",
- "fuzzy": true
- },
- {
- "id": "Cyprus",
- "message": "Cyprus",
- "translation": "Cyprus",
- "translatorComment": "Copied from source.",
- "fuzzy": true
- },
- {
- "id": "Germany",
- "message": "Germany",
- "translation": "Germany",
- "translatorComment": "Copied from source.",
- "fuzzy": true
- },
- {
- "id": "Estonia",
- "message": "Estonia",
- "translation": "Estonia",
- "translatorComment": "Copied from source.",
- "fuzzy": true
- },
- {
- "id": "Spain",
- "message": "Spain",
- "translation": "Spain",
- "translatorComment": "Copied from source.",
- "fuzzy": true
- },
- {
- "id": "Finland",
- "message": "Finland",
- "translation": "Finland",
- "translatorComment": "Copied from source.",
- "fuzzy": true
- },
- {
- "id": "France",
- "message": "France",
- "translation": "France",
- "translatorComment": "Copied from source.",
- "fuzzy": true
- },
- {
- "id": "Greece",
- "message": "Greece",
- "translation": "Greece",
- "translatorComment": "Copied from source.",
- "fuzzy": true
- },
- {
- "id": "Croatia",
- "message": "Croatia",
- "translation": "Croatia",
- "translatorComment": "Copied from source.",
- "fuzzy": true
- },
- {
- "id": "Ireland",
- "message": "Ireland",
- "translation": "Ireland",
- "translatorComment": "Copied from source.",
- "fuzzy": true
- },
- {
- "id": "Italy",
- "message": "Italy",
- "translation": "Italy",
- "translatorComment": "Copied from source.",
- "fuzzy": true
- },
- {
- "id": "Lithuania",
- "message": "Lithuania",
- "translation": "Lithuania",
- "translatorComment": "Copied from source.",
- "fuzzy": true
- },
- {
- "id": "Luxembourg",
- "message": "Luxembourg",
- "translation": "Luxembourg",
- "translatorComment": "Copied from source.",
- "fuzzy": true
- },
- {
- "id": "Latvia",
- "message": "Latvia",
- "translation": "Latvia",
- "translatorComment": "Copied from source.",
- "fuzzy": true
- },
- {
- "id": "Monaco",
- "message": "Monaco",
- "translation": "Monaco",
- "translatorComment": "Copied from source.",
- "fuzzy": true
- },
- {
- "id": "Malta",
- "message": "Malta",
- "translation": "Malta",
- "translatorComment": "Copied from source.",
- "fuzzy": true
- },
- {
- "id": "Netherlands",
- "message": "Netherlands",
- "translation": "Netherlands",
- "translatorComment": "Copied from source.",
- "fuzzy": true
- },
- {
- "id": "Portugal",
- "message": "Portugal",
- "translation": "Portugal",
- "translatorComment": "Copied from source.",
- "fuzzy": true
- },
- {
- "id": "Slovenia",
- "message": "Slovenia",
- "translation": "Slovenia",
- "translatorComment": "Copied from source.",
- "fuzzy": true
- },
- {
- "id": "Slovakia",
- "message": "Slovakia",
- "translation": "Slovakia",
- "translatorComment": "Copied from source.",
- "fuzzy": true
- },
- {
- "id": "San Marino",
- "message": "San Marino",
- "translation": "San Marino",
- "translatorComment": "Copied from source.",
- "fuzzy": true
- },
- {
- "id": "Vatican City",
- "message": "Vatican City",
- "translation": "Vatican City",
- "translatorComment": "Copied from source.",
- "fuzzy": true
- },
- {
- "id": "Page Not Found",
- "message": "Page Not Found",
- "translation": "Page Not Found",
- "translatorComment": "Copied from source.",
- "fuzzy": true
- },
- {
- "id": "The page you were looking for does not exist. If you believe this is a mistake then don’t hesitate to contact @onetruemangoman on Discord or to email us at {ContactEmail}.",
- "message": "The page you were looking for does not exist. If you believe this is a mistake then don’t hesitate to contact @onetruemangoman on Discord or to email us at {ContactEmail}.",
- "translation": "The page you were looking for does not exist. If you believe this is a mistake then don’t hesitate to contact @onetruemangoman on Discord or to email us at {ContactEmail}.",
- "translatorComment": "Copied from source.",
- "placeholders": [
- {
- "id": "ContactEmail",
- "string": "%[1]s",
- "type": "string",
- "underlyingType": "string",
- "argNum": 1,
- "expr": "contactEmail"
- }
- ],
- "fuzzy": true
- },
- {
- "id": "About Us",
- "message": "About Us",
- "translation": "About Us",
- "translatorComment": "Copied from source.",
- "fuzzy": true
- },
- {
- "id": "Open Source",
- "message": "Open Source",
- "translation": "Open Source",
- "translatorComment": "Copied from source.",
- "fuzzy": true
- },
- {
- "id": "This website is an open project, and a collaboration between developers, translators, and researchers. All source code, data, images, and more for the website are open source and can be found {RepoLinkStart}here{LinkEnd}. This site is licensed under the BSD 0-Clause license giving you the full freedom to do whatever you would like with any of the content on this site.",
- "message": "This website is an open project, and a collaboration between developers, translators, and researchers. All source code, data, images, and more for the website are open source and can be found {RepoLinkStart}here{LinkEnd}. This site is licensed under the BSD 0-Clause license giving you the full freedom to do whatever you would like with any of the content on this site.",
- "translation": "This website is an open project, and a collaboration between developers, translators, and researchers. All source code, data, images, and more for the website are open source and can be found {RepoLinkStart}here{LinkEnd}. This site is licensed under the BSD 0-Clause license giving you the full freedom to do whatever you would like with any of the content on this site.",
- "translatorComment": "Copied from source.",
- "placeholders": [
- {
- "id": "RepoLinkStart",
- "string": "%[1]s",
- "type": "string",
- "underlyingType": "string",
- "argNum": 1,
- "expr": "repoLinkStart"
- },
- {
- "id": "LinkEnd",
- "string": "%[2]s",
- "type": "string",
- "underlyingType": "string",
- "argNum": 2,
- "expr": "linkEnd"
- }
- ],
- "fuzzy": true
- },
- {
- "id": "Contact Us",
- "message": "Contact Us",
- "translation": "Contact Us",
- "translatorComment": "Copied from source.",
- "fuzzy": true
- },
- {
- "id": "While we try to stay as up-to-date as possible and to fact check our information, it is always possible that we get something wrong, lack a translation, or are missing some piece of data you may have. In such a case don’t hesitate to contact us; we’ll try to get the site updated or fixed as soon as possible. You are always free to contribute via a git patch if you are more technically included, but if not you can always send an email to {ContactEmail} or contact ‘@onetruemangoman’ on Discord.",
- "message": "While we try to stay as up-to-date as possible and to fact check our information, it is always possible that we get something wrong, lack a translation, or are missing some piece of data you may have. In such a case don’t hesitate to contact us; we’ll try to get the site updated or fixed as soon as possible. You are always free to contribute via a git patch if you are more technically included, but if not you can always send an email to {ContactEmail} or contact ‘@onetruemangoman’ on Discord.",
- "translation": "While we try to stay as up-to-date as possible and to fact check our information, it is always possible that we get something wrong, lack a translation, or are missing some piece of data you may have. In such a case don’t hesitate to contact us; we’ll try to get the site updated or fixed as soon as possible. You are always free to contribute via a git patch if you are more technically included, but if not you can always send an email to {ContactEmail} or contact ‘@onetruemangoman’ on Discord.",
- "translatorComment": "Copied from source.",
- "placeholders": [
- {
- "id": "ContactEmail",
- "string": "%[1]s",
- "type": "string",
- "underlyingType": "string",
- "argNum": 1,
- "expr": "contactEmail"
- }
- ],
- "fuzzy": true
- },
- {
- "id": "Special Thanks",
- "message": "Special Thanks",
- "translation": "Special Thanks",
- "translatorComment": "Copied from source.",
- "fuzzy": true
- },
- {
- "id": "Development",
- "message": "Development",
- "translation": "Development",
- "translatorComment": "Copied from source.",
- "fuzzy": true
- },
- {
- "id": "Research",
- "message": "Research",
- "translation": "Research",
- "translatorComment": "Copied from source.",
- "fuzzy": true
- },
- {
- "id": "Translations",
- "message": "Translations",
- "translation": "Translations",
- "translatorComment": "Copied from source.",
- "fuzzy": true
- },
- {
- "id": "British- \u0026 American English",
- "message": "British- \u0026 American English",
- "translation": "British- \u0026 American English",
- "translatorComment": "Copied from source.",
- "fuzzy": true
- },
- {
- "id": "Icelandic",
- "message": "Icelandic",
- "translation": "Icelandic",
- "translatorComment": "Copied from source.",
- "fuzzy": true
- },
- {
- "id": "Found a mistake or want to contribute missing information?",
- "message": "Found a mistake or want to contribute missing information?",
- "translation": "Found a mistake or want to contribute missing information?",
- "translatorComment": "Copied from source.",
- "fuzzy": true
- },
- {
- "id": "Feel free to contact us!",
- "message": "Feel free to contact us!",
- "translation": "Feel free to contact us!",
- "translatorComment": "Copied from source.",
- "fuzzy": true
- },
- {
- "id": "Dutch Euro Coin Designs",
- "message": "Dutch Euro Coin Designs",
- "translation": "Dutch Euro Coin Designs",
- "translatorComment": "Copied from source.",
- "fuzzy": true
- },
- {
- "id": "From the years 1999–2013 all Dutch euro coins featured the portrait of Queen Beatrix of the Netherlands. After her abdication from the throne in 2013 the designs of all denominations were changed to feature the portrait of the new King Willem-Alexander. After her abdication the direction in which the monarchs portrait faced was flipped; a tradition dating back to the earliest coins of the Kingdom of the Netherlands.",
- "message": "From the years 1999–2013 all Dutch euro coins featured the portrait of Queen Beatrix of the Netherlands. After her abdication from the throne in 2013 the designs of all denominations were changed to feature the portrait of the new King Willem-Alexander. After her abdication the direction in which the monarchs portrait faced was flipped; a tradition dating back to the earliest coins of the Kingdom of the Netherlands.",
- "translation": "From the years 1999–2013 all Dutch euro coins featured the portrait of Queen Beatrix of the Netherlands. After her abdication from the throne in 2013 the designs of all denominations were changed to feature the portrait of the new King Willem-Alexander. After her abdication the direction in which the monarchs portrait faced was flipped; a tradition dating back to the earliest coins of the Kingdom of the Netherlands.",
- "translatorComment": "Copied from source.",
- "fuzzy": true
- },
- {
- "id": "Coins featuring both monarchs contain text reading ‘BEATRIX KONINGIN DER NEDERLANDEN’ (‘BEATRIX QUEEN OF THE NETHERLANDS’) and ‘Willem-Alexander Koning der Nederlanden’ (‘Willem-Alexander King of the Netherlands’) respectively.",
- "message": "Coins featuring both monarchs contain text reading ‘BEATRIX KONINGIN DER NEDERLANDEN’ (‘BEATRIX QUEEN OF THE NETHERLANDS’) and ‘Willem-Alexander Koning der Nederlanden’ (‘Willem-Alexander King of the Netherlands’) respectively.",
- "translation": "Coins featuring both monarchs contain text reading ‘BEATRIX KONINGIN DER NEDERLANDEN’ (‘BEATRIX QUEEN OF THE NETHERLANDS’) and ‘Willem-Alexander Koning der Nederlanden’ (‘Willem-Alexander King of the Netherlands’) respectively.",
- "translatorComment": "Copied from source.",
- "fuzzy": true
- },
- {
- "id": "The €1 and €2 coins featuring King Willem-Alexander were minted with a much lower relief than most euro coins of the same denomination. As a result it is not uncommon for these coins to appear worn after little use in circulation.",
- "message": "The €1 and €2 coins featuring King Willem-Alexander were minted with a much lower relief than most euro coins of the same denomination. As a result it is not uncommon for these coins to appear worn after little use in circulation.",
- "translation": "The €1 and €2 coins featuring King Willem-Alexander were minted with a much lower relief than most euro coins of the same denomination. As a result it is not uncommon for these coins to appear worn after little use in circulation.",
- "translatorComment": "Copied from source.",
- "fuzzy": true
- },
- {
- "id": "Euro Coin Designs",
- "message": "Euro Coin Designs",
- "translation": "Euro Coin Designs",
- "translatorComment": "Copied from source.",
- "fuzzy": true
- },
- {
- "id": "Here you’ll be able to view all the coin designs for each country in the Eurozone. This section of the site doesn’t include minor varieties such as different mintmarks or errors; those are on the {VarietiesLinkStart}varieties{LinkEnd} page.",
- "message": "Here you’ll be able to view all the coin designs for each country in the Eurozone. This section of the site doesn’t include minor varieties such as different mintmarks or errors; those are on the {VarietiesLinkStart}varieties{LinkEnd} page.",
- "translation": "Here you’ll be able to view all the coin designs for each country in the Eurozone. This section of the site doesn’t include minor varieties such as different mintmarks or errors; those are on the {VarietiesLinkStart}varieties{LinkEnd} page.",
- "translatorComment": "Copied from source.",
- "placeholders": [
- {
- "id": "VarietiesLinkStart",
- "string": "%[1]s",
- "type": "string",
- "underlyingType": "string",
- "argNum": 1,
- "expr": "varietiesLinkStart"
- },
- {
- "id": "LinkEnd",
- "string": "%[2]s",
- "type": "string",
- "underlyingType": "string",
- "argNum": 2,
- "expr": "linkEnd"
- }
- ],
- "fuzzy": true
- },
- {
- "id": "Euro Coin Mintages",
- "message": "Euro Coin Mintages",
- "translation": "Euro Coin Mintages",
- "translatorComment": "Copied from source.",
- "fuzzy": true
- },
- {
- "id": "Here you’ll be able to view all the known mintages for all coins. You’ll also be able to filter on country, denomination, etc. If you have any mintage data that’s missing from our site, feel free to contact us.",
- "message": "Here you’ll be able to view all the known mintages for all coins. You’ll also be able to filter on country, denomination, etc. If you have any mintage data that’s missing from our site, feel free to contact us.",
- "translation": "Here you’ll be able to view all the known mintages for all coins. You’ll also be able to filter on country, denomination, etc. If you have any mintage data that’s missing from our site, feel free to contact us.",
- "translatorComment": "Copied from source.",
- "fuzzy": true
- },
- {
- "id": "Additional Notes",
- "message": "Additional Notes",
- "translation": "Additional Notes",
- "translatorComment": "Copied from source.",
- "fuzzy": true
- },
- {
- "id": "Most coins from the years 2003–2016 are listed as NIFC coins while other popular sources such as Numista claim they were minted for circulation. For more information on why others are wrong, {MuntrolpakketLinkStart}click here{LinkEnd}.",
- "message": "Most coins from the years 2003–2016 are listed as NIFC coins while other popular sources such as Numista claim they were minted for circulation. For more information on why others are wrong, {MuntrolpakketLinkStart}click here{LinkEnd}.",
- "translation": "Most coins from the years 2003–2016 are listed as NIFC coins while other popular sources such as Numista claim they were minted for circulation. For more information on why others are wrong, {MuntrolpakketLinkStart}click here{LinkEnd}.",
- "translatorComment": "Copied from source.",
- "placeholders": [
- {
- "id": "MuntrolpakketLinkStart",
- "string": "%[1]s",
- "type": "string",
- "underlyingType": "string",
- "argNum": 1,
- "expr": "muntrolpakketLinkStart"
- },
- {
- "id": "LinkEnd",
- "string": "%[2]s",
- "type": "string",
- "underlyingType": "string",
- "argNum": 2,
- "expr": "linkEnd"
- }
- ],
- "fuzzy": true
- },
- {
- "id": "In 2003 Numista calculated a total of {217503} coins issued for coin sets per denomination. Our own calculations found only {177003}. Numista also forgot to include the many hundred thousand coins from the coin roll sets that were produced.",
- "message": "In 2003 Numista calculated a total of {217503} coins issued for coin sets per denomination. Our own calculations found only {177003}. Numista also forgot to include the many hundred thousand coins from the coin roll sets that were produced.",
- "translation": "In 2003 Numista calculated a total of {217503} coins issued for coin sets per denomination. Our own calculations found only {177003}. Numista also forgot to include the many hundred thousand coins from the coin roll sets that were produced.",
- "translatorComment": "Copied from source.",
- "placeholders": [
- {
- "id": "217503",
- "string": "%[1]d",
- "type": "int",
- "underlyingType": "int",
- "argNum": 1,
- "expr": "217503"
- },
- {
- "id": "177003",
- "string": "%[2]d",
- "type": "int",
- "underlyingType": "int",
- "argNum": 2,
- "expr": "177003"
- }
- ],
- "fuzzy": true
- },
- {
- "id": "Country",
- "message": "Country",
- "translation": "Country",
- "translatorComment": "Copied from source.",
- "fuzzy": true
- },
- {
- "id": "Circulation Coins",
- "message": "Circulation Coins",
- "translation": "Circulation Coins",
- "translatorComment": "Copied from source.",
- "fuzzy": true
- },
- {
- "id": "NIFC / BU Sets",
- "message": "NIFC / BU Sets",
- "translation": "NIFC / BU Sets",
- "translatorComment": "Copied from source.",
- "fuzzy": true
- },
- {
- "id": "Proof Coins",
- "message": "Proof Coins",
- "translation": "Proof Coins",
- "translatorComment": "Copied from source.",
- "fuzzy": true
- },
- {
- "id": "Filter",
- "message": "Filter",
- "translation": "Filter",
- "translatorComment": "Copied from source.",
- "fuzzy": true
- },
- {
- "id": "Standard Issue Coins",
- "message": "Standard Issue Coins",
- "translation": "Standard Issue Coins",
- "translatorComment": "Copied from source.",
- "fuzzy": true
- },
- {
- "id": "Year",
- "message": "Year",
- "translation": "Year",
- "translatorComment": "Copied from source.",
- "fuzzy": true
- },
- {
- "id": "Unknown",
- "message": "Unknown",
- "translation": "Unknown",
- "translatorComment": "Copied from source.",
- "fuzzy": true
- },
- {
- "id": "Commemorative Coins",
- "message": "Commemorative Coins",
- "translation": "Commemorative Coins",
- "translatorComment": "Copied from source.",
- "fuzzy": true
- },
- {
- "id": "Commemorated Issue",
- "message": "Commemorated Issue",
- "translation": "Commemorated Issue",
- "translatorComment": "Copied from source.",
- "fuzzy": true
- },
- {
- "id": "Mintage",
- "message": "Mintage",
- "translation": "Mintage",
- "translatorComment": "Copied from source.",
- "fuzzy": true
- },
- {
- "id": "Euro Coins",
- "message": "Euro Coins",
- "translation": "Euro Coins",
- "translatorComment": "Copied from source.",
- "fuzzy": true
- },
- {
- "id": "On this section of the site you can find everything there is to know about the coins of the Eurozone. For the latest news on coin- and design releases, check out the {NewsLinkStart}news{LinkEnd} tab!",
- "message": "On this section of the site you can find everything there is to know about the coins of the Eurozone. For the latest news on coin- and design releases, check out the {NewsLinkStart}news{LinkEnd} tab!",
- "translation": "On this section of the site you can find everything there is to know about the coins of the Eurozone. For the latest news on coin- and design releases, check out the {NewsLinkStart}news{LinkEnd} tab!",
- "translatorComment": "Copied from source.",
- "placeholders": [
- {
- "id": "NewsLinkStart",
- "string": "%[1]s",
- "type": "string",
- "underlyingType": "string",
- "argNum": 1,
- "expr": "newsLinkStart"
- },
- {
- "id": "LinkEnd",
- "string": "%[2]s",
- "type": "string",
- "underlyingType": "string",
- "argNum": 2,
- "expr": "linkEnd"
- }
- ],
- "fuzzy": true
- },
- {
- "id": "Designs",
- "message": "Designs",
- "translation": "Designs",
- "translatorComment": "Copied from source.",
- "fuzzy": true
- },
- {
- "id": "View the 600+ different Euro-coin designs!",
- "message": "View the 600+ different Euro-coin designs!",
- "translation": "View the 600+ different Euro-coin designs!",
- "translatorComment": "Copied from source.",
- "fuzzy": true
- },
- {
- "id": "Mintages",
- "message": "Mintages",
- "translation": "Mintages",
- "translatorComment": "Copied from source.",
- "fuzzy": true
- },
- {
- "id": "View the mintage figures of all the Euro coins!",
- "message": "View the mintage figures of all the Euro coins!",
- "translation": "View the mintage figures of all the Euro coins!",
- "translatorComment": "Copied from source.",
- "fuzzy": true
- },
- {
- "id": "Varieties",
- "message": "Varieties",
- "translation": "Varieties",
- "translatorComment": "Copied from source.",
- "fuzzy": true
- },
- {
- "id": "View all the known Euro varieties!",
- "message": "View all the known Euro varieties!",
- "translation": "View all the known Euro varieties!",
- "translatorComment": "Copied from source.",
- "fuzzy": true
- },
- {
- "id": "If you’re seeing this page, it means that something went wrong on our end that we need to fix. Our team has been notified of this error, and we apologise for the inconvenience.",
- "message": "If you’re seeing this page, it means that something went wrong on our end that we need to fix. Our team has been notified of this error, and we apologise for the inconvenience.",
- "translation": "If you’re seeing this page, it means that something went wrong on our end that we need to fix. Our team has been notified of this error, and we apologise for the inconvenience.",
- "translatorComment": "Copied from source.",
- "fuzzy": true
- },
- {
- "id": "If this issue persists, don’t hesitate to contact @onetruemangoman on Discord or to email us at {ContactEmail}.",
- "message": "If this issue persists, don’t hesitate to contact @onetruemangoman on Discord or to email us at {ContactEmail}.",
- "translation": "If this issue persists, don’t hesitate to contact @onetruemangoman on Discord or to email us at {ContactEmail}.",
- "translatorComment": "Copied from source.",
- "placeholders": [
- {
- "id": "ContactEmail",
- "string": "%[1]s",
- "type": "string",
- "underlyingType": "string",
- "argNum": 1,
- "expr": "contactEmail"
- }
- ],
- "fuzzy": true
- },
- {
- "id": "Select Your Language",
- "message": "Select Your Language",
- "translation": "Select Your Language",
- "translatorComment": "Copied from source.",
- "fuzzy": true
- },
- {
- "id": "Select your preferred language to use on the site.",
- "message": "Select your preferred language to use on the site.",
- "translation": "Select your preferred language to use on the site.",
- "translatorComment": "Copied from source.",
- "fuzzy": true
- },
- {
- "id": "Eurozone Languages",
- "message": "Eurozone Languages",
- "translation": "Eurozone Languages",
- "translatorComment": "Copied from source.",
- "fuzzy": true
- },
- {
- "id": "Other Languages",
- "message": "Other Languages",
- "translation": "Other Languages",
- "translatorComment": "Copied from source.",
- "fuzzy": true
- },
- {
- "id": "Home",
- "message": "Home",
- "translation": "Home",
- "translatorComment": "Copied from source.",
- "fuzzy": true
- },
- {
- "id": "News",
- "message": "News",
- "translation": "News",
- "translatorComment": "Copied from source.",
- "fuzzy": true
- },
- {
- "id": "Coin Collecting",
- "message": "Coin Collecting",
- "translation": "Coin Collecting",
- "translatorComment": "Copied from source.",
- "fuzzy": true
- },
- {
- "id": "Coins",
- "message": "Coins",
- "translation": "Coins",
- "translatorComment": "Copied from source.",
- "fuzzy": true
- },
- {
- "id": "Banknotes",
- "message": "Banknotes",
- "translation": "Banknotes",
- "translatorComment": "Copied from source.",
- "fuzzy": true
- },
- {
- "id": "Jargon",
- "message": "Jargon",
- "translation": "Jargon",
- "translatorComment": "Copied from source.",
- "fuzzy": true
- },
- {
- "id": "Discord",
- "message": "Discord",
- "translation": "Discord",
- "translatorComment": "Copied from source.",
- "fuzzy": true
- },
- {
- "id": "About",
- "message": "About",
- "translation": "About",
- "translatorComment": "Copied from source.",
- "fuzzy": true
- },
- {
- "id": "Language",
- "message": "Language",
- "translation": "Language",
- "translatorComment": "Copied from source.",
- "fuzzy": true
- },
- {
- "id": "The Euro Cash Compendium",
- "message": "The Euro Cash Compendium",
- "translation": "The Euro Cash Compendium",
- "translatorComment": "Copied from source.",
- "fuzzy": true
- },
- {
- "id": "United in",
- "message": "United in",
- "translation": "United in",
- "translatorComment": "Copied from source.",
- "fuzzy": true
- },
- {
- "id": "diversity",
- "message": "diversity",
- "translation": "diversity",
- "translatorComment": "Copied from source.",
- "fuzzy": true
- },
- {
- "id": "cash",
- "message": "cash",
- "translation": "cash",
- "translatorComment": "Copied from source.",
- "fuzzy": true
- },
- {
- "id": "Welcome to the Euro Cash Compendium. This sites aims to be a resource for you to discover everything there is to know about the coins and banknotes of the Euro, a currency that spans 26 countries and 350 million people. We also have dedicated sections of the site for collectors.",
- "message": "Welcome to the Euro Cash Compendium. This sites aims to be a resource for you to discover everything there is to know about the coins and banknotes of the Euro, a currency that spans 26 countries and 350 million people. We also have dedicated sections of the site for collectors.",
- "translation": "Welcome to the Euro Cash Compendium. This sites aims to be a resource for you to discover everything there is to know about the coins and banknotes of the Euro, a currency that spans 26 countries and 350 million people. We also have dedicated sections of the site for collectors.",
- "translatorComment": "Copied from source.",
- "fuzzy": true
- }
- ]
-}
\ No newline at end of file
diff --git a/lib/locales/nl/messages.gotext.json b/lib/locales/nl/messages.gotext.json
deleted file mode 100644
index c458b9d..0000000
--- a/lib/locales/nl/messages.gotext.json
+++ /dev/null
@@ -1,570 +0,0 @@
-{
- "language": "nl",
- "messages": [
- {
- "id": "Andorra",
- "message": "Andorra",
- "translation": "Andorra"
- },
- {
- "id": "Austria",
- "message": "Austria",
- "translation": "Oostenrijk"
- },
- {
- "id": "Belgium",
- "message": "Belgium",
- "translation": "België"
- },
- {
- "id": "Cyprus",
- "message": "Cyprus",
- "translation": "Cyprus"
- },
- {
- "id": "Germany",
- "message": "Germany",
- "translation": "Duitsland"
- },
- {
- "id": "Estonia",
- "message": "Estonia",
- "translation": "Estland"
- },
- {
- "id": "Spain",
- "message": "Spain",
- "translation": "Spanje"
- },
- {
- "id": "Finland",
- "message": "Finland",
- "translation": "Finland"
- },
- {
- "id": "France",
- "message": "France",
- "translation": "Frankrijk"
- },
- {
- "id": "Greece",
- "message": "Greece",
- "translation": "Griekenland"
- },
- {
- "id": "Croatia",
- "message": "Croatia",
- "translation": "Kroatië"
- },
- {
- "id": "Ireland",
- "message": "Ireland",
- "translation": "Ierland"
- },
- {
- "id": "Italy",
- "message": "Italy",
- "translation": "Italië"
- },
- {
- "id": "Lithuania",
- "message": "Lithuania",
- "translation": "Litouwen"
- },
- {
- "id": "Luxembourg",
- "message": "Luxembourg",
- "translation": "Luxemburg"
- },
- {
- "id": "Latvia",
- "message": "Latvia",
- "translation": "Letland"
- },
- {
- "id": "Monaco",
- "message": "Monaco",
- "translation": "Monaco"
- },
- {
- "id": "Malta",
- "message": "Malta",
- "translation": "Malta"
- },
- {
- "id": "Netherlands",
- "message": "Netherlands",
- "translation": "Nederland"
- },
- {
- "id": "Portugal",
- "message": "Portugal",
- "translation": "Portugal"
- },
- {
- "id": "Slovenia",
- "message": "Slovenia",
- "translation": "Slovenië"
- },
- {
- "id": "Slovakia",
- "message": "Slovakia",
- "translation": "Slowakije"
- },
- {
- "id": "San Marino",
- "message": "San Marino",
- "translation": "San Marino"
- },
- {
- "id": "Vatican City",
- "message": "Vatican City",
- "translation": "Vaticaanstad"
- },
- {
- "id": "Page Not Found",
- "message": "Page Not Found",
- "translation": ""
- },
- {
- "id": "The page you were looking for does not exist. If you believe this is a mistake then don’t hesitate to contact @onetruemangoman on Discord or to email us at {ContactEmail}.",
- "message": "The page you were looking for does not exist. If you believe this is a mistake then don’t hesitate to contact @onetruemangoman on Discord or to email us at {ContactEmail}.",
- "translation": "",
- "placeholders": [
- {
- "id": "ContactEmail",
- "string": "%[1]s",
- "type": "string",
- "underlyingType": "string",
- "argNum": 1,
- "expr": "contactEmail"
- }
- ]
- },
- {
- "id": "About Us",
- "message": "About Us",
- "translation": ""
- },
- {
- "id": "Open Source",
- "message": "Open Source",
- "translation": ""
- },
- {
- "id": "This website is an open project, and a collaboration between developers, translators, and researchers. All source code, data, images, and more for the website are open source and can be found {RepoLinkStart}here{LinkEnd}. This site is licensed under the BSD 0-Clause license giving you the full freedom to do whatever you would like with any of the content on this site.",
- "message": "This website is an open project, and a collaboration between developers, translators, and researchers. All source code, data, images, and more for the website are open source and can be found {RepoLinkStart}here{LinkEnd}. This site is licensed under the BSD 0-Clause license giving you the full freedom to do whatever you would like with any of the content on this site.",
- "translation": "",
- "placeholders": [
- {
- "id": "RepoLinkStart",
- "string": "%[1]s",
- "type": "string",
- "underlyingType": "string",
- "argNum": 1,
- "expr": "repoLinkStart"
- },
- {
- "id": "LinkEnd",
- "string": "%[2]s",
- "type": "string",
- "underlyingType": "string",
- "argNum": 2,
- "expr": "linkEnd"
- }
- ]
- },
- {
- "id": "Contact Us",
- "message": "Contact Us",
- "translation": ""
- },
- {
- "id": "While we try to stay as up-to-date as possible and to fact check our information, it is always possible that we get something wrong, lack a translation, or are missing some piece of data you may have. In such a case don’t hesitate to contact us; we’ll try to get the site updated or fixed as soon as possible. You are always free to contribute via a git patch if you are more technically included, but if not you can always send an email to {ContactEmail} or contact ‘@onetruemangoman’ on Discord.",
- "message": "While we try to stay as up-to-date as possible and to fact check our information, it is always possible that we get something wrong, lack a translation, or are missing some piece of data you may have. In such a case don’t hesitate to contact us; we’ll try to get the site updated or fixed as soon as possible. You are always free to contribute via a git patch if you are more technically included, but if not you can always send an email to {ContactEmail} or contact ‘@onetruemangoman’ on Discord.",
- "translation": "",
- "placeholders": [
- {
- "id": "ContactEmail",
- "string": "%[1]s",
- "type": "string",
- "underlyingType": "string",
- "argNum": 1,
- "expr": "contactEmail"
- }
- ]
- },
- {
- "id": "Special Thanks",
- "message": "Special Thanks",
- "translation": ""
- },
- {
- "id": "Development",
- "message": "Development",
- "translation": ""
- },
- {
- "id": "Research",
- "message": "Research",
- "translation": ""
- },
- {
- "id": "Translations",
- "message": "Translations",
- "translation": ""
- },
- {
- "id": "British- \u0026 American English",
- "message": "British- \u0026 American English",
- "translation": ""
- },
- {
- "id": "Icelandic",
- "message": "Icelandic",
- "translation": ""
- },
- {
- "id": "Found a mistake or want to contribute missing information?",
- "message": "Found a mistake or want to contribute missing information?",
- "translation": ""
- },
- {
- "id": "Feel free to contact us!",
- "message": "Feel free to contact us!",
- "translation": ""
- },
- {
- "id": "Dutch Euro Coin Designs",
- "message": "Dutch Euro Coin Designs",
- "translation": ""
- },
- {
- "id": "From the years 1999–2013 all Dutch euro coins featured the portrait of Queen Beatrix of the Netherlands. After her abdication from the throne in 2013 the designs of all denominations were changed to feature the portrait of the new King Willem-Alexander. After her abdication the direction in which the monarchs portrait faced was flipped; a tradition dating back to the earliest coins of the Kingdom of the Netherlands.",
- "message": "From the years 1999–2013 all Dutch euro coins featured the portrait of Queen Beatrix of the Netherlands. After her abdication from the throne in 2013 the designs of all denominations were changed to feature the portrait of the new King Willem-Alexander. After her abdication the direction in which the monarchs portrait faced was flipped; a tradition dating back to the earliest coins of the Kingdom of the Netherlands.",
- "translation": ""
- },
- {
- "id": "Coins featuring both monarchs contain text reading ‘BEATRIX KONINGIN DER NEDERLANDEN’ (‘BEATRIX QUEEN OF THE NETHERLANDS’) and ‘Willem-Alexander Koning der Nederlanden’ (‘Willem-Alexander King of the Netherlands’) respectively.",
- "message": "Coins featuring both monarchs contain text reading ‘BEATRIX KONINGIN DER NEDERLANDEN’ (‘BEATRIX QUEEN OF THE NETHERLANDS’) and ‘Willem-Alexander Koning der Nederlanden’ (‘Willem-Alexander King of the Netherlands’) respectively.",
- "translation": ""
- },
- {
- "id": "The €1 and €2 coins featuring King Willem-Alexander were minted with a much lower relief than most euro coins of the same denomination. As a result it is not uncommon for these coins to appear worn after little use in circulation.",
- "message": "The €1 and €2 coins featuring King Willem-Alexander were minted with a much lower relief than most euro coins of the same denomination. As a result it is not uncommon for these coins to appear worn after little use in circulation.",
- "translation": ""
- },
- {
- "id": "Euro Coin Designs",
- "message": "Euro Coin Designs",
- "translation": ""
- },
- {
- "id": "Here you’ll be able to view all the coin designs for each country in the Eurozone. This section of the site doesn’t include minor varieties such as different mintmarks or errors; those are on the {VarietiesLinkStart}varieties{LinkEnd} page.",
- "message": "Here you’ll be able to view all the coin designs for each country in the Eurozone. This section of the site doesn’t include minor varieties such as different mintmarks or errors; those are on the {VarietiesLinkStart}varieties{LinkEnd} page.",
- "translation": "",
- "placeholders": [
- {
- "id": "VarietiesLinkStart",
- "string": "%[1]s",
- "type": "string",
- "underlyingType": "string",
- "argNum": 1,
- "expr": "varietiesLinkStart"
- },
- {
- "id": "LinkEnd",
- "string": "%[2]s",
- "type": "string",
- "underlyingType": "string",
- "argNum": 2,
- "expr": "linkEnd"
- }
- ]
- },
- {
- "id": "Euro Coin Mintages",
- "message": "Euro Coin Mintages",
- "translation": ""
- },
- {
- "id": "Here you’ll be able to view all the known mintages for all coins. You’ll also be able to filter on country, denomination, etc. If you have any mintage data that’s missing from our site, feel free to contact us.",
- "message": "Here you’ll be able to view all the known mintages for all coins. You’ll also be able to filter on country, denomination, etc. If you have any mintage data that’s missing from our site, feel free to contact us.",
- "translation": ""
- },
- {
- "id": "Additional Notes",
- "message": "Additional Notes",
- "translation": ""
- },
- {
- "id": "Most coins from the years 2003–2016 are listed as NIFC coins while other popular sources such as Numista claim they were minted for circulation. For more information on why others are wrong, {MuntrolpakketLinkStart}click here{LinkEnd}.",
- "message": "Most coins from the years 2003–2016 are listed as NIFC coins while other popular sources such as Numista claim they were minted for circulation. For more information on why others are wrong, {MuntrolpakketLinkStart}click here{LinkEnd}.",
- "translation": "",
- "placeholders": [
- {
- "id": "MuntrolpakketLinkStart",
- "string": "%[1]s",
- "type": "string",
- "underlyingType": "string",
- "argNum": 1,
- "expr": "muntrolpakketLinkStart"
- },
- {
- "id": "LinkEnd",
- "string": "%[2]s",
- "type": "string",
- "underlyingType": "string",
- "argNum": 2,
- "expr": "linkEnd"
- }
- ]
- },
- {
- "id": "In 2003 Numista calculated a total of {217503} coins issued for coin sets per denomination. Our own calculations found only {177003}. Numista also forgot to include the many hundred thousand coins from the coin roll sets that were produced.",
- "message": "In 2003 Numista calculated a total of {217503} coins issued for coin sets per denomination. Our own calculations found only {177003}. Numista also forgot to include the many hundred thousand coins from the coin roll sets that were produced.",
- "translation": "",
- "placeholders": [
- {
- "id": "217503",
- "string": "%[1]d",
- "type": "int",
- "underlyingType": "int",
- "argNum": 1,
- "expr": "217503"
- },
- {
- "id": "177003",
- "string": "%[2]d",
- "type": "int",
- "underlyingType": "int",
- "argNum": 2,
- "expr": "177003"
- }
- ]
- },
- {
- "id": "Country",
- "message": "Country",
- "translation": ""
- },
- {
- "id": "Circulation Coins",
- "message": "Circulation Coins",
- "translation": ""
- },
- {
- "id": "NIFC / BU Sets",
- "message": "NIFC / BU Sets",
- "translation": ""
- },
- {
- "id": "Proof Coins",
- "message": "Proof Coins",
- "translation": ""
- },
- {
- "id": "Filter",
- "message": "Filter",
- "translation": ""
- },
- {
- "id": "Standard Issue Coins",
- "message": "Standard Issue Coins",
- "translation": ""
- },
- {
- "id": "Year",
- "message": "Year",
- "translation": ""
- },
- {
- "id": "Unknown",
- "message": "Unknown",
- "translation": ""
- },
- {
- "id": "Commemorative Coins",
- "message": "Commemorative Coins",
- "translation": ""
- },
- {
- "id": "Commemorated Issue",
- "message": "Commemorated Issue",
- "translation": ""
- },
- {
- "id": "Mintage",
- "message": "Mintage",
- "translation": ""
- },
- {
- "id": "Euro Coins",
- "message": "Euro Coins",
- "translation": ""
- },
- {
- "id": "On this section of the site you can find everything there is to know about the coins of the Eurozone. For the latest news on coin- and design releases, check out the {NewsLinkStart}news{LinkEnd} tab!",
- "message": "On this section of the site you can find everything there is to know about the coins of the Eurozone. For the latest news on coin- and design releases, check out the {NewsLinkStart}news{LinkEnd} tab!",
- "translation": "",
- "placeholders": [
- {
- "id": "NewsLinkStart",
- "string": "%[1]s",
- "type": "string",
- "underlyingType": "string",
- "argNum": 1,
- "expr": "newsLinkStart"
- },
- {
- "id": "LinkEnd",
- "string": "%[2]s",
- "type": "string",
- "underlyingType": "string",
- "argNum": 2,
- "expr": "linkEnd"
- }
- ]
- },
- {
- "id": "Designs",
- "message": "Designs",
- "translation": ""
- },
- {
- "id": "View the 600+ different Euro-coin designs!",
- "message": "View the 600+ different Euro-coin designs!",
- "translation": ""
- },
- {
- "id": "Mintages",
- "message": "Mintages",
- "translation": ""
- },
- {
- "id": "View the mintage figures of all the Euro coins!",
- "message": "View the mintage figures of all the Euro coins!",
- "translation": ""
- },
- {
- "id": "Varieties",
- "message": "Varieties",
- "translation": ""
- },
- {
- "id": "View all the known Euro varieties!",
- "message": "View all the known Euro varieties!",
- "translation": ""
- },
- {
- "id": "If you’re seeing this page, it means that something went wrong on our end that we need to fix. Our team has been notified of this error, and we apologise for the inconvenience.",
- "message": "If you’re seeing this page, it means that something went wrong on our end that we need to fix. Our team has been notified of this error, and we apologise for the inconvenience.",
- "translation": ""
- },
- {
- "id": "If this issue persists, don’t hesitate to contact @onetruemangoman on Discord or to email us at {ContactEmail}.",
- "message": "If this issue persists, don’t hesitate to contact @onetruemangoman on Discord or to email us at {ContactEmail}.",
- "translation": "",
- "placeholders": [
- {
- "id": "ContactEmail",
- "string": "%[1]s",
- "type": "string",
- "underlyingType": "string",
- "argNum": 1,
- "expr": "contactEmail"
- }
- ]
- },
- {
- "id": "Select Your Language",
- "message": "Select Your Language",
- "translation": ""
- },
- {
- "id": "Select your preferred language to use on the site.",
- "message": "Select your preferred language to use on the site.",
- "translation": ""
- },
- {
- "id": "Eurozone Languages",
- "message": "Eurozone Languages",
- "translation": ""
- },
- {
- "id": "Other Languages",
- "message": "Other Languages",
- "translation": ""
- },
- {
- "id": "Home",
- "message": "Home",
- "translation": ""
- },
- {
- "id": "News",
- "message": "News",
- "translation": ""
- },
- {
- "id": "Coin Collecting",
- "message": "Coin Collecting",
- "translation": ""
- },
- {
- "id": "Coins",
- "message": "Coins",
- "translation": ""
- },
- {
- "id": "Banknotes",
- "message": "Banknotes",
- "translation": ""
- },
- {
- "id": "Jargon",
- "message": "Jargon",
- "translation": ""
- },
- {
- "id": "Discord",
- "message": "Discord",
- "translation": ""
- },
- {
- "id": "About",
- "message": "About",
- "translation": ""
- },
- {
- "id": "Language",
- "message": "Language",
- "translation": ""
- },
- {
- "id": "The Euro Cash Compendium",
- "message": "The Euro Cash Compendium",
- "translation": ""
- },
- {
- "id": "United in",
- "message": "United in",
- "translation": ""
- },
- {
- "id": "diversity",
- "message": "diversity",
- "translation": ""
- },
- {
- "id": "cash",
- "message": "cash",
- "translation": ""
- },
- {
- "id": "Welcome to the Euro Cash Compendium. This sites aims to be a resource for you to discover everything there is to know about the coins and banknotes of the Euro, a currency that spans 26 countries and 350 million people. We also have dedicated sections of the site for collectors.",
- "message": "Welcome to the Euro Cash Compendium. This sites aims to be a resource for you to discover everything there is to know about the coins and banknotes of the Euro, a currency that spans 26 countries and 350 million people. We also have dedicated sections of the site for collectors.",
- "translation": ""
- }
- ]
-}
\ No newline at end of file
diff --git a/main.go b/main.go
index 4c9f4ba..7c541d0 100644
--- a/main.go
+++ b/main.go
@@ -1,48 +1,23 @@
package main
import (
- "cmp"
- "context"
- "errors"
"flag"
- "fmt"
- "log"
- "math"
- "net/http"
- "os"
- "path/filepath"
- "slices"
- "strconv"
- "strings"
- "git.thomasvoss.com/euro-cash.eu/lib"
- "git.thomasvoss.com/euro-cash.eu/lib/email"
- "git.thomasvoss.com/euro-cash.eu/lib/mintage"
- "git.thomasvoss.com/euro-cash.eu/template"
- "github.com/a-h/templ"
+ "git.thomasvoss.com/euro-cash.eu/src"
+ "git.thomasvoss.com/euro-cash.eu/src/email"
)
-var emailDisabled bool
-
-var (
- notFound = template.NotFound()
- components = map[string]templ.Component{
- "/": template.Root(),
- "/about": template.About(),
- "/coins": template.Coins(),
- "/coins/designs": template.CoinsDesigns(),
- "/coins/designs/nl": template.CoinsDesignsNl(),
- "/coins/mintages": template.CoinsMintages(),
- "/jargon": template.Jargon(),
- "/language": template.Language(),
- }
-)
+// components = map[string]templ.Component{
+// "/coins": template.Coins(),
+// "/coins/designs": template.CoinsDesigns(),
+// "/coins/designs/nl": template.CoinsDesignsNl(),
+// "/coins/mintages": template.CoinsMintages(),
+// "/jargon": template.Jargon(),
+// }
func main() {
- lib.InitPrinters()
-
port := flag.Int("port", 8080, "port number")
- flag.BoolVar(&emailDisabled, "no-email", false,
+ flag.BoolVar(&email.Config.Disabled, "no-email", false,
"disables email support")
flag.StringVar(&email.Config.Host, "smtp-host", "smtp.migadu.com",
"SMTP server hostname")
@@ -56,146 +31,5 @@ func main() {
"password to authenticate the email client")
flag.Parse()
- fs := http.FileServer(http.Dir("static"))
- mux := http.NewServeMux()
- mux.Handle("GET /designs/", fs)
- mux.Handle("GET /favicon.ico", fs)
- mux.Handle("GET /fonts/", fs)
- mux.Handle("GET /style.css", fs)
- mux.Handle("GET /coins/mintages", i18nHandler(mintageHandler(http.HandlerFunc(finalHandler))))
- mux.Handle("GET /", i18nHandler(http.HandlerFunc(finalHandler)))
- mux.Handle("POST /language", http.HandlerFunc(setUserLanguage))
-
- portStr := ":" + strconv.Itoa(*port)
- log.Println("Listening on", portStr)
- log.Fatal(http.ListenAndServe(portStr, mux))
-}
-
-func finalHandler(w http.ResponseWriter, r *http.Request) {
- /* Strip trailing slash from the URL */
- path := r.URL.Path
- if path != "/" && path[len(path)-1] == '/' {
- path = path[:len(path)-1]
- }
-
- c, ok := components[path]
- if !ok {
- w.WriteHeader(http.StatusNotFound)
- c = notFound
- }
-
- /* When a user clicks on the language button to be taken to the
- language selection page, we need to set a redirect cookie so
- that after selecting a language they are taken back to the
- original page they came from. */
- if path == "/language" {
- http.SetCookie(w, &http.Cookie{
- Name: "redirect",
- Value: cmp.Or(r.Referer(), "/"),
- })
- }
- template.Base(c).Render(r.Context(), w)
-}
-
-func i18nHandler(next http.Handler) http.Handler {
- return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
- var p, pZero lib.Printer
-
- if c, err := r.Cookie("locale"); err == nil {
- p = lib.Printers[strings.ToLower(c.Value)]
- }
-
- ctx := context.WithValue(
- r.Context(), "printer", cmp.Or(p, lib.DefaultPrinter))
-
- if p == pZero {
- http.SetCookie(w, &http.Cookie{
- Name: "redirect",
- Value: r.URL.Path,
- })
- template.Base(template.Language()).Render(ctx, w)
- } else {
- next.ServeHTTP(w, r.WithContext(ctx))
- }
- })
-}
-
-func mintageHandler(next http.Handler) http.Handler {
- return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
- countries := lib.SortedCountries(
- r.Context().Value("printer").(lib.Printer))
-
- code := strings.ToLower(r.FormValue("code"))
- if !slices.ContainsFunc(countries, func(c lib.Country) bool {
- return c.Code == code
- }) {
- code = countries[0].Code
- }
-
- ctype := strings.ToLower(r.FormValue("type"))
- switch ctype {
- case "circ", "nifc", "proof":
- default:
- ctype = "circ"
- }
-
- path := filepath.Join("data", "mintages", code)
- f, err := os.Open(path)
- if err != nil {
- throwError(http.StatusInternalServerError, err, w, r)
- return
- }
- defer f.Close()
-
- data, err := mintage.Parse(f, path)
- if err != nil {
- throwError(http.StatusInternalServerError, err, w, r)
- return
- }
-
- ctx := context.WithValue(r.Context(), "code", code)
- ctx = context.WithValue(ctx, "type", ctype)
- ctx = context.WithValue(ctx, "mintages", data)
- ctx = context.WithValue(ctx, "countries", countries)
- next.ServeHTTP(w, r.WithContext(ctx))
- })
-}
-
-func setUserLanguage(w http.ResponseWriter, r *http.Request) {
- loc := r.FormValue("locale")
- _, ok := lib.Printers[strings.ToLower(loc)]
- if !ok {
- w.WriteHeader(http.StatusUnprocessableEntity)
- fmt.Fprintf(w, "Locale ‘%s’ is invalid or unsupported", loc)
- return
- }
- http.SetCookie(w, &http.Cookie{
- Name: "locale",
- Value: loc,
- MaxAge: math.MaxInt32,
- })
-
- if c, err := r.Cookie("redirect"); errors.Is(err, http.ErrNoCookie) {
- http.Redirect(w, r, "/", http.StatusFound)
- } else {
- http.SetCookie(w, &http.Cookie{
- Name: "redirect",
- MaxAge: -1,
- })
- http.Redirect(w, r, c.Value, http.StatusFound)
- }
-}
-
-func throwError(status int, err error, w http.ResponseWriter, r *http.Request) {
- w.WriteHeader(status)
- if emailDisabled {
- log.Print(err)
- } else {
- go func() {
- if err := email.ServerError(err); err != nil {
- log.Print(err)
- }
- }()
- }
- template.Base(template.Error(status)).Render(r.Context(), w)
+ src.Run(*port)
}
diff --git a/rosetta/bg/messages.gotext.json b/rosetta/bg/messages.gotext.json
new file mode 100644
index 0000000..7445299
--- /dev/null
+++ b/rosetta/bg/messages.gotext.json
@@ -0,0 +1,285 @@
+{
+ "language": "bg",
+ "messages": [
+ {
+ "id": "Andorra",
+ "message": "Andorra",
+ "translation": "Андора"
+ },
+ {
+ "id": "Austria",
+ "message": "Austria",
+ "translation": "Австрия"
+ },
+ {
+ "id": "Belgium",
+ "message": "Belgium",
+ "translation": "Белгия"
+ },
+ {
+ "id": "Cyprus",
+ "message": "Cyprus",
+ "translation": "Кипър"
+ },
+ {
+ "id": "Germany",
+ "message": "Germany",
+ "translation": "Германия"
+ },
+ {
+ "id": "Estonia",
+ "message": "Estonia",
+ "translation": "Естония"
+ },
+ {
+ "id": "Spain",
+ "message": "Spain",
+ "translation": "Испания"
+ },
+ {
+ "id": "Finland",
+ "message": "Finland",
+ "translation": "Финландия"
+ },
+ {
+ "id": "France",
+ "message": "France",
+ "translation": "Франция"
+ },
+ {
+ "id": "Greece",
+ "message": "Greece",
+ "translation": "Гърция"
+ },
+ {
+ "id": "Croatia",
+ "message": "Croatia",
+ "translation": "Хърватия"
+ },
+ {
+ "id": "Ireland",
+ "message": "Ireland",
+ "translation": "Ирландия"
+ },
+ {
+ "id": "Italy",
+ "message": "Italy",
+ "translation": "Италия"
+ },
+ {
+ "id": "Lithuania",
+ "message": "Lithuania",
+ "translation": "Литва"
+ },
+ {
+ "id": "Luxembourg",
+ "message": "Luxembourg",
+ "translation": "Люксембург"
+ },
+ {
+ "id": "Latvia",
+ "message": "Latvia",
+ "translation": "Латвия"
+ },
+ {
+ "id": "Monaco",
+ "message": "Monaco",
+ "translation": "Монако"
+ },
+ {
+ "id": "Malta",
+ "message": "Malta",
+ "translation": "Малта"
+ },
+ {
+ "id": "Netherlands",
+ "message": "Netherlands",
+ "translation": "Нидерландия"
+ },
+ {
+ "id": "Portugal",
+ "message": "Portugal",
+ "translation": "Португалия"
+ },
+ {
+ "id": "Slovenia",
+ "message": "Slovenia",
+ "translation": "Словения"
+ },
+ {
+ "id": "Slovakia",
+ "message": "Slovakia",
+ "translation": "Словакия"
+ },
+ {
+ "id": "San Marino",
+ "message": "San Marino",
+ "translation": "Сан Марино"
+ },
+ {
+ "id": "Vatican City",
+ "message": "Vatican City",
+ "translation": "Ватикана"
+ },
+ {
+ "id": "Page Not Found",
+ "message": "Page Not Found",
+ "translation": ""
+ },
+ {
+ "id": "The page you were looking for does not exist. If you believe this is a mistake then don’t hesitate to contact @onetruemangoman on Discord or email us at %s.",
+ "message": "The page you were looking for does not exist. If you believe this is a mistake then don’t hesitate to contact @onetruemangoman on Discord or email us at %s.",
+ "translation": ""
+ },
+ {
+ "id": "\u003ca href=\"mailto:mail@euro-cash.eu\"\u003email@euro-cash.eu\u003c/a\u003e",
+ "message": "\u003ca href=\"mailto:mail@euro-cash.eu\"\u003email@euro-cash.eu\u003c/a\u003e",
+ "translation": ""
+ },
+ {
+ "id": "About Us",
+ "message": "About Us",
+ "translation": ""
+ },
+ {
+ "id": "Open Source",
+ "message": "Open Source",
+ "translation": ""
+ },
+ {
+ "id": "This website is an open project, and a collaboration between developers, translators, and researchers. All source code, data, images, and more for the website are open source and can be found %shere%s. This site is licensed under the BSD 0-Clause license giving you the full freedom to do whatever you would like with anyof the content on this site.",
+ "message": "This website is an open project, and a collaboration between developers, translators, and researchers. All source code, data, images, and more for the website are open source and can be found %shere%s. This site is licensed under the BSD 0-Clause license giving you the full freedom to do whatever you would like with anyof the content on this site.",
+ "translation": ""
+ },
+ {
+ "id": "\u003ca href=\"https://git.thomasvoss.com/www.euro-cash.eu\" target=\"_blank\"\u003e",
+ "message": "\u003ca href=\"https://git.thomasvoss.com/www.euro-cash.eu\" target=\"_blank\"\u003e",
+ "translation": ""
+ },
+ {
+ "id": "\u003c/a\u003e",
+ "message": "\u003c/a\u003e",
+ "translation": ""
+ },
+ {
+ "id": "Contact Us",
+ "message": "Contact Us",
+ "translation": ""
+ },
+ {
+ "id": "While we try to stay as up-to-date as possible and to fact check our information, it is always possible that we get something wrong, lack a translation, or are missing some piece of data you may have. In such a case don’t hesitate to contact us; we’ll try to get the site updated or fixed as soon as possible. You are always free to contribute via a git patch if you are more technically included, but if not you can always send an email to %s or contact ‘@onetruemangoman’ on Discord.",
+ "message": "While we try to stay as up-to-date as possible and to fact check our information, it is always possible that we get something wrong, lack a translation, or are missing some piece of data you may have. In such a case don’t hesitate to contact us; we’ll try to get the site updated or fixed as soon as possible. You are always free to contribute via a git patch if you are more technically included, but if not you can always send an email to %s or contact ‘@onetruemangoman’ on Discord.",
+ "translation": ""
+ },
+ {
+ "id": "Special Thanks",
+ "message": "Special Thanks",
+ "translation": ""
+ },
+ {
+ "id": "Development",
+ "message": "Development",
+ "translation": ""
+ },
+ {
+ "id": "Research",
+ "message": "Research",
+ "translation": ""
+ },
+ {
+ "id": "Translations",
+ "message": "Translations",
+ "translation": ""
+ },
+ {
+ "id": "British- \u0026 American English",
+ "message": "British- \u0026 American English",
+ "translation": ""
+ },
+ {
+ "id": "Icelandic",
+ "message": "Icelandic",
+ "translation": ""
+ },
+ {
+ "id": "Found a mistake or want to contribute missing information?",
+ "message": "Found a mistake or want to contribute missing information?",
+ "translation": ""
+ },
+ {
+ "id": "Feel free to contact us!",
+ "message": "Feel free to contact us!",
+ "translation": ""
+ },
+ {
+ "id": "The Euro Cash Compendium",
+ "message": "The Euro Cash Compendium",
+ "translation": ""
+ },
+ {
+ "id": "United in",
+ "message": "United in",
+ "translation": ""
+ },
+ {
+ "id": "diversity",
+ "message": "diversity",
+ "translation": ""
+ },
+ {
+ "id": "cash",
+ "message": "cash",
+ "translation": ""
+ },
+ {
+ "id": "Welcome to the Euro Cash Compendium. This sites aims to be a resource for you to discover everything there is to know about the coins and banknotes of the Euro, a currency that spans 26 countries and 350 million people. We also have dedicated sections of the site for collectors.",
+ "message": "Welcome to the Euro Cash Compendium. This sites aims to be a resource for you to discover everything there is to know about the coins and banknotes of the Euro, a currency that spans 26 countries and 350 million people. We also have dedicated sections of the site for collectors.",
+ "translation": ""
+ },
+ {
+ "id": "Home",
+ "message": "Home",
+ "translation": ""
+ },
+ {
+ "id": "News",
+ "message": "News",
+ "translation": ""
+ },
+ {
+ "id": "Coin Collecting",
+ "message": "Coin Collecting",
+ "translation": ""
+ },
+ {
+ "id": "Coins",
+ "message": "Coins",
+ "translation": ""
+ },
+ {
+ "id": "Banknotes",
+ "message": "Banknotes",
+ "translation": ""
+ },
+ {
+ "id": "Jargon",
+ "message": "Jargon",
+ "translation": ""
+ },
+ {
+ "id": "Discord",
+ "message": "Discord",
+ "translation": ""
+ },
+ {
+ "id": "About",
+ "message": "About",
+ "translation": ""
+ },
+ {
+ "id": "Language",
+ "message": "Language",
+ "translation": ""
+ }
+ ]
+}
\ No newline at end of file
diff --git a/rosetta/el/messages.gotext.json b/rosetta/el/messages.gotext.json
new file mode 100644
index 0000000..7cadc9b
--- /dev/null
+++ b/rosetta/el/messages.gotext.json
@@ -0,0 +1,285 @@
+{
+ "language": "el",
+ "messages": [
+ {
+ "id": "Andorra",
+ "message": "Andorra",
+ "translation": "Ανδόρα"
+ },
+ {
+ "id": "Austria",
+ "message": "Austria",
+ "translation": "Αυστρία"
+ },
+ {
+ "id": "Belgium",
+ "message": "Belgium",
+ "translation": "Βέλγιο"
+ },
+ {
+ "id": "Cyprus",
+ "message": "Cyprus",
+ "translation": "Κύπρος"
+ },
+ {
+ "id": "Germany",
+ "message": "Germany",
+ "translation": "Γερμανία"
+ },
+ {
+ "id": "Estonia",
+ "message": "Estonia",
+ "translation": "Εσθονία"
+ },
+ {
+ "id": "Spain",
+ "message": "Spain",
+ "translation": "Ισπανία"
+ },
+ {
+ "id": "Finland",
+ "message": "Finland",
+ "translation": "Φινλανδία"
+ },
+ {
+ "id": "France",
+ "message": "France",
+ "translation": "Γαλλία"
+ },
+ {
+ "id": "Greece",
+ "message": "Greece",
+ "translation": "Ελλάδα"
+ },
+ {
+ "id": "Croatia",
+ "message": "Croatia",
+ "translation": "Κροατία"
+ },
+ {
+ "id": "Ireland",
+ "message": "Ireland",
+ "translation": "Ιρλανδία"
+ },
+ {
+ "id": "Italy",
+ "message": "Italy",
+ "translation": "Ιταλία"
+ },
+ {
+ "id": "Lithuania",
+ "message": "Lithuania",
+ "translation": "Λιθουανία"
+ },
+ {
+ "id": "Luxembourg",
+ "message": "Luxembourg",
+ "translation": "Λουξεμβούργο"
+ },
+ {
+ "id": "Latvia",
+ "message": "Latvia",
+ "translation": "Λετονία"
+ },
+ {
+ "id": "Monaco",
+ "message": "Monaco",
+ "translation": "Μονακό"
+ },
+ {
+ "id": "Malta",
+ "message": "Malta",
+ "translation": "Μάλτα"
+ },
+ {
+ "id": "Netherlands",
+ "message": "Netherlands",
+ "translation": "Ολλανδία"
+ },
+ {
+ "id": "Portugal",
+ "message": "Portugal",
+ "translation": "Πορτογαλία"
+ },
+ {
+ "id": "Slovenia",
+ "message": "Slovenia",
+ "translation": "Σλοβενία"
+ },
+ {
+ "id": "Slovakia",
+ "message": "Slovakia",
+ "translation": "Σλοβακία"
+ },
+ {
+ "id": "San Marino",
+ "message": "San Marino",
+ "translation": "Σαν Μαρίνο"
+ },
+ {
+ "id": "Vatican City",
+ "message": "Vatican City",
+ "translation": "Βατικανό"
+ },
+ {
+ "id": "Page Not Found",
+ "message": "Page Not Found",
+ "translation": ""
+ },
+ {
+ "id": "The page you were looking for does not exist. If you believe this is a mistake then don’t hesitate to contact @onetruemangoman on Discord or email us at %s.",
+ "message": "The page you were looking for does not exist. If you believe this is a mistake then don’t hesitate to contact @onetruemangoman on Discord or email us at %s.",
+ "translation": "Η σελίδα που ψάχνατε δεν υπάρχει. Εάν πιστεύετε ότι πρόκειται για λάθος, μην διστάσετε να επικοινωνήσετε με το @onetruemangoman στο Discord ή να μας στείλετε email στο %s."
+ },
+ {
+ "id": "\u003ca href=\"mailto:mail@euro-cash.eu\"\u003email@euro-cash.eu\u003c/a\u003e",
+ "message": "\u003ca href=\"mailto:mail@euro-cash.eu\"\u003email@euro-cash.eu\u003c/a\u003e",
+ "translation": ""
+ },
+ {
+ "id": "About Us",
+ "message": "About Us",
+ "translation": ""
+ },
+ {
+ "id": "Open Source",
+ "message": "Open Source",
+ "translation": ""
+ },
+ {
+ "id": "This website is an open project, and a collaboration between developers, translators, and researchers. All source code, data, images, and more for the website are open source and can be found %shere%s. This site is licensed under the BSD 0-Clause license giving you the full freedom to do whatever you would like with anyof the content on this site.",
+ "message": "This website is an open project, and a collaboration between developers, translators, and researchers. All source code, data, images, and more for the website are open source and can be found %shere%s. This site is licensed under the BSD 0-Clause license giving you the full freedom to do whatever you would like with anyof the content on this site.",
+ "translation": ""
+ },
+ {
+ "id": "\u003ca href=\"https://git.thomasvoss.com/www.euro-cash.eu\" target=\"_blank\"\u003e",
+ "message": "\u003ca href=\"https://git.thomasvoss.com/www.euro-cash.eu\" target=\"_blank\"\u003e",
+ "translation": ""
+ },
+ {
+ "id": "\u003c/a\u003e",
+ "message": "\u003c/a\u003e",
+ "translation": ""
+ },
+ {
+ "id": "Contact Us",
+ "message": "Contact Us",
+ "translation": ""
+ },
+ {
+ "id": "While we try to stay as up-to-date as possible and to fact check our information, it is always possible that we get something wrong, lack a translation, or are missing some piece of data you may have. In such a case don’t hesitate to contact us; we’ll try to get the site updated or fixed as soon as possible. You are always free to contribute via a git patch if you are more technically included, but if not you can always send an email to %s or contact ‘@onetruemangoman’ on Discord.",
+ "message": "While we try to stay as up-to-date as possible and to fact check our information, it is always possible that we get something wrong, lack a translation, or are missing some piece of data you may have. In such a case don’t hesitate to contact us; we’ll try to get the site updated or fixed as soon as possible. You are always free to contribute via a git patch if you are more technically included, but if not you can always send an email to %s or contact ‘@onetruemangoman’ on Discord.",
+ "translation": ""
+ },
+ {
+ "id": "Special Thanks",
+ "message": "Special Thanks",
+ "translation": ""
+ },
+ {
+ "id": "Development",
+ "message": "Development",
+ "translation": ""
+ },
+ {
+ "id": "Research",
+ "message": "Research",
+ "translation": ""
+ },
+ {
+ "id": "Translations",
+ "message": "Translations",
+ "translation": ""
+ },
+ {
+ "id": "British- \u0026 American English",
+ "message": "British- \u0026 American English",
+ "translation": ""
+ },
+ {
+ "id": "Icelandic",
+ "message": "Icelandic",
+ "translation": ""
+ },
+ {
+ "id": "Found a mistake or want to contribute missing information?",
+ "message": "Found a mistake or want to contribute missing information?",
+ "translation": ""
+ },
+ {
+ "id": "Feel free to contact us!",
+ "message": "Feel free to contact us!",
+ "translation": ""
+ },
+ {
+ "id": "The Euro Cash Compendium",
+ "message": "The Euro Cash Compendium",
+ "translation": ""
+ },
+ {
+ "id": "United in",
+ "message": "United in",
+ "translation": ""
+ },
+ {
+ "id": "diversity",
+ "message": "diversity",
+ "translation": ""
+ },
+ {
+ "id": "cash",
+ "message": "cash",
+ "translation": ""
+ },
+ {
+ "id": "Welcome to the Euro Cash Compendium. This sites aims to be a resource for you to discover everything there is to know about the coins and banknotes of the Euro, a currency that spans 26 countries and 350 million people. We also have dedicated sections of the site for collectors.",
+ "message": "Welcome to the Euro Cash Compendium. This sites aims to be a resource for you to discover everything there is to know about the coins and banknotes of the Euro, a currency that spans 26 countries and 350 million people. We also have dedicated sections of the site for collectors.",
+ "translation": ""
+ },
+ {
+ "id": "Home",
+ "message": "Home",
+ "translation": ""
+ },
+ {
+ "id": "News",
+ "message": "News",
+ "translation": ""
+ },
+ {
+ "id": "Coin Collecting",
+ "message": "Coin Collecting",
+ "translation": ""
+ },
+ {
+ "id": "Coins",
+ "message": "Coins",
+ "translation": ""
+ },
+ {
+ "id": "Banknotes",
+ "message": "Banknotes",
+ "translation": ""
+ },
+ {
+ "id": "Jargon",
+ "message": "Jargon",
+ "translation": ""
+ },
+ {
+ "id": "Discord",
+ "message": "Discord",
+ "translation": ""
+ },
+ {
+ "id": "About",
+ "message": "About",
+ "translation": ""
+ },
+ {
+ "id": "Language",
+ "message": "Language",
+ "translation": ""
+ }
+ ]
+}
\ No newline at end of file
diff --git a/rosetta/en/messages.gotext.json b/rosetta/en/messages.gotext.json
new file mode 100644
index 0000000..a9fe4d2
--- /dev/null
+++ b/rosetta/en/messages.gotext.json
@@ -0,0 +1,397 @@
+{
+ "language": "en",
+ "messages": [
+ {
+ "id": "Andorra",
+ "message": "Andorra",
+ "translation": "Andorra",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "Austria",
+ "message": "Austria",
+ "translation": "Austria",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "Belgium",
+ "message": "Belgium",
+ "translation": "Belgium",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "Cyprus",
+ "message": "Cyprus",
+ "translation": "Cyprus",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "Germany",
+ "message": "Germany",
+ "translation": "Germany",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "Estonia",
+ "message": "Estonia",
+ "translation": "Estonia",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "Spain",
+ "message": "Spain",
+ "translation": "Spain",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "Finland",
+ "message": "Finland",
+ "translation": "Finland",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "France",
+ "message": "France",
+ "translation": "France",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "Greece",
+ "message": "Greece",
+ "translation": "Greece",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "Croatia",
+ "message": "Croatia",
+ "translation": "Croatia",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "Ireland",
+ "message": "Ireland",
+ "translation": "Ireland",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "Italy",
+ "message": "Italy",
+ "translation": "Italy",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "Lithuania",
+ "message": "Lithuania",
+ "translation": "Lithuania",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "Luxembourg",
+ "message": "Luxembourg",
+ "translation": "Luxembourg",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "Latvia",
+ "message": "Latvia",
+ "translation": "Latvia",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "Monaco",
+ "message": "Monaco",
+ "translation": "Monaco",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "Malta",
+ "message": "Malta",
+ "translation": "Malta",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "Netherlands",
+ "message": "Netherlands",
+ "translation": "Netherlands",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "Portugal",
+ "message": "Portugal",
+ "translation": "Portugal",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "Slovenia",
+ "message": "Slovenia",
+ "translation": "Slovenia",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "Slovakia",
+ "message": "Slovakia",
+ "translation": "Slovakia",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "San Marino",
+ "message": "San Marino",
+ "translation": "San Marino",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "Vatican City",
+ "message": "Vatican City",
+ "translation": "Vatican City",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "Page Not Found",
+ "message": "Page Not Found",
+ "translation": "Page Not Found",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "The page you were looking for does not exist. If you believe this is a mistake then don’t hesitate to contact @onetruemangoman on Discord or email us at %s.",
+ "message": "The page you were looking for does not exist. If you believe this is a mistake then don’t hesitate to contact @onetruemangoman on Discord or email us at %s.",
+ "translation": "The page you were looking for does not exist. If you believe this is a mistake then don’t hesitate to contact @onetruemangoman on Discord or email us at %s.",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "\u003ca href=\"mailto:mail@euro-cash.eu\"\u003email@euro-cash.eu\u003c/a\u003e",
+ "message": "\u003ca href=\"mailto:mail@euro-cash.eu\"\u003email@euro-cash.eu\u003c/a\u003e",
+ "translation": "\u003ca href=\"mailto:mail@euro-cash.eu\"\u003email@euro-cash.eu\u003c/a\u003e",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "About Us",
+ "message": "About Us",
+ "translation": "About Us",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "Open Source",
+ "message": "Open Source",
+ "translation": "Open Source",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "This website is an open project, and a collaboration between developers, translators, and researchers. All source code, data, images, and more for the website are open source and can be found %shere%s. This site is licensed under the BSD 0-Clause license giving you the full freedom to do whatever you would like with anyof the content on this site.",
+ "message": "This website is an open project, and a collaboration between developers, translators, and researchers. All source code, data, images, and more for the website are open source and can be found %shere%s. This site is licensed under the BSD 0-Clause license giving you the full freedom to do whatever you would like with anyof the content on this site.",
+ "translation": "This website is an open project, and a collaboration between developers, translators, and researchers. All source code, data, images, and more for the website are open source and can be found %shere%s. This site is licensed under the BSD 0-Clause license giving you the full freedom to do whatever you would like with anyof the content on this site.",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "\u003ca href=\"https://git.thomasvoss.com/www.euro-cash.eu\" target=\"_blank\"\u003e",
+ "message": "\u003ca href=\"https://git.thomasvoss.com/www.euro-cash.eu\" target=\"_blank\"\u003e",
+ "translation": "\u003ca href=\"https://git.thomasvoss.com/www.euro-cash.eu\" target=\"_blank\"\u003e",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "\u003c/a\u003e",
+ "message": "\u003c/a\u003e",
+ "translation": "\u003c/a\u003e",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "Contact Us",
+ "message": "Contact Us",
+ "translation": "Contact Us",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "While we try to stay as up-to-date as possible and to fact check our information, it is always possible that we get something wrong, lack a translation, or are missing some piece of data you may have. In such a case don’t hesitate to contact us; we’ll try to get the site updated or fixed as soon as possible. You are always free to contribute via a git patch if you are more technically included, but if not you can always send an email to %s or contact ‘@onetruemangoman’ on Discord.",
+ "message": "While we try to stay as up-to-date as possible and to fact check our information, it is always possible that we get something wrong, lack a translation, or are missing some piece of data you may have. In such a case don’t hesitate to contact us; we’ll try to get the site updated or fixed as soon as possible. You are always free to contribute via a git patch if you are more technically included, but if not you can always send an email to %s or contact ‘@onetruemangoman’ on Discord.",
+ "translation": "While we try to stay as up-to-date as possible and to fact check our information, it is always possible that we get something wrong, lack a translation, or are missing some piece of data you may have. In such a case don’t hesitate to contact us; we’ll try to get the site updated or fixed as soon as possible. You are always free to contribute via a git patch if you are more technically included, but if not you can always send an email to %s or contact ‘@onetruemangoman’ on Discord.",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "Special Thanks",
+ "message": "Special Thanks",
+ "translation": "Special Thanks",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "Development",
+ "message": "Development",
+ "translation": "Development",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "Research",
+ "message": "Research",
+ "translation": "Research",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "Translations",
+ "message": "Translations",
+ "translation": "Translations",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "British- \u0026 American English",
+ "message": "British- \u0026 American English",
+ "translation": "British- \u0026 American English",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "Icelandic",
+ "message": "Icelandic",
+ "translation": "Icelandic",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "Found a mistake or want to contribute missing information?",
+ "message": "Found a mistake or want to contribute missing information?",
+ "translation": "Found a mistake or want to contribute missing information?",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "Feel free to contact us!",
+ "message": "Feel free to contact us!",
+ "translation": "Feel free to contact us!",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "The Euro Cash Compendium",
+ "message": "The Euro Cash Compendium",
+ "translation": "The Euro Cash Compendium",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "United in",
+ "message": "United in",
+ "translation": "United in",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "diversity",
+ "message": "diversity",
+ "translation": "diversity",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "cash",
+ "message": "cash",
+ "translation": "cash",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "Welcome to the Euro Cash Compendium. This sites aims to be a resource for you to discover everything there is to know about the coins and banknotes of the Euro, a currency that spans 26 countries and 350 million people. We also have dedicated sections of the site for collectors.",
+ "message": "Welcome to the Euro Cash Compendium. This sites aims to be a resource for you to discover everything there is to know about the coins and banknotes of the Euro, a currency that spans 26 countries and 350 million people. We also have dedicated sections of the site for collectors.",
+ "translation": "Welcome to the Euro Cash Compendium. This sites aims to be a resource for you to discover everything there is to know about the coins and banknotes of the Euro, a currency that spans 26 countries and 350 million people. We also have dedicated sections of the site for collectors.",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "Home",
+ "message": "Home",
+ "translation": "Home",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "News",
+ "message": "News",
+ "translation": "News",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "Coin Collecting",
+ "message": "Coin Collecting",
+ "translation": "Coin Collecting",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "Coins",
+ "message": "Coins",
+ "translation": "Coins",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "Banknotes",
+ "message": "Banknotes",
+ "translation": "Banknotes",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "Jargon",
+ "message": "Jargon",
+ "translation": "Jargon",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "Discord",
+ "message": "Discord",
+ "translation": "Discord",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "About",
+ "message": "About",
+ "translation": "About",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "Language",
+ "message": "Language",
+ "translation": "Language",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ }
+ ]
+}
\ No newline at end of file
diff --git a/rosetta/nl/messages.gotext.json b/rosetta/nl/messages.gotext.json
new file mode 100644
index 0000000..65016fb
--- /dev/null
+++ b/rosetta/nl/messages.gotext.json
@@ -0,0 +1,285 @@
+{
+ "language": "nl",
+ "messages": [
+ {
+ "id": "Andorra",
+ "message": "Andorra",
+ "translation": "Andorra"
+ },
+ {
+ "id": "Austria",
+ "message": "Austria",
+ "translation": "Oostenrijk"
+ },
+ {
+ "id": "Belgium",
+ "message": "Belgium",
+ "translation": "België"
+ },
+ {
+ "id": "Cyprus",
+ "message": "Cyprus",
+ "translation": "Cyprus"
+ },
+ {
+ "id": "Germany",
+ "message": "Germany",
+ "translation": "Duitsland"
+ },
+ {
+ "id": "Estonia",
+ "message": "Estonia",
+ "translation": "Estland"
+ },
+ {
+ "id": "Spain",
+ "message": "Spain",
+ "translation": "Spanje"
+ },
+ {
+ "id": "Finland",
+ "message": "Finland",
+ "translation": "Finland"
+ },
+ {
+ "id": "France",
+ "message": "France",
+ "translation": "Frankrijk"
+ },
+ {
+ "id": "Greece",
+ "message": "Greece",
+ "translation": "Griekenland"
+ },
+ {
+ "id": "Croatia",
+ "message": "Croatia",
+ "translation": "Kroatië"
+ },
+ {
+ "id": "Ireland",
+ "message": "Ireland",
+ "translation": "Ierland"
+ },
+ {
+ "id": "Italy",
+ "message": "Italy",
+ "translation": "Italië"
+ },
+ {
+ "id": "Lithuania",
+ "message": "Lithuania",
+ "translation": "Litouwen"
+ },
+ {
+ "id": "Luxembourg",
+ "message": "Luxembourg",
+ "translation": "Luxemburg"
+ },
+ {
+ "id": "Latvia",
+ "message": "Latvia",
+ "translation": "Letland"
+ },
+ {
+ "id": "Monaco",
+ "message": "Monaco",
+ "translation": "Monaco"
+ },
+ {
+ "id": "Malta",
+ "message": "Malta",
+ "translation": "Malta"
+ },
+ {
+ "id": "Netherlands",
+ "message": "Netherlands",
+ "translation": "Nederland"
+ },
+ {
+ "id": "Portugal",
+ "message": "Portugal",
+ "translation": "Portugal"
+ },
+ {
+ "id": "Slovenia",
+ "message": "Slovenia",
+ "translation": "Slovenië"
+ },
+ {
+ "id": "Slovakia",
+ "message": "Slovakia",
+ "translation": "Slowakije"
+ },
+ {
+ "id": "San Marino",
+ "message": "San Marino",
+ "translation": "San Marino"
+ },
+ {
+ "id": "Vatican City",
+ "message": "Vatican City",
+ "translation": "Vaticaanstad"
+ },
+ {
+ "id": "Page Not Found",
+ "message": "Page Not Found",
+ "translation": ""
+ },
+ {
+ "id": "The page you were looking for does not exist. If you believe this is a mistake then don’t hesitate to contact @onetruemangoman on Discord or email us at %s.",
+ "message": "The page you were looking for does not exist. If you believe this is a mistake then don’t hesitate to contact @onetruemangoman on Discord or email us at %s.",
+ "translation": ""
+ },
+ {
+ "id": "\u003ca href=\"mailto:mail@euro-cash.eu\"\u003email@euro-cash.eu\u003c/a\u003e",
+ "message": "\u003ca href=\"mailto:mail@euro-cash.eu\"\u003email@euro-cash.eu\u003c/a\u003e",
+ "translation": ""
+ },
+ {
+ "id": "About Us",
+ "message": "About Us",
+ "translation": ""
+ },
+ {
+ "id": "Open Source",
+ "message": "Open Source",
+ "translation": ""
+ },
+ {
+ "id": "This website is an open project, and a collaboration between developers, translators, and researchers. All source code, data, images, and more for the website are open source and can be found %shere%s. This site is licensed under the BSD 0-Clause license giving you the full freedom to do whatever you would like with anyof the content on this site.",
+ "message": "This website is an open project, and a collaboration between developers, translators, and researchers. All source code, data, images, and more for the website are open source and can be found %shere%s. This site is licensed under the BSD 0-Clause license giving you the full freedom to do whatever you would like with anyof the content on this site.",
+ "translation": ""
+ },
+ {
+ "id": "\u003ca href=\"https://git.thomasvoss.com/www.euro-cash.eu\" target=\"_blank\"\u003e",
+ "message": "\u003ca href=\"https://git.thomasvoss.com/www.euro-cash.eu\" target=\"_blank\"\u003e",
+ "translation": ""
+ },
+ {
+ "id": "\u003c/a\u003e",
+ "message": "\u003c/a\u003e",
+ "translation": ""
+ },
+ {
+ "id": "Contact Us",
+ "message": "Contact Us",
+ "translation": ""
+ },
+ {
+ "id": "While we try to stay as up-to-date as possible and to fact check our information, it is always possible that we get something wrong, lack a translation, or are missing some piece of data you may have. In such a case don’t hesitate to contact us; we’ll try to get the site updated or fixed as soon as possible. You are always free to contribute via a git patch if you are more technically included, but if not you can always send an email to %s or contact ‘@onetruemangoman’ on Discord.",
+ "message": "While we try to stay as up-to-date as possible and to fact check our information, it is always possible that we get something wrong, lack a translation, or are missing some piece of data you may have. In such a case don’t hesitate to contact us; we’ll try to get the site updated or fixed as soon as possible. You are always free to contribute via a git patch if you are more technically included, but if not you can always send an email to %s or contact ‘@onetruemangoman’ on Discord.",
+ "translation": ""
+ },
+ {
+ "id": "Special Thanks",
+ "message": "Special Thanks",
+ "translation": ""
+ },
+ {
+ "id": "Development",
+ "message": "Development",
+ "translation": ""
+ },
+ {
+ "id": "Research",
+ "message": "Research",
+ "translation": ""
+ },
+ {
+ "id": "Translations",
+ "message": "Translations",
+ "translation": ""
+ },
+ {
+ "id": "British- \u0026 American English",
+ "message": "British- \u0026 American English",
+ "translation": ""
+ },
+ {
+ "id": "Icelandic",
+ "message": "Icelandic",
+ "translation": ""
+ },
+ {
+ "id": "Found a mistake or want to contribute missing information?",
+ "message": "Found a mistake or want to contribute missing information?",
+ "translation": ""
+ },
+ {
+ "id": "Feel free to contact us!",
+ "message": "Feel free to contact us!",
+ "translation": ""
+ },
+ {
+ "id": "The Euro Cash Compendium",
+ "message": "The Euro Cash Compendium",
+ "translation": ""
+ },
+ {
+ "id": "United in",
+ "message": "United in",
+ "translation": ""
+ },
+ {
+ "id": "diversity",
+ "message": "diversity",
+ "translation": ""
+ },
+ {
+ "id": "cash",
+ "message": "cash",
+ "translation": ""
+ },
+ {
+ "id": "Welcome to the Euro Cash Compendium. This sites aims to be a resource for you to discover everything there is to know about the coins and banknotes of the Euro, a currency that spans 26 countries and 350 million people. We also have dedicated sections of the site for collectors.",
+ "message": "Welcome to the Euro Cash Compendium. This sites aims to be a resource for you to discover everything there is to know about the coins and banknotes of the Euro, a currency that spans 26 countries and 350 million people. We also have dedicated sections of the site for collectors.",
+ "translation": ""
+ },
+ {
+ "id": "Home",
+ "message": "Home",
+ "translation": ""
+ },
+ {
+ "id": "News",
+ "message": "News",
+ "translation": ""
+ },
+ {
+ "id": "Coin Collecting",
+ "message": "Coin Collecting",
+ "translation": ""
+ },
+ {
+ "id": "Coins",
+ "message": "Coins",
+ "translation": ""
+ },
+ {
+ "id": "Banknotes",
+ "message": "Banknotes",
+ "translation": ""
+ },
+ {
+ "id": "Jargon",
+ "message": "Jargon",
+ "translation": ""
+ },
+ {
+ "id": "Discord",
+ "message": "Discord",
+ "translation": ""
+ },
+ {
+ "id": "About",
+ "message": "About",
+ "translation": ""
+ },
+ {
+ "id": "Language",
+ "message": "Language",
+ "translation": ""
+ }
+ ]
+}
\ No newline at end of file
diff --git a/src/countries.go b/src/countries.go
new file mode 100644
index 0000000..de1c919
--- /dev/null
+++ b/src/countries.go
@@ -0,0 +1,46 @@
+package src
+
+import (
+ "slices"
+
+ "golang.org/x/text/collate"
+ "golang.org/x/text/language"
+)
+
+type country struct {
+ code, name string
+}
+
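+/* sortedCountries returns the euro-issuing countries with their names
+   translated through p and sorted according to the collation rules of
+   the user’s locale. */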
+func sortedCountries(p Printer) []country {
+ xs := []country{
+ {code: "ad", name: p.T("Andorra")},
+ {code: "at", name: p.T("Austria")},
+ {code: "be", name: p.T("Belgium")},
+ {code: "cy", name: p.T("Cyprus")},
+ {code: "de", name: p.T("Germany")},
+ {code: "ee", name: p.T("Estonia")},
+ {code: "es", name: p.T("Spain")},
+ {code: "fi", name: p.T("Finland")},
+ {code: "fr", name: p.T("France")},
+ {code: "gr", name: p.T("Greece")},
+ {code: "hr", name: p.T("Croatia")},
+ {code: "ie", name: p.T("Ireland")},
+ {code: "it", name: p.T("Italy")},
+ {code: "lt", name: p.T("Lithuania")},
+ {code: "lu", name: p.T("Luxembourg")},
+ {code: "lv", name: p.T("Latvia")},
+ {code: "mc", name: p.T("Monaco")},
+ {code: "mt", name: p.T("Malta")},
+ {code: "nl", name: p.T("Netherlands")},
+ {code: "pt", name: p.T("Portugal")},
+ {code: "si", name: p.T("Slovenia")},
+ {code: "sk", name: p.T("Slovakia")},
+ {code: "sm", name: p.T("San Marino")},
+ {code: "va", name: p.T("Vatican City")},
+ }
+ c := collate.New(language.MustParse(p.Locale.Bcp))
+ slices.SortFunc(xs, func(x, y country) int {
+ return c.CompareString(x.name, y.name)
+ })
+ return xs
+}
diff --git a/lib/email/email.go b/src/email/email.go
index 16ba492..0f2c93d 100644
--- a/lib/email/email.go
+++ b/src/email/email.go
@@ -10,6 +10,7 @@ import (
)
var Config struct {
+ Disabled bool
Host string
Port int
ToAddr, FromAddr string
@@ -27,6 +28,10 @@ Message-ID: <%s>
%s`
func ServerError(fault error) error {
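+	/* If e-mail reporting is disabled, skip sending a report and simply hand the error back to the caller. */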
+ if Config.Disabled {
+ return fault
+ }
+
msgid := strconv.FormatInt(rand.Int64(), 10) + "@" + Config.Host
msg := fmt.Sprintf(emailTemplate, Config.FromAddr, Config.ToAddr,
"Error Report", time.Now().Format(time.RFC1123Z), msgid, fault)
diff --git a/src/http.go b/src/http.go
new file mode 100644
index 0000000..8ca7564
--- /dev/null
+++ b/src/http.go
@@ -0,0 +1,190 @@
+package src
+
+import (
+ "cmp"
+ "context"
+ "errors"
+ "fmt"
+ "log"
+ "math"
+ "net/http"
+ "os"
+ "path/filepath"
+ "slices"
+ "strconv"
+ "strings"
+
+ "git.thomasvoss.com/euro-cash.eu/src/email"
+ "git.thomasvoss.com/euro-cash.eu/src/mintage"
+)
+
+type middleware = func(http.Handler) http.Handler
+
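+/* Run registers the static-asset routes and the middleware chains for the
+   dynamic pages, then blocks serving HTTP on the given port. */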
+func Run(port int) {
+ fs := http.FileServer(http.Dir("static"))
+ final := http.HandlerFunc(finalHandler)
+ mux := http.NewServeMux()
+ mux.Handle("GET /designs/", fs)
+ mux.Handle("GET /favicon.ico", fs)
+ mux.Handle("GET /fonts/", fs)
+ mux.Handle("GET /style.css", fs)
+ mux.Handle("GET /coins/mintages", chain(
+ firstHandler,
+ i18nHandler,
+ mintageHandler,
+ )(final))
+ mux.Handle("GET /", chain(
+ firstHandler,
+ i18nHandler,
+ )(final))
+ mux.Handle("POST /language", http.HandlerFunc(setUserLanguage))
+
+ portStr := ":" + strconv.Itoa(port)
+ log.Println("Listening on", portStr)
+ log.Fatal(http.ListenAndServe(portStr, mux))
+}
+
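+/* chain composes middlewares so that the first element of xs becomes the
+   outermost wrapper and is therefore the first to run on each request. */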
+func chain(xs ...middleware) middleware {
+ return func(next http.Handler) http.Handler {
+ for i := len(xs) - 1; i >= 0; i-- {
+ next = xs[i](next)
+ }
+ return next
+ }
+}
+
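+/* firstHandler seeds the request context with an empty templateData that
+   the later middleware and the final handler fill in. */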
+func firstHandler(next http.Handler) http.Handler {
+ return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ ctx := context.WithValue(r.Context(), "td", &templateData{})
+ next.ServeHTTP(w, r.WithContext(ctx))
+ })
+}
+
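+/* finalHandler normalises the request path, looks up the matching template
+   (falling back to the 404 page), and renders it with the template data
+   accumulated by the earlier middleware. */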
+func finalHandler(w http.ResponseWriter, r *http.Request) {
+ /* Strip trailing slash from the URL */
+ path := r.URL.Path
+ if path != "/" && path[len(path)-1] == '/' {
+ path = path[:len(path)-1]
+ }
+
+ t, ok := templates[path]
+ if !ok {
+ w.WriteHeader(http.StatusNotFound)
+ t = notFoundTmpl
+ }
+
+ /* When a user clicks on the language button to be taken to the
+ language selection page, we need to set a redirect cookie so
+ that after selecting a language they are taken back to the
+ original page they came from. */
+ if path == "/language" {
+ http.SetCookie(w, &http.Cookie{
+ Name: "redirect",
+ Value: cmp.Or(r.Referer(), "/"),
+ })
+ }
+
+ data := r.Context().Value("td").(*templateData)
+ t.Execute(w, data)
+}
+
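+/* i18nHandler resolves the user’s locale from the ‘locale’ cookie and
+   attaches the corresponding Printer to the template data; requests
+   without a valid locale are shown the language-selection page instead. */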
+func i18nHandler(next http.Handler) http.Handler {
+ return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ var p, pZero Printer
+
+ if c, err := r.Cookie("locale"); err == nil {
+ p = printers[strings.ToLower(c.Value)]
+ }
+
+ td := r.Context().Value("td").(*templateData)
+ td.Printer = cmp.Or(p, defaultPrinter)
+
+ if p == pZero {
+ http.SetCookie(w, &http.Cookie{
+ Name: "redirect",
+ Value: r.URL.Path,
+ })
+ templates["/language"].Execute(w, td)
+ } else {
+ next.ServeHTTP(w, r)
+ }
+ })
+}
+
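+/* mintageHandler validates the ‘code’ and ‘type’ query parameters (falling
+   back to the first country and ‘circ’) and parses the corresponding
+   mintage data file before handing off to the next handler. */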
+func mintageHandler(next http.Handler) http.Handler {
+ return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ td := r.Context().Value("td").(*templateData)
+ td.Countries = sortedCountries(td.Printer)
+
+ td.Code = strings.ToLower(r.FormValue("code"))
+ if !slices.ContainsFunc(td.Countries, func(c country) bool {
+ return c.code == td.Code
+ }) {
+ td.Code = td.Countries[0].code
+ }
+
+ td.Type = strings.ToLower(r.FormValue("type"))
+ switch td.Type {
+ case "circ", "nifc", "proof":
+ default:
+ td.Type = "circ"
+ }
+
+ path := filepath.Join("data", "mintages", td.Code)
+ f, err := os.Open(path)
+ if err != nil {
+ throwError(http.StatusInternalServerError, err, w, r)
+ return
+ }
+ defer f.Close()
+
+ td.Mintages, err = mintage.Parse(f, path)
+ if err != nil {
+ throwError(http.StatusInternalServerError, err, w, r)
+ return
+ }
+
+ next.ServeHTTP(w, r)
+ })
+}
+
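+/* setUserLanguage validates the submitted locale, stores it in a
+   long-lived cookie, and redirects back to the page recorded in the
+   ‘redirect’ cookie (or to the home page if none is set). */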
+func setUserLanguage(w http.ResponseWriter, r *http.Request) {
+ loc := r.FormValue("locale")
+ _, ok := printers[strings.ToLower(loc)]
+ if !ok {
+ w.WriteHeader(http.StatusUnprocessableEntity)
+ fmt.Fprintf(w, "Locale ‘%s’ is invalid or unsupported", loc)
+ return
+ }
+ http.SetCookie(w, &http.Cookie{
+ Name: "locale",
+ Value: loc,
+ MaxAge: math.MaxInt32,
+ })
+
+ if c, err := r.Cookie("redirect"); errors.Is(err, http.ErrNoCookie) {
+ http.Redirect(w, r, "/", http.StatusFound)
+ } else {
+ http.SetCookie(w, &http.Cookie{
+ Name: "redirect",
+ MaxAge: -1,
+ })
+ http.Redirect(w, r, c.Value, http.StatusFound)
+ }
+}
+
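+/* throwError writes the given status code, dispatches an error report by
+   e-mail in the background, and renders the error template with the status
+   code and its associated text. */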
+func throwError(status int, err error, w http.ResponseWriter, r *http.Request) {
+ w.WriteHeader(status)
+ go func() {
+ if err := email.ServerError(err); err != nil {
+ log.Print(err)
+ }
+ }()
+ errorTmpl.Execute(w, struct {
+ Code int
+ Msg string
+ }{
+ Code: status,
+ Msg: http.StatusText(status),
+ })
+}
diff --git a/lib/i18n.go b/src/i18n.go
index 50652cb..eaac4cf 100644
--- a/lib/i18n.go
+++ b/src/i18n.go
@@ -1,6 +1,7 @@
-//go:generate gotext -srclang=en update -out=catalog.gen.go -lang=bg,el,en,nl git.thomasvoss.com/euro-cash.eu
+//go:generate gotext -srclang=en -dir=rosetta extract -lang=bg,el,en,nl .
+//go:generate ../exttmpl
-package lib
+package src
import (
"fmt"
@@ -12,18 +13,18 @@ import (
)
type Printer struct {
- Locale Locale
+ Locale locale
inner *message.Printer
}
-type Locale struct {
+type locale struct {
Bcp, Name string
- dateFmt, moneyFmt string
Eurozone, Enabled bool
+ dateFmt, moneyFmt string
}
var (
- Locales = [...]Locale{
+ Locales = [...]locale{
{
Bcp: "ca",
Name: "català",
@@ -188,24 +189,24 @@ var (
Enabled: false,
},
}
- /* Map of language codes to Printers. We do this instead of just
+ /* Map of language codes to printers. We do this instead of just
using language.MustParse() directly so that we can easily see if a
language is supported or not. */
- Printers map[string]Printer = make(map[string]Printer, len(Locales))
- DefaultPrinter Printer
+ printers map[string]Printer = make(map[string]Printer, len(Locales))
+ defaultPrinter Printer
)
-func InitPrinters() {
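+/* Initialise the printers at package load time instead of requiring an
+   explicit InitPrinters() call. */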
+func init() {
for _, loc := range Locales {
if loc.Enabled {
lang := language.MustParse(loc.Bcp)
- Printers[strings.ToLower(loc.Bcp)] = Printer{
+ printers[strings.ToLower(loc.Bcp)] = Printer{
Locale: loc,
inner: message.NewPrinter(lang),
}
}
}
- DefaultPrinter = Printers["en"]
+ defaultPrinter = printers["en"]
}
func (p Printer) T(fmt string, args ...any) string {
@@ -247,6 +248,6 @@ func (p Printer) Money(val float64, round bool) string {
}
/* Transform ‘en-US’ to ‘en’ */
-func (l Locale) Language() string {
+func (l locale) Language() string {
return l.Bcp[:2]
}
diff --git a/lib/mintage/parser.go b/src/mintage/parser.go
index 364b6e8..364b6e8 100644
--- a/lib/mintage/parser.go
+++ b/src/mintage/parser.go
diff --git a/lib/mintage/parser_test.go b/src/mintage/parser_test.go
index 76e0f01..76e0f01 100644
--- a/lib/mintage/parser_test.go
+++ b/src/mintage/parser_test.go
diff --git a/src/rosetta/bg/messages.gotext.json b/src/rosetta/bg/messages.gotext.json
new file mode 100644
index 0000000..b1c3b47
--- /dev/null
+++ b/src/rosetta/bg/messages.gotext.json
@@ -0,0 +1,315 @@
+{
+ "language": "bg",
+ "messages": [
+ {
+ "id": "Andorra",
+ "message": "Andorra",
+ "translation": ""
+ },
+ {
+ "id": "Austria",
+ "message": "Austria",
+ "translation": ""
+ },
+ {
+ "id": "Belgium",
+ "message": "Belgium",
+ "translation": ""
+ },
+ {
+ "id": "Cyprus",
+ "message": "Cyprus",
+ "translation": ""
+ },
+ {
+ "id": "Germany",
+ "message": "Germany",
+ "translation": ""
+ },
+ {
+ "id": "Estonia",
+ "message": "Estonia",
+ "translation": ""
+ },
+ {
+ "id": "Spain",
+ "message": "Spain",
+ "translation": ""
+ },
+ {
+ "id": "Finland",
+ "message": "Finland",
+ "translation": ""
+ },
+ {
+ "id": "France",
+ "message": "France",
+ "translation": ""
+ },
+ {
+ "id": "Greece",
+ "message": "Greece",
+ "translation": ""
+ },
+ {
+ "id": "Croatia",
+ "message": "Croatia",
+ "translation": ""
+ },
+ {
+ "id": "Ireland",
+ "message": "Ireland",
+ "translation": ""
+ },
+ {
+ "id": "Italy",
+ "message": "Italy",
+ "translation": ""
+ },
+ {
+ "id": "Lithuania",
+ "message": "Lithuania",
+ "translation": ""
+ },
+ {
+ "id": "Luxembourg",
+ "message": "Luxembourg",
+ "translation": ""
+ },
+ {
+ "id": "Latvia",
+ "message": "Latvia",
+ "translation": ""
+ },
+ {
+ "id": "Monaco",
+ "message": "Monaco",
+ "translation": ""
+ },
+ {
+ "id": "Malta",
+ "message": "Malta",
+ "translation": ""
+ },
+ {
+ "id": "Netherlands",
+ "message": "Netherlands",
+ "translation": ""
+ },
+ {
+ "id": "Portugal",
+ "message": "Portugal",
+ "translation": ""
+ },
+ {
+ "id": "Slovenia",
+ "message": "Slovenia",
+ "translation": ""
+ },
+ {
+ "id": "Slovakia",
+ "message": "Slovakia",
+ "translation": ""
+ },
+ {
+ "id": "San Marino",
+ "message": "San Marino",
+ "translation": ""
+ },
+ {
+ "id": "Vatican City",
+ "message": "Vatican City",
+ "translation": ""
+ },
+ {
+ "id": "Page Not Found",
+ "message": "Page Not Found",
+ "translation": ""
+ },
+ {
+ "id": "The page you were looking for does not exist. If you believe this is a mistake then don’t hesitate to contact @onetruemangoman on Discord or email us at %s.",
+ "message": "The page you were looking for does not exist. If you believe this is a mistake then don’t hesitate to contact @onetruemangoman on Discord or email us at %s.",
+ "translation": ""
+ },
+ {
+ "id": "\u003ca href=\"mailto:mail@euro-cash.eu\"\u003email@euro-cash.eu\u003c/a\u003e",
+ "message": "\u003ca href=\"mailto:mail@euro-cash.eu\"\u003email@euro-cash.eu\u003c/a\u003e",
+ "translation": ""
+ },
+ {
+ "id": "About Us",
+ "message": "About Us",
+ "translation": ""
+ },
+ {
+ "id": "Open Source",
+ "message": "Open Source",
+ "translation": ""
+ },
+ {
+ "id": "This website is an open project, and a collaboration between developers, translators, and researchers. All source code, data, images, and more for the website are open source and can be found %shere%s. This site is licensed under the BSD 0-Clause license giving you the full freedom to do whatever you would like with anyof the content on this site.",
+ "message": "This website is an open project, and a collaboration between developers, translators, and researchers. All source code, data, images, and more for the website are open source and can be found %shere%s. This site is licensed under the BSD 0-Clause license giving you the full freedom to do whatever you would like with anyof the content on this site.",
+ "translation": ""
+ },
+ {
+ "id": "\u003ca href=\"https://git.thomasvoss.com/www.euro-cash.eu\" target=\"_blank\"\u003e",
+ "message": "\u003ca href=\"https://git.thomasvoss.com/www.euro-cash.eu\" target=\"_blank\"\u003e",
+ "translation": ""
+ },
+ {
+ "id": "\u003c/a\u003e",
+ "message": "\u003c/a\u003e",
+ "translation": ""
+ },
+ {
+ "id": "Contact Us",
+ "message": "Contact Us",
+ "translation": ""
+ },
+ {
+ "id": "While we try to stay as up-to-date as possible and to fact check our information, it is always possible that we get something wrong, lack a translation, or are missing some piece of data you may have. In such a case don’t hesitate to contact us; we’ll try to get the site updated or fixed as soon as possible. You are always free to contribute via a git patch if you are more technically included, but if not you can always send an email to %s or contact ‘@onetruemangoman’ on Discord.",
+ "message": "While we try to stay as up-to-date as possible and to fact check our information, it is always possible that we get something wrong, lack a translation, or are missing some piece of data you may have. In such a case don’t hesitate to contact us; we’ll try to get the site updated or fixed as soon as possible. You are always free to contribute via a git patch if you are more technically included, but if not you can always send an email to %s or contact ‘@onetruemangoman’ on Discord.",
+ "translation": ""
+ },
+ {
+ "id": "Special Thanks",
+ "message": "Special Thanks",
+ "translation": ""
+ },
+ {
+ "id": "Development",
+ "message": "Development",
+ "translation": ""
+ },
+ {
+ "id": "Research",
+ "message": "Research",
+ "translation": ""
+ },
+ {
+ "id": "Translations",
+ "message": "Translations",
+ "translation": ""
+ },
+ {
+ "id": "British- \u0026 American English",
+ "message": "British- \u0026 American English",
+ "translation": ""
+ },
+ {
+ "id": "Icelandic",
+ "message": "Icelandic",
+ "translation": ""
+ },
+ {
+ "id": "Found a mistake or want to contribute missing information?",
+ "message": "Found a mistake or want to contribute missing information?",
+ "translation": ""
+ },
+ {
+ "id": "Feel free to contact us!",
+ "message": "Feel free to contact us!",
+ "translation": ""
+ },
+ {
+ "id": "If you’re seeing this page, it means that something went wrong on our end that we need to fix. Our team has been notified of this error, and we apologise for the inconvenience.",
+ "message": "If you’re seeing this page, it means that something went wrong on our end that we need to fix. Our team has been notified of this error, and we apologise for the inconvenience.",
+ "translation": ""
+ },
+ {
+ "id": "If this issue persists, don’t hesitate to contact @onetruemangoman on Discord or to email us at %s.",
+ "message": "If this issue persists, don’t hesitate to contact @onetruemangoman on Discord or to email us at %s.",
+ "translation": ""
+ },
+ {
+ "id": "The Euro Cash Compendium",
+ "message": "The Euro Cash Compendium",
+ "translation": ""
+ },
+ {
+ "id": "United in",
+ "message": "United in",
+ "translation": ""
+ },
+ {
+ "id": "diversity",
+ "message": "diversity",
+ "translation": ""
+ },
+ {
+ "id": "cash",
+ "message": "cash",
+ "translation": ""
+ },
+ {
+ "id": "Welcome to the Euro Cash Compendium. This sites aims to be a resource for you to discover everything there is to know about the coins and banknotes of the Euro, a currency that spans 26 countries and 350 million people. We also have dedicated sections of the site for collectors.",
+ "message": "Welcome to the Euro Cash Compendium. This sites aims to be a resource for you to discover everything there is to know about the coins and banknotes of the Euro, a currency that spans 26 countries and 350 million people. We also have dedicated sections of the site for collectors.",
+ "translation": ""
+ },
+ {
+ "id": "Select Your Language",
+ "message": "Select Your Language",
+ "translation": ""
+ },
+ {
+ "id": "Select your preferred language to use on the site.",
+ "message": "Select your preferred language to use on the site.",
+ "translation": ""
+ },
+ {
+ "id": "Eurozone Languages",
+ "message": "Eurozone Languages",
+ "translation": ""
+ },
+ {
+ "id": "Other Languages",
+ "message": "Other Languages",
+ "translation": ""
+ },
+ {
+ "id": "Home",
+ "message": "Home",
+ "translation": ""
+ },
+ {
+ "id": "News",
+ "message": "News",
+ "translation": ""
+ },
+ {
+ "id": "Coin Collecting",
+ "message": "Coin Collecting",
+ "translation": ""
+ },
+ {
+ "id": "Coins",
+ "message": "Coins",
+ "translation": ""
+ },
+ {
+ "id": "Banknotes",
+ "message": "Banknotes",
+ "translation": ""
+ },
+ {
+ "id": "Jargon",
+ "message": "Jargon",
+ "translation": ""
+ },
+ {
+ "id": "Discord",
+ "message": "Discord",
+ "translation": ""
+ },
+ {
+ "id": "About",
+ "message": "About",
+ "translation": ""
+ },
+ {
+ "id": "Language",
+ "message": "Language",
+ "translation": ""
+ }
+ ]
+}
\ No newline at end of file
diff --git a/src/rosetta/el/messages.gotext.json b/src/rosetta/el/messages.gotext.json
new file mode 100644
index 0000000..c903fa9
--- /dev/null
+++ b/src/rosetta/el/messages.gotext.json
@@ -0,0 +1,315 @@
+{
+ "language": "el",
+ "messages": [
+ {
+ "id": "Andorra",
+ "message": "Andorra",
+ "translation": ""
+ },
+ {
+ "id": "Austria",
+ "message": "Austria",
+ "translation": ""
+ },
+ {
+ "id": "Belgium",
+ "message": "Belgium",
+ "translation": ""
+ },
+ {
+ "id": "Cyprus",
+ "message": "Cyprus",
+ "translation": ""
+ },
+ {
+ "id": "Germany",
+ "message": "Germany",
+ "translation": ""
+ },
+ {
+ "id": "Estonia",
+ "message": "Estonia",
+ "translation": ""
+ },
+ {
+ "id": "Spain",
+ "message": "Spain",
+ "translation": ""
+ },
+ {
+ "id": "Finland",
+ "message": "Finland",
+ "translation": ""
+ },
+ {
+ "id": "France",
+ "message": "France",
+ "translation": ""
+ },
+ {
+ "id": "Greece",
+ "message": "Greece",
+ "translation": ""
+ },
+ {
+ "id": "Croatia",
+ "message": "Croatia",
+ "translation": ""
+ },
+ {
+ "id": "Ireland",
+ "message": "Ireland",
+ "translation": ""
+ },
+ {
+ "id": "Italy",
+ "message": "Italy",
+ "translation": ""
+ },
+ {
+ "id": "Lithuania",
+ "message": "Lithuania",
+ "translation": ""
+ },
+ {
+ "id": "Luxembourg",
+ "message": "Luxembourg",
+ "translation": ""
+ },
+ {
+ "id": "Latvia",
+ "message": "Latvia",
+ "translation": ""
+ },
+ {
+ "id": "Monaco",
+ "message": "Monaco",
+ "translation": ""
+ },
+ {
+ "id": "Malta",
+ "message": "Malta",
+ "translation": ""
+ },
+ {
+ "id": "Netherlands",
+ "message": "Netherlands",
+ "translation": ""
+ },
+ {
+ "id": "Portugal",
+ "message": "Portugal",
+ "translation": ""
+ },
+ {
+ "id": "Slovenia",
+ "message": "Slovenia",
+ "translation": ""
+ },
+ {
+ "id": "Slovakia",
+ "message": "Slovakia",
+ "translation": ""
+ },
+ {
+ "id": "San Marino",
+ "message": "San Marino",
+ "translation": ""
+ },
+ {
+ "id": "Vatican City",
+ "message": "Vatican City",
+ "translation": ""
+ },
+ {
+ "id": "Page Not Found",
+ "message": "Page Not Found",
+ "translation": ""
+ },
+ {
+ "id": "The page you were looking for does not exist. If you believe this is a mistake then don’t hesitate to contact @onetruemangoman on Discord or email us at %s.",
+ "message": "The page you were looking for does not exist. If you believe this is a mistake then don’t hesitate to contact @onetruemangoman on Discord or email us at %s.",
+ "translation": ""
+ },
+ {
+ "id": "\u003ca href=\"mailto:mail@euro-cash.eu\"\u003email@euro-cash.eu\u003c/a\u003e",
+ "message": "\u003ca href=\"mailto:mail@euro-cash.eu\"\u003email@euro-cash.eu\u003c/a\u003e",
+ "translation": ""
+ },
+ {
+ "id": "About Us",
+ "message": "About Us",
+ "translation": ""
+ },
+ {
+ "id": "Open Source",
+ "message": "Open Source",
+ "translation": ""
+ },
+ {
+ "id": "This website is an open project, and a collaboration between developers, translators, and researchers. All source code, data, images, and more for the website are open source and can be found %shere%s. This site is licensed under the BSD 0-Clause license giving you the full freedom to do whatever you would like with anyof the content on this site.",
+ "message": "This website is an open project, and a collaboration between developers, translators, and researchers. All source code, data, images, and more for the website are open source and can be found %shere%s. This site is licensed under the BSD 0-Clause license giving you the full freedom to do whatever you would like with anyof the content on this site.",
+ "translation": ""
+ },
+ {
+ "id": "\u003ca href=\"https://git.thomasvoss.com/www.euro-cash.eu\" target=\"_blank\"\u003e",
+ "message": "\u003ca href=\"https://git.thomasvoss.com/www.euro-cash.eu\" target=\"_blank\"\u003e",
+ "translation": ""
+ },
+ {
+ "id": "\u003c/a\u003e",
+ "message": "\u003c/a\u003e",
+ "translation": ""
+ },
+ {
+ "id": "Contact Us",
+ "message": "Contact Us",
+ "translation": ""
+ },
+ {
+ "id": "While we try to stay as up-to-date as possible and to fact check our information, it is always possible that we get something wrong, lack a translation, or are missing some piece of data you may have. In such a case don’t hesitate to contact us; we’ll try to get the site updated or fixed as soon as possible. You are always free to contribute via a git patch if you are more technically included, but if not you can always send an email to %s or contact ‘@onetruemangoman’ on Discord.",
+ "message": "While we try to stay as up-to-date as possible and to fact check our information, it is always possible that we get something wrong, lack a translation, or are missing some piece of data you may have. In such a case don’t hesitate to contact us; we’ll try to get the site updated or fixed as soon as possible. You are always free to contribute via a git patch if you are more technically included, but if not you can always send an email to %s or contact ‘@onetruemangoman’ on Discord.",
+ "translation": ""
+ },
+ {
+ "id": "Special Thanks",
+ "message": "Special Thanks",
+ "translation": ""
+ },
+ {
+ "id": "Development",
+ "message": "Development",
+ "translation": ""
+ },
+ {
+ "id": "Research",
+ "message": "Research",
+ "translation": ""
+ },
+ {
+ "id": "Translations",
+ "message": "Translations",
+ "translation": ""
+ },
+ {
+ "id": "British- \u0026 American English",
+ "message": "British- \u0026 American English",
+ "translation": ""
+ },
+ {
+ "id": "Icelandic",
+ "message": "Icelandic",
+ "translation": ""
+ },
+ {
+ "id": "Found a mistake or want to contribute missing information?",
+ "message": "Found a mistake or want to contribute missing information?",
+ "translation": ""
+ },
+ {
+ "id": "Feel free to contact us!",
+ "message": "Feel free to contact us!",
+ "translation": ""
+ },
+ {
+ "id": "If you’re seeing this page, it means that something went wrong on our end that we need to fix. Our team has been notified of this error, and we apologise for the inconvenience.",
+ "message": "If you’re seeing this page, it means that something went wrong on our end that we need to fix. Our team has been notified of this error, and we apologise for the inconvenience.",
+ "translation": ""
+ },
+ {
+ "id": "If this issue persists, don’t hesitate to contact @onetruemangoman on Discord or to email us at %s.",
+ "message": "If this issue persists, don’t hesitate to contact @onetruemangoman on Discord or to email us at %s.",
+ "translation": ""
+ },
+ {
+ "id": "The Euro Cash Compendium",
+ "message": "The Euro Cash Compendium",
+ "translation": ""
+ },
+ {
+ "id": "United in",
+ "message": "United in",
+ "translation": ""
+ },
+ {
+ "id": "diversity",
+ "message": "diversity",
+ "translation": ""
+ },
+ {
+ "id": "cash",
+ "message": "cash",
+ "translation": ""
+ },
+ {
+ "id": "Welcome to the Euro Cash Compendium. This sites aims to be a resource for you to discover everything there is to know about the coins and banknotes of the Euro, a currency that spans 26 countries and 350 million people. We also have dedicated sections of the site for collectors.",
+ "message": "Welcome to the Euro Cash Compendium. This sites aims to be a resource for you to discover everything there is to know about the coins and banknotes of the Euro, a currency that spans 26 countries and 350 million people. We also have dedicated sections of the site for collectors.",
+ "translation": ""
+ },
+ {
+ "id": "Select Your Language",
+ "message": "Select Your Language",
+ "translation": ""
+ },
+ {
+ "id": "Select your preferred language to use on the site.",
+ "message": "Select your preferred language to use on the site.",
+ "translation": ""
+ },
+ {
+ "id": "Eurozone Languages",
+ "message": "Eurozone Languages",
+ "translation": ""
+ },
+ {
+ "id": "Other Languages",
+ "message": "Other Languages",
+ "translation": ""
+ },
+ {
+ "id": "Home",
+ "message": "Home",
+ "translation": ""
+ },
+ {
+ "id": "News",
+ "message": "News",
+ "translation": ""
+ },
+ {
+ "id": "Coin Collecting",
+ "message": "Coin Collecting",
+ "translation": ""
+ },
+ {
+ "id": "Coins",
+ "message": "Coins",
+ "translation": ""
+ },
+ {
+ "id": "Banknotes",
+ "message": "Banknotes",
+ "translation": ""
+ },
+ {
+ "id": "Jargon",
+ "message": "Jargon",
+ "translation": ""
+ },
+ {
+ "id": "Discord",
+ "message": "Discord",
+ "translation": ""
+ },
+ {
+ "id": "About",
+ "message": "About",
+ "translation": ""
+ },
+ {
+ "id": "Language",
+ "message": "Language",
+ "translation": ""
+ }
+ ]
+}
\ No newline at end of file
diff --git a/src/rosetta/en/messages.gotext.json b/src/rosetta/en/messages.gotext.json
new file mode 100644
index 0000000..0e09e33
--- /dev/null
+++ b/src/rosetta/en/messages.gotext.json
@@ -0,0 +1,439 @@
+{
+ "language": "en",
+ "messages": [
+ {
+ "id": "Andorra",
+ "message": "Andorra",
+ "translation": "Andorra",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "Austria",
+ "message": "Austria",
+ "translation": "Austria",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "Belgium",
+ "message": "Belgium",
+ "translation": "Belgium",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "Cyprus",
+ "message": "Cyprus",
+ "translation": "Cyprus",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "Germany",
+ "message": "Germany",
+ "translation": "Germany",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "Estonia",
+ "message": "Estonia",
+ "translation": "Estonia",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "Spain",
+ "message": "Spain",
+ "translation": "Spain",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "Finland",
+ "message": "Finland",
+ "translation": "Finland",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "France",
+ "message": "France",
+ "translation": "France",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "Greece",
+ "message": "Greece",
+ "translation": "Greece",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "Croatia",
+ "message": "Croatia",
+ "translation": "Croatia",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "Ireland",
+ "message": "Ireland",
+ "translation": "Ireland",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "Italy",
+ "message": "Italy",
+ "translation": "Italy",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "Lithuania",
+ "message": "Lithuania",
+ "translation": "Lithuania",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "Luxembourg",
+ "message": "Luxembourg",
+ "translation": "Luxembourg",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "Latvia",
+ "message": "Latvia",
+ "translation": "Latvia",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "Monaco",
+ "message": "Monaco",
+ "translation": "Monaco",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "Malta",
+ "message": "Malta",
+ "translation": "Malta",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "Netherlands",
+ "message": "Netherlands",
+ "translation": "Netherlands",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "Portugal",
+ "message": "Portugal",
+ "translation": "Portugal",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "Slovenia",
+ "message": "Slovenia",
+ "translation": "Slovenia",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "Slovakia",
+ "message": "Slovakia",
+ "translation": "Slovakia",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "San Marino",
+ "message": "San Marino",
+ "translation": "San Marino",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "Vatican City",
+ "message": "Vatican City",
+ "translation": "Vatican City",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "Page Not Found",
+ "message": "Page Not Found",
+ "translation": "Page Not Found",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "The page you were looking for does not exist. If you believe this is a mistake then don’t hesitate to contact @onetruemangoman on Discord or email us at %s.",
+ "message": "The page you were looking for does not exist. If you believe this is a mistake then don’t hesitate to contact @onetruemangoman on Discord or email us at %s.",
+ "translation": "The page you were looking for does not exist. If you believe this is a mistake then don’t hesitate to contact @onetruemangoman on Discord or email us at %s.",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "\u003ca href=\"mailto:mail@euro-cash.eu\"\u003email@euro-cash.eu\u003c/a\u003e",
+ "message": "\u003ca href=\"mailto:mail@euro-cash.eu\"\u003email@euro-cash.eu\u003c/a\u003e",
+ "translation": "\u003ca href=\"mailto:mail@euro-cash.eu\"\u003email@euro-cash.eu\u003c/a\u003e",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "About Us",
+ "message": "About Us",
+ "translation": "About Us",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "Open Source",
+ "message": "Open Source",
+ "translation": "Open Source",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+      "id": "This website is an open project, and a collaboration between developers, translators, and researchers. All source code, data, images, and more for the website are open source and can be found %shere%s. This site is licensed under the BSD 0-Clause license giving you the full freedom to do whatever you would like with any of the content on this site.",
+      "message": "This website is an open project, and a collaboration between developers, translators, and researchers. All source code, data, images, and more for the website are open source and can be found %shere%s. This site is licensed under the BSD 0-Clause license giving you the full freedom to do whatever you would like with any of the content on this site.",
+      "translation": "This website is an open project, and a collaboration between developers, translators, and researchers. All source code, data, images, and more for the website are open source and can be found %shere%s. This site is licensed under the BSD 0-Clause license giving you the full freedom to do whatever you would like with any of the content on this site.",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "\u003ca href=\"https://git.thomasvoss.com/www.euro-cash.eu\" target=\"_blank\"\u003e",
+ "message": "\u003ca href=\"https://git.thomasvoss.com/www.euro-cash.eu\" target=\"_blank\"\u003e",
+ "translation": "\u003ca href=\"https://git.thomasvoss.com/www.euro-cash.eu\" target=\"_blank\"\u003e",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "\u003c/a\u003e",
+ "message": "\u003c/a\u003e",
+ "translation": "\u003c/a\u003e",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "Contact Us",
+ "message": "Contact Us",
+ "translation": "Contact Us",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+      "id": "While we try to stay as up-to-date as possible and to fact check our information, it is always possible that we get something wrong, lack a translation, or are missing some piece of data you may have. In such a case don’t hesitate to contact us; we’ll try to get the site updated or fixed as soon as possible. You are always free to contribute via a git patch if you are more technically inclined, but if not you can always send an email to %s or contact ‘@onetruemangoman’ on Discord.",
+      "message": "While we try to stay as up-to-date as possible and to fact check our information, it is always possible that we get something wrong, lack a translation, or are missing some piece of data you may have. In such a case don’t hesitate to contact us; we’ll try to get the site updated or fixed as soon as possible. You are always free to contribute via a git patch if you are more technically inclined, but if not you can always send an email to %s or contact ‘@onetruemangoman’ on Discord.",
+      "translation": "While we try to stay as up-to-date as possible and to fact check our information, it is always possible that we get something wrong, lack a translation, or are missing some piece of data you may have. In such a case don’t hesitate to contact us; we’ll try to get the site updated or fixed as soon as possible. You are always free to contribute via a git patch if you are more technically inclined, but if not you can always send an email to %s or contact ‘@onetruemangoman’ on Discord.",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "Special Thanks",
+ "message": "Special Thanks",
+ "translation": "Special Thanks",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "Development",
+ "message": "Development",
+ "translation": "Development",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "Research",
+ "message": "Research",
+ "translation": "Research",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "Translations",
+ "message": "Translations",
+ "translation": "Translations",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "British- \u0026 American English",
+ "message": "British- \u0026 American English",
+ "translation": "British- \u0026 American English",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "Icelandic",
+ "message": "Icelandic",
+ "translation": "Icelandic",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "Found a mistake or want to contribute missing information?",
+ "message": "Found a mistake or want to contribute missing information?",
+ "translation": "Found a mistake or want to contribute missing information?",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "Feel free to contact us!",
+ "message": "Feel free to contact us!",
+ "translation": "Feel free to contact us!",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "If you’re seeing this page, it means that something went wrong on our end that we need to fix. Our team has been notified of this error, and we apologise for the inconvenience.",
+ "message": "If you’re seeing this page, it means that something went wrong on our end that we need to fix. Our team has been notified of this error, and we apologise for the inconvenience.",
+ "translation": "If you’re seeing this page, it means that something went wrong on our end that we need to fix. Our team has been notified of this error, and we apologise for the inconvenience.",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "If this issue persists, don’t hesitate to contact @onetruemangoman on Discord or to email us at %s.",
+ "message": "If this issue persists, don’t hesitate to contact @onetruemangoman on Discord or to email us at %s.",
+ "translation": "If this issue persists, don’t hesitate to contact @onetruemangoman on Discord or to email us at %s.",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "The Euro Cash Compendium",
+ "message": "The Euro Cash Compendium",
+ "translation": "The Euro Cash Compendium",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "United in",
+ "message": "United in",
+ "translation": "United in",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "diversity",
+ "message": "diversity",
+ "translation": "diversity",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "cash",
+ "message": "cash",
+ "translation": "cash",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+      "id": "Welcome to the Euro Cash Compendium. This site aims to be a resource for you to discover everything there is to know about the coins and banknotes of the Euro, a currency that spans 26 countries and 350 million people. We also have dedicated sections of the site for collectors.",
+      "message": "Welcome to the Euro Cash Compendium. This site aims to be a resource for you to discover everything there is to know about the coins and banknotes of the Euro, a currency that spans 26 countries and 350 million people. We also have dedicated sections of the site for collectors.",
+      "translation": "Welcome to the Euro Cash Compendium. This site aims to be a resource for you to discover everything there is to know about the coins and banknotes of the Euro, a currency that spans 26 countries and 350 million people. We also have dedicated sections of the site for collectors.",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "Select Your Language",
+ "message": "Select Your Language",
+ "translation": "Select Your Language",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "Select your preferred language to use on the site.",
+ "message": "Select your preferred language to use on the site.",
+ "translation": "Select your preferred language to use on the site.",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "Eurozone Languages",
+ "message": "Eurozone Languages",
+ "translation": "Eurozone Languages",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "Other Languages",
+ "message": "Other Languages",
+ "translation": "Other Languages",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "Home",
+ "message": "Home",
+ "translation": "Home",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "News",
+ "message": "News",
+ "translation": "News",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "Coin Collecting",
+ "message": "Coin Collecting",
+ "translation": "Coin Collecting",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "Coins",
+ "message": "Coins",
+ "translation": "Coins",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "Banknotes",
+ "message": "Banknotes",
+ "translation": "Banknotes",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "Jargon",
+ "message": "Jargon",
+ "translation": "Jargon",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "Discord",
+ "message": "Discord",
+ "translation": "Discord",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "About",
+ "message": "About",
+ "translation": "About",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ },
+ {
+ "id": "Language",
+ "message": "Language",
+ "translation": "Language",
+ "translatorComment": "Copied from source.",
+ "fuzzy": true
+ }
+ ]
+} \ No newline at end of file
diff --git a/src/rosetta/nl/messages.gotext.json b/src/rosetta/nl/messages.gotext.json
new file mode 100644
index 0000000..ea7134a
--- /dev/null
+++ b/src/rosetta/nl/messages.gotext.json
@@ -0,0 +1,315 @@
+{
+ "language": "nl",
+ "messages": [
+ {
+ "id": "Andorra",
+ "message": "Andorra",
+ "translation": ""
+ },
+ {
+ "id": "Austria",
+ "message": "Austria",
+ "translation": ""
+ },
+ {
+ "id": "Belgium",
+ "message": "Belgium",
+ "translation": ""
+ },
+ {
+ "id": "Cyprus",
+ "message": "Cyprus",
+ "translation": ""
+ },
+ {
+ "id": "Germany",
+ "message": "Germany",
+ "translation": ""
+ },
+ {
+ "id": "Estonia",
+ "message": "Estonia",
+ "translation": ""
+ },
+ {
+ "id": "Spain",
+ "message": "Spain",
+ "translation": ""
+ },
+ {
+ "id": "Finland",
+ "message": "Finland",
+ "translation": ""
+ },
+ {
+ "id": "France",
+ "message": "France",
+ "translation": ""
+ },
+ {
+ "id": "Greece",
+ "message": "Greece",
+ "translation": ""
+ },
+ {
+ "id": "Croatia",
+ "message": "Croatia",
+ "translation": ""
+ },
+ {
+ "id": "Ireland",
+ "message": "Ireland",
+ "translation": ""
+ },
+ {
+ "id": "Italy",
+ "message": "Italy",
+ "translation": ""
+ },
+ {
+ "id": "Lithuania",
+ "message": "Lithuania",
+ "translation": ""
+ },
+ {
+ "id": "Luxembourg",
+ "message": "Luxembourg",
+ "translation": ""
+ },
+ {
+ "id": "Latvia",
+ "message": "Latvia",
+ "translation": ""
+ },
+ {
+ "id": "Monaco",
+ "message": "Monaco",
+ "translation": ""
+ },
+ {
+ "id": "Malta",
+ "message": "Malta",
+ "translation": ""
+ },
+ {
+ "id": "Netherlands",
+ "message": "Netherlands",
+ "translation": ""
+ },
+ {
+ "id": "Portugal",
+ "message": "Portugal",
+ "translation": ""
+ },
+ {
+ "id": "Slovenia",
+ "message": "Slovenia",
+ "translation": ""
+ },
+ {
+ "id": "Slovakia",
+ "message": "Slovakia",
+ "translation": ""
+ },
+ {
+ "id": "San Marino",
+ "message": "San Marino",
+ "translation": ""
+ },
+ {
+ "id": "Vatican City",
+ "message": "Vatican City",
+ "translation": ""
+ },
+ {
+ "id": "Page Not Found",
+ "message": "Page Not Found",
+ "translation": ""
+ },
+ {
+ "id": "The page you were looking for does not exist. If you believe this is a mistake then don’t hesitate to contact @onetruemangoman on Discord or email us at %s.",
+ "message": "The page you were looking for does not exist. If you believe this is a mistake then don’t hesitate to contact @onetruemangoman on Discord or email us at %s.",
+ "translation": ""
+ },
+ {
+ "id": "\u003ca href=\"mailto:mail@euro-cash.eu\"\u003email@euro-cash.eu\u003c/a\u003e",
+ "message": "\u003ca href=\"mailto:mail@euro-cash.eu\"\u003email@euro-cash.eu\u003c/a\u003e",
+ "translation": ""
+ },
+ {
+ "id": "About Us",
+ "message": "About Us",
+ "translation": ""
+ },
+ {
+ "id": "Open Source",
+ "message": "Open Source",
+ "translation": ""
+ },
+ {
+      "id": "This website is an open project, and a collaboration between developers, translators, and researchers. All source code, data, images, and more for the website are open source and can be found %shere%s. This site is licensed under the BSD 0-Clause license giving you the full freedom to do whatever you would like with any of the content on this site.",
+      "message": "This website is an open project, and a collaboration between developers, translators, and researchers. All source code, data, images, and more for the website are open source and can be found %shere%s. This site is licensed under the BSD 0-Clause license giving you the full freedom to do whatever you would like with any of the content on this site.",
+ "translation": ""
+ },
+ {
+ "id": "\u003ca href=\"https://git.thomasvoss.com/www.euro-cash.eu\" target=\"_blank\"\u003e",
+ "message": "\u003ca href=\"https://git.thomasvoss.com/www.euro-cash.eu\" target=\"_blank\"\u003e",
+ "translation": ""
+ },
+ {
+ "id": "\u003c/a\u003e",
+ "message": "\u003c/a\u003e",
+ "translation": ""
+ },
+ {
+ "id": "Contact Us",
+ "message": "Contact Us",
+ "translation": ""
+ },
+ {
+      "id": "While we try to stay as up-to-date as possible and to fact check our information, it is always possible that we get something wrong, lack a translation, or are missing some piece of data you may have. In such a case don’t hesitate to contact us; we’ll try to get the site updated or fixed as soon as possible. You are always free to contribute via a git patch if you are more technically inclined, but if not you can always send an email to %s or contact ‘@onetruemangoman’ on Discord.",
+      "message": "While we try to stay as up-to-date as possible and to fact check our information, it is always possible that we get something wrong, lack a translation, or are missing some piece of data you may have. In such a case don’t hesitate to contact us; we’ll try to get the site updated or fixed as soon as possible. You are always free to contribute via a git patch if you are more technically inclined, but if not you can always send an email to %s or contact ‘@onetruemangoman’ on Discord.",
+ "translation": ""
+ },
+ {
+ "id": "Special Thanks",
+ "message": "Special Thanks",
+ "translation": ""
+ },
+ {
+ "id": "Development",
+ "message": "Development",
+ "translation": ""
+ },
+ {
+ "id": "Research",
+ "message": "Research",
+ "translation": ""
+ },
+ {
+ "id": "Translations",
+ "message": "Translations",
+ "translation": ""
+ },
+ {
+ "id": "British- \u0026 American English",
+ "message": "British- \u0026 American English",
+ "translation": ""
+ },
+ {
+ "id": "Icelandic",
+ "message": "Icelandic",
+ "translation": ""
+ },
+ {
+ "id": "Found a mistake or want to contribute missing information?",
+ "message": "Found a mistake or want to contribute missing information?",
+ "translation": ""
+ },
+ {
+ "id": "Feel free to contact us!",
+ "message": "Feel free to contact us!",
+ "translation": ""
+ },
+ {
+ "id": "If you’re seeing this page, it means that something went wrong on our end that we need to fix. Our team has been notified of this error, and we apologise for the inconvenience.",
+ "message": "If you’re seeing this page, it means that something went wrong on our end that we need to fix. Our team has been notified of this error, and we apologise for the inconvenience.",
+ "translation": ""
+ },
+ {
+ "id": "If this issue persists, don’t hesitate to contact @onetruemangoman on Discord or to email us at %s.",
+ "message": "If this issue persists, don’t hesitate to contact @onetruemangoman on Discord or to email us at %s.",
+ "translation": ""
+ },
+ {
+ "id": "The Euro Cash Compendium",
+ "message": "The Euro Cash Compendium",
+ "translation": ""
+ },
+ {
+ "id": "United in",
+ "message": "United in",
+ "translation": ""
+ },
+ {
+ "id": "diversity",
+ "message": "diversity",
+ "translation": ""
+ },
+ {
+ "id": "cash",
+ "message": "cash",
+ "translation": ""
+ },
+ {
+      "id": "Welcome to the Euro Cash Compendium. This site aims to be a resource for you to discover everything there is to know about the coins and banknotes of the Euro, a currency that spans 26 countries and 350 million people. We also have dedicated sections of the site for collectors.",
+      "message": "Welcome to the Euro Cash Compendium. This site aims to be a resource for you to discover everything there is to know about the coins and banknotes of the Euro, a currency that spans 26 countries and 350 million people. We also have dedicated sections of the site for collectors.",
+ "translation": ""
+ },
+ {
+ "id": "Select Your Language",
+ "message": "Select Your Language",
+ "translation": ""
+ },
+ {
+ "id": "Select your preferred language to use on the site.",
+ "message": "Select your preferred language to use on the site.",
+ "translation": ""
+ },
+ {
+ "id": "Eurozone Languages",
+ "message": "Eurozone Languages",
+ "translation": ""
+ },
+ {
+ "id": "Other Languages",
+ "message": "Other Languages",
+ "translation": ""
+ },
+ {
+ "id": "Home",
+ "message": "Home",
+ "translation": ""
+ },
+ {
+ "id": "News",
+ "message": "News",
+ "translation": ""
+ },
+ {
+ "id": "Coin Collecting",
+ "message": "Coin Collecting",
+ "translation": ""
+ },
+ {
+ "id": "Coins",
+ "message": "Coins",
+ "translation": ""
+ },
+ {
+ "id": "Banknotes",
+ "message": "Banknotes",
+ "translation": ""
+ },
+ {
+ "id": "Jargon",
+ "message": "Jargon",
+ "translation": ""
+ },
+ {
+ "id": "Discord",
+ "message": "Discord",
+ "translation": ""
+ },
+ {
+ "id": "About",
+ "message": "About",
+ "translation": ""
+ },
+ {
+ "id": "Language",
+ "message": "Language",
+ "translation": ""
+ }
+ ]
+} \ No newline at end of file
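Both catalogs above follow the usual gotext layout: the English file carries every translation copied verbatim from the source (hence the "Copied from source" translator comments and "fuzzy": true), while the Dutch file ships empty "translation" fields for translators to fill in. The sketch below shows how such catalogs are typically consumed; it assumes the standard golang.org/x/text workflow in which `gotext update` compiles the JSON into a generated catalog.go, and the commented import path and language tag are illustrative only, not this project's actual wiring.

```go
package main

import (
	"fmt"

	"golang.org/x/text/language"
	"golang.org/x/text/message"
	// Hypothetical import: the package holding the catalog.go that
	// `gotext update` generates from these messages.gotext.json files;
	// importing it registers the translations with the default catalog.
	// _ "git.thomasvoss.com/euro-cash.eu/src"
)

func main() {
	// The printer picks the best-matching catalog for the requested tag.
	// Lookup is keyed on the exact source string (the "id" field above);
	// an empty translation falls back to the English source text.
	p := message.NewPrinter(language.MustParse("nl"))
	fmt.Println(p.Sprintf("Netherlands"))
	fmt.Println(p.Sprintf("Select Your Language"))
}
```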
diff --git a/src/templates.go b/src/templates.go
new file mode 100644
index 0000000..839a6fb
--- /dev/null
+++ b/src/templates.go
@@ -0,0 +1,56 @@
+package src
+
+import (
+ "embed"
+ "html/template"
+ "strings"
+
+ "git.thomasvoss.com/euro-cash.eu/src/mintage"
+)
+
+type templateData struct {
+ Printer Printer
+ Code, Type string
+ Mintages mintage.Data
+ Countries []country
+}
+
+var (
+ //go:embed templates/*.html.tmpl
+ templateFS embed.FS
+ notFoundTmpl = buildTemplate("404")
+ errorTmpl = buildTemplate("error")
+ templates = map[string]*template.Template{
+ "/": buildTemplate("index"),
+ "/about": buildTemplate("about"),
+ "/language": buildTemplate("language"),
+ }
+ funcmap = map[string]any{
+ "safe": asHTML,
+ "locales": locales,
+ "toUpper": strings.ToUpper,
+ }
+)
+
+func buildTemplate(names ...string) *template.Template {
+ names = append([]string{"base", "navbar"}, names...)
+ for i, s := range names {
+ names[i] = "templates/" + s + ".html.tmpl"
+ }
+ return template.Must(template.
+ New("base.html.tmpl").
+ Funcs(funcmap).
+ ParseFS(templateFS, names...))
+}
+
+func asHTML(s string) template.HTML {
+ return template.HTML(s)
+}
+
+func locales() []locale {
+ return Locales[:]
+}
+
+func (td templateData) T(fmt string, args ...any) string {
+ return td.Printer.T(fmt, args...)
+}
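templates.go is the core of the html/template migration: each page gets its own *template.Template parsed from the embedded FS, always rooted at base.html.tmpl and always including the navbar, with `safe`, `locales`, and `toUpper` exposed through the funcmap. A rough sketch of how a handler could drive this — the function name, the Printer plumbing, and the error handling are assumptions, not the contents of the real src/http.go:

```go
package src

import (
	"log"
	"net/http"
)

// servePage is a hypothetical handler sketch, not the project's actual
// routing. It assumes a Printer has already been resolved for the
// request (e.g. from a cookie or the Accept-Language header).
func servePage(w http.ResponseWriter, r *http.Request, p Printer) {
	tmpl, ok := templates[r.URL.Path]
	if !ok {
		w.WriteHeader(http.StatusNotFound)
		tmpl = notFoundTmpl
	}

	// Every tree built by buildTemplate is named "base.html.tmpl", so
	// executing it renders the base layout, which in turn pulls in the
	// page's "content" block and the shared "navbar".
	if err := tmpl.Execute(w, templateData{Printer: p}); err != nil {
		log.Println(err)
	}
}
```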
diff --git a/src/templates/404.html.tmpl b/src/templates/404.html.tmpl
new file mode 100644
index 0000000..3771a0f
--- /dev/null
+++ b/src/templates/404.html.tmpl
@@ -0,0 +1,11 @@
+{{ define "content" }}
+<header>
+ {{ template "navbar" . }}
+ <h1>{{ .T "Page Not Found" }}</h1>
+</header>
+<main>
+ <p>
+ {{ .T "The page you were looking for does not exist. If you believe this is a mistake then don’t hesitate to contact @onetruemangoman on Discord or email us at %s." `<a href="mailto:mail@euro-cash.eu">mail@euro-cash.eu</a>` | safe }}
+ </p>
+</main>
+{{ end }}
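The `| safe` pipeline above is what lets the raw-string anchor through the auto-escaper: `.T` formats the translated message with the link as its %s argument, and `safe` — the asHTML helper registered in templates.go — wraps the result in template.HTML. Written out as plain Go for illustration (renderNotFoundBody is a made-up helper, not part of the codebase):

```go
package src

import "html/template"

// renderNotFoundBody illustrates the
//     {{ .T "… email us at %s." `<a …>…</a>` | safe }}
// pipeline in plain Go: td.T delegates to Printer.T for translation and
// formatting, and asHTML (exposed to templates as "safe") marks the result
// as trusted HTML so the anchor tag is not escaped.
func renderNotFoundBody(td templateData) template.HTML {
	link := `<a href="mailto:mail@euro-cash.eu">mail@euro-cash.eu</a>`
	return asHTML(td.T("The page you were looking for does not exist. If you believe this is a mistake then don’t hesitate to contact @onetruemangoman on Discord or email us at %s.", link))
}
```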
diff --git a/src/templates/about.html.tmpl b/src/templates/about.html.tmpl
new file mode 100644
index 0000000..aed356a
--- /dev/null
+++ b/src/templates/about.html.tmpl
@@ -0,0 +1,41 @@
+{{ define "content" }}
+<header>
+ {{ template "navbar" . }}
+ <h1>{{ .T "About Us" }}</h1>
+</header>
+<main>
+ <h2>{{ .T "Open Source" }}</h2>
+ <p>
+		{{ .T "This website is an open project, and a collaboration between developers, translators, and researchers. All source code, data, images, and more for the website are open source and can be found %shere%s. This site is licensed under the BSD 0-Clause license giving you the full freedom to do whatever you would like with any of the content on this site." `<a href="https://git.thomasvoss.com/www.euro-cash.eu" target="_blank">` `</a>` | safe }}
+ </p>
+ <h2>{{ .T "Contact Us" }}</h2>
+ <p>
+		{{ .T "While we try to stay as up-to-date as possible and to fact check our information, it is always possible that we get something wrong, lack a translation, or are missing some piece of data you may have. In such a case don’t hesitate to contact us; we’ll try to get the site updated or fixed as soon as possible. You are always free to contribute via a git patch if you are more technically inclined, but if not you can always send an email to %s or contact ‘@onetruemangoman’ on Discord." `<a href="mailto:mail@euro-cash.eu">mail@euro-cash.eu</a>` | safe }}
+ </p>
+ <h2>{{ .T "Special Thanks" }}</h2>
+ <table>
+		<thead>
+			<tr>
+				<th scope="col">{{ .T "Development" }}</th>
+				<th scope="col">{{ .T "Research" }}</th>
+				<th scope="col">{{ .T "Translations" }}</th>
+			</tr>
+		</thead>
+ <tbody>
+ <tr>
+ <td>
+ Jessika Wexler,
+ Lyyli Savolainen,
+ Ralf Nadel
+ </td>
+ <td>
+ Elín Hjartardóttir,
+ Storm Sørensen
+ </td>
+ <td>
+ <span data-tooltip={{ .T "British- & American English" }}>Thomas Voss</span>,
+ <span data-tooltip={{ .T "Icelandic" }}>Védís Indriðadóttir</span>
+ </td>
+ </tr>
+ </tbody>
+ </table>
+</main>
+{{ end }}
diff --git a/src/templates/base.html.tmpl b/src/templates/base.html.tmpl
new file mode 100644
index 0000000..0d5e731
--- /dev/null
+++ b/src/templates/base.html.tmpl
@@ -0,0 +1,43 @@
+<!DOCTYPE html>
+<html lang={{ .Printer.Locale.Bcp }}>
+ <head>
+ <meta charset="UTF-8">
+ <meta name="viewport" content="width=device-width, initial-scale=1.0">
+ <link href="/style.css" type="text/css" rel="stylesheet">
+ <!-- TODO: Translate website name -->
+ <title>Euro Cash</title>
+ <script type="text/javascript">
+ const $ = q => document.querySelector(q);
+ const $$ = q => document.querySelectorAll(q);
+
+ const validate = theme =>
+ ["light", "dark"].includes(theme) ? theme : "light";
+ const toggle = theme =>
+ theme == "light" ? "dark" : "light";
+
+ const setTheme = theme => {
+ localStorage.setItem("theme", theme);
+ $("html").setAttribute("data-theme", theme);
+ $(`#nav-icon-theme-${theme}`).style.display = "";
+ $(`#nav-icon-theme-${toggle(theme)}`).style.display = "none";
+ };
+
+ document.addEventListener("DOMContentLoaded", _ => {
+ $("#theme-button").onclick = () =>
+ setTheme(toggle(validate(localStorage.getItem("theme"))));
+ setTheme(validate(localStorage.getItem("theme")));
+ });
+ </script>
+ </head>
+ <body>
+ {{ template "content" . }}
+ <footer>
+ <p>
+ <small>
+ {{ .T "Found a mistake or want to contribute missing information?" }}
+ <a href="/about">{{ .T "Feel free to contact us!" }}</a>
+ </small>
+ </p>
+ </footer>
+ </body>
+</html>
diff --git a/src/templates/error.html.tmpl b/src/templates/error.html.tmpl
new file mode 100644
index 0000000..28ef0a3
--- /dev/null
+++ b/src/templates/error.html.tmpl
@@ -0,0 +1,14 @@
+{{ define "content" }}
+<header>
+ {{ template "navbar" . }}
+ <h1>{{ .Code }} {{ .Msg }}</h1>
+</header>
+<main>
+ <p>
+ {{ .T "If you’re seeing this page, it means that something went wrong on our end that we need to fix. Our team has been notified of this error, and we apologise for the inconvenience." }}
+ </p>
+ <p>
+		{{ .T "If this issue persists, don’t hesitate to contact @onetruemangoman on Discord or to email us at %s." `<a href="mailto:mail@euro-cash.eu">mail@euro-cash.eu</a>` | safe }}
+ </p>
+</main>
+{{ end }}
diff --git a/src/templates/index.html.tmpl b/src/templates/index.html.tmpl
new file mode 100644
index 0000000..9a26046
--- /dev/null
+++ b/src/templates/index.html.tmpl
@@ -0,0 +1,18 @@
+{{ define "content" }}
+<header>
+ {{ template "navbar" . }}
+ <hgroup>
+ <h1>{{ .T "The Euro Cash Compendium" }}</h1>
+ <p>
+ {{ .T "United in" }}
+ <del>{{ .T "diversity" }}</del>
+ <ins>{{ .T "cash" }}</ins>
+ </p>
+ </hgroup>
+</header>
+<main>
+ <p>
+		{{ .T "Welcome to the Euro Cash Compendium. This site aims to be a resource for you to discover everything there is to know about the coins and banknotes of the Euro, a currency that spans 26 countries and 350 million people. We also have dedicated sections of the site for collectors." }}
+ </p>
+</main>
+{{ end }}
diff --git a/src/templates/language.html.tmpl b/src/templates/language.html.tmpl
new file mode 100644
index 0000000..f7affa1
--- /dev/null
+++ b/src/templates/language.html.tmpl
@@ -0,0 +1,48 @@
+{{ define "content" }}
+<header>
+ {{ template "navbar" . }}
+ <h1>{{ .T "Select Your Language" }}</h1>
+</header>
+<main>
+ <p>
+ {{ .T "Select your preferred language to use on the site." }}
+ </p>
+ <p>
+ If you are an American user, it’s suggested that you select
+ American English instead of British English. This will ensure that
+ dates will be formatted with the month before the day.
+ </p>
+ <hr/>
+ <h2>{{ .T "Eurozone Languages" }}</h2>
+ {{ template "langgrid" true }}
+ <h2>{{ .T "Other Languages" }}</h2>
+ {{ template "langgrid" false }}
+</main>
+{{ end }}
+
+{{ define "langgrid" }}
+{{ $ez := . }}
+<form action="/language" method="POST">
+ <div class="lang-grid">
+ {{ range locales }}
+ {{ if eq $ez .Eurozone }}
+ <button
+ type="submit"
+ name="locale"
+ value={{ .Bcp }}
+ {{ if not .Enabled }}
+ disabled
+ {{ end }}
+ >
+ <span
+ lang={{ .Bcp }}
+ data-code={{ .Language | toUpper}}
+ >
+ {{ .Name }}
+ </span>
+ </button>
+ {{ end }}
+ {{ end }}
+ </div>
+</form>
+{{ end }}
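The langgrid block is invoked twice with a bare boolean as its dot and filters the `locales` list against `.Eurozone`, so Eurozone and non-Eurozone languages render from the same markup. Judging only from the fields the templates touch — `.Name`, `.Bcp`, `.Language`, `.Eurozone`, and `.Enabled` here, plus `.Printer.Locale.Bcp` in base.html.tmpl — the locale type plausibly looks like the sketch below; its real definition lives in src/i18n.go and may differ:

```go
// Hypothetical reconstruction of the locale type; the field names come
// straight from the template expressions, everything else is guessed.
type locale struct {
	Name     string // native display name shown on the button
	Bcp      string // BCP 47 tag: the lang= attribute and the POSTed "locale" value
	Language string // short language code, upper-cased for the data-code badge
	Eurozone bool   // true -> listed under "Eurozone Languages"
	Enabled  bool   // false -> button rendered but disabled
}
```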
diff --git a/src/templates/navbar.html.tmpl b/src/templates/navbar.html.tmpl
new file mode 100644
index 0000000..90f3cc7
--- /dev/null
+++ b/src/templates/navbar.html.tmpl
@@ -0,0 +1,228 @@
+{{ define "navbar" }}
+<nav>
+ <menu>
+ <li><a href="/">{{ .T "Home" }}</a></li>
+ <li><a href="#TODO">{{ .T "News" }}</a></li>
+ <li><a href="#TODO">{{ .T "Coin Collecting" }}</a></li>
+ <li><a href="/coins">{{ .T "Coins" }}</a></li>
+ <li><a href="#TODO">{{ .T "Banknotes" }}</a></li>
+ <li><a href="/jargon">{{ .T "Jargon" }}</a></li>
+ </menu>
+ <menu>
+ <li>
+ <a href="https://discord.gg/DCaXfRcy9C" target="_blank">
+ {{ .T "Discord" }}
+ </a>
+ </li>
+ <li><a href="/about">{{ .T "About" }}</a></li>
+ <li id="nav-icon-lang">
+ <a href="/language">
+ <svg
+ version="1.1"
+ width="19"
+ height="19"
+ viewBox="0 0 19 19"
+ fill="none"
+ xmlns="http://www.w3.org/2000/svg"
+ >
+ <circle
+ cx="9.5"
+ cy="9.5"
+ r="9"
+ stroke-linejoin="round"
+ ></circle>
+ <path
+ d="M 9.5 .5
+ C 9.5 .5
+ 6.0 3.5
+ 6.0 9.5
+ C 6.0 15.5
+ 9.5 18.5
+ 9.5 18.5"
+ stroke-linejoin="round"
+ ></path>
+ <path
+ d="M 9.5 .5
+ C 9.5 .5
+ 13.0 3.5
+ 13.0 9.5
+ C 13.0 15.5
+ 9.5 18.5
+ 9.5 18.5"
+ stroke-linejoin="round"
+ ></path>
+ <path d="M .5 9.5 H 18.5" stroke-linejoin="round"></path>
+ <path d="M 17.0 5.0 H 2.0" stroke-linejoin="round"></path>
+ <path d="M 17.0 14.0 H 2.0" stroke-linejoin="round"></path>
+ </svg>
+ {{ .T "Language" }}
+ </a>
+ </li>
+ <li id="nav-icon-theme">
+ <button id="theme-button">
+ <svg
+ id="nav-icon-theme-dark"
+ version="1.1"
+ width="24"
+ height="24"
+ viewBox="0 0 24 24"
+ fill="none"
+ xmlns="http://www.w3.org/2000/svg"
+ >
+ <path
+ d="M 7.28451 10.3333
+ C 7.10026 10.8546
+ 7 11.4156
+ 7 12
+ C 7 14.7614
+ 9.23858 17
+ 12 17
+ C 14.7614 17
+ 17 14.7614
+ 17 12
+ C 17 9.23858
+ 14.7614 7
+ 12 7
+ C 11.4156 7
+ 10.8546 7.10026
+ 10.3333 7.28451"
+ stroke-width="1.5"
+ stroke-linecap="round"
+ ></path>
+ <path
+ d="M 12 2 V 4"
+ stroke-width="1.5"
+ stroke-linecap="round"
+ ></path>
+ <path
+ d="M 12 20 V 22"
+ stroke-width="1.5"
+ stroke-linecap="round"
+ ></path>
+ <path
+ d="M 4 12 L 2 12"
+ stroke-width="1.5"
+ stroke-linecap="round"
+ ></path>
+ <path
+ d="M 22 12 L 20 12"
+ stroke-width="1.5"
+ stroke-linecap="round"
+ ></path>
+ <path
+ d="M 19.7778 4.22266 L 17.5558 6.25424"
+ stroke-width="1.5"
+ stroke-linecap="round"
+ ></path>
+ <path
+ d="M 4.22217 4.22266 L 6.44418 6.25424"
+ stroke-width="1.5"
+ stroke-linecap="round"
+ ></path>
+ <path
+ d="M 6.44434 17.5557 L 4.22211 19.7779"
+ stroke-width="1.5"
+ stroke-linecap="round"
+ ></path>
+ <path
+ d="M 19.7778 19.7773 L 17.5558 17.5551"
+ stroke-width="1.5"
+ stroke-linecap="round"
+ ></path>
+ </svg>
+ <svg
+ id="nav-icon-theme-light"
+ version="1.1"
+ width="24"
+ height="24"
+ viewBox="0 0 24 24"
+ fill="none"
+ xmlns="http://www.w3.org/2000/svg"
+ >
+ <path
+ d="M 21.0672 11.8568
+ L 20.4253 11.469
+ L 21.0672 11.8568
+ Z
+ M 12.1432 2.93276
+ L 11.7553 2.29085
+ V 2.29085
+ L 12.1432 2.93276
+ Z
+ M 7.37554 20.013
+ C 7.017 19.8056 6.5582 19.9281 6.3508 20.2866
+ C 6.14339 20.6452 6.26591 21.104 6.62446 21.3114
+ L 7.37554 20.013
+ Z
+ M 2.68862 17.3755
+ C 2.89602 17.7341 3.35482 17.8566 3.71337 17.6492
+ C 4.07191 17.4418 4.19443 16.983 3.98703 16.6245
+ L 2.68862 17.3755
+ Z
+ M 21.25 12
+ C 21.25 17.1086 17.1086 21.25 12 21.25
+ V 22.75
+ C 17.9371 22.75 22.75 17.9371 22.75 12
+ H 21.25
+ Z
+ M 2.75 12
+ C 2.75 6.89137 6.89137 2.75 12 2.75
+ V 1.25
+ C 6.06294 1.25 1.25 6.06294 1.25 12
+ H 2.75
+ Z
+ M 15.5 14.25
+ C 12.3244 14.25 9.75 11.6756 9.75 8.5
+ H 8.25
+ C 8.25 12.5041 11.4959 15.75 15.5 15.75
+ V 14.25
+ Z
+ M 20.4253 11.469
+ C 19.4172 13.1373 17.5882 14.25 15.5 14.25
+ V 15.75
+ C 18.1349 15.75 20.4407 14.3439 21.7092 12.2447
+ L 20.4253 11.469
+ Z
+ M 9.75 8.5
+ C 9.75 6.41182 10.8627 4.5828 12.531 3.57467
+ L 11.7553 2.29085
+ C 9.65609 3.5593 8.25 5.86509 8.25 8.5
+ H 9.75
+ Z
+ M 12 2.75
+ C 11.9115 2.75 11.8077 2.71008 11.7324 2.63168
+ C 11.6686 2.56527 11.6538 2.50244 11.6503 2.47703
+ C 11.6461 2.44587 11.6482 2.35557 11.7553 2.29085
+ L 12.531 3.57467
+ C 13.0342 3.27065 13.196 2.71398 13.1368 2.27627
+ C 13.0754 1.82126 12.7166 1.25 12 1.25
+ V 2.75
+ Z
+ M 21.7092 12.2447
+ C 21.6444 12.3518 21.5541 12.3539 21.523 12.3497
+ C 21.4976 12.3462 21.4347 12.3314 21.3683 12.2676
+ C 21.2899 12.1923 21.25 12.0885 21.25 12
+ H 22.75
+ C 22.75 11.2834 22.1787 10.9246 21.7237 10.8632
+ C 21.286 10.804 20.7293 10.9658 20.4253 11.469
+ L 21.7092 12.2447
+ Z
+ M 12 21.25
+ C 10.3139 21.25 8.73533 20.7996 7.37554 20.013
+ L 6.62446 21.3114
+ C 8.2064 22.2265 10.0432 22.75 12 22.75
+ V 21.25
+ Z
+ M 3.98703 16.6245
+ C 3.20043 15.2647 2.75 13.6861 2.75 12
+ H 1.25
+ C 1.25 13.9568 1.77351 15.7936 2.68862 17.3755
+ L 3.98703 16.6245
+ Z"
+ ></path>
+ </svg>
+ </button>
+ </li>
+ </menu>
+</nav>
+{{ end }}
diff --git a/template/404.templ b/template.old/404.templ
index 9a03b43..9a03b43 100644
--- a/template/404.templ
+++ b/template.old/404.templ
diff --git a/template/about.templ b/template.old/about.templ
index d7dfa06..d7dfa06 100644
--- a/template/about.templ
+++ b/template.old/about.templ
diff --git a/template/base.templ b/template.old/base.templ
index dcef2d8..dcef2d8 100644
--- a/template/base.templ
+++ b/template.old/base.templ
diff --git a/template/coins.templ b/template.old/coins.templ
index ef1b0a1..ef1b0a1 100644
--- a/template/coins.templ
+++ b/template.old/coins.templ
diff --git a/template/coins_designs.templ b/template.old/coins_designs.templ
index de47482..de47482 100644
--- a/template/coins_designs.templ
+++ b/template.old/coins_designs.templ
diff --git a/template/coins_designs_nl.templ b/template.old/coins_designs_nl.templ
index 819b294..92c01be 100644
--- a/template/coins_designs_nl.templ
+++ b/template.old/coins_designs_nl.templ
@@ -1,6 +1,6 @@
package template
-import "git.thomasvoss.com/euro-cash.eu/lib"
+import "git.thomasvoss.com/euro-cash.eu/src"
templ CoinsDesignsNl() {
{{ p := ctx.Value("printer").(lib.Printer) }}
diff --git a/template/coins_mintages.templ b/template.old/coins_mintages.templ
index 894cb75..894cb75 100644
--- a/template/coins_mintages.templ
+++ b/template.old/coins_mintages.templ
diff --git a/template/error.templ b/template.old/error.templ
index d2fed83..d2fed83 100644
--- a/template/error.templ
+++ b/template.old/error.templ
diff --git a/template/jargon.templ b/template.old/jargon.templ
index 255068e..255068e 100644
--- a/template/jargon.templ
+++ b/template.old/jargon.templ
diff --git a/template/language.templ b/template.old/language.templ
index 1b48295..1b48295 100644
--- a/template/language.templ
+++ b/template.old/language.templ
diff --git a/template/navbar.templ b/template.old/navbar.templ
index 85ad8a2..85ad8a2 100644
--- a/template/navbar.templ
+++ b/template.old/navbar.templ
diff --git a/template/root.templ b/template.old/root.templ
index 657314c..657314c 100644
--- a/template/root.templ
+++ b/template.old/root.templ
diff --git a/template/base.go b/template/base.go
deleted file mode 100644
index 392b1c2..0000000
--- a/template/base.go
+++ /dev/null
@@ -1,3 +0,0 @@
-//go:generate templ generate -log-level warn
-
-package template
diff --git a/vendor/github.com/a-h/templ/.dockerignore b/vendor/github.com/a-h/templ/.dockerignore
deleted file mode 100644
index 17896fe..0000000
--- a/vendor/github.com/a-h/templ/.dockerignore
+++ /dev/null
@@ -1,3 +0,0 @@
-.git
-Dockerfile
-.dockerignore
diff --git a/vendor/github.com/a-h/templ/.gitignore b/vendor/github.com/a-h/templ/.gitignore
deleted file mode 100644
index 0338eda..0000000
--- a/vendor/github.com/a-h/templ/.gitignore
+++ /dev/null
@@ -1,28 +0,0 @@
-# Output.
-cmd/templ/templ
-
-# Logs.
-cmd/templ/lspcmd/*log.txt
-
-# Go code coverage.
-coverage.out
-coverage
-
-# Mac filesystem jank.
-.DS_Store
-
-# Docusaurus.
-docs/build/
-docs/resources/_gen/
-node_modules/
-dist/
-
-# Nix artifacts.
-result
-
-# Editors
-## nvim
-.null-ls*
-
-# Go workspace.
-go.work
diff --git a/vendor/github.com/a-h/templ/.goreleaser.yaml b/vendor/github.com/a-h/templ/.goreleaser.yaml
deleted file mode 100644
index 456187c..0000000
--- a/vendor/github.com/a-h/templ/.goreleaser.yaml
+++ /dev/null
@@ -1,72 +0,0 @@
-builds:
- - env:
- - CGO_ENABLED=0
- dir: cmd/templ
- mod_timestamp: '{{ .CommitTimestamp }}'
- flags:
- - -trimpath
- ldflags:
- - -s -w
- goos:
- - linux
- - windows
- - darwin
-
-checksum:
- name_template: 'checksums.txt'
-
-signs:
- - id: checksums
- cmd: cosign
- stdin: '{{ .Env.COSIGN_PASSWORD }}'
- output: true
- artifacts: checksum
- args:
- - sign-blob
- - --yes
- - --key
- - env://COSIGN_PRIVATE_KEY
- - '--output-certificate=${certificate}'
- - '--output-signature=${signature}'
- - '${artifact}'
-
-archives:
- - format: tar.gz
- name_template: >-
- {{ .ProjectName }}_
- {{- title .Os }}_
- {{- if eq .Arch "amd64" }}x86_64
- {{- else if eq .Arch "386" }}i386
- {{- else }}{{ .Arch }}{{ end }}
- {{- if .Arm }}v{{ .Arm }}{{ end }}
-
-kos:
- - repository: ghcr.io/a-h/templ
- platforms:
- - linux/amd64
- - linux/arm64
- tags:
- - latest
- - '{{.Tag}}'
- bare: true
-
-docker_signs:
- - cmd: cosign
- artifacts: all
- output: true
- args:
- - sign
- - --yes
- - --key
- - env://COSIGN_PRIVATE_KEY
- - '${artifact}'
-
-snapshot:
- name_template: "{{ incpatch .Version }}-next"
-
-changelog:
- sort: asc
- filters:
- exclude:
- - '^docs:'
- - '^test:'
diff --git a/vendor/github.com/a-h/templ/.ignore b/vendor/github.com/a-h/templ/.ignore
deleted file mode 100644
index 21cb25e..0000000
--- a/vendor/github.com/a-h/templ/.ignore
+++ /dev/null
@@ -1,7 +0,0 @@
-*_templ.go
-examples/integration-ct/static/index.js
-examples/counter/assets/css/bulma.*
-examples/counter/assets/js/htmx.min.js
-examples/counter-basic/assets/css/bulma.*
-examples/typescript/assets/index.js
-package-lock.json
diff --git a/vendor/github.com/a-h/templ/.version b/vendor/github.com/a-h/templ/.version
deleted file mode 100644
index baee64f..0000000
--- a/vendor/github.com/a-h/templ/.version
+++ /dev/null
@@ -1 +0,0 @@
-0.2.747 \ No newline at end of file
diff --git a/vendor/github.com/a-h/templ/CODE_OF_CONDUCT.md b/vendor/github.com/a-h/templ/CODE_OF_CONDUCT.md
deleted file mode 100644
index 08340d3..0000000
--- a/vendor/github.com/a-h/templ/CODE_OF_CONDUCT.md
+++ /dev/null
@@ -1,128 +0,0 @@
-# Contributor Covenant Code of Conduct
-
-## Our Pledge
-
-We as members, contributors, and leaders pledge to make participation in our
-community a harassment-free experience for everyone, regardless of age, body
-size, visible or invisible disability, ethnicity, sex characteristics, gender
-identity and expression, level of experience, education, socio-economic status,
-nationality, personal appearance, race, religion, or sexual identity
-and orientation.
-
-We pledge to act and interact in ways that contribute to an open, welcoming,
-diverse, inclusive, and healthy community.
-
-## Our Standards
-
-Examples of behavior that contributes to a positive environment for our
-community include:
-
-* Demonstrating empathy and kindness toward other people
-* Being respectful of differing opinions, viewpoints, and experiences
-* Giving and gracefully accepting constructive feedback
-* Accepting responsibility and apologizing to those affected by our mistakes,
- and learning from the experience
-* Focusing on what is best not just for us as individuals, but for the
- overall community
-
-Examples of unacceptable behavior include:
-
-* The use of sexualized language or imagery, and sexual attention or
- advances of any kind
-* Trolling, insulting or derogatory comments, and personal or political attacks
-* Public or private harassment
-* Publishing others' private information, such as a physical or email
- address, without their explicit permission
-* Other conduct which could reasonably be considered inappropriate in a
- professional setting
-
-## Enforcement Responsibilities
-
-Community leaders are responsible for clarifying and enforcing our standards of
-acceptable behavior and will take appropriate and fair corrective action in
-response to any behavior that they deem inappropriate, threatening, offensive,
-or harmful.
-
-Community leaders have the right and responsibility to remove, edit, or reject
-comments, commits, code, wiki edits, issues, and other contributions that are
-not aligned to this Code of Conduct, and will communicate reasons for moderation
-decisions when appropriate.
-
-## Scope
-
-This Code of Conduct applies within all community spaces, and also applies when
-an individual is officially representing the community in public spaces.
-Examples of representing our community include using an official e-mail address,
-posting via an official social media account, or acting as an appointed
-representative at an online or offline event.
-
-## Enforcement
-
-Instances of abusive, harassing, or otherwise unacceptable behavior may be
-reported to the community leaders responsible for enforcement at
-adrianhesketh@hushail.com.
-All complaints will be reviewed and investigated promptly and fairly.
-
-All community leaders are obligated to respect the privacy and security of the
-reporter of any incident.
-
-## Enforcement Guidelines
-
-Community leaders will follow these Community Impact Guidelines in determining
-the consequences for any action they deem in violation of this Code of Conduct:
-
-### 1. Correction
-
-**Community Impact**: Use of inappropriate language or other behavior deemed
-unprofessional or unwelcome in the community.
-
-**Consequence**: A private, written warning from community leaders, providing
-clarity around the nature of the violation and an explanation of why the
-behavior was inappropriate. A public apology may be requested.
-
-### 2. Warning
-
-**Community Impact**: A violation through a single incident or series
-of actions.
-
-**Consequence**: A warning with consequences for continued behavior. No
-interaction with the people involved, including unsolicited interaction with
-those enforcing the Code of Conduct, for a specified period of time. This
-includes avoiding interactions in community spaces as well as external channels
-like social media. Violating these terms may lead to a temporary or
-permanent ban.
-
-### 3. Temporary Ban
-
-**Community Impact**: A serious violation of community standards, including
-sustained inappropriate behavior.
-
-**Consequence**: A temporary ban from any sort of interaction or public
-communication with the community for a specified period of time. No public or
-private interaction with the people involved, including unsolicited interaction
-with those enforcing the Code of Conduct, is allowed during this period.
-Violating these terms may lead to a permanent ban.
-
-### 4. Permanent Ban
-
-**Community Impact**: Demonstrating a pattern of violation of community
-standards, including sustained inappropriate behavior, harassment of an
-individual, or aggression toward or disparagement of classes of individuals.
-
-**Consequence**: A permanent ban from any sort of public interaction within
-the community.
-
-## Attribution
-
-This Code of Conduct is adapted from the [Contributor Covenant][homepage],
-version 2.0, available at
-https://www.contributor-covenant.org/version/2/0/code_of_conduct.html.
-
-Community Impact Guidelines were inspired by [Mozilla's code of conduct
-enforcement ladder](https://github.com/mozilla/diversity).
-
-[homepage]: https://www.contributor-covenant.org
-
-For answers to common questions about this code of conduct, see the FAQ at
-https://www.contributor-covenant.org/faq. Translations are available at
-https://www.contributor-covenant.org/translations.
diff --git a/vendor/github.com/a-h/templ/CONTRIBUTING.md b/vendor/github.com/a-h/templ/CONTRIBUTING.md
deleted file mode 100644
index e98d31f..0000000
--- a/vendor/github.com/a-h/templ/CONTRIBUTING.md
+++ /dev/null
@@ -1,244 +0,0 @@
-# Contributing to templ
-
-## Vision
-
-Enable Go developers to build strongly typed, component-based HTML user interfaces with first-class developer tooling, and a short learning curve.
-
-## Come up with a design and share it
-
-Before starting work on any major pull requests or code changes, start a discussion at https://github.com/a-h/templ/discussions or raise an issue.
-
-We don't want you to spend time on a PR or feature that ultimately doesn't get merged because it doesn't fit with the project goals, or the design doesn't work for some reason.
-
-For issues, it really helps if you provide a reproduction repo, or can create a failing unit test to describe the behaviour.
-
-In designs, we need to consider:
-
-* Backwards compatibility - Not changing the public API between releases, introducing gradual deprecation - don't break people's code.
-* Correctness over time - How can we reduce the risk of defects both now, and in future releases?
-* Threat model - How could each change be used to inject vulnerabilities into web pages?
-* Go version - We target the oldest supported version of Go as per https://go.dev/doc/devel/release
-* Automatic migration - If we need to force through a change.
-* Compile time vs runtime errors - Prefer compile time.
-* Documentation - New features are only useful if people can understand the new feature, what would the documentation look like?
-* Examples - How will we demonstrate the feature?
-
-## Project structure
-
-templ is structured into a few areas:
-
-### Parser `./parser`
-
-The parser directory currently contains both v1 and v2 parsers.
-
-The v1 parser is not maintained, it's only used to migrate v1 code over to the v2 syntax.
-
-The parser is responsible for parsing templ files into an object model. The types that make up the object model are in `types.go`. Automatic formatting of the types is tested in `types_test.go`.
-
-A templ file is parsed into the `TemplateFile` struct object model.
-
-```go
-type TemplateFile struct {
- // Header contains comments or whitespace at the top of the file.
- Header []GoExpression
- // Package expression.
- Package Package
- // Nodes in the file.
- Nodes []TemplateFileNode
-}
-```
-
-Parsers are individually tested using two types of unit test.
-
-One test covers the successful parsing of text into an object. For example, the `HTMLCommentParser` test checks for successful patterns.
-
-```go
-func TestHTMLCommentParser(t *testing.T) {
- var tests = []struct {
- name string
- input string
- expected HTMLComment
- }{
- {
- name: "comment - single line",
- input: `<!-- single line comment -->`,
- expected: HTMLComment{
- Contents: " single line comment ",
- },
- },
- {
- name: "comment - no whitespace",
- input: `<!--no whitespace between sequence open and close-->`,
- expected: HTMLComment{
- Contents: "no whitespace between sequence open and close",
- },
- },
- {
- name: "comment - multiline",
- input: `<!-- multiline
- comment
- -->`,
- expected: HTMLComment{
- Contents: ` multiline
- comment
- `,
- },
- },
- {
- name: "comment - with tag",
- input: `<!-- <p class="test">tag</p> -->`,
- expected: HTMLComment{
- Contents: ` <p class="test">tag</p> `,
- },
- },
- {
- name: "comments can contain tags",
- input: `<!-- <div> hello world </div> -->`,
- expected: HTMLComment{
- Contents: ` <div> hello world </div> `,
- },
- },
- }
- for _, tt := range tests {
- tt := tt
- t.Run(tt.name, func(t *testing.T) {
- input := parse.NewInput(tt.input)
- result, ok, err := htmlComment.Parse(input)
- if err != nil {
- t.Fatalf("parser error: %v", err)
- }
- if !ok {
- t.Fatalf("failed to parse at %d", input.Index())
- }
- if diff := cmp.Diff(tt.expected, result); diff != "" {
- t.Errorf(diff)
- }
- })
- }
-}
-```
-
-Alongside each success test, is a similar test to check that invalid syntax is detected.
-
-```go
-func TestHTMLCommentParserErrors(t *testing.T) {
- var tests = []struct {
- name string
- input string
- expected error
- }{
- {
- name: "unclosed HTML comment",
- input: `<!-- unclosed HTML comment`,
- expected: parse.Error("expected end comment literal '-->' not found",
- parse.Position{
- Index: 26,
- Line: 0,
- Col: 26,
- }),
- },
- {
- name: "comment in comment",
- input: `<!-- <-- other --> -->`,
- expected: parse.Error("comment contains invalid sequence '--'", parse.Position{
- Index: 8,
- Line: 0,
- Col: 8,
- }),
- },
- }
- for _, tt := range tests {
- tt := tt
- t.Run(tt.name, func(t *testing.T) {
- input := parse.NewInput(tt.input)
- _, _, err := htmlComment.Parse(input)
- if diff := cmp.Diff(tt.expected, err); diff != "" {
- t.Error(diff)
- }
- })
- }
-}
-```
-
-### Generator
-
-The generator takes the object model and writes out Go code that produces the expected output. Any changes to Go code output by templ are made in this area.
-
-Testing of the generator is carried out by creating a templ file, and a matching expected output file.
-
-For example, `./generator/test-a-href` contains a templ file of:
-
-```templ
-package testahref
-
-templ render() {
- <a href="javascript:alert(&#39;unaffected&#39;);">Ignored</a>
- <a href={ templ.URL("javascript:alert('should be sanitized')") }>Sanitized</a>
- <a href={ templ.SafeURL("javascript:alert('should not be sanitized')") }>Unsanitized</a>
-}
-```
-
-It also contains an expected output file.
-
-```html
-<a href="javascript:alert(&#39;unaffected&#39;);">Ignored</a>
-<a href="about:invalid#TemplFailedSanitizationURL">Sanitized</a>
-<a href="javascript:alert(&#39;should not be sanitized&#39;)">Unsanitized</a>
-```
-
-These tests contribute towards the code coverage metrics by building an instrumented test CLI program. See the `test-cover` task in the `README.md` file.
-
-### CLI
-
-The command line interface for templ is used to generate Go code from templ files, format templ files, and run the LSP.
-
-The code for this is at `./cmd/templ`.
-
-Testing of the templ command line is done with unit tests to check the argument parsing.
-
-The `templ generate` command is tested by generating templ files in the project, and testing that the expected output HTML is present.
-
-### Runtime
-
-The runtime is used by generated code, and by template authors, to serve template content over HTTP, and to carry out various operations.
-
-It is in the root directory of the project at `./runtime.go`. The runtime is unit tested, as well as being tested as part of the `generate` tests.
-
-### LSP
-
-The LSP is structured within the command line interface, and proxies commands through to the `gopls` LSP.
-
-### Docs
-
-The docs are a Docusaurus project at `./docs`.
-
-## Coding
-
-### Build tasks
-
-templ uses the `xc` task runner - https://github.com/joerdav/xc
-
-If you run `xc` you can get see a list of the development tasks that can be run, or you can read the `README.md` file and see the `Tasks` section.
-
-The most useful tasks for local development are:
-
-* `install-snapshot` - this builds the templ CLI and installs it into `~/bin`. Ensure that this is in your path.
-* `test` - this regenerates all templates, and runs the unit tests.
-* `fmt` - run the `gofmt` tool to format all Go code.
-* `lint` - run the same linting as run in the CI process.
-* `docs-run` - run the Docusaurus documentation site.
-
-### Commit messages
-
-The project using https://www.conventionalcommits.org/en/v1.0.0/
-
-Examples:
-
-* `feat: support Go comments in templates, fixes #234"`
-
-### Coding style
-
-* Reduce nesting - i.e. prefer early returns over an `else` block, as per https://danp.net/posts/reducing-go-nesting/ or https://go.dev/doc/effective_go#if
-* Use line breaks to separate "paragraphs" of code - don't use line breaks in between lines, or at the start/end of functions etc.
-* Use the `fmt` and `lint` build tasks to format and lint your code before submitting a PR.
-
diff --git a/vendor/github.com/a-h/templ/LICENSE b/vendor/github.com/a-h/templ/LICENSE
deleted file mode 100644
index 15e6fb8..0000000
--- a/vendor/github.com/a-h/templ/LICENSE
+++ /dev/null
@@ -1,21 +0,0 @@
-MIT License
-
-Copyright (c) 2021 Adrian Hesketh
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
diff --git a/vendor/github.com/a-h/templ/README.md b/vendor/github.com/a-h/templ/README.md
deleted file mode 100644
index e3087f0..0000000
--- a/vendor/github.com/a-h/templ/README.md
+++ /dev/null
@@ -1,171 +0,0 @@
-![templ](https://github.com/a-h/templ/raw/main/templ.png)
-
-## An HTML templating language for Go that has great developer tooling.
-
-![templ](ide-demo.gif)
-
-
-## Documentation
-
-See user documentation at https://templ.guide
-
-<p align="center">
-<a href="https://pkg.go.dev/github.com/a-h/templ"><img src="https://pkg.go.dev/badge/github.com/a-h/templ.svg" alt="Go Reference" /></a>
-<a href="https://xcfile.dev"><img src="https://xcfile.dev/badge.svg" alt="xc compatible" /></a>
-<a href="https://raw.githack.com/wiki/a-h/templ/coverage.html"><img src="https://github.com/a-h/templ/wiki/coverage.svg" alt="Go Coverage" /></a>
-<a href="https://goreportcard.com/report/github.com/a-h/templ"><img src="https://goreportcard.com/badge/github.com/a-h/templ" alt="Go Report Card" /></a<
-</p>
-
-## Tasks
-
-### build
-
-Build a local version.
-
-```sh
-go run ./get-version > .version
-cd cmd/templ
-go build
-```
-
-### nix-update-gomod2nix
-
-```sh
-gomod2nix
-```
-
-### install-snapshot
-
-Build and install current version.
-
-```sh
-# Remove templ from the non-standard ~/bin/templ path
-# that this command previously used.
-rm -f ~/bin/templ
-# Clear LSP logs.
-rm -f cmd/templ/lspcmd/*.txt
-# Update version.
-go run ./get-version > .version
-# Install to $GOPATH/bin or $HOME/go/bin
-cd cmd/templ && go install
-```
-
-### build-snapshot
-
-Use goreleaser to build the command line binary using goreleaser.
-
-```sh
-goreleaser build --snapshot --clean
-```
-
-### generate
-
-Run templ generate using local version.
-
-```sh
-go run ./cmd/templ generate -include-version=false
-```
-
-### test
-
-Run Go tests.
-
-```sh
-go run ./get-version > .version
-go run ./cmd/templ generate -include-version=false
-go test ./...
-```
-
-### test-short
-
-Run Go tests.
-
-```sh
-go run ./get-version > .version
-go run ./cmd/templ generate -include-version=false
-go test ./... -short
-```
-
-### test-cover
-
-Run Go tests.
-
-```sh
-# Create test profile directories.
-mkdir -p coverage/fmt
-mkdir -p coverage/generate
-mkdir -p coverage/version
-mkdir -p coverage/unit
-# Build the test binary.
-go build -cover -o ./coverage/templ-cover ./cmd/templ
-# Run the covered generate command.
-GOCOVERDIR=coverage/fmt ./coverage/templ-cover fmt .
-GOCOVERDIR=coverage/generate ./coverage/templ-cover generate -include-version=false
-GOCOVERDIR=coverage/version ./coverage/templ-cover version
-# Run the unit tests.
-go test -cover ./... -coverpkg ./... -args -test.gocoverdir="$PWD/coverage/unit"
-# Display the combined percentage.
-go tool covdata percent -i=./coverage/fmt,./coverage/generate,./coverage/version,./coverage/unit
-# Generate a text coverage profile for tooling to use.
-go tool covdata textfmt -i=./coverage/fmt,./coverage/generate,./coverage/version,./coverage/unit -o coverage.out
-# Print total
-go tool cover -func coverage.out | grep total
-```
-
-### test-cover-watch
-
-```sh
-gotestsum --watch -- -coverprofile=coverage.out
-```
-
-### benchmark
-
-Run benchmarks.
-
-```sh
-go run ./cmd/templ generate -include-version=false && go test ./... -bench=. -benchmem
-```
-
-### fmt
-
-Format all Go and templ code.
-
-```sh
-gofmt -s -w .
-go run ./cmd/templ fmt .
-```
-
-### lint
-
-```sh
-golangci-lint run --verbose
-```
-
-### push-release-tag
-
-Push a semantic version number to Github to trigger the release process.
-
-```sh
-./push-tag.sh
-```
-
-### docs-run
-
-Run the development server.
-
-Directory: docs
-
-```sh
-npm run start
-```
-
-### docs-build
-
-Build production docs site.
-
-Directory: docs
-
-```sh
-npm run build
-```
-
diff --git a/vendor/github.com/a-h/templ/SECURITY.md b/vendor/github.com/a-h/templ/SECURITY.md
deleted file mode 100644
index 8241f55..0000000
--- a/vendor/github.com/a-h/templ/SECURITY.md
+++ /dev/null
@@ -1,9 +0,0 @@
-# Security Policy
-
-## Supported Versions
-
-The latest version of templ is supported.
-
-## Reporting a Vulnerability
-
-Use the "Security" tab in Github and fill out the "Report a vulnerability" form.
diff --git a/vendor/github.com/a-h/templ/cosign.pub b/vendor/github.com/a-h/templ/cosign.pub
deleted file mode 100644
index 9d7967b..0000000
--- a/vendor/github.com/a-h/templ/cosign.pub
+++ /dev/null
@@ -1,4 +0,0 @@
------BEGIN PUBLIC KEY-----
-MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEqHp75uAj8XqKrLO2YvY0M2EddckH
-evQnNAj+0GmBptqdf3NJcUCjL6w4z2Ikh/Zb8lh6b13akAwO/dJQaMLoMA==
------END PUBLIC KEY-----
diff --git a/vendor/github.com/a-h/templ/flake.lock b/vendor/github.com/a-h/templ/flake.lock
deleted file mode 100644
index af4e370..0000000
--- a/vendor/github.com/a-h/templ/flake.lock
+++ /dev/null
@@ -1,140 +0,0 @@
-{
- "nodes": {
- "flake-utils": {
- "inputs": {
- "systems": "systems"
- },
- "locked": {
- "lastModified": 1694529238,
- "narHash": "sha256-zsNZZGTGnMOf9YpHKJqMSsa0dXbfmxeoJ7xHlrt+xmY=",
- "owner": "numtide",
- "repo": "flake-utils",
- "rev": "ff7b65b44d01cf9ba6a71320833626af21126384",
- "type": "github"
- },
- "original": {
- "owner": "numtide",
- "repo": "flake-utils",
- "type": "github"
- }
- },
- "flake-utils_2": {
- "locked": {
- "lastModified": 1667395993,
- "narHash": "sha256-nuEHfE/LcWyuSWnS8t12N1wc105Qtau+/OdUAjtQ0rA=",
- "owner": "numtide",
- "repo": "flake-utils",
- "rev": "5aed5285a952e0b949eb3ba02c12fa4fcfef535f",
- "type": "github"
- },
- "original": {
- "owner": "numtide",
- "repo": "flake-utils",
- "type": "github"
- }
- },
- "gitignore": {
- "inputs": {
- "nixpkgs": [
- "nixpkgs"
- ]
- },
- "locked": {
- "lastModified": 1709087332,
- "narHash": "sha256-HG2cCnktfHsKV0s4XW83gU3F57gaTljL9KNSuG6bnQs=",
- "owner": "hercules-ci",
- "repo": "gitignore.nix",
- "rev": "637db329424fd7e46cf4185293b9cc8c88c95394",
- "type": "github"
- },
- "original": {
- "owner": "hercules-ci",
- "repo": "gitignore.nix",
- "type": "github"
- }
- },
- "gomod2nix": {
- "inputs": {
- "flake-utils": "flake-utils",
- "nixpkgs": [
- "nixpkgs"
- ]
- },
- "locked": {
- "lastModified": 1717050755,
- "narHash": "sha256-C9IEHABulv2zEDFA+Bf0E1nmfN4y6MIUe5eM2RCrDC0=",
- "owner": "nix-community",
- "repo": "gomod2nix",
- "rev": "31b6d2e40b36456e792cd6cf50d5a8ddd2fa59a1",
- "type": "github"
- },
- "original": {
- "owner": "nix-community",
- "repo": "gomod2nix",
- "type": "github"
- }
- },
- "nixpkgs": {
- "locked": {
- "lastModified": 1720096762,
- "narHash": "sha256-KvpJIWxTNuaSpN2L/9TmTlEhlwxEnzJ1vCpEcfK/4mQ=",
- "owner": "NixOS",
- "repo": "nixpkgs",
- "rev": "638369f687471823770f6d3093f1721dc7b8c897",
- "type": "github"
- },
- "original": {
- "owner": "NixOS",
- "ref": "release-24.05",
- "repo": "nixpkgs",
- "type": "github"
- }
- },
- "root": {
- "inputs": {
- "gitignore": "gitignore",
- "gomod2nix": "gomod2nix",
- "nixpkgs": "nixpkgs",
- "xc": "xc"
- }
- },
- "systems": {
- "locked": {
- "lastModified": 1681028828,
- "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
- "owner": "nix-systems",
- "repo": "default",
- "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
- "type": "github"
- },
- "original": {
- "owner": "nix-systems",
- "repo": "default",
- "type": "github"
- }
- },
- "xc": {
- "inputs": {
- "flake-utils": "flake-utils_2",
- "nixpkgs": [
- "nixpkgs"
- ]
- },
- "locked": {
- "lastModified": 1717601811,
- "narHash": "sha256-+XQvDRXpzjBdZI3JGKP6SAOYXM+JSEbWL5kqtCwRJXE=",
- "owner": "joerdav",
- "repo": "xc",
- "rev": "f8e8e658978d6c9fe49c27b684ca7375a74deef1",
- "type": "github"
- },
- "original": {
- "owner": "joerdav",
- "repo": "xc",
- "type": "github"
- }
- }
- },
- "root": "root",
- "version": 7
-}
diff --git a/vendor/github.com/a-h/templ/flake.nix b/vendor/github.com/a-h/templ/flake.nix
deleted file mode 100644
index fd8a238..0000000
--- a/vendor/github.com/a-h/templ/flake.nix
+++ /dev/null
@@ -1,93 +0,0 @@
-{
- description = "templ";
-
- inputs = {
- nixpkgs.url = "github:NixOS/nixpkgs/release-24.05";
- gomod2nix = {
- url = "github:nix-community/gomod2nix";
- inputs.nixpkgs.follows = "nixpkgs";
- };
- gitignore = {
- url = "github:hercules-ci/gitignore.nix";
- inputs.nixpkgs.follows = "nixpkgs";
- };
- xc = {
- url = "github:joerdav/xc";
- inputs.nixpkgs.follows = "nixpkgs";
- };
- };
-
- outputs = { self, nixpkgs, gomod2nix, gitignore, xc }:
- let
- allSystems = [
- "x86_64-linux" # 64-bit Intel/AMD Linux
- "aarch64-linux" # 64-bit ARM Linux
- "x86_64-darwin" # 64-bit Intel macOS
- "aarch64-darwin" # 64-bit ARM macOS
- ];
- forAllSystems = f: nixpkgs.lib.genAttrs allSystems (system: f {
- inherit system;
- pkgs = import nixpkgs { inherit system; };
- });
- in
- {
- packages = forAllSystems ({ system, pkgs, ... }:
- let
- buildGoApplication = gomod2nix.legacyPackages.${system}.buildGoApplication;
- in
- rec {
- default = templ;
-
- templ = buildGoApplication {
- name = "templ";
- src = gitignore.lib.gitignoreSource ./.;
- # Update to latest Go version when https://nixpk.gs/pr-tracker.html?pr=324123 is backported to release-24.05.
- go = pkgs.go;
- # Must be added due to bug https://github.com/nix-community/gomod2nix/issues/120
- pwd = ./.;
- subPackages = [ "cmd/templ" ];
- CGO_ENABLED = 0;
- flags = [
- "-trimpath"
- ];
- ldflags = [
- "-s"
- "-w"
- "-extldflags -static"
- ];
- };
- });
-
- # `nix develop` provides a shell containing development tools.
- devShell = forAllSystems ({ system, pkgs }:
- pkgs.mkShell {
- buildInputs = with pkgs; [
- (golangci-lint.override { buildGoModule = buildGo121Module; })
- cosign # Used to sign container images.
- esbuild # Used to package JS examples.
- go_1_21
- gomod2nix.legacyPackages.${system}.gomod2nix
- gopls
- goreleaser
- gotestsum
- ko # Used to build Docker images.
- nodejs # Used to build templ-docs.
- xc.packages.${system}.xc
- ];
- });
-
- # This flake outputs an overlay that can be used to add templ and
- # templ-docs to nixpkgs as per https://templ.guide/quick-start/installation/#nix
- #
- # Example usage:
- #
- # nixpkgs.overlays = [
- # inputs.templ.overlays.default
- # ];
- overlays.default = final: prev: {
- templ = self.packages.${final.stdenv.system}.templ;
- templ-docs = self.packages.${final.stdenv.system}.templ-docs;
- };
- };
-}
-
diff --git a/vendor/github.com/a-h/templ/flush.go b/vendor/github.com/a-h/templ/flush.go
deleted file mode 100644
index 56d7d3a..0000000
--- a/vendor/github.com/a-h/templ/flush.go
+++ /dev/null
@@ -1,36 +0,0 @@
-package templ
-
-import (
- "context"
- "io"
-)
-
-// Flush flushes the output buffer after all its child components have been rendered.
-func Flush() FlushComponent {
- return FlushComponent{}
-}
-
-type FlushComponent struct {
-}
-
-type flusherError interface {
- Flush() error
-}
-
-type flusher interface {
- Flush()
-}
-
-func (f FlushComponent) Render(ctx context.Context, w io.Writer) (err error) {
- if err = GetChildren(ctx).Render(ctx, w); err != nil {
- return err
- }
- switch w := w.(type) {
- case flusher:
- w.Flush()
- return nil
- case flusherError:
- return w.Flush()
- }
- return nil
-}
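For context on the flush.go removed above, here is a minimal, hypothetical sketch of how `templ.Flush` was meant to be driven from plain Go code; the child component, the HTML literal, and the use of `bufio.Writer` are illustrative assumptions, not code from this repository.

```go
package main

import (
	"bufio"
	"context"
	"io"
	"os"

	"github.com/a-h/templ"
)

func main() {
	// A child component standing in for generated template output.
	child := templ.ComponentFunc(func(ctx context.Context, w io.Writer) error {
		_, err := io.WriteString(w, "<p>partial content</p>")
		return err
	})
	// bufio.Writer satisfies the flusherError interface (Flush() error),
	// so Flush renders the child and then flushes the buffer.
	w := bufio.NewWriter(os.Stdout)
	ctx := templ.WithChildren(context.Background(), child)
	if err := templ.Flush().Render(ctx, w); err != nil {
		panic(err)
	}
}
```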
diff --git a/vendor/github.com/a-h/templ/gomod2nix.toml b/vendor/github.com/a-h/templ/gomod2nix.toml
deleted file mode 100644
index d572a5f..0000000
--- a/vendor/github.com/a-h/templ/gomod2nix.toml
+++ /dev/null
@@ -1,90 +0,0 @@
-schema = 3
-
-[mod]
- [mod."github.com/PuerkitoBio/goquery"]
- version = "v1.8.1"
- hash = "sha256-z2RaB8PVPEzSJdMUfkfNjT616yXWTjW2gkhNOh989ZU="
- [mod."github.com/a-h/htmlformat"]
- version = "v0.0.0-20231108124658-5bd994fe268e"
- hash = "sha256-YSl9GsXhc0L2oKGZLwwjUtpe5W6ra6kk74zvQdsDCMU="
- [mod."github.com/a-h/parse"]
- version = "v0.0.0-20240121214402-3caf7543159a"
- hash = "sha256-ee/g6xwwhtF7vVt3griUSh96Kz4z0hM5/tpXxHW6PZk="
- [mod."github.com/a-h/pathvars"]
- version = "v0.0.14"
- hash = "sha256-2NytUpcO0zbzE5XunCLcK3jDqxYzmyb3WqtYDEudAYg="
- [mod."github.com/a-h/protocol"]
- version = "v0.0.0-20240704131721-1e461c188041"
- hash = "sha256-KSw8m+kVIubEi+nuS3dMdBw2ZZTlmcKD/hGbVRFaE5Q="
- [mod."github.com/andybalholm/brotli"]
- version = "v1.1.0"
- hash = "sha256-njLViV4v++ZdgOWGWzlvkefuFvA/nkugl3Ta/h1nu/0="
- [mod."github.com/andybalholm/cascadia"]
- version = "v1.3.1"
- hash = "sha256-M0u22DXSeXUaYtl1KoW1qWL46niFpycFkraCEQ/luYA="
- [mod."github.com/cenkalti/backoff/v4"]
- version = "v4.3.0"
- hash = "sha256-wfVjNZsGG1WoNC5aL+kdcy6QXPgZo4THAevZ1787md8="
- [mod."github.com/cli/browser"]
- version = "v1.3.0"
- hash = "sha256-06hcvQeOEm31clxkTuZ8ts8ZtdNKY575EsM1osRVpLg="
- [mod."github.com/fatih/color"]
- version = "v1.16.0"
- hash = "sha256-Aq/SM28aPJVzvapllQ64R/DM4aZ5CHPewcm/AUJPyJQ="
- [mod."github.com/fsnotify/fsnotify"]
- version = "v1.7.0"
- hash = "sha256-MdT2rQyQHspPJcx6n9ozkLbsktIOJutOqDuKpNAtoZY="
- [mod."github.com/google/go-cmp"]
- version = "v0.6.0"
- hash = "sha256-qgra5jze4iPGP0JSTVeY5qV5AvEnEu39LYAuUCIkMtg="
- [mod."github.com/mattn/go-colorable"]
- version = "v0.1.13"
- hash = "sha256-qb3Qbo0CELGRIzvw7NVM1g/aayaz4Tguppk9MD2/OI8="
- [mod."github.com/mattn/go-isatty"]
- version = "v0.0.20"
- hash = "sha256-qhw9hWtU5wnyFyuMbKx+7RB8ckQaFQ8D+8GKPkN3HHQ="
- [mod."github.com/natefinch/atomic"]
- version = "v1.0.1"
- hash = "sha256-fbOVHCwRNI8PFjC4o0YXpKZO0JU2aWTfH5c7WXXKMHg="
- [mod."github.com/rs/cors"]
- version = "v1.11.0"
- hash = "sha256-hF25bVehtWCQsxiOfLuL4Hv8NKVunEqLPk/Vcuheha0="
- [mod."github.com/segmentio/asm"]
- version = "v1.2.0"
- hash = "sha256-zbNuKxNrUDUc6IlmRQNuJQzVe5Ol/mqp7srDg9IMMqs="
- [mod."github.com/segmentio/encoding"]
- version = "v0.4.0"
- hash = "sha256-4pWI9eTZRRDP9kO8rG6vbLCtBVVRLtbCJKd0Z2+8JoU="
- [mod."github.com/stretchr/testify"]
- version = "v1.8.4"
- hash = "sha256-MoOmRzbz9QgiJ+OOBo5h5/LbilhJfRUryvzHJmXAWjo="
- [mod."go.lsp.dev/jsonrpc2"]
- version = "v0.10.0"
- hash = "sha256-RbRsMYVBLR7ZDHHGMooycrkdbIauMXkQjVOGP7ggSgM="
- [mod."go.lsp.dev/pkg"]
- version = "v0.0.0-20210717090340-384b27a52fb2"
- hash = "sha256-TxS0Iqe1wbIaFe7MWZJRQdgqhKE8i8CggaGSV9zU1Vg="
- [mod."go.lsp.dev/uri"]
- version = "v0.3.0"
- hash = "sha256-jGP0N7Gf+bql5oJraUo33sXqWg7AKOTj0D8b4paV4dc="
- [mod."go.uber.org/multierr"]
- version = "v1.11.0"
- hash = "sha256-Lb6rHHfR62Ozg2j2JZy3MKOMKdsfzd1IYTR57r3Mhp0="
- [mod."go.uber.org/zap"]
- version = "v1.27.0"
- hash = "sha256-8655KDrulc4Das3VRduO9MjCn8ZYD5WkULjCvruaYsU="
- [mod."golang.org/x/mod"]
- version = "v0.17.0"
- hash = "sha256-CLaPeF6uTFuRDv4oHwOQE6MCMvrzkUjWN3NuyywZjKU="
- [mod."golang.org/x/net"]
- version = "v0.24.0"
- hash = "sha256-w1c21ljta5wNIyel9CSIn/crPzwOCRofNKhqmfs4aEQ="
- [mod."golang.org/x/sync"]
- version = "v0.3.0"
- hash = "sha256-bCJKLvwExhYacH2ZrWlZ38lr1d6oNenNt2m1QqDCs0o="
- [mod."golang.org/x/sys"]
- version = "v0.21.0"
- hash = "sha256-gapzPWuEqY36V6W2YhIDYR49sEvjJRd7bSuf9K1f4JY="
- [mod."golang.org/x/tools"]
- version = "v0.13.0"
- hash = "sha256-OCgLOwia8fNHxfdogXVApf0/qK6jE2ukegOx7lkOzfo="
diff --git a/vendor/github.com/a-h/templ/handler.go b/vendor/github.com/a-h/templ/handler.go
deleted file mode 100644
index a28d561..0000000
--- a/vendor/github.com/a-h/templ/handler.go
+++ /dev/null
@@ -1,102 +0,0 @@
-package templ
-
-import "net/http"
-
-// ComponentHandler is a http.Handler that renders components.
-type ComponentHandler struct {
- Component Component
- Status int
- ContentType string
- ErrorHandler func(r *http.Request, err error) http.Handler
- StreamResponse bool
-}
-
-const componentHandlerErrorMessage = "templ: failed to render template"
-
-func (ch *ComponentHandler) ServeHTTPBuffered(w http.ResponseWriter, r *http.Request) {
- // Since the component may error, write to a buffer first.
- // This prevents partial responses from being written to the client.
- buf := GetBuffer()
- defer ReleaseBuffer(buf)
- err := ch.Component.Render(r.Context(), buf)
- if err != nil {
- if ch.ErrorHandler != nil {
- w.Header().Set("Content-Type", ch.ContentType)
- ch.ErrorHandler(r, err).ServeHTTP(w, r)
- return
- }
- http.Error(w, componentHandlerErrorMessage, http.StatusInternalServerError)
- return
- }
- w.Header().Set("Content-Type", ch.ContentType)
- if ch.Status != 0 {
- w.WriteHeader(ch.Status)
- }
- // Ignore write error like http.Error() does, because there is
- // no way to recover at this point.
- _, _ = w.Write(buf.Bytes())
-}
-
-func (ch *ComponentHandler) ServeHTTPStreamed(w http.ResponseWriter, r *http.Request) {
- w.Header().Set("Content-Type", ch.ContentType)
- if ch.Status != 0 {
- w.WriteHeader(ch.Status)
- }
- if err := ch.Component.Render(r.Context(), w); err != nil {
- if ch.ErrorHandler != nil {
- w.Header().Set("Content-Type", ch.ContentType)
- ch.ErrorHandler(r, err).ServeHTTP(w, r)
- return
- }
- http.Error(w, componentHandlerErrorMessage, http.StatusInternalServerError)
- }
-}
-
-// ServeHTTP implements the http.Handler interface.
-func (ch ComponentHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
- if ch.StreamResponse {
- ch.ServeHTTPStreamed(w, r)
- return
- }
- ch.ServeHTTPBuffered(w, r)
-}
-
-// Handler creates a http.Handler that renders the template.
-func Handler(c Component, options ...func(*ComponentHandler)) *ComponentHandler {
- ch := &ComponentHandler{
- Component: c,
- ContentType: "text/html; charset=utf-8",
- }
- for _, o := range options {
- o(ch)
- }
- return ch
-}
-
-// WithStatus sets the HTTP status code returned by the ComponentHandler.
-func WithStatus(status int) func(*ComponentHandler) {
- return func(ch *ComponentHandler) {
- ch.Status = status
- }
-}
-
-// WithContentType sets the Content-Type header returned by the ComponentHandler.
-func WithContentType(contentType string) func(*ComponentHandler) {
- return func(ch *ComponentHandler) {
- ch.ContentType = contentType
- }
-}
-
-// WithErrorHandler sets the error handler used if rendering fails.
-func WithErrorHandler(eh func(r *http.Request, err error) http.Handler) func(*ComponentHandler) {
- return func(ch *ComponentHandler) {
- ch.ErrorHandler = eh
- }
-}
-
-// WithStreaming sets the ComponentHandler to stream the response instead of buffering it.
-func WithStreaming() func(*ComponentHandler) {
- return func(ch *ComponentHandler) {
- ch.StreamResponse = true
- }
-}
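To make the handler.go removal easier to follow, a rough usage sketch follows; the component body, route, and port are invented for illustration and are not from this project.

```go
package main

import (
	"context"
	"io"
	"net/http"

	"github.com/a-h/templ"
)

func main() {
	// A hand-written component standing in for generated template code.
	hello := templ.ComponentFunc(func(ctx context.Context, w io.Writer) error {
		_, err := io.WriteString(w, "<p>Hello</p>")
		return err
	})
	// Buffered rendering by default; templ.WithStreaming() would switch the
	// handler to ServeHTTPStreamed instead.
	http.Handle("/", templ.Handler(hello, templ.WithStatus(http.StatusOK)))
	_ = http.ListenAndServe(":8080", nil)
}
```

Buffering first means a render error can still become a clean 500 response, at the cost of holding the whole page in memory before writing it.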
diff --git a/vendor/github.com/a-h/templ/ide-demo.gif b/vendor/github.com/a-h/templ/ide-demo.gif
deleted file mode 100644
index e35fd68..0000000
--- a/vendor/github.com/a-h/templ/ide-demo.gif
+++ /dev/null
Binary files differ
diff --git a/vendor/github.com/a-h/templ/jsonscript.go b/vendor/github.com/a-h/templ/jsonscript.go
deleted file mode 100644
index 6e88174..0000000
--- a/vendor/github.com/a-h/templ/jsonscript.go
+++ /dev/null
@@ -1,85 +0,0 @@
-package templ
-
-import (
- "context"
- "encoding/json"
- "fmt"
- "io"
-)
-
-var _ Component = JSONScriptElement{}
-
-// JSONScript renders a JSON object inside a script element.
-// e.g. <script type="application/json">{"foo":"bar"}</script>
-func JSONScript(id string, data any) JSONScriptElement {
- return JSONScriptElement{
- ID: id,
- Type: "application/json",
- Data: data,
- Nonce: GetNonce,
- }
-}
-
-// WithType sets the value of the type attribute of the script element.
-func (j JSONScriptElement) WithType(t string) JSONScriptElement {
- j.Type = t
- return j
-}
-
-// WithNonceFromString sets the value of the nonce attribute of the script element to the given string.
-func (j JSONScriptElement) WithNonceFromString(nonce string) JSONScriptElement {
- j.Nonce = func(context.Context) string {
- return nonce
- }
- return j
-}
-
-// WithNonceFrom sets the value of the nonce attribute of the script element to the value returned by the given function.
-func (j JSONScriptElement) WithNonceFrom(f func(context.Context) string) JSONScriptElement {
- j.Nonce = f
- return j
-}
-
-type JSONScriptElement struct {
- // ID of the element in the DOM.
- ID string
- // Type of the script element, defaults to "application/json".
- Type string
- // Data that will be encoded as JSON.
- Data any
- // Nonce is a function that returns a CSP nonce.
-	// Defaults to GetNonce when constructed via JSONScript.
- // See https://content-security-policy.com/nonce for more information.
- Nonce func(ctx context.Context) string
-}
-
-func (j JSONScriptElement) Render(ctx context.Context, w io.Writer) (err error) {
- if _, err = io.WriteString(w, "<script"); err != nil {
- return err
- }
- if j.ID != "" {
- if _, err = fmt.Fprintf(w, " id=\"%s\"", EscapeString(j.ID)); err != nil {
- return err
- }
- }
- if j.Type != "" {
- if _, err = fmt.Fprintf(w, " type=\"%s\"", EscapeString(j.Type)); err != nil {
- return err
- }
- }
- if nonce := j.Nonce(ctx); nonce != "" {
- if _, err = fmt.Fprintf(w, " nonce=\"%s\"", EscapeString(nonce)); err != nil {
- return err
- }
- }
- if _, err = io.WriteString(w, ">"); err != nil {
- return err
- }
- if err = json.NewEncoder(w).Encode(j.Data); err != nil {
- return err
- }
- if _, err = io.WriteString(w, "</script>"); err != nil {
- return err
- }
- return nil
-}
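A small, hypothetical example of the JSONScriptElement API deleted above; the element ID, data, and nonce value are made up.

```go
package main

import (
	"context"
	"os"

	"github.com/a-h/templ"
)

func main() {
	data := map[string]any{"debug": true, "retries": 3}
	// Produces roughly:
	// <script id="config" type="application/json" nonce="abc123">{"debug":true,"retries":3}</script>
	el := templ.JSONScript("config", data).WithNonceFromString("abc123")
	if err := el.Render(context.Background(), os.Stdout); err != nil {
		panic(err)
	}
}
```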
diff --git a/vendor/github.com/a-h/templ/jsonstring.go b/vendor/github.com/a-h/templ/jsonstring.go
deleted file mode 100644
index 425e4e8..0000000
--- a/vendor/github.com/a-h/templ/jsonstring.go
+++ /dev/null
@@ -1,14 +0,0 @@
-package templ
-
-import (
- "encoding/json"
-)
-
-// JSONString returns a JSON encoded string of v.
-func JSONString(v any) (string, error) {
- b, err := json.Marshal(v)
- if err != nil {
- return "", err
- }
- return string(b), nil
-}
diff --git a/vendor/github.com/a-h/templ/once.go b/vendor/github.com/a-h/templ/once.go
deleted file mode 100644
index 7860ab8..0000000
--- a/vendor/github.com/a-h/templ/once.go
+++ /dev/null
@@ -1,64 +0,0 @@
-package templ
-
-import (
- "context"
- "io"
- "sync/atomic"
-)
-
-// onceHandleIndex is used to identify unique once handles in a program run.
-var onceHandleIndex int64
-
-type OnceOpt func(*OnceHandle)
-
-// WithComponent sets the component to be rendered once per context.
-// This can be used instead of setting the children of the `Once` method,
-// for example, if creating a code component outside of a templ HTML template.
-func WithComponent(c Component) OnceOpt {
- return func(o *OnceHandle) {
- o.c = c
- }
-}
-
-// NewOnceHandle creates a OnceHandle used to ensure that the children of its
-// `Once` method are only rendered once per context.
-func NewOnceHandle(opts ...OnceOpt) *OnceHandle {
- oh := &OnceHandle{
- id: atomic.AddInt64(&onceHandleIndex, 1),
- }
- for _, opt := range opts {
- opt(oh)
- }
- return oh
-}
-
-// OnceHandle is used to ensure that the children of its `Once` method are only
-// rendered once per context.
-type OnceHandle struct {
- // id is used to identify which instance of the OnceHandle is being used.
- // The OnceHandle can't be an empty struct, because:
- //
- // | Two distinct zero-size variables may
- // | have the same address in memory
- //
- // https://go.dev/ref/spec#Size_and_alignment_guarantees
- id int64
- // c is the component to be rendered once per context.
- // if c is nil, the children of the `Once` method are rendered.
- c Component
-}
-
-// Once returns a component that renders its children once per context.
-func (o *OnceHandle) Once() Component {
- return ComponentFunc(func(ctx context.Context, w io.Writer) (err error) {
- _, v := getContext(ctx)
- if v.getHasBeenRendered(o) {
- return nil
- }
- v.setHasBeenRendered(o)
- if o.c != nil {
- return o.c.Render(ctx, w)
- }
- return GetChildren(ctx).Render(ctx, w)
- })
-}
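A brief sketch of how the OnceHandle removed above deduplicates rendering; the script tag and context wiring here are illustrative assumptions.

```go
package main

import (
	"context"
	"io"
	"os"

	"github.com/a-h/templ"
)

func main() {
	script := templ.ComponentFunc(func(ctx context.Context, w io.Writer) error {
		_, err := io.WriteString(w, `<script src="/widget.js"></script>`)
		return err
	})
	once := templ.NewOnceHandle(templ.WithComponent(script))

	// The handle tracks rendering per context value, so the same context must
	// be shared between calls for deduplication to apply.
	ctx := templ.InitializeContext(context.Background())
	_ = once.Once().Render(ctx, os.Stdout) // writes the script tag
	_ = once.Once().Render(ctx, os.Stdout) // writes nothing
}
```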
diff --git a/vendor/github.com/a-h/templ/push-tag.sh b/vendor/github.com/a-h/templ/push-tag.sh
deleted file mode 100644
index 9eedeed..0000000
--- a/vendor/github.com/a-h/templ/push-tag.sh
+++ /dev/null
@@ -1,14 +0,0 @@
-#!/bin/sh
-if [ `git rev-parse --abbrev-ref HEAD` != "main" ]; then
- echo "Error: Not on main branch. Please switch to main branch.";
- exit 1;
-fi
-git pull
-if ! git diff --quiet; then
- echo "Error: Working directory is not clean. Please commit the changes first.";
- exit 1;
-fi
-export VERSION=`cat .version`
-echo Adding git tag with version v${VERSION};
-git tag v${VERSION};
-git push origin v${VERSION};
diff --git a/vendor/github.com/a-h/templ/runtime.go b/vendor/github.com/a-h/templ/runtime.go
deleted file mode 100644
index d4d5aa0..0000000
--- a/vendor/github.com/a-h/templ/runtime.go
+++ /dev/null
@@ -1,855 +0,0 @@
-package templ
-
-import (
- "bytes"
- "context"
- "crypto/sha256"
- "encoding/hex"
- "encoding/json"
- "errors"
- "fmt"
- "html"
- "html/template"
- "io"
- "net/http"
- "os"
- "reflect"
- "runtime"
- "sort"
- "strconv"
- "strings"
- "sync"
- "time"
-
- "github.com/a-h/templ/safehtml"
-)
-
-// Types exposed by all components.
-
-// Component is the interface that all templates implement.
-type Component interface {
- // Render the template.
- Render(ctx context.Context, w io.Writer) error
-}
-
-// ComponentFunc converts a function that matches the Component interface's
-// Render method into a Component.
-type ComponentFunc func(ctx context.Context, w io.Writer) error
-
-// Render the template.
-func (cf ComponentFunc) Render(ctx context.Context, w io.Writer) error {
- return cf(ctx, w)
-}
-
-// WithNonce sets a CSP nonce on the context and returns it.
-func WithNonce(ctx context.Context, nonce string) context.Context {
- ctx, v := getContext(ctx)
- v.nonce = nonce
- return ctx
-}
-
-// GetNonce returns the CSP nonce value set with WithNonce, or an
-// empty string if none has been set.
-func GetNonce(ctx context.Context) (nonce string) {
- if ctx == nil {
- return ""
- }
- _, v := getContext(ctx)
- return v.nonce
-}
-
-func WithChildren(ctx context.Context, children Component) context.Context {
- ctx, v := getContext(ctx)
- v.children = &children
- return ctx
-}
-
-func ClearChildren(ctx context.Context) context.Context {
- _, v := getContext(ctx)
- v.children = nil
- return ctx
-}
-
-// NopComponent is a component that doesn't render anything.
-var NopComponent = ComponentFunc(func(ctx context.Context, w io.Writer) error { return nil })
-
-// GetChildren from the context.
-func GetChildren(ctx context.Context) Component {
- _, v := getContext(ctx)
- if v.children == nil {
- return NopComponent
- }
- return *v.children
-}
-
-// EscapeString escapes HTML text within templates.
-func EscapeString(s string) string {
- return html.EscapeString(s)
-}
-
-// Bool attribute value.
-func Bool(value bool) bool {
- return value
-}
-
-// Classes for CSS.
-// Supported types are string, ConstantCSSClass, ComponentCSSClass, map[string]bool.
-func Classes(classes ...any) CSSClasses {
- return CSSClasses(classes)
-}
-
-// CSSClasses is a slice of CSS classes.
-type CSSClasses []any
-
-// String returns the names of all CSS classes.
-func (classes CSSClasses) String() string {
- if len(classes) == 0 {
- return ""
- }
- cp := newCSSProcessor()
- for _, v := range classes {
- cp.Add(v)
- }
- return cp.String()
-}
-
-func newCSSProcessor() *cssProcessor {
- return &cssProcessor{
- classNameToEnabled: make(map[string]bool),
- }
-}
-
-type cssProcessor struct {
- classNameToEnabled map[string]bool
- orderedNames []string
-}
-
-func (cp *cssProcessor) Add(item any) {
- switch c := item.(type) {
- case []string:
- for _, className := range c {
- cp.AddClassName(className, true)
- }
- case string:
- cp.AddClassName(c, true)
- case ConstantCSSClass:
- cp.AddClassName(c.ClassName(), true)
- case ComponentCSSClass:
- cp.AddClassName(c.ClassName(), true)
- case map[string]bool:
- // In Go, map keys are iterated in a randomized order.
- // So the keys in the map must be sorted to produce consistent output.
- keys := make([]string, len(c))
- var i int
- for key := range c {
- keys[i] = key
- i++
- }
- sort.Strings(keys)
- for _, className := range keys {
- cp.AddClassName(className, c[className])
- }
- case []KeyValue[string, bool]:
- for _, kv := range c {
- cp.AddClassName(kv.Key, kv.Value)
- }
- case KeyValue[string, bool]:
- cp.AddClassName(c.Key, c.Value)
- case []KeyValue[CSSClass, bool]:
- for _, kv := range c {
- cp.AddClassName(kv.Key.ClassName(), kv.Value)
- }
- case KeyValue[CSSClass, bool]:
- cp.AddClassName(c.Key.ClassName(), c.Value)
- case CSSClasses:
- for _, item := range c {
- cp.Add(item)
- }
- case []CSSClass:
- for _, item := range c {
- cp.Add(item)
- }
- case func() CSSClass:
- cp.AddClassName(c().ClassName(), true)
- default:
- cp.AddClassName(unknownTypeClassName, true)
- }
-}
-
-func (cp *cssProcessor) AddClassName(className string, enabled bool) {
- cp.classNameToEnabled[className] = enabled
- cp.orderedNames = append(cp.orderedNames, className)
-}
-
-func (cp *cssProcessor) String() string {
- // Order the outputs according to how they were input, and remove disabled names.
- rendered := make(map[string]any, len(cp.classNameToEnabled))
- var names []string
- for _, name := range cp.orderedNames {
- if enabled := cp.classNameToEnabled[name]; !enabled {
- continue
- }
- if _, hasBeenRendered := rendered[name]; hasBeenRendered {
- continue
- }
- names = append(names, name)
- rendered[name] = struct{}{}
- }
-
- return strings.Join(names, " ")
-}
-
-// KeyValue is a key and value pair.
-type KeyValue[TKey comparable, TValue any] struct {
- Key TKey `json:"name"`
- Value TValue `json:"value"`
-}
-
-// KV creates a new key/value pair from the input key and value.
-func KV[TKey comparable, TValue any](key TKey, value TValue) KeyValue[TKey, TValue] {
- return KeyValue[TKey, TValue]{
- Key: key,
- Value: value,
- }
-}
-
-const unknownTypeClassName = "--templ-css-class-unknown-type"
-
-// Class returns a CSS class name.
-// Deprecated: use a string instead.
-func Class(name string) CSSClass {
- return SafeClass(name)
-}
-
-// SafeClass bypasses CSS class name validation.
-// Deprecated: use a string instead.
-func SafeClass(name string) CSSClass {
- return ConstantCSSClass(name)
-}
-
-// CSSClass provides a class name.
-type CSSClass interface {
- ClassName() string
-}
-
-// ConstantCSSClass is a string constant of a CSS class name.
-// Deprecated: use a string instead.
-type ConstantCSSClass string
-
-// ClassName of the CSS class.
-func (css ConstantCSSClass) ClassName() string {
- return string(css)
-}
-
-// ComponentCSSClass is a CSS class generated from a templ CSS template.
-type ComponentCSSClass struct {
- // ID of the class, will be autogenerated.
- ID string
- // Definition of the CSS.
- Class SafeCSS
-}
-
-// ClassName of the CSS class.
-func (css ComponentCSSClass) ClassName() string {
- return css.ID
-}
-
-// CSSID calculates an ID.
-func CSSID(name string, css string) string {
- sum := sha256.Sum256([]byte(css))
- hp := hex.EncodeToString(sum[:])[0:4]
- // Benchmarking showed this was fastest, and with fewest allocations (1).
- // Using strings.Builder (2 allocs).
- // Using fmt.Sprintf (3 allocs).
- return name + "_" + hp
-}
-
-// NewCSSMiddleware creates HTTP middleware that renders a global stylesheet of ComponentCSSClass
-// CSS if the request path matches, or updates the HTTP context to ensure that any handlers that
-// use templ.Components skip rendering <style> elements for classes that are included in the global
-// stylesheet. By default, the stylesheet path is /styles/templ.css
-func NewCSSMiddleware(next http.Handler, classes ...CSSClass) CSSMiddleware {
- return CSSMiddleware{
- Path: "/styles/templ.css",
- CSSHandler: NewCSSHandler(classes...),
- Next: next,
- }
-}
-
-// CSSMiddleware renders a global stylesheet.
-type CSSMiddleware struct {
- Path string
- CSSHandler CSSHandler
- Next http.Handler
-}
-
-func (cssm CSSMiddleware) ServeHTTP(w http.ResponseWriter, r *http.Request) {
- if r.URL.Path == cssm.Path {
- cssm.CSSHandler.ServeHTTP(w, r)
- return
- }
- // Add registered classes to the context.
- ctx, v := getContext(r.Context())
- for _, c := range cssm.CSSHandler.Classes {
- v.addClass(c.ID)
- }
- // Serve the request. Templ components will use the updated context
- // to know to skip rendering <style> elements for any component CSS
- // classes that have been included in the global stylesheet.
- cssm.Next.ServeHTTP(w, r.WithContext(ctx))
-}
-
-// NewCSSHandler creates a handler that serves a stylesheet containing the CSS of the
-// classes passed in. This is used by the CSSMiddleware to provide global stylesheets
-// for templ components.
-func NewCSSHandler(classes ...CSSClass) CSSHandler {
- ccssc := make([]ComponentCSSClass, 0, len(classes))
- for _, c := range classes {
- ccss, ok := c.(ComponentCSSClass)
- if !ok {
- continue
- }
- ccssc = append(ccssc, ccss)
- }
- return CSSHandler{
- Classes: ccssc,
- }
-}
-
-// CSSHandler is an HTTP handler that serves CSS.
-type CSSHandler struct {
- Logger func(err error)
- Classes []ComponentCSSClass
-}
-
-func (cssh CSSHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
- w.Header().Set("Content-Type", "text/css")
- for _, c := range cssh.Classes {
- _, err := w.Write([]byte(c.Class))
- if err != nil && cssh.Logger != nil {
- cssh.Logger(err)
- }
- }
-}
-
-// RenderCSSItems renders the CSS to the writer, if the items haven't already been rendered.
-func RenderCSSItems(ctx context.Context, w io.Writer, classes ...any) (err error) {
- if len(classes) == 0 {
- return nil
- }
- _, v := getContext(ctx)
- sb := new(strings.Builder)
- renderCSSItemsToBuilder(sb, v, classes...)
- if sb.Len() > 0 {
- if _, err = io.WriteString(w, `<style type="text/css">`); err != nil {
- return err
- }
- if _, err = io.WriteString(w, sb.String()); err != nil {
- return err
- }
- if _, err = io.WriteString(w, `</style>`); err != nil {
- return err
- }
- }
- return nil
-}
-
-func renderCSSItemsToBuilder(sb *strings.Builder, v *contextValue, classes ...any) {
- for _, c := range classes {
- switch ccc := c.(type) {
- case ComponentCSSClass:
- if !v.hasClassBeenRendered(ccc.ID) {
- sb.WriteString(string(ccc.Class))
- v.addClass(ccc.ID)
- }
- case KeyValue[ComponentCSSClass, bool]:
- if !ccc.Value {
- continue
- }
- renderCSSItemsToBuilder(sb, v, ccc.Key)
- case KeyValue[CSSClass, bool]:
- if !ccc.Value {
- continue
- }
- renderCSSItemsToBuilder(sb, v, ccc.Key)
- case CSSClasses:
- renderCSSItemsToBuilder(sb, v, ccc...)
- case []CSSClass:
- for _, item := range ccc {
- renderCSSItemsToBuilder(sb, v, item)
- }
- case func() CSSClass:
- renderCSSItemsToBuilder(sb, v, ccc())
- case []string:
- // Skip. These are class names, not CSS classes.
- case string:
- // Skip. This is a class name, not a CSS class.
- case ConstantCSSClass:
- // Skip. This is a class name, not a CSS class.
- case CSSClass:
- // Skip. This is a class name, not a CSS class.
- case map[string]bool:
- // Skip. These are class names, not CSS classes.
- case KeyValue[string, bool]:
- // Skip. These are class names, not CSS classes.
- case []KeyValue[string, bool]:
- // Skip. These are class names, not CSS classes.
- case KeyValue[ConstantCSSClass, bool]:
- // Skip. These are class names, not CSS classes.
- case []KeyValue[ConstantCSSClass, bool]:
- // Skip. These are class names, not CSS classes.
- }
- }
-}
-
-// SafeCSS is CSS that has been sanitized.
-type SafeCSS string
-
-type SafeCSSProperty string
-
-var safeCSSPropertyType = reflect.TypeOf(SafeCSSProperty(""))
-
-// SanitizeCSS sanitizes CSS properties to ensure that they are safe.
-func SanitizeCSS[T ~string](property string, value T) SafeCSS {
- if reflect.TypeOf(value) == safeCSSPropertyType {
- return SafeCSS(safehtml.SanitizeCSSProperty(property) + ":" + string(value) + ";")
- }
- p, v := safehtml.SanitizeCSS(property, string(value))
- return SafeCSS(p + ":" + v + ";")
-}
-
-// Attributes is an alias to map[string]any made for spread attributes.
-type Attributes map[string]any
-
-// sortedKeys returns the keys of a map in sorted order.
-func sortedKeys(m map[string]any) (keys []string) {
- keys = make([]string, len(m))
- var i int
- for k := range m {
- keys[i] = k
- i++
- }
- sort.Strings(keys)
- return keys
-}
-
-func writeStrings(w io.Writer, ss ...string) (err error) {
- for _, s := range ss {
- if _, err = io.WriteString(w, s); err != nil {
- return err
- }
- }
- return nil
-}
-
-func RenderAttributes(ctx context.Context, w io.Writer, attributes Attributes) (err error) {
- for _, key := range sortedKeys(attributes) {
- value := attributes[key]
- switch value := value.(type) {
- case string:
- if err = writeStrings(w, ` `, EscapeString(key), `="`, EscapeString(value), `"`); err != nil {
- return err
- }
- case *string:
- if value != nil {
- if err = writeStrings(w, ` `, EscapeString(key), `="`, EscapeString(*value), `"`); err != nil {
- return err
- }
- }
- case bool:
- if value {
- if err = writeStrings(w, ` `, EscapeString(key)); err != nil {
- return err
- }
- }
- case *bool:
- if value != nil && *value {
- if err = writeStrings(w, ` `, EscapeString(key)); err != nil {
- return err
- }
- }
- case KeyValue[string, bool]:
- if value.Value {
- if err = writeStrings(w, ` `, EscapeString(key), `="`, EscapeString(value.Key), `"`); err != nil {
- return err
- }
- }
- case KeyValue[bool, bool]:
- if value.Value && value.Key {
- if err = writeStrings(w, ` `, EscapeString(key)); err != nil {
- return err
- }
- }
- case func() bool:
- if value() {
- if err = writeStrings(w, ` `, EscapeString(key)); err != nil {
- return err
- }
- }
- }
- }
- return nil
-}
-
-// Script handling.
-
-func safeEncodeScriptParams(escapeHTML bool, params []any) []string {
- encodedParams := make([]string, len(params))
- for i := 0; i < len(encodedParams); i++ {
- enc, _ := json.Marshal(params[i])
- if !escapeHTML {
- encodedParams[i] = string(enc)
- continue
- }
- encodedParams[i] = EscapeString(string(enc))
- }
- return encodedParams
-}
-
-// SafeScript encodes unknown parameters for safety for inside HTML attributes.
-func SafeScript(functionName string, params ...any) string {
- encodedParams := safeEncodeScriptParams(true, params)
- sb := new(strings.Builder)
- sb.WriteString(functionName)
- sb.WriteRune('(')
- sb.WriteString(strings.Join(encodedParams, ","))
- sb.WriteRune(')')
- return sb.String()
-}
-
-// SafeScriptInline encodes unknown parameters for safety for inline scripts.
-func SafeScriptInline(functionName string, params ...any) string {
- encodedParams := safeEncodeScriptParams(false, params)
- sb := new(strings.Builder)
- sb.WriteString(functionName)
- sb.WriteRune('(')
- sb.WriteString(strings.Join(encodedParams, ","))
- sb.WriteRune(')')
- return sb.String()
-}
-
-type contextKeyType int
-
-const contextKey = contextKeyType(0)
-
-type contextValue struct {
- ss map[string]struct{}
- onceHandles map[*OnceHandle]struct{}
- children *Component
- nonce string
-}
-
-func (v *contextValue) setHasBeenRendered(h *OnceHandle) {
- if v.onceHandles == nil {
- v.onceHandles = map[*OnceHandle]struct{}{}
- }
- v.onceHandles[h] = struct{}{}
-}
-
-func (v *contextValue) getHasBeenRendered(h *OnceHandle) (ok bool) {
- if v.onceHandles == nil {
- v.onceHandles = map[*OnceHandle]struct{}{}
- }
- _, ok = v.onceHandles[h]
- return
-}
-
-func (v *contextValue) addScript(s string) {
- if v.ss == nil {
- v.ss = map[string]struct{}{}
- }
- v.ss["script_"+s] = struct{}{}
-}
-
-func (v *contextValue) hasScriptBeenRendered(s string) (ok bool) {
- if v.ss == nil {
- v.ss = map[string]struct{}{}
- }
- _, ok = v.ss["script_"+s]
- return
-}
-
-func (v *contextValue) addClass(s string) {
- if v.ss == nil {
- v.ss = map[string]struct{}{}
- }
- v.ss["class_"+s] = struct{}{}
-}
-
-func (v *contextValue) hasClassBeenRendered(s string) (ok bool) {
- if v.ss == nil {
- v.ss = map[string]struct{}{}
- }
- _, ok = v.ss["class_"+s]
- return
-}
-
-// InitializeContext initializes context used to store internal state used during rendering.
-func InitializeContext(ctx context.Context) context.Context {
- if _, ok := ctx.Value(contextKey).(*contextValue); ok {
- return ctx
- }
- v := &contextValue{}
- ctx = context.WithValue(ctx, contextKey, v)
- return ctx
-}
-
-func getContext(ctx context.Context) (context.Context, *contextValue) {
- v, ok := ctx.Value(contextKey).(*contextValue)
- if !ok {
- ctx = InitializeContext(ctx)
- v = ctx.Value(contextKey).(*contextValue)
- }
- return ctx, v
-}
-
-// ComponentScript is a templ Script template.
-type ComponentScript struct {
- // Name of the script, e.g. print.
- Name string
- // Function to render.
- Function string
- // Call of the function in JavaScript syntax, including parameters, and
- // ensures parameters are HTML escaped; useful for injecting into HTML
- // attributes like onclick, onhover, etc.
- //
- // Given:
- // functionName("some string",12345)
- // It would render:
-	//  __templ_functionName_sha(&#34;some string&#34;,12345)
- //
-	// This can be injected into HTML attributes:
-	// <button onClick="__templ_functionName_sha(&#34;some string&#34;,12345)">Click Me</button>
- Call string
- // Call of the function in JavaScript syntax, including parameters. It
- // does not HTML escape parameters; useful for directly calling in script
- // elements.
- //
- // Given:
- // functionName("some string",12345)
- // It would render:
-	//  __templ_functionName_sha("some string",12345)
- //
-	// This can be used to call the function inside a script tag:
-	// <script>__templ_functionName_sha("some string",12345)</script>
- CallInline string
-}
-
-var _ Component = ComponentScript{}
-
-func writeScriptHeader(ctx context.Context, w io.Writer) (err error) {
- var nonceAttr string
- if nonce := GetNonce(ctx); nonce != "" {
- nonceAttr = " nonce=\"" + EscapeString(nonce) + "\""
- }
- _, err = fmt.Fprintf(w, `<script type="text/javascript"%s>`, nonceAttr)
- return err
-}
-
-func (c ComponentScript) Render(ctx context.Context, w io.Writer) error {
- err := RenderScriptItems(ctx, w, c)
- if err != nil {
- return err
- }
- if len(c.Call) > 0 {
- if err = writeScriptHeader(ctx, w); err != nil {
- return err
- }
- if _, err = io.WriteString(w, c.CallInline); err != nil {
- return err
- }
- if _, err = io.WriteString(w, `</script>`); err != nil {
- return err
- }
- }
- return nil
-}
-
-// RenderScriptItems renders a <script> element, if the script has not already been rendered.
-func RenderScriptItems(ctx context.Context, w io.Writer, scripts ...ComponentScript) (err error) {
- if len(scripts) == 0 {
- return nil
- }
- _, v := getContext(ctx)
- sb := new(strings.Builder)
- for _, s := range scripts {
- if !v.hasScriptBeenRendered(s.Name) {
- sb.WriteString(s.Function)
- v.addScript(s.Name)
- }
- }
- if sb.Len() > 0 {
- if err = writeScriptHeader(ctx, w); err != nil {
- return err
- }
- if _, err = io.WriteString(w, sb.String()); err != nil {
- return err
- }
- if _, err = io.WriteString(w, `</script>`); err != nil {
- return err
- }
- }
- return nil
-}
-
-var bufferPool = sync.Pool{
- New: func() any {
- return new(bytes.Buffer)
- },
-}
-
-func GetBuffer() *bytes.Buffer {
- return bufferPool.Get().(*bytes.Buffer)
-}
-
-func ReleaseBuffer(b *bytes.Buffer) {
- b.Reset()
- bufferPool.Put(b)
-}
-
-// JoinStringErrs joins an optional list of errors.
-func JoinStringErrs(s string, errs ...error) (string, error) {
- return s, errors.Join(errs...)
-}
-
-// Error returned during template rendering.
-type Error struct {
- Err error
- // FileName of the template file.
- FileName string
- // Line index of the error.
- Line int
- // Col index of the error.
- Col int
-}
-
-func (e Error) Error() string {
- if e.FileName == "" {
- e.FileName = "templ"
- }
- return fmt.Sprintf("%s: error at line %d, col %d: %v", e.FileName, e.Line, e.Col, e.Err)
-}
-
-func (e Error) Unwrap() error {
- return e.Err
-}
-
-// Raw renders the input HTML to the output without applying HTML escaping.
-//
-// Use of this component presents a security risk - the HTML should come from
-// a trusted source, because it will be included as-is in the output.
-func Raw[T ~string](html T, errs ...error) Component {
- return ComponentFunc(func(ctx context.Context, w io.Writer) (err error) {
- if err = errors.Join(errs...); err != nil {
- return err
- }
- _, err = io.WriteString(w, string(html))
- return err
- })
-}
-
-// FromGoHTML creates a templ Component from a Go html/template template.
-func FromGoHTML(t *template.Template, data any) Component {
- return ComponentFunc(func(ctx context.Context, w io.Writer) (err error) {
- return t.Execute(w, data)
- })
-}
-
-// ToGoHTML renders the component to a Go html/template template.HTML string.
-func ToGoHTML(ctx context.Context, c Component) (s template.HTML, err error) {
- b := GetBuffer()
- defer ReleaseBuffer(b)
- if err = c.Render(ctx, b); err != nil {
- return
- }
- s = template.HTML(b.String())
- return
-}
-
-// WriteWatchModeString is used when rendering templates in development mode.
-// The generator will have written non-Go code to the _templ.txt file, which
-// is then read by this function and written to the output.
-func WriteWatchModeString(w io.Writer, lineNum int) error {
- _, path, _, _ := runtime.Caller(1)
- if !strings.HasSuffix(path, "_templ.go") {
- return errors.New("templ: WriteWatchModeString can only be called from _templ.go")
- }
- txtFilePath := strings.Replace(path, "_templ.go", "_templ.txt", 1)
-
- literals, err := getWatchedStrings(txtFilePath)
- if err != nil {
- return fmt.Errorf("templ: failed to cache strings: %w", err)
- }
-
- if lineNum > len(literals) {
- return errors.New("templ: failed to find line " + strconv.Itoa(lineNum) + " in " + txtFilePath)
- }
-
- unquoted, err := strconv.Unquote(`"` + literals[lineNum-1] + `"`)
- if err != nil {
- return err
- }
- _, err = io.WriteString(w, unquoted)
- return err
-}
-
-var (
- watchModeCache = map[string]watchState{}
- watchStateMutex sync.Mutex
-)
-
-type watchState struct {
- modTime time.Time
- strings []string
-}
-
-func getWatchedStrings(txtFilePath string) ([]string, error) {
- watchStateMutex.Lock()
- defer watchStateMutex.Unlock()
-
- state, cached := watchModeCache[txtFilePath]
- if !cached {
- return cacheStrings(txtFilePath)
- }
-
- if time.Since(state.modTime) < time.Millisecond*100 {
- return state.strings, nil
- }
-
- info, err := os.Stat(txtFilePath)
- if err != nil {
- return nil, fmt.Errorf("templ: failed to stat %s: %w", txtFilePath, err)
- }
-
- if !info.ModTime().After(state.modTime) {
- return state.strings, nil
- }
-
- return cacheStrings(txtFilePath)
-}
-
-func cacheStrings(txtFilePath string) ([]string, error) {
- txtFile, err := os.Open(txtFilePath)
- if err != nil {
- return nil, fmt.Errorf("templ: failed to open %s: %w", txtFilePath, err)
- }
- defer txtFile.Close()
-
- info, err := txtFile.Stat()
- if err != nil {
- return nil, fmt.Errorf("templ: failed to stat %s: %w", txtFilePath, err)
- }
-
- all, err := io.ReadAll(txtFile)
- if err != nil {
- return nil, fmt.Errorf("templ: failed to read %s: %w", txtFilePath, err)
- }
-
- literals := strings.Split(string(all), "\n")
- watchModeCache[txtFilePath] = watchState{
- modTime: info.ModTime(),
- strings: literals,
- }
-
- return literals, nil
-}
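Since this commit migrates the site from templ to html/template, the two bridge helpers near the end of the deleted runtime.go are the most relevant piece. A hedged sketch of how they interoperate follows; the template text and data are invented.

```go
package main

import (
	"context"
	"html/template"
	"os"

	"github.com/a-h/templ"
)

func main() {
	// Wrap an html/template template as a templ.Component...
	t := template.Must(template.New("greeting").Parse("<p>Hello, {{.}}!</p>"))
	c := templ.FromGoHTML(t, "world")
	_ = c.Render(context.Background(), os.Stdout)

	// ...or render any templ.Component into a template.HTML value that an
	// html/template template can embed without re-escaping.
	h, err := templ.ToGoHTML(context.Background(), c)
	if err != nil {
		panic(err)
	}
	_, _ = os.Stdout.WriteString(string(h))
}
```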
diff --git a/vendor/github.com/a-h/templ/runtime/buffer.go b/vendor/github.com/a-h/templ/runtime/buffer.go
deleted file mode 100644
index 63e4acd..0000000
--- a/vendor/github.com/a-h/templ/runtime/buffer.go
+++ /dev/null
@@ -1,62 +0,0 @@
-package runtime
-
-import (
- "bufio"
- "io"
- "net/http"
-)
-
-// DefaultBufferSize is the default size of buffers. It is set to 4KB by default, which is the
-// same as the default buffer size of bufio.Writer.
-var DefaultBufferSize = 4 * 1024 // 4KB
-
-// Buffer is a wrapper around bufio.Writer that enables flushing and closing of
-// the underlying writer.
-type Buffer struct {
- Underlying io.Writer
- b *bufio.Writer
-}
-
-// Write the contents of p into the buffer.
-func (b *Buffer) Write(p []byte) (n int, err error) {
- return b.b.Write(p)
-}
-
-// Flush writes any buffered data to the underlying io.Writer and
-// calls the Flush method of the underlying http.Flusher if it implements it.
-func (b *Buffer) Flush() error {
- if err := b.b.Flush(); err != nil {
- return err
- }
- if f, ok := b.Underlying.(http.Flusher); ok {
- f.Flush()
- }
- return nil
-}
-
-// Close closes the buffer and the underlying io.Writer if it implements io.Closer.
-func (b *Buffer) Close() error {
- if c, ok := b.Underlying.(io.Closer); ok {
- return c.Close()
- }
- return nil
-}
-
-// Reset sets the underlying io.Writer to w and resets the buffer.
-func (b *Buffer) Reset(w io.Writer) {
- if b.b == nil {
- b.b = bufio.NewWriterSize(b, DefaultBufferSize)
- }
- b.Underlying = w
- b.b.Reset(w)
-}
-
-// Size returns the size of the underlying buffer in bytes.
-func (b *Buffer) Size() int {
- return b.b.Size()
-}
-
-// WriteString writes the contents of s into the buffer.
-func (b *Buffer) WriteString(s string) (n int, err error) {
- return b.b.WriteString(s)
-}
diff --git a/vendor/github.com/a-h/templ/runtime/bufferpool.go b/vendor/github.com/a-h/templ/runtime/bufferpool.go
deleted file mode 100644
index ca2a131..0000000
--- a/vendor/github.com/a-h/templ/runtime/bufferpool.go
+++ /dev/null
@@ -1,38 +0,0 @@
-package runtime
-
-import (
- "io"
- "sync"
-)
-
-var bufferPool = sync.Pool{
- New: func() any {
- return new(Buffer)
- },
-}
-
-// GetBuffer creates and returns a new buffer if the writer is not already a buffer,
-// or returns the existing buffer if it is.
-func GetBuffer(w io.Writer) (b *Buffer, existing bool) {
- if w == nil {
- return nil, false
- }
- b, ok := w.(*Buffer)
- if ok {
- return b, true
- }
- b = bufferPool.Get().(*Buffer)
- b.Reset(w)
- return b, false
-}
-
-// ReleaseBuffer flushes the buffer and returns it to the pool.
-func ReleaseBuffer(w io.Writer) (err error) {
- b, ok := w.(*Buffer)
- if !ok {
- return nil
- }
- err = b.Flush()
- bufferPool.Put(b)
- return err
-}
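A short sketch of the intended GetBuffer/ReleaseBuffer pairing from the deleted bufferpool.go; the wrapper function and its HTML literal are assumptions for illustration.

```go
package renderutil

import (
	"io"

	"github.com/a-h/templ/runtime"
)

// render writes through a pooled buffer and only releases buffers it created;
// if the caller already passed a *runtime.Buffer, the caller keeps ownership
// and is responsible for flushing it.
func render(w io.Writer) error {
	buf, existing := runtime.GetBuffer(w)
	if _, err := buf.WriteString("<p>Hello</p>"); err != nil {
		return err
	}
	if existing {
		return nil
	}
	// Flush to the underlying writer and return the buffer to the pool.
	return runtime.ReleaseBuffer(buf)
}
```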
diff --git a/vendor/github.com/a-h/templ/runtime/builder.go b/vendor/github.com/a-h/templ/runtime/builder.go
deleted file mode 100644
index 0f4c9d4..0000000
--- a/vendor/github.com/a-h/templ/runtime/builder.go
+++ /dev/null
@@ -1,8 +0,0 @@
-package runtime
-
-import "strings"
-
-// GetBuilder returns a strings.Builder.
-func GetBuilder() (sb strings.Builder) {
- return sb
-}
diff --git a/vendor/github.com/a-h/templ/runtime/runtime.go b/vendor/github.com/a-h/templ/runtime/runtime.go
deleted file mode 100644
index aaa4a2c..0000000
--- a/vendor/github.com/a-h/templ/runtime/runtime.go
+++ /dev/null
@@ -1,21 +0,0 @@
-package runtime
-
-import (
- "context"
- "io"
-
- "github.com/a-h/templ"
-)
-
-// GeneratedComponentInput is used to avoid generated code needing to import the `context` and `io` packages.
-type GeneratedComponentInput struct {
- Context context.Context
- Writer io.Writer
-}
-
-// GeneratedTemplate is used to avoid generated code needing to import the `context` and `io` packages.
-func GeneratedTemplate(f func(GeneratedComponentInput) error) templ.Component {
- return templ.ComponentFunc(func(ctx context.Context, w io.Writer) error {
- return f(GeneratedComponentInput{ctx, w})
- })
-}
diff --git a/vendor/github.com/a-h/templ/safehtml/style.go b/vendor/github.com/a-h/templ/safehtml/style.go
deleted file mode 100644
index 486df7c..0000000
--- a/vendor/github.com/a-h/templ/safehtml/style.go
+++ /dev/null
@@ -1,168 +0,0 @@
-// Adapted from https://raw.githubusercontent.com/google/safehtml/3c4cd5b5d8c9a6c5882fba099979e9f50b65c876/style.go
-
-// Copyright (c) 2017 The Go Authors. All rights reserved.
-//
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file or at
-// https://developers.google.com/open-source/licenses/bsd
-
-package safehtml
-
-import (
- "net/url"
- "regexp"
- "strings"
-)
-
-// SanitizeCSS attempts to sanitize CSS properties.
-func SanitizeCSS(property, value string) (string, string) {
- property = SanitizeCSSProperty(property)
- if property == InnocuousPropertyName {
- return InnocuousPropertyName, InnocuousPropertyValue
- }
- return property, SanitizeCSSValue(property, value)
-}
-
-func SanitizeCSSValue(property, value string) string {
- if sanitizer, ok := cssPropertyNameToValueSanitizer[property]; ok {
- return sanitizer(value)
- }
- return sanitizeRegular(value)
-}
-
-func SanitizeCSSProperty(property string) string {
- if !identifierPattern.MatchString(property) {
- return InnocuousPropertyName
- }
- return strings.ToLower(property)
-}
-
-// identifierPattern matches a subset of valid <ident-token> values defined in
-// https://www.w3.org/TR/css-syntax-3/#ident-token-diagram. This pattern matches all generic family name
-// keywords defined in https://drafts.csswg.org/css-fonts-3/#family-name-value.
-var identifierPattern = regexp.MustCompile(`^[-a-zA-Z]+$`)
-
-var cssPropertyNameToValueSanitizer = map[string]func(string) string{
- "background-image": sanitizeBackgroundImage,
- "font-family": sanitizeFontFamily,
- "display": sanitizeEnum,
- "background-color": sanitizeRegular,
- "background-position": sanitizeRegular,
- "background-repeat": sanitizeRegular,
- "background-size": sanitizeRegular,
- "color": sanitizeRegular,
- "height": sanitizeRegular,
- "width": sanitizeRegular,
- "left": sanitizeRegular,
- "right": sanitizeRegular,
- "top": sanitizeRegular,
- "bottom": sanitizeRegular,
- "font-weight": sanitizeRegular,
- "padding": sanitizeRegular,
- "z-index": sanitizeRegular,
-}
-
-var validURLPrefixes = []string{
- `url("`,
- `url('`,
- `url(`,
-}
-
-var validURLSuffixes = []string{
- `")`,
- `')`,
- `)`,
-}
-
-func sanitizeBackgroundImage(v string) string {
- // Check for <> as per https://github.com/google/safehtml/blob/be23134998433fcf0135dda53593fc8f8bf4df7c/style.go#L87C2-L89C3
- if strings.ContainsAny(v, "<>") {
- return InnocuousPropertyValue
- }
- for _, u := range strings.Split(v, ",") {
- u = strings.TrimSpace(u)
- var found bool
- for i, prefix := range validURLPrefixes {
- if strings.HasPrefix(u, prefix) && strings.HasSuffix(u, validURLSuffixes[i]) {
- found = true
- u = strings.TrimPrefix(u, validURLPrefixes[i])
- u = strings.TrimSuffix(u, validURLSuffixes[i])
- break
- }
- }
- if !found || !urlIsSafe(u) {
- return InnocuousPropertyValue
- }
- }
- return v
-}
-
-func urlIsSafe(s string) bool {
- u, err := url.Parse(s)
- if err != nil {
- return false
- }
- if u.IsAbs() {
- if strings.EqualFold(u.Scheme, "http") || strings.EqualFold(u.Scheme, "https") || strings.EqualFold(u.Scheme, "mailto") {
- return true
- }
- return false
- }
- return true
-}
-
-var genericFontFamilyName = regexp.MustCompile(`^[a-zA-Z][- a-zA-Z]+$`)
-
-func sanitizeFontFamily(s string) string {
- for _, f := range strings.Split(s, ",") {
- f = strings.TrimSpace(f)
- if strings.HasPrefix(f, `"`) {
- if !strings.HasSuffix(f, `"`) {
- return InnocuousPropertyValue
- }
- continue
- }
- if !genericFontFamilyName.MatchString(f) {
- return InnocuousPropertyValue
- }
- }
- return s
-}
-
-func sanitizeEnum(s string) string {
- if !safeEnumPropertyValuePattern.MatchString(s) {
- return InnocuousPropertyValue
- }
- return s
-}
-
-func sanitizeRegular(s string) string {
- if !safeRegularPropertyValuePattern.MatchString(s) {
- return InnocuousPropertyValue
- }
- return s
-}
-
-// InnocuousPropertyName is an innocuous property generated by a sanitizer when its input is unsafe.
-const InnocuousPropertyName = "zTemplUnsafeCSSPropertyName"
-
-// InnocuousPropertyValue is an innocuous property generated by a sanitizer when its input is unsafe.
-const InnocuousPropertyValue = "zTemplUnsafeCSSPropertyValue"
-
-// safeRegularPropertyValuePattern matches strings that are safe to use as property values.
-// Specifically, it matches string where every '*' or '/' is followed by end-of-text or a safe rune
-// (i.e. alphanumerics or runes in the set [+-.!#%_ \t]). This regex ensures that the following
-// are disallowed:
-// - "/*" and "*/", which are CSS comment markers.
-// - "//", even though this is not a comment marker in the CSS specification. Disallowing
-// this string minimizes the chance that browser peculiarities or parsing bugs will allow
-// sanitization to be bypassed.
-// - '(' and ')', which can be used to call functions.
-// - ',', since it can be used to inject extra values into a property.
-// - Runes which could be matched on CSS error recovery of a previously malformed token, such as '@'
-// and ':'. See http://www.w3.org/TR/css3-syntax/#error-handling.
-var safeRegularPropertyValuePattern = regexp.MustCompile(`^(?:[*/]?(?:[0-9a-zA-Z+-.!#%_ \t]|$))*$`)
-
-// safeEnumPropertyValuePattern matches strings that are safe to use as enumerated property values.
-// Specifically, it matches strings that contain only alphabetic and '-' runes.
-var safeEnumPropertyValuePattern = regexp.MustCompile(`^[a-zA-Z-]*$`)
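Two quick, illustrative calls showing the sanitizer behaviour defined above; the property values are made up.

```go
package main

import (
	"fmt"

	"github.com/a-h/templ/safehtml"
)

func main() {
	// A known property with a value matching the safe pattern passes through.
	p, v := safehtml.SanitizeCSS("color", "#112233")
	fmt.Println(p, v) // color #112233

	// A background-image URL with a disallowed scheme is replaced by the
	// innocuous sentinel value.
	p, v = safehtml.SanitizeCSS("background-image", "url(javascript:alert(1))")
	fmt.Println(p, v) // background-image zTemplUnsafeCSSPropertyValue
}
```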
diff --git a/vendor/github.com/a-h/templ/templ.png b/vendor/github.com/a-h/templ/templ.png
deleted file mode 100644
index 1c4bc2f..0000000
--- a/vendor/github.com/a-h/templ/templ.png
+++ /dev/null
Binary files differ
diff --git a/vendor/github.com/a-h/templ/url.go b/vendor/github.com/a-h/templ/url.go
deleted file mode 100644
index bf912e1..0000000
--- a/vendor/github.com/a-h/templ/url.go
+++ /dev/null
@@ -1,20 +0,0 @@
-package templ
-
-import "strings"
-
-// FailedSanitizationURL is returned if a URL fails sanitization checks.
-const FailedSanitizationURL = SafeURL("about:invalid#TemplFailedSanitizationURL")
-
-// URL sanitizes the input string s and returns a SafeURL.
-func URL(s string) SafeURL {
- if i := strings.IndexRune(s, ':'); i >= 0 && !strings.ContainsRune(s[:i], '/') {
- protocol := s[:i]
- if !strings.EqualFold(protocol, "http") && !strings.EqualFold(protocol, "https") && !strings.EqualFold(protocol, "mailto") && !strings.EqualFold(protocol, "tel") && !strings.EqualFold(protocol, "ftp") && !strings.EqualFold(protocol, "ftps") {
- return FailedSanitizationURL
- }
- }
- return SafeURL(s)
-}
-
-// SafeURL is a URL that has been sanitized.
-type SafeURL string
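The deleted URL sanitizer is small enough to demonstrate in a few lines; these example inputs are hypothetical.

```go
package main

import (
	"fmt"

	"github.com/a-h/templ"
)

func main() {
	// Relative URLs and the allowed schemes pass through unchanged.
	fmt.Println(templ.URL("/coins?lang=nl"))       // /coins?lang=nl
	fmt.Println(templ.URL("https://example.com/")) // https://example.com/

	// Anything with another scheme is replaced by the failure sentinel.
	fmt.Println(templ.URL("javascript:alert(1)")) // about:invalid#TemplFailedSanitizationURL
}
```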
diff --git a/vendor/github.com/a-h/templ/version.go b/vendor/github.com/a-h/templ/version.go
deleted file mode 100644
index b7fbb6f..0000000
--- a/vendor/github.com/a-h/templ/version.go
+++ /dev/null
@@ -1,10 +0,0 @@
-package templ
-
-import _ "embed"
-
-//go:embed .version
-var version string
-
-func Version() string {
- return "v" + version
-}
diff --git a/vendor/golang.org/x/mod/LICENSE b/vendor/golang.org/x/mod/LICENSE
new file mode 100644
index 0000000..2a7cf70
--- /dev/null
+++ b/vendor/golang.org/x/mod/LICENSE
@@ -0,0 +1,27 @@
+Copyright 2009 The Go Authors.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+ * Redistributions of source code must retain the above copyright
+notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+copyright notice, this list of conditions and the following disclaimer
+in the documentation and/or other materials provided with the
+distribution.
+ * Neither the name of Google LLC nor the names of its
+contributors may be used to endorse or promote products derived from
+this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/vendor/golang.org/x/mod/PATENTS b/vendor/golang.org/x/mod/PATENTS
new file mode 100644
index 0000000..7330990
--- /dev/null
+++ b/vendor/golang.org/x/mod/PATENTS
@@ -0,0 +1,22 @@
+Additional IP Rights Grant (Patents)
+
+"This implementation" means the copyrightable works distributed by
+Google as part of the Go project.
+
+Google hereby grants to You a perpetual, worldwide, non-exclusive,
+no-charge, royalty-free, irrevocable (except as stated in this section)
+patent license to make, have made, use, offer to sell, sell, import,
+transfer and otherwise run, modify and propagate the contents of this
+implementation of Go, where such license applies only to those patent
+claims, both currently owned or controlled by Google and acquired in
+the future, licensable by Google that are necessarily infringed by this
+implementation of Go. This grant does not include claims that would be
+infringed only as a consequence of further modification of this
+implementation. If you or your agent or exclusive licensee institute or
+order or agree to the institution of patent litigation against any
+entity (including a cross-claim or counterclaim in a lawsuit) alleging
+that this implementation of Go or any code incorporated within this
+implementation of Go constitutes direct or contributory patent
+infringement, or inducement of patent infringement, then any patent
+rights granted to you under this License for this implementation of Go
+shall terminate as of the date such litigation is filed.
diff --git a/vendor/golang.org/x/mod/semver/semver.go b/vendor/golang.org/x/mod/semver/semver.go
new file mode 100644
index 0000000..9a2dfd3
--- /dev/null
+++ b/vendor/golang.org/x/mod/semver/semver.go
@@ -0,0 +1,401 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package semver implements comparison of semantic version strings.
+// In this package, semantic version strings must begin with a leading "v",
+// as in "v1.0.0".
+//
+// The general form of a semantic version string accepted by this package is
+//
+// vMAJOR[.MINOR[.PATCH[-PRERELEASE][+BUILD]]]
+//
+// where square brackets indicate optional parts of the syntax;
+// MAJOR, MINOR, and PATCH are decimal integers without extra leading zeros;
+// PRERELEASE and BUILD are each a series of non-empty dot-separated identifiers
+// using only alphanumeric characters and hyphens; and
+// all-numeric PRERELEASE identifiers must not have leading zeros.
+//
+// This package follows Semantic Versioning 2.0.0 (see semver.org)
+// with two exceptions. First, it requires the "v" prefix. Second, it recognizes
+// vMAJOR and vMAJOR.MINOR (with no prerelease or build suffixes)
+// as shorthands for vMAJOR.0.0 and vMAJOR.MINOR.0.
+package semver
+
+import "sort"
+
+// parsed returns the parsed form of a semantic version string.
+type parsed struct {
+ major string
+ minor string
+ patch string
+ short string
+ prerelease string
+ build string
+}
+
+// IsValid reports whether v is a valid semantic version string.
+func IsValid(v string) bool {
+ _, ok := parse(v)
+ return ok
+}
+
+// Canonical returns the canonical formatting of the semantic version v.
+// It fills in any missing .MINOR or .PATCH and discards build metadata.
+// Two semantic versions compare equal only if their canonical formattings
+// are identical strings.
+// The canonical invalid semantic version is the empty string.
+func Canonical(v string) string {
+ p, ok := parse(v)
+ if !ok {
+ return ""
+ }
+ if p.build != "" {
+ return v[:len(v)-len(p.build)]
+ }
+ if p.short != "" {
+ return v + p.short
+ }
+ return v
+}
+
+// Major returns the major version prefix of the semantic version v.
+// For example, Major("v2.1.0") == "v2".
+// If v is an invalid semantic version string, Major returns the empty string.
+func Major(v string) string {
+ pv, ok := parse(v)
+ if !ok {
+ return ""
+ }
+ return v[:1+len(pv.major)]
+}
+
+// MajorMinor returns the major.minor version prefix of the semantic version v.
+// For example, MajorMinor("v2.1.0") == "v2.1".
+// If v is an invalid semantic version string, MajorMinor returns the empty string.
+func MajorMinor(v string) string {
+ pv, ok := parse(v)
+ if !ok {
+ return ""
+ }
+ i := 1 + len(pv.major)
+ if j := i + 1 + len(pv.minor); j <= len(v) && v[i] == '.' && v[i+1:j] == pv.minor {
+ return v[:j]
+ }
+ return v[:i] + "." + pv.minor
+}
+
+// Prerelease returns the prerelease suffix of the semantic version v.
+// For example, Prerelease("v2.1.0-pre+meta") == "-pre".
+// If v is an invalid semantic version string, Prerelease returns the empty string.
+func Prerelease(v string) string {
+ pv, ok := parse(v)
+ if !ok {
+ return ""
+ }
+ return pv.prerelease
+}
+
+// Build returns the build suffix of the semantic version v.
+// For example, Build("v2.1.0+meta") == "+meta".
+// If v is an invalid semantic version string, Build returns the empty string.
+func Build(v string) string {
+ pv, ok := parse(v)
+ if !ok {
+ return ""
+ }
+ return pv.build
+}
+
+// Compare returns an integer comparing two versions according to
+// semantic version precedence.
+// The result will be 0 if v == w, -1 if v < w, or +1 if v > w.
+//
+// An invalid semantic version string is considered less than a valid one.
+// All invalid semantic version strings compare equal to each other.
+func Compare(v, w string) int {
+ pv, ok1 := parse(v)
+ pw, ok2 := parse(w)
+ if !ok1 && !ok2 {
+ return 0
+ }
+ if !ok1 {
+ return -1
+ }
+ if !ok2 {
+ return +1
+ }
+ if c := compareInt(pv.major, pw.major); c != 0 {
+ return c
+ }
+ if c := compareInt(pv.minor, pw.minor); c != 0 {
+ return c
+ }
+ if c := compareInt(pv.patch, pw.patch); c != 0 {
+ return c
+ }
+ return comparePrerelease(pv.prerelease, pw.prerelease)
+}
+
+// Max canonicalizes its arguments and then returns the version string
+// that compares greater.
+//
+// Deprecated: use [Compare] instead. In most cases, returning a canonicalized
+// version is not expected or desired.
+func Max(v, w string) string {
+ v = Canonical(v)
+ w = Canonical(w)
+ if Compare(v, w) > 0 {
+ return v
+ }
+ return w
+}
+
+// ByVersion implements [sort.Interface] for sorting semantic version strings.
+type ByVersion []string
+
+func (vs ByVersion) Len() int { return len(vs) }
+func (vs ByVersion) Swap(i, j int) { vs[i], vs[j] = vs[j], vs[i] }
+func (vs ByVersion) Less(i, j int) bool {
+ cmp := Compare(vs[i], vs[j])
+ if cmp != 0 {
+ return cmp < 0
+ }
+ return vs[i] < vs[j]
+}
+
+// Sort sorts a list of semantic version strings using [ByVersion].
+func Sort(list []string) {
+ sort.Sort(ByVersion(list))
+}
+
+func parse(v string) (p parsed, ok bool) {
+ if v == "" || v[0] != 'v' {
+ return
+ }
+ p.major, v, ok = parseInt(v[1:])
+ if !ok {
+ return
+ }
+ if v == "" {
+ p.minor = "0"
+ p.patch = "0"
+ p.short = ".0.0"
+ return
+ }
+ if v[0] != '.' {
+ ok = false
+ return
+ }
+ p.minor, v, ok = parseInt(v[1:])
+ if !ok {
+ return
+ }
+ if v == "" {
+ p.patch = "0"
+ p.short = ".0"
+ return
+ }
+ if v[0] != '.' {
+ ok = false
+ return
+ }
+ p.patch, v, ok = parseInt(v[1:])
+ if !ok {
+ return
+ }
+ if len(v) > 0 && v[0] == '-' {
+ p.prerelease, v, ok = parsePrerelease(v)
+ if !ok {
+ return
+ }
+ }
+ if len(v) > 0 && v[0] == '+' {
+ p.build, v, ok = parseBuild(v)
+ if !ok {
+ return
+ }
+ }
+ if v != "" {
+ ok = false
+ return
+ }
+ ok = true
+ return
+}
+
+func parseInt(v string) (t, rest string, ok bool) {
+ if v == "" {
+ return
+ }
+ if v[0] < '0' || '9' < v[0] {
+ return
+ }
+ i := 1
+ for i < len(v) && '0' <= v[i] && v[i] <= '9' {
+ i++
+ }
+ if v[0] == '0' && i != 1 {
+ return
+ }
+ return v[:i], v[i:], true
+}
+
+func parsePrerelease(v string) (t, rest string, ok bool) {
+ // "A pre-release version MAY be denoted by appending a hyphen and
+ // a series of dot separated identifiers immediately following the patch version.
+ // Identifiers MUST comprise only ASCII alphanumerics and hyphen [0-9A-Za-z-].
+ // Identifiers MUST NOT be empty. Numeric identifiers MUST NOT include leading zeroes."
+ if v == "" || v[0] != '-' {
+ return
+ }
+ i := 1
+ start := 1
+ for i < len(v) && v[i] != '+' {
+ if !isIdentChar(v[i]) && v[i] != '.' {
+ return
+ }
+ if v[i] == '.' {
+ if start == i || isBadNum(v[start:i]) {
+ return
+ }
+ start = i + 1
+ }
+ i++
+ }
+ if start == i || isBadNum(v[start:i]) {
+ return
+ }
+ return v[:i], v[i:], true
+}
+
+func parseBuild(v string) (t, rest string, ok bool) {
+ if v == "" || v[0] != '+' {
+ return
+ }
+ i := 1
+ start := 1
+ for i < len(v) {
+ if !isIdentChar(v[i]) && v[i] != '.' {
+ return
+ }
+ if v[i] == '.' {
+ if start == i {
+ return
+ }
+ start = i + 1
+ }
+ i++
+ }
+ if start == i {
+ return
+ }
+ return v[:i], v[i:], true
+}
+
+func isIdentChar(c byte) bool {
+ return 'A' <= c && c <= 'Z' || 'a' <= c && c <= 'z' || '0' <= c && c <= '9' || c == '-'
+}
+
+func isBadNum(v string) bool {
+ i := 0
+ for i < len(v) && '0' <= v[i] && v[i] <= '9' {
+ i++
+ }
+ return i == len(v) && i > 1 && v[0] == '0'
+}
+
+func isNum(v string) bool {
+ i := 0
+ for i < len(v) && '0' <= v[i] && v[i] <= '9' {
+ i++
+ }
+ return i == len(v)
+}
+
+func compareInt(x, y string) int {
+ if x == y {
+ return 0
+ }
+ if len(x) < len(y) {
+ return -1
+ }
+ if len(x) > len(y) {
+ return +1
+ }
+ if x < y {
+ return -1
+ } else {
+ return +1
+ }
+}
+
+func comparePrerelease(x, y string) int {
+ // "When major, minor, and patch are equal, a pre-release version has
+ // lower precedence than a normal version.
+ // Example: 1.0.0-alpha < 1.0.0.
+ // Precedence for two pre-release versions with the same major, minor,
+ // and patch version MUST be determined by comparing each dot separated
+ // identifier from left to right until a difference is found as follows:
+ // identifiers consisting of only digits are compared numerically and
+ // identifiers with letters or hyphens are compared lexically in ASCII
+ // sort order. Numeric identifiers always have lower precedence than
+ // non-numeric identifiers. A larger set of pre-release fields has a
+ // higher precedence than a smaller set, if all of the preceding
+ // identifiers are equal.
+ // Example: 1.0.0-alpha < 1.0.0-alpha.1 < 1.0.0-alpha.beta <
+ // 1.0.0-beta < 1.0.0-beta.2 < 1.0.0-beta.11 < 1.0.0-rc.1 < 1.0.0."
+ if x == y {
+ return 0
+ }
+ if x == "" {
+ return +1
+ }
+ if y == "" {
+ return -1
+ }
+ for x != "" && y != "" {
+ x = x[1:] // skip - or .
+ y = y[1:] // skip - or .
+ var dx, dy string
+ dx, x = nextIdent(x)
+ dy, y = nextIdent(y)
+ if dx != dy {
+ ix := isNum(dx)
+ iy := isNum(dy)
+ if ix != iy {
+ if ix {
+ return -1
+ } else {
+ return +1
+ }
+ }
+ if ix {
+ if len(dx) < len(dy) {
+ return -1
+ }
+ if len(dx) > len(dy) {
+ return +1
+ }
+ }
+ if dx < dy {
+ return -1
+ } else {
+ return +1
+ }
+ }
+ }
+ if x == "" {
+ return -1
+ } else {
+ return +1
+ }
+}
+
+func nextIdent(x string) (dx, rest string) {
+ i := 0
+ for i < len(x) && x[i] != '.' {
+ i++
+ }
+ return x[:i], x[i:]
+}
diff --git a/vendor/golang.org/x/sync/LICENSE b/vendor/golang.org/x/sync/LICENSE
new file mode 100644
index 0000000..2a7cf70
--- /dev/null
+++ b/vendor/golang.org/x/sync/LICENSE
@@ -0,0 +1,27 @@
+Copyright 2009 The Go Authors.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+ * Redistributions of source code must retain the above copyright
+notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+copyright notice, this list of conditions and the following disclaimer
+in the documentation and/or other materials provided with the
+distribution.
+ * Neither the name of Google LLC nor the names of its
+contributors may be used to endorse or promote products derived from
+this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/vendor/golang.org/x/sync/PATENTS b/vendor/golang.org/x/sync/PATENTS
new file mode 100644
index 0000000..7330990
--- /dev/null
+++ b/vendor/golang.org/x/sync/PATENTS
@@ -0,0 +1,22 @@
+Additional IP Rights Grant (Patents)
+
+"This implementation" means the copyrightable works distributed by
+Google as part of the Go project.
+
+Google hereby grants to You a perpetual, worldwide, non-exclusive,
+no-charge, royalty-free, irrevocable (except as stated in this section)
+patent license to make, have made, use, offer to sell, sell, import,
+transfer and otherwise run, modify and propagate the contents of this
+implementation of Go, where such license applies only to those patent
+claims, both currently owned or controlled by Google and acquired in
+the future, licensable by Google that are necessarily infringed by this
+implementation of Go. This grant does not include claims that would be
+infringed only as a consequence of further modification of this
+implementation. If you or your agent or exclusive licensee institute or
+order or agree to the institution of patent litigation against any
+entity (including a cross-claim or counterclaim in a lawsuit) alleging
+that this implementation of Go or any code incorporated within this
+implementation of Go constitutes direct or contributory patent
+infringement, or inducement of patent infringement, then any patent
+rights granted to you under this License for this implementation of Go
+shall terminate as of the date such litigation is filed.
diff --git a/vendor/golang.org/x/sync/errgroup/errgroup.go b/vendor/golang.org/x/sync/errgroup/errgroup.go
new file mode 100644
index 0000000..948a3ee
--- /dev/null
+++ b/vendor/golang.org/x/sync/errgroup/errgroup.go
@@ -0,0 +1,135 @@
+// Copyright 2016 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package errgroup provides synchronization, error propagation, and Context
+// cancelation for groups of goroutines working on subtasks of a common task.
+//
+// [errgroup.Group] is related to [sync.WaitGroup] but adds handling of tasks
+// returning errors.
+package errgroup
+
+import (
+ "context"
+ "fmt"
+ "sync"
+)
+
+type token struct{}
+
+// A Group is a collection of goroutines working on subtasks that are part of
+// the same overall task.
+//
+// A zero Group is valid, has no limit on the number of active goroutines,
+// and does not cancel on error.
+type Group struct {
+ cancel func(error)
+
+ wg sync.WaitGroup
+
+ sem chan token
+
+ errOnce sync.Once
+ err error
+}
+
+func (g *Group) done() {
+ if g.sem != nil {
+ <-g.sem
+ }
+ g.wg.Done()
+}
+
+// WithContext returns a new Group and an associated Context derived from ctx.
+//
+// The derived Context is canceled the first time a function passed to Go
+// returns a non-nil error or the first time Wait returns, whichever occurs
+// first.
+func WithContext(ctx context.Context) (*Group, context.Context) {
+ ctx, cancel := withCancelCause(ctx)
+ return &Group{cancel: cancel}, ctx
+}
+
+// Wait blocks until all function calls from the Go method have returned, then
+// returns the first non-nil error (if any) from them.
+func (g *Group) Wait() error {
+ g.wg.Wait()
+ if g.cancel != nil {
+ g.cancel(g.err)
+ }
+ return g.err
+}
+
+// Go calls the given function in a new goroutine.
+// It blocks until the new goroutine can be added without the number of
+// active goroutines in the group exceeding the configured limit.
+//
+// The first call to return a non-nil error cancels the group's context, if the
+// group was created by calling WithContext. The error will be returned by Wait.
+func (g *Group) Go(f func() error) {
+ if g.sem != nil {
+ g.sem <- token{}
+ }
+
+ g.wg.Add(1)
+ go func() {
+ defer g.done()
+
+ if err := f(); err != nil {
+ g.errOnce.Do(func() {
+ g.err = err
+ if g.cancel != nil {
+ g.cancel(g.err)
+ }
+ })
+ }
+ }()
+}
+
+// TryGo calls the given function in a new goroutine only if the number of
+// active goroutines in the group is currently below the configured limit.
+//
+// The return value reports whether the goroutine was started.
+func (g *Group) TryGo(f func() error) bool {
+ if g.sem != nil {
+ select {
+ case g.sem <- token{}:
+ // Note: this allows barging iff channels in general allow barging.
+ default:
+ return false
+ }
+ }
+
+ g.wg.Add(1)
+ go func() {
+ defer g.done()
+
+ if err := f(); err != nil {
+ g.errOnce.Do(func() {
+ g.err = err
+ if g.cancel != nil {
+ g.cancel(g.err)
+ }
+ })
+ }
+ }()
+ return true
+}
+
+// SetLimit limits the number of active goroutines in this group to at most n.
+// A negative value indicates no limit.
+//
+// Any subsequent call to the Go method will block until it can add an active
+// goroutine without exceeding the configured limit.
+//
+// The limit must not be modified while any goroutines in the group are active.
+func (g *Group) SetLimit(n int) {
+ if n < 0 {
+ g.sem = nil
+ return
+ }
+ if len(g.sem) != 0 {
+ panic(fmt.Errorf("errgroup: modify limit while %v goroutines in the group are still active", len(g.sem)))
+ }
+ g.sem = make(chan token, n)
+}
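
[Editor's note, not part of the patch] A short sketch of how errgroup.Group is typically driven: bounded concurrency via SetLimit, cancellation of the shared context on the first error, and error collection through Wait. The URLs are placeholders, not taken from this repository.

package main

import (
	"context"
	"fmt"
	"net/http"

	"golang.org/x/sync/errgroup"
)

func main() {
	g, ctx := errgroup.WithContext(context.Background())
	g.SetLimit(2) // at most two fetches in flight; set before any Go call

	urls := []string{"https://example.com/a", "https://example.com/b", "https://example.com/c"}
	for _, u := range urls {
		u := u // capture the loop variable for the closure (pre-Go 1.22 semantics)
		g.Go(func() error {
			req, err := http.NewRequestWithContext(ctx, http.MethodGet, u, nil)
			if err != nil {
				return err
			}
			resp, err := http.DefaultClient.Do(req)
			if err != nil {
				return err // the first error cancels ctx for the remaining goroutines
			}
			return resp.Body.Close()
		})
	}
	if err := g.Wait(); err != nil {
		fmt.Println("fetch failed:", err)
	}
}
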
diff --git a/vendor/golang.org/x/sync/errgroup/go120.go b/vendor/golang.org/x/sync/errgroup/go120.go
new file mode 100644
index 0000000..f93c740
--- /dev/null
+++ b/vendor/golang.org/x/sync/errgroup/go120.go
@@ -0,0 +1,13 @@
+// Copyright 2023 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build go1.20
+
+package errgroup
+
+import "context"
+
+func withCancelCause(parent context.Context) (context.Context, func(error)) {
+ return context.WithCancelCause(parent)
+}
diff --git a/vendor/golang.org/x/sync/errgroup/pre_go120.go b/vendor/golang.org/x/sync/errgroup/pre_go120.go
new file mode 100644
index 0000000..88ce334
--- /dev/null
+++ b/vendor/golang.org/x/sync/errgroup/pre_go120.go
@@ -0,0 +1,14 @@
+// Copyright 2023 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build !go1.20
+
+package errgroup
+
+import "context"
+
+func withCancelCause(parent context.Context) (context.Context, func(error)) {
+ ctx, cancel := context.WithCancel(parent)
+ return ctx, func(error) { cancel() }
+}
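
[Editor's note, not part of the patch] go120.go and pre_go120.go are the usual build-constraint shim: one file compiles on Go 1.20+ and uses context.WithCancelCause, the other compiles on older toolchains and wraps plain WithCancel. A sketch of the same pattern for a hypothetical helper (the compat package and function names are illustrative, not from this repository):

// compat/timeout_go121.go
//go:build go1.21

package compat

import (
	"context"
	"time"
)

// withTimeoutCause uses the real API where the toolchain provides it (Go 1.21+).
func withTimeoutCause(parent context.Context, d time.Duration, cause error) (context.Context, context.CancelFunc) {
	return context.WithTimeoutCause(parent, d, cause)
}

// compat/timeout_pre_go121.go
//go:build !go1.21

package compat

import (
	"context"
	"time"
)

// withTimeoutCause falls back to plain WithTimeout and drops the cause on older toolchains.
func withTimeoutCause(parent context.Context, d time.Duration, cause error) (context.Context, context.CancelFunc) {
	_ = cause
	return context.WithTimeout(parent, d)
}
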
diff --git a/vendor/golang.org/x/text/internal/gen/code.go b/vendor/golang.org/x/text/internal/gen/code.go
new file mode 100644
index 0000000..75435c9
--- /dev/null
+++ b/vendor/golang.org/x/text/internal/gen/code.go
@@ -0,0 +1,375 @@
+// Copyright 2015 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package gen
+
+import (
+ "bytes"
+ "encoding/gob"
+ "fmt"
+ "hash"
+ "hash/fnv"
+ "io"
+ "log"
+ "os"
+ "reflect"
+ "strings"
+ "unicode"
+ "unicode/utf8"
+)
+
+// This file contains utilities for generating code.
+
+// TODO: other write methods like:
+// - slices, maps, types, etc.
+
+// CodeWriter is a utility for writing structured code. It computes the content
+// hash and size of written content. It ensures there are newlines between
+// written code blocks.
+type CodeWriter struct {
+ buf bytes.Buffer
+ Size int
+ Hash hash.Hash32 // content hash
+ gob *gob.Encoder
+ // For comments we skip the usual one-line separator if they are followed by
+ // a code block.
+ skipSep bool
+}
+
+func (w *CodeWriter) Write(p []byte) (n int, err error) {
+ return w.buf.Write(p)
+}
+
+// NewCodeWriter returns a new CodeWriter.
+func NewCodeWriter() *CodeWriter {
+ h := fnv.New32()
+ return &CodeWriter{Hash: h, gob: gob.NewEncoder(h)}
+}
+
+// WriteGoFile appends the total size of all created structures to the buffer
+// and writes the result as a Go file to the given file with the given package name.
+func (w *CodeWriter) WriteGoFile(filename, pkg string) {
+ f, err := os.Create(filename)
+ if err != nil {
+ log.Fatalf("Could not create file %s: %v", filename, err)
+ }
+ defer f.Close()
+ if _, err = w.WriteGo(f, pkg, ""); err != nil {
+ log.Fatalf("Error writing file %s: %v", filename, err)
+ }
+}
+
+// WriteVersionedGoFile appends the total size of all created structures to the
+// buffer and writes the result as a Go file to the given file with the given
+// package name and build tags for the current Unicode version.
+func (w *CodeWriter) WriteVersionedGoFile(filename, pkg string) {
+ tags := buildTags()
+ if tags != "" {
+ pattern := fileToPattern(filename)
+ updateBuildTags(pattern)
+ filename = fmt.Sprintf(pattern, UnicodeVersion())
+ }
+ f, err := os.Create(filename)
+ if err != nil {
+ log.Fatalf("Could not create file %s: %v", filename, err)
+ }
+ defer f.Close()
+ if _, err = w.WriteGo(f, pkg, tags); err != nil {
+ log.Fatalf("Error writing file %s: %v", filename, err)
+ }
+}
+
+// WriteGo appends the total size of all created structures to the buffer and
+// writes the result as a Go file to the given writer with the given package name.
+func (w *CodeWriter) WriteGo(out io.Writer, pkg, tags string) (n int, err error) {
+ sz := w.Size
+ if sz > 0 {
+ w.WriteComment("Total table size %d bytes (%dKiB); checksum: %X\n", sz, sz/1024, w.Hash.Sum32())
+ }
+ defer w.buf.Reset()
+ return WriteGo(out, pkg, tags, w.buf.Bytes())
+}
+
+func (w *CodeWriter) printf(f string, x ...interface{}) {
+ fmt.Fprintf(w, f, x...)
+}
+
+func (w *CodeWriter) insertSep() {
+ if w.skipSep {
+ w.skipSep = false
+ return
+ }
+ // Use at least two newlines to ensure a blank space between the previous
+ // block. WriteGoFile will remove extraneous newlines.
+ w.printf("\n\n")
+}
+
+// WriteComment writes a comment block. All line starts are prefixed with "//".
+// Initial empty lines are gobbled. The indentation for the first line is
+// stripped from consecutive lines.
+func (w *CodeWriter) WriteComment(comment string, args ...interface{}) {
+ s := fmt.Sprintf(comment, args...)
+ s = strings.Trim(s, "\n")
+
+ // Use at least two newlines to ensure a blank space between the previous
+ // block. WriteGoFile will remove extraneous newlines.
+ w.printf("\n\n// ")
+ w.skipSep = true
+
+ // strip first indent level.
+ sep := "\n"
+ for ; len(s) > 0 && (s[0] == '\t' || s[0] == ' '); s = s[1:] {
+ sep += s[:1]
+ }
+
+ strings.NewReplacer(sep, "\n// ", "\n", "\n// ").WriteString(w, s)
+
+ w.printf("\n")
+}
+
+func (w *CodeWriter) writeSizeInfo(size int) {
+ w.printf("// Size: %d bytes\n", size)
+}
+
+// WriteConst writes a constant of the given name and value.
+func (w *CodeWriter) WriteConst(name string, x interface{}) {
+ w.insertSep()
+ v := reflect.ValueOf(x)
+
+ switch v.Type().Kind() {
+ case reflect.String:
+ w.printf("const %s %s = ", name, typeName(x))
+ w.WriteString(v.String())
+ w.printf("\n")
+ default:
+ w.printf("const %s = %#v\n", name, x)
+ }
+}
+
+// WriteVar writes a variable of the given name and value.
+func (w *CodeWriter) WriteVar(name string, x interface{}) {
+ w.insertSep()
+ v := reflect.ValueOf(x)
+ oldSize := w.Size
+ sz := int(v.Type().Size())
+ w.Size += sz
+
+ switch v.Type().Kind() {
+ case reflect.String:
+ w.printf("var %s %s = ", name, typeName(x))
+ w.WriteString(v.String())
+ case reflect.Struct:
+ w.gob.Encode(x)
+ fallthrough
+ case reflect.Slice, reflect.Array:
+ w.printf("var %s = ", name)
+ w.writeValue(v)
+ w.writeSizeInfo(w.Size - oldSize)
+ default:
+ w.printf("var %s %s = ", name, typeName(x))
+ w.gob.Encode(x)
+ w.writeValue(v)
+ w.writeSizeInfo(w.Size - oldSize)
+ }
+ w.printf("\n")
+}
+
+func (w *CodeWriter) writeValue(v reflect.Value) {
+ x := v.Interface()
+ switch v.Kind() {
+ case reflect.String:
+ w.WriteString(v.String())
+ case reflect.Array:
+ // Don't double count: callers of WriteArray count on the size being
+ // added, so we need to discount it here.
+ w.Size -= int(v.Type().Size())
+ w.writeSlice(x, true)
+ case reflect.Slice:
+ w.writeSlice(x, false)
+ case reflect.Struct:
+ w.printf("%s{\n", typeName(v.Interface()))
+ t := v.Type()
+ for i := 0; i < v.NumField(); i++ {
+ w.printf("%s: ", t.Field(i).Name)
+ w.writeValue(v.Field(i))
+ w.printf(",\n")
+ }
+ w.printf("}")
+ default:
+ w.printf("%#v", x)
+ }
+}
+
+// WriteString writes a string literal.
+func (w *CodeWriter) WriteString(s string) {
+ io.WriteString(w.Hash, s) // content hash
+ w.Size += len(s)
+
+ const maxInline = 40
+ if len(s) <= maxInline {
+ w.printf("%q", s)
+ return
+ }
+
+ // We will render the string as a multi-line string.
+ const maxWidth = 80 - 4 - len(`"`) - len(`" +`)
+
+ // When starting on its own line, go fmt indents line 2+ an extra level.
+ n, max := maxWidth, maxWidth-4
+
+ // As per https://golang.org/issue/18078, the compiler has trouble
+ // compiling the concatenation of many strings, s0 + s1 + s2 + ... + sN,
+ // for large N. We insert redundant, explicit parentheses to work around
+ // that, lowering the N at any given step: (s0 + s1 + ... + s63) + (s64 +
+ // ... + s127) + etc + (etc + ... + sN).
+ explicitParens, extraComment := len(s) > 128*1024, ""
+ if explicitParens {
+ w.printf(`(`)
+ extraComment = "; the redundant, explicit parens are for https://golang.org/issue/18078"
+ }
+
+ // Print "" +\n, if a string does not start on its own line.
+ b := w.buf.Bytes()
+ if p := len(bytes.TrimRight(b, " \t")); p > 0 && b[p-1] != '\n' {
+ w.printf("\"\" + // Size: %d bytes%s\n", len(s), extraComment)
+ n, max = maxWidth, maxWidth
+ }
+
+ w.printf(`"`)
+
+ for sz, p, nLines := 0, 0, 0; p < len(s); {
+ var r rune
+ r, sz = utf8.DecodeRuneInString(s[p:])
+ out := s[p : p+sz]
+ chars := 1
+ if !unicode.IsPrint(r) || r == utf8.RuneError || r == '"' {
+ switch sz {
+ case 1:
+ out = fmt.Sprintf("\\x%02x", s[p])
+ case 2, 3:
+ out = fmt.Sprintf("\\u%04x", r)
+ case 4:
+ out = fmt.Sprintf("\\U%08x", r)
+ }
+ chars = len(out)
+ } else if r == '\\' {
+ out = "\\" + string(r)
+ chars = 2
+ }
+ if n -= chars; n < 0 {
+ nLines++
+ if explicitParens && nLines&63 == 63 {
+ w.printf("\") + (\"")
+ }
+ w.printf("\" +\n\"")
+ n = max - len(out)
+ }
+ w.printf("%s", out)
+ p += sz
+ }
+ w.printf(`"`)
+ if explicitParens {
+ w.printf(`)`)
+ }
+}
+
+// WriteSlice writes a slice value.
+func (w *CodeWriter) WriteSlice(x interface{}) {
+ w.writeSlice(x, false)
+}
+
+// WriteArray writes an array value.
+func (w *CodeWriter) WriteArray(x interface{}) {
+ w.writeSlice(x, true)
+}
+
+func (w *CodeWriter) writeSlice(x interface{}, isArray bool) {
+ v := reflect.ValueOf(x)
+ w.gob.Encode(v.Len())
+ w.Size += v.Len() * int(v.Type().Elem().Size())
+ name := typeName(x)
+ if isArray {
+ name = fmt.Sprintf("[%d]%s", v.Len(), name[strings.Index(name, "]")+1:])
+ }
+ if isArray {
+ w.printf("%s{\n", name)
+ } else {
+ w.printf("%s{ // %d elements\n", name, v.Len())
+ }
+
+ switch kind := v.Type().Elem().Kind(); kind {
+ case reflect.String:
+ for _, s := range x.([]string) {
+ w.WriteString(s)
+ w.printf(",\n")
+ }
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64,
+ reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
+ // nLine and nBlock are the number of elements per line and block.
+ nLine, nBlock, format := 8, 64, "%d,"
+ switch kind {
+ case reflect.Uint8:
+ format = "%#02x,"
+ case reflect.Uint16:
+ format = "%#04x,"
+ case reflect.Uint32:
+ nLine, nBlock, format = 4, 32, "%#08x,"
+ case reflect.Uint, reflect.Uint64:
+ nLine, nBlock, format = 4, 32, "%#016x,"
+ case reflect.Int8:
+ nLine = 16
+ }
+ n := nLine
+ for i := 0; i < v.Len(); i++ {
+ if i%nBlock == 0 && v.Len() > nBlock {
+ w.printf("// Entry %X - %X\n", i, i+nBlock-1)
+ }
+ x := v.Index(i).Interface()
+ w.gob.Encode(x)
+ w.printf(format, x)
+ if n--; n == 0 {
+ n = nLine
+ w.printf("\n")
+ }
+ }
+ w.printf("\n")
+ case reflect.Struct:
+ zero := reflect.Zero(v.Type().Elem()).Interface()
+ for i := 0; i < v.Len(); i++ {
+ x := v.Index(i).Interface()
+ w.gob.EncodeValue(v)
+ if !reflect.DeepEqual(zero, x) {
+ line := fmt.Sprintf("%#v,\n", x)
+ line = line[strings.IndexByte(line, '{'):]
+ w.printf("%d: ", i)
+ w.printf(line)
+ }
+ }
+ case reflect.Array:
+ for i := 0; i < v.Len(); i++ {
+ w.printf("%d: %#v,\n", i, v.Index(i).Interface())
+ }
+ default:
+ panic("gen: slice elem type not supported")
+ }
+ w.printf("}")
+}
+
+// WriteType writes a definition of the type of the given value and returns the
+// type name.
+func (w *CodeWriter) WriteType(x interface{}) string {
+ t := reflect.TypeOf(x)
+ w.printf("type %s struct {\n", t.Name())
+ for i := 0; i < t.NumField(); i++ {
+ w.printf("\t%s %s\n", t.Field(i).Name, t.Field(i).Type)
+ }
+ w.printf("}\n")
+ return t.Name()
+}
+
+// typeName returns the name of the go type of x.
+func typeName(x interface{}) string {
+ t := reflect.ValueOf(x).Type()
+ return strings.Replace(fmt.Sprint(t), "main.", "", 1)
+}
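
[Editor's note, not part of the patch] A sketch of how the x/text table generators drive CodeWriter. Since golang.org/x/text/internal/gen is an internal package, code like this only compiles inside the x/text module itself; the constant and slice values are made up for illustration.

package main

import (
	"os"

	"golang.org/x/text/internal/gen"
)

func main() {
	w := gen.NewCodeWriter()

	w.WriteComment("Tables generated for the example; do not edit by hand.")
	w.WriteConst("exampleVersion", "15.0.0")
	w.WriteVar("exampleRanges", []uint16{0x0041, 0x005A, 0x0061, 0x007A})

	// WriteGo prepends the standard generated-code header and package clause,
	// appends a size/checksum comment for the written values, and gofmts the result.
	if _, err := w.WriteGo(os.Stdout, "tables", ""); err != nil {
		panic(err)
	}
}
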
diff --git a/vendor/golang.org/x/text/internal/gen/gen.go b/vendor/golang.org/x/text/internal/gen/gen.go
new file mode 100644
index 0000000..78bfef6
--- /dev/null
+++ b/vendor/golang.org/x/text/internal/gen/gen.go
@@ -0,0 +1,354 @@
+// Copyright 2015 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package gen contains common code for the various code generation tools in the
+// text repository. Its usage ensures consistency between tools.
+//
+// This package defines command line flags that are common to most generation
+// tools. The flags allow for specifying specific Unicode and CLDR versions
+// in the public Unicode data repository (https://www.unicode.org/Public).
+//
+// A local Unicode data mirror can be set through the flag -local or the
+// environment variable UNICODE_DIR. The former takes precedence. The local
+// directory should follow the same structure as the public repository.
+//
+// IANA data can also optionally be mirrored by putting it in the iana directory
+// rooted at the top of the local mirror. Beware, though, that IANA data is not
+// versioned. So it is up to the developer to use the right version.
+package gen // import "golang.org/x/text/internal/gen"
+
+import (
+ "bytes"
+ "flag"
+ "fmt"
+ "go/build"
+ "go/format"
+ "io"
+ "log"
+ "net/http"
+ "os"
+ "path"
+ "path/filepath"
+ "regexp"
+ "strings"
+ "sync"
+ "unicode"
+
+ "golang.org/x/text/unicode/cldr"
+)
+
+var (
+ url = flag.String("url",
+ "https://www.unicode.org/Public",
+ "URL of Unicode database directory")
+ iana = flag.String("iana",
+ "http://www.iana.org",
+ "URL of the IANA repository")
+ unicodeVersion = flag.String("unicode",
+ getEnv("UNICODE_VERSION", unicode.Version),
+ "unicode version to use")
+ cldrVersion = flag.String("cldr",
+ getEnv("CLDR_VERSION", cldr.Version),
+ "cldr version to use")
+)
+
+func getEnv(name, def string) string {
+ if v := os.Getenv(name); v != "" {
+ return v
+ }
+ return def
+}
+
+// Init performs common initialization for a gen command. It parses the flags
+// and sets up the standard logging parameters.
+func Init() {
+ log.SetPrefix("")
+ log.SetFlags(log.Lshortfile)
+ flag.Parse()
+}
+
+const header = `// Code generated by running "go generate" in golang.org/x/text. DO NOT EDIT.
+
+`
+
+// UnicodeVersion reports the requested Unicode version.
+func UnicodeVersion() string {
+ return *unicodeVersion
+}
+
+// CLDRVersion reports the requested CLDR version.
+func CLDRVersion() string {
+ return *cldrVersion
+}
+
+var tags = []struct{ version, buildTags string }{
+ {"9.0.0", "!go1.10"},
+ {"10.0.0", "go1.10,!go1.13"},
+ {"11.0.0", "go1.13,!go1.14"},
+ {"12.0.0", "go1.14,!go1.16"},
+ {"13.0.0", "go1.16,!go1.21"},
+ {"15.0.0", "go1.21"},
+}
+
+// buildTags reports the build tags used for the current Unicode version.
+func buildTags() string {
+ v := UnicodeVersion()
+ for _, e := range tags {
+ if e.version == v {
+ return e.buildTags
+ }
+ }
+ log.Fatalf("Unknown build tags for Unicode version %q.", v)
+ return ""
+}
+
+// IsLocal reports whether data files are available locally.
+func IsLocal() bool {
+ dir, err := localReadmeFile()
+ if err != nil {
+ return false
+ }
+ if _, err = os.Stat(dir); err != nil {
+ return false
+ }
+ return true
+}
+
+// OpenUCDFile opens the requested UCD file. The file is specified relative to
+// the public Unicode root directory. It will call log.Fatal if there are any
+// errors.
+func OpenUCDFile(file string) io.ReadCloser {
+ return openUnicode(path.Join(*unicodeVersion, "ucd", file))
+}
+
+// OpenCLDRCoreZip opens the CLDR core zip file. It will call log.Fatal if there
+// are any errors.
+func OpenCLDRCoreZip() io.ReadCloser {
+ return OpenUnicodeFile("cldr", *cldrVersion, "core.zip")
+}
+
+// OpenUnicodeFile opens the requested file of the requested category from the
+// root of the Unicode data archive. The file is specified relative to the
+// public Unicode root directory. If version is "", it will use the default
+// Unicode version. It will call log.Fatal if there are any errors.
+func OpenUnicodeFile(category, version, file string) io.ReadCloser {
+ if version == "" {
+ version = UnicodeVersion()
+ }
+ return openUnicode(path.Join(category, version, file))
+}
+
+// OpenIANAFile opens the requested IANA file. The file is specified relative
+// to the IANA root, which is typically either http://www.iana.org or the
+// iana directory in the local mirror. It will call log.Fatal if there are any
+// errors.
+func OpenIANAFile(path string) io.ReadCloser {
+ return Open(*iana, "iana", path)
+}
+
+var (
+ dirMutex sync.Mutex
+ localDir string
+)
+
+const permissions = 0755
+
+func localReadmeFile() (string, error) {
+ p, err := build.Import("golang.org/x/text", "", build.FindOnly)
+ if err != nil {
+ return "", fmt.Errorf("Could not locate package: %v", err)
+ }
+ return filepath.Join(p.Dir, "DATA", "README"), nil
+}
+
+func getLocalDir() string {
+ dirMutex.Lock()
+ defer dirMutex.Unlock()
+
+ readme, err := localReadmeFile()
+ if err != nil {
+ log.Fatal(err)
+ }
+ dir := filepath.Dir(readme)
+ if _, err := os.Stat(readme); err != nil {
+ if err := os.MkdirAll(dir, permissions); err != nil {
+ log.Fatalf("Could not create directory: %v", err)
+ }
+ os.WriteFile(readme, []byte(readmeTxt), permissions)
+ }
+ return dir
+}
+
+const readmeTxt = `Generated by golang.org/x/text/internal/gen. DO NOT EDIT.
+
+This directory contains downloaded files used to generate the various tables
+in the golang.org/x/text subrepo.
+
+Note that the language subtag repo (iana/assignments/language-subtag-registry)
+and all other files in the iana subdirectory are not versioned and will need
+to be periodically manually updated. The easiest way to do this is to remove
+the entire iana directory. This is mostly of concern when updating the language
+package.
+`
+
+// Open opens subdir/path if a local directory is specified and the file exists,
+// where subdir is a directory relative to the local root, or fetches it from
+// urlRoot/path otherwise. It will call log.Fatal if there are any errors.
+func Open(urlRoot, subdir, path string) io.ReadCloser {
+ file := filepath.Join(getLocalDir(), subdir, filepath.FromSlash(path))
+ return open(file, urlRoot, path)
+}
+
+func openUnicode(path string) io.ReadCloser {
+ file := filepath.Join(getLocalDir(), filepath.FromSlash(path))
+ return open(file, *url, path)
+}
+
+// TODO: automatically periodically update non-versioned files.
+
+func open(file, urlRoot, path string) io.ReadCloser {
+ if f, err := os.Open(file); err == nil {
+ return f
+ }
+ r := get(urlRoot, path)
+ defer r.Close()
+ b, err := io.ReadAll(r)
+ if err != nil {
+ log.Fatalf("Could not download file: %v", err)
+ }
+ os.MkdirAll(filepath.Dir(file), permissions)
+ if err := os.WriteFile(file, b, permissions); err != nil {
+ log.Fatalf("Could not create file: %v", err)
+ }
+ return io.NopCloser(bytes.NewReader(b))
+}
+
+func get(root, path string) io.ReadCloser {
+ url := root + "/" + path
+ fmt.Printf("Fetching %s...", url)
+ defer fmt.Println(" done.")
+ resp, err := http.Get(url)
+ if err != nil {
+ log.Fatalf("HTTP GET: %v", err)
+ }
+ if resp.StatusCode != 200 {
+ log.Fatalf("Bad GET status for %q: %q", url, resp.Status)
+ }
+ return resp.Body
+}
+
+// TODO: use Write*Version in all applicable packages.
+
+// WriteUnicodeVersion writes a constant for the Unicode version from which the
+// tables are generated.
+func WriteUnicodeVersion(w io.Writer) {
+ fmt.Fprintf(w, "// UnicodeVersion is the Unicode version from which the tables in this package are derived.\n")
+ fmt.Fprintf(w, "const UnicodeVersion = %q\n\n", UnicodeVersion())
+}
+
+// WriteCLDRVersion writes a constant for the CLDR version from which the
+// tables are generated.
+func WriteCLDRVersion(w io.Writer) {
+ fmt.Fprintf(w, "// CLDRVersion is the CLDR version from which the tables in this package are derived.\n")
+ fmt.Fprintf(w, "const CLDRVersion = %q\n\n", CLDRVersion())
+}
+
+// WriteGoFile prepends a standard file comment and package statement to the
+// given bytes, applies gofmt, and writes them to a file with the given name.
+// It will call log.Fatal if there are any errors.
+func WriteGoFile(filename, pkg string, b []byte) {
+ w, err := os.Create(filename)
+ if err != nil {
+ log.Fatalf("Could not create file %s: %v", filename, err)
+ }
+ defer w.Close()
+ if _, err = WriteGo(w, pkg, "", b); err != nil {
+ log.Fatalf("Error writing file %s: %v", filename, err)
+ }
+}
+
+func fileToPattern(filename string) string {
+ suffix := ".go"
+ if strings.HasSuffix(filename, "_test.go") {
+ suffix = "_test.go"
+ }
+ prefix := filename[:len(filename)-len(suffix)]
+ return fmt.Sprint(prefix, "%s", suffix)
+}
+
+// tagLines returns the //go:build lines to add to the file.
+func tagLines(tags string) string {
+ return "//go:build " + strings.ReplaceAll(tags, ",", " && ") + "\n"
+}
+
+func updateBuildTags(pattern string) {
+ for _, t := range tags {
+ oldFile := fmt.Sprintf(pattern, t.version)
+ b, err := os.ReadFile(oldFile)
+ if err != nil {
+ continue
+ }
+ b = regexp.MustCompile(`//go:build.*\n`).ReplaceAll(b, []byte(tagLines(t.buildTags)))
+ err = os.WriteFile(oldFile, b, 0644)
+ if err != nil {
+ log.Fatal(err)
+ }
+ }
+}
+
+// WriteVersionedGoFile prepends a standard file comment, adds build tags to
+// version the file for the current Unicode version, and package statement to
+// the given bytes, applies gofmt, and writes them to a file with the given
+// name. It will call log.Fatal if there are any errors.
+func WriteVersionedGoFile(filename, pkg string, b []byte) {
+ pattern := fileToPattern(filename)
+ updateBuildTags(pattern)
+ filename = fmt.Sprintf(pattern, UnicodeVersion())
+
+ w, err := os.Create(filename)
+ if err != nil {
+ log.Fatalf("Could not create file %s: %v", filename, err)
+ }
+ defer w.Close()
+ if _, err = WriteGo(w, pkg, buildTags(), b); err != nil {
+ log.Fatalf("Error writing file %s: %v", filename, err)
+ }
+}
+
+// WriteGo prepends a standard file comment and package statement to the given
+// bytes, applies gofmt, and writes them to w.
+func WriteGo(w io.Writer, pkg, tags string, b []byte) (n int, err error) {
+ src := []byte(header)
+ if tags != "" {
+ src = append(src, tagLines(tags)...)
+ src = append(src, '\n')
+ }
+ src = append(src, fmt.Sprintf("package %s\n\n", pkg)...)
+ src = append(src, b...)
+ formatted, err := format.Source(src)
+ if err != nil {
+ // Print the generated code even in case of an error so that the
+ // returned error can be meaningfully interpreted.
+ n, _ = w.Write(src)
+ return n, err
+ }
+ return w.Write(formatted)
+}
+
+// Repackage rewrites a Go file from belonging to package main to belonging to
+// the given package.
+func Repackage(inFile, outFile, pkg string) {
+ src, err := os.ReadFile(inFile)
+ if err != nil {
+ log.Fatalf("reading %s: %v", inFile, err)
+ }
+ const toDelete = "package main\n\n"
+ i := bytes.Index(src, []byte(toDelete))
+ if i < 0 {
+ log.Fatalf("Could not find %q in %s.", toDelete, inFile)
+ }
+ w := &bytes.Buffer{}
+ w.Write(src[i+len(toDelete):])
+ WriteGoFile(outFile, pkg, w.Bytes())
+}
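
[Editor's note, not part of the patch] gen.WriteGo is the piece most generators end up calling: it wraps a raw code body with the generated-code header, optional //go:build tags, and a package clause, then runs gofmt over the result. A sketch under the same caveat as above (internal package, illustrative body):

package main

import (
	"bytes"
	"fmt"

	"golang.org/x/text/internal/gen"
)

func main() {
	body := []byte("// Size is an example constant.\nconst Size = 42\n")

	var out bytes.Buffer
	// The tags string "go1.21" becomes a "//go:build go1.21" line above the package clause.
	if _, err := gen.WriteGo(&out, "tables", "go1.21", body); err != nil {
		panic(err)
	}
	fmt.Print(out.String())
}
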
diff --git a/vendor/golang.org/x/text/message/pipeline/extract.go b/vendor/golang.org/x/text/message/pipeline/extract.go
new file mode 100644
index 0000000..a15a7f9
--- /dev/null
+++ b/vendor/golang.org/x/text/message/pipeline/extract.go
@@ -0,0 +1,821 @@
+// Copyright 2016 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package pipeline
+
+import (
+ "bytes"
+ "errors"
+ "fmt"
+ "go/ast"
+ "go/constant"
+ "go/format"
+ "go/token"
+ "go/types"
+ "path/filepath"
+ "sort"
+ "strings"
+ "unicode"
+ "unicode/utf8"
+
+ fmtparser "golang.org/x/text/internal/format"
+ "golang.org/x/tools/go/callgraph"
+ "golang.org/x/tools/go/callgraph/cha"
+ "golang.org/x/tools/go/loader"
+ "golang.org/x/tools/go/ssa"
+ "golang.org/x/tools/go/ssa/ssautil"
+)
+
+const debug = false
+
+// TODO:
+// - merge information into existing files
+// - handle different file formats (PO, XLIFF)
+// - handle features (gender, plural)
+// - message rewriting
+
+// - `msg:"etc"` tags
+
+// Extract extracts all strings from the package defined in Config.
+func Extract(c *Config) (*State, error) {
+ x, err := newExtracter(c)
+ if err != nil {
+ return nil, wrap(err, "")
+ }
+
+ if err := x.seedEndpoints(); err != nil {
+ return nil, err
+ }
+ x.extractMessages()
+
+ return &State{
+ Config: *c,
+ program: x.iprog,
+ Extracted: Messages{
+ Language: c.SourceLanguage,
+ Messages: x.messages,
+ },
+ }, nil
+}
+
+type extracter struct {
+ conf loader.Config
+ iprog *loader.Program
+ prog *ssa.Program
+ callGraph *callgraph.Graph
+
+ // Calls and other expressions to collect.
+ globals map[token.Pos]*constData
+ funcs map[token.Pos]*callData
+ messages []Message
+}
+
+func newExtracter(c *Config) (x *extracter, err error) {
+ x = &extracter{
+ conf: loader.Config{},
+ globals: map[token.Pos]*constData{},
+ funcs: map[token.Pos]*callData{},
+ }
+
+ x.iprog, err = loadPackages(&x.conf, c.Packages)
+ if err != nil {
+ return nil, wrap(err, "")
+ }
+
+ x.prog = ssautil.CreateProgram(x.iprog, ssa.GlobalDebug|ssa.BareInits)
+ x.prog.Build()
+
+ x.callGraph = cha.CallGraph(x.prog)
+
+ return x, nil
+}
+
+func (x *extracter) globalData(pos token.Pos) *constData {
+ cd := x.globals[pos]
+ if cd == nil {
+ cd = &constData{}
+ x.globals[pos] = cd
+ }
+ return cd
+}
+
+func (x *extracter) seedEndpoints() error {
+ pkgInfo := x.iprog.Package("golang.org/x/text/message")
+ if pkgInfo == nil {
+ return errors.New("pipeline: golang.org/x/text/message is not imported")
+ }
+ pkg := x.prog.Package(pkgInfo.Pkg)
+ typ := types.NewPointer(pkg.Type("Printer").Type())
+
+ x.processGlobalVars()
+
+ x.handleFunc(x.prog.LookupMethod(typ, pkg.Pkg, "Printf"), &callData{
+ formatPos: 1,
+ argPos: 2,
+ isMethod: true,
+ })
+ x.handleFunc(x.prog.LookupMethod(typ, pkg.Pkg, "Sprintf"), &callData{
+ formatPos: 1,
+ argPos: 2,
+ isMethod: true,
+ })
+ x.handleFunc(x.prog.LookupMethod(typ, pkg.Pkg, "Fprintf"), &callData{
+ formatPos: 2,
+ argPos: 3,
+ isMethod: true,
+ })
+ return nil
+}
+
+// processGlobalVars finds string constants that are assigned to global
+// variables.
+func (x *extracter) processGlobalVars() {
+ for _, p := range x.prog.AllPackages() {
+ m, ok := p.Members["init"]
+ if !ok {
+ continue
+ }
+ for _, b := range m.(*ssa.Function).Blocks {
+ for _, i := range b.Instrs {
+ s, ok := i.(*ssa.Store)
+ if !ok {
+ continue
+ }
+ a, ok := s.Addr.(*ssa.Global)
+ if !ok {
+ continue
+ }
+ t := a.Type()
+ for {
+ p, ok := t.(*types.Pointer)
+ if !ok {
+ break
+ }
+ t = p.Elem()
+ }
+ if b, ok := t.(*types.Basic); !ok || b.Kind() != types.String {
+ continue
+ }
+ x.visitInit(a, s.Val)
+ }
+ }
+ }
+}
+
+type constData struct {
+ call *callData // to provide a signature for the constants
+ values []constVal
+ others []token.Pos // Assigned to other global data.
+}
+
+func (d *constData) visit(x *extracter, f func(c constant.Value)) {
+ for _, v := range d.values {
+ f(v.value)
+ }
+ for _, p := range d.others {
+ if od, ok := x.globals[p]; ok {
+ od.visit(x, f)
+ }
+ }
+}
+
+type constVal struct {
+ value constant.Value
+ pos token.Pos
+}
+
+type callData struct {
+ call ssa.CallInstruction
+ expr *ast.CallExpr
+ formats []constant.Value
+
+ callee *callData
+ isMethod bool
+ formatPos int
+ argPos int // varargs at this position in the call
+ argTypes []int // arguments extractable from this position
+}
+
+func (c *callData) callFormatPos() int {
+ c = c.callee
+ if c.isMethod {
+ return c.formatPos - 1
+ }
+ return c.formatPos
+}
+
+func (c *callData) callArgsStart() int {
+ c = c.callee
+ if c.isMethod {
+ return c.argPos - 1
+ }
+ return c.argPos
+}
+
+func (c *callData) Pos() token.Pos { return c.call.Pos() }
+func (c *callData) Pkg() *types.Package { return c.call.Parent().Pkg.Pkg }
+
+func (x *extracter) handleFunc(f *ssa.Function, fd *callData) {
+ for _, e := range x.callGraph.Nodes[f].In {
+ if e.Pos() == 0 {
+ continue
+ }
+
+ call := e.Site
+ caller := x.funcs[call.Pos()]
+ if caller != nil {
+ // TODO: theoretically a format string could be passed to multiple
+ // arguments of a function. Support this eventually.
+ continue
+ }
+ x.debug(call, "CALL", f.String())
+
+ caller = &callData{
+ call: call,
+ callee: fd,
+ formatPos: -1,
+ argPos: -1,
+ }
+ // Offset by one if we are invoking an interface method.
+ offset := 0
+ if call.Common().IsInvoke() {
+ offset = -1
+ }
+ x.funcs[call.Pos()] = caller
+ if fd.argPos >= 0 {
+ x.visitArgs(caller, call.Common().Args[fd.argPos+offset])
+ }
+ x.visitFormats(caller, call.Common().Args[fd.formatPos+offset])
+ }
+}
+
+type posser interface {
+ Pos() token.Pos
+ Parent() *ssa.Function
+}
+
+func (x *extracter) debug(v posser, header string, args ...interface{}) {
+ if debug {
+ pos := ""
+ if p := v.Parent(); p != nil {
+ pos = posString(&x.conf, p.Package().Pkg, v.Pos())
+ }
+ if header != "CALL" && header != "INSERT" {
+ header = " " + header
+ }
+ fmt.Printf("%-32s%-10s%-15T ", pos+fmt.Sprintf("@%d", v.Pos()), header, v)
+ for _, a := range args {
+ fmt.Printf(" %v", a)
+ }
+ fmt.Println()
+ }
+}
+
+// visitInit evaluates and collects values assigned to global variables in an
+// init function.
+func (x *extracter) visitInit(global *ssa.Global, v ssa.Value) {
+ if v == nil {
+ return
+ }
+ x.debug(v, "GLOBAL", v)
+
+ switch v := v.(type) {
+ case *ssa.Phi:
+ for _, e := range v.Edges {
+ x.visitInit(global, e)
+ }
+
+ case *ssa.Const:
+ // Only record strings with letters.
+ if str := constant.StringVal(v.Value); isMsg(str) {
+ cd := x.globalData(global.Pos())
+ cd.values = append(cd.values, constVal{v.Value, v.Pos()})
+ }
+ // TODO: handle %m-directive.
+
+ case *ssa.Global:
+ cd := x.globalData(global.Pos())
+ cd.others = append(cd.others, v.Pos())
+
+ case *ssa.FieldAddr, *ssa.Field:
+ // TODO: mark field index v.Field of v.X.Type() for extraction and extract
+ // example args to provide parameters for the translator.
+
+ case *ssa.Slice:
+ if v.Low == nil && v.High == nil && v.Max == nil {
+ x.visitInit(global, v.X)
+ }
+
+ case *ssa.Alloc:
+ if ref := v.Referrers(); ref != nil {
+ for _, r := range *ref {
+ values := []ssa.Value{}
+ for _, o := range r.Operands(nil) {
+ if o == nil || *o == v {
+ continue
+ }
+ values = append(values, *o)
+ }
+ // TODO: return something different if we care about multiple
+ // values as well.
+ if len(values) == 1 {
+ x.visitInit(global, values[0])
+ }
+ }
+ }
+
+ case ssa.Instruction:
+ rands := v.Operands(nil)
+ if len(rands) == 1 && rands[0] != nil {
+ x.visitInit(global, *rands[0])
+ }
+ }
+ return
+}
+
+// visitFormats walks back to the original sources of a format value and
+// records any string constants that can reach it; values originating from a
+// function parameter are resolved by following the enclosing function's callers.
+func (x *extracter) visitFormats(call *callData, v ssa.Value) {
+ if v == nil {
+ return
+ }
+ x.debug(v, "VALUE", v)
+
+ switch v := v.(type) {
+ case *ssa.Phi:
+ for _, e := range v.Edges {
+ x.visitFormats(call, e)
+ }
+
+ case *ssa.Const:
+ // Only record strings with letters.
+ if isMsg(constant.StringVal(v.Value)) {
+ x.debug(call.call, "FORMAT", v.Value.ExactString())
+ call.formats = append(call.formats, v.Value)
+ }
+ // TODO: handle %m-directive.
+
+ case *ssa.Global:
+ x.globalData(v.Pos()).call = call
+
+ case *ssa.FieldAddr, *ssa.Field:
+ // TODO: mark field index v.Field of v.X.Type() for extraction and extract
+ // example args to provide parameters for the translator.
+
+ case *ssa.Slice:
+ if v.Low == nil && v.High == nil && v.Max == nil {
+ x.visitFormats(call, v.X)
+ }
+
+ case *ssa.Parameter:
+ // TODO: handle the function for the index parameter.
+ f := v.Parent()
+ for i, p := range f.Params {
+ if p == v {
+ if call.formatPos < 0 {
+ call.formatPos = i
+ // TODO: is there a better way to detect this is calling
+ // a method rather than a function?
+ call.isMethod = len(f.Params) > f.Signature.Params().Len()
+ x.handleFunc(v.Parent(), call)
+ } else if debug && i != call.formatPos {
+ // TODO: support this.
+ fmt.Printf("WARNING:%s: format string passed to arg %d and %d\n",
+ posString(&x.conf, call.Pkg(), call.Pos()),
+ call.formatPos, i)
+ }
+ }
+ }
+
+ case *ssa.Alloc:
+ if ref := v.Referrers(); ref != nil {
+ for _, r := range *ref {
+ values := []ssa.Value{}
+ for _, o := range r.Operands(nil) {
+ if o == nil || *o == v {
+ continue
+ }
+ values = append(values, *o)
+ }
+ // TODO: return something different if we care about multiple
+ // values as well.
+ if len(values) == 1 {
+ x.visitFormats(call, values[0])
+ }
+ }
+ }
+
+ // TODO:
+ // case *ssa.Index:
+ // // Get all values in the array if applicable
+ // case *ssa.IndexAddr:
+ // // Get all values in the slice or *array if applicable.
+ // case *ssa.Lookup:
+ // // Get all values in the map if applicable.
+
+ case *ssa.FreeVar:
+ // TODO: find the link between free variables and parameters:
+ //
+ // func freeVar(p *message.Printer, str string) {
+ // fn := func(p *message.Printer) {
+ // p.Printf(str)
+ // }
+ // fn(p)
+ // }
+
+ case *ssa.Call:
+
+ case ssa.Instruction:
+ rands := v.Operands(nil)
+ if len(rands) == 1 && rands[0] != nil {
+ x.visitFormats(call, *rands[0])
+ }
+ }
+}
+
+// Note: a function may have an argument marked as both format and passthrough.
+
+// visitArgs collects information on arguments. For wrapped functions it will
+// just determine the position of the variable args slice.
+func (x *extracter) visitArgs(fd *callData, v ssa.Value) {
+ if v == nil {
+ return
+ }
+ x.debug(v, "ARGV", v)
+ switch v := v.(type) {
+
+ case *ssa.Slice:
+ if v.Low == nil && v.High == nil && v.Max == nil {
+ x.visitArgs(fd, v.X)
+ }
+
+ case *ssa.Parameter:
+ // TODO: handle the function for the index parameter.
+ f := v.Parent()
+ for i, p := range f.Params {
+ if p == v {
+ fd.argPos = i
+ }
+ }
+
+ case *ssa.Alloc:
+ if ref := v.Referrers(); ref != nil {
+ for _, r := range *ref {
+ values := []ssa.Value{}
+ for _, o := range r.Operands(nil) {
+ if o == nil || *o == v {
+ continue
+ }
+ values = append(values, *o)
+ }
+ // TODO: return something different if we care about
+ // multiple values as well.
+ if len(values) == 1 {
+ x.visitArgs(fd, values[0])
+ }
+ }
+ }
+
+ case ssa.Instruction:
+ rands := v.Operands(nil)
+ if len(rands) == 1 && rands[0] != nil {
+ x.visitArgs(fd, *rands[0])
+ }
+ }
+}
+
+// print returns Go syntax for the specified node.
+func (x *extracter) print(n ast.Node) string {
+ var buf bytes.Buffer
+ format.Node(&buf, x.conf.Fset, n)
+ return buf.String()
+}
+
+type packageExtracter struct {
+ f *ast.File
+ x *extracter
+ info *loader.PackageInfo
+ cmap ast.CommentMap
+}
+
+func (px packageExtracter) getComment(n ast.Node) string {
+ cs := px.cmap.Filter(n).Comments()
+ if len(cs) > 0 {
+ return strings.TrimSpace(cs[0].Text())
+ }
+ return ""
+}
+
+func (x *extracter) extractMessages() {
+ prog := x.iprog
+ keys := make([]*types.Package, 0, len(x.iprog.AllPackages))
+ for k := range x.iprog.AllPackages {
+ keys = append(keys, k)
+ }
+ sort.Slice(keys, func(i, j int) bool { return keys[i].Path() < keys[j].Path() })
+ files := []packageExtracter{}
+ for _, k := range keys {
+ info := x.iprog.AllPackages[k]
+ for _, f := range info.Files {
+ // Associate comments with nodes.
+ px := packageExtracter{
+ f, x, info,
+ ast.NewCommentMap(prog.Fset, f, f.Comments),
+ }
+ files = append(files, px)
+ }
+ }
+ for _, px := range files {
+ ast.Inspect(px.f, func(n ast.Node) bool {
+ switch v := n.(type) {
+ case *ast.CallExpr:
+ if d := x.funcs[v.Lparen]; d != nil {
+ d.expr = v
+ }
+ }
+ return true
+ })
+ }
+ for _, px := range files {
+ ast.Inspect(px.f, func(n ast.Node) bool {
+ switch v := n.(type) {
+ case *ast.CallExpr:
+ return px.handleCall(v)
+ case *ast.ValueSpec:
+ return px.handleGlobal(v)
+ }
+ return true
+ })
+ }
+}
+
+func (px packageExtracter) handleGlobal(spec *ast.ValueSpec) bool {
+ comment := px.getComment(spec)
+
+ for _, ident := range spec.Names {
+ data, ok := px.x.globals[ident.Pos()]
+ if !ok {
+ continue
+ }
+ name := ident.Name
+ var arguments []argument
+ if data.call != nil {
+ arguments = px.getArguments(data.call)
+ } else if !strings.HasPrefix(name, "msg") && !strings.HasPrefix(name, "Msg") {
+ continue
+ }
+ data.visit(px.x, func(c constant.Value) {
+ px.addMessage(spec.Pos(), []string{name}, c, comment, arguments)
+ })
+ }
+
+ return true
+}
+
+func (px packageExtracter) handleCall(call *ast.CallExpr) bool {
+ x := px.x
+ data := x.funcs[call.Lparen]
+ if data == nil || len(data.formats) == 0 {
+ return true
+ }
+ if data.expr != call {
+ panic("invariant `data.expr != call` failed")
+ }
+ x.debug(data.call, "INSERT", data.formats)
+
+ argn := data.callFormatPos()
+ if argn >= len(call.Args) {
+ return true
+ }
+ format := call.Args[argn]
+
+ arguments := px.getArguments(data)
+
+ comment := ""
+ key := []string{}
+ if ident, ok := format.(*ast.Ident); ok {
+ key = append(key, ident.Name)
+ if v, ok := ident.Obj.Decl.(*ast.ValueSpec); ok && v.Comment != nil {
+ // TODO: get comment above ValueSpec as well
+ comment = v.Comment.Text()
+ }
+ }
+ if c := px.getComment(call.Args[0]); c != "" {
+ comment = c
+ }
+
+ formats := data.formats
+ for _, c := range formats {
+ px.addMessage(call.Lparen, key, c, comment, arguments)
+ }
+ return true
+}
+
+func (px packageExtracter) getArguments(data *callData) []argument {
+ arguments := []argument{}
+ x := px.x
+ info := px.info
+ if data.callArgsStart() >= 0 {
+ args := data.expr.Args[data.callArgsStart():]
+ for i, arg := range args {
+ expr := x.print(arg)
+ val := ""
+ if v := info.Types[arg].Value; v != nil {
+ val = v.ExactString()
+ switch arg.(type) {
+ case *ast.BinaryExpr, *ast.UnaryExpr:
+ expr = val
+ }
+ }
+ arguments = append(arguments, argument{
+ ArgNum: i + 1,
+ Type: info.Types[arg].Type.String(),
+ UnderlyingType: info.Types[arg].Type.Underlying().String(),
+ Expr: expr,
+ Value: val,
+ Comment: px.getComment(arg),
+ Position: posString(&x.conf, info.Pkg, arg.Pos()),
+ // TODO report whether it implements
+ // interfaces plural.Interface,
+ // gender.Interface.
+ })
+ }
+ }
+ return arguments
+}
+
+func (px packageExtracter) addMessage(
+ pos token.Pos,
+ key []string,
+ c constant.Value,
+ comment string,
+ arguments []argument) {
+ x := px.x
+ fmtMsg := constant.StringVal(c)
+
+ ph := placeholders{index: map[string]string{}}
+
+ trimmed, _, _ := trimWS(fmtMsg)
+
+ p := fmtparser.Parser{}
+ simArgs := make([]interface{}, len(arguments))
+ for i, v := range arguments {
+ simArgs[i] = v
+ }
+ msg := ""
+ p.Reset(simArgs)
+ for p.SetFormat(trimmed); p.Scan(); {
+ name := ""
+ var arg *argument
+ switch p.Status {
+ case fmtparser.StatusText:
+ msg += p.Text()
+ continue
+ case fmtparser.StatusSubstitution,
+ fmtparser.StatusBadWidthSubstitution,
+ fmtparser.StatusBadPrecSubstitution:
+ arguments[p.ArgNum-1].used = true
+ arg = &arguments[p.ArgNum-1]
+ name = getID(arg)
+ case fmtparser.StatusBadArgNum, fmtparser.StatusMissingArg:
+ arg = &argument{
+ ArgNum: p.ArgNum,
+ Position: posString(&x.conf, px.info.Pkg, pos),
+ }
+ name, arg.UnderlyingType = verbToPlaceholder(p.Text(), p.ArgNum)
+ }
+ sub := p.Text()
+ if !p.HasIndex {
+ r, sz := utf8.DecodeLastRuneInString(sub)
+ sub = fmt.Sprintf("%s[%d]%c", sub[:len(sub)-sz], p.ArgNum, r)
+ }
+ msg += fmt.Sprintf("{%s}", ph.addArg(arg, name, sub))
+ }
+ key = append(key, msg)
+
+ // Add additional Placeholders that can be used in translations
+ // that are not present in the string.
+ for _, arg := range arguments {
+ if arg.used {
+ continue
+ }
+ ph.addArg(&arg, getID(&arg), fmt.Sprintf("%%[%d]v", arg.ArgNum))
+ }
+
+ x.messages = append(x.messages, Message{
+ ID: key,
+ Key: fmtMsg,
+ Message: Text{Msg: msg},
+ // TODO(fix): this doesn't get the before comment.
+ Comment: comment,
+ Placeholders: ph.slice,
+ Position: posString(&x.conf, px.info.Pkg, pos),
+ })
+}
+
+func posString(conf *loader.Config, pkg *types.Package, pos token.Pos) string {
+ p := conf.Fset.Position(pos)
+ file := fmt.Sprintf("%s:%d:%d", filepath.Base(p.Filename), p.Line, p.Column)
+ return filepath.Join(pkg.Path(), file)
+}
+
+func getID(arg *argument) string {
+ s := getLastComponent(arg.Expr)
+ s = strip(s)
+ s = strings.Replace(s, " ", "", -1)
+ // For small variable names, use user-defined types for more info.
+ if len(s) <= 2 && arg.UnderlyingType != arg.Type {
+ s = getLastComponent(arg.Type)
+ }
+ return strings.Title(s)
+}
+
+// strip is a dirty hack to convert function calls to placeholder IDs.
+func strip(s string) string {
+ s = strings.Map(func(r rune) rune {
+ if unicode.IsSpace(r) || r == '-' {
+ return '_'
+ }
+ if !unicode.In(r, unicode.Letter, unicode.Mark, unicode.Number) {
+ return -1
+ }
+ return r
+ }, s)
+ // Strip "Get" from getter functions.
+ if strings.HasPrefix(s, "Get") || strings.HasPrefix(s, "get") {
+ if len(s) > len("get") {
+ r, _ := utf8.DecodeRuneInString(s)
+ if !unicode.In(r, unicode.Ll, unicode.M) { // not lower or mark
+ s = s[len("get"):]
+ }
+ }
+ }
+ return s
+}
+
+// verbToPlaceholder gives a name for a placeholder based on the substitution
+// verb. This is only to be used if there is otherwise no other type information
+// available.
+func verbToPlaceholder(sub string, pos int) (name, underlying string) {
+ r, _ := utf8.DecodeLastRuneInString(sub)
+ name = fmt.Sprintf("Arg_%d", pos)
+ switch r {
+ case 's', 'q':
+ underlying = "string"
+ case 'd':
+ name = "Integer"
+ underlying = "int"
+ case 'e', 'f', 'g':
+ name = "Number"
+ underlying = "float64"
+ case 'm':
+ name = "Message"
+ underlying = "string"
+ default:
+ underlying = "interface{}"
+ }
+ return name, underlying
+}
+
+type placeholders struct {
+ index map[string]string
+ slice []Placeholder
+}
+
+func (p *placeholders) addArg(arg *argument, name, sub string) (id string) {
+ id = name
+ alt, ok := p.index[id]
+ for i := 1; ok && alt != sub; i++ {
+ id = fmt.Sprintf("%s_%d", name, i)
+ alt, ok = p.index[id]
+ }
+ p.index[id] = sub
+ p.slice = append(p.slice, Placeholder{
+ ID: id,
+ String: sub,
+ Type: arg.Type,
+ UnderlyingType: arg.UnderlyingType,
+ ArgNum: arg.ArgNum,
+ Expr: arg.Expr,
+ Comment: arg.Comment,
+ })
+ return id
+}
+
+func getLastComponent(s string) string {
+ return s[1+strings.LastIndexByte(s, '.'):]
+}
+
+// isMsg returns whether s should be translated.
+func isMsg(s string) bool {
+ // TODO: parse as format string and omit strings that contain letters
+ // coming from format verbs.
+ for _, r := range s {
+ if unicode.In(r, unicode.L) {
+ return true
+ }
+ }
+ return false
+}
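
For orientation, a rough sketch (not part of this commit) of what the extraction code above produces. Placeholder names come from the argument expressions via getID, format verbs gain explicit argument indexes, and the rewritten string becomes the message ID; the call and values below are hypothetical.

	// Hypothetical source call in a scanned package:
	p.Printf("Hello %s", name)

	// Roughly what addMessage records for it (field subset only):
	//   Key:     "Hello %s"
	//   Message: "Hello {Name}"
	//   Placeholder{ID: "Name", String: "%[1]s", ArgNum: 1, Expr: "name"}
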
diff --git a/vendor/golang.org/x/text/message/pipeline/generate.go b/vendor/golang.org/x/text/message/pipeline/generate.go
new file mode 100644
index 0000000..f747c37
--- /dev/null
+++ b/vendor/golang.org/x/text/message/pipeline/generate.go
@@ -0,0 +1,329 @@
+// Copyright 2017 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package pipeline
+
+import (
+ "fmt"
+ "go/build"
+ "io"
+ "os"
+ "path/filepath"
+ "regexp"
+ "sort"
+ "strings"
+ "text/template"
+
+ "golang.org/x/text/collate"
+ "golang.org/x/text/feature/plural"
+ "golang.org/x/text/internal"
+ "golang.org/x/text/internal/catmsg"
+ "golang.org/x/text/internal/gen"
+ "golang.org/x/text/language"
+ "golang.org/x/tools/go/loader"
+)
+
+var transRe = regexp.MustCompile(`messages\.(.*)\.json`)
+
+// Generate writes a Go file that defines a Catalog with translated messages.
+// Translations are retrieved from s.Messages, not s.Translations, so it
+// is assumed Merge has been called.
+func (s *State) Generate() error {
+ path := s.Config.GenPackage
+ if path == "" {
+ path = "."
+ }
+ isDir := path[0] == '.'
+ prog, err := loadPackages(&loader.Config{}, []string{path})
+ if err != nil {
+ return wrap(err, "could not load package")
+ }
+ pkgs := prog.InitialPackages()
+ if len(pkgs) != 1 {
+ return errorf("more than one package selected: %v", pkgs)
+ }
+ pkg := pkgs[0].Pkg.Name()
+
+ cw, err := s.generate()
+ if err != nil {
+ return err
+ }
+ if !isDir {
+ gopath := filepath.SplitList(build.Default.GOPATH)[0]
+ path = filepath.Join(gopath, "src", filepath.FromSlash(pkgs[0].Pkg.Path()))
+ }
+ if len(s.Config.GenFile) == 0 {
+ cw.WriteGo(os.Stdout, pkg, "")
+ return nil
+ }
+ if filepath.IsAbs(s.Config.GenFile) {
+ path = s.Config.GenFile
+ } else {
+ path = filepath.Join(path, s.Config.GenFile)
+ }
+ cw.WriteGoFile(path, pkg) // TODO: WriteGoFile should return error.
+ return err
+}
+
+// WriteGen writes a Go file with the given package name to w that defines a
+// Catalog with translated messages. Translations are retrieved from s.Messages,
+// not s.Translations, so it is assumed Merge has been called.
+func (s *State) WriteGen(w io.Writer, pkg string) error {
+ cw, err := s.generate()
+ if err != nil {
+ return err
+ }
+ _, err = cw.WriteGo(w, pkg, "")
+ return err
+}
+
+// Generate is deprecated; use (*State).Generate().
+func Generate(w io.Writer, pkg string, extracted *Messages, trans ...Messages) (n int, err error) {
+ s := State{
+ Extracted: *extracted,
+ Translations: trans,
+ }
+ cw, err := s.generate()
+ if err != nil {
+ return 0, err
+ }
+ return cw.WriteGo(w, pkg, "")
+}
+
+func (s *State) generate() (*gen.CodeWriter, error) {
+ // Build up index of translations and original messages.
+ translations := map[language.Tag]map[string]Message{}
+ languages := []language.Tag{}
+ usedKeys := map[string]int{}
+
+ for _, loc := range s.Messages {
+ tag := loc.Language
+ if _, ok := translations[tag]; !ok {
+ translations[tag] = map[string]Message{}
+ languages = append(languages, tag)
+ }
+ for _, m := range loc.Messages {
+ if !m.Translation.IsEmpty() {
+ for _, id := range m.ID {
+ if _, ok := translations[tag][id]; ok {
+ warnf("Duplicate translation in locale %q for message %q", tag, id)
+ }
+ translations[tag][id] = m
+ }
+ }
+ }
+ }
+
+ // Verify completeness and register keys.
+ internal.SortTags(languages)
+
+ langVars := []string{}
+ for _, tag := range languages {
+ langVars = append(langVars, strings.Replace(tag.String(), "-", "_", -1))
+ dict := translations[tag]
+ for _, msg := range s.Extracted.Messages {
+ for _, id := range msg.ID {
+ if trans, ok := dict[id]; ok && !trans.Translation.IsEmpty() {
+ if _, ok := usedKeys[msg.Key]; !ok {
+ usedKeys[msg.Key] = len(usedKeys)
+ }
+ break
+ }
+ // TODO: log missing entry.
+ warnf("%s: Missing entry for %q.", tag, id)
+ }
+ }
+ }
+
+ cw := gen.NewCodeWriter()
+
+ x := &struct {
+ Fallback language.Tag
+ Languages []string
+ }{
+ Fallback: s.Extracted.Language,
+ Languages: langVars,
+ }
+
+ if err := lookup.Execute(cw, x); err != nil {
+ return nil, wrap(err, "error")
+ }
+
+ keyToIndex := []string{}
+ for k := range usedKeys {
+ keyToIndex = append(keyToIndex, k)
+ }
+ sort.Strings(keyToIndex)
+ fmt.Fprint(cw, "var messageKeyToIndex = map[string]int{\n")
+ for _, k := range keyToIndex {
+ fmt.Fprintf(cw, "%q: %d,\n", k, usedKeys[k])
+ }
+ fmt.Fprint(cw, "}\n\n")
+
+ for i, tag := range languages {
+ dict := translations[tag]
+ a := make([]string, len(usedKeys))
+ for _, msg := range s.Extracted.Messages {
+ for _, id := range msg.ID {
+ if trans, ok := dict[id]; ok && !trans.Translation.IsEmpty() {
+ m, err := assemble(&msg, &trans.Translation)
+ if err != nil {
+ return nil, wrap(err, "error")
+ }
+ _, leadWS, trailWS := trimWS(msg.Key)
+ if leadWS != "" || trailWS != "" {
+ m = catmsg.Affix{
+ Message: m,
+ Prefix: leadWS,
+ Suffix: trailWS,
+ }
+ }
+ // TODO: support macros.
+ data, err := catmsg.Compile(tag, nil, m)
+ if err != nil {
+ return nil, wrap(err, "error")
+ }
+ key := usedKeys[msg.Key]
+ if d := a[key]; d != "" && d != data {
+ warnf("Duplicate non-consistent translation for key %q, picking the one for message %q", msg.Key, id)
+ }
+ a[key] = string(data)
+ break
+ }
+ }
+ }
+ index := []uint32{0}
+ p := 0
+ for _, s := range a {
+ p += len(s)
+ index = append(index, uint32(p))
+ }
+
+ cw.WriteVar(langVars[i]+"Index", index)
+ cw.WriteConst(langVars[i]+"Data", strings.Join(a, ""))
+ }
+ return cw, nil
+}
+
+func assemble(m *Message, t *Text) (msg catmsg.Message, err error) {
+ keys := []string{}
+ for k := range t.Var {
+ keys = append(keys, k)
+ }
+ sort.Strings(keys)
+ var a []catmsg.Message
+ for _, k := range keys {
+ t := t.Var[k]
+ m, err := assemble(m, &t)
+ if err != nil {
+ return nil, err
+ }
+ a = append(a, &catmsg.Var{Name: k, Message: m})
+ }
+ if t.Select != nil {
+ s, err := assembleSelect(m, t.Select)
+ if err != nil {
+ return nil, err
+ }
+ a = append(a, s)
+ }
+ if t.Msg != "" {
+ sub, err := m.Substitute(t.Msg)
+ if err != nil {
+ return nil, err
+ }
+ a = append(a, catmsg.String(sub))
+ }
+ switch len(a) {
+ case 0:
+ return nil, errorf("generate: empty message")
+ case 1:
+ return a[0], nil
+ default:
+ return catmsg.FirstOf(a), nil
+
+ }
+}
+
+func assembleSelect(m *Message, s *Select) (msg catmsg.Message, err error) {
+ cases := []string{}
+ for c := range s.Cases {
+ cases = append(cases, c)
+ }
+ sortCases(cases)
+
+ caseMsg := []interface{}{}
+ for _, c := range cases {
+ cm := s.Cases[c]
+ m, err := assemble(m, &cm)
+ if err != nil {
+ return nil, err
+ }
+ caseMsg = append(caseMsg, c, m)
+ }
+
+ ph := m.Placeholder(s.Arg)
+
+ switch s.Feature {
+ case "plural":
+ // TODO: only printf-style selects are supported as of yet.
+ return plural.Selectf(ph.ArgNum, ph.String, caseMsg...), nil
+ }
+ return nil, errorf("unknown feature type %q", s.Feature)
+}
+
+func sortCases(cases []string) {
+ // TODO: implement full interface.
+ sort.Slice(cases, func(i, j int) bool {
+ switch {
+ case cases[i] != "other" && cases[j] == "other":
+ return true
+ case cases[i] == "other" && cases[j] != "other":
+ return false
+ }
+ // the following code relies on '<' < '=' < any letter.
+ return cmpNumeric(cases[i], cases[j]) == -1
+ })
+}
+
+var cmpNumeric = collate.New(language.Und, collate.Numeric).CompareString
+
+var lookup = template.Must(template.New("gen").Parse(`
+import (
+ "golang.org/x/text/language"
+ "golang.org/x/text/message"
+ "golang.org/x/text/message/catalog"
+)
+
+type dictionary struct {
+ index []uint32
+ data string
+}
+
+func (d *dictionary) Lookup(key string) (data string, ok bool) {
+ p, ok := messageKeyToIndex[key]
+ if !ok {
+ return "", false
+ }
+ start, end := d.index[p], d.index[p+1]
+ if start == end {
+ return "", false
+ }
+ return d.data[start:end], true
+}
+
+func init() {
+ dict := map[string]catalog.Dictionary{
+ {{range .Languages}}"{{.}}": &dictionary{index: {{.}}Index, data: {{.}}Data },
+ {{end}}
+ }
+ fallback := language.MustParse("{{.Fallback}}")
+ cat, err := catalog.NewFromMap(dict, catalog.Fallback(fallback))
+ if err != nil {
+ panic(err)
+ }
+ message.DefaultCatalog = cat
+}
+
+`))
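
The template above wires the per-language index/data pairs into catalog.NewFromMap and installs the result as message.DefaultCatalog. A minimal usage sketch, assuming a generated catalog that includes an "nl" translation (the import path of the generated package is hypothetical):

	package main

	import (
		"golang.org/x/text/language"
		"golang.org/x/text/message"

		_ "example.com/myapp/catalog" // hypothetical package holding the generated file
	)

	func main() {
		p := message.NewPrinter(language.Dutch)
		p.Printf("Hello %s", "world") // rendered with the "nl" translation when one is compiled in
	}
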
diff --git a/vendor/golang.org/x/text/message/pipeline/message.go b/vendor/golang.org/x/text/message/pipeline/message.go
new file mode 100644
index 0000000..c83a8fd
--- /dev/null
+++ b/vendor/golang.org/x/text/message/pipeline/message.go
@@ -0,0 +1,241 @@
+// Copyright 2017 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package pipeline
+
+import (
+ "encoding/json"
+ "errors"
+ "strings"
+
+ "golang.org/x/text/language"
+)
+
+// TODO: these definitions should be moved to a package so that they can be
+// used by other tools.
+
+// This file contains the structures used to define translations of a set of
+// messages.
+//
+// A translation may have multiple translation strings, or messages, depending
+// on the feature values of the various arguments. For instance, consider
+// a hypothetical translation from English to English, where the source defines
+// the format string "%d file(s) remaining".
+// See the examples directory for examples of extracted messages.
+
+// Messages is used to store translations for a single language.
+type Messages struct {
+ Language language.Tag `json:"language"`
+ Messages []Message `json:"messages"`
+ Macros map[string]Text `json:"macros,omitempty"`
+}
+
+// A Message describes a message to be translated.
+type Message struct {
+ // ID contains a list of identifiers for the message.
+ ID IDList `json:"id"`
+ // Key is the string that is used to look up the message at runtime.
+ Key string `json:"key,omitempty"`
+ Meaning string `json:"meaning,omitempty"`
+ Message Text `json:"message"`
+ Translation Text `json:"translation"`
+
+ Comment string `json:"comment,omitempty"`
+ TranslatorComment string `json:"translatorComment,omitempty"`
+
+ Placeholders []Placeholder `json:"placeholders,omitempty"`
+
+ // Fuzzy indicates that the provided translation needs review by a
+ // translator, for instance because it was derived from automated
+ // translation.
+ Fuzzy bool `json:"fuzzy,omitempty"`
+
+ // TODO: default placeholder syntax is {foo}. Allow alternative escaping
+ // like `foo`.
+
+ // Extraction information.
+ Position string `json:"position,omitempty"` // filePosition:line
+}
+
+// Placeholder reports the placeholder for the given ID if it is defined or nil
+// otherwise.
+func (m *Message) Placeholder(id string) *Placeholder {
+ for _, p := range m.Placeholders {
+ if p.ID == id {
+ return &p
+ }
+ }
+ return nil
+}
+
+// Substitute replaces placeholders in msg with their original value.
+func (m *Message) Substitute(msg string) (sub string, err error) {
+ last := 0
+ for i := 0; i < len(msg); {
+ pLeft := strings.IndexByte(msg[i:], '{')
+ if pLeft == -1 {
+ break
+ }
+ pLeft += i
+ pRight := strings.IndexByte(msg[pLeft:], '}')
+ if pRight == -1 {
+ return "", errorf("unmatched '}'")
+ }
+ pRight += pLeft
+ id := strings.TrimSpace(msg[pLeft+1 : pRight])
+ i = pRight + 1
+ if id != "" && id[0] == '$' {
+ continue
+ }
+ sub += msg[last:pLeft]
+ last = i
+ ph := m.Placeholder(id)
+ if ph == nil {
+ return "", errorf("unknown placeholder %q in message %q", id, msg)
+ }
+ sub += ph.String
+ }
+ sub += msg[last:]
+ return sub, err
+}
+
+var errIncompatibleMessage = errors.New("messages incompatible")
+
+func checkEquivalence(a, b *Message) error {
+ for _, v := range a.ID {
+ for _, w := range b.ID {
+ if v == w {
+ return nil
+ }
+ }
+ }
+ // TODO: canonicalize placeholders and check for type equivalence.
+ return errIncompatibleMessage
+}
+
+// A Placeholder is a part of the message that should not be changed by a
+// translator. It can be used to hide or prettify format strings (e.g. %d or
+// {{.Count}}), hide HTML, or mark common names that should not be translated.
+type Placeholder struct {
+ // ID is the placeholder identifier without the curly braces.
+ ID string `json:"id"`
+
+ // String is the string with which to replace the placeholder. This may be a
+ // formatting string (for instance "%d" or "{{.Count}}") or a literal string
+ // (<div>).
+ String string `json:"string"`
+
+ Type string `json:"type"`
+ UnderlyingType string `json:"underlyingType"`
+ // ArgNum and Expr are set if the placeholder is a substitution of an
+ // argument.
+ ArgNum int `json:"argNum,omitempty"`
+ Expr string `json:"expr,omitempty"`
+
+ Comment string `json:"comment,omitempty"`
+ Example string `json:"example,omitempty"`
+
+ // Features contains the features that are available for the implementation
+ // of this argument.
+ Features []Feature `json:"features,omitempty"`
+}
+
+// An argument contains information about the arguments passed to a message.
+type argument struct {
+ // ArgNum corresponds to the number that should be used for explicit argument indexes (e.g.
+ // "%[1]d").
+ ArgNum int `json:"argNum,omitempty"`
+
+ used bool // Used by Placeholder
+ Type string `json:"type"`
+ UnderlyingType string `json:"underlyingType"`
+ Expr string `json:"expr"`
+ Value string `json:"value,omitempty"`
+ Comment string `json:"comment,omitempty"`
+ Position string `json:"position,omitempty"`
+}
+
+// Feature holds information about a feature that can be implemented by
+// an Argument.
+type Feature struct {
+ Type string `json:"type"` // Right now this is only gender and plural.
+
+ // TODO: possible values and examples for the language under consideration.
+
+}
+
+// Text defines a message to be displayed.
+type Text struct {
+ // Msg and Select contain the message to be displayed. Msg may be used as
+ // a fallback value if none of the select cases match.
+ Msg string `json:"msg,omitempty"`
+ Select *Select `json:"select,omitempty"`
+
+ // Var defines a map of variables that may be substituted in the selected
+ // message.
+ Var map[string]Text `json:"var,omitempty"`
+
+ // Example contains an example message formatted with default values.
+ Example string `json:"example,omitempty"`
+}
+
+// IsEmpty reports whether this Text can generate anything.
+func (t *Text) IsEmpty() bool {
+ return t.Msg == "" && t.Select == nil && t.Var == nil
+}
+
+// rawText erases the UnmarshalJSON method.
+type rawText Text
+
+// UnmarshalJSON implements json.Unmarshaler.
+func (t *Text) UnmarshalJSON(b []byte) error {
+ if b[0] == '"' {
+ return json.Unmarshal(b, &t.Msg)
+ }
+ return json.Unmarshal(b, (*rawText)(t))
+}
+
+// MarshalJSON implements json.Marshaler.
+func (t *Text) MarshalJSON() ([]byte, error) {
+ if t.Select == nil && t.Var == nil && t.Example == "" {
+ return json.Marshal(t.Msg)
+ }
+ return json.Marshal((*rawText)(t))
+}
+
+// IDList is a set of identifiers, each of which may refer to a possibly
+// different version of the same message. When looking up a message, the first
+// identifier in the list takes precedence.
+type IDList []string
+
+// UnmarshalJSON implements json.Unmarshaler.
+func (id *IDList) UnmarshalJSON(b []byte) error {
+ if b[0] == '"' {
+ *id = []string{""}
+ return json.Unmarshal(b, &((*id)[0]))
+ }
+ return json.Unmarshal(b, (*[]string)(id))
+}
+
+// MarshalJSON implements json.Marshaler.
+func (id *IDList) MarshalJSON() ([]byte, error) {
+ if len(*id) == 1 {
+ return json.Marshal((*id)[0])
+ }
+ return json.Marshal((*[]string)(id))
+}
+
+// Select selects a Text based on the feature value associated with a feature of
+// a certain argument.
+type Select struct {
+ Feature string `json:"feature"` // Name of Feature type (e.g plural)
+ Arg string `json:"arg"` // The placeholder ID
+ Cases map[string]Text `json:"cases"`
+}
+
+// TODO: order matters, but can we derive the ordering from the case keys?
+// type Case struct {
+// Key string `json:"key"`
+// Value Text `json:"value"`
+// }
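
To make the shape of these structures concrete, here is a sketch (made-up values) of a messages.gotext.json fragment decoded into the Messages type defined above; the plain-string forms of "id" and of the plural cases exercise the custom UnmarshalJSON methods.

	package main

	import (
		"encoding/json"
		"fmt"

		"golang.org/x/text/message/pipeline"
	)

	const sample = `{
	  "language": "en",
	  "messages": [{
	    "id": "{Count} files remaining",
	    "message": "{Count} files remaining",
	    "translation": {
	      "select": {
	        "feature": "plural",
	        "arg": "Count",
	        "cases": {
	          "one": "One file remaining",
	          "other": "{Count} files remaining"
	        }
	      }
	    },
	    "placeholders": [
	      {"id": "Count", "string": "%[1]d", "type": "int", "underlyingType": "int", "argNum": 1, "expr": "n"}
	    ]
	  }]
	}`

	func main() {
		var m pipeline.Messages
		if err := json.Unmarshal([]byte(sample), &m); err != nil {
			panic(err)
		}
		fmt.Println(m.Messages[0].Translation.Select.Feature) // plural
	}
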
diff --git a/vendor/golang.org/x/text/message/pipeline/pipeline.go b/vendor/golang.org/x/text/message/pipeline/pipeline.go
new file mode 100644
index 0000000..34f15f8
--- /dev/null
+++ b/vendor/golang.org/x/text/message/pipeline/pipeline.go
@@ -0,0 +1,422 @@
+// Copyright 2017 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package pipeline provides tools for creating translation pipelines.
+//
+// NOTE: UNDER DEVELOPMENT. API MAY CHANGE.
+package pipeline
+
+import (
+ "bytes"
+ "encoding/json"
+ "fmt"
+ "go/build"
+ "go/parser"
+ "io/ioutil"
+ "log"
+ "os"
+ "path/filepath"
+ "regexp"
+ "strings"
+ "text/template"
+ "unicode"
+
+ "golang.org/x/text/internal"
+ "golang.org/x/text/language"
+ "golang.org/x/text/runes"
+ "golang.org/x/tools/go/loader"
+)
+
+const (
+ extractFile = "extracted.gotext.json"
+ outFile = "out.gotext.json"
+ gotextSuffix = "gotext.json"
+)
+
+// Config contains configuration for the translation pipeline.
+type Config struct {
+ // Supported indicates the languages for which data should be generated.
+ // The default is to support all locales for which there are matching
+ // translation files.
+ Supported []language.Tag
+
+ // --- Extraction
+
+ SourceLanguage language.Tag
+
+ Packages []string
+
+ // --- File structure
+
+ // Dir is the root dir for all operations.
+ Dir string
+
+ // TranslationsPattern is a regular expression to match incoming translation
+ // files. These files may appear in any directory rooted at Dir.
+ // The language for the translation files is determined as follows:
+ // 1. From the Language field in the file.
+ // 2. If not present, from a valid language tag in the filename, separated
+ // by dots (e.g. "en-US.json" or "incoming.pt_PT.xmb").
+ // 3. If not present, from the closest subdirectory in which the file
+ // is contained that parses as a valid language tag.
+ TranslationsPattern string
+
+ // OutPattern defines the location for translation files for a certain
+ // language. The default is "{{.Dir}}/{{.Language}}/out.{{.Ext}}"
+ OutPattern string
+
+ // Format defines the file format for generated translation files.
+ // The default is XMB. Alternatives are GetText, XLIFF, L20n, GoText.
+ Format string
+
+ Ext string
+
+ // TODO:
+ // Actions are additional actions to be performed after the initial extract
+ // and merge.
+ // Actions []struct {
+ // Name string
+ // Options map[string]string
+ // }
+
+ // --- Generation
+
+ // GenFile may be in a different package. If it is not defined, the output
+ // will be written to stdout.
+ GenFile string
+
+ // GenPackage is the package or relative path into which to generate the
+ // file. If not specified it is relative to the current directory.
+ GenPackage string
+
+ // DeclareVar defines a variable to which to assign the generated Catalog.
+ DeclareVar string
+
+ // SetDefault determines whether to assign the generated Catalog to
+ // message.DefaultCatalog. The default for this is true if DeclareVar is
+ // not defined, false otherwise.
+ SetDefault bool
+
+ // TODO:
+ // - Printf-style configuration
+ // - Template-style configuration
+ // - Extraction options
+ // - Rewrite options
+ // - Generation options
+}
+
+// Operations:
+// - extract: get the strings
+// - disambiguate: find messages with the same key, but possibly different meanings.
+// - create out: create a list of messages that need translations
+// - load trans: load the list of current translations
+// - merge: assign list of translations as done
+// - (action)expand: analyze features and create example sentences for each version.
+// - (action)googletrans: pre-populate messages with automatic translations.
+// - (action)export: send out messages somewhere non-standard
+// - (action)import: load messages from somewhere non-standard
+// - vet program: don't pass "foo" + var + "bar" strings. Not using funcs for translated strings.
+// - vet trans: coverage: all translations/ all features.
+// - generate: generate Go code
+
+// State holds all accumulated information on translations during processing.
+type State struct {
+ Config Config
+
+ Package string
+ program *loader.Program
+
+ Extracted Messages `json:"messages"`
+
+ // Messages includes all messages for which there need to be translations.
+ // Duplicates may be eliminated. Generation will be done from these messages
+ // (usually after merging).
+ Messages []Messages
+
+ // Translations are incoming translations for the application messages.
+ Translations []Messages
+}
+
+func (s *State) dir() string {
+ if d := s.Config.Dir; d != "" {
+ return d
+ }
+ return "./locales"
+}
+
+func outPattern(s *State) (string, error) {
+ c := s.Config
+ pat := c.OutPattern
+ if pat == "" {
+ pat = "{{.Dir}}/{{.Language}}/out.{{.Ext}}"
+ }
+
+ ext := c.Ext
+ if ext == "" {
+ ext = c.Format
+ }
+ if ext == "" {
+ ext = gotextSuffix
+ }
+ t, err := template.New("").Parse(pat)
+ if err != nil {
+ return "", wrap(err, "error parsing template")
+ }
+ buf := bytes.Buffer{}
+ err = t.Execute(&buf, map[string]string{
+ "Dir": s.dir(),
+ "Language": "%s",
+ "Ext": ext,
+ })
+ return filepath.FromSlash(buf.String()), wrap(err, "incorrect OutPattern")
+}
+
+var transRE = regexp.MustCompile(`.*\.` + gotextSuffix)
+
+// Import loads existing translation files.
+func (s *State) Import() error {
+ outPattern, err := outPattern(s)
+ if err != nil {
+ return err
+ }
+ re := transRE
+ if pat := s.Config.TranslationsPattern; pat != "" {
+ if re, err = regexp.Compile(pat); err != nil {
+ return wrapf(err, "error parsing regexp %q", s.Config.TranslationsPattern)
+ }
+ }
+ x := importer{s, outPattern, re}
+ return x.walkImport(s.dir(), s.Config.SourceLanguage)
+}
+
+type importer struct {
+ state *State
+ outPattern string
+ transFile *regexp.Regexp
+}
+
+func (i *importer) walkImport(path string, tag language.Tag) error {
+ files, err := ioutil.ReadDir(path)
+ if err != nil {
+ return nil
+ }
+ for _, f := range files {
+ name := f.Name()
+ tag := tag
+ if f.IsDir() {
+ if t, err := language.Parse(name); err == nil {
+ tag = t
+ }
+ // We ignore errors
+ if err := i.walkImport(filepath.Join(path, name), tag); err != nil {
+ return err
+ }
+ continue
+ }
+ for _, l := range strings.Split(name, ".") {
+ if t, err := language.Parse(l); err == nil {
+ tag = t
+ }
+ }
+ file := filepath.Join(path, name)
+ // TODO: Should we skip files that match output files?
+ if fmt.Sprintf(i.outPattern, tag) == file {
+ continue
+ }
+ // TODO: handle different file formats.
+ if !i.transFile.MatchString(name) {
+ continue
+ }
+ b, err := ioutil.ReadFile(file)
+ if err != nil {
+ return wrap(err, "read file failed")
+ }
+ var translations Messages
+ if err := json.Unmarshal(b, &translations); err != nil {
+ return wrap(err, "parsing translation file failed")
+ }
+ i.state.Translations = append(i.state.Translations, translations)
+ }
+ return nil
+}
+
+// Merge merges the extracted messages with the existing translations.
+func (s *State) Merge() error {
+ if s.Messages != nil {
+ panic("already merged")
+ }
+ // Create an index for each unique message.
+ // Duplicates are okay as long as the substitution arguments are okay as
+ // well.
+ // Top-level messages are okay to appear in multiple substitution points.
+
+ // Collect key equivalence.
+ msgs := []*Message{}
+ keyToIDs := map[string]*Message{}
+ for _, m := range s.Extracted.Messages {
+ m := m
+ if prev, ok := keyToIDs[m.Key]; ok {
+ if err := checkEquivalence(&m, prev); err != nil {
+ warnf("Key %q matches conflicting messages: %v and %v", m.Key, prev.ID, m.ID)
+ // TODO: track enough information so that the rewriter can
+ // suggest/disambiguate messages.
+ }
+ // TODO: add position to message.
+ continue
+ }
+ i := len(msgs)
+ msgs = append(msgs, &m)
+ keyToIDs[m.Key] = msgs[i]
+ }
+
+ // Messages with different keys may still refer to the same translated
+ // message (e.g. different whitespace). Filter these.
+ idMap := map[string]bool{}
+ filtered := []*Message{}
+ for _, m := range msgs {
+ found := false
+ for _, id := range m.ID {
+ found = found || idMap[id]
+ }
+ if !found {
+ filtered = append(filtered, m)
+ }
+ for _, id := range m.ID {
+ idMap[id] = true
+ }
+ }
+
+ // Build index of translations.
+ translations := map[language.Tag]map[string]Message{}
+ languages := append([]language.Tag{}, s.Config.Supported...)
+
+ for _, t := range s.Translations {
+ tag := t.Language
+ if _, ok := translations[tag]; !ok {
+ translations[tag] = map[string]Message{}
+ languages = append(languages, tag)
+ }
+ for _, m := range t.Messages {
+ if !m.Translation.IsEmpty() {
+ for _, id := range m.ID {
+ if _, ok := translations[tag][id]; ok {
+ warnf("Duplicate translation in locale %q for message %q", tag, id)
+ }
+ translations[tag][id] = m
+ }
+ }
+ }
+ }
+ languages = internal.UniqueTags(languages)
+
+ for _, tag := range languages {
+ ms := Messages{Language: tag}
+ for _, orig := range filtered {
+ m := *orig
+ m.Key = ""
+ m.Position = ""
+
+ for _, id := range m.ID {
+ if t, ok := translations[tag][id]; ok {
+ m.Translation = t.Translation
+ if t.TranslatorComment != "" {
+ m.TranslatorComment = t.TranslatorComment
+ m.Fuzzy = t.Fuzzy
+ }
+ break
+ }
+ }
+ if tag == s.Config.SourceLanguage && m.Translation.IsEmpty() {
+ m.Translation = m.Message
+ if m.TranslatorComment == "" {
+ m.TranslatorComment = "Copied from source."
+ m.Fuzzy = true
+ }
+ }
+ // TODO: if translation is empty: pre-expand based on available
+ // linguistic features. This may also be done as a plugin.
+ ms.Messages = append(ms.Messages, m)
+ }
+ s.Messages = append(s.Messages, ms)
+ }
+ return nil
+}
+
+// Export writes out the messages to translation out files.
+func (s *State) Export() error {
+ path, err := outPattern(s)
+ if err != nil {
+ return wrap(err, "export failed")
+ }
+ for _, out := range s.Messages {
+ // TODO: inject translations from existing files to avoid retranslation.
+ data, err := json.MarshalIndent(out, "", " ")
+ if err != nil {
+ return wrap(err, "JSON marshal failed")
+ }
+ file := fmt.Sprintf(path, out.Language)
+ if err := os.MkdirAll(filepath.Dir(file), 0755); err != nil {
+ return wrap(err, "dir create failed")
+ }
+ if err := ioutil.WriteFile(file, data, 0644); err != nil {
+ return wrap(err, "write failed")
+ }
+ }
+ return nil
+}
+
+var (
+ ws = runes.In(unicode.White_Space).Contains
+ notWS = runes.NotIn(unicode.White_Space).Contains
+)
+
+func trimWS(s string) (trimmed, leadWS, trailWS string) {
+ trimmed = strings.TrimRightFunc(s, ws)
+ trailWS = s[len(trimmed):]
+ if i := strings.IndexFunc(trimmed, notWS); i > 0 {
+ leadWS = trimmed[:i]
+ trimmed = trimmed[i:]
+ }
+ return trimmed, leadWS, trailWS
+}
+
+// NOTE: The command line tool already prefixes with "gotext:".
+var (
+ wrap = func(err error, msg string) error {
+ if err == nil {
+ return nil
+ }
+ return fmt.Errorf("%s: %v", msg, err)
+ }
+ wrapf = func(err error, msg string, args ...interface{}) error {
+ if err == nil {
+ return nil
+ }
+ return wrap(err, fmt.Sprintf(msg, args...))
+ }
+ errorf = fmt.Errorf
+)
+
+func warnf(format string, args ...interface{}) {
+ // TODO: don't log.
+ log.Printf(format, args...)
+}
+
+func loadPackages(conf *loader.Config, args []string) (*loader.Program, error) {
+ if len(args) == 0 {
+ args = []string{"."}
+ }
+
+ conf.Build = &build.Default
+ conf.ParserMode = parser.ParseComments
+
+ // Use the initial packages from the command line.
+ args, err := conf.FromArgs(args, false)
+ if err != nil {
+ return nil, wrap(err, "loading packages failed")
+ }
+
+ // Load, parse and type-check the whole program.
+ return conf.Load()
+}
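
Taken together, the State methods above form the extract → merge → export → generate flow driven by the gotext tool. A rough sketch with made-up configuration values, assuming the package's Extract entry point (defined in extract.go, not shown in this hunk):

	package main

	import (
		"log"

		"golang.org/x/text/language"
		"golang.org/x/text/message/pipeline"
	)

	func main() {
		conf := &pipeline.Config{
			SourceLanguage: language.English,
			Supported:      []language.Tag{language.English, language.Dutch},
			Packages:       []string{"./..."},
			Dir:            "locales",
			GenFile:        "catalog.go",
		}
		s, err := pipeline.Extract(conf) // scan the packages for printer calls
		if err != nil {
			log.Fatal(err)
		}
		if err := s.Import(); err != nil { // load existing translations under Dir
			log.Fatal(err)
		}
		if err := s.Merge(); err != nil { // combine extracted messages with translations
			log.Fatal(err)
		}
		if err := s.Export(); err != nil { // write per-language out.gotext.json files
			log.Fatal(err)
		}
		if err := s.Generate(); err != nil { // emit the Go catalog into GenFile
			log.Fatal(err)
		}
	}
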
diff --git a/vendor/golang.org/x/text/message/pipeline/rewrite.go b/vendor/golang.org/x/text/message/pipeline/rewrite.go
new file mode 100644
index 0000000..cf1511f
--- /dev/null
+++ b/vendor/golang.org/x/text/message/pipeline/rewrite.go
@@ -0,0 +1,268 @@
+// Copyright 2017 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package pipeline
+
+import (
+ "bytes"
+ "fmt"
+ "go/ast"
+ "go/constant"
+ "go/format"
+ "go/token"
+ "io"
+ "os"
+ "strings"
+
+ "golang.org/x/tools/go/loader"
+)
+
+const printerType = "golang.org/x/text/message.Printer"
+
+// Rewrite rewrites the Go files in a single package to use the localization
+// machinery and rewrites strings to adopt best practices when possible.
+// If w is not nil the generated files are written to it, each file with a
+// "--- <filename>" header. Otherwise the files are overwritten.
+func Rewrite(w io.Writer, args ...string) error {
+ conf := &loader.Config{
+ AllowErrors: true, // Allow unused instances of message.Printer.
+ }
+ prog, err := loadPackages(conf, args)
+ if err != nil {
+ return wrap(err, "")
+ }
+
+ for _, info := range prog.InitialPackages() {
+ for _, f := range info.Files {
+ // Associate comments with nodes.
+
+ // Pick up initialized Printers at the package level.
+ r := rewriter{info: info, conf: conf}
+ for _, n := range info.InitOrder {
+ if t := r.info.Types[n.Rhs].Type.String(); strings.HasSuffix(t, printerType) {
+ r.printerVar = n.Lhs[0].Name()
+ }
+ }
+
+ ast.Walk(&r, f)
+
+ w := w
+ if w == nil {
+ var err error
+ if w, err = os.Create(conf.Fset.File(f.Pos()).Name()); err != nil {
+ return wrap(err, "open failed")
+ }
+ } else {
+ fmt.Fprintln(w, "---", conf.Fset.File(f.Pos()).Name())
+ }
+
+ if err := format.Node(w, conf.Fset, f); err != nil {
+ return wrap(err, "go format failed")
+ }
+ }
+ }
+
+ return nil
+}
+
+type rewriter struct {
+ info *loader.PackageInfo
+ conf *loader.Config
+ printerVar string
+}
+
+// print returns Go syntax for the specified node.
+func (r *rewriter) print(n ast.Node) string {
+ var buf bytes.Buffer
+ format.Node(&buf, r.conf.Fset, n)
+ return buf.String()
+}
+
+func (r *rewriter) Visit(n ast.Node) ast.Visitor {
+ // Save the state by scope.
+ if _, ok := n.(*ast.BlockStmt); ok {
+ r := *r
+ return &r
+ }
+ // Find Printers created by assignment.
+ stmt, ok := n.(*ast.AssignStmt)
+ if ok {
+ for _, v := range stmt.Lhs {
+ if r.printerVar == r.print(v) {
+ r.printerVar = ""
+ }
+ }
+ for i, v := range stmt.Rhs {
+ if t := r.info.Types[v].Type.String(); strings.HasSuffix(t, printerType) {
+ r.printerVar = r.print(stmt.Lhs[i])
+ return r
+ }
+ }
+ }
+ // Find Printers created by variable declaration.
+ spec, ok := n.(*ast.ValueSpec)
+ if ok {
+ for _, v := range spec.Names {
+ if r.printerVar == r.print(v) {
+ r.printerVar = ""
+ }
+ }
+ for i, v := range spec.Values {
+ if t := r.info.Types[v].Type.String(); strings.HasSuffix(t, printerType) {
+ r.printerVar = r.print(spec.Names[i])
+ return r
+ }
+ }
+ }
+ if r.printerVar == "" {
+ return r
+ }
+ call, ok := n.(*ast.CallExpr)
+ if !ok {
+ return r
+ }
+
+ // TODO: Handle literal values?
+ sel, ok := call.Fun.(*ast.SelectorExpr)
+ if !ok {
+ return r
+ }
+ meth := r.info.Selections[sel]
+
+ source := r.print(sel.X)
+ fun := r.print(sel.Sel)
+ if meth != nil {
+ source = meth.Recv().String()
+ fun = meth.Obj().Name()
+ }
+
+ // TODO: remove cheap hack and check if the type either
+ // implements some interface or is specifically of type
+ // "golang.org/x/text/message".Printer.
+ m, ok := rewriteFuncs[source]
+ if !ok {
+ return r
+ }
+
+ rewriteType, ok := m[fun]
+ if !ok {
+ return r
+ }
+ ident := ast.NewIdent(r.printerVar)
+ ident.NamePos = sel.X.Pos()
+ sel.X = ident
+ if rewriteType.method != "" {
+ sel.Sel.Name = rewriteType.method
+ }
+
+ // Analyze arguments.
+ argn := rewriteType.arg
+ if rewriteType.format || argn >= len(call.Args) {
+ return r
+ }
+ hasConst := false
+ for _, a := range call.Args[argn:] {
+ if v := r.info.Types[a].Value; v != nil && v.Kind() == constant.String {
+ hasConst = true
+ break
+ }
+ }
+ if !hasConst {
+ return r
+ }
+ sel.Sel.Name = rewriteType.methodf
+
+ // We are done if there is only a single string that does not need to be
+ // escaped.
+ if len(call.Args) == 1 {
+ s, ok := constStr(r.info, call.Args[0])
+ if ok && !strings.Contains(s, "%") && !rewriteType.newLine {
+ return r
+ }
+ }
+
+ // Rewrite arguments as format string.
+ expr := &ast.BasicLit{
+ ValuePos: call.Lparen,
+ Kind: token.STRING,
+ }
+ newArgs := append(call.Args[:argn:argn], expr)
+ newStr := []string{}
+ for i, a := range call.Args[argn:] {
+ if s, ok := constStr(r.info, a); ok {
+ newStr = append(newStr, strings.Replace(s, "%", "%%", -1))
+ } else {
+ newStr = append(newStr, "%v")
+ newArgs = append(newArgs, call.Args[argn+i])
+ }
+ }
+ s := strings.Join(newStr, rewriteType.sep)
+ if rewriteType.newLine {
+ s += "\n"
+ }
+ expr.Value = fmt.Sprintf("%q", s)
+
+ call.Args = newArgs
+
+ // TODO: consider creating an expression instead of a constant string and
+ // then wrapping it in an escape function or so:
+ // call.Args[argn+i] = &ast.CallExpr{
+ // Fun: &ast.SelectorExpr{
+ // X: ast.NewIdent("message"),
+ // Sel: ast.NewIdent("Lookup"),
+ // },
+ // Args: []ast.Expr{a},
+ // }
+ // }
+
+ return r
+}
+
+type rewriteType struct {
+ // method is the name of the equivalent method on a printer, or "" if it is
+ // the same.
+ method string
+
+ // methodf is the method to use if the arguments can be rewritten as a
+ // arguments to a printf-style call.
+ methodf string
+
+ // format is true if the method takes a formatting string followed by
+ // substitution arguments.
+ format bool
+
+ // arg indicates the position of the argument to extract. If arg is
+ // positive, all arguments from this position onwards need to be extracted.
+ arg int
+
+ sep string
+ newLine bool
+}
+
+// rewriteFuncs lists functions that can be directly mapped to the printer
+// functions of the message package.
+var rewriteFuncs = map[string]map[string]rewriteType{
+ // TODO: Printer -> *golang.org/x/text/message.Printer
+ "fmt": {
+ "Print": rewriteType{methodf: "Printf"},
+ "Sprint": rewriteType{methodf: "Sprintf"},
+ "Fprint": rewriteType{methodf: "Fprintf"},
+
+ "Println": rewriteType{methodf: "Printf", sep: " ", newLine: true},
+ "Sprintln": rewriteType{methodf: "Sprintf", sep: " ", newLine: true},
+ "Fprintln": rewriteType{methodf: "Fprintf", sep: " ", newLine: true},
+
+ "Printf": rewriteType{method: "Printf", format: true},
+ "Sprintf": rewriteType{method: "Sprintf", format: true},
+ "Fprintf": rewriteType{method: "Fprintf", format: true},
+ },
+}
+
+func constStr(info *loader.PackageInfo, e ast.Expr) (s string, ok bool) {
+ v := info.Types[e].Value
+ if v == nil || v.Kind() != constant.String {
+ return "", false
+ }
+ return constant.StringVal(v), true
+}
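
A hedged sketch of the effect of Rewrite, following the rewriteFuncs table above: fmt calls are redirected to an in-scope message.Printer, constant string arguments are folded into a format string, and non-constant arguments become %v verbs. The package path and variable names are hypothetical.

	// Input (a file in the package passed to Rewrite):
	p := message.NewPrinter(language.English)
	fmt.Println("Hello", name)

	// After pipeline.Rewrite(os.Stdout, "example.com/myapp") the rewritten file
	// is printed under a "--- <filename>" header, roughly as:
	p := message.NewPrinter(language.English)
	p.Printf("Hello %v\n", name)
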
diff --git a/vendor/golang.org/x/text/runes/cond.go b/vendor/golang.org/x/text/runes/cond.go
new file mode 100644
index 0000000..df7aa02
--- /dev/null
+++ b/vendor/golang.org/x/text/runes/cond.go
@@ -0,0 +1,187 @@
+// Copyright 2015 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package runes
+
+import (
+ "unicode/utf8"
+
+ "golang.org/x/text/transform"
+)
+
+// Note: below we pass invalid UTF-8 to the tIn and tNotIn transformers as is.
+// This is done for various reasons:
+// - To retain the semantics of the Nop transformer: if input is passed to a Nop
+// one would expect it to be unchanged.
+// - It would be very expensive to pass a converted RuneError to a transformer:
+// a transformer might need more source bytes after RuneError, meaning that
+// the only way to pass it safely is to create a new buffer and manage the
+// intermingling of RuneErrors and normal input.
+// - Many transformers leave ill-formed UTF-8 as is, so this is not
+// inconsistent. Generally ill-formed UTF-8 is only replaced if it is a
+// logical consequence of the operation (as for Map) or if it otherwise would
+// pose security concerns (as for Remove).
+// - An alternative would be to return an error on ill-formed UTF-8, but this
+// would be inconsistent with other operations.
+
+// If returns a transformer that applies tIn to consecutive runes for which
+// s.Contains(r) and tNotIn to consecutive runes for which !s.Contains(r). Reset
+// is called on tIn and tNotIn at the start of each run. A nil value passed as
+// tIn or tNotIn is replaced by a Nop transformer. Invalid UTF-8 is translated
+// to RuneError to determine which transformer to apply, but is passed as is to
+// the respective transformer.
+func If(s Set, tIn, tNotIn transform.Transformer) Transformer {
+ if tIn == nil && tNotIn == nil {
+ return Transformer{transform.Nop}
+ }
+ if tIn == nil {
+ tIn = transform.Nop
+ }
+ if tNotIn == nil {
+ tNotIn = transform.Nop
+ }
+ sIn, ok := tIn.(transform.SpanningTransformer)
+ if !ok {
+ sIn = dummySpan{tIn}
+ }
+ sNotIn, ok := tNotIn.(transform.SpanningTransformer)
+ if !ok {
+ sNotIn = dummySpan{tNotIn}
+ }
+
+ a := &cond{
+ tIn: sIn,
+ tNotIn: sNotIn,
+ f: s.Contains,
+ }
+ a.Reset()
+ return Transformer{a}
+}
+
+type dummySpan struct{ transform.Transformer }
+
+func (d dummySpan) Span(src []byte, atEOF bool) (n int, err error) {
+ return 0, transform.ErrEndOfSpan
+}
+
+type cond struct {
+ tIn, tNotIn transform.SpanningTransformer
+ f func(rune) bool
+ check func(rune) bool // current check to perform
+ t transform.SpanningTransformer // current transformer to use
+}
+
+// Reset implements transform.Transformer.
+func (t *cond) Reset() {
+ t.check = t.is
+ t.t = t.tIn
+ t.t.Reset() // notIn will be reset on first usage.
+}
+
+func (t *cond) is(r rune) bool {
+ if t.f(r) {
+ return true
+ }
+ t.check = t.isNot
+ t.t = t.tNotIn
+ t.tNotIn.Reset()
+ return false
+}
+
+func (t *cond) isNot(r rune) bool {
+ if !t.f(r) {
+ return true
+ }
+ t.check = t.is
+ t.t = t.tIn
+ t.tIn.Reset()
+ return false
+}
+
+// This implementation of Span doesn't help all that much, but it needs to be
+// there to satisfy this package's Transformer interface.
+// TODO: there is certainly room for improvement here. For example, if
+// t.t == transform.Nop (which will be a common occurrence) it will save a
+// bundle to special-case that loop.
+func (t *cond) Span(src []byte, atEOF bool) (n int, err error) {
+ p := 0
+ for n < len(src) && err == nil {
+ // Don't process too much at a time as the Spanner that will be
+ // called on this block may terminate early.
+ const maxChunk = 4096
+ max := len(src)
+ if v := n + maxChunk; v < max {
+ max = v
+ }
+ atEnd := false
+ size := 0
+ current := t.t
+ for ; p < max; p += size {
+ r := rune(src[p])
+ if r < utf8.RuneSelf {
+ size = 1
+ } else if r, size = utf8.DecodeRune(src[p:]); size == 1 {
+ if !atEOF && !utf8.FullRune(src[p:]) {
+ err = transform.ErrShortSrc
+ break
+ }
+ }
+ if !t.check(r) {
+ // The next rune will be the start of a new run.
+ atEnd = true
+ break
+ }
+ }
+ n2, err2 := current.Span(src[n:p], atEnd || (atEOF && p == len(src)))
+ n += n2
+ if err2 != nil {
+ return n, err2
+ }
+ // At this point either err != nil or t.check will pass for the rune at p.
+ p = n + size
+ }
+ return n, err
+}
+
+func (t *cond) Transform(dst, src []byte, atEOF bool) (nDst, nSrc int, err error) {
+ p := 0
+ for nSrc < len(src) && err == nil {
+ // Don't process too much at a time, as the work might be wasted if the
+ // destination buffer isn't large enough to hold the result or a
+ // transform returns an error early.
+ const maxChunk = 4096
+ max := len(src)
+ if n := nSrc + maxChunk; n < len(src) {
+ max = n
+ }
+ atEnd := false
+ size := 0
+ current := t.t
+ for ; p < max; p += size {
+ r := rune(src[p])
+ if r < utf8.RuneSelf {
+ size = 1
+ } else if r, size = utf8.DecodeRune(src[p:]); size == 1 {
+ if !atEOF && !utf8.FullRune(src[p:]) {
+ err = transform.ErrShortSrc
+ break
+ }
+ }
+ if !t.check(r) {
+ // The next rune will be the start of a new run.
+ atEnd = true
+ break
+ }
+ }
+ nDst2, nSrc2, err2 := current.Transform(dst[nDst:], src[nSrc:p], atEnd || (atEOF && p == len(src)))
+ nDst += nDst2
+ nSrc += nSrc2
+ if err2 != nil {
+ return nDst, nSrc, err2
+ }
+ // At this point either err != nil or t.check will pass for the rune at p.
+ p = nSrc + size
+ }
+ return nDst, nSrc, err
+}
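
A small usage sketch for If, assuming golang.org/x/text/cases for the inner transformer: apply a case mapping only to the Latin-script runs of the input and pass everything else through unchanged.

	package main

	import (
		"fmt"
		"unicode"

		"golang.org/x/text/cases"
		"golang.org/x/text/language"
		"golang.org/x/text/runes"
		"golang.org/x/text/transform"
	)

	func main() {
		t := runes.If(runes.In(unicode.Latin), cases.Upper(language.Und), nil)
		s, _, err := transform.String(t, "hello, κόσμε")
		fmt.Println(s, err) // HELLO, κόσμε <nil>
	}
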
diff --git a/vendor/golang.org/x/text/runes/runes.go b/vendor/golang.org/x/text/runes/runes.go
new file mode 100644
index 0000000..930e87f
--- /dev/null
+++ b/vendor/golang.org/x/text/runes/runes.go
@@ -0,0 +1,355 @@
+// Copyright 2014 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package runes provides transforms for UTF-8 encoded text.
+package runes // import "golang.org/x/text/runes"
+
+import (
+ "unicode"
+ "unicode/utf8"
+
+ "golang.org/x/text/transform"
+)
+
+// A Set is a collection of runes.
+type Set interface {
+ // Contains returns true if r is contained in the set.
+ Contains(r rune) bool
+}
+
+type setFunc func(rune) bool
+
+func (s setFunc) Contains(r rune) bool {
+ return s(r)
+}
+
+// Note: using funcs here instead of wrapping types results in cleaner
+// documentation and a smaller API.
+
+// In creates a Set with a Contains method that returns true for all runes in
+// the given RangeTable.
+func In(rt *unicode.RangeTable) Set {
+ return setFunc(func(r rune) bool { return unicode.Is(rt, r) })
+}
+
+// NotIn creates a Set with a Contains method that returns true for all runes not
+// in the given RangeTable.
+func NotIn(rt *unicode.RangeTable) Set {
+ return setFunc(func(r rune) bool { return !unicode.Is(rt, r) })
+}
+
+// Predicate creates a Set with a Contains method that returns f(r).
+func Predicate(f func(rune) bool) Set {
+ return setFunc(f)
+}
+
+// Transformer implements the transform.Transformer interface.
+type Transformer struct {
+ t transform.SpanningTransformer
+}
+
+func (t Transformer) Transform(dst, src []byte, atEOF bool) (nDst, nSrc int, err error) {
+ return t.t.Transform(dst, src, atEOF)
+}
+
+func (t Transformer) Span(b []byte, atEOF bool) (n int, err error) {
+ return t.t.Span(b, atEOF)
+}
+
+func (t Transformer) Reset() { t.t.Reset() }
+
+// Bytes returns a new byte slice with the result of converting b using t. It
+// calls Reset on t. It returns nil if any error was found. This can only happen
+// if an error-producing Transformer is passed to If.
+func (t Transformer) Bytes(b []byte) []byte {
+ b, _, err := transform.Bytes(t, b)
+ if err != nil {
+ return nil
+ }
+ return b
+}
+
+// String returns a string with the result of converting s using t. It calls
+// Reset on t. It returns the empty string if any error was found. This can only
+// happen if an error-producing Transformer is passed to If.
+func (t Transformer) String(s string) string {
+ s, _, err := transform.String(t, s)
+ if err != nil {
+ return ""
+ }
+ return s
+}
+
+// TODO:
+// - Copy: copying strings and bytes in whole-rune units.
+// - Validation (maybe)
+// - Well-formed-ness (maybe)
+
+const runeErrorString = string(utf8.RuneError)
+
+// Remove returns a Transformer that removes runes r for which s.Contains(r).
+// Illegal input bytes are replaced by RuneError before being passed to s.Contains.
+func Remove(s Set) Transformer {
+ if f, ok := s.(setFunc); ok {
+ // This little trick cuts the running time of BenchmarkRemove for sets
+ // created by Predicate roughly in half.
+ // TODO: special-case RangeTables as well.
+ return Transformer{remove(f)}
+ }
+ return Transformer{remove(s.Contains)}
+}
+
+// TODO: remove transform.RemoveFunc.
+
+type remove func(r rune) bool
+
+func (remove) Reset() {}
+
+// Span implements transform.Spanner.
+func (t remove) Span(src []byte, atEOF bool) (n int, err error) {
+ for r, size := rune(0), 0; n < len(src); {
+ if r = rune(src[n]); r < utf8.RuneSelf {
+ size = 1
+ } else if r, size = utf8.DecodeRune(src[n:]); size == 1 {
+ // Invalid rune.
+ if !atEOF && !utf8.FullRune(src[n:]) {
+ err = transform.ErrShortSrc
+ } else {
+ err = transform.ErrEndOfSpan
+ }
+ break
+ }
+ if t(r) {
+ err = transform.ErrEndOfSpan
+ break
+ }
+ n += size
+ }
+ return
+}
+
+// Transform implements transform.Transformer.
+func (t remove) Transform(dst, src []byte, atEOF bool) (nDst, nSrc int, err error) {
+ for r, size := rune(0), 0; nSrc < len(src); {
+ if r = rune(src[nSrc]); r < utf8.RuneSelf {
+ size = 1
+ } else if r, size = utf8.DecodeRune(src[nSrc:]); size == 1 {
+ // Invalid rune.
+ if !atEOF && !utf8.FullRune(src[nSrc:]) {
+ err = transform.ErrShortSrc
+ break
+ }
+ // We replace illegal bytes with RuneError. Not doing so might
+ // otherwise turn a sequence of invalid UTF-8 into valid UTF-8.
+ // The resulting byte sequence may subsequently contain runes
+ // for which t(r) is true that were passed unnoticed.
+ if !t(utf8.RuneError) {
+ if nDst+3 > len(dst) {
+ err = transform.ErrShortDst
+ break
+ }
+ dst[nDst+0] = runeErrorString[0]
+ dst[nDst+1] = runeErrorString[1]
+ dst[nDst+2] = runeErrorString[2]
+ nDst += 3
+ }
+ nSrc++
+ continue
+ }
+ if t(r) {
+ nSrc += size
+ continue
+ }
+ if nDst+size > len(dst) {
+ err = transform.ErrShortDst
+ break
+ }
+ for i := 0; i < size; i++ {
+ dst[nDst] = src[nSrc]
+ nDst++
+ nSrc++
+ }
+ }
+ return
+}
+
+// Map returns a Transformer that maps the runes in the input using the given
+// mapping. Illegal bytes in the input are converted to utf8.RuneError before
+// being passed to the mapping func.
+func Map(mapping func(rune) rune) Transformer {
+ return Transformer{mapper(mapping)}
+}
+
+type mapper func(rune) rune
+
+func (mapper) Reset() {}
+
+// Span implements transform.Spanner.
+func (t mapper) Span(src []byte, atEOF bool) (n int, err error) {
+ for r, size := rune(0), 0; n < len(src); n += size {
+ if r = rune(src[n]); r < utf8.RuneSelf {
+ size = 1
+ } else if r, size = utf8.DecodeRune(src[n:]); size == 1 {
+ // Invalid rune.
+ if !atEOF && !utf8.FullRune(src[n:]) {
+ err = transform.ErrShortSrc
+ } else {
+ err = transform.ErrEndOfSpan
+ }
+ break
+ }
+ if t(r) != r {
+ err = transform.ErrEndOfSpan
+ break
+ }
+ }
+ return n, err
+}
+
+// Transform implements transform.Transformer.
+func (t mapper) Transform(dst, src []byte, atEOF bool) (nDst, nSrc int, err error) {
+ var replacement rune
+ var b [utf8.UTFMax]byte
+
+ for r, size := rune(0), 0; nSrc < len(src); {
+ if r = rune(src[nSrc]); r < utf8.RuneSelf {
+ if replacement = t(r); replacement < utf8.RuneSelf {
+ if nDst == len(dst) {
+ err = transform.ErrShortDst
+ break
+ }
+ dst[nDst] = byte(replacement)
+ nDst++
+ nSrc++
+ continue
+ }
+ size = 1
+ } else if r, size = utf8.DecodeRune(src[nSrc:]); size == 1 {
+ // Invalid rune.
+ if !atEOF && !utf8.FullRune(src[nSrc:]) {
+ err = transform.ErrShortSrc
+ break
+ }
+
+ if replacement = t(utf8.RuneError); replacement == utf8.RuneError {
+ if nDst+3 > len(dst) {
+ err = transform.ErrShortDst
+ break
+ }
+ dst[nDst+0] = runeErrorString[0]
+ dst[nDst+1] = runeErrorString[1]
+ dst[nDst+2] = runeErrorString[2]
+ nDst += 3
+ nSrc++
+ continue
+ }
+ } else if replacement = t(r); replacement == r {
+ if nDst+size > len(dst) {
+ err = transform.ErrShortDst
+ break
+ }
+ for i := 0; i < size; i++ {
+ dst[nDst] = src[nSrc]
+ nDst++
+ nSrc++
+ }
+ continue
+ }
+
+ n := utf8.EncodeRune(b[:], replacement)
+
+ if nDst+n > len(dst) {
+ err = transform.ErrShortDst
+ break
+ }
+ for i := 0; i < n; i++ {
+ dst[nDst] = b[i]
+ nDst++
+ }
+ nSrc += size
+ }
+ return
+}
+
+// ReplaceIllFormed returns a transformer that replaces all input bytes that are
+// not part of a well-formed UTF-8 code sequence with utf8.RuneError.
+func ReplaceIllFormed() Transformer {
+ return Transformer{&replaceIllFormed{}}
+}
+
+type replaceIllFormed struct{ transform.NopResetter }
+
+func (t replaceIllFormed) Span(src []byte, atEOF bool) (n int, err error) {
+ for n < len(src) {
+ // ASCII fast path.
+ if src[n] < utf8.RuneSelf {
+ n++
+ continue
+ }
+
+ r, size := utf8.DecodeRune(src[n:])
+
+ // Look for a valid non-ASCII rune.
+ if r != utf8.RuneError || size != 1 {
+ n += size
+ continue
+ }
+
+ // Look for short source data.
+ if !atEOF && !utf8.FullRune(src[n:]) {
+ err = transform.ErrShortSrc
+ break
+ }
+
+ // We have an invalid rune.
+ err = transform.ErrEndOfSpan
+ break
+ }
+ return n, err
+}
+
+func (t replaceIllFormed) Transform(dst, src []byte, atEOF bool) (nDst, nSrc int, err error) {
+ for nSrc < len(src) {
+ // ASCII fast path.
+ if r := src[nSrc]; r < utf8.RuneSelf {
+ if nDst == len(dst) {
+ err = transform.ErrShortDst
+ break
+ }
+ dst[nDst] = r
+ nDst++
+ nSrc++
+ continue
+ }
+
+ // Look for a valid non-ASCII rune.
+ if _, size := utf8.DecodeRune(src[nSrc:]); size != 1 {
+ if size != copy(dst[nDst:], src[nSrc:nSrc+size]) {
+ err = transform.ErrShortDst
+ break
+ }
+ nDst += size
+ nSrc += size
+ continue
+ }
+
+ // Look for short source data.
+ if !atEOF && !utf8.FullRune(src[nSrc:]) {
+ err = transform.ErrShortSrc
+ break
+ }
+
+ // We have an invalid rune.
+ if nDst+3 > len(dst) {
+ err = transform.ErrShortDst
+ break
+ }
+ dst[nDst+0] = runeErrorString[0]
+ dst[nDst+1] = runeErrorString[1]
+ dst[nDst+2] = runeErrorString[2]
+ nDst += 3
+ nSrc++
+ }
+ return nDst, nSrc, err
+}
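
A common composition built on Remove, assuming golang.org/x/text/unicode/norm: decompose, strip the combining marks, and recompose, which removes diacritics from the input.

	package main

	import (
		"fmt"
		"unicode"

		"golang.org/x/text/runes"
		"golang.org/x/text/transform"
		"golang.org/x/text/unicode/norm"
	)

	func main() {
		t := transform.Chain(norm.NFD, runes.Remove(runes.In(unicode.Mn)), norm.NFC)
		s, _, err := transform.String(t, "résumé")
		fmt.Println(s, err) // resume <nil>
	}
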
diff --git a/vendor/golang.org/x/text/unicode/cldr/base.go b/vendor/golang.org/x/text/unicode/cldr/base.go
new file mode 100644
index 0000000..b71420c
--- /dev/null
+++ b/vendor/golang.org/x/text/unicode/cldr/base.go
@@ -0,0 +1,105 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package cldr
+
+import (
+ "encoding/xml"
+ "regexp"
+ "strconv"
+)
+
+// Elem is implemented by every XML element.
+type Elem interface {
+ setEnclosing(Elem)
+ setName(string)
+ enclosing() Elem
+
+ GetCommon() *Common
+}
+
+type hidden struct {
+ CharData string `xml:",chardata"`
+ Alias *struct {
+ Common
+ Source string `xml:"source,attr"`
+ Path string `xml:"path,attr"`
+ } `xml:"alias"`
+ Def *struct {
+ Common
+ Choice string `xml:"choice,attr,omitempty"`
+ Type string `xml:"type,attr,omitempty"`
+ } `xml:"default"`
+}
+
+// Common holds several of the most common attributes and sub elements
+// of an XML element.
+type Common struct {
+ XMLName xml.Name
+ name string
+ enclElem Elem
+ Type string `xml:"type,attr,omitempty"`
+ Reference string `xml:"reference,attr,omitempty"`
+ Alt string `xml:"alt,attr,omitempty"`
+ ValidSubLocales string `xml:"validSubLocales,attr,omitempty"`
+ Draft string `xml:"draft,attr,omitempty"`
+ hidden
+}
+
+// Default returns the default type to select from the enclosed list
+// or "" if no default value is specified.
+func (e *Common) Default() string {
+ if e.Def == nil {
+ return ""
+ }
+ if e.Def.Choice != "" {
+ return e.Def.Choice
+ } else if e.Def.Type != "" {
+ // Type is still used by the default element in collation.
+ return e.Def.Type
+ }
+ return ""
+}
+
+// Element returns the XML element name.
+func (e *Common) Element() string {
+ return e.name
+}
+
+// GetCommon returns e. It is provided such that Common implements Elem.
+func (e *Common) GetCommon() *Common {
+ return e
+}
+
+// Data returns the character data accumulated for this element.
+func (e *Common) Data() string {
+ e.CharData = charRe.ReplaceAllStringFunc(e.CharData, replaceUnicode)
+ return e.CharData
+}
+
+func (e *Common) setName(s string) {
+ e.name = s
+}
+
+func (e *Common) enclosing() Elem {
+ return e.enclElem
+}
+
+func (e *Common) setEnclosing(en Elem) {
+ e.enclElem = en
+}
+
+// charRe matches the escape sequences that can be replaced without further escaping of the string.
+var charRe = regexp.MustCompile(`&#x[0-9a-fA-F]*;|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|\\x[0-9a-fA-F]{2}|\\[0-7]{3}|\\[abtnvfr]`)
+
+// replaceUnicode converts hexadecimal Unicode codepoint notations to a one-rune string.
+// It assumes the input string is correctly formatted.
+func replaceUnicode(s string) string {
+ if s[1] == '#' {
+ r, _ := strconv.ParseInt(s[3:len(s)-1], 16, 32)
+ return string(rune(r))
+ }
+ r, _, _, _ := strconv.UnquoteChar(s, 0)
+ return string(r)
+}
diff --git a/vendor/golang.org/x/text/unicode/cldr/cldr.go b/vendor/golang.org/x/text/unicode/cldr/cldr.go
new file mode 100644
index 0000000..f39b2e3
--- /dev/null
+++ b/vendor/golang.org/x/text/unicode/cldr/cldr.go
@@ -0,0 +1,137 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:generate go run makexml.go -output xml.go
+
+// Package cldr provides a parser for LDML and related XML formats.
+//
+// This package is intended to be used by the table generation tools for the
+// various packages in x/text and is not internal for historical reasons.
+//
+// As the XML types are generated from the CLDR DTD, and as the CLDR standard is
+// periodically amended, this package may change considerably over time. This
+// mostly means that data may appear and disappear between versions. That is,
+// old code should keep compiling for newer versions, but data may have moved or
+// changed. CLDR version 22 is the first version supported by this package.
+// Older versions may not work.
+package cldr // import "golang.org/x/text/unicode/cldr"
+
+import (
+ "fmt"
+ "sort"
+)
+
+// CLDR provides access to parsed data of the Unicode Common Locale Data Repository.
+type CLDR struct {
+ parent map[string][]string
+ locale map[string]*LDML
+ resolved map[string]*LDML
+ bcp47 *LDMLBCP47
+ supp *SupplementalData
+}
+
+func makeCLDR() *CLDR {
+ return &CLDR{
+ parent: make(map[string][]string),
+ locale: make(map[string]*LDML),
+ resolved: make(map[string]*LDML),
+ bcp47: &LDMLBCP47{},
+ supp: &SupplementalData{},
+ }
+}
+
+// BCP47 returns the parsed BCP47 LDML data. If no such data was parsed, nil is returned.
+func (cldr *CLDR) BCP47() *LDMLBCP47 {
+ return nil
+}
+
+// Draft indicates the draft level of an element.
+type Draft int
+
+const (
+ Approved Draft = iota
+ Contributed
+ Provisional
+ Unconfirmed
+)
+
+var drafts = []string{"unconfirmed", "provisional", "contributed", "approved", ""}
+
+// ParseDraft returns the Draft value corresponding to the given string. The
+// empty string corresponds to Approved.
+func ParseDraft(level string) (Draft, error) {
+ if level == "" {
+ return Approved, nil
+ }
+ for i, s := range drafts {
+ if level == s {
+ return Unconfirmed - Draft(i), nil
+ }
+ }
+ return Approved, fmt.Errorf("cldr: unknown draft level %q", level)
+}
+
+func (d Draft) String() string {
+ return drafts[len(drafts)-1-int(d)]
+}
+
+// SetDraftLevel sets which draft levels to include in the evaluated LDML.
+// Any draft element for which the draft level is higher than lev will be excluded.
+// If multiple draft levels are available for a single element, the one with the
+// lowest draft level will be selected, unless preferDraft is true, in which case
+// the highest draft will be chosen.
+// It is assumed that the underlying LDML is canonicalized.
+func (cldr *CLDR) SetDraftLevel(lev Draft, preferDraft bool) {
+ // TODO: implement
+ cldr.resolved = make(map[string]*LDML)
+}
+
+// RawLDML returns the LDML XML for loc in unresolved form.
+// loc must be one of the strings returned by Locales.
+func (cldr *CLDR) RawLDML(loc string) *LDML {
+ return cldr.locale[loc]
+}
+
+// LDML returns the fully resolved LDML XML for loc, which must be one of
+// the strings returned by Locales.
+//
+// Deprecated: Use RawLDML and implement inheritance manually or using the
+// internal cldrtree package.
+// Inheritance has changed quite a bit since the onset of this package and in
+// practice data is often represented in a way where knowledge of how it was
+// inherited is relevant.
+func (cldr *CLDR) LDML(loc string) (*LDML, error) {
+ return cldr.resolve(loc)
+}
+
+// Supplemental returns the parsed supplemental data. If no such data was parsed,
+// nil is returned.
+func (cldr *CLDR) Supplemental() *SupplementalData {
+ return cldr.supp
+}
+
+// Locales returns the locales for which there exist files.
+// Valid sublocales for which there is no file are not included.
+// The root locale is always sorted first.
+func (cldr *CLDR) Locales() []string {
+ loc := []string{"root"}
+ hasRoot := false
+ for l := range cldr.locale {
+ if l == "root" {
+ hasRoot = true
+ continue
+ }
+ loc = append(loc, l)
+ }
+ sort.Strings(loc[1:])
+ if !hasRoot {
+ return loc[1:]
+ }
+ return loc
+}
+
+// Get returns the element obtained by following the XPath path starting at e.
+func Get(e Elem, path string) (res Elem, err error) {
+ return walkXPath(e, path)
+}
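+
+// A minimal usage sketch (not part of the upstream file): from a client
+// package, a CLDR archive unpacked into a directory (the name "cldr-core"
+// below is only a placeholder) could be decoded and inspected roughly like
+// this:
+//
+// var d cldr.Decoder
+// data, err := d.DecodePath("cldr-core")
+// if err != nil {
+// log.Fatal(err)
+// }
+// for _, loc := range data.Locales() {
+// ldml := data.RawLDML(loc) // unresolved form; see RawLDML above
+// fmt.Println(loc, ldml.Identity != nil)
+// }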
diff --git a/vendor/golang.org/x/text/unicode/cldr/collate.go b/vendor/golang.org/x/text/unicode/cldr/collate.go
new file mode 100644
index 0000000..5794ae4
--- /dev/null
+++ b/vendor/golang.org/x/text/unicode/cldr/collate.go
@@ -0,0 +1,363 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package cldr
+
+import (
+ "bufio"
+ "encoding/xml"
+ "errors"
+ "fmt"
+ "strconv"
+ "strings"
+ "unicode"
+ "unicode/utf8"
+)
+
+// RuleProcessor can be passed to Collation's Process method, which
+// parses the rules and calls the respective method for each rule found.
+type RuleProcessor interface {
+ Reset(anchor string, before int) error
+ Insert(level int, str, context, extend string) error
+ Index(id string)
+}
+
+const (
+ // cldrIndex is a Unicode-reserved sentinel value used to mark the start
+ // of a grouping within an index.
+ // We ignore any rule that starts with this rune.
+ // See https://unicode.org/reports/tr35/#Collation_Elements for details.
+ cldrIndex = "\uFDD0"
+
+ // specialAnchor is the format in which to represent logical reset positions,
+ // such as "first tertiary ignorable".
+ specialAnchor = "<%s/>"
+)
+
+// Process parses the rules for the tailorings of this collation
+// and calls the respective methods of p for each rule found.
+func (c Collation) Process(p RuleProcessor) (err error) {
+ if len(c.Cr) > 0 {
+ if len(c.Cr) > 1 {
+ return fmt.Errorf("multiple cr elements, want 0 or 1")
+ }
+ return processRules(p, c.Cr[0].Data())
+ }
+ if c.Rules.Any != nil {
+ return c.processXML(p)
+ }
+ return errors.New("no tailoring data")
+}
+
+// processRules parses rules in the Collation Rule Syntax defined in
+// https://www.unicode.org/reports/tr35/tr35-collation.html#Collation_Tailorings.
+func processRules(p RuleProcessor, s string) (err error) {
+ chk := func(s string, e error) string {
+ if err == nil {
+ err = e
+ }
+ return s
+ }
+ i := 0 // Save the line number for use after the loop.
+ scanner := bufio.NewScanner(strings.NewReader(s))
+ for ; scanner.Scan() && err == nil; i++ {
+ for s := skipSpace(scanner.Text()); s != "" && s[0] != '#'; s = skipSpace(s) {
+ level := 5
+ var ch byte
+ switch ch, s = s[0], s[1:]; ch {
+ case '&': // followed by <anchor> or '[' <key> ']'
+ if s = skipSpace(s); consume(&s, '[') {
+ s = chk(parseSpecialAnchor(p, s))
+ } else {
+ s = chk(parseAnchor(p, 0, s))
+ }
+ case '<': // sort relation '<'{1,4}, optionally followed by '*'.
+ for level = 1; consume(&s, '<'); level++ {
+ }
+ if level > 4 {
+ err = fmt.Errorf("level %d > 4", level)
+ }
+ fallthrough
+ case '=': // identity relation, optionally followed by *.
+ if consume(&s, '*') {
+ s = chk(parseSequence(p, level, s))
+ } else {
+ s = chk(parseOrder(p, level, s))
+ }
+ default:
+ chk("", fmt.Errorf("illegal operator %q", ch))
+ break
+ }
+ }
+ }
+ if chk("", scanner.Err()); err != nil {
+ return fmt.Errorf("%d: %v", i, err)
+ }
+ return nil
+}
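+
+// For example (not part of the upstream file), the rule text "&a < b << c = d"
+// results in the calls Reset("a", 0), Insert(1, "b", "", ""),
+// Insert(2, "c", "", "") and Insert(5, "d", "", "") on p, where level 5
+// denotes the identity relation.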
+
+// parseSpecialAnchor parses the anchor syntax which is either of the form
+//
+// ['before' <level>] <anchor>
+//
+// or
+//
+// [<label>]
+//
+// The starting '[' should already be consumed.
+func parseSpecialAnchor(p RuleProcessor, s string) (tail string, err error) {
+ i := strings.IndexByte(s, ']')
+ if i == -1 {
+ return "", errors.New("unmatched bracket")
+ }
+ a := strings.TrimSpace(s[:i])
+ s = s[i+1:]
+ if strings.HasPrefix(a, "before ") {
+ l, err := strconv.ParseUint(skipSpace(a[len("before "):]), 10, 3)
+ if err != nil {
+ return s, err
+ }
+ return parseAnchor(p, int(l), s)
+ }
+ return s, p.Reset(fmt.Sprintf(specialAnchor, a), 0)
+}
+
+func parseAnchor(p RuleProcessor, level int, s string) (tail string, err error) {
+ anchor, s, err := scanString(s)
+ if err != nil {
+ return s, err
+ }
+ return s, p.Reset(anchor, level)
+}
+
+func parseOrder(p RuleProcessor, level int, s string) (tail string, err error) {
+ var value, context, extend string
+ if value, s, err = scanString(s); err != nil {
+ return s, err
+ }
+ if strings.HasPrefix(value, cldrIndex) {
+ p.Index(value[len(cldrIndex):])
+ return
+ }
+ if consume(&s, '|') {
+ if context, s, err = scanString(s); err != nil {
+ return s, errors.New("missing string after context")
+ }
+ }
+ if consume(&s, '/') {
+ if extend, s, err = scanString(s); err != nil {
+ return s, errors.New("missing string after extension")
+ }
+ }
+ return s, p.Insert(level, value, context, extend)
+}
+
+// scanString scans a single input string.
+func scanString(s string) (str, tail string, err error) {
+ if s = skipSpace(s); s == "" {
+ return s, s, errors.New("missing string")
+ }
+ buf := [16]byte{} // small but enough to hold most cases.
+ value := buf[:0]
+ for s != "" {
+ if consume(&s, '\'') {
+ i := strings.IndexByte(s, '\'')
+ if i == -1 {
+ return "", "", errors.New(`unmatched single quote`)
+ }
+ if i == 0 {
+ value = append(value, '\'')
+ } else {
+ value = append(value, s[:i]...)
+ }
+ s = s[i+1:]
+ continue
+ }
+ r, sz := utf8.DecodeRuneInString(s)
+ if unicode.IsSpace(r) || strings.ContainsRune("&<=#", r) {
+ break
+ }
+ value = append(value, s[:sz]...)
+ s = s[sz:]
+ }
+ return string(value), skipSpace(s), nil
+}
+
+func parseSequence(p RuleProcessor, level int, s string) (tail string, err error) {
+ if s = skipSpace(s); s == "" {
+ return s, errors.New("empty sequence")
+ }
+ last := rune(0)
+ for s != "" {
+ r, sz := utf8.DecodeRuneInString(s)
+ s = s[sz:]
+
+ if r == '-' {
+ // We have a range. The first element was already written.
+ if last == 0 {
+ return s, errors.New("range without starter value")
+ }
+ r, sz = utf8.DecodeRuneInString(s)
+ s = s[sz:]
+ if r == utf8.RuneError || r < last {
+ return s, fmt.Errorf("invalid range %q-%q", last, r)
+ }
+ for i := last + 1; i <= r; i++ {
+ if err := p.Insert(level, string(i), "", ""); err != nil {
+ return s, err
+ }
+ }
+ last = 0
+ continue
+ }
+
+ if unicode.IsSpace(r) || unicode.IsPunct(r) {
+ break
+ }
+
+ // normal case
+ if err := p.Insert(level, string(r), "", ""); err != nil {
+ return s, err
+ }
+ last = r
+ }
+ return s, nil
+}
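+
+// For example (not part of the upstream file), the sequence "a-cx" at level 1
+// results in Insert calls for "a", "b", "c" and "x".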
+
+func skipSpace(s string) string {
+ return strings.TrimLeftFunc(s, unicode.IsSpace)
+}
+
+// consume returns whether the next byte is ch. If so, it gobbles it by
+// updating s.
+func consume(s *string, ch byte) (ok bool) {
+ if *s == "" || (*s)[0] != ch {
+ return false
+ }
+ *s = (*s)[1:]
+ return true
+}
+
+// The following code parses Collation rules of CLDR version 24 and before.
+
+var lmap = map[byte]int{
+ 'p': 1,
+ 's': 2,
+ 't': 3,
+ 'i': 5,
+}
+
+type rulesElem struct {
+ Rules struct {
+ Common
+ Any []*struct {
+ XMLName xml.Name
+ rule
+ } `xml:",any"`
+ } `xml:"rules"`
+}
+
+type rule struct {
+ Value string `xml:",chardata"`
+ Before string `xml:"before,attr"`
+ Any []*struct {
+ XMLName xml.Name
+ rule
+ } `xml:",any"`
+}
+
+var emptyValueError = errors.New("cldr: empty rule value")
+
+func (r *rule) value() (string, error) {
+ // Convert hexadecimal Unicode codepoint notation to a string.
+ s := charRe.ReplaceAllStringFunc(r.Value, replaceUnicode)
+ r.Value = s
+ if s == "" {
+ if len(r.Any) != 1 {
+ return "", emptyValueError
+ }
+ r.Value = fmt.Sprintf(specialAnchor, r.Any[0].XMLName.Local)
+ r.Any = nil
+ } else if len(r.Any) != 0 {
+ return "", fmt.Errorf("cldr: XML elements found in collation rule: %v", r.Any)
+ }
+ return r.Value, nil
+}
+
+func (r rule) process(p RuleProcessor, name, context, extend string) error {
+ v, err := r.value()
+ if err != nil {
+ return err
+ }
+ switch name {
+ case "p", "s", "t", "i":
+ if strings.HasPrefix(v, cldrIndex) {
+ p.Index(v[len(cldrIndex):])
+ return nil
+ }
+ if err := p.Insert(lmap[name[0]], v, context, extend); err != nil {
+ return err
+ }
+ case "pc", "sc", "tc", "ic":
+ level := lmap[name[0]]
+ for _, s := range v {
+ if err := p.Insert(level, string(s), context, extend); err != nil {
+ return err
+ }
+ }
+ default:
+ return fmt.Errorf("cldr: unsupported tag: %q", name)
+ }
+ return nil
+}
+
+// processXML parses the format of CLDR versions 24 and older.
+func (c Collation) processXML(p RuleProcessor) (err error) {
+ // Collation is generated and defined in xml.go.
+ var v string
+ for _, r := range c.Rules.Any {
+ switch r.XMLName.Local {
+ case "reset":
+ level := 0
+ switch r.Before {
+ case "primary", "1":
+ level = 1
+ case "secondary", "2":
+ level = 2
+ case "tertiary", "3":
+ level = 3
+ case "":
+ default:
+ return fmt.Errorf("cldr: unknown level %q", r.Before)
+ }
+ v, err = r.value()
+ if err == nil {
+ err = p.Reset(v, level)
+ }
+ case "x":
+ var context, extend string
+ for _, r1 := range r.Any {
+ v, err = r1.value()
+ switch r1.XMLName.Local {
+ case "context":
+ context = v
+ case "extend":
+ extend = v
+ }
+ }
+ for _, r1 := range r.Any {
+ if t := r1.XMLName.Local; t == "context" || t == "extend" {
+ continue
+ }
+ if err := r1.rule.process(p, r1.XMLName.Local, context, extend); err != nil {
+ return err
+ }
+ }
+ default:
+ err = r.rule.process(p, r.XMLName.Local, "", "")
+ }
+ if err != nil {
+ return err
+ }
+ }
+ return nil
+}
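+
+// An illustrative sketch (not part of the upstream file): Process drives a
+// caller-supplied RuleProcessor, so a minimal implementation that merely
+// prints each parsed rule could look like the following (the type name
+// rulePrinter is a placeholder chosen for this example):
+//
+// type rulePrinter struct{}
+//
+// func (rulePrinter) Reset(anchor string, before int) error {
+// fmt.Printf("reset at %q (before level %d)\n", anchor, before)
+// return nil
+// }
+//
+// func (rulePrinter) Insert(level int, str, context, extend string) error {
+// fmt.Printf("insert %q at level %d (context %q, extend %q)\n", str, level, context, extend)
+// return nil
+// }
+//
+// func (rulePrinter) Index(id string) { fmt.Printf("index %q\n", id) }
+//
+// Given a Collation c taken from a parsed LDML document, its tailoring rules
+// are then walked with:
+//
+// if err := c.Process(rulePrinter{}); err != nil {
+// // handle malformed tailoring data
+// }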
diff --git a/vendor/golang.org/x/text/unicode/cldr/decode.go b/vendor/golang.org/x/text/unicode/cldr/decode.go
new file mode 100644
index 0000000..7a8fb5a
--- /dev/null
+++ b/vendor/golang.org/x/text/unicode/cldr/decode.go
@@ -0,0 +1,171 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package cldr
+
+import (
+ "archive/zip"
+ "bytes"
+ "encoding/xml"
+ "fmt"
+ "io"
+ "log"
+ "os"
+ "path/filepath"
+ "regexp"
+)
+
+// A Decoder loads an archive of CLDR data.
+type Decoder struct {
+ dirFilter []string
+ sectionFilter []string
+ loader Loader
+ cldr *CLDR
+ curLocale string
+}
+
+// SetSectionFilter takes a list of top-level LDML element names to which
+// evaluation of LDML should be limited. It automatically calls SetDirFilter.
+func (d *Decoder) SetSectionFilter(filter ...string) {
+ d.sectionFilter = filter
+ // TODO: automatically set dir filter
+}
+
+// SetDirFilter limits the loading of LDML XML files to the specified directories.
+// Note that sections may be split across directories differently for different CLDR versions.
+// For more robust code, use SetSectionFilter.
+func (d *Decoder) SetDirFilter(dir ...string) {
+ d.dirFilter = dir
+}
+
+// A Loader provides access to the files of a CLDR archive.
+type Loader interface {
+ Len() int
+ Path(i int) string
+ Reader(i int) (io.ReadCloser, error)
+}
+
+var fileRe = regexp.MustCompile(`.*[/\\](.*)[/\\](.*)\.xml`)
+
+// Decode loads and decodes the files represented by l.
+func (d *Decoder) Decode(l Loader) (cldr *CLDR, err error) {
+ d.cldr = makeCLDR()
+ for i := 0; i < l.Len(); i++ {
+ fname := l.Path(i)
+ if m := fileRe.FindStringSubmatch(fname); m != nil {
+ if len(d.dirFilter) > 0 && !in(d.dirFilter, m[1]) {
+ continue
+ }
+ var r io.ReadCloser
+ if r, err = l.Reader(i); err == nil {
+ err = d.decode(m[1], m[2], r)
+ r.Close()
+ }
+ if err != nil {
+ return nil, err
+ }
+ }
+ }
+ d.cldr.finalize(d.sectionFilter)
+ return d.cldr, nil
+}
+
+func (d *Decoder) decode(dir, id string, r io.Reader) error {
+ var v interface{}
+ var l *LDML
+ cldr := d.cldr
+ switch {
+ case dir == "supplemental":
+ v = cldr.supp
+ case dir == "transforms":
+ return nil
+ case dir == "bcp47":
+ v = cldr.bcp47
+ case dir == "validity":
+ return nil
+ default:
+ ok := false
+ if v, ok = cldr.locale[id]; !ok {
+ l = &LDML{}
+ v, cldr.locale[id] = l, l
+ }
+ }
+ x := xml.NewDecoder(r)
+ if err := x.Decode(v); err != nil {
+ log.Printf("%s/%s: %v", dir, id, err)
+ return err
+ }
+ if l != nil {
+ if l.Identity == nil {
+ return fmt.Errorf("%s/%s: missing identity element", dir, id)
+ }
+ // TODO: verify when CLDR bug https://unicode.org/cldr/trac/ticket/8970
+ // is resolved.
+ // path := strings.Split(id, "_")
+ // if lang := l.Identity.Language.Type; lang != path[0] {
+ // return fmt.Errorf("%s/%s: language was %s; want %s", dir, id, lang, path[0])
+ // }
+ }
+ return nil
+}
+
+type pathLoader []string
+
+func makePathLoader(path string) (pl pathLoader, err error) {
+ err = filepath.Walk(path, func(path string, _ os.FileInfo, err error) error {
+ pl = append(pl, path)
+ return err
+ })
+ return pl, err
+}
+
+func (pl pathLoader) Len() int {
+ return len(pl)
+}
+
+func (pl pathLoader) Path(i int) string {
+ return pl[i]
+}
+
+func (pl pathLoader) Reader(i int) (io.ReadCloser, error) {
+ return os.Open(pl[i])
+}
+
+// DecodePath loads CLDR data from the given path.
+func (d *Decoder) DecodePath(path string) (cldr *CLDR, err error) {
+ loader, err := makePathLoader(path)
+ if err != nil {
+ return nil, err
+ }
+ return d.Decode(loader)
+}
+
+type zipLoader struct {
+ r *zip.Reader
+}
+
+func (zl zipLoader) Len() int {
+ return len(zl.r.File)
+}
+
+func (zl zipLoader) Path(i int) string {
+ return zl.r.File[i].Name
+}
+
+func (zl zipLoader) Reader(i int) (io.ReadCloser, error) {
+ return zl.r.File[i].Open()
+}
+
+// DecodeZip loads CLDR data from the zip archive for which r is the source.
+func (d *Decoder) DecodeZip(r io.Reader) (cldr *CLDR, err error) {
+ buffer, err := io.ReadAll(r)
+ if err != nil {
+ return nil, err
+ }
+ archive, err := zip.NewReader(bytes.NewReader(buffer), int64(len(buffer)))
+ if err != nil {
+ return nil, err
+ }
+ return d.Decode(zipLoader{archive})
+}
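+
+// A usage sketch (not part of the upstream file): the filters above can be
+// combined with DecodeZip to restrict decoding to a subset of a CLDR release;
+// the archive name "core.zip" is only a placeholder:
+//
+// f, err := os.Open("core.zip")
+// if err != nil {
+// log.Fatal(err)
+// }
+// defer f.Close()
+//
+// var d cldr.Decoder
+// d.SetDirFilter("main", "supplemental")
+// d.SetSectionFilter("numbers")
+// data, err := d.DecodeZip(f)
+// if err != nil {
+// log.Fatal(err)
+// }
+// _ = data.Supplemental() // supplemental data decoded from the archive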
diff --git a/vendor/golang.org/x/text/unicode/cldr/resolve.go b/vendor/golang.org/x/text/unicode/cldr/resolve.go
new file mode 100644
index 0000000..31cc7be
--- /dev/null
+++ b/vendor/golang.org/x/text/unicode/cldr/resolve.go
@@ -0,0 +1,602 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package cldr
+
+// This file implements the various inheritance constructs defined by LDML.
+// See https://www.unicode.org/reports/tr35/#Inheritance_and_Validity
+// for more details.
+
+import (
+ "fmt"
+ "log"
+ "reflect"
+ "regexp"
+ "sort"
+ "strings"
+)
+
+// fieldIter iterates over fields in a struct. It includes
+// fields of embedded structs.
+type fieldIter struct {
+ v reflect.Value
+ index, n []int
+}
+
+func iter(v reflect.Value) fieldIter {
+ if v.Kind() != reflect.Struct {
+ log.Panicf("value %v must be a struct", v)
+ }
+ i := fieldIter{
+ v: v,
+ index: []int{0},
+ n: []int{v.NumField()},
+ }
+ i.descent()
+ return i
+}
+
+func (i *fieldIter) descent() {
+ for f := i.field(); f.Anonymous && f.Type.NumField() > 0; f = i.field() {
+ i.index = append(i.index, 0)
+ i.n = append(i.n, f.Type.NumField())
+ }
+}
+
+func (i *fieldIter) done() bool {
+ return len(i.index) == 1 && i.index[0] >= i.n[0]
+}
+
+func skip(f reflect.StructField) bool {
+ return !f.Anonymous && (f.Name[0] < 'A' || f.Name[0] > 'Z')
+}
+
+func (i *fieldIter) next() {
+ for {
+ k := len(i.index) - 1
+ i.index[k]++
+ if i.index[k] < i.n[k] {
+ if !skip(i.field()) {
+ break
+ }
+ } else {
+ if k == 0 {
+ return
+ }
+ i.index = i.index[:k]
+ i.n = i.n[:k]
+ }
+ }
+ i.descent()
+}
+
+func (i *fieldIter) value() reflect.Value {
+ return i.v.FieldByIndex(i.index)
+}
+
+func (i *fieldIter) field() reflect.StructField {
+ return i.v.Type().FieldByIndex(i.index)
+}
+
+type visitor func(v reflect.Value) error
+
+var stopDescent = fmt.Errorf("do not recurse")
+
+func (f visitor) visit(x interface{}) error {
+ return f.visitRec(reflect.ValueOf(x))
+}
+
+// visit recursively calls f on all nodes in v.
+func (f visitor) visitRec(v reflect.Value) error {
+ if v.Kind() == reflect.Ptr {
+ if v.IsNil() {
+ return nil
+ }
+ return f.visitRec(v.Elem())
+ }
+ if err := f(v); err != nil {
+ if err == stopDescent {
+ return nil
+ }
+ return err
+ }
+ switch v.Kind() {
+ case reflect.Struct:
+ for i := iter(v); !i.done(); i.next() {
+ if err := f.visitRec(i.value()); err != nil {
+ return err
+ }
+ }
+ case reflect.Slice:
+ for i := 0; i < v.Len(); i++ {
+ if err := f.visitRec(v.Index(i)); err != nil {
+ return err
+ }
+ }
+ }
+ return nil
+}
+
+// getPath is used for error reporting purposes only.
+func getPath(e Elem) string {
+ if e == nil {
+ return "<nil>"
+ }
+ if e.enclosing() == nil {
+ return e.GetCommon().name
+ }
+ if e.GetCommon().Type == "" {
+ return fmt.Sprintf("%s.%s", getPath(e.enclosing()), e.GetCommon().name)
+ }
+ return fmt.Sprintf("%s.%s[type=%s]", getPath(e.enclosing()), e.GetCommon().name, e.GetCommon().Type)
+}
+
+// xmlName returns the XML name of the element or attribute.
+func xmlName(f reflect.StructField) (name string, attr bool) {
+ tags := strings.Split(f.Tag.Get("xml"), ",")
+ for _, s := range tags {
+ attr = attr || s == "attr"
+ }
+ return tags[0], attr
+}
+
+func findField(v reflect.Value, key string) (reflect.Value, error) {
+ v = reflect.Indirect(v)
+ for i := iter(v); !i.done(); i.next() {
+ if n, _ := xmlName(i.field()); n == key {
+ return i.value(), nil
+ }
+ }
+ return reflect.Value{}, fmt.Errorf("cldr: no field %q in element %#v", key, v.Interface())
+}
+
+var xpathPart = regexp.MustCompile(`(\pL+)(?:\[@(\pL+)='([\w-]+)'\])?`)
+
+func walkXPath(e Elem, path string) (res Elem, err error) {
+ for _, c := range strings.Split(path, "/") {
+ if c == ".." {
+ if e = e.enclosing(); e == nil {
+ return nil, fmt.Errorf(`cldr: ".." moves past root in path %q`, path)
+ }
+ continue
+ } else if c == "" {
+ continue
+ }
+ m := xpathPart.FindStringSubmatch(c)
+ if len(m) == 0 || len(m[0]) != len(c) {
+ return nil, fmt.Errorf("cldr: syntax error in path component %q", c)
+ }
+ v, err := findField(reflect.ValueOf(e), m[1])
+ if err != nil {
+ return nil, err
+ }
+ switch v.Kind() {
+ case reflect.Slice:
+ i := 0
+ if m[2] != "" || v.Len() > 1 {
+ if m[2] == "" {
+ m[2] = "type"
+ if m[3] = e.GetCommon().Default(); m[3] == "" {
+ return nil, fmt.Errorf("cldr: type selector or default value needed for element %s", m[1])
+ }
+ }
+ for ; i < v.Len(); i++ {
+ vi := v.Index(i)
+ key, err := findField(vi.Elem(), m[2])
+ if err != nil {
+ return nil, err
+ }
+ key = reflect.Indirect(key)
+ if key.Kind() == reflect.String && key.String() == m[3] {
+ break
+ }
+ }
+ }
+ if i == v.Len() || v.Index(i).IsNil() {
+ return nil, fmt.Errorf("no %s found with %s==%s", m[1], m[2], m[3])
+ }
+ e = v.Index(i).Interface().(Elem)
+ case reflect.Ptr:
+ if v.IsNil() {
+ return nil, fmt.Errorf("cldr: element %q not found within element %q", m[1], e.GetCommon().name)
+ }
+ var ok bool
+ if e, ok = v.Interface().(Elem); !ok {
+ return nil, fmt.Errorf("cldr: %q is not an XML element", m[1])
+ } else if m[2] != "" || m[3] != "" {
+ return nil, fmt.Errorf("cldr: no type selector allowed for element %s", m[1])
+ }
+ default:
+ return nil, fmt.Errorf("cldr: %q is not an XML element", m[1])
+ }
+ }
+ return e, nil
+}
+
+const absPrefix = "//ldml/"
+
+func (cldr *CLDR) resolveAlias(e Elem, src, path string) (res Elem, err error) {
+ if src != "locale" {
+ if !strings.HasPrefix(path, absPrefix) {
+ return nil, fmt.Errorf("cldr: expected absolute path, found %q", path)
+ }
+ path = path[len(absPrefix):]
+ if e, err = cldr.resolve(src); err != nil {
+ return nil, err
+ }
+ }
+ return walkXPath(e, path)
+}
+
+func (cldr *CLDR) resolveAndMergeAlias(e Elem) error {
+ alias := e.GetCommon().Alias
+ if alias == nil {
+ return nil
+ }
+ a, err := cldr.resolveAlias(e, alias.Source, alias.Path)
+ if err != nil {
+ return fmt.Errorf("%v: error evaluating path %q: %v", getPath(e), alias.Path, err)
+ }
+ // Ensure alias node was already evaluated. TODO: avoid double evaluation.
+ err = cldr.resolveAndMergeAlias(a)
+ v := reflect.ValueOf(e).Elem()
+ for i := iter(reflect.ValueOf(a).Elem()); !i.done(); i.next() {
+ if vv := i.value(); vv.Kind() != reflect.Ptr || !vv.IsNil() {
+ if _, attr := xmlName(i.field()); !attr {
+ v.FieldByIndex(i.index).Set(vv)
+ }
+ }
+ }
+ return err
+}
+
+func (cldr *CLDR) aliasResolver() visitor {
+ return func(v reflect.Value) (err error) {
+ if e, ok := v.Addr().Interface().(Elem); ok {
+ err = cldr.resolveAndMergeAlias(e)
+ if err == nil && blocking[e.GetCommon().name] {
+ return stopDescent
+ }
+ }
+ return err
+ }
+}
+
+// elements within blocking elements do not inherit.
+// Taken from CLDR's supplementalMetaData.xml.
+var blocking = map[string]bool{
+ "identity": true,
+ "supplementalData": true,
+ "cldrTest": true,
+ "collation": true,
+ "transform": true,
+}
+
+// Distinguishing attributes affect inheritance; two elements with different
+// distinguishing attributes are treated as different for purposes of inheritance,
+// except when such attributes occur in the indicated elements.
+// Taken from CLDR's supplementalMetaData.xml.
+var distinguishing = map[string][]string{
+ "key": nil,
+ "request_id": nil,
+ "id": nil,
+ "registry": nil,
+ "alt": nil,
+ "iso4217": nil,
+ "iso3166": nil,
+ "mzone": nil,
+ "from": nil,
+ "to": nil,
+ "type": []string{
+ "abbreviationFallback",
+ "default",
+ "mapping",
+ "measurementSystem",
+ "preferenceOrdering",
+ },
+ "numberSystem": nil,
+}
+
+func in(set []string, s string) bool {
+ for _, v := range set {
+ if v == s {
+ return true
+ }
+ }
+ return false
+}
+
+// attrKey computes a key based on the distinguishing attributes of
+// an element and its values.
+func attrKey(v reflect.Value, exclude ...string) string {
+ parts := []string{}
+ ename := v.Interface().(Elem).GetCommon().name
+ v = v.Elem()
+ for i := iter(v); !i.done(); i.next() {
+ if name, attr := xmlName(i.field()); attr {
+ if except, ok := distinguishing[name]; ok && !in(exclude, name) && !in(except, ename) {
+ v := i.value()
+ if v.Kind() == reflect.Ptr {
+ v = v.Elem()
+ }
+ if v.IsValid() {
+ parts = append(parts, fmt.Sprintf("%s=%s", name, v.String()))
+ }
+ }
+ }
+ }
+ sort.Strings(parts)
+ return strings.Join(parts, ";")
+}
+
+// Key returns a key for e derived from all distinguishing attributes
+// except those specified by exclude.
+func Key(e Elem, exclude ...string) string {
+ return attrKey(reflect.ValueOf(e), exclude...)
+}
+
+// linkEnclosing sets the enclosing element for all sub-elements of child,
+// recursively.
+func linkEnclosing(parent, child Elem) {
+ child.setEnclosing(parent)
+ v := reflect.ValueOf(child).Elem()
+ for i := iter(v); !i.done(); i.next() {
+ vf := i.value()
+ if vf.Kind() == reflect.Slice {
+ for j := 0; j < vf.Len(); j++ {
+ linkEnclosing(child, vf.Index(j).Interface().(Elem))
+ }
+ } else if vf.Kind() == reflect.Ptr && !vf.IsNil() && vf.Elem().Kind() == reflect.Struct {
+ linkEnclosing(child, vf.Interface().(Elem))
+ }
+ }
+}
+
+func setNames(e Elem, name string) {
+ e.setName(name)
+ v := reflect.ValueOf(e).Elem()
+ for i := iter(v); !i.done(); i.next() {
+ vf := i.value()
+ name, _ = xmlName(i.field())
+ if vf.Kind() == reflect.Slice {
+ for j := 0; j < vf.Len(); j++ {
+ setNames(vf.Index(j).Interface().(Elem), name)
+ }
+ } else if vf.Kind() == reflect.Ptr && !vf.IsNil() && vf.Elem().Kind() == reflect.Struct {
+ setNames(vf.Interface().(Elem), name)
+ }
+ }
+}
+
+// deepCopy copies elements of v recursively. All elements of v that may
+// be modified by inheritance are explicitly copied.
+func deepCopy(v reflect.Value) reflect.Value {
+ switch v.Kind() {
+ case reflect.Ptr:
+ if v.IsNil() || v.Elem().Kind() != reflect.Struct {
+ return v
+ }
+ nv := reflect.New(v.Elem().Type())
+ nv.Elem().Set(v.Elem())
+ deepCopyRec(nv.Elem(), v.Elem())
+ return nv
+ case reflect.Slice:
+ nv := reflect.MakeSlice(v.Type(), v.Len(), v.Len())
+ for i := 0; i < v.Len(); i++ {
+ deepCopyRec(nv.Index(i), v.Index(i))
+ }
+ return nv
+ }
+ panic("deepCopy: must be called with pointer or slice")
+}
+
+// deepCopyRec is only called by deepCopy.
+func deepCopyRec(nv, v reflect.Value) {
+ if v.Kind() == reflect.Struct {
+ t := v.Type()
+ for i := 0; i < v.NumField(); i++ {
+ if name, attr := xmlName(t.Field(i)); name != "" && !attr {
+ deepCopyRec(nv.Field(i), v.Field(i))
+ }
+ }
+ } else {
+ nv.Set(deepCopy(v))
+ }
+}
+
+// newNode is used to insert a missing node during inheritance.
+func (cldr *CLDR) newNode(v, enc reflect.Value) reflect.Value {
+ n := reflect.New(v.Type())
+ for i := iter(v); !i.done(); i.next() {
+ if name, attr := xmlName(i.field()); name == "" || attr {
+ n.Elem().FieldByIndex(i.index).Set(i.value())
+ }
+ }
+ n.Interface().(Elem).GetCommon().setEnclosing(enc.Addr().Interface().(Elem))
+ return n
+}
+
+// v, parent must be pointers to struct
+func (cldr *CLDR) inheritFields(v, parent reflect.Value) (res reflect.Value, err error) {
+ t := v.Type()
+ nv := reflect.New(t)
+ nv.Elem().Set(v)
+ for i := iter(v); !i.done(); i.next() {
+ vf := i.value()
+ f := i.field()
+ name, attr := xmlName(f)
+ if name == "" || attr {
+ continue
+ }
+ pf := parent.FieldByIndex(i.index)
+ if blocking[name] {
+ if vf.IsNil() {
+ vf = pf
+ }
+ nv.Elem().FieldByIndex(i.index).Set(deepCopy(vf))
+ continue
+ }
+ switch f.Type.Kind() {
+ case reflect.Ptr:
+ if f.Type.Elem().Kind() == reflect.Struct {
+ if !vf.IsNil() {
+ if vf, err = cldr.inheritStructPtr(vf, pf); err != nil {
+ return reflect.Value{}, err
+ }
+ vf.Interface().(Elem).setEnclosing(nv.Interface().(Elem))
+ nv.Elem().FieldByIndex(i.index).Set(vf)
+ } else if !pf.IsNil() {
+ n := cldr.newNode(pf.Elem(), v)
+ if vf, err = cldr.inheritStructPtr(n, pf); err != nil {
+ return reflect.Value{}, err
+ }
+ vf.Interface().(Elem).setEnclosing(nv.Interface().(Elem))
+ nv.Elem().FieldByIndex(i.index).Set(vf)
+ }
+ }
+ case reflect.Slice:
+ vf, err := cldr.inheritSlice(nv.Elem(), vf, pf)
+ if err != nil {
+ return reflect.Zero(t), err
+ }
+ nv.Elem().FieldByIndex(i.index).Set(vf)
+ }
+ }
+ return nv, nil
+}
+
+func root(e Elem) *LDML {
+ for ; e.enclosing() != nil; e = e.enclosing() {
+ }
+ return e.(*LDML)
+}
+
+// inheritStructPtr first merges possible aliases in with v and then inherits
+// any underspecified elements from parent.
+func (cldr *CLDR) inheritStructPtr(v, parent reflect.Value) (r reflect.Value, err error) {
+ if !v.IsNil() {
+ e := v.Interface().(Elem).GetCommon()
+ alias := e.Alias
+ if alias == nil && !parent.IsNil() {
+ alias = parent.Interface().(Elem).GetCommon().Alias
+ }
+ if alias != nil {
+ a, err := cldr.resolveAlias(v.Interface().(Elem), alias.Source, alias.Path)
+ if a != nil {
+ if v, err = cldr.inheritFields(v.Elem(), reflect.ValueOf(a).Elem()); err != nil {
+ return reflect.Value{}, err
+ }
+ }
+ }
+ if !parent.IsNil() {
+ return cldr.inheritFields(v.Elem(), parent.Elem())
+ }
+ } else if parent.IsNil() {
+ panic("should not reach here")
+ }
+ return v, nil
+}
+
+// Must be slice of struct pointers.
+func (cldr *CLDR) inheritSlice(enc, v, parent reflect.Value) (res reflect.Value, err error) {
+ t := v.Type()
+ index := make(map[string]reflect.Value)
+ if !v.IsNil() {
+ for i := 0; i < v.Len(); i++ {
+ vi := v.Index(i)
+ key := attrKey(vi)
+ index[key] = vi
+ }
+ }
+ if !parent.IsNil() {
+ for i := 0; i < parent.Len(); i++ {
+ vi := parent.Index(i)
+ key := attrKey(vi)
+ if w, ok := index[key]; ok {
+ index[key], err = cldr.inheritStructPtr(w, vi)
+ } else {
+ n := cldr.newNode(vi.Elem(), enc)
+ index[key], err = cldr.inheritStructPtr(n, vi)
+ }
+ index[key].Interface().(Elem).setEnclosing(enc.Addr().Interface().(Elem))
+ if err != nil {
+ return v, err
+ }
+ }
+ }
+ keys := make([]string, 0, len(index))
+ for k := range index {
+ keys = append(keys, k)
+ }
+ sort.Strings(keys)
+ sl := reflect.MakeSlice(t, len(index), len(index))
+ for i, k := range keys {
+ sl.Index(i).Set(index[k])
+ }
+ return sl, nil
+}
+
+func parentLocale(loc string) string {
+ parts := strings.Split(loc, "_")
+ if len(parts) == 1 {
+ return "root"
+ }
+ parts = parts[:len(parts)-1]
+ key := strings.Join(parts, "_")
+ return key
+}
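+
+// For example (not part of the upstream file), repeated calls to parentLocale
+// yield the fallback chain "sr_Latn_RS" -> "sr_Latn" -> "sr" -> "root".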
+
+func (cldr *CLDR) resolve(loc string) (res *LDML, err error) {
+ if r := cldr.resolved[loc]; r != nil {
+ return r, nil
+ }
+ x := cldr.RawLDML(loc)
+ if x == nil {
+ return nil, fmt.Errorf("cldr: unknown locale %q", loc)
+ }
+ var v reflect.Value
+ if loc == "root" {
+ x = deepCopy(reflect.ValueOf(x)).Interface().(*LDML)
+ linkEnclosing(nil, x)
+ err = cldr.aliasResolver().visit(x)
+ } else {
+ key := parentLocale(loc)
+ var parent *LDML
+ for ; cldr.locale[key] == nil; key = parentLocale(key) {
+ }
+ if parent, err = cldr.resolve(key); err != nil {
+ return nil, err
+ }
+ v, err = cldr.inheritFields(reflect.ValueOf(x).Elem(), reflect.ValueOf(parent).Elem())
+ x = v.Interface().(*LDML)
+ linkEnclosing(nil, x)
+ }
+ if err != nil {
+ return nil, err
+ }
+ cldr.resolved[loc] = x
+ return x, err
+}
+
+// finalize finalizes the initialization of the raw LDML structs. It also
+// removes unwanted fields, as specified by filter, so that they will not
+// be unnecessarily evaluated.
+func (cldr *CLDR) finalize(filter []string) {
+ for _, x := range cldr.locale {
+ if filter != nil {
+ v := reflect.ValueOf(x).Elem()
+ t := v.Type()
+ for i := 0; i < v.NumField(); i++ {
+ f := t.Field(i)
+ name, _ := xmlName(f)
+ if name != "" && name != "identity" && !in(filter, name) {
+ v.Field(i).Set(reflect.Zero(f.Type))
+ }
+ }
+ }
+ linkEnclosing(nil, x) // for resolving aliases and paths
+ setNames(x, "ldml")
+ }
+}
diff --git a/vendor/golang.org/x/text/unicode/cldr/slice.go b/vendor/golang.org/x/text/unicode/cldr/slice.go
new file mode 100644
index 0000000..388c983
--- /dev/null
+++ b/vendor/golang.org/x/text/unicode/cldr/slice.go
@@ -0,0 +1,144 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package cldr
+
+import (
+ "fmt"
+ "reflect"
+ "sort"
+)
+
+// Slice provides utilities for modifying slices of elements.
+// It can be wrapped around any slice whose element type implements the
+// Elem interface.
+type Slice struct {
+ ptr reflect.Value
+ typ reflect.Type
+}
+
+// Value returns the reflect.Value of the underlying slice.
+func (s *Slice) Value() reflect.Value {
+ return s.ptr.Elem()
+}
+
+// MakeSlice wraps a pointer to a slice of Elems.
+// It replaces the array pointed to by the slice so that subsequent modifications
+// do not alter the data in a CLDR type.
+// It panics if an incorrect type is passed.
+func MakeSlice(slicePtr interface{}) Slice {
+ ptr := reflect.ValueOf(slicePtr)
+ if ptr.Kind() != reflect.Ptr {
+ panic(fmt.Sprintf("MakeSlice: argument must be pointer to slice, found %v", ptr.Type()))
+ }
+ sl := ptr.Elem()
+ if sl.Kind() != reflect.Slice {
+ panic(fmt.Sprintf("MakeSlice: argument must point to a slice, found %v", sl.Type()))
+ }
+ intf := reflect.TypeOf((*Elem)(nil)).Elem()
+ if !sl.Type().Elem().Implements(intf) {
+ panic(fmt.Sprintf("MakeSlice: element type of slice (%v) does not implement Elem", sl.Type().Elem()))
+ }
+ nsl := reflect.MakeSlice(sl.Type(), sl.Len(), sl.Len())
+ reflect.Copy(nsl, sl)
+ sl.Set(nsl)
+ return Slice{
+ ptr: ptr,
+ typ: sl.Type().Elem().Elem(),
+ }
+}
+
+func (s Slice) indexForAttr(a string) []int {
+ for i := iter(reflect.Zero(s.typ)); !i.done(); i.next() {
+ if n, _ := xmlName(i.field()); n == a {
+ return i.index
+ }
+ }
+ panic(fmt.Sprintf("MakeSlice: no attribute %q for type %v", a, s.typ))
+}
+
+// Filter filters s to only include elements for which fn returns true.
+func (s Slice) Filter(fn func(e Elem) bool) {
+ k := 0
+ sl := s.Value()
+ for i := 0; i < sl.Len(); i++ {
+ vi := sl.Index(i)
+ if fn(vi.Interface().(Elem)) {
+ sl.Index(k).Set(vi)
+ k++
+ }
+ }
+ sl.Set(sl.Slice(0, k))
+}
+
+// Group finds elements in s for which fn returns the same value and groups
+// them in a new Slice.
+func (s Slice) Group(fn func(e Elem) string) []Slice {
+ m := make(map[string][]reflect.Value)
+ sl := s.Value()
+ for i := 0; i < sl.Len(); i++ {
+ vi := sl.Index(i)
+ key := fn(vi.Interface().(Elem))
+ m[key] = append(m[key], vi)
+ }
+ keys := []string{}
+ for k := range m {
+ keys = append(keys, k)
+ }
+ sort.Strings(keys)
+ res := []Slice{}
+ for _, k := range keys {
+ nsl := reflect.New(sl.Type())
+ nsl.Elem().Set(reflect.Append(nsl.Elem(), m[k]...))
+ res = append(res, MakeSlice(nsl.Interface()))
+ }
+ return res
+}
+
+// SelectAnyOf filters s to contain only elements for which attr matches
+// any of the values.
+func (s Slice) SelectAnyOf(attr string, values ...string) {
+ index := s.indexForAttr(attr)
+ s.Filter(func(e Elem) bool {
+ vf := reflect.ValueOf(e).Elem().FieldByIndex(index)
+ return in(values, vf.String())
+ })
+}
+
+// SelectOnePerGroup filters s to include at most one element e per group of
+// elements matching Key(e, a), where e has an attribute a that matches any
+// of the values in v.
+// If more than one element in a group matches a value in v, preference
+// is given to the element that matches the first value in v.
+func (s Slice) SelectOnePerGroup(a string, v []string) {
+ index := s.indexForAttr(a)
+ grouped := s.Group(func(e Elem) string { return Key(e, a) })
+ sl := s.Value()
+ sl.Set(sl.Slice(0, 0))
+ for _, g := range grouped {
+ e := reflect.Value{}
+ found := len(v)
+ gsl := g.Value()
+ for i := 0; i < gsl.Len(); i++ {
+ vi := gsl.Index(i).Elem().FieldByIndex(index)
+ j := 0
+ for ; j < len(v) && v[j] != vi.String(); j++ {
+ }
+ if j < found {
+ found = j
+ e = gsl.Index(i)
+ }
+ }
+ if found < len(v) {
+ sl.Set(reflect.Append(sl, e))
+ }
+ }
+}
+
+// SelectDraft drops all elements from the list with a draft level smaller than d
+// and selects the highest draft level of the remaining.
+// This method assumes that the input CLDR is canonicalized.
+func (s Slice) SelectDraft(d Draft) {
+ s.SelectOnePerGroup("draft", drafts[len(drafts)-2-int(d):])
+}
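+
+// A usage sketch (not part of the upstream file): given an *LDML value ldml
+// that carries collation data, its collations could be narrowed to the
+// standard tailoring at draft level Contributed or better like this:
+//
+// colls := ldml.Collations.Collation // []*Collation; *Collation implements Elem
+// sl := MakeSlice(&colls) // copies the backing array, leaving ldml untouched
+// sl.SelectAnyOf("type", "standard") // keep elements whose type attribute matches
+// sl.SelectDraft(Contributed) // keep the preferred draft variant per group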
diff --git a/vendor/golang.org/x/text/unicode/cldr/xml.go b/vendor/golang.org/x/text/unicode/cldr/xml.go
new file mode 100644
index 0000000..bbae53b
--- /dev/null
+++ b/vendor/golang.org/x/text/unicode/cldr/xml.go
@@ -0,0 +1,1494 @@
+// Code generated by running "go generate" in golang.org/x/text. DO NOT EDIT.
+
+package cldr
+
+// LDMLBCP47 holds information on allowable values for various variables in LDML.
+type LDMLBCP47 struct {
+ Common
+ Version *struct {
+ Common
+ Number string `xml:"number,attr"`
+ } `xml:"version"`
+ Generation *struct {
+ Common
+ Date string `xml:"date,attr"`
+ } `xml:"generation"`
+ Keyword []*struct {
+ Common
+ Key []*struct {
+ Common
+ Extension string `xml:"extension,attr"`
+ Name string `xml:"name,attr"`
+ Description string `xml:"description,attr"`
+ Deprecated string `xml:"deprecated,attr"`
+ Preferred string `xml:"preferred,attr"`
+ Alias string `xml:"alias,attr"`
+ ValueType string `xml:"valueType,attr"`
+ Since string `xml:"since,attr"`
+ Type []*struct {
+ Common
+ Name string `xml:"name,attr"`
+ Description string `xml:"description,attr"`
+ Deprecated string `xml:"deprecated,attr"`
+ Preferred string `xml:"preferred,attr"`
+ Alias string `xml:"alias,attr"`
+ Since string `xml:"since,attr"`
+ } `xml:"type"`
+ } `xml:"key"`
+ } `xml:"keyword"`
+ Attribute []*struct {
+ Common
+ Name string `xml:"name,attr"`
+ Description string `xml:"description,attr"`
+ Deprecated string `xml:"deprecated,attr"`
+ Preferred string `xml:"preferred,attr"`
+ Since string `xml:"since,attr"`
+ } `xml:"attribute"`
+}
+
+// SupplementalData holds information relevant for internationalization
+// and proper use of CLDR, but that is not contained in the locale hierarchy.
+type SupplementalData struct {
+ Common
+ Version *struct {
+ Common
+ Number string `xml:"number,attr"`
+ } `xml:"version"`
+ Generation *struct {
+ Common
+ Date string `xml:"date,attr"`
+ } `xml:"generation"`
+ CurrencyData *struct {
+ Common
+ Fractions []*struct {
+ Common
+ Info []*struct {
+ Common
+ Iso4217 string `xml:"iso4217,attr"`
+ Digits string `xml:"digits,attr"`
+ Rounding string `xml:"rounding,attr"`
+ CashDigits string `xml:"cashDigits,attr"`
+ CashRounding string `xml:"cashRounding,attr"`
+ } `xml:"info"`
+ } `xml:"fractions"`
+ Region []*struct {
+ Common
+ Iso3166 string `xml:"iso3166,attr"`
+ Currency []*struct {
+ Common
+ Before string `xml:"before,attr"`
+ From string `xml:"from,attr"`
+ To string `xml:"to,attr"`
+ Iso4217 string `xml:"iso4217,attr"`
+ Digits string `xml:"digits,attr"`
+ Rounding string `xml:"rounding,attr"`
+ CashRounding string `xml:"cashRounding,attr"`
+ Tender string `xml:"tender,attr"`
+ Alternate []*struct {
+ Common
+ Iso4217 string `xml:"iso4217,attr"`
+ } `xml:"alternate"`
+ } `xml:"currency"`
+ } `xml:"region"`
+ } `xml:"currencyData"`
+ TerritoryContainment *struct {
+ Common
+ Group []*struct {
+ Common
+ Contains string `xml:"contains,attr"`
+ Grouping string `xml:"grouping,attr"`
+ Status string `xml:"status,attr"`
+ } `xml:"group"`
+ } `xml:"territoryContainment"`
+ SubdivisionContainment *struct {
+ Common
+ Subgroup []*struct {
+ Common
+ Subtype string `xml:"subtype,attr"`
+ Contains string `xml:"contains,attr"`
+ } `xml:"subgroup"`
+ } `xml:"subdivisionContainment"`
+ LanguageData *struct {
+ Common
+ Language []*struct {
+ Common
+ Scripts string `xml:"scripts,attr"`
+ Territories string `xml:"territories,attr"`
+ Variants string `xml:"variants,attr"`
+ } `xml:"language"`
+ } `xml:"languageData"`
+ TerritoryInfo *struct {
+ Common
+ Territory []*struct {
+ Common
+ Gdp string `xml:"gdp,attr"`
+ LiteracyPercent string `xml:"literacyPercent,attr"`
+ Population string `xml:"population,attr"`
+ LanguagePopulation []*struct {
+ Common
+ LiteracyPercent string `xml:"literacyPercent,attr"`
+ WritingPercent string `xml:"writingPercent,attr"`
+ PopulationPercent string `xml:"populationPercent,attr"`
+ OfficialStatus string `xml:"officialStatus,attr"`
+ } `xml:"languagePopulation"`
+ } `xml:"territory"`
+ } `xml:"territoryInfo"`
+ PostalCodeData *struct {
+ Common
+ PostCodeRegex []*struct {
+ Common
+ TerritoryId string `xml:"territoryId,attr"`
+ } `xml:"postCodeRegex"`
+ } `xml:"postalCodeData"`
+ CalendarData *struct {
+ Common
+ Calendar []*struct {
+ Common
+ Territories string `xml:"territories,attr"`
+ CalendarSystem *Common `xml:"calendarSystem"`
+ Eras *struct {
+ Common
+ Era []*struct {
+ Common
+ Start string `xml:"start,attr"`
+ End string `xml:"end,attr"`
+ } `xml:"era"`
+ } `xml:"eras"`
+ } `xml:"calendar"`
+ } `xml:"calendarData"`
+ CalendarPreferenceData *struct {
+ Common
+ CalendarPreference []*struct {
+ Common
+ Territories string `xml:"territories,attr"`
+ Ordering string `xml:"ordering,attr"`
+ } `xml:"calendarPreference"`
+ } `xml:"calendarPreferenceData"`
+ WeekData *struct {
+ Common
+ MinDays []*struct {
+ Common
+ Count string `xml:"count,attr"`
+ Territories string `xml:"territories,attr"`
+ } `xml:"minDays"`
+ FirstDay []*struct {
+ Common
+ Day string `xml:"day,attr"`
+ Territories string `xml:"territories,attr"`
+ } `xml:"firstDay"`
+ WeekendStart []*struct {
+ Common
+ Day string `xml:"day,attr"`
+ Territories string `xml:"territories,attr"`
+ } `xml:"weekendStart"`
+ WeekendEnd []*struct {
+ Common
+ Day string `xml:"day,attr"`
+ Territories string `xml:"territories,attr"`
+ } `xml:"weekendEnd"`
+ WeekOfPreference []*struct {
+ Common
+ Locales string `xml:"locales,attr"`
+ Ordering string `xml:"ordering,attr"`
+ } `xml:"weekOfPreference"`
+ } `xml:"weekData"`
+ TimeData *struct {
+ Common
+ Hours []*struct {
+ Common
+ Allowed string `xml:"allowed,attr"`
+ Preferred string `xml:"preferred,attr"`
+ Regions string `xml:"regions,attr"`
+ } `xml:"hours"`
+ } `xml:"timeData"`
+ MeasurementData *struct {
+ Common
+ MeasurementSystem []*struct {
+ Common
+ Category string `xml:"category,attr"`
+ Territories string `xml:"territories,attr"`
+ } `xml:"measurementSystem"`
+ PaperSize []*struct {
+ Common
+ Territories string `xml:"territories,attr"`
+ } `xml:"paperSize"`
+ } `xml:"measurementData"`
+ UnitPreferenceData *struct {
+ Common
+ UnitPreferences []*struct {
+ Common
+ Category string `xml:"category,attr"`
+ Usage string `xml:"usage,attr"`
+ Scope string `xml:"scope,attr"`
+ UnitPreference []*struct {
+ Common
+ Regions string `xml:"regions,attr"`
+ } `xml:"unitPreference"`
+ } `xml:"unitPreferences"`
+ } `xml:"unitPreferenceData"`
+ TimezoneData *struct {
+ Common
+ MapTimezones []*struct {
+ Common
+ OtherVersion string `xml:"otherVersion,attr"`
+ TypeVersion string `xml:"typeVersion,attr"`
+ MapZone []*struct {
+ Common
+ Other string `xml:"other,attr"`
+ Territory string `xml:"territory,attr"`
+ } `xml:"mapZone"`
+ } `xml:"mapTimezones"`
+ ZoneFormatting []*struct {
+ Common
+ Multizone string `xml:"multizone,attr"`
+ TzidVersion string `xml:"tzidVersion,attr"`
+ ZoneItem []*struct {
+ Common
+ Territory string `xml:"territory,attr"`
+ Aliases string `xml:"aliases,attr"`
+ } `xml:"zoneItem"`
+ } `xml:"zoneFormatting"`
+ } `xml:"timezoneData"`
+ Characters *struct {
+ Common
+ CharacterFallback []*struct {
+ Common
+ Character []*struct {
+ Common
+ Value string `xml:"value,attr"`
+ Substitute []*Common `xml:"substitute"`
+ } `xml:"character"`
+ } `xml:"character-fallback"`
+ } `xml:"characters"`
+ Transforms *struct {
+ Common
+ Transform []*struct {
+ Common
+ Source string `xml:"source,attr"`
+ Target string `xml:"target,attr"`
+ Variant string `xml:"variant,attr"`
+ Direction string `xml:"direction,attr"`
+ Alias string `xml:"alias,attr"`
+ BackwardAlias string `xml:"backwardAlias,attr"`
+ Visibility string `xml:"visibility,attr"`
+ Comment []*Common `xml:"comment"`
+ TRule []*Common `xml:"tRule"`
+ } `xml:"transform"`
+ } `xml:"transforms"`
+ Metadata *struct {
+ Common
+ AttributeOrder *Common `xml:"attributeOrder"`
+ ElementOrder *Common `xml:"elementOrder"`
+ SerialElements *Common `xml:"serialElements"`
+ Suppress *struct {
+ Common
+ Attributes []*struct {
+ Common
+ Element string `xml:"element,attr"`
+ Attribute string `xml:"attribute,attr"`
+ AttributeValue string `xml:"attributeValue,attr"`
+ } `xml:"attributes"`
+ } `xml:"suppress"`
+ Validity *struct {
+ Common
+ Variable []*struct {
+ Common
+ Id string `xml:"id,attr"`
+ } `xml:"variable"`
+ AttributeValues []*struct {
+ Common
+ Dtds string `xml:"dtds,attr"`
+ Elements string `xml:"elements,attr"`
+ Attributes string `xml:"attributes,attr"`
+ Order string `xml:"order,attr"`
+ } `xml:"attributeValues"`
+ } `xml:"validity"`
+ Alias *struct {
+ Common
+ LanguageAlias []*struct {
+ Common
+ Replacement string `xml:"replacement,attr"`
+ Reason string `xml:"reason,attr"`
+ } `xml:"languageAlias"`
+ ScriptAlias []*struct {
+ Common
+ Replacement string `xml:"replacement,attr"`
+ Reason string `xml:"reason,attr"`
+ } `xml:"scriptAlias"`
+ TerritoryAlias []*struct {
+ Common
+ Replacement string `xml:"replacement,attr"`
+ Reason string `xml:"reason,attr"`
+ } `xml:"territoryAlias"`
+ SubdivisionAlias []*struct {
+ Common
+ Replacement string `xml:"replacement,attr"`
+ Reason string `xml:"reason,attr"`
+ } `xml:"subdivisionAlias"`
+ VariantAlias []*struct {
+ Common
+ Replacement string `xml:"replacement,attr"`
+ Reason string `xml:"reason,attr"`
+ } `xml:"variantAlias"`
+ ZoneAlias []*struct {
+ Common
+ Replacement string `xml:"replacement,attr"`
+ Reason string `xml:"reason,attr"`
+ } `xml:"zoneAlias"`
+ } `xml:"alias"`
+ Deprecated *struct {
+ Common
+ DeprecatedItems []*struct {
+ Common
+ Elements string `xml:"elements,attr"`
+ Attributes string `xml:"attributes,attr"`
+ Values string `xml:"values,attr"`
+ } `xml:"deprecatedItems"`
+ } `xml:"deprecated"`
+ Distinguishing *struct {
+ Common
+ DistinguishingItems []*struct {
+ Common
+ Exclude string `xml:"exclude,attr"`
+ Elements string `xml:"elements,attr"`
+ Attributes string `xml:"attributes,attr"`
+ } `xml:"distinguishingItems"`
+ } `xml:"distinguishing"`
+ Blocking *struct {
+ Common
+ BlockingItems []*struct {
+ Common
+ Elements string `xml:"elements,attr"`
+ } `xml:"blockingItems"`
+ } `xml:"blocking"`
+ CoverageAdditions *struct {
+ Common
+ LanguageCoverage []*struct {
+ Common
+ Values string `xml:"values,attr"`
+ } `xml:"languageCoverage"`
+ ScriptCoverage []*struct {
+ Common
+ Values string `xml:"values,attr"`
+ } `xml:"scriptCoverage"`
+ TerritoryCoverage []*struct {
+ Common
+ Values string `xml:"values,attr"`
+ } `xml:"territoryCoverage"`
+ CurrencyCoverage []*struct {
+ Common
+ Values string `xml:"values,attr"`
+ } `xml:"currencyCoverage"`
+ TimezoneCoverage []*struct {
+ Common
+ Values string `xml:"values,attr"`
+ } `xml:"timezoneCoverage"`
+ } `xml:"coverageAdditions"`
+ SkipDefaultLocale *struct {
+ Common
+ Services string `xml:"services,attr"`
+ } `xml:"skipDefaultLocale"`
+ DefaultContent *struct {
+ Common
+ Locales string `xml:"locales,attr"`
+ } `xml:"defaultContent"`
+ } `xml:"metadata"`
+ CodeMappings *struct {
+ Common
+ LanguageCodes []*struct {
+ Common
+ Alpha3 string `xml:"alpha3,attr"`
+ } `xml:"languageCodes"`
+ TerritoryCodes []*struct {
+ Common
+ Numeric string `xml:"numeric,attr"`
+ Alpha3 string `xml:"alpha3,attr"`
+ Fips10 string `xml:"fips10,attr"`
+ Internet string `xml:"internet,attr"`
+ } `xml:"territoryCodes"`
+ CurrencyCodes []*struct {
+ Common
+ Numeric string `xml:"numeric,attr"`
+ } `xml:"currencyCodes"`
+ } `xml:"codeMappings"`
+ ParentLocales *struct {
+ Common
+ ParentLocale []*struct {
+ Common
+ Parent string `xml:"parent,attr"`
+ Locales string `xml:"locales,attr"`
+ } `xml:"parentLocale"`
+ } `xml:"parentLocales"`
+ LikelySubtags *struct {
+ Common
+ LikelySubtag []*struct {
+ Common
+ From string `xml:"from,attr"`
+ To string `xml:"to,attr"`
+ } `xml:"likelySubtag"`
+ } `xml:"likelySubtags"`
+ MetazoneInfo *struct {
+ Common
+ Timezone []*struct {
+ Common
+ UsesMetazone []*struct {
+ Common
+ From string `xml:"from,attr"`
+ To string `xml:"to,attr"`
+ Mzone string `xml:"mzone,attr"`
+ } `xml:"usesMetazone"`
+ } `xml:"timezone"`
+ } `xml:"metazoneInfo"`
+ Plurals []*struct {
+ Common
+ PluralRules []*struct {
+ Common
+ Locales string `xml:"locales,attr"`
+ PluralRule []*struct {
+ Common
+ Count string `xml:"count,attr"`
+ } `xml:"pluralRule"`
+ } `xml:"pluralRules"`
+ PluralRanges []*struct {
+ Common
+ Locales string `xml:"locales,attr"`
+ PluralRange []*struct {
+ Common
+ Start string `xml:"start,attr"`
+ End string `xml:"end,attr"`
+ Result string `xml:"result,attr"`
+ } `xml:"pluralRange"`
+ } `xml:"pluralRanges"`
+ } `xml:"plurals"`
+ TelephoneCodeData *struct {
+ Common
+ CodesByTerritory []*struct {
+ Common
+ Territory string `xml:"territory,attr"`
+ TelephoneCountryCode []*struct {
+ Common
+ Code string `xml:"code,attr"`
+ From string `xml:"from,attr"`
+ To string `xml:"to,attr"`
+ } `xml:"telephoneCountryCode"`
+ } `xml:"codesByTerritory"`
+ } `xml:"telephoneCodeData"`
+ NumberingSystems *struct {
+ Common
+ NumberingSystem []*struct {
+ Common
+ Id string `xml:"id,attr"`
+ Radix string `xml:"radix,attr"`
+ Digits string `xml:"digits,attr"`
+ Rules string `xml:"rules,attr"`
+ } `xml:"numberingSystem"`
+ } `xml:"numberingSystems"`
+ Bcp47KeywordMappings *struct {
+ Common
+ MapKeys *struct {
+ Common
+ KeyMap []*struct {
+ Common
+ Bcp47 string `xml:"bcp47,attr"`
+ } `xml:"keyMap"`
+ } `xml:"mapKeys"`
+ MapTypes []*struct {
+ Common
+ TypeMap []*struct {
+ Common
+ Bcp47 string `xml:"bcp47,attr"`
+ } `xml:"typeMap"`
+ } `xml:"mapTypes"`
+ } `xml:"bcp47KeywordMappings"`
+ Gender *struct {
+ Common
+ PersonList []*struct {
+ Common
+ Locales string `xml:"locales,attr"`
+ } `xml:"personList"`
+ } `xml:"gender"`
+ References *struct {
+ Common
+ Reference []*struct {
+ Common
+ Uri string `xml:"uri,attr"`
+ } `xml:"reference"`
+ } `xml:"references"`
+ LanguageMatching *struct {
+ Common
+ LanguageMatches []*struct {
+ Common
+ ParadigmLocales []*struct {
+ Common
+ Locales string `xml:"locales,attr"`
+ } `xml:"paradigmLocales"`
+ MatchVariable []*struct {
+ Common
+ Id string `xml:"id,attr"`
+ Value string `xml:"value,attr"`
+ } `xml:"matchVariable"`
+ LanguageMatch []*struct {
+ Common
+ Desired string `xml:"desired,attr"`
+ Supported string `xml:"supported,attr"`
+ Percent string `xml:"percent,attr"`
+ Distance string `xml:"distance,attr"`
+ Oneway string `xml:"oneway,attr"`
+ } `xml:"languageMatch"`
+ } `xml:"languageMatches"`
+ } `xml:"languageMatching"`
+ DayPeriodRuleSet []*struct {
+ Common
+ DayPeriodRules []*struct {
+ Common
+ Locales string `xml:"locales,attr"`
+ DayPeriodRule []*struct {
+ Common
+ At string `xml:"at,attr"`
+ After string `xml:"after,attr"`
+ Before string `xml:"before,attr"`
+ From string `xml:"from,attr"`
+ To string `xml:"to,attr"`
+ } `xml:"dayPeriodRule"`
+ } `xml:"dayPeriodRules"`
+ } `xml:"dayPeriodRuleSet"`
+ MetaZones *struct {
+ Common
+ MetazoneInfo *struct {
+ Common
+ Timezone []*struct {
+ Common
+ UsesMetazone []*struct {
+ Common
+ From string `xml:"from,attr"`
+ To string `xml:"to,attr"`
+ Mzone string `xml:"mzone,attr"`
+ } `xml:"usesMetazone"`
+ } `xml:"timezone"`
+ } `xml:"metazoneInfo"`
+ MapTimezones *struct {
+ Common
+ OtherVersion string `xml:"otherVersion,attr"`
+ TypeVersion string `xml:"typeVersion,attr"`
+ MapZone []*struct {
+ Common
+ Other string `xml:"other,attr"`
+ Territory string `xml:"territory,attr"`
+ } `xml:"mapZone"`
+ } `xml:"mapTimezones"`
+ } `xml:"metaZones"`
+ PrimaryZones *struct {
+ Common
+ PrimaryZone []*struct {
+ Common
+ Iso3166 string `xml:"iso3166,attr"`
+ } `xml:"primaryZone"`
+ } `xml:"primaryZones"`
+ WindowsZones *struct {
+ Common
+ MapTimezones *struct {
+ Common
+ OtherVersion string `xml:"otherVersion,attr"`
+ TypeVersion string `xml:"typeVersion,attr"`
+ MapZone []*struct {
+ Common
+ Other string `xml:"other,attr"`
+ Territory string `xml:"territory,attr"`
+ } `xml:"mapZone"`
+ } `xml:"mapTimezones"`
+ } `xml:"windowsZones"`
+ CoverageLevels *struct {
+ Common
+ ApprovalRequirements *struct {
+ Common
+ ApprovalRequirement []*struct {
+ Common
+ Votes string `xml:"votes,attr"`
+ Locales string `xml:"locales,attr"`
+ Paths string `xml:"paths,attr"`
+ } `xml:"approvalRequirement"`
+ } `xml:"approvalRequirements"`
+ CoverageVariable []*struct {
+ Common
+ Key string `xml:"key,attr"`
+ Value string `xml:"value,attr"`
+ } `xml:"coverageVariable"`
+ CoverageLevel []*struct {
+ Common
+ InLanguage string `xml:"inLanguage,attr"`
+ InScript string `xml:"inScript,attr"`
+ InTerritory string `xml:"inTerritory,attr"`
+ Value string `xml:"value,attr"`
+ Match string `xml:"match,attr"`
+ } `xml:"coverageLevel"`
+ } `xml:"coverageLevels"`
+ IdValidity *struct {
+ Common
+ Id []*struct {
+ Common
+ IdStatus string `xml:"idStatus,attr"`
+ } `xml:"id"`
+ } `xml:"idValidity"`
+ RgScope *struct {
+ Common
+ RgPath []*struct {
+ Common
+ Path string `xml:"path,attr"`
+ } `xml:"rgPath"`
+ } `xml:"rgScope"`
+ LanguageGroups *struct {
+ Common
+ LanguageGroup []*struct {
+ Common
+ Parent string `xml:"parent,attr"`
+ } `xml:"languageGroup"`
+ } `xml:"languageGroups"`
+}
+
+// LDML is the top-level type for locale-specific data.
+type LDML struct {
+ Common
+ Version string `xml:"version,attr"`
+ Identity *struct {
+ Common
+ Version *struct {
+ Common
+ Number string `xml:"number,attr"`
+ } `xml:"version"`
+ Generation *struct {
+ Common
+ Date string `xml:"date,attr"`
+ } `xml:"generation"`
+ Language *Common `xml:"language"`
+ Script *Common `xml:"script"`
+ Territory *Common `xml:"territory"`
+ Variant *Common `xml:"variant"`
+ } `xml:"identity"`
+ LocaleDisplayNames *LocaleDisplayNames `xml:"localeDisplayNames"`
+ Layout *struct {
+ Common
+ Orientation []*struct {
+ Common
+ Characters string `xml:"characters,attr"`
+ Lines string `xml:"lines,attr"`
+ CharacterOrder []*Common `xml:"characterOrder"`
+ LineOrder []*Common `xml:"lineOrder"`
+ } `xml:"orientation"`
+ InList []*struct {
+ Common
+ Casing string `xml:"casing,attr"`
+ } `xml:"inList"`
+ InText []*Common `xml:"inText"`
+ } `xml:"layout"`
+ ContextTransforms *struct {
+ Common
+ ContextTransformUsage []*struct {
+ Common
+ ContextTransform []*Common `xml:"contextTransform"`
+ } `xml:"contextTransformUsage"`
+ } `xml:"contextTransforms"`
+ Characters *struct {
+ Common
+ ExemplarCharacters []*Common `xml:"exemplarCharacters"`
+ Ellipsis []*Common `xml:"ellipsis"`
+ MoreInformation []*Common `xml:"moreInformation"`
+ Stopwords []*struct {
+ Common
+ StopwordList []*Common `xml:"stopwordList"`
+ } `xml:"stopwords"`
+ IndexLabels []*struct {
+ Common
+ IndexSeparator []*Common `xml:"indexSeparator"`
+ CompressedIndexSeparator []*Common `xml:"compressedIndexSeparator"`
+ IndexRangePattern []*Common `xml:"indexRangePattern"`
+ IndexLabelBefore []*Common `xml:"indexLabelBefore"`
+ IndexLabelAfter []*Common `xml:"indexLabelAfter"`
+ IndexLabel []*struct {
+ Common
+ IndexSource string `xml:"indexSource,attr"`
+ Priority string `xml:"priority,attr"`
+ } `xml:"indexLabel"`
+ } `xml:"indexLabels"`
+ Mapping []*struct {
+ Common
+ Registry string `xml:"registry,attr"`
+ } `xml:"mapping"`
+ ParseLenients []*struct {
+ Common
+ Scope string `xml:"scope,attr"`
+ Level string `xml:"level,attr"`
+ ParseLenient []*struct {
+ Common
+ Sample string `xml:"sample,attr"`
+ } `xml:"parseLenient"`
+ } `xml:"parseLenients"`
+ } `xml:"characters"`
+ Delimiters *struct {
+ Common
+ QuotationStart []*Common `xml:"quotationStart"`
+ QuotationEnd []*Common `xml:"quotationEnd"`
+ AlternateQuotationStart []*Common `xml:"alternateQuotationStart"`
+ AlternateQuotationEnd []*Common `xml:"alternateQuotationEnd"`
+ } `xml:"delimiters"`
+ Measurement *struct {
+ Common
+ MeasurementSystem []*Common `xml:"measurementSystem"`
+ PaperSize []*struct {
+ Common
+ Height []*Common `xml:"height"`
+ Width []*Common `xml:"width"`
+ } `xml:"paperSize"`
+ } `xml:"measurement"`
+ Dates *struct {
+ Common
+ LocalizedPatternChars []*Common `xml:"localizedPatternChars"`
+ DateRangePattern []*Common `xml:"dateRangePattern"`
+ Calendars *struct {
+ Common
+ Calendar []*Calendar `xml:"calendar"`
+ } `xml:"calendars"`
+ Fields *struct {
+ Common
+ Field []*struct {
+ Common
+ DisplayName []*struct {
+ Common
+ Count string `xml:"count,attr"`
+ } `xml:"displayName"`
+ Relative []*Common `xml:"relative"`
+ RelativeTime []*struct {
+ Common
+ RelativeTimePattern []*struct {
+ Common
+ Count string `xml:"count,attr"`
+ } `xml:"relativeTimePattern"`
+ } `xml:"relativeTime"`
+ RelativePeriod []*Common `xml:"relativePeriod"`
+ } `xml:"field"`
+ } `xml:"fields"`
+ TimeZoneNames *TimeZoneNames `xml:"timeZoneNames"`
+ } `xml:"dates"`
+ Numbers *Numbers `xml:"numbers"`
+ Units *struct {
+ Common
+ Unit []*struct {
+ Common
+ DisplayName []*struct {
+ Common
+ Count string `xml:"count,attr"`
+ } `xml:"displayName"`
+ UnitPattern []*struct {
+ Common
+ Count string `xml:"count,attr"`
+ } `xml:"unitPattern"`
+ PerUnitPattern []*Common `xml:"perUnitPattern"`
+ } `xml:"unit"`
+ UnitLength []*struct {
+ Common
+ CompoundUnit []*struct {
+ Common
+ CompoundUnitPattern []*Common `xml:"compoundUnitPattern"`
+ } `xml:"compoundUnit"`
+ Unit []*struct {
+ Common
+ DisplayName []*struct {
+ Common
+ Count string `xml:"count,attr"`
+ } `xml:"displayName"`
+ UnitPattern []*struct {
+ Common
+ Count string `xml:"count,attr"`
+ } `xml:"unitPattern"`
+ PerUnitPattern []*Common `xml:"perUnitPattern"`
+ } `xml:"unit"`
+ CoordinateUnit []*struct {
+ Common
+ CoordinateUnitPattern []*Common `xml:"coordinateUnitPattern"`
+ } `xml:"coordinateUnit"`
+ } `xml:"unitLength"`
+ DurationUnit []*struct {
+ Common
+ DurationUnitPattern []*Common `xml:"durationUnitPattern"`
+ } `xml:"durationUnit"`
+ } `xml:"units"`
+ ListPatterns *struct {
+ Common
+ ListPattern []*struct {
+ Common
+ ListPatternPart []*Common `xml:"listPatternPart"`
+ } `xml:"listPattern"`
+ } `xml:"listPatterns"`
+ Collations *struct {
+ Common
+ Version string `xml:"version,attr"`
+ DefaultCollation *Common `xml:"defaultCollation"`
+ Collation []*Collation `xml:"collation"`
+ } `xml:"collations"`
+ Posix *struct {
+ Common
+ Messages []*struct {
+ Common
+ Yesstr []*Common `xml:"yesstr"`
+ Nostr []*Common `xml:"nostr"`
+ Yesexpr []*Common `xml:"yesexpr"`
+ Noexpr []*Common `xml:"noexpr"`
+ } `xml:"messages"`
+ } `xml:"posix"`
+ CharacterLabels *struct {
+ Common
+ CharacterLabelPattern []*struct {
+ Common
+ Count string `xml:"count,attr"`
+ } `xml:"characterLabelPattern"`
+ CharacterLabel []*Common `xml:"characterLabel"`
+ } `xml:"characterLabels"`
+ Segmentations *struct {
+ Common
+ Segmentation []*struct {
+ Common
+ Variables *struct {
+ Common
+ Variable []*struct {
+ Common
+ Id string `xml:"id,attr"`
+ } `xml:"variable"`
+ } `xml:"variables"`
+ SegmentRules *struct {
+ Common
+ Rule []*struct {
+ Common
+ Id string `xml:"id,attr"`
+ } `xml:"rule"`
+ } `xml:"segmentRules"`
+ Exceptions *struct {
+ Common
+ Exception []*Common `xml:"exception"`
+ } `xml:"exceptions"`
+ Suppressions *struct {
+ Common
+ Suppression []*Common `xml:"suppression"`
+ } `xml:"suppressions"`
+ } `xml:"segmentation"`
+ } `xml:"segmentations"`
+ Rbnf *struct {
+ Common
+ RulesetGrouping []*struct {
+ Common
+ Ruleset []*struct {
+ Common
+ Access string `xml:"access,attr"`
+ AllowsParsing string `xml:"allowsParsing,attr"`
+ Rbnfrule []*struct {
+ Common
+ Value string `xml:"value,attr"`
+ Radix string `xml:"radix,attr"`
+ Decexp string `xml:"decexp,attr"`
+ } `xml:"rbnfrule"`
+ } `xml:"ruleset"`
+ } `xml:"rulesetGrouping"`
+ } `xml:"rbnf"`
+ Annotations *struct {
+ Common
+ Annotation []*struct {
+ Common
+ Cp string `xml:"cp,attr"`
+ Tts string `xml:"tts,attr"`
+ } `xml:"annotation"`
+ } `xml:"annotations"`
+ Metadata *struct {
+ Common
+ CasingData *struct {
+ Common
+ CasingItem []*struct {
+ Common
+ Override string `xml:"override,attr"`
+ ForceError string `xml:"forceError,attr"`
+ } `xml:"casingItem"`
+ } `xml:"casingData"`
+ } `xml:"metadata"`
+ References *struct {
+ Common
+ Reference []*struct {
+ Common
+ Uri string `xml:"uri,attr"`
+ } `xml:"reference"`
+ } `xml:"references"`
+}
+
+// Collation contains rules that specify a certain sort-order,
+// as a tailoring of the root order.
+// The parsed rules are obtained by passing a RuleProcessor to Collation's
+// Process method.
+type Collation struct {
+ Common
+ Visibility string `xml:"visibility,attr"`
+ Base *Common `xml:"base"`
+ Import []*struct {
+ Common
+ Source string `xml:"source,attr"`
+ } `xml:"import"`
+ Settings *struct {
+ Common
+ Strength string `xml:"strength,attr"`
+ Alternate string `xml:"alternate,attr"`
+ Backwards string `xml:"backwards,attr"`
+ Normalization string `xml:"normalization,attr"`
+ CaseLevel string `xml:"caseLevel,attr"`
+ CaseFirst string `xml:"caseFirst,attr"`
+ HiraganaQuaternary string `xml:"hiraganaQuaternary,attr"`
+ MaxVariable string `xml:"maxVariable,attr"`
+ Numeric string `xml:"numeric,attr"`
+ Private string `xml:"private,attr"`
+ VariableTop string `xml:"variableTop,attr"`
+ Reorder string `xml:"reorder,attr"`
+ } `xml:"settings"`
+ SuppressContractions *Common `xml:"suppress_contractions"`
+ Optimize *Common `xml:"optimize"`
+ Cr []*Common `xml:"cr"`
+ rulesElem
+}
+
+// Calendar specifies the fields used for formatting and parsing dates and times.
+// The month and quarter names are identified numerically, starting at 1.
+// The day (of the week) names are identified with short strings, since there is
+// no universally-accepted numeric designation.
+type Calendar struct {
+ Common
+ Months *struct {
+ Common
+ MonthContext []*struct {
+ Common
+ MonthWidth []*struct {
+ Common
+ Month []*struct {
+ Common
+ Yeartype string `xml:"yeartype,attr"`
+ } `xml:"month"`
+ } `xml:"monthWidth"`
+ } `xml:"monthContext"`
+ } `xml:"months"`
+ MonthNames *struct {
+ Common
+ Month []*struct {
+ Common
+ Yeartype string `xml:"yeartype,attr"`
+ } `xml:"month"`
+ } `xml:"monthNames"`
+ MonthAbbr *struct {
+ Common
+ Month []*struct {
+ Common
+ Yeartype string `xml:"yeartype,attr"`
+ } `xml:"month"`
+ } `xml:"monthAbbr"`
+ MonthPatterns *struct {
+ Common
+ MonthPatternContext []*struct {
+ Common
+ MonthPatternWidth []*struct {
+ Common
+ MonthPattern []*Common `xml:"monthPattern"`
+ } `xml:"monthPatternWidth"`
+ } `xml:"monthPatternContext"`
+ } `xml:"monthPatterns"`
+ Days *struct {
+ Common
+ DayContext []*struct {
+ Common
+ DayWidth []*struct {
+ Common
+ Day []*Common `xml:"day"`
+ } `xml:"dayWidth"`
+ } `xml:"dayContext"`
+ } `xml:"days"`
+ DayNames *struct {
+ Common
+ Day []*Common `xml:"day"`
+ } `xml:"dayNames"`
+ DayAbbr *struct {
+ Common
+ Day []*Common `xml:"day"`
+ } `xml:"dayAbbr"`
+ Quarters *struct {
+ Common
+ QuarterContext []*struct {
+ Common
+ QuarterWidth []*struct {
+ Common
+ Quarter []*Common `xml:"quarter"`
+ } `xml:"quarterWidth"`
+ } `xml:"quarterContext"`
+ } `xml:"quarters"`
+ Week *struct {
+ Common
+ MinDays []*struct {
+ Common
+ Count string `xml:"count,attr"`
+ } `xml:"minDays"`
+ FirstDay []*struct {
+ Common
+ Day string `xml:"day,attr"`
+ } `xml:"firstDay"`
+ WeekendStart []*struct {
+ Common
+ Day string `xml:"day,attr"`
+ Time string `xml:"time,attr"`
+ } `xml:"weekendStart"`
+ WeekendEnd []*struct {
+ Common
+ Day string `xml:"day,attr"`
+ Time string `xml:"time,attr"`
+ } `xml:"weekendEnd"`
+ } `xml:"week"`
+ Am []*Common `xml:"am"`
+ Pm []*Common `xml:"pm"`
+ DayPeriods *struct {
+ Common
+ DayPeriodContext []*struct {
+ Common
+ DayPeriodWidth []*struct {
+ Common
+ DayPeriod []*Common `xml:"dayPeriod"`
+ } `xml:"dayPeriodWidth"`
+ } `xml:"dayPeriodContext"`
+ } `xml:"dayPeriods"`
+ Eras *struct {
+ Common
+ EraNames *struct {
+ Common
+ Era []*Common `xml:"era"`
+ } `xml:"eraNames"`
+ EraAbbr *struct {
+ Common
+ Era []*Common `xml:"era"`
+ } `xml:"eraAbbr"`
+ EraNarrow *struct {
+ Common
+ Era []*Common `xml:"era"`
+ } `xml:"eraNarrow"`
+ } `xml:"eras"`
+ CyclicNameSets *struct {
+ Common
+ CyclicNameSet []*struct {
+ Common
+ CyclicNameContext []*struct {
+ Common
+ CyclicNameWidth []*struct {
+ Common
+ CyclicName []*Common `xml:"cyclicName"`
+ } `xml:"cyclicNameWidth"`
+ } `xml:"cyclicNameContext"`
+ } `xml:"cyclicNameSet"`
+ } `xml:"cyclicNameSets"`
+ DateFormats *struct {
+ Common
+ DateFormatLength []*struct {
+ Common
+ DateFormat []*struct {
+ Common
+ Pattern []*struct {
+ Common
+ Numbers string `xml:"numbers,attr"`
+ Count string `xml:"count,attr"`
+ } `xml:"pattern"`
+ DisplayName []*struct {
+ Common
+ Count string `xml:"count,attr"`
+ } `xml:"displayName"`
+ } `xml:"dateFormat"`
+ } `xml:"dateFormatLength"`
+ } `xml:"dateFormats"`
+ TimeFormats *struct {
+ Common
+ TimeFormatLength []*struct {
+ Common
+ TimeFormat []*struct {
+ Common
+ Pattern []*struct {
+ Common
+ Numbers string `xml:"numbers,attr"`
+ Count string `xml:"count,attr"`
+ } `xml:"pattern"`
+ DisplayName []*struct {
+ Common
+ Count string `xml:"count,attr"`
+ } `xml:"displayName"`
+ } `xml:"timeFormat"`
+ } `xml:"timeFormatLength"`
+ } `xml:"timeFormats"`
+ DateTimeFormats *struct {
+ Common
+ DateTimeFormatLength []*struct {
+ Common
+ DateTimeFormat []*struct {
+ Common
+ Pattern []*struct {
+ Common
+ Numbers string `xml:"numbers,attr"`
+ Count string `xml:"count,attr"`
+ } `xml:"pattern"`
+ DisplayName []*struct {
+ Common
+ Count string `xml:"count,attr"`
+ } `xml:"displayName"`
+ } `xml:"dateTimeFormat"`
+ } `xml:"dateTimeFormatLength"`
+ AvailableFormats []*struct {
+ Common
+ DateFormatItem []*struct {
+ Common
+ Id string `xml:"id,attr"`
+ Count string `xml:"count,attr"`
+ } `xml:"dateFormatItem"`
+ } `xml:"availableFormats"`
+ AppendItems []*struct {
+ Common
+ AppendItem []*struct {
+ Common
+ Request string `xml:"request,attr"`
+ } `xml:"appendItem"`
+ } `xml:"appendItems"`
+ IntervalFormats []*struct {
+ Common
+ IntervalFormatFallback []*Common `xml:"intervalFormatFallback"`
+ IntervalFormatItem []*struct {
+ Common
+ Id string `xml:"id,attr"`
+ GreatestDifference []*struct {
+ Common
+ Id string `xml:"id,attr"`
+ } `xml:"greatestDifference"`
+ } `xml:"intervalFormatItem"`
+ } `xml:"intervalFormats"`
+ } `xml:"dateTimeFormats"`
+ Fields []*struct {
+ Common
+ Field []*struct {
+ Common
+ DisplayName []*struct {
+ Common
+ Count string `xml:"count,attr"`
+ } `xml:"displayName"`
+ Relative []*Common `xml:"relative"`
+ RelativeTime []*struct {
+ Common
+ RelativeTimePattern []*struct {
+ Common
+ Count string `xml:"count,attr"`
+ } `xml:"relativeTimePattern"`
+ } `xml:"relativeTime"`
+ RelativePeriod []*Common `xml:"relativePeriod"`
+ } `xml:"field"`
+ } `xml:"fields"`
+}
+
+type TimeZoneNames struct {
+ Common
+ HourFormat []*Common `xml:"hourFormat"`
+ HoursFormat []*Common `xml:"hoursFormat"`
+ GmtFormat []*Common `xml:"gmtFormat"`
+ GmtZeroFormat []*Common `xml:"gmtZeroFormat"`
+ RegionFormat []*Common `xml:"regionFormat"`
+ FallbackFormat []*Common `xml:"fallbackFormat"`
+ FallbackRegionFormat []*Common `xml:"fallbackRegionFormat"`
+ AbbreviationFallback []*Common `xml:"abbreviationFallback"`
+ PreferenceOrdering []*Common `xml:"preferenceOrdering"`
+ SingleCountries []*struct {
+ Common
+ List string `xml:"list,attr"`
+ } `xml:"singleCountries"`
+ Zone []*struct {
+ Common
+ Long []*struct {
+ Common
+ Generic []*Common `xml:"generic"`
+ Standard []*Common `xml:"standard"`
+ Daylight []*Common `xml:"daylight"`
+ } `xml:"long"`
+ Short []*struct {
+ Common
+ Generic []*Common `xml:"generic"`
+ Standard []*Common `xml:"standard"`
+ Daylight []*Common `xml:"daylight"`
+ } `xml:"short"`
+ CommonlyUsed []*struct {
+ Common
+ Used string `xml:"used,attr"`
+ } `xml:"commonlyUsed"`
+ ExemplarCity []*Common `xml:"exemplarCity"`
+ } `xml:"zone"`
+ Metazone []*struct {
+ Common
+ Long []*struct {
+ Common
+ Generic []*Common `xml:"generic"`
+ Standard []*Common `xml:"standard"`
+ Daylight []*Common `xml:"daylight"`
+ } `xml:"long"`
+ Short []*struct {
+ Common
+ Generic []*Common `xml:"generic"`
+ Standard []*Common `xml:"standard"`
+ Daylight []*Common `xml:"daylight"`
+ } `xml:"short"`
+ CommonlyUsed []*struct {
+ Common
+ Used string `xml:"used,attr"`
+ } `xml:"commonlyUsed"`
+ } `xml:"metazone"`
+}
+
+// LocaleDisplayNames specifies localized display names for scripts, languages,
+// countries, currencies, and variants.
+type LocaleDisplayNames struct {
+ Common
+ LocaleDisplayPattern *struct {
+ Common
+ LocalePattern []*Common `xml:"localePattern"`
+ LocaleSeparator []*Common `xml:"localeSeparator"`
+ LocaleKeyTypePattern []*Common `xml:"localeKeyTypePattern"`
+ } `xml:"localeDisplayPattern"`
+ Languages *struct {
+ Common
+ Language []*Common `xml:"language"`
+ } `xml:"languages"`
+ Scripts *struct {
+ Common
+ Script []*Common `xml:"script"`
+ } `xml:"scripts"`
+ Territories *struct {
+ Common
+ Territory []*Common `xml:"territory"`
+ } `xml:"territories"`
+ Subdivisions *struct {
+ Common
+ Subdivision []*Common `xml:"subdivision"`
+ } `xml:"subdivisions"`
+ Variants *struct {
+ Common
+ Variant []*Common `xml:"variant"`
+ } `xml:"variants"`
+ Keys *struct {
+ Common
+ Key []*Common `xml:"key"`
+ } `xml:"keys"`
+ Types *struct {
+ Common
+ Type []*struct {
+ Common
+ Key string `xml:"key,attr"`
+ } `xml:"type"`
+ } `xml:"types"`
+ TransformNames *struct {
+ Common
+ TransformName []*Common `xml:"transformName"`
+ } `xml:"transformNames"`
+ MeasurementSystemNames *struct {
+ Common
+ MeasurementSystemName []*Common `xml:"measurementSystemName"`
+ } `xml:"measurementSystemNames"`
+ CodePatterns *struct {
+ Common
+ CodePattern []*Common `xml:"codePattern"`
+ } `xml:"codePatterns"`
+}
+
+// Numbers supplies information for formatting and parsing numbers and currencies.
+type Numbers struct {
+ Common
+ DefaultNumberingSystem []*Common `xml:"defaultNumberingSystem"`
+ OtherNumberingSystems []*struct {
+ Common
+ Native []*Common `xml:"native"`
+ Traditional []*Common `xml:"traditional"`
+ Finance []*Common `xml:"finance"`
+ } `xml:"otherNumberingSystems"`
+ MinimumGroupingDigits []*Common `xml:"minimumGroupingDigits"`
+ Symbols []*struct {
+ Common
+ NumberSystem string `xml:"numberSystem,attr"`
+ Decimal []*struct {
+ Common
+ NumberSystem string `xml:"numberSystem,attr"`
+ } `xml:"decimal"`
+ Group []*struct {
+ Common
+ NumberSystem string `xml:"numberSystem,attr"`
+ } `xml:"group"`
+ List []*struct {
+ Common
+ NumberSystem string `xml:"numberSystem,attr"`
+ } `xml:"list"`
+ PercentSign []*struct {
+ Common
+ NumberSystem string `xml:"numberSystem,attr"`
+ } `xml:"percentSign"`
+ NativeZeroDigit []*struct {
+ Common
+ NumberSystem string `xml:"numberSystem,attr"`
+ } `xml:"nativeZeroDigit"`
+ PatternDigit []*struct {
+ Common
+ NumberSystem string `xml:"numberSystem,attr"`
+ } `xml:"patternDigit"`
+ PlusSign []*struct {
+ Common
+ NumberSystem string `xml:"numberSystem,attr"`
+ } `xml:"plusSign"`
+ MinusSign []*struct {
+ Common
+ NumberSystem string `xml:"numberSystem,attr"`
+ } `xml:"minusSign"`
+ Exponential []*struct {
+ Common
+ NumberSystem string `xml:"numberSystem,attr"`
+ } `xml:"exponential"`
+ SuperscriptingExponent []*Common `xml:"superscriptingExponent"`
+ PerMille []*struct {
+ Common
+ NumberSystem string `xml:"numberSystem,attr"`
+ } `xml:"perMille"`
+ Infinity []*struct {
+ Common
+ NumberSystem string `xml:"numberSystem,attr"`
+ } `xml:"infinity"`
+ Nan []*struct {
+ Common
+ NumberSystem string `xml:"numberSystem,attr"`
+ } `xml:"nan"`
+ CurrencyDecimal []*struct {
+ Common
+ NumberSystem string `xml:"numberSystem,attr"`
+ } `xml:"currencyDecimal"`
+ CurrencyGroup []*struct {
+ Common
+ NumberSystem string `xml:"numberSystem,attr"`
+ } `xml:"currencyGroup"`
+ TimeSeparator []*Common `xml:"timeSeparator"`
+ } `xml:"symbols"`
+ DecimalFormats []*struct {
+ Common
+ NumberSystem string `xml:"numberSystem,attr"`
+ DecimalFormatLength []*struct {
+ Common
+ DecimalFormat []*struct {
+ Common
+ Pattern []*struct {
+ Common
+ Numbers string `xml:"numbers,attr"`
+ Count string `xml:"count,attr"`
+ } `xml:"pattern"`
+ } `xml:"decimalFormat"`
+ } `xml:"decimalFormatLength"`
+ } `xml:"decimalFormats"`
+ ScientificFormats []*struct {
+ Common
+ NumberSystem string `xml:"numberSystem,attr"`
+ ScientificFormatLength []*struct {
+ Common
+ ScientificFormat []*struct {
+ Common
+ Pattern []*struct {
+ Common
+ Numbers string `xml:"numbers,attr"`
+ Count string `xml:"count,attr"`
+ } `xml:"pattern"`
+ } `xml:"scientificFormat"`
+ } `xml:"scientificFormatLength"`
+ } `xml:"scientificFormats"`
+ PercentFormats []*struct {
+ Common
+ NumberSystem string `xml:"numberSystem,attr"`
+ PercentFormatLength []*struct {
+ Common
+ PercentFormat []*struct {
+ Common
+ Pattern []*struct {
+ Common
+ Numbers string `xml:"numbers,attr"`
+ Count string `xml:"count,attr"`
+ } `xml:"pattern"`
+ } `xml:"percentFormat"`
+ } `xml:"percentFormatLength"`
+ } `xml:"percentFormats"`
+ CurrencyFormats []*struct {
+ Common
+ NumberSystem string `xml:"numberSystem,attr"`
+ CurrencySpacing []*struct {
+ Common
+ BeforeCurrency []*struct {
+ Common
+ CurrencyMatch []*Common `xml:"currencyMatch"`
+ SurroundingMatch []*Common `xml:"surroundingMatch"`
+ InsertBetween []*Common `xml:"insertBetween"`
+ } `xml:"beforeCurrency"`
+ AfterCurrency []*struct {
+ Common
+ CurrencyMatch []*Common `xml:"currencyMatch"`
+ SurroundingMatch []*Common `xml:"surroundingMatch"`
+ InsertBetween []*Common `xml:"insertBetween"`
+ } `xml:"afterCurrency"`
+ } `xml:"currencySpacing"`
+ CurrencyFormatLength []*struct {
+ Common
+ CurrencyFormat []*struct {
+ Common
+ Pattern []*struct {
+ Common
+ Numbers string `xml:"numbers,attr"`
+ Count string `xml:"count,attr"`
+ } `xml:"pattern"`
+ } `xml:"currencyFormat"`
+ } `xml:"currencyFormatLength"`
+ UnitPattern []*struct {
+ Common
+ Count string `xml:"count,attr"`
+ } `xml:"unitPattern"`
+ } `xml:"currencyFormats"`
+ Currencies *struct {
+ Common
+ Currency []*struct {
+ Common
+ Pattern []*struct {
+ Common
+ Numbers string `xml:"numbers,attr"`
+ Count string `xml:"count,attr"`
+ } `xml:"pattern"`
+ DisplayName []*struct {
+ Common
+ Count string `xml:"count,attr"`
+ } `xml:"displayName"`
+ Symbol []*Common `xml:"symbol"`
+ Decimal []*struct {
+ Common
+ NumberSystem string `xml:"numberSystem,attr"`
+ } `xml:"decimal"`
+ Group []*struct {
+ Common
+ NumberSystem string `xml:"numberSystem,attr"`
+ } `xml:"group"`
+ } `xml:"currency"`
+ } `xml:"currencies"`
+ MiscPatterns []*struct {
+ Common
+ NumberSystem string `xml:"numberSystem,attr"`
+ Pattern []*struct {
+ Common
+ Numbers string `xml:"numbers,attr"`
+ Count string `xml:"count,attr"`
+ } `xml:"pattern"`
+ } `xml:"miscPatterns"`
+ MinimalPairs []*struct {
+ Common
+ PluralMinimalPairs []*struct {
+ Common
+ Count string `xml:"count,attr"`
+ } `xml:"pluralMinimalPairs"`
+ OrdinalMinimalPairs []*struct {
+ Common
+ Ordinal string `xml:"ordinal,attr"`
+ } `xml:"ordinalMinimalPairs"`
+ } `xml:"minimalPairs"`
+}
+
+// Version is the version of CLDR from which the XML definitions are generated.
+const Version = "32"
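+
+// A minimal sketch of the encoding/xml struct-tag pattern the generated types
+// above rely on. The element and attribute names in this miniature are
+// illustrative only, not taken from CLDR:
+//
+//    package main
+//
+//    import (
+//        "encoding/xml"
+//        "fmt"
+//    )
+//
+//    type ldml struct {
+//        Identity struct {
+//            Language struct {
+//                Type string `xml:"type,attr"`
+//            } `xml:"language"`
+//        } `xml:"identity"`
+//    }
+//
+//    func main() {
+//        src := `<ldml><identity><language type="nl"/></identity></ldml>`
+//        var doc ldml
+//        if err := xml.Unmarshal([]byte(src), &doc); err != nil {
+//            panic(err)
+//        }
+//        fmt.Println(doc.Identity.Language.Type) // prints: nl
+//    }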
diff --git a/vendor/golang.org/x/tools/LICENSE b/vendor/golang.org/x/tools/LICENSE
new file mode 100644
index 0000000..2a7cf70
--- /dev/null
+++ b/vendor/golang.org/x/tools/LICENSE
@@ -0,0 +1,27 @@
+Copyright 2009 The Go Authors.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+ * Redistributions of source code must retain the above copyright
+notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+copyright notice, this list of conditions and the following disclaimer
+in the documentation and/or other materials provided with the
+distribution.
+ * Neither the name of Google LLC nor the names of its
+contributors may be used to endorse or promote products derived from
+this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/vendor/golang.org/x/tools/PATENTS b/vendor/golang.org/x/tools/PATENTS
new file mode 100644
index 0000000..7330990
--- /dev/null
+++ b/vendor/golang.org/x/tools/PATENTS
@@ -0,0 +1,22 @@
+Additional IP Rights Grant (Patents)
+
+"This implementation" means the copyrightable works distributed by
+Google as part of the Go project.
+
+Google hereby grants to You a perpetual, worldwide, non-exclusive,
+no-charge, royalty-free, irrevocable (except as stated in this section)
+patent license to make, have made, use, offer to sell, sell, import,
+transfer and otherwise run, modify and propagate the contents of this
+implementation of Go, where such license applies only to those patent
+claims, both currently owned or controlled by Google and acquired in
+the future, licensable by Google that are necessarily infringed by this
+implementation of Go. This grant does not include claims that would be
+infringed only as a consequence of further modification of this
+implementation. If you or your agent or exclusive licensee institute or
+order or agree to the institution of patent litigation against any
+entity (including a cross-claim or counterclaim in a lawsuit) alleging
+that this implementation of Go or any code incorporated within this
+implementation of Go constitutes direct or contributory patent
+infringement, or inducement of patent infringement, then any patent
+rights granted to you under this License for this implementation of Go
+shall terminate as of the date such litigation is filed.
diff --git a/vendor/golang.org/x/tools/go/ast/astutil/enclosing.go b/vendor/golang.org/x/tools/go/ast/astutil/enclosing.go
new file mode 100644
index 0000000..6e34df4
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/ast/astutil/enclosing.go
@@ -0,0 +1,654 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package astutil
+
+// This file defines utilities for working with source positions.
+
+import (
+ "fmt"
+ "go/ast"
+ "go/token"
+ "sort"
+)
+
+// PathEnclosingInterval returns the node that encloses the source
+// interval [start, end), and all its ancestors up to the AST root.
+//
+// The definition of "enclosing" used by this function considers
+// additional whitespace abutting a node to be enclosed by it.
+// In this example:
+//
+// z := x + y // add them
+// <-A->
+// <----B----->
+//
+// the ast.BinaryExpr(+) node is considered to enclose interval B
+// even though its [Pos()..End()) is actually only interval A.
+// This behaviour makes user interfaces more tolerant of imperfect
+// input.
+//
+// This function treats tokens as nodes, though they are not included
+// in the result. e.g. PathEnclosingInterval("+") returns the
+// enclosing ast.BinaryExpr("x + y").
+//
+// If start==end, the 1-char interval following start is used instead.
+//
+// The 'exact' result is true if the interval contains only path[0]
+// and perhaps some adjacent whitespace. It is false if the interval
+// overlaps multiple children of path[0], or if it contains only
+// interior whitespace of path[0].
+// In this example:
+//
+// z := x + y // add them
+// <--C--> <---E-->
+// ^
+// D
+//
+// intervals C, D and E are inexact. C is contained by the
+// z-assignment statement, because it spans three of its children (:=,
+// x, +). So too is the 1-char interval D, because it contains only
+// interior whitespace of the assignment. E is considered interior
+// whitespace of the BlockStmt containing the assignment.
+//
+// The resulting path is never empty; it always contains at least the
+// 'root' *ast.File. Ideally PathEnclosingInterval would reject
+// intervals that lie wholly or partially outside the range of the
+// file, but unfortunately ast.File records only the token.Pos of
+// the 'package' keyword, but not of the start of the file itself.
+func PathEnclosingInterval(root *ast.File, start, end token.Pos) (path []ast.Node, exact bool) {
+ // fmt.Printf("EnclosingInterval %d %d\n", start, end) // debugging
+
+ // Precondition: node.[Pos..End) and adjoining whitespace contain [start, end).
+ var visit func(node ast.Node) bool
+ visit = func(node ast.Node) bool {
+ path = append(path, node)
+
+ nodePos := node.Pos()
+ nodeEnd := node.End()
+
+ // fmt.Printf("visit(%T, %d, %d)\n", node, nodePos, nodeEnd) // debugging
+
+ // Intersect [start, end) with interval of node.
+ if start < nodePos {
+ start = nodePos
+ }
+ if end > nodeEnd {
+ end = nodeEnd
+ }
+
+ // Find sole child that contains [start, end).
+ children := childrenOf(node)
+ l := len(children)
+ for i, child := range children {
+ // [childPos, childEnd) is unaugmented interval of child.
+ childPos := child.Pos()
+ childEnd := child.End()
+
+ // [augPos, augEnd) is whitespace-augmented interval of child.
+ augPos := childPos
+ augEnd := childEnd
+ if i > 0 {
+ augPos = children[i-1].End() // start of preceding whitespace
+ }
+ if i < l-1 {
+ nextChildPos := children[i+1].Pos()
+ // Does [start, end) lie between child and next child?
+ if start >= augEnd && end <= nextChildPos {
+ return false // inexact match
+ }
+ augEnd = nextChildPos // end of following whitespace
+ }
+
+ // fmt.Printf("\tchild %d: [%d..%d)\tcontains interval [%d..%d)?\n",
+ // i, augPos, augEnd, start, end) // debugging
+
+ // Does augmented child strictly contain [start, end)?
+ if augPos <= start && end <= augEnd {
+ if is[tokenNode](child) {
+ return true
+ }
+
+ // childrenOf elides the FuncType node beneath FuncDecl.
+ // Add it back here for TypeParams, Params, Results
+ // (all FieldLists). But we don't add it back for the "func" token
+ // even though it is in the tree at FuncDecl.Type.Func.
+ if decl, ok := node.(*ast.FuncDecl); ok {
+ if fields, ok := child.(*ast.FieldList); ok && fields != decl.Recv {
+ path = append(path, decl.Type)
+ }
+ }
+
+ return visit(child)
+ }
+
+ // Does [start, end) overlap multiple children?
+ // i.e. left-augmented child contains start
+ // but LR-augmented child does not contain end.
+ if start < childEnd && end > augEnd {
+ break
+ }
+ }
+
+ // No single child contained [start, end),
+ // so node is the result. Is it exact?
+
+ // (It's tempting to put this condition before the
+ // child loop, but it gives the wrong result in the
+ // case where a node (e.g. ExprStmt) and its sole
+ // child have equal intervals.)
+ if start == nodePos && end == nodeEnd {
+ return true // exact match
+ }
+
+ return false // inexact: overlaps multiple children
+ }
+
+ // Ensure [start,end) is nondecreasing.
+ if start > end {
+ start, end = end, start
+ }
+
+ if start < root.End() && end > root.Pos() {
+ if start == end {
+ end = start + 1 // empty interval => interval of size 1
+ }
+ exact = visit(root)
+
+ // Reverse the path:
+ for i, l := 0, len(path); i < l/2; i++ {
+ path[i], path[l-1-i] = path[l-1-i], path[i]
+ }
+ } else {
+ // Selection lies within whitespace preceding the
+ // first (or following the last) declaration in the file.
+ // The result nonetheless always includes the ast.File.
+ path = append(path, root)
+ }
+
+ return
+}
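+
+// A minimal usage sketch for PathEnclosingInterval, assuming "go/parser" and
+// "strings" are also imported; the file name and source text are illustrative:
+//
+//    src := "package p\nvar z = x + y\n"
+//    fset := token.NewFileSet()
+//    f, err := parser.ParseFile(fset, "x.go", src, 0)
+//    if err != nil {
+//        panic(err)
+//    }
+//    // Convert the byte offset of the "+" token into a token.Pos for this file.
+//    pos := fset.File(f.Pos()).Pos(strings.Index(src, "+"))
+//    path, exact := PathEnclosingInterval(f, pos, pos+1)
+//    for _, n := range path {
+//        fmt.Println(NodeDescription(n)) // innermost first, ending with "source file"
+//    }
+//    _ = exact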
+
+// tokenNode is a dummy implementation of ast.Node for a single token.
+// Such nodes are used transiently by PathEnclosingInterval but never escape
+// this package.
+type tokenNode struct {
+ pos token.Pos
+ end token.Pos
+}
+
+func (n tokenNode) Pos() token.Pos {
+ return n.pos
+}
+
+func (n tokenNode) End() token.Pos {
+ return n.end
+}
+
+func tok(pos token.Pos, len int) ast.Node {
+ return tokenNode{pos, pos + token.Pos(len)}
+}
+
+// childrenOf returns the direct non-nil children of ast.Node n.
+// It may include fake ast.Node implementations for bare tokens.
+// It is not safe to call (e.g.) ast.Walk on such nodes.
+func childrenOf(n ast.Node) []ast.Node {
+ var children []ast.Node
+
+ // First add nodes for all true subtrees.
+ ast.Inspect(n, func(node ast.Node) bool {
+ if node == n { // push n
+ return true // recur
+ }
+ if node != nil { // push child
+ children = append(children, node)
+ }
+ return false // no recursion
+ })
+
+ // Then add fake Nodes for bare tokens.
+ switch n := n.(type) {
+ case *ast.ArrayType:
+ children = append(children,
+ tok(n.Lbrack, len("[")),
+ tok(n.Elt.End(), len("]")))
+
+ case *ast.AssignStmt:
+ children = append(children,
+ tok(n.TokPos, len(n.Tok.String())))
+
+ case *ast.BasicLit:
+ children = append(children,
+ tok(n.ValuePos, len(n.Value)))
+
+ case *ast.BinaryExpr:
+ children = append(children, tok(n.OpPos, len(n.Op.String())))
+
+ case *ast.BlockStmt:
+ children = append(children,
+ tok(n.Lbrace, len("{")),
+ tok(n.Rbrace, len("}")))
+
+ case *ast.BranchStmt:
+ children = append(children,
+ tok(n.TokPos, len(n.Tok.String())))
+
+ case *ast.CallExpr:
+ children = append(children,
+ tok(n.Lparen, len("(")),
+ tok(n.Rparen, len(")")))
+ if n.Ellipsis != 0 {
+ children = append(children, tok(n.Ellipsis, len("...")))
+ }
+
+ case *ast.CaseClause:
+ if n.List == nil {
+ children = append(children,
+ tok(n.Case, len("default")))
+ } else {
+ children = append(children,
+ tok(n.Case, len("case")))
+ }
+ children = append(children, tok(n.Colon, len(":")))
+
+ case *ast.ChanType:
+ switch n.Dir {
+ case ast.RECV:
+ children = append(children, tok(n.Begin, len("<-chan")))
+ case ast.SEND:
+ children = append(children, tok(n.Begin, len("chan<-")))
+ case ast.RECV | ast.SEND:
+ children = append(children, tok(n.Begin, len("chan")))
+ }
+
+ case *ast.CommClause:
+ if n.Comm == nil {
+ children = append(children,
+ tok(n.Case, len("default")))
+ } else {
+ children = append(children,
+ tok(n.Case, len("case")))
+ }
+ children = append(children, tok(n.Colon, len(":")))
+
+ case *ast.Comment:
+ // nop
+
+ case *ast.CommentGroup:
+ // nop
+
+ case *ast.CompositeLit:
+ children = append(children,
+ tok(n.Lbrace, len("{")),
+ tok(n.Rbrace, len("{")))
+
+ case *ast.DeclStmt:
+ // nop
+
+ case *ast.DeferStmt:
+ children = append(children,
+ tok(n.Defer, len("defer")))
+
+ case *ast.Ellipsis:
+ children = append(children,
+ tok(n.Ellipsis, len("...")))
+
+ case *ast.EmptyStmt:
+ // nop
+
+ case *ast.ExprStmt:
+ // nop
+
+ case *ast.Field:
+ // TODO(adonovan): Field.{Doc,Comment,Tag}?
+
+ case *ast.FieldList:
+ children = append(children,
+ tok(n.Opening, len("(")), // or len("[")
+ tok(n.Closing, len(")"))) // or len("]")
+
+ case *ast.File:
+ // TODO test: Doc
+ children = append(children,
+ tok(n.Package, len("package")))
+
+ case *ast.ForStmt:
+ children = append(children,
+ tok(n.For, len("for")))
+
+ case *ast.FuncDecl:
+ // TODO(adonovan): FuncDecl.Comment?
+
+ // Uniquely, FuncDecl breaks the invariant that
+ // preorder traversal yields tokens in lexical order:
+ // in fact, FuncDecl.Recv precedes FuncDecl.Type.Func.
+ //
+ // As a workaround, we inline the case for FuncType
+ // here and order things correctly.
+ // We also need to insert the elided FuncType just
+ // before the 'visit' recursion.
+ //
+ children = nil // discard ast.Walk(FuncDecl) info subtrees
+ children = append(children, tok(n.Type.Func, len("func")))
+ if n.Recv != nil {
+ children = append(children, n.Recv)
+ }
+ children = append(children, n.Name)
+ if tparams := n.Type.TypeParams; tparams != nil {
+ children = append(children, tparams)
+ }
+ if n.Type.Params != nil {
+ children = append(children, n.Type.Params)
+ }
+ if n.Type.Results != nil {
+ children = append(children, n.Type.Results)
+ }
+ if n.Body != nil {
+ children = append(children, n.Body)
+ }
+
+ case *ast.FuncLit:
+ // nop
+
+ case *ast.FuncType:
+ if n.Func != 0 {
+ children = append(children,
+ tok(n.Func, len("func")))
+ }
+
+ case *ast.GenDecl:
+ children = append(children,
+ tok(n.TokPos, len(n.Tok.String())))
+ if n.Lparen != 0 {
+ children = append(children,
+ tok(n.Lparen, len("(")),
+ tok(n.Rparen, len(")")))
+ }
+
+ case *ast.GoStmt:
+ children = append(children,
+ tok(n.Go, len("go")))
+
+ case *ast.Ident:
+ children = append(children,
+ tok(n.NamePos, len(n.Name)))
+
+ case *ast.IfStmt:
+ children = append(children,
+ tok(n.If, len("if")))
+
+ case *ast.ImportSpec:
+ // TODO(adonovan): ImportSpec.{Doc,EndPos}?
+
+ case *ast.IncDecStmt:
+ children = append(children,
+ tok(n.TokPos, len(n.Tok.String())))
+
+ case *ast.IndexExpr:
+ children = append(children,
+ tok(n.Lbrack, len("[")),
+ tok(n.Rbrack, len("]")))
+
+ case *ast.IndexListExpr:
+ children = append(children,
+ tok(n.Lbrack, len("[")),
+ tok(n.Rbrack, len("]")))
+
+ case *ast.InterfaceType:
+ children = append(children,
+ tok(n.Interface, len("interface")))
+
+ case *ast.KeyValueExpr:
+ children = append(children,
+ tok(n.Colon, len(":")))
+
+ case *ast.LabeledStmt:
+ children = append(children,
+ tok(n.Colon, len(":")))
+
+ case *ast.MapType:
+ children = append(children,
+ tok(n.Map, len("map")))
+
+ case *ast.ParenExpr:
+ children = append(children,
+ tok(n.Lparen, len("(")),
+ tok(n.Rparen, len(")")))
+
+ case *ast.RangeStmt:
+ children = append(children,
+ tok(n.For, len("for")),
+ tok(n.TokPos, len(n.Tok.String())))
+
+ case *ast.ReturnStmt:
+ children = append(children,
+ tok(n.Return, len("return")))
+
+ case *ast.SelectStmt:
+ children = append(children,
+ tok(n.Select, len("select")))
+
+ case *ast.SelectorExpr:
+ // nop
+
+ case *ast.SendStmt:
+ children = append(children,
+ tok(n.Arrow, len("<-")))
+
+ case *ast.SliceExpr:
+ children = append(children,
+ tok(n.Lbrack, len("[")),
+ tok(n.Rbrack, len("]")))
+
+ case *ast.StarExpr:
+ children = append(children, tok(n.Star, len("*")))
+
+ case *ast.StructType:
+ children = append(children, tok(n.Struct, len("struct")))
+
+ case *ast.SwitchStmt:
+ children = append(children, tok(n.Switch, len("switch")))
+
+ case *ast.TypeAssertExpr:
+ children = append(children,
+ tok(n.Lparen-1, len(".")),
+ tok(n.Lparen, len("(")),
+ tok(n.Rparen, len(")")))
+
+ case *ast.TypeSpec:
+ // TODO(adonovan): TypeSpec.{Doc,Comment}?
+
+ case *ast.TypeSwitchStmt:
+ children = append(children, tok(n.Switch, len("switch")))
+
+ case *ast.UnaryExpr:
+ children = append(children, tok(n.OpPos, len(n.Op.String())))
+
+ case *ast.ValueSpec:
+ // TODO(adonovan): ValueSpec.{Doc,Comment}?
+
+ case *ast.BadDecl, *ast.BadExpr, *ast.BadStmt:
+ // nop
+ }
+
+ // TODO(adonovan): opt: merge the logic of ast.Inspect() into
+ // the switch above so we can make interleaved callbacks for
+ // both Nodes and Tokens in the right order and avoid the need
+ // to sort.
+ sort.Sort(byPos(children))
+
+ return children
+}
+
+type byPos []ast.Node
+
+func (sl byPos) Len() int {
+ return len(sl)
+}
+func (sl byPos) Less(i, j int) bool {
+ return sl[i].Pos() < sl[j].Pos()
+}
+func (sl byPos) Swap(i, j int) {
+ sl[i], sl[j] = sl[j], sl[i]
+}
+
+// NodeDescription returns a description of the concrete type of n suitable
+// for a user interface.
+//
+// TODO(adonovan): in some cases (e.g. Field, FieldList, Ident,
+// StarExpr) we could be much more specific given the path to the AST
+// root. Perhaps we should do that.
+func NodeDescription(n ast.Node) string {
+ switch n := n.(type) {
+ case *ast.ArrayType:
+ return "array type"
+ case *ast.AssignStmt:
+ return "assignment"
+ case *ast.BadDecl:
+ return "bad declaration"
+ case *ast.BadExpr:
+ return "bad expression"
+ case *ast.BadStmt:
+ return "bad statement"
+ case *ast.BasicLit:
+ return "basic literal"
+ case *ast.BinaryExpr:
+ return fmt.Sprintf("binary %s operation", n.Op)
+ case *ast.BlockStmt:
+ return "block"
+ case *ast.BranchStmt:
+ switch n.Tok {
+ case token.BREAK:
+ return "break statement"
+ case token.CONTINUE:
+ return "continue statement"
+ case token.GOTO:
+ return "goto statement"
+ case token.FALLTHROUGH:
+ return "fall-through statement"
+ }
+ case *ast.CallExpr:
+ if len(n.Args) == 1 && !n.Ellipsis.IsValid() {
+ return "function call (or conversion)"
+ }
+ return "function call"
+ case *ast.CaseClause:
+ return "case clause"
+ case *ast.ChanType:
+ return "channel type"
+ case *ast.CommClause:
+ return "communication clause"
+ case *ast.Comment:
+ return "comment"
+ case *ast.CommentGroup:
+ return "comment group"
+ case *ast.CompositeLit:
+ return "composite literal"
+ case *ast.DeclStmt:
+ return NodeDescription(n.Decl) + " statement"
+ case *ast.DeferStmt:
+ return "defer statement"
+ case *ast.Ellipsis:
+ return "ellipsis"
+ case *ast.EmptyStmt:
+ return "empty statement"
+ case *ast.ExprStmt:
+ return "expression statement"
+ case *ast.Field:
+ // Can be any of these:
+ // struct {x, y int} -- struct field(s)
+ // struct {T} -- anon struct field
+ // interface {I} -- interface embedding
+ // interface {f()} -- interface method
+ // func (A) func(B) C -- receiver, param(s), result(s)
+ return "field/method/parameter"
+ case *ast.FieldList:
+ return "field/method/parameter list"
+ case *ast.File:
+ return "source file"
+ case *ast.ForStmt:
+ return "for loop"
+ case *ast.FuncDecl:
+ return "function declaration"
+ case *ast.FuncLit:
+ return "function literal"
+ case *ast.FuncType:
+ return "function type"
+ case *ast.GenDecl:
+ switch n.Tok {
+ case token.IMPORT:
+ return "import declaration"
+ case token.CONST:
+ return "constant declaration"
+ case token.TYPE:
+ return "type declaration"
+ case token.VAR:
+ return "variable declaration"
+ }
+ case *ast.GoStmt:
+ return "go statement"
+ case *ast.Ident:
+ return "identifier"
+ case *ast.IfStmt:
+ return "if statement"
+ case *ast.ImportSpec:
+ return "import specification"
+ case *ast.IncDecStmt:
+ if n.Tok == token.INC {
+ return "increment statement"
+ }
+ return "decrement statement"
+ case *ast.IndexExpr:
+ return "index expression"
+ case *ast.IndexListExpr:
+ return "index list expression"
+ case *ast.InterfaceType:
+ return "interface type"
+ case *ast.KeyValueExpr:
+ return "key/value association"
+ case *ast.LabeledStmt:
+ return "statement label"
+ case *ast.MapType:
+ return "map type"
+ case *ast.Package:
+ return "package"
+ case *ast.ParenExpr:
+ return "parenthesized " + NodeDescription(n.X)
+ case *ast.RangeStmt:
+ return "range loop"
+ case *ast.ReturnStmt:
+ return "return statement"
+ case *ast.SelectStmt:
+ return "select statement"
+ case *ast.SelectorExpr:
+ return "selector"
+ case *ast.SendStmt:
+ return "channel send"
+ case *ast.SliceExpr:
+ return "slice expression"
+ case *ast.StarExpr:
+ return "*-operation" // load/store expr or pointer type
+ case *ast.StructType:
+ return "struct type"
+ case *ast.SwitchStmt:
+ return "switch statement"
+ case *ast.TypeAssertExpr:
+ return "type assertion"
+ case *ast.TypeSpec:
+ return "type specification"
+ case *ast.TypeSwitchStmt:
+ return "type switch"
+ case *ast.UnaryExpr:
+ return fmt.Sprintf("unary %s operation", n.Op)
+ case *ast.ValueSpec:
+ return "value specification"
+
+ }
+ panic(fmt.Sprintf("unexpected node type: %T", n))
+}
+
+func is[T any](x any) bool {
+ _, ok := x.(T)
+ return ok
+}
diff --git a/vendor/golang.org/x/tools/go/ast/astutil/imports.go b/vendor/golang.org/x/tools/go/ast/astutil/imports.go
new file mode 100644
index 0000000..18d1adb
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/ast/astutil/imports.go
@@ -0,0 +1,485 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package astutil contains common utilities for working with the Go AST.
+package astutil // import "golang.org/x/tools/go/ast/astutil"
+
+import (
+ "fmt"
+ "go/ast"
+ "go/token"
+ "strconv"
+ "strings"
+)
+
+// AddImport adds the import path to the file f, if absent.
+func AddImport(fset *token.FileSet, f *ast.File, path string) (added bool) {
+ return AddNamedImport(fset, f, "", path)
+}
+
+// AddNamedImport adds the import with the given name and path to the file f, if absent.
+// If name is not empty, it is used to rename the import.
+//
+// For example, calling
+//
+// AddNamedImport(fset, f, "pathpkg", "path")
+//
+// adds
+//
+// import pathpkg "path"
+func AddNamedImport(fset *token.FileSet, f *ast.File, name, path string) (added bool) {
+ if imports(f, name, path) {
+ return false
+ }
+
+ newImport := &ast.ImportSpec{
+ Path: &ast.BasicLit{
+ Kind: token.STRING,
+ Value: strconv.Quote(path),
+ },
+ }
+ if name != "" {
+ newImport.Name = &ast.Ident{Name: name}
+ }
+
+ // Find an import decl to add to.
+ // The goal is to find an existing import
+ // whose import path has the longest shared
+ // prefix with path.
+ var (
+ bestMatch = -1 // length of longest shared prefix
+ lastImport = -1 // index in f.Decls of the file's final import decl
+ impDecl *ast.GenDecl // import decl containing the best match
+ impIndex = -1 // spec index in impDecl containing the best match
+
+ isThirdPartyPath = isThirdParty(path)
+ )
+ for i, decl := range f.Decls {
+ gen, ok := decl.(*ast.GenDecl)
+ if ok && gen.Tok == token.IMPORT {
+ lastImport = i
+ // Do not add to import "C", to avoid disrupting the
+ // association with its doc comment, breaking cgo.
+ if declImports(gen, "C") {
+ continue
+ }
+
+ // Match an empty import decl if that's all that is available.
+ if len(gen.Specs) == 0 && bestMatch == -1 {
+ impDecl = gen
+ }
+
+ // Compute longest shared prefix with imports in this group and find best
+ // matched import spec.
+ // 1. Always prefer import spec with longest shared prefix.
+ // 2. While match length is 0,
+ // - for stdlib package: prefer first import spec.
+ // - for third party package: prefer first third party import spec.
+ // We cannot use last import spec as best match for third party package
+ // because grouped imports are usually placed last by goimports -local
+ // flag.
+ // See issue #19190.
+ seenAnyThirdParty := false
+ for j, spec := range gen.Specs {
+ impspec := spec.(*ast.ImportSpec)
+ p := importPath(impspec)
+ n := matchLen(p, path)
+ if n > bestMatch || (bestMatch == 0 && !seenAnyThirdParty && isThirdPartyPath) {
+ bestMatch = n
+ impDecl = gen
+ impIndex = j
+ }
+ seenAnyThirdParty = seenAnyThirdParty || isThirdParty(p)
+ }
+ }
+ }
+
+ // If no import decl found, add one after the last import.
+ if impDecl == nil {
+ impDecl = &ast.GenDecl{
+ Tok: token.IMPORT,
+ }
+ if lastImport >= 0 {
+ impDecl.TokPos = f.Decls[lastImport].End()
+ } else {
+ // There are no existing imports.
+ // Our new import, preceded by a blank line, goes after the package declaration
+ // and after the comment, if any, that starts on the same line as the
+ // package declaration.
+ impDecl.TokPos = f.Package
+
+ file := fset.File(f.Package)
+ pkgLine := file.Line(f.Package)
+ for _, c := range f.Comments {
+ if file.Line(c.Pos()) > pkgLine {
+ break
+ }
+ // +2 for a blank line
+ impDecl.TokPos = c.End() + 2
+ }
+ }
+ f.Decls = append(f.Decls, nil)
+ copy(f.Decls[lastImport+2:], f.Decls[lastImport+1:])
+ f.Decls[lastImport+1] = impDecl
+ }
+
+ // Insert new import at insertAt.
+ insertAt := 0
+ if impIndex >= 0 {
+ // insert after the found import
+ insertAt = impIndex + 1
+ }
+ impDecl.Specs = append(impDecl.Specs, nil)
+ copy(impDecl.Specs[insertAt+1:], impDecl.Specs[insertAt:])
+ impDecl.Specs[insertAt] = newImport
+ pos := impDecl.Pos()
+ if insertAt > 0 {
+ // If there is a comment after an existing import, preserve the comment
+ // position by adding the new import after the comment.
+ if spec, ok := impDecl.Specs[insertAt-1].(*ast.ImportSpec); ok && spec.Comment != nil {
+ pos = spec.Comment.End()
+ } else {
+ // Assign same position as the previous import,
+ // so that the sorter sees it as being in the same block.
+ pos = impDecl.Specs[insertAt-1].Pos()
+ }
+ }
+ if newImport.Name != nil {
+ newImport.Name.NamePos = pos
+ }
+ newImport.Path.ValuePos = pos
+ newImport.EndPos = pos
+
+ // Clean up parens. impDecl contains at least one spec.
+ if len(impDecl.Specs) == 1 {
+ // Remove unneeded parens.
+ impDecl.Lparen = token.NoPos
+ } else if !impDecl.Lparen.IsValid() {
+ // impDecl needs parens added.
+ impDecl.Lparen = impDecl.Specs[0].Pos()
+ }
+
+ f.Imports = append(f.Imports, newImport)
+
+ if len(f.Decls) <= 1 {
+ return true
+ }
+
+ // Merge all the import declarations into the first one.
+ var first *ast.GenDecl
+ for i := 0; i < len(f.Decls); i++ {
+ decl := f.Decls[i]
+ gen, ok := decl.(*ast.GenDecl)
+ if !ok || gen.Tok != token.IMPORT || declImports(gen, "C") {
+ continue
+ }
+ if first == nil {
+ first = gen
+ continue // Don't touch the first one.
+ }
+ // We now know there is more than one package in this import
+ // declaration. Ensure that it ends up parenthesized.
+ first.Lparen = first.Pos()
+ // Move the imports of the other import declaration to the first one.
+ for _, spec := range gen.Specs {
+ spec.(*ast.ImportSpec).Path.ValuePos = first.Pos()
+ first.Specs = append(first.Specs, spec)
+ }
+ f.Decls = append(f.Decls[:i], f.Decls[i+1:]...)
+ i--
+ }
+
+ return true
+}
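+
+// A minimal usage sketch for AddNamedImport, assuming "go/parser", "go/format",
+// "bytes", and "os" are also imported; it mirrors the pathpkg/"path" example in
+// the doc comment above:
+//
+//    fset := token.NewFileSet()
+//    src := "package p\n\nimport \"fmt\"\n\nvar _ = fmt.Sprint\n"
+//    f, err := parser.ParseFile(fset, "x.go", src, parser.ParseComments)
+//    if err != nil {
+//        panic(err)
+//    }
+//    if AddNamedImport(fset, f, "pathpkg", "path") {
+//        var buf bytes.Buffer
+//        if err := format.Node(&buf, fset, f); err == nil {
+//            os.Stdout.Write(buf.Bytes()) // output now includes: import pathpkg "path"
+//        }
+//    }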
+
+func isThirdParty(importPath string) bool {
+ // Third party package import path usually contains "." (".com", ".org", ...)
+ // This logic is taken from golang.org/x/tools/imports package.
+ return strings.Contains(importPath, ".")
+}
+
+// DeleteImport deletes the import path from the file f, if present.
+// If there are duplicate import declarations, all matching ones are deleted.
+func DeleteImport(fset *token.FileSet, f *ast.File, path string) (deleted bool) {
+ return DeleteNamedImport(fset, f, "", path)
+}
+
+// DeleteNamedImport deletes the import with the given name and path from the file f, if present.
+// If there are duplicate import declarations, all matching ones are deleted.
+func DeleteNamedImport(fset *token.FileSet, f *ast.File, name, path string) (deleted bool) {
+ var delspecs []*ast.ImportSpec
+ var delcomments []*ast.CommentGroup
+
+ // Find the import nodes that import path, if any.
+ for i := 0; i < len(f.Decls); i++ {
+ decl := f.Decls[i]
+ gen, ok := decl.(*ast.GenDecl)
+ if !ok || gen.Tok != token.IMPORT {
+ continue
+ }
+ for j := 0; j < len(gen.Specs); j++ {
+ spec := gen.Specs[j]
+ impspec := spec.(*ast.ImportSpec)
+ if importName(impspec) != name || importPath(impspec) != path {
+ continue
+ }
+
+ // We found an import spec that imports path.
+ // Delete it.
+ delspecs = append(delspecs, impspec)
+ deleted = true
+ copy(gen.Specs[j:], gen.Specs[j+1:])
+ gen.Specs = gen.Specs[:len(gen.Specs)-1]
+
+ // If this was the last import spec in this decl,
+ // delete the decl, too.
+ if len(gen.Specs) == 0 {
+ copy(f.Decls[i:], f.Decls[i+1:])
+ f.Decls = f.Decls[:len(f.Decls)-1]
+ i--
+ break
+ } else if len(gen.Specs) == 1 {
+ if impspec.Doc != nil {
+ delcomments = append(delcomments, impspec.Doc)
+ }
+ if impspec.Comment != nil {
+ delcomments = append(delcomments, impspec.Comment)
+ }
+ for _, cg := range f.Comments {
+ // Found comment on the same line as the import spec.
+ if cg.End() < impspec.Pos() && fset.Position(cg.End()).Line == fset.Position(impspec.Pos()).Line {
+ delcomments = append(delcomments, cg)
+ break
+ }
+ }
+
+ spec := gen.Specs[0].(*ast.ImportSpec)
+
+ // Move the documentation right after the import decl.
+ if spec.Doc != nil {
+ for fset.Position(gen.TokPos).Line+1 < fset.Position(spec.Doc.Pos()).Line {
+ fset.File(gen.TokPos).MergeLine(fset.Position(gen.TokPos).Line)
+ }
+ }
+ for _, cg := range f.Comments {
+ if cg.End() < spec.Pos() && fset.Position(cg.End()).Line == fset.Position(spec.Pos()).Line {
+ for fset.Position(gen.TokPos).Line+1 < fset.Position(spec.Pos()).Line {
+ fset.File(gen.TokPos).MergeLine(fset.Position(gen.TokPos).Line)
+ }
+ break
+ }
+ }
+ }
+ if j > 0 {
+ lastImpspec := gen.Specs[j-1].(*ast.ImportSpec)
+ lastLine := fset.PositionFor(lastImpspec.Path.ValuePos, false).Line
+ line := fset.PositionFor(impspec.Path.ValuePos, false).Line
+
+ // We deleted an entry but now there may be
+ // a blank line-sized hole where the import was.
+ if line-lastLine > 1 || !gen.Rparen.IsValid() {
+ // There was a blank line immediately preceding the deleted import,
+ // so there's no need to close the hole. The right parenthesis is
+ // invalid after AddImport to an import statement without parenthesis.
+ // Do nothing.
+ } else if line != fset.File(gen.Rparen).LineCount() {
+ // There was no blank line. Close the hole.
+ fset.File(gen.Rparen).MergeLine(line)
+ }
+ }
+ j--
+ }
+ }
+
+ // Delete imports from f.Imports.
+ for i := 0; i < len(f.Imports); i++ {
+ imp := f.Imports[i]
+ for j, del := range delspecs {
+ if imp == del {
+ copy(f.Imports[i:], f.Imports[i+1:])
+ f.Imports = f.Imports[:len(f.Imports)-1]
+ copy(delspecs[j:], delspecs[j+1:])
+ delspecs = delspecs[:len(delspecs)-1]
+ i--
+ break
+ }
+ }
+ }
+
+ // Delete comments from f.Comments.
+ for i := 0; i < len(f.Comments); i++ {
+ cg := f.Comments[i]
+ for j, del := range delcomments {
+ if cg == del {
+ copy(f.Comments[i:], f.Comments[i+1:])
+ f.Comments = f.Comments[:len(f.Comments)-1]
+ copy(delcomments[j:], delcomments[j+1:])
+ delcomments = delcomments[:len(delcomments)-1]
+ i--
+ break
+ }
+ }
+ }
+
+ if len(delspecs) > 0 {
+ panic(fmt.Sprintf("deleted specs from Decls but not Imports: %v", delspecs))
+ }
+
+ return
+}
+
+// RewriteImport rewrites any import of path oldPath to path newPath.
+func RewriteImport(fset *token.FileSet, f *ast.File, oldPath, newPath string) (rewrote bool) {
+ for _, imp := range f.Imports {
+ if importPath(imp) == oldPath {
+ rewrote = true
+ // record old End, because the default is to compute
+ // it using the length of imp.Path.Value.
+ imp.EndPos = imp.End()
+ imp.Path.Value = strconv.Quote(newPath)
+ }
+ }
+ return
+}
+
+// UsesImport reports whether a given import is used.
+func UsesImport(f *ast.File, path string) (used bool) {
+ spec := importSpec(f, path)
+ if spec == nil {
+ return
+ }
+
+ name := spec.Name.String()
+ switch name {
+ case "<nil>":
+ // If the package name is not explicitly specified,
+ // make an educated guess. This is not guaranteed to be correct.
+ lastSlash := strings.LastIndex(path, "/")
+ if lastSlash == -1 {
+ name = path
+ } else {
+ name = path[lastSlash+1:]
+ }
+ case "_", ".":
+ // Not sure if this import is used - err on the side of caution.
+ return true
+ }
+
+ ast.Walk(visitFn(func(n ast.Node) {
+ sel, ok := n.(*ast.SelectorExpr)
+ if ok && isTopName(sel.X, name) {
+ used = true
+ }
+ }), f)
+
+ return
+}
+
+type visitFn func(node ast.Node)
+
+func (fn visitFn) Visit(node ast.Node) ast.Visitor {
+ fn(node)
+ return fn
+}
+
+// imports reports whether f has an import with the specified name and path.
+func imports(f *ast.File, name, path string) bool {
+ for _, s := range f.Imports {
+ if importName(s) == name && importPath(s) == path {
+ return true
+ }
+ }
+ return false
+}
+
+// importSpec returns the import spec if f imports path,
+// or nil otherwise.
+func importSpec(f *ast.File, path string) *ast.ImportSpec {
+ for _, s := range f.Imports {
+ if importPath(s) == path {
+ return s
+ }
+ }
+ return nil
+}
+
+// importName returns the name of s,
+// or "" if the import is not named.
+func importName(s *ast.ImportSpec) string {
+ if s.Name == nil {
+ return ""
+ }
+ return s.Name.Name
+}
+
+// importPath returns the unquoted import path of s,
+// or "" if the path is not properly quoted.
+func importPath(s *ast.ImportSpec) string {
+ t, err := strconv.Unquote(s.Path.Value)
+ if err != nil {
+ return ""
+ }
+ return t
+}
+
+// declImports reports whether gen contains an import of path.
+func declImports(gen *ast.GenDecl, path string) bool {
+ if gen.Tok != token.IMPORT {
+ return false
+ }
+ for _, spec := range gen.Specs {
+ impspec := spec.(*ast.ImportSpec)
+ if importPath(impspec) == path {
+ return true
+ }
+ }
+ return false
+}
+
+// matchLen returns the length of the longest path segment prefix shared by x and y.
+func matchLen(x, y string) int {
+ n := 0
+ for i := 0; i < len(x) && i < len(y) && x[i] == y[i]; i++ {
+ if x[i] == '/' {
+ n++
+ }
+ }
+ return n
+}
+
+// isTopName returns true if n is a top-level unresolved identifier with the given name.
+func isTopName(n ast.Expr, name string) bool {
+ id, ok := n.(*ast.Ident)
+ return ok && id.Name == name && id.Obj == nil
+}
+
+// Imports returns the file imports grouped by paragraph.
+func Imports(fset *token.FileSet, f *ast.File) [][]*ast.ImportSpec {
+ var groups [][]*ast.ImportSpec
+
+ for _, decl := range f.Decls {
+ genDecl, ok := decl.(*ast.GenDecl)
+ if !ok || genDecl.Tok != token.IMPORT {
+ break
+ }
+
+ group := []*ast.ImportSpec{}
+
+ var lastLine int
+ for _, spec := range genDecl.Specs {
+ importSpec := spec.(*ast.ImportSpec)
+ pos := importSpec.Path.ValuePos
+ line := fset.Position(pos).Line
+ if lastLine > 0 && pos > 0 && line-lastLine > 1 {
+ groups = append(groups, group)
+ group = []*ast.ImportSpec{}
+ }
+ group = append(group, importSpec)
+ lastLine = line
+ }
+ groups = append(groups, group)
+ }
+
+ return groups
+}
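+
+// A minimal usage sketch for Imports, assuming fset and f are the FileSet and
+// parsed file from the AddNamedImport sketch above; each blank-line-separated
+// block of imports comes back as its own group:
+//
+//    for i, group := range Imports(fset, f) {
+//        for _, spec := range group {
+//            fmt.Printf("group %d: %s\n", i, spec.Path.Value)
+//        }
+//    }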
diff --git a/vendor/golang.org/x/tools/go/ast/astutil/rewrite.go b/vendor/golang.org/x/tools/go/ast/astutil/rewrite.go
new file mode 100644
index 0000000..58934f7
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/ast/astutil/rewrite.go
@@ -0,0 +1,486 @@
+// Copyright 2017 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package astutil
+
+import (
+ "fmt"
+ "go/ast"
+ "reflect"
+ "sort"
+)
+
+// An ApplyFunc is invoked by Apply for each node n, even if n is nil,
+// before and/or after the node's children, using a Cursor describing
+// the current node and providing operations on it.
+//
+// The return value of ApplyFunc controls the syntax tree traversal.
+// See Apply for details.
+type ApplyFunc func(*Cursor) bool
+
+// Apply traverses a syntax tree recursively, starting with root,
+// and calling pre and post for each node as described below.
+// Apply returns the syntax tree, possibly modified.
+//
+// If pre is not nil, it is called for each node before the node's
+// children are traversed (pre-order). If pre returns false, no
+// children are traversed, and post is not called for that node.
+//
+// If post is not nil, and a prior call of pre didn't return false,
+// post is called for each node after its children are traversed
+// (post-order). If post returns false, traversal is terminated and
+// Apply returns immediately.
+//
+// Only fields that refer to AST nodes are considered children;
+// i.e., token.Pos, Scopes, Objects, and fields of basic types
+// (strings, etc.) are ignored.
+//
+// Children are traversed in the order in which they appear in the
+// respective node's struct definition. A package's files are
+// traversed in the filenames' alphabetical order.
+func Apply(root ast.Node, pre, post ApplyFunc) (result ast.Node) {
+ parent := &struct{ ast.Node }{root}
+ defer func() {
+ if r := recover(); r != nil && r != abort {
+ panic(r)
+ }
+ result = parent.Node
+ }()
+ a := &application{pre: pre, post: post}
+ a.apply(parent, "Node", nil, root)
+ return
+}
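+
+// A minimal usage sketch for Apply, assuming f is a parsed *ast.File; the
+// identifier names "foo" and "bar" are illustrative:
+//
+//    rewritten := Apply(f, func(c *Cursor) bool {
+//        if id, ok := c.Node().(*ast.Ident); ok && id.Name == "foo" {
+//            c.Replace(&ast.Ident{Name: "bar", NamePos: id.NamePos})
+//        }
+//        return true // keep traversing
+//    }, nil)
+//    _ = rewritten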
+
+var abort = new(int) // singleton, to signal termination of Apply
+
+// A Cursor describes a node encountered during Apply.
+// Information about the node and its parent is available
+// from the Node, Parent, Name, and Index methods.
+//
+// If p is a variable of type and value of the current parent node
+// c.Parent(), and f is the field identifier with name c.Name(),
+// the following invariants hold:
+//
+// p.f == c.Node() if c.Index() < 0
+// p.f[c.Index()] == c.Node() if c.Index() >= 0
+//
+// The methods Replace, Delete, InsertBefore, and InsertAfter
+// can be used to change the AST without disrupting Apply.
+type Cursor struct {
+ parent ast.Node
+ name string
+ iter *iterator // valid if non-nil
+ node ast.Node
+}
+
+// Node returns the current Node.
+func (c *Cursor) Node() ast.Node { return c.node }
+
+// Parent returns the parent of the current Node.
+func (c *Cursor) Parent() ast.Node { return c.parent }
+
+// Name returns the name of the parent Node field that contains the current Node.
+// If the parent is a *ast.Package and the current Node is a *ast.File, Name returns
+// the filename for the current Node.
+func (c *Cursor) Name() string { return c.name }
+
+// Index reports the index >= 0 of the current Node in the slice of Nodes that
+// contains it, or a value < 0 if the current Node is not part of a slice.
+// The index of the current node changes if InsertBefore is called while
+// processing the current node.
+func (c *Cursor) Index() int {
+ if c.iter != nil {
+ return c.iter.index
+ }
+ return -1
+}
+
+// field returns the current node's parent field value.
+func (c *Cursor) field() reflect.Value {
+ return reflect.Indirect(reflect.ValueOf(c.parent)).FieldByName(c.name)
+}
+
+// Replace replaces the current Node with n.
+// The replacement node is not walked by Apply.
+func (c *Cursor) Replace(n ast.Node) {
+ if _, ok := c.node.(*ast.File); ok {
+ file, ok := n.(*ast.File)
+ if !ok {
+ panic("attempt to replace *ast.File with non-*ast.File")
+ }
+ c.parent.(*ast.Package).Files[c.name] = file
+ return
+ }
+
+ v := c.field()
+ if i := c.Index(); i >= 0 {
+ v = v.Index(i)
+ }
+ v.Set(reflect.ValueOf(n))
+}
+
+// Delete deletes the current Node from its containing slice.
+// If the current Node is not part of a slice, Delete panics.
+// As a special case, if the current node is a package file,
+// Delete removes it from the package's Files map.
+func (c *Cursor) Delete() {
+ if _, ok := c.node.(*ast.File); ok {
+ delete(c.parent.(*ast.Package).Files, c.name)
+ return
+ }
+
+ i := c.Index()
+ if i < 0 {
+ panic("Delete node not contained in slice")
+ }
+ v := c.field()
+ l := v.Len()
+ reflect.Copy(v.Slice(i, l), v.Slice(i+1, l))
+ v.Index(l - 1).Set(reflect.Zero(v.Type().Elem()))
+ v.SetLen(l - 1)
+ c.iter.step--
+}
+
+// InsertAfter inserts n after the current Node in its containing slice.
+// If the current Node is not part of a slice, InsertAfter panics.
+// Apply does not walk n.
+func (c *Cursor) InsertAfter(n ast.Node) {
+ i := c.Index()
+ if i < 0 {
+ panic("InsertAfter node not contained in slice")
+ }
+ v := c.field()
+ v.Set(reflect.Append(v, reflect.Zero(v.Type().Elem())))
+ l := v.Len()
+ reflect.Copy(v.Slice(i+2, l), v.Slice(i+1, l))
+ v.Index(i + 1).Set(reflect.ValueOf(n))
+ c.iter.step++
+}
+
+// InsertBefore inserts n before the current Node in its containing slice.
+// If the current Node is not part of a slice, InsertBefore panics.
+// Apply will not walk n.
+func (c *Cursor) InsertBefore(n ast.Node) {
+ i := c.Index()
+ if i < 0 {
+ panic("InsertBefore node not contained in slice")
+ }
+ v := c.field()
+ v.Set(reflect.Append(v, reflect.Zero(v.Type().Elem())))
+ l := v.Len()
+ reflect.Copy(v.Slice(i+1, l), v.Slice(i, l))
+ v.Index(i).Set(reflect.ValueOf(n))
+ c.iter.index++
+}
+
+// application carries all the shared data so we can pass it around cheaply.
+type application struct {
+ pre, post ApplyFunc
+ cursor Cursor
+ iter iterator
+}
+
+func (a *application) apply(parent ast.Node, name string, iter *iterator, n ast.Node) {
+ // convert typed nil into untyped nil
+ if v := reflect.ValueOf(n); v.Kind() == reflect.Ptr && v.IsNil() {
+ n = nil
+ }
+
+ // avoid heap-allocating a new cursor for each apply call; reuse a.cursor instead
+ saved := a.cursor
+ a.cursor.parent = parent
+ a.cursor.name = name
+ a.cursor.iter = iter
+ a.cursor.node = n
+
+ if a.pre != nil && !a.pre(&a.cursor) {
+ a.cursor = saved
+ return
+ }
+
+ // walk children
+ // (the order of the cases matches the order of the corresponding node types in go/ast)
+ switch n := n.(type) {
+ case nil:
+ // nothing to do
+
+ // Comments and fields
+ case *ast.Comment:
+ // nothing to do
+
+ case *ast.CommentGroup:
+ if n != nil {
+ a.applyList(n, "List")
+ }
+
+ case *ast.Field:
+ a.apply(n, "Doc", nil, n.Doc)
+ a.applyList(n, "Names")
+ a.apply(n, "Type", nil, n.Type)
+ a.apply(n, "Tag", nil, n.Tag)
+ a.apply(n, "Comment", nil, n.Comment)
+
+ case *ast.FieldList:
+ a.applyList(n, "List")
+
+ // Expressions
+ case *ast.BadExpr, *ast.Ident, *ast.BasicLit:
+ // nothing to do
+
+ case *ast.Ellipsis:
+ a.apply(n, "Elt", nil, n.Elt)
+
+ case *ast.FuncLit:
+ a.apply(n, "Type", nil, n.Type)
+ a.apply(n, "Body", nil, n.Body)
+
+ case *ast.CompositeLit:
+ a.apply(n, "Type", nil, n.Type)
+ a.applyList(n, "Elts")
+
+ case *ast.ParenExpr:
+ a.apply(n, "X", nil, n.X)
+
+ case *ast.SelectorExpr:
+ a.apply(n, "X", nil, n.X)
+ a.apply(n, "Sel", nil, n.Sel)
+
+ case *ast.IndexExpr:
+ a.apply(n, "X", nil, n.X)
+ a.apply(n, "Index", nil, n.Index)
+
+ case *ast.IndexListExpr:
+ a.apply(n, "X", nil, n.X)
+ a.applyList(n, "Indices")
+
+ case *ast.SliceExpr:
+ a.apply(n, "X", nil, n.X)
+ a.apply(n, "Low", nil, n.Low)
+ a.apply(n, "High", nil, n.High)
+ a.apply(n, "Max", nil, n.Max)
+
+ case *ast.TypeAssertExpr:
+ a.apply(n, "X", nil, n.X)
+ a.apply(n, "Type", nil, n.Type)
+
+ case *ast.CallExpr:
+ a.apply(n, "Fun", nil, n.Fun)
+ a.applyList(n, "Args")
+
+ case *ast.StarExpr:
+ a.apply(n, "X", nil, n.X)
+
+ case *ast.UnaryExpr:
+ a.apply(n, "X", nil, n.X)
+
+ case *ast.BinaryExpr:
+ a.apply(n, "X", nil, n.X)
+ a.apply(n, "Y", nil, n.Y)
+
+ case *ast.KeyValueExpr:
+ a.apply(n, "Key", nil, n.Key)
+ a.apply(n, "Value", nil, n.Value)
+
+ // Types
+ case *ast.ArrayType:
+ a.apply(n, "Len", nil, n.Len)
+ a.apply(n, "Elt", nil, n.Elt)
+
+ case *ast.StructType:
+ a.apply(n, "Fields", nil, n.Fields)
+
+ case *ast.FuncType:
+ if tparams := n.TypeParams; tparams != nil {
+ a.apply(n, "TypeParams", nil, tparams)
+ }
+ a.apply(n, "Params", nil, n.Params)
+ a.apply(n, "Results", nil, n.Results)
+
+ case *ast.InterfaceType:
+ a.apply(n, "Methods", nil, n.Methods)
+
+ case *ast.MapType:
+ a.apply(n, "Key", nil, n.Key)
+ a.apply(n, "Value", nil, n.Value)
+
+ case *ast.ChanType:
+ a.apply(n, "Value", nil, n.Value)
+
+ // Statements
+ case *ast.BadStmt:
+ // nothing to do
+
+ case *ast.DeclStmt:
+ a.apply(n, "Decl", nil, n.Decl)
+
+ case *ast.EmptyStmt:
+ // nothing to do
+
+ case *ast.LabeledStmt:
+ a.apply(n, "Label", nil, n.Label)
+ a.apply(n, "Stmt", nil, n.Stmt)
+
+ case *ast.ExprStmt:
+ a.apply(n, "X", nil, n.X)
+
+ case *ast.SendStmt:
+ a.apply(n, "Chan", nil, n.Chan)
+ a.apply(n, "Value", nil, n.Value)
+
+ case *ast.IncDecStmt:
+ a.apply(n, "X", nil, n.X)
+
+ case *ast.AssignStmt:
+ a.applyList(n, "Lhs")
+ a.applyList(n, "Rhs")
+
+ case *ast.GoStmt:
+ a.apply(n, "Call", nil, n.Call)
+
+ case *ast.DeferStmt:
+ a.apply(n, "Call", nil, n.Call)
+
+ case *ast.ReturnStmt:
+ a.applyList(n, "Results")
+
+ case *ast.BranchStmt:
+ a.apply(n, "Label", nil, n.Label)
+
+ case *ast.BlockStmt:
+ a.applyList(n, "List")
+
+ case *ast.IfStmt:
+ a.apply(n, "Init", nil, n.Init)
+ a.apply(n, "Cond", nil, n.Cond)
+ a.apply(n, "Body", nil, n.Body)
+ a.apply(n, "Else", nil, n.Else)
+
+ case *ast.CaseClause:
+ a.applyList(n, "List")
+ a.applyList(n, "Body")
+
+ case *ast.SwitchStmt:
+ a.apply(n, "Init", nil, n.Init)
+ a.apply(n, "Tag", nil, n.Tag)
+ a.apply(n, "Body", nil, n.Body)
+
+ case *ast.TypeSwitchStmt:
+ a.apply(n, "Init", nil, n.Init)
+ a.apply(n, "Assign", nil, n.Assign)
+ a.apply(n, "Body", nil, n.Body)
+
+ case *ast.CommClause:
+ a.apply(n, "Comm", nil, n.Comm)
+ a.applyList(n, "Body")
+
+ case *ast.SelectStmt:
+ a.apply(n, "Body", nil, n.Body)
+
+ case *ast.ForStmt:
+ a.apply(n, "Init", nil, n.Init)
+ a.apply(n, "Cond", nil, n.Cond)
+ a.apply(n, "Post", nil, n.Post)
+ a.apply(n, "Body", nil, n.Body)
+
+ case *ast.RangeStmt:
+ a.apply(n, "Key", nil, n.Key)
+ a.apply(n, "Value", nil, n.Value)
+ a.apply(n, "X", nil, n.X)
+ a.apply(n, "Body", nil, n.Body)
+
+ // Declarations
+ case *ast.ImportSpec:
+ a.apply(n, "Doc", nil, n.Doc)
+ a.apply(n, "Name", nil, n.Name)
+ a.apply(n, "Path", nil, n.Path)
+ a.apply(n, "Comment", nil, n.Comment)
+
+ case *ast.ValueSpec:
+ a.apply(n, "Doc", nil, n.Doc)
+ a.applyList(n, "Names")
+ a.apply(n, "Type", nil, n.Type)
+ a.applyList(n, "Values")
+ a.apply(n, "Comment", nil, n.Comment)
+
+ case *ast.TypeSpec:
+ a.apply(n, "Doc", nil, n.Doc)
+ a.apply(n, "Name", nil, n.Name)
+ if tparams := n.TypeParams; tparams != nil {
+ a.apply(n, "TypeParams", nil, tparams)
+ }
+ a.apply(n, "Type", nil, n.Type)
+ a.apply(n, "Comment", nil, n.Comment)
+
+ case *ast.BadDecl:
+ // nothing to do
+
+ case *ast.GenDecl:
+ a.apply(n, "Doc", nil, n.Doc)
+ a.applyList(n, "Specs")
+
+ case *ast.FuncDecl:
+ a.apply(n, "Doc", nil, n.Doc)
+ a.apply(n, "Recv", nil, n.Recv)
+ a.apply(n, "Name", nil, n.Name)
+ a.apply(n, "Type", nil, n.Type)
+ a.apply(n, "Body", nil, n.Body)
+
+ // Files and packages
+ case *ast.File:
+ a.apply(n, "Doc", nil, n.Doc)
+ a.apply(n, "Name", nil, n.Name)
+ a.applyList(n, "Decls")
+ // Don't walk n.Comments; they have either been walked already if
+ // they are Doc comments, or they can be easily walked explicitly.
+
+ case *ast.Package:
+ // collect and sort names for reproducible behavior
+ var names []string
+ for name := range n.Files {
+ names = append(names, name)
+ }
+ sort.Strings(names)
+ for _, name := range names {
+ a.apply(n, name, nil, n.Files[name])
+ }
+
+ default:
+ panic(fmt.Sprintf("Apply: unexpected node type %T", n))
+ }
+
+ if a.post != nil && !a.post(&a.cursor) {
+ panic(abort)
+ }
+
+ a.cursor = saved
+}
+
+// An iterator controls iteration over a slice of nodes.
+type iterator struct {
+ index, step int
+}
+
+func (a *application) applyList(parent ast.Node, name string) {
+ // avoid heap-allocating a new iterator for each applyList call; reuse a.iter instead
+ saved := a.iter
+ a.iter.index = 0
+ for {
+ // must reload parent.name each time, since cursor modifications might change it
+ v := reflect.Indirect(reflect.ValueOf(parent)).FieldByName(name)
+ if a.iter.index >= v.Len() {
+ break
+ }
+
+ // element x may be nil in a bad AST - be cautious
+ var x ast.Node
+ if e := v.Index(a.iter.index); e.IsValid() {
+ x = e.Interface().(ast.Node)
+ }
+
+ a.iter.step = 1
+ a.apply(parent, name, &a.iter, x)
+ a.iter.index += a.iter.step
+ }
+ a.iter = saved
+}
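+
+// Minimal usage sketch of Apply with a Cursor (illustrative only; "file" is
+// assumed to be an *ast.File obtained from go/parser). It renames every
+// identifier "foo" to "bar"; the replacement node itself is not walked:
+//
+//	file = astutil.Apply(file, func(c *astutil.Cursor) bool {
+//		if id, ok := c.Node().(*ast.Ident); ok && id.Name == "foo" {
+//			c.Replace(ast.NewIdent("bar"))
+//		}
+//		return true
+//	}, nil).(*ast.File)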
diff --git a/vendor/golang.org/x/tools/go/ast/astutil/util.go b/vendor/golang.org/x/tools/go/ast/astutil/util.go
new file mode 100644
index 0000000..6bdcf70
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/ast/astutil/util.go
@@ -0,0 +1,19 @@
+// Copyright 2015 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package astutil
+
+import "go/ast"
+
+// Unparen returns e with any enclosing parentheses stripped.
+// TODO(adonovan): use go1.22's ast.Unparen.
+func Unparen(e ast.Expr) ast.Expr {
+ for {
+ p, ok := e.(*ast.ParenExpr)
+ if !ok {
+ return e
+ }
+ e = p.X
+ }
+}
diff --git a/vendor/golang.org/x/tools/go/buildutil/allpackages.go b/vendor/golang.org/x/tools/go/buildutil/allpackages.go
new file mode 100644
index 0000000..dfb8cd6
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/buildutil/allpackages.go
@@ -0,0 +1,195 @@
+// Copyright 2014 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package buildutil provides utilities related to the go/build
+// package in the standard library.
+//
+// All I/O is done via the build.Context file system interface, which must
+// be concurrency-safe.
+package buildutil // import "golang.org/x/tools/go/buildutil"
+
+import (
+ "go/build"
+ "os"
+ "path/filepath"
+ "sort"
+ "strings"
+ "sync"
+)
+
+// AllPackages returns the package path of each Go package in any source
+// directory of the specified build context (e.g. $GOROOT or an element
+// of $GOPATH). Errors are ignored. The results are sorted.
+// All package paths are canonical, and thus may contain "/vendor/".
+//
+// The result may include import paths for directories that contain no
+// *.go files, such as "archive" (in $GOROOT/src).
+//
+// All I/O is done via the build.Context file system interface,
+// which must be concurrency-safe.
+func AllPackages(ctxt *build.Context) []string {
+ var list []string
+ ForEachPackage(ctxt, func(pkg string, _ error) {
+ list = append(list, pkg)
+ })
+ sort.Strings(list)
+ return list
+}
+
+// ForEachPackage calls the found function with the package path of
+// each Go package it finds in any source directory of the specified
+// build context (e.g. $GOROOT or an element of $GOPATH).
+// All package paths are canonical, and thus may contain "/vendor/".
+//
+// If the package directory exists but could not be read, the second
+// argument to the found function provides the error.
+//
+// All I/O is done via the build.Context file system interface,
+// which must be concurrency-safe.
+func ForEachPackage(ctxt *build.Context, found func(importPath string, err error)) {
+ ch := make(chan item)
+
+ var wg sync.WaitGroup
+ for _, root := range ctxt.SrcDirs() {
+ root := root
+ wg.Add(1)
+ go func() {
+ allPackages(ctxt, root, ch)
+ wg.Done()
+ }()
+ }
+ go func() {
+ wg.Wait()
+ close(ch)
+ }()
+
+ // All calls to found occur in the caller's goroutine.
+ for i := range ch {
+ found(i.importPath, i.err)
+ }
+}
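+
+// Minimal usage sketch (illustrative only): enumerate every package in the
+// default build context, reporting unreadable directories as they are found.
+//
+//	buildutil.ForEachPackage(&build.Default, func(importPath string, err error) {
+//		if err != nil {
+//			log.Printf("%s: %v", importPath, err)
+//			return
+//		}
+//		fmt.Println(importPath)
+//	})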
+
+type item struct {
+ importPath string
+ err error // (optional)
+}
+
+// We use a process-wide counting semaphore to limit
+// the number of parallel calls to ReadDir.
+var ioLimit = make(chan bool, 20)
+
+func allPackages(ctxt *build.Context, root string, ch chan<- item) {
+ root = filepath.Clean(root) + string(os.PathSeparator)
+
+ var wg sync.WaitGroup
+
+ var walkDir func(dir string)
+ walkDir = func(dir string) {
+ // Avoid .foo, _foo, and testdata directory trees.
+ base := filepath.Base(dir)
+ if base == "" || base[0] == '.' || base[0] == '_' || base == "testdata" {
+ return
+ }
+
+ pkg := filepath.ToSlash(strings.TrimPrefix(dir, root))
+
+ // Prune search if we encounter any of these import paths.
+ switch pkg {
+ case "builtin":
+ return
+ }
+
+ ioLimit <- true
+ files, err := ReadDir(ctxt, dir)
+ <-ioLimit
+ if pkg != "" || err != nil {
+ ch <- item{pkg, err}
+ }
+ for _, fi := range files {
+ fi := fi
+ if fi.IsDir() {
+ wg.Add(1)
+ go func() {
+ walkDir(filepath.Join(dir, fi.Name()))
+ wg.Done()
+ }()
+ }
+ }
+ }
+
+ walkDir(root)
+ wg.Wait()
+}
+
+// ExpandPatterns returns the set of packages matched by patterns,
+// which may have the following forms:
+//
+// golang.org/x/tools/cmd/guru # a single package
+// golang.org/x/tools/... # all packages beneath dir
+// ... # the entire workspace.
+//
+// Order is significant: a pattern preceded by '-' removes matching
+// packages from the set. For example, these patterns match all encoding
+// packages except encoding/xml:
+//
+// encoding/... -encoding/xml
+//
+// A trailing slash in a pattern is ignored. (Path components of Go
+// package names are separated by slash, not the platform's path separator.)
+func ExpandPatterns(ctxt *build.Context, patterns []string) map[string]bool {
+ // TODO(adonovan): support other features of 'go list':
+ // - "std"/"cmd"/"all" meta-packages
+ // - "..." not at the end of a pattern
+ // - relative patterns using "./" or "../" prefix
+
+ pkgs := make(map[string]bool)
+ doPkg := func(pkg string, neg bool) {
+ if neg {
+ delete(pkgs, pkg)
+ } else {
+ pkgs[pkg] = true
+ }
+ }
+
+ // Scan entire workspace if wildcards are present.
+ // TODO(adonovan): opt: scan only the necessary subtrees of the workspace.
+ var all []string
+ for _, arg := range patterns {
+ if strings.HasSuffix(arg, "...") {
+ all = AllPackages(ctxt)
+ break
+ }
+ }
+
+ for _, arg := range patterns {
+ if arg == "" {
+ continue
+ }
+
+ neg := arg[0] == '-'
+ if neg {
+ arg = arg[1:]
+ }
+
+ if arg == "..." {
+ // ... matches all packages
+ for _, pkg := range all {
+ doPkg(pkg, neg)
+ }
+ } else if dir := strings.TrimSuffix(arg, "/..."); dir != arg {
+ // dir/... matches all packages beneath dir
+ for _, pkg := range all {
+ if strings.HasPrefix(pkg, dir) &&
+ (len(pkg) == len(dir) || pkg[len(dir)] == '/') {
+ doPkg(pkg, neg)
+ }
+ }
+ } else {
+ // single package
+ doPkg(strings.TrimSuffix(arg, "/"), neg)
+ }
+ }
+
+ return pkgs
+}
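+
+// Minimal usage sketch (illustrative only), matching all encoding packages
+// except encoding/xml as in the example above:
+//
+//	pkgs := buildutil.ExpandPatterns(&build.Default, []string{"encoding/...", "-encoding/xml"})
+//	for pkg := range pkgs {
+//		fmt.Println(pkg)
+//	}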
diff --git a/vendor/golang.org/x/tools/go/buildutil/fakecontext.go b/vendor/golang.org/x/tools/go/buildutil/fakecontext.go
new file mode 100644
index 0000000..763d188
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/buildutil/fakecontext.go
@@ -0,0 +1,111 @@
+// Copyright 2015 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package buildutil
+
+import (
+ "fmt"
+ "go/build"
+ "io"
+ "os"
+ "path"
+ "path/filepath"
+ "sort"
+ "strings"
+ "time"
+)
+
+// FakeContext returns a build.Context for the fake file tree specified
+// by pkgs, which maps package import paths to a mapping from file base
+// names to contents.
+//
+// The fake Context has a GOROOT of "/go" and no GOPATH, and overrides
+// the necessary file access methods to read from memory instead of the
+// real file system.
+//
+// Unlike a real file tree, the fake one has only two levels---packages
+// and files---so ReadDir("/go/src/") returns all packages under
+// /go/src/ including, for instance, "math" and "math/big".
+// ReadDir("/go/src/math/big") would return all the files in the
+// "math/big" package.
+func FakeContext(pkgs map[string]map[string]string) *build.Context {
+ clean := func(filename string) string {
+ f := path.Clean(filepath.ToSlash(filename))
+ // Removing "/go/src" while respecting segment
+ // boundaries has this unfortunate corner case:
+ if f == "/go/src" {
+ return ""
+ }
+ return strings.TrimPrefix(f, "/go/src/")
+ }
+
+ ctxt := build.Default // copy
+ ctxt.GOROOT = "/go"
+ ctxt.GOPATH = ""
+ ctxt.Compiler = "gc"
+ ctxt.IsDir = func(dir string) bool {
+ dir = clean(dir)
+ if dir == "" {
+ return true // needed by (*build.Context).SrcDirs
+ }
+ return pkgs[dir] != nil
+ }
+ ctxt.ReadDir = func(dir string) ([]os.FileInfo, error) {
+ dir = clean(dir)
+ var fis []os.FileInfo
+ if dir == "" {
+ // enumerate packages
+ for importPath := range pkgs {
+ fis = append(fis, fakeDirInfo(importPath))
+ }
+ } else {
+ // enumerate files of package
+ for basename := range pkgs[dir] {
+ fis = append(fis, fakeFileInfo(basename))
+ }
+ }
+ sort.Sort(byName(fis))
+ return fis, nil
+ }
+ ctxt.OpenFile = func(filename string) (io.ReadCloser, error) {
+ filename = clean(filename)
+ dir, base := path.Split(filename)
+ content, ok := pkgs[path.Clean(dir)][base]
+ if !ok {
+ return nil, fmt.Errorf("file not found: %s", filename)
+ }
+ return io.NopCloser(strings.NewReader(content)), nil
+ }
+ ctxt.IsAbsPath = func(path string) bool {
+ path = filepath.ToSlash(path)
+ // Don't rely on the default (filepath.Path) since on
+ // Windows, it reports virtual paths as non-absolute.
+ return strings.HasPrefix(path, "/")
+ }
+ return &ctxt
+}
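+
+// Minimal usage sketch (illustrative only): a fake tree with one package and
+// one file, read back through the context's file system methods.
+//
+//	ctxt := buildutil.FakeContext(map[string]map[string]string{
+//		"math/big": {"big.go": "package big\n"},
+//	})
+//	fis, _ := ctxt.ReadDir("/go/src/math/big")
+//	for _, fi := range fis {
+//		fmt.Println(fi.Name()) // "big.go"
+//	}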
+
+type byName []os.FileInfo
+
+func (s byName) Len() int { return len(s) }
+func (s byName) Swap(i, j int) { s[i], s[j] = s[j], s[i] }
+func (s byName) Less(i, j int) bool { return s[i].Name() < s[j].Name() }
+
+type fakeFileInfo string
+
+func (fi fakeFileInfo) Name() string { return string(fi) }
+func (fakeFileInfo) Sys() interface{} { return nil }
+func (fakeFileInfo) ModTime() time.Time { return time.Time{} }
+func (fakeFileInfo) IsDir() bool { return false }
+func (fakeFileInfo) Size() int64 { return 0 }
+func (fakeFileInfo) Mode() os.FileMode { return 0644 }
+
+type fakeDirInfo string
+
+func (fd fakeDirInfo) Name() string { return string(fd) }
+func (fakeDirInfo) Sys() interface{} { return nil }
+func (fakeDirInfo) ModTime() time.Time { return time.Time{} }
+func (fakeDirInfo) IsDir() bool { return true }
+func (fakeDirInfo) Size() int64 { return 0 }
+func (fakeDirInfo) Mode() os.FileMode { return 0755 }
diff --git a/vendor/golang.org/x/tools/go/buildutil/overlay.go b/vendor/golang.org/x/tools/go/buildutil/overlay.go
new file mode 100644
index 0000000..7e37165
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/buildutil/overlay.go
@@ -0,0 +1,101 @@
+// Copyright 2016 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package buildutil
+
+import (
+ "bufio"
+ "bytes"
+ "fmt"
+ "go/build"
+ "io"
+ "path/filepath"
+ "strconv"
+ "strings"
+)
+
+// OverlayContext overlays a build.Context with additional files from
+// a map. Files in the map take precedence over other files.
+//
+// In addition to plain string comparison, two file names are
+// considered equal if their base names match and their directory
+// components point at the same directory on the file system. That is,
+// symbolic links are followed for directories, but not files.
+//
+// A common use case for OverlayContext is to allow editors to pass in
+// a set of unsaved, modified files.
+//
+// Currently, only the Context.OpenFile function will respect the
+// overlay. This may change in the future.
+func OverlayContext(orig *build.Context, overlay map[string][]byte) *build.Context {
+ // TODO(dominikh): Implement IsDir, HasSubdir and ReadDir
+
+ rc := func(data []byte) (io.ReadCloser, error) {
+ return io.NopCloser(bytes.NewBuffer(data)), nil
+ }
+
+ copy := *orig // make a copy
+ ctxt := &copy
+ ctxt.OpenFile = func(path string) (io.ReadCloser, error) {
+ // Fast path: names match exactly.
+ if content, ok := overlay[path]; ok {
+ return rc(content)
+ }
+
+ // Slow path: check for same file under a different
+ // alias, perhaps due to a symbolic link.
+ for filename, content := range overlay {
+ if sameFile(path, filename) {
+ return rc(content)
+ }
+ }
+
+ return OpenFile(orig, path)
+ }
+ return ctxt
+}
+
+// ParseOverlayArchive parses an archive containing Go files and their
+// contents. The result is intended to be used with OverlayContext.
+//
+// # Archive format
+//
+// The archive consists of a series of files. Each file consists of a
+// name, a decimal file size and the file contents, separated by
+// newlines. No newline follows after the file contents.
+func ParseOverlayArchive(archive io.Reader) (map[string][]byte, error) {
+ overlay := make(map[string][]byte)
+ r := bufio.NewReader(archive)
+ for {
+ // Read file name.
+ filename, err := r.ReadString('\n')
+ if err != nil {
+ if err == io.EOF {
+ break // OK
+ }
+ return nil, fmt.Errorf("reading archive file name: %v", err)
+ }
+ filename = filepath.Clean(strings.TrimSpace(filename))
+
+ // Read file size.
+ sz, err := r.ReadString('\n')
+ if err != nil {
+ return nil, fmt.Errorf("reading size of archive file %s: %v", filename, err)
+ }
+ sz = strings.TrimSpace(sz)
+ size, err := strconv.ParseUint(sz, 10, 32)
+ if err != nil {
+ return nil, fmt.Errorf("parsing size of archive file %s: %v", filename, err)
+ }
+
+ // Read file content.
+ content := make([]byte, size)
+ if _, err := io.ReadFull(r, content); err != nil {
+ return nil, fmt.Errorf("reading archive file %s: %v", filename, err)
+ }
+ overlay[filename] = content
+ }
+
+ return overlay, nil
+}
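+
+// Minimal usage sketch (illustrative only): the archive below carries a single
+// 13-byte file, which then takes precedence over any on-disk copy.
+//
+//	archive := strings.NewReader("main.go\n13\npackage main\n")
+//	overlay, err := buildutil.ParseOverlayArchive(archive)
+//	if err != nil {
+//		log.Fatal(err)
+//	}
+//	ctxt := buildutil.OverlayContext(&build.Default, overlay)
+//	rc, _ := ctxt.OpenFile("main.go") // reads the overlay, not the disk
+//	io.Copy(os.Stdout, rc)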
diff --git a/vendor/golang.org/x/tools/go/buildutil/tags.go b/vendor/golang.org/x/tools/go/buildutil/tags.go
new file mode 100644
index 0000000..32c8d14
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/buildutil/tags.go
@@ -0,0 +1,100 @@
+// Copyright 2015 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package buildutil
+
+// This duplicated logic must be kept in sync with that from go build:
+// $GOROOT/src/cmd/go/internal/work/build.go (tagsFlag.Set)
+// $GOROOT/src/cmd/go/internal/base/flag.go (StringsFlag.Set)
+// $GOROOT/src/cmd/internal/quoted/quoted.go (isSpaceByte, Split)
+
+import (
+ "fmt"
+ "strings"
+)
+
+const TagsFlagDoc = "a list of `build tags` to consider satisfied during the build. " +
+ "For more information about build tags, see the description of " +
+ "build constraints in the documentation for the go/build package"
+
+// TagsFlag is an implementation of the flag.Value and flag.Getter interfaces that parses
+// a flag value the same as go build's -tags flag and populates a []string slice.
+//
+// See $GOROOT/src/go/build/doc.go for description of build tags.
+// See $GOROOT/src/cmd/go/doc.go for description of 'go build -tags' flag.
+//
+// Example:
+//
+// flag.Var((*buildutil.TagsFlag)(&build.Default.BuildTags), "tags", buildutil.TagsFlagDoc)
+type TagsFlag []string
+
+func (v *TagsFlag) Set(s string) error {
+ // See $GOROOT/src/cmd/go/internal/work/build.go (tagsFlag.Set)
+ // For compatibility with Go 1.12 and earlier, allow "-tags='a b c'" or even just "-tags='a'".
+ if strings.Contains(s, " ") || strings.Contains(s, "'") {
+ var err error
+ *v, err = splitQuotedFields(s)
+ if *v == nil {
+ *v = []string{}
+ }
+ return err
+ }
+
+ // Starting in Go 1.13, the -tags flag is a comma-separated list of build tags.
+ *v = []string{}
+ for _, s := range strings.Split(s, ",") {
+ if s != "" {
+ *v = append(*v, s)
+ }
+ }
+ return nil
+}
+
+func (v *TagsFlag) Get() interface{} { return *v }
+
+func splitQuotedFields(s string) ([]string, error) {
+ // See $GOROOT/src/cmd/internal/quoted/quoted.go (Split)
+ // This must remain in sync with that logic.
+ var f []string
+ for len(s) > 0 {
+ for len(s) > 0 && isSpaceByte(s[0]) {
+ s = s[1:]
+ }
+ if len(s) == 0 {
+ break
+ }
+ // Accept a quoted string. No unescaping inside.
+ if s[0] == '"' || s[0] == '\'' {
+ quote := s[0]
+ s = s[1:]
+ i := 0
+ for i < len(s) && s[i] != quote {
+ i++
+ }
+ if i >= len(s) {
+ return nil, fmt.Errorf("unterminated %c string", quote)
+ }
+ f = append(f, s[:i])
+ s = s[i+1:]
+ continue
+ }
+ i := 0
+ for i < len(s) && !isSpaceByte(s[i]) {
+ i++
+ }
+ f = append(f, s[:i])
+ s = s[i:]
+ }
+ return f, nil
+}
+
+func (v *TagsFlag) String() string {
+ return "<tagsFlag>"
+}
+
+func isSpaceByte(c byte) bool {
+ // See $GOROOT/src/cmd/internal/quoted/quoted.go (isSpaceByte, Split)
+ // This list must remain in sync with that.
+ return c == ' ' || c == '\t' || c == '\n' || c == '\r'
+}
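+
+// Minimal usage sketch (illustrative only): register the flag and inspect the
+// parsed tags. "-tags=a,b" yields []string{"a", "b"}; the legacy
+// space-separated form "-tags='a b'" is also accepted.
+//
+//	var tags buildutil.TagsFlag
+//	flag.Var(&tags, "tags", buildutil.TagsFlagDoc)
+//	flag.Parse()
+//	fmt.Println([]string(tags))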
diff --git a/vendor/golang.org/x/tools/go/buildutil/util.go b/vendor/golang.org/x/tools/go/buildutil/util.go
new file mode 100644
index 0000000..bee6390
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/buildutil/util.go
@@ -0,0 +1,209 @@
+// Copyright 2014 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package buildutil
+
+import (
+ "fmt"
+ "go/ast"
+ "go/build"
+ "go/parser"
+ "go/token"
+ "io"
+ "io/ioutil"
+ "os"
+ "path"
+ "path/filepath"
+ "strings"
+)
+
+// ParseFile behaves like parser.ParseFile,
+// but uses the build context's file system interface, if any.
+//
+// If file is not absolute (as defined by IsAbsPath), the (dir, file)
+// components are joined using JoinPath; dir must be absolute.
+//
+// The displayPath function, if provided, is used to transform the
+// filename that will be attached to the ASTs.
+//
+// TODO(adonovan): call this from go/loader.parseFiles when the tree thaws.
+func ParseFile(fset *token.FileSet, ctxt *build.Context, displayPath func(string) string, dir string, file string, mode parser.Mode) (*ast.File, error) {
+ if !IsAbsPath(ctxt, file) {
+ file = JoinPath(ctxt, dir, file)
+ }
+ rd, err := OpenFile(ctxt, file)
+ if err != nil {
+ return nil, err
+ }
+ defer rd.Close() // ignore error
+ if displayPath != nil {
+ file = displayPath(file)
+ }
+ return parser.ParseFile(fset, file, rd, mode)
+}
+
+// ContainingPackage returns the package containing filename.
+//
+// If filename is not absolute, it is interpreted relative to working directory dir.
+// All I/O is via the build context's file system interface, if any.
+//
+// The '...Files []string' fields of the resulting build.Package are not
+// populated (build.FindOnly mode).
+func ContainingPackage(ctxt *build.Context, dir, filename string) (*build.Package, error) {
+ if !IsAbsPath(ctxt, filename) {
+ filename = JoinPath(ctxt, dir, filename)
+ }
+
+ // We must not assume the file tree uses
+ // "/" always,
+ // `\` always,
+ // or os.PathSeparator (which varies by platform),
+ // but to make any progress, we are forced to assume that
+ // paths will not use `\` unless the PathSeparator
+ // is also `\`, thus we can rely on filepath.ToSlash for some sanity.
+
+ dirSlash := path.Dir(filepath.ToSlash(filename)) + "/"
+
+ // We assume that no source root (GOPATH[i] or GOROOT) contains any other.
+ for _, srcdir := range ctxt.SrcDirs() {
+ srcdirSlash := filepath.ToSlash(srcdir) + "/"
+ if importPath, ok := HasSubdir(ctxt, srcdirSlash, dirSlash); ok {
+ return ctxt.Import(importPath, dir, build.FindOnly)
+ }
+ }
+
+ return nil, fmt.Errorf("can't find package containing %s", filename)
+}
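+
+// Minimal usage sketch (illustrative only; the relative file path is
+// arbitrary):
+//
+//	wd, _ := os.Getwd()
+//	bp, err := buildutil.ContainingPackage(&build.Default, wd, "foo/bar.go")
+//	if err == nil {
+//		fmt.Println(bp.ImportPath, bp.Dir)
+//	}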
+
+// -- Effective methods of file system interface -------------------------
+
+// (go/build.Context defines these as methods, but does not export them.)
+
+// HasSubdir calls ctxt.HasSubdir (if not nil) or else uses
+// the local file system to answer the question.
+func HasSubdir(ctxt *build.Context, root, dir string) (rel string, ok bool) {
+ if f := ctxt.HasSubdir; f != nil {
+ return f(root, dir)
+ }
+
+ // Try using paths we received.
+ if rel, ok = hasSubdir(root, dir); ok {
+ return
+ }
+
+ // Try expanding symlinks and comparing
+ // expanded against unexpanded and
+ // expanded against expanded.
+ rootSym, _ := filepath.EvalSymlinks(root)
+ dirSym, _ := filepath.EvalSymlinks(dir)
+
+ if rel, ok = hasSubdir(rootSym, dir); ok {
+ return
+ }
+ if rel, ok = hasSubdir(root, dirSym); ok {
+ return
+ }
+ return hasSubdir(rootSym, dirSym)
+}
+
+func hasSubdir(root, dir string) (rel string, ok bool) {
+ const sep = string(filepath.Separator)
+ root = filepath.Clean(root)
+ if !strings.HasSuffix(root, sep) {
+ root += sep
+ }
+
+ dir = filepath.Clean(dir)
+ if !strings.HasPrefix(dir, root) {
+ return "", false
+ }
+
+ return filepath.ToSlash(dir[len(root):]), true
+}
+
+// FileExists returns true if the specified file exists,
+// using the build context's file system interface.
+func FileExists(ctxt *build.Context, path string) bool {
+ if ctxt.OpenFile != nil {
+ r, err := ctxt.OpenFile(path)
+ if err != nil {
+ return false
+ }
+ r.Close() // ignore error
+ return true
+ }
+ _, err := os.Stat(path)
+ return err == nil
+}
+
+// OpenFile behaves like os.Open,
+// but uses the build context's file system interface, if any.
+func OpenFile(ctxt *build.Context, path string) (io.ReadCloser, error) {
+ if ctxt.OpenFile != nil {
+ return ctxt.OpenFile(path)
+ }
+ return os.Open(path)
+}
+
+// IsAbsPath behaves like filepath.IsAbs,
+// but uses the build context's file system interface, if any.
+func IsAbsPath(ctxt *build.Context, path string) bool {
+ if ctxt.IsAbsPath != nil {
+ return ctxt.IsAbsPath(path)
+ }
+ return filepath.IsAbs(path)
+}
+
+// JoinPath behaves like filepath.Join,
+// but uses the build context's file system interface, if any.
+func JoinPath(ctxt *build.Context, path ...string) string {
+ if ctxt.JoinPath != nil {
+ return ctxt.JoinPath(path...)
+ }
+ return filepath.Join(path...)
+}
+
+// IsDir behaves like os.Stat plus IsDir,
+// but uses the build context's file system interface, if any.
+func IsDir(ctxt *build.Context, path string) bool {
+ if ctxt.IsDir != nil {
+ return ctxt.IsDir(path)
+ }
+ fi, err := os.Stat(path)
+ return err == nil && fi.IsDir()
+}
+
+// ReadDir behaves like ioutil.ReadDir,
+// but uses the build context's file system interface, if any.
+func ReadDir(ctxt *build.Context, path string) ([]os.FileInfo, error) {
+ if ctxt.ReadDir != nil {
+ return ctxt.ReadDir(path)
+ }
+ return ioutil.ReadDir(path)
+}
+
+// SplitPathList behaves like filepath.SplitList,
+// but uses the build context's file system interface, if any.
+func SplitPathList(ctxt *build.Context, s string) []string {
+ if ctxt.SplitPathList != nil {
+ return ctxt.SplitPathList(s)
+ }
+ return filepath.SplitList(s)
+}
+
+// sameFile returns true if x and y have the same basename and denote
+// the same file.
+func sameFile(x, y string) bool {
+ if path.Clean(x) == path.Clean(y) {
+ return true
+ }
+ if filepath.Base(x) == filepath.Base(y) { // (optimisation)
+ if xi, err := os.Stat(x); err == nil {
+ if yi, err := os.Stat(y); err == nil {
+ return os.SameFile(xi, yi)
+ }
+ }
+ }
+ return false
+}
diff --git a/vendor/golang.org/x/tools/go/callgraph/callgraph.go b/vendor/golang.org/x/tools/go/callgraph/callgraph.go
new file mode 100644
index 0000000..a1b0ca5
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/callgraph/callgraph.go
@@ -0,0 +1,129 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+/*
+Package callgraph defines the call graph and various algorithms
+and utilities to operate on it.
+
+A call graph is a labelled directed graph whose nodes represent
+functions and whose edge labels represent syntactic function call
+sites. The presence of a labelled edge (caller, site, callee)
+indicates that caller may call callee at the specified call site.
+
+A call graph is a multigraph: it may contain multiple edges (caller,
+*, callee) connecting the same pair of nodes, so long as the edges
+differ by label; this occurs when one function calls another function
+from multiple call sites. Also, it may contain multiple edges
+(caller, site, *) that differ only by callee; this indicates a
+polymorphic call.
+
+A SOUND call graph is one that overapproximates the dynamic calling
+behaviors of the program in all possible executions. One call graph
+is more PRECISE than another if it is a smaller overapproximation of
+the dynamic behavior.
+
+All call graphs have a synthetic root node which is responsible for
+calling main() and init().
+
+Calls to built-in functions (e.g. panic, println) are not represented
+in the call graph; they are treated like built-in operators of the
+language.
+*/
+package callgraph // import "golang.org/x/tools/go/callgraph"
+
+// TODO(adonovan): add a function to eliminate wrappers from the
+// callgraph, preserving topology.
+// More generally, we could eliminate "uninteresting" nodes such as
+// nodes from packages we don't care about.
+
+// TODO(zpavlinovic): decide how callgraphs handle calls to and from generic function bodies.
+
+import (
+ "fmt"
+ "go/token"
+
+ "golang.org/x/tools/go/ssa"
+)
+
+// A Graph represents a call graph.
+//
+// A graph may contain nodes that are not reachable from the root.
+// If the call graph is sound, such nodes indicate unreachable
+// functions.
+type Graph struct {
+ Root *Node // the distinguished root node
+ Nodes map[*ssa.Function]*Node // all nodes by function
+}
+
+// New returns a new Graph with the specified root node.
+func New(root *ssa.Function) *Graph {
+ g := &Graph{Nodes: make(map[*ssa.Function]*Node)}
+ g.Root = g.CreateNode(root)
+ return g
+}
+
+// CreateNode returns the Node for fn, creating it if not present.
+// The root node may have fn=nil.
+func (g *Graph) CreateNode(fn *ssa.Function) *Node {
+ n, ok := g.Nodes[fn]
+ if !ok {
+ n = &Node{Func: fn, ID: len(g.Nodes)}
+ g.Nodes[fn] = n
+ }
+ return n
+}
+
+// A Node represents a node in a call graph.
+type Node struct {
+ Func *ssa.Function // the function this node represents
+ ID int // 0-based sequence number
+ In []*Edge // unordered set of incoming call edges (n.In[*].Callee == n)
+ Out []*Edge // unordered set of outgoing call edges (n.Out[*].Caller == n)
+}
+
+func (n *Node) String() string {
+ return fmt.Sprintf("n%d:%s", n.ID, n.Func)
+}
+
+// An Edge represents an edge in the call graph.
+//
+// Site is nil for edges originating in synthetic or intrinsic
+// functions, e.g. reflect.Value.Call or the root of the call graph.
+type Edge struct {
+ Caller *Node
+ Site ssa.CallInstruction
+ Callee *Node
+}
+
+func (e Edge) String() string {
+ return fmt.Sprintf("%s --> %s", e.Caller, e.Callee)
+}
+
+func (e Edge) Description() string {
+ var prefix string
+ switch e.Site.(type) {
+ case nil:
+ return "synthetic call"
+ case *ssa.Go:
+ prefix = "concurrent "
+ case *ssa.Defer:
+ prefix = "deferred "
+ }
+ return prefix + e.Site.Common().Description()
+}
+
+func (e Edge) Pos() token.Pos {
+ if e.Site == nil {
+ return token.NoPos
+ }
+ return e.Site.Pos()
+}
+
+// AddEdge adds the edge (caller, site, callee) to the call graph.
+// Elimination of duplicate edges is the caller's responsibility.
+func AddEdge(caller *Node, site ssa.CallInstruction, callee *Node) {
+ e := &Edge{caller, site, callee}
+ callee.In = append(callee.In, e)
+ caller.Out = append(caller.Out, e)
+}
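+
+// Minimal usage sketch (illustrative only; root, callee and site are assumed
+// to come from an ssa.Program built elsewhere):
+//
+//	g := callgraph.New(root)           // root is a *ssa.Function
+//	n := g.CreateNode(callee)          // callee is a *ssa.Function
+//	callgraph.AddEdge(g.Root, site, n) // site is an ssa.CallInstruction (may be nil)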
diff --git a/vendor/golang.org/x/tools/go/callgraph/cha/cha.go b/vendor/golang.org/x/tools/go/callgraph/cha/cha.go
new file mode 100644
index 0000000..3040f3d
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/callgraph/cha/cha.go
@@ -0,0 +1,164 @@
+// Copyright 2014 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package cha computes the call graph of a Go program using the Class
+// Hierarchy Analysis (CHA) algorithm.
+//
+// CHA was first described in "Optimization of Object-Oriented Programs
+// Using Static Class Hierarchy Analysis", Jeffrey Dean, David Grove,
+// and Craig Chambers, ECOOP'95.
+//
+// CHA is related to RTA (see go/callgraph/rta); the difference is that
+// CHA conservatively computes the entire "implements" relation between
+// interfaces and concrete types ahead of time, whereas RTA uses dynamic
+// programming to construct it on the fly as it encounters new functions
+// reachable from main. CHA may thus include spurious call edges for
+// types that haven't been instantiated yet, or types that are never
+// instantiated.
+//
+// Since CHA conservatively assumes that all functions are address-taken
+// and all concrete types are put into interfaces, it is sound to run on
+// partial programs, such as libraries without a main or test function.
+package cha // import "golang.org/x/tools/go/callgraph/cha"
+
+// TODO(zpavlinovic): update CHA for how it handles generic function bodies.
+
+import (
+ "go/types"
+
+ "golang.org/x/tools/go/callgraph"
+ "golang.org/x/tools/go/ssa"
+ "golang.org/x/tools/go/ssa/ssautil"
+ "golang.org/x/tools/go/types/typeutil"
+)
+
+// CallGraph computes the call graph of the specified program using the
+// Class Hierarchy Analysis algorithm.
+func CallGraph(prog *ssa.Program) *callgraph.Graph {
+ cg := callgraph.New(nil) // TODO(adonovan) eliminate concept of rooted callgraph
+
+ allFuncs := ssautil.AllFunctions(prog)
+
+ calleesOf := lazyCallees(allFuncs)
+
+ addEdge := func(fnode *callgraph.Node, site ssa.CallInstruction, g *ssa.Function) {
+ gnode := cg.CreateNode(g)
+ callgraph.AddEdge(fnode, site, gnode)
+ }
+
+ addEdges := func(fnode *callgraph.Node, site ssa.CallInstruction, callees []*ssa.Function) {
+ // Because every call to a highly polymorphic and
+ // frequently used abstract method such as
+ // (io.Writer).Write is assumed to call every concrete
+ // Write method in the program, the call graph can
+ // contain a lot of duplication.
+ //
+ // TODO(taking): opt: consider making lazyCallees public.
+ // Using the same benchmarks as callgraph_test.go, removing just
+ // the explicit callgraph.Graph construction is 4x less memory
+ // and is 37% faster.
+ // CHA 86 ms/op 16 MB/op
+ // lazyCallees 63 ms/op 4 MB/op
+ for _, g := range callees {
+ addEdge(fnode, site, g)
+ }
+ }
+
+ for f := range allFuncs {
+ fnode := cg.CreateNode(f)
+ for _, b := range f.Blocks {
+ for _, instr := range b.Instrs {
+ if site, ok := instr.(ssa.CallInstruction); ok {
+ if g := site.Common().StaticCallee(); g != nil {
+ addEdge(fnode, site, g)
+ } else {
+ addEdges(fnode, site, calleesOf(site))
+ }
+ }
+ }
+ }
+ }
+
+ return cg
+}
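+
+// Minimal usage sketch (illustrative only; pkgs is assumed to be a
+// []*packages.Package loaded with golang.org/x/tools/go/packages in a mode
+// that includes syntax and type information):
+//
+//	prog, _ := ssautil.AllPackages(pkgs, ssa.InstantiateGenerics)
+//	prog.Build()
+//	cg := cha.CallGraph(prog)
+//	cg.DeleteSyntheticNodes()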
+
+// lazyCallees returns a function that maps a call site (in a function in fns)
+// to its callees within fns.
+//
+// The resulting function is not concurrency safe.
+func lazyCallees(fns map[*ssa.Function]bool) func(site ssa.CallInstruction) []*ssa.Function {
+ // funcsBySig contains all functions, keyed by signature. It is
+ // the effective set of address-taken functions used to resolve
+ // a dynamic call of a particular signature.
+ var funcsBySig typeutil.Map // value is []*ssa.Function
+
+ // methodsByID contains all methods, grouped by ID for efficient
+ // lookup.
+ //
+ // We must key by ID, not name, for correct resolution of interface
+ // calls to a type with two (unexported) methods spelled the same but
+ // from different packages. The fact that the concrete type implements
+ // the interface does not mean the call dispatches to both methods.
+ methodsByID := make(map[string][]*ssa.Function)
+
+ // An imethod represents an interface method I.m.
+ // (There's no go/types object for it;
+ // a *types.Func may be shared by many interfaces due to interface embedding.)
+ type imethod struct {
+ I *types.Interface
+ id string
+ }
+ // methodsMemo records, for every abstract method call I.m on
+ // interface type I, the set of concrete methods C.m of all
+ // types C that satisfy interface I.
+ //
+ // Abstract methods may be shared by several interfaces,
+ // hence we must pass I explicitly, not guess from m.
+ //
+ // methodsMemo is just a cache, so it needn't be a typeutil.Map.
+ methodsMemo := make(map[imethod][]*ssa.Function)
+ lookupMethods := func(I *types.Interface, m *types.Func) []*ssa.Function {
+ id := m.Id()
+ methods, ok := methodsMemo[imethod{I, id}]
+ if !ok {
+ for _, f := range methodsByID[id] {
+ C := f.Signature.Recv().Type() // named or *named
+ if types.Implements(C, I) {
+ methods = append(methods, f)
+ }
+ }
+ methodsMemo[imethod{I, id}] = methods
+ }
+ return methods
+ }
+
+ for f := range fns {
+ if f.Signature.Recv() == nil {
+ // Package initializers can never be address-taken.
+ if f.Name() == "init" && f.Synthetic == "package initializer" {
+ continue
+ }
+ funcs, _ := funcsBySig.At(f.Signature).([]*ssa.Function)
+ funcs = append(funcs, f)
+ funcsBySig.Set(f.Signature, funcs)
+ } else if obj := f.Object(); obj != nil {
+ id := obj.(*types.Func).Id()
+ methodsByID[id] = append(methodsByID[id], f)
+ }
+ }
+
+ return func(site ssa.CallInstruction) []*ssa.Function {
+ call := site.Common()
+ if call.IsInvoke() {
+ tiface := call.Value.Type().Underlying().(*types.Interface)
+ return lookupMethods(tiface, call.Method)
+ } else if g := call.StaticCallee(); g != nil {
+ return []*ssa.Function{g}
+ } else if _, ok := call.Value.(*ssa.Builtin); !ok {
+ fns, _ := funcsBySig.At(call.Signature()).([]*ssa.Function)
+ return fns
+ }
+ return nil
+ }
+}
diff --git a/vendor/golang.org/x/tools/go/callgraph/util.go b/vendor/golang.org/x/tools/go/callgraph/util.go
new file mode 100644
index 0000000..5499320
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/callgraph/util.go
@@ -0,0 +1,180 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package callgraph
+
+import "golang.org/x/tools/go/ssa"
+
+// This file provides various utilities over call graphs, such as
+// visitation and path search.
+
+// CalleesOf returns a new set containing all direct callees of the
+// caller node.
+func CalleesOf(caller *Node) map[*Node]bool {
+ callees := make(map[*Node]bool)
+ for _, e := range caller.Out {
+ callees[e.Callee] = true
+ }
+ return callees
+}
+
+// GraphVisitEdges visits all the edges in graph g in depth-first order.
+// The edge function is called for each edge in postorder. If it
+// returns non-nil, visitation stops and GraphVisitEdges returns that
+// value.
+func GraphVisitEdges(g *Graph, edge func(*Edge) error) error {
+ seen := make(map[*Node]bool)
+ var visit func(n *Node) error
+ visit = func(n *Node) error {
+ if !seen[n] {
+ seen[n] = true
+ for _, e := range n.Out {
+ if err := visit(e.Callee); err != nil {
+ return err
+ }
+ if err := edge(e); err != nil {
+ return err
+ }
+ }
+ }
+ return nil
+ }
+ for _, n := range g.Nodes {
+ if err := visit(n); err != nil {
+ return err
+ }
+ }
+ return nil
+}
+
+// PathSearch finds an arbitrary path starting at node start and
+// ending at some node for which isEnd() returns true. On success,
+// PathSearch returns the path as an ordered list of edges; on
+// failure, it returns nil.
+func PathSearch(start *Node, isEnd func(*Node) bool) []*Edge {
+ stack := make([]*Edge, 0, 32)
+ seen := make(map[*Node]bool)
+ var search func(n *Node) []*Edge
+ search = func(n *Node) []*Edge {
+ if !seen[n] {
+ seen[n] = true
+ if isEnd(n) {
+ return stack
+ }
+ for _, e := range n.Out {
+ stack = append(stack, e) // push
+ if found := search(e.Callee); found != nil {
+ return found
+ }
+ stack = stack[:len(stack)-1] // pop
+ }
+ }
+ return nil
+ }
+ return search(start)
+}
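+
+// Minimal usage sketch (illustrative only; cg is assumed to be a *Graph built
+// by one of the call graph construction packages): find some path from the
+// root to a function named "Fatal".
+//
+//	edges := callgraph.PathSearch(cg.Root, func(n *callgraph.Node) bool {
+//		return n.Func != nil && n.Func.Name() == "Fatal"
+//	})
+//	for _, e := range edges {
+//		fmt.Println(e)
+//	}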
+
+// DeleteSyntheticNodes removes from call graph g all nodes for
+// functions that do not correspond to source syntax. For historical
+// reasons, nodes for g.Root and package initializers are always
+// kept.
+//
+// As nodes are removed, edges are created to preserve the
+// reachability relation of the remaining nodes.
+func (g *Graph) DeleteSyntheticNodes() {
+ // Measurements on the standard library and go.tools show that
+ // resulting graph has ~15% fewer nodes and 4-8% fewer edges
+ // than the input.
+ //
+ // Inlining a wrapper of in-degree m, out-degree n adds m*n
+ // and removes m+n edges. Since most wrappers are monomorphic
+ // (n=1) this results in a slight reduction. Polymorphic
+ // wrappers (n>1), e.g. from embedding an interface value
+ // inside a struct to satisfy some interface, cause an
+ // increase in the graph, but they seem to be uncommon.
+
+ // Hash all existing edges to avoid creating duplicates.
+ edges := make(map[Edge]bool)
+ for _, cgn := range g.Nodes {
+ for _, e := range cgn.Out {
+ edges[*e] = true
+ }
+ }
+ for fn, cgn := range g.Nodes {
+ if cgn == g.Root || isInit(cgn.Func) || fn.Syntax() != nil {
+ continue // keep
+ }
+ for _, eIn := range cgn.In {
+ for _, eOut := range cgn.Out {
+ newEdge := Edge{eIn.Caller, eIn.Site, eOut.Callee}
+ if edges[newEdge] {
+ continue // don't add duplicate
+ }
+ AddEdge(eIn.Caller, eIn.Site, eOut.Callee)
+ edges[newEdge] = true
+ }
+ }
+ g.DeleteNode(cgn)
+ }
+}
+
+func isInit(fn *ssa.Function) bool {
+ return fn.Pkg != nil && fn.Pkg.Func("init") == fn
+}
+
+// DeleteNode removes node n and its edges from the graph g.
+// (NB: not efficient for batch deletion.)
+func (g *Graph) DeleteNode(n *Node) {
+ n.deleteIns()
+ n.deleteOuts()
+ delete(g.Nodes, n.Func)
+}
+
+// deleteIns deletes all incoming edges to n.
+func (n *Node) deleteIns() {
+ for _, e := range n.In {
+ removeOutEdge(e)
+ }
+ n.In = nil
+}
+
+// deleteOuts deletes all outgoing edges from n.
+func (n *Node) deleteOuts() {
+ for _, e := range n.Out {
+ removeInEdge(e)
+ }
+ n.Out = nil
+}
+
+// removeOutEdge removes edge.Caller's outgoing edge 'edge'.
+func removeOutEdge(edge *Edge) {
+ caller := edge.Caller
+ n := len(caller.Out)
+ for i, e := range caller.Out {
+ if e == edge {
+ // Replace it with the final element and shrink the slice.
+ caller.Out[i] = caller.Out[n-1]
+ caller.Out[n-1] = nil // aid GC
+ caller.Out = caller.Out[:n-1]
+ return
+ }
+ }
+ panic("edge not found: " + edge.String())
+}
+
+// removeInEdge removes edge.Callee's incoming edge 'edge'.
+func removeInEdge(edge *Edge) {
+ caller := edge.Callee
+ n := len(caller.In)
+ for i, e := range caller.In {
+ if e == edge {
+ // Replace it with the final element and shrink the slice.
+ caller.In[i] = caller.In[n-1]
+ caller.In[n-1] = nil // aid GC
+ caller.In = caller.In[:n-1]
+ return
+ }
+ }
+ panic("edge not found: " + edge.String())
+}
diff --git a/vendor/golang.org/x/tools/go/gcexportdata/gcexportdata.go b/vendor/golang.org/x/tools/go/gcexportdata/gcexportdata.go
new file mode 100644
index 0000000..137cc8d
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/gcexportdata/gcexportdata.go
@@ -0,0 +1,186 @@
+// Copyright 2016 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package gcexportdata provides functions for locating, reading, and
+// writing export data files containing type information produced by the
+// gc compiler. This package supports go1.7 export data format and all
+// later versions.
+//
+// Although it might seem convenient for this package to live alongside
+// go/types in the standard library, this would cause version skew
+// problems for developer tools that use it, since they must be able to
+// consume the outputs of the gc compiler both before and after a Go
+// update such as from Go 1.7 to Go 1.8. Because this package lives in
+// golang.org/x/tools, sites can update their version of this repo some
+// time before the Go 1.8 release and rebuild and redeploy their
+// developer tools, which will then be able to consume both Go 1.7 and
+// Go 1.8 export data files, so they will work before and after the
+// Go update. (See discussion at https://golang.org/issue/15651.)
+package gcexportdata // import "golang.org/x/tools/go/gcexportdata"
+
+import (
+ "bufio"
+ "bytes"
+ "encoding/json"
+ "fmt"
+ "go/token"
+ "go/types"
+ "io"
+ "os/exec"
+
+ "golang.org/x/tools/internal/gcimporter"
+)
+
+// Find returns the name of an object (.o) or archive (.a) file
+// containing type information for the specified import path,
+// using the go command.
+// If no file was found, an empty filename is returned.
+//
+// A relative srcDir is interpreted relative to the current working directory.
+//
+// Find also returns the package's resolved (canonical) import path,
+// reflecting the effects of srcDir and vendoring on importPath.
+//
+// Deprecated: Use the higher-level API in golang.org/x/tools/go/packages,
+// which is more efficient.
+func Find(importPath, srcDir string) (filename, path string) {
+ cmd := exec.Command("go", "list", "-json", "-export", "--", importPath)
+ cmd.Dir = srcDir
+ out, err := cmd.Output()
+ if err != nil {
+ return "", ""
+ }
+ var data struct {
+ ImportPath string
+ Export string
+ }
+ json.Unmarshal(out, &data)
+ return data.Export, data.ImportPath
+}
+
+// NewReader returns a reader for the export data section of an object
+// (.o) or archive (.a) file read from r. The new reader may provide
+// additional trailing data beyond the end of the export data.
+func NewReader(r io.Reader) (io.Reader, error) {
+ buf := bufio.NewReader(r)
+ _, size, err := gcimporter.FindExportData(buf)
+ if err != nil {
+ return nil, err
+ }
+
+ if size >= 0 {
+ // We were given an archive and found the __.PKGDEF in it.
+ // This tells us the size of the export data, and we don't
+ // need to return the entire file.
+ return &io.LimitedReader{
+ R: buf,
+ N: size,
+ }, nil
+ } else {
+ // We were given an object file. As such, we don't know how large
+ // the export data is and must return the entire file.
+ return buf, nil
+ }
+}
+
+// readAll works the same way as io.ReadAll, but avoids allocations and copies
+// by preallocating a byte slice of the necessary size if the size is known up
+// front. This is always possible when the input is an archive. In that case,
+// NewReader will return the known size using an io.LimitedReader.
+func readAll(r io.Reader) ([]byte, error) {
+ if lr, ok := r.(*io.LimitedReader); ok {
+ data := make([]byte, lr.N)
+ _, err := io.ReadFull(lr, data)
+ return data, err
+ }
+ return io.ReadAll(r)
+}
+
+// Read reads export data from in, decodes it, and returns type
+// information for the package.
+//
+// The package path (effectively its linker symbol prefix) is
+// specified by path, since unlike the package name, this information
+// may not be recorded in the export data.
+//
+// File position information is added to fset.
+//
+// Read may inspect and add to the imports map to ensure that references
+// within the export data to other packages are consistent. The caller
+// must ensure that imports[path] does not exist, or exists but is
+// incomplete (see types.Package.Complete), and Read inserts the
+// resulting package into this map entry.
+//
+// On return, the state of the reader is undefined.
+func Read(in io.Reader, fset *token.FileSet, imports map[string]*types.Package, path string) (*types.Package, error) {
+ data, err := readAll(in)
+ if err != nil {
+ return nil, fmt.Errorf("reading export data for %q: %v", path, err)
+ }
+
+ if bytes.HasPrefix(data, []byte("!<arch>")) {
+ return nil, fmt.Errorf("can't read export data for %q directly from an archive file (call gcexportdata.NewReader first to extract export data)", path)
+ }
+
+ // The indexed export format starts with an 'i'; the older
+ // binary export format starts with a 'c', 'd', or 'v'
+ // (from "version"). Select appropriate importer.
+ if len(data) > 0 {
+ switch data[0] {
+ case 'v', 'c', 'd': // binary, till go1.10
+ return nil, fmt.Errorf("binary (%c) import format is no longer supported", data[0])
+
+ case 'i': // indexed, till go1.19
+ _, pkg, err := gcimporter.IImportData(fset, imports, data[1:], path)
+ return pkg, err
+
+ case 'u': // unified, from go1.20
+ _, pkg, err := gcimporter.UImportData(fset, imports, data[1:], path)
+ return pkg, err
+
+ default:
+ l := len(data)
+ if l > 10 {
+ l = 10
+ }
+ return nil, fmt.Errorf("unexpected export data with prefix %q for path %s", string(data[:l]), path)
+ }
+ }
+ return nil, fmt.Errorf("empty export data for %s", path)
+}
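+
+// Minimal usage sketch (illustrative only): locate, open and decode the
+// export data for a single import path.
+//
+//	filename, path := gcexportdata.Find("fmt", ".")
+//	f, err := os.Open(filename)
+//	if err != nil {
+//		log.Fatal(err)
+//	}
+//	defer f.Close()
+//	r, err := gcexportdata.NewReader(f)
+//	if err != nil {
+//		log.Fatal(err)
+//	}
+//	fset := token.NewFileSet()
+//	imports := make(map[string]*types.Package)
+//	pkg, err := gcexportdata.Read(r, fset, imports, path)
+//	if err != nil {
+//		log.Fatal(err)
+//	}
+//	fmt.Println(pkg.Path(), pkg.Complete())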
+
+// Write writes encoded type information for the specified package to out.
+// The FileSet provides file position information for named objects.
+func Write(out io.Writer, fset *token.FileSet, pkg *types.Package) error {
+ if _, err := io.WriteString(out, "i"); err != nil {
+ return err
+ }
+ return gcimporter.IExportData(out, fset, pkg)
+}
+
+// ReadBundle reads an export bundle from in, decodes it, and returns type
+// information for the packages.
+// File position information is added to fset.
+//
+// ReadBundle may inspect and add to the imports map to ensure that references
+// within the export bundle to other packages are consistent.
+//
+// On return, the state of the reader is undefined.
+//
+// Experimental: This API is experimental and may change in the future.
+func ReadBundle(in io.Reader, fset *token.FileSet, imports map[string]*types.Package) ([]*types.Package, error) {
+ data, err := readAll(in)
+ if err != nil {
+ return nil, fmt.Errorf("reading export bundle: %v", err)
+ }
+ return gcimporter.IImportBundle(fset, imports, data)
+}
+
+// WriteBundle writes encoded type information for the specified packages to out.
+// The FileSet provides file position information for named objects.
+//
+// Experimental: This API is experimental and may change in the future.
+func WriteBundle(out io.Writer, fset *token.FileSet, pkgs []*types.Package) error {
+ return gcimporter.IExportBundle(out, fset, pkgs)
+}
diff --git a/vendor/golang.org/x/tools/go/gcexportdata/importer.go b/vendor/golang.org/x/tools/go/gcexportdata/importer.go
new file mode 100644
index 0000000..37a7247
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/gcexportdata/importer.go
@@ -0,0 +1,75 @@
+// Copyright 2016 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package gcexportdata
+
+import (
+ "fmt"
+ "go/token"
+ "go/types"
+ "os"
+)
+
+// NewImporter returns a new instance of the types.Importer interface
+// that reads type information from export data files written by gc.
+// The Importer also satisfies types.ImporterFrom.
+//
+// Export data files are located using "go build" workspace conventions
+// and the build.Default context.
+//
+// Use this importer instead of go/importer.For("gc", ...) to avoid the
+// version-skew problems described in the documentation of this package,
+// or to control the FileSet or access the imports map populated during
+// package loading.
+//
+// Deprecated: Use the higher-level API in golang.org/x/tools/go/packages,
+// which is more efficient.
+func NewImporter(fset *token.FileSet, imports map[string]*types.Package) types.ImporterFrom {
+ return importer{fset, imports}
+}
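+
+// Minimal usage sketch (illustrative only; "files" is assumed to be the
+// parsed []*ast.File of the package being type-checked):
+//
+//	fset := token.NewFileSet()
+//	imports := make(map[string]*types.Package)
+//	conf := types.Config{Importer: gcexportdata.NewImporter(fset, imports)}
+//	pkg, err := conf.Check("example.com/m", fset, files, nil)
+//	if err != nil {
+//		log.Fatal(err)
+//	}
+//	fmt.Println(pkg.Name())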
+
+type importer struct {
+ fset *token.FileSet
+ imports map[string]*types.Package
+}
+
+func (imp importer) Import(importPath string) (*types.Package, error) {
+ return imp.ImportFrom(importPath, "", 0)
+}
+
+func (imp importer) ImportFrom(importPath, srcDir string, mode types.ImportMode) (_ *types.Package, err error) {
+ filename, path := Find(importPath, srcDir)
+ if filename == "" {
+ if importPath == "unsafe" {
+ // Even for unsafe, call Find first in case
+ // the package was vendored.
+ return types.Unsafe, nil
+ }
+ return nil, fmt.Errorf("can't find import: %s", importPath)
+ }
+
+ if pkg, ok := imp.imports[path]; ok && pkg.Complete() {
+ return pkg, nil // cache hit
+ }
+
+ // open file
+ f, err := os.Open(filename)
+ if err != nil {
+ return nil, err
+ }
+ defer func() {
+ f.Close()
+ if err != nil {
+ // add file name to error
+ err = fmt.Errorf("reading export data: %s: %v", filename, err)
+ }
+ }()
+
+ r, err := NewReader(f)
+ if err != nil {
+ return nil, err
+ }
+
+ return Read(r, imp.fset, imp.imports, path)
+}
diff --git a/vendor/golang.org/x/tools/go/internal/cgo/cgo.go b/vendor/golang.org/x/tools/go/internal/cgo/cgo.go
new file mode 100644
index 0000000..697974b
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/internal/cgo/cgo.go
@@ -0,0 +1,219 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package cgo handles cgo preprocessing of files containing `import "C"`.
+//
+// DESIGN
+//
+// The approach taken is to run the cgo processor on the package's
+// CgoFiles and parse the output, faking the filenames of the
+// resulting ASTs so that the synthetic file containing the C types is
+// called "C" (e.g. "~/go/src/net/C") and the preprocessed files
+// have their original names (e.g. "~/go/src/net/cgo_unix.go"),
+// not the names of the actual temporary files.
+//
+// The advantage of this approach is its fidelity to 'go build'. The
+// downside is that the token.Position.Offset for each AST node is
+// incorrect, being an offset within the temporary file. Line numbers
+// should still be correct because of the //line comments.
+//
+// The logic of this file is mostly plundered from the 'go build'
+// tool, which also invokes the cgo preprocessor.
+//
+//
+// REJECTED ALTERNATIVE
+//
+// An alternative approach that we explored is to extend go/types'
+// Importer mechanism to provide the identity of the importing package
+// so that each time `import "C"` appears it resolves to a different
+// synthetic package containing just the objects needed in that case.
+// The loader would invoke cgo but parse only the cgo_types.go file
+// defining the package-level objects, discarding the other files
+// resulting from preprocessing.
+//
+// The benefit of this approach would have been that source-level
+// syntax information would correspond exactly to the original cgo
+// file, with no preprocessing involved, making source tools like
+// godoc, guru, and eg happy. However, the approach was rejected
+// due to the additional complexity it would impose on go/types. (It
+// made for a beautiful demo, though.)
+//
+// cgo files, despite their *.go extension, are not legal Go source
+// files per the specification since they may refer to unexported
+// members of package "C" such as C.int. Also, a function such as
+// C.getpwent has in effect two types, one matching its C type and one
+// which additionally returns (errno C.int). The cgo preprocessor
+// uses name mangling to distinguish these two functions in the
+// processed code, but go/types would need to duplicate this logic in
+// its handling of function calls, analogous to the treatment of map
+// lookups in which y=m[k] and y,ok=m[k] are both legal.
+
+package cgo
+
+import (
+ "fmt"
+ "go/ast"
+ "go/build"
+ "go/parser"
+ "go/token"
+ "log"
+ "os"
+ "os/exec"
+ "path/filepath"
+ "regexp"
+ "strings"
+)
+
+// ProcessFiles invokes the cgo preprocessor on bp.CgoFiles, parses
+// the output and returns the resulting ASTs.
+func ProcessFiles(bp *build.Package, fset *token.FileSet, DisplayPath func(path string) string, mode parser.Mode) ([]*ast.File, error) {
+ tmpdir, err := os.MkdirTemp("", strings.Replace(bp.ImportPath, "/", "_", -1)+"_C")
+ if err != nil {
+ return nil, err
+ }
+ defer os.RemoveAll(tmpdir)
+
+ pkgdir := bp.Dir
+ if DisplayPath != nil {
+ pkgdir = DisplayPath(pkgdir)
+ }
+
+ cgoFiles, cgoDisplayFiles, err := Run(bp, pkgdir, tmpdir, false)
+ if err != nil {
+ return nil, err
+ }
+ var files []*ast.File
+ for i := range cgoFiles {
+ rd, err := os.Open(cgoFiles[i])
+ if err != nil {
+ return nil, err
+ }
+ display := filepath.Join(bp.Dir, cgoDisplayFiles[i])
+ f, err := parser.ParseFile(fset, display, rd, mode)
+ rd.Close()
+ if err != nil {
+ return nil, err
+ }
+ files = append(files, f)
+ }
+ return files, nil
+}
+
+var cgoRe = regexp.MustCompile(`[/\\:]`)
+
+// Run invokes the cgo preprocessor on bp.CgoFiles and returns two
+// lists of files: the resulting processed files (in temporary
+// directory tmpdir) and the corresponding names of the unprocessed files.
+//
+// Run is adapted from (*builder).cgo in
+// $GOROOT/src/cmd/go/build.go, but these features are unsupported:
+// Objective C, CGOPKGPATH, CGO_FLAGS.
+//
+// If useabs is set to true, absolute paths of the bp.CgoFiles will be passed in
+// to the cgo preprocessor. This in turn will set the // line comments
+// referring to those files to use absolute paths. This is needed for
+// go/packages using the legacy go list support so it is able to find
+// the original files.
+func Run(bp *build.Package, pkgdir, tmpdir string, useabs bool) (files, displayFiles []string, err error) {
+ cgoCPPFLAGS, _, _, _ := cflags(bp, true)
+ _, cgoexeCFLAGS, _, _ := cflags(bp, false)
+
+ if len(bp.CgoPkgConfig) > 0 {
+ pcCFLAGS, err := pkgConfigFlags(bp)
+ if err != nil {
+ return nil, nil, err
+ }
+ cgoCPPFLAGS = append(cgoCPPFLAGS, pcCFLAGS...)
+ }
+
+ // Allows including _cgo_export.h from .[ch] files in the package.
+ cgoCPPFLAGS = append(cgoCPPFLAGS, "-I", tmpdir)
+
+ // _cgo_gotypes.go (displayed "C") contains the type definitions.
+ files = append(files, filepath.Join(tmpdir, "_cgo_gotypes.go"))
+ displayFiles = append(displayFiles, "C")
+ for _, fn := range bp.CgoFiles {
+ // "foo.cgo1.go" (displayed "foo.go") is the processed Go source.
+ f := cgoRe.ReplaceAllString(fn[:len(fn)-len("go")], "_")
+ files = append(files, filepath.Join(tmpdir, f+"cgo1.go"))
+ displayFiles = append(displayFiles, fn)
+ }
+
+ var cgoflags []string
+ if bp.Goroot && bp.ImportPath == "runtime/cgo" {
+ cgoflags = append(cgoflags, "-import_runtime_cgo=false")
+ }
+ if bp.Goroot && bp.ImportPath == "runtime/race" || bp.ImportPath == "runtime/cgo" {
+ cgoflags = append(cgoflags, "-import_syscall=false")
+ }
+
+ var cgoFiles []string = bp.CgoFiles
+ if useabs {
+ cgoFiles = make([]string, len(bp.CgoFiles))
+ for i := range cgoFiles {
+ cgoFiles[i] = filepath.Join(pkgdir, bp.CgoFiles[i])
+ }
+ }
+
+ args := stringList(
+ "go", "tool", "cgo", "-objdir", tmpdir, cgoflags, "--",
+ cgoCPPFLAGS, cgoexeCFLAGS, cgoFiles,
+ )
+ if false {
+ log.Printf("Running cgo for package %q: %s (dir=%s)", bp.ImportPath, args, pkgdir)
+ }
+ cmd := exec.Command(args[0], args[1:]...)
+ cmd.Dir = pkgdir
+ cmd.Env = append(os.Environ(), "PWD="+pkgdir)
+ cmd.Stdout = os.Stderr
+ cmd.Stderr = os.Stderr
+ if err := cmd.Run(); err != nil {
+ return nil, nil, fmt.Errorf("cgo failed: %s: %s", args, err)
+ }
+
+ return files, displayFiles, nil
+}
+
+// -- unmodified from 'go build' ---------------------------------------
+
+// Return the flags to use when invoking the C or C++ compilers, or cgo.
+func cflags(p *build.Package, def bool) (cppflags, cflags, cxxflags, ldflags []string) {
+ var defaults string
+ if def {
+ defaults = "-g -O2"
+ }
+
+ cppflags = stringList(envList("CGO_CPPFLAGS", ""), p.CgoCPPFLAGS)
+ cflags = stringList(envList("CGO_CFLAGS", defaults), p.CgoCFLAGS)
+ cxxflags = stringList(envList("CGO_CXXFLAGS", defaults), p.CgoCXXFLAGS)
+ ldflags = stringList(envList("CGO_LDFLAGS", defaults), p.CgoLDFLAGS)
+ return
+}
+
+// envList returns the value of the given environment variable broken
+// into fields, using the default value when the variable is empty.
+func envList(key, def string) []string {
+ v := os.Getenv(key)
+ if v == "" {
+ v = def
+ }
+ return strings.Fields(v)
+}
+
+// stringList's arguments should be a sequence of string or []string values.
+// stringList flattens them into a single []string.
+func stringList(args ...interface{}) []string {
+ var x []string
+ for _, arg := range args {
+ switch arg := arg.(type) {
+ case []string:
+ x = append(x, arg...)
+ case string:
+ x = append(x, arg)
+ default:
+ panic("stringList: invalid argument")
+ }
+ }
+ return x
+}
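As an orientation aid, here is a minimal sketch of how a caller such as the loader drives this preprocessor: resolve a cgo-using package with go/build, then hand it to ProcessFiles to obtain parsed ASTs whose filenames are the display names described above. This package is internal, so the import below only compiles from within golang.org/x/tools; the "net" package is merely an example of a package that imports "C".

package main

import (
	"fmt"
	"go/build"
	"go/parser"
	"go/token"
	"log"

	"golang.org/x/tools/go/internal/cgo" // internal: usable only inside x/tools
)

func main() {
	// Locate a cgo-using package with the default build context.
	bp, err := build.Import("net", "", 0)
	if err != nil {
		log.Fatal(err)
	}
	fset := token.NewFileSet()
	// ProcessFiles runs `go tool cgo` in a temporary directory and parses
	// the output, faking the file names so positions refer to the originals.
	files, err := cgo.ProcessFiles(bp, fset, nil, parser.ParseComments)
	if err != nil {
		log.Fatal(err)
	}
	for _, f := range files {
		fmt.Println(fset.File(f.Pos()).Name(), len(f.Decls), "decls")
	}
}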
diff --git a/vendor/golang.org/x/tools/go/internal/cgo/cgo_pkgconfig.go b/vendor/golang.org/x/tools/go/internal/cgo/cgo_pkgconfig.go
new file mode 100644
index 0000000..2455be5
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/internal/cgo/cgo_pkgconfig.go
@@ -0,0 +1,42 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package cgo
+
+import (
+ "errors"
+ "fmt"
+ "go/build"
+ "os/exec"
+ "strings"
+)
+
+// pkgConfig runs pkg-config with the specified arguments and returns the flags it prints.
+func pkgConfig(mode string, pkgs []string) (flags []string, err error) {
+ cmd := exec.Command("pkg-config", append([]string{mode}, pkgs...)...)
+ out, err := cmd.Output()
+ if err != nil {
+ s := fmt.Sprintf("%s failed: %v", strings.Join(cmd.Args, " "), err)
+ if len(out) > 0 {
+ s = fmt.Sprintf("%s: %s", s, out)
+ }
+ if err, ok := err.(*exec.ExitError); ok && len(err.Stderr) > 0 {
+ s = fmt.Sprintf("%s\nstderr:\n%s", s, err.Stderr)
+ }
+ return nil, errors.New(s)
+ }
+ if len(out) > 0 {
+ flags = strings.Fields(string(out))
+ }
+ return
+}
+
+// pkgConfigFlags calls pkg-config if needed and returns the cflags
+// needed to build the package.
+func pkgConfigFlags(p *build.Package) (cflags []string, err error) {
+ if len(p.CgoPkgConfig) == 0 {
+ return nil, nil
+ }
+ return pkgConfig("--cflags", p.CgoPkgConfig)
+}
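For reference, a standalone sketch of the pkg-config handshake performed by the helpers above: run pkg-config with --cflags and split its stdout into individual flags, exactly as pkgConfig does. The "libpng" module name is illustrative only.

package main

import (
	"fmt"
	"log"
	"os/exec"
	"strings"
)

func main() {
	// Equivalent of pkgConfig("--cflags", []string{"libpng"}) above.
	out, err := exec.Command("pkg-config", "--cflags", "libpng").Output()
	if err != nil {
		log.Fatalf("pkg-config failed: %v", err)
	}
	fmt.Println(strings.Fields(string(out)))
}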
diff --git a/vendor/golang.org/x/tools/go/loader/doc.go b/vendor/golang.org/x/tools/go/loader/doc.go
new file mode 100644
index 0000000..e35b1fd
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/loader/doc.go
@@ -0,0 +1,202 @@
+// Copyright 2015 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package loader loads a complete Go program from source code, parsing
+// and type-checking the initial packages plus their transitive closure
+// of dependencies. The ASTs and the derived facts are retained for
+// later use.
+//
+// Deprecated: This is an older API and does not have support
+// for modules. Use golang.org/x/tools/go/packages instead.
+//
+// The package defines two primary types: Config, which specifies a
+// set of initial packages to load and various other options; and
+// Program, which is the result of successfully loading the packages
+// specified by a configuration.
+//
+// The configuration can be set directly, but *Config provides various
+// convenience methods to simplify the common cases, each of which can
+// be called any number of times. Finally, these are followed by a
+// call to Load() to actually load and type-check the program.
+//
+// var conf loader.Config
+//
+// // Use the command-line arguments to specify
+// // a set of initial packages to load from source.
+// // See FromArgsUsage for help.
+// rest, err := conf.FromArgs(os.Args[1:], wantTests)
+//
+// // Parse the specified files and create an ad hoc package with path "foo".
+// // All files must have the same 'package' declaration.
+// conf.CreateFromFilenames("foo", "foo.go", "bar.go")
+//
+// // Create an ad hoc package with path "foo" from
+// // the specified already-parsed files.
+// // All ASTs must have the same 'package' declaration.
+// conf.CreateFromFiles("foo", parsedFiles)
+//
+// // Add "runtime" to the set of packages to be loaded.
+// conf.Import("runtime")
+//
+// // Adds "fmt" and "fmt_test" to the set of packages
+// // to be loaded. "fmt" will include *_test.go files.
+// conf.ImportWithTests("fmt")
+//
+// // Finally, load all the packages specified by the configuration.
+// prog, err := conf.Load()
+//
+// See examples_test.go for examples of API usage.
+//
+// # CONCEPTS AND TERMINOLOGY
+//
+// The WORKSPACE is the set of packages accessible to the loader. The
+// workspace is defined by Config.Build, a *build.Context. The
+// default context treats subdirectories of $GOROOT and $GOPATH as
+// packages, but this behavior may be overridden.
+//
+// An AD HOC package is one specified as a set of source files on the
+// command line. In the simplest case, it may consist of a single file
+// such as $GOROOT/src/net/http/triv.go.
+//
+// EXTERNAL TEST packages are those comprised of a set of *_test.go
+// files all with the same 'package foo_test' declaration, all in the
+// same directory. (go/build.Package calls these files XTestFiles.)
+//
+// An IMPORTABLE package is one that can be referred to by some import
+// spec. Every importable package is uniquely identified by its
+// PACKAGE PATH or just PATH, a string such as "fmt", "encoding/json",
+// or "cmd/vendor/golang.org/x/arch/x86/x86asm". A package path
+// typically denotes a subdirectory of the workspace.
+//
+// An import declaration uses an IMPORT PATH to refer to a package.
+// Most import declarations use the package path as the import path.
+//
+// Due to VENDORING (https://golang.org/s/go15vendor), the
+// interpretation of an import path may depend on the directory in which
+// it appears. To resolve an import path to a package path, go/build
+// must search the enclosing directories for a subdirectory named
+// "vendor".
+//
+// Ad hoc packages and external test packages are NON-IMPORTABLE. The
+// path of an ad hoc package is inferred from the package
+// declarations of its files and is therefore not a unique package key.
+// For example, Config.CreatePkgs may specify two initial ad hoc
+// packages, both with path "main".
+//
+// An AUGMENTED package is an importable package P plus all the
+// *_test.go files with the same 'package foo' declaration as P.
+// (go/build.Package calls these files TestFiles.)
+//
+// The INITIAL packages are those specified in the configuration. A
+// DEPENDENCY is a package loaded to satisfy an import in an initial
+// package or another dependency.
+package loader
+
+// IMPLEMENTATION NOTES
+//
+// 'go test', in-package test files, and import cycles
+// ---------------------------------------------------
+//
+// An external test package may depend upon members of the augmented
+// package that are not in the unaugmented package, such as functions
+// that expose internals. (See bufio/export_test.go for an example.)
+// So, the loader must ensure that for each external test package
+// it loads, it also augments the corresponding non-test package.
+//
+// The import graph over n unaugmented packages must be acyclic; the
+// import graph over n-1 unaugmented packages plus one augmented
+// package must also be acyclic. ('go test' relies on this.) But the
+// import graph over n augmented packages may contain cycles.
+//
+// First, all the (unaugmented) non-test packages and their
+// dependencies are imported in the usual way; the loader reports an
+// error if it detects an import cycle.
+//
+// Then, each package P for which testing is desired is augmented by
+// the list P' of its in-package test files, by calling
+// (*types.Checker).Files. This arrangement ensures that P' may
+// reference definitions within P, but P may not reference definitions
+// within P'. Furthermore, P' may import any other package, including
+// ones that depend upon P, without an import cycle error.
+//
+// Consider two packages A and B, both of which have lists of
+// in-package test files we'll call A' and B', and which have the
+// following import graph edges:
+// B imports A
+// B' imports A
+// A' imports B
+// This last edge would be expected to create an error were it not
+// for the special type-checking discipline above.
+// Cycles of size greater than two are possible. For example:
+// compress/bzip2/bzip2_test.go (package bzip2) imports "io/ioutil"
+// io/ioutil/tempfile_test.go (package ioutil) imports "regexp"
+// regexp/exec_test.go (package regexp) imports "compress/bzip2"
+//
+//
+// Concurrency
+// -----------
+//
+// Let us define the import dependency graph as follows. Each node is a
+// list of files passed to (Checker).Files at once. Many of these lists
+// are the production code of an importable Go package, so those nodes
+// are labelled by the package's path. The remaining nodes are
+// ad hoc packages and lists of in-package *_test.go files that augment
+// an importable package; those nodes have no label.
+//
+// The edges of the graph represent import statements appearing within a
+// file. An edge connects a node (a list of files) to the node it
+// imports, which is importable and thus always labelled.
+//
+// Loading is controlled by this dependency graph.
+//
+// To reduce I/O latency, we start loading a package's dependencies
+// asynchronously as soon as we've parsed its files and enumerated its
+// imports (scanImports). This performs a preorder traversal of the
+// import dependency graph.
+//
+// To exploit hardware parallelism, we type-check unrelated packages in
+// parallel, where "unrelated" means not ordered by the partial order of
+// the import dependency graph.
+//
+// We use a concurrency-safe non-blocking cache (importer.imported) to
+// record the results of type-checking, whether success or failure. An
+// entry is created in this cache by startLoad the first time the
+// package is imported. The first goroutine to request an entry becomes
+// responsible for completing the task and broadcasting completion to
+// subsequent requestors, which block until then.
+//
+// Type checking occurs in (parallel) postorder: we cannot type-check a
+// set of files until we have loaded and type-checked all of their
+// immediate dependencies (and thus all of their transitive
+// dependencies). If the input were guaranteed free of import cycles,
+// this would be trivial: we could simply wait for completion of the
+// dependencies and then invoke the typechecker.
+//
+// But as we saw in the 'go test' section above, some cycles in the
+// import graph over packages are actually legal, so long as the
+// cycle-forming edge originates in the in-package test files that
+// augment the package. This explains why the nodes of the import
+// dependency graph are not packages, but lists of files: the unlabelled
+// nodes avoid the cycles. Consider packages A and B where B imports A
+// and A's in-package tests AT import B. The naively constructed import
+// graph over packages would contain a cycle (A+AT) --> B --> (A+AT) but
+// the graph over lists of files is AT --> B --> A, where AT is an
+// unlabelled node.
+//
+// Awaiting completion of the dependencies in a cyclic graph would
+// deadlock, so we must materialize the import dependency graph (as
+// importer.graph) and check whether each import edge forms a cycle. If
+// x imports y, and the graph already contains a path from y to x, then
+// there is an import cycle, in which case the processing of x must not
+// wait for the completion of processing of y.
+//
+// When the type-checker makes a callback (doImport) to the loader for a
+// given import edge, there are two possible cases. In the normal case,
+// the dependency has already been completely type-checked; doImport
+// does a cache lookup and returns it. In the cyclic case, the entry in
+// the cache is still necessarily incomplete, indicating a cycle. We
+// perform the cycle check again to obtain the error message, and return
+// the error.
+//
+// The result of using concurrency is about a 2.5x speedup for stdlib_test.
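A compilable version of the usage sketched in the package comment above, assuming a GOPATH-style workspace (recall that this API is deprecated in favour of go/packages):

package main

import (
	"fmt"
	"log"
	"os"

	"golang.org/x/tools/go/loader"
)

func main() {
	var conf loader.Config
	// Interpret the command-line arguments as initial packages,
	// loading in-package and external tests as well.
	if _, err := conf.FromArgs(os.Args[1:], true); err != nil {
		log.Fatal(err)
	}
	prog, err := conf.Load()
	if err != nil {
		log.Fatal(err)
	}
	for _, info := range prog.InitialPackages() {
		fmt.Printf("%s: %d files, %d errors\n",
			info.Pkg.Path(), len(info.Files), len(info.Errors))
	}
}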
diff --git a/vendor/golang.org/x/tools/go/loader/loader.go b/vendor/golang.org/x/tools/go/loader/loader.go
new file mode 100644
index 0000000..013c0f5
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/loader/loader.go
@@ -0,0 +1,1066 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package loader
+
+// See doc.go for package documentation and implementation notes.
+
+import (
+ "errors"
+ "fmt"
+ "go/ast"
+ "go/build"
+ "go/parser"
+ "go/token"
+ "go/types"
+ "os"
+ "path/filepath"
+ "sort"
+ "strings"
+ "sync"
+ "time"
+
+ "golang.org/x/tools/go/ast/astutil"
+ "golang.org/x/tools/go/internal/cgo"
+ "golang.org/x/tools/internal/versions"
+)
+
+var ignoreVendor build.ImportMode
+
+const trace = false // show timing info for type-checking
+
+// Config specifies the configuration for loading a whole program from
+// Go source code.
+// The zero value for Config is a ready-to-use default configuration.
+type Config struct {
+ // Fset is the file set for the parser to use when loading the
+ // program. If nil, it may be lazily initialized by any
+ // method of Config.
+ Fset *token.FileSet
+
+ // ParserMode specifies the mode to be used by the parser when
+ // loading source packages.
+ ParserMode parser.Mode
+
+ // TypeChecker contains options relating to the type checker.
+ //
+ // The supplied IgnoreFuncBodies is not used; the effective
+ // value comes from the TypeCheckFuncBodies func below.
+ // The supplied Import function is not used either.
+ TypeChecker types.Config
+
+ // TypeCheckFuncBodies is a predicate over package paths.
+ // A package for which the predicate is false will
+ // have its package-level declarations type checked, but not
+ // its function bodies; this can be used to quickly load
+ // dependencies from source. If nil, all func bodies are type
+ // checked.
+ TypeCheckFuncBodies func(path string) bool
+
+ // If Build is non-nil, it is used to locate source packages.
+ // Otherwise &build.Default is used.
+ //
+ // By default, cgo is invoked to preprocess Go files that
+ // import the fake package "C". This behaviour can be
+ // disabled by setting CGO_ENABLED=0 in the environment prior
+ // to startup, or by setting Build.CgoEnabled=false.
+ Build *build.Context
+
+ // The current directory, used for resolving relative package
+ // references such as "./go/loader". If empty, os.Getwd will be
+ // used instead.
+ Cwd string
+
+ // If DisplayPath is non-nil, it is used to transform each
+ // file name obtained from Build.Import(). This can be used
+ // to prevent a virtualized build.Config's file names from
+ // leaking into the user interface.
+ DisplayPath func(path string) string
+
+ // If AllowErrors is true, Load will return a Program even
+	// if some of its packages contained I/O, parser or type
+ // errors; such errors are accessible via PackageInfo.Errors. If
+ // false, Load will fail if any package had an error.
+ AllowErrors bool
+
+ // CreatePkgs specifies a list of non-importable initial
+ // packages to create. The resulting packages will appear in
+ // the corresponding elements of the Program.Created slice.
+ CreatePkgs []PkgSpec
+
+ // ImportPkgs specifies a set of initial packages to load.
+ // The map keys are package paths.
+ //
+ // The map value indicates whether to load tests. If true, Load
+ // will add and type-check two lists of files to the package:
+ // non-test files followed by in-package *_test.go files. In
+ // addition, it will append the external test package (if any)
+ // to Program.Created.
+ ImportPkgs map[string]bool
+
+ // FindPackage is called during Load to create the build.Package
+ // for a given import path from a given directory.
+ // If FindPackage is nil, (*build.Context).Import is used.
+ // A client may use this hook to adapt to a proprietary build
+ // system that does not follow the "go build" layout
+ // conventions, for example.
+ //
+ // It must be safe to call concurrently from multiple goroutines.
+ FindPackage func(ctxt *build.Context, importPath, fromDir string, mode build.ImportMode) (*build.Package, error)
+
+ // AfterTypeCheck is called immediately after a list of files
+ // has been type-checked and appended to info.Files.
+ //
+ // This optional hook function is the earliest opportunity for
+ // the client to observe the output of the type checker,
+ // which may be useful to reduce analysis latency when loading
+ // a large program.
+ //
+ // The function is permitted to modify info.Info, for instance
+ // to clear data structures that are no longer needed, which can
+ // dramatically reduce peak memory consumption.
+ //
+ // The function may be called twice for the same PackageInfo:
+ // once for the files of the package and again for the
+ // in-package test files.
+ //
+ // It must be safe to call concurrently from multiple goroutines.
+ AfterTypeCheck func(info *PackageInfo, files []*ast.File)
+}
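A brief sketch of how the optional knobs above combine in practice: restrict full type-checking to the packages of interest and observe results early so memory can be released. The example.com/ prefix and the imported path are illustrative only.

package main

import (
	"fmt"
	"go/ast"
	"go/parser"
	"log"
	"strings"

	"golang.org/x/tools/go/loader"
)

func main() {
	conf := loader.Config{
		ParserMode:  parser.AllErrors,
		AllowErrors: true,
		// Type-check function bodies only for first-party packages.
		TypeCheckFuncBodies: func(path string) bool {
			return strings.HasPrefix(path, "example.com/")
		},
		// Observe each type-checked file list early, then drop maps
		// that are no longer needed to reduce peak memory.
		AfterTypeCheck: func(info *loader.PackageInfo, files []*ast.File) {
			fmt.Println("checked", info.Pkg.Path(), "with", len(files), "files")
			info.Defs = nil
			info.Uses = nil
		},
	}
	conf.Import("example.com/somepkg") // hypothetical initial package
	if _, err := conf.Load(); err != nil {
		log.Fatal(err)
	}
}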
+
+// A PkgSpec specifies a non-importable package to be created by Load.
+// Files are processed first, but typically only one of Files and
+// Filenames is provided. The path needn't be globally unique.
+//
+// For vendoring purposes, the package's directory is the one that
+// contains the first file.
+type PkgSpec struct {
+ Path string // package path ("" => use package declaration)
+ Files []*ast.File // ASTs of already-parsed files
+ Filenames []string // names of files to be parsed
+}
+
+// A Program is a Go program loaded from source as specified by a Config.
+type Program struct {
+ Fset *token.FileSet // the file set for this program
+
+ // Created[i] contains the initial package whose ASTs or
+ // filenames were supplied by Config.CreatePkgs[i], followed by
+ // the external test package, if any, of each package in
+ // Config.ImportPkgs ordered by ImportPath.
+ //
+ // NOTE: these files must not import "C". Cgo preprocessing is
+ // only performed on imported packages, not ad hoc packages.
+ //
+ // TODO(adonovan): we need to copy and adapt the logic of
+ // goFilesPackage (from $GOROOT/src/cmd/go/build.go) and make
+ // Config.Import and Config.Create methods return the same kind
+ // of entity, essentially a build.Package.
+ // Perhaps we can even reuse that type directly.
+ Created []*PackageInfo
+
+ // Imported contains the initially imported packages,
+ // as specified by Config.ImportPkgs.
+ Imported map[string]*PackageInfo
+
+ // AllPackages contains the PackageInfo of every package
+ // encountered by Load: all initial packages and all
+ // dependencies, including incomplete ones.
+ AllPackages map[*types.Package]*PackageInfo
+
+ // importMap is the canonical mapping of package paths to
+ // packages. It contains all Imported initial packages, but not
+ // Created ones, and all imported dependencies.
+ importMap map[string]*types.Package
+}
+
+// PackageInfo holds the ASTs and facts derived by the type-checker
+// for a single package.
+//
+// Not mutated once exposed via the API.
+type PackageInfo struct {
+ Pkg *types.Package
+ Importable bool // true if 'import "Pkg.Path()"' would resolve to this
+ TransitivelyErrorFree bool // true if Pkg and all its dependencies are free of errors
+ Files []*ast.File // syntax trees for the package's files
+ Errors []error // non-nil if the package had errors
+ types.Info // type-checker deductions.
+ dir string // package directory
+
+ checker *types.Checker // transient type-checker state
+ errorFunc func(error)
+}
+
+func (info *PackageInfo) String() string { return info.Pkg.Path() }
+
+func (info *PackageInfo) appendError(err error) {
+ if info.errorFunc != nil {
+ info.errorFunc(err)
+ } else {
+ fmt.Fprintln(os.Stderr, err)
+ }
+ info.Errors = append(info.Errors, err)
+}
+
+func (conf *Config) fset() *token.FileSet {
+ if conf.Fset == nil {
+ conf.Fset = token.NewFileSet()
+ }
+ return conf.Fset
+}
+
+// ParseFile is a convenience function (intended for testing) that invokes
+// the parser using the Config's FileSet, which is initialized if nil.
+//
+// src specifies the parser input as a string, []byte, or io.Reader, and
+// filename is its apparent name. If src is nil, the contents of
+// filename are read from the file system.
+func (conf *Config) ParseFile(filename string, src interface{}) (*ast.File, error) {
+ // TODO(adonovan): use conf.build() etc like parseFiles does.
+ return parser.ParseFile(conf.fset(), filename, src, conf.ParserMode)
+}
+
+// FromArgsUsage is a partial usage message that applications calling
+// FromArgs may wish to include in their -help output.
+const FromArgsUsage = `
+<args> is a list of arguments denoting a set of initial packages.
+It may take one of two forms:
+
+1. A list of *.go source files.
+
+ All of the specified files are loaded, parsed and type-checked
+ as a single package. All the files must belong to the same directory.
+
+2. A list of import paths, each denoting a package.
+
+ The package's directory is found relative to the $GOROOT and
+ $GOPATH using similar logic to 'go build', and the *.go files in
+ that directory are loaded, parsed and type-checked as a single
+ package.
+
+ In addition, all *_test.go files in the directory are then loaded
+ and parsed. Those files whose package declaration equals that of
+ the non-*_test.go files are included in the primary package. Test
+ files whose package declaration ends with "_test" are type-checked
+ as another package, the 'external' test package, so that a single
+ import path may denote two packages. (Whether this behaviour is
+ enabled is tool-specific, and may depend on additional flags.)
+
+A '--' argument terminates the list of packages.
+`
+
+// FromArgs interprets args as a set of initial packages to load from
+// source and updates the configuration. It returns the list of
+// unconsumed arguments.
+//
+// It is intended for use in command-line interfaces that require a
+// set of initial packages to be specified; see FromArgsUsage message
+// for details.
+//
+// Only superficial errors are reported at this stage; errors dependent
+// on I/O are detected during Load.
+func (conf *Config) FromArgs(args []string, xtest bool) ([]string, error) {
+ var rest []string
+ for i, arg := range args {
+ if arg == "--" {
+ rest = args[i+1:]
+ args = args[:i]
+ break // consume "--" and return the remaining args
+ }
+ }
+
+ if len(args) > 0 && strings.HasSuffix(args[0], ".go") {
+		// Assume args is a list of *.go files
+ // denoting a single ad hoc package.
+ for _, arg := range args {
+ if !strings.HasSuffix(arg, ".go") {
+ return nil, fmt.Errorf("named files must be .go files: %s", arg)
+ }
+ }
+ conf.CreateFromFilenames("", args...)
+ } else {
+ // Assume args are directories each denoting a
+ // package and (perhaps) an external test, iff xtest.
+ for _, arg := range args {
+ if xtest {
+ conf.ImportWithTests(arg)
+ } else {
+ conf.Import(arg)
+ }
+ }
+ }
+
+ return rest, nil
+}
+
+// CreateFromFilenames is a convenience function that adds
+// a conf.CreatePkgs entry to create a package of the specified *.go
+// files.
+func (conf *Config) CreateFromFilenames(path string, filenames ...string) {
+ conf.CreatePkgs = append(conf.CreatePkgs, PkgSpec{Path: path, Filenames: filenames})
+}
+
+// CreateFromFiles is a convenience function that adds a conf.CreatePkgs
+// entry to create a package of the specified path and parsed files.
+func (conf *Config) CreateFromFiles(path string, files ...*ast.File) {
+ conf.CreatePkgs = append(conf.CreatePkgs, PkgSpec{Path: path, Files: files})
+}
+
+// ImportWithTests is a convenience function that adds path to
+// ImportPkgs, the set of initial source packages located relative to
+// $GOPATH. The package will be augmented by any *_test.go files in
+// its directory that contain a "package x" (not "package x_test")
+// declaration.
+//
+// In addition, if any *_test.go files contain a "package x_test"
+// declaration, an additional package comprising just those files will
+// be added to CreatePkgs.
+func (conf *Config) ImportWithTests(path string) { conf.addImport(path, true) }
+
+// Import is a convenience function that adds path to ImportPkgs, the
+// set of initial packages that will be imported from source.
+func (conf *Config) Import(path string) { conf.addImport(path, false) }
+
+func (conf *Config) addImport(path string, tests bool) {
+ if path == "C" {
+ return // ignore; not a real package
+ }
+ if conf.ImportPkgs == nil {
+ conf.ImportPkgs = make(map[string]bool)
+ }
+ conf.ImportPkgs[path] = conf.ImportPkgs[path] || tests
+}
+
+// PathEnclosingInterval returns the PackageInfo and ast.Node that
+// contain source interval [start, end), and all the node's ancestors
+// up to the AST root. It searches all ast.Files of all packages in prog.
+// exact is defined as for astutil.PathEnclosingInterval.
+//
+// The zero value is returned if not found.
+func (prog *Program) PathEnclosingInterval(start, end token.Pos) (pkg *PackageInfo, path []ast.Node, exact bool) {
+ for _, info := range prog.AllPackages {
+ for _, f := range info.Files {
+ if f.Pos() == token.NoPos {
+ // This can happen if the parser saw
+ // too many errors and bailed out.
+ // (Use parser.AllErrors to prevent that.)
+ continue
+ }
+ if !tokenFileContainsPos(prog.Fset.File(f.Pos()), start) {
+ continue
+ }
+ if path, exact := astutil.PathEnclosingInterval(f, start, end); path != nil {
+ return info, path, exact
+ }
+ }
+ }
+ return nil, nil, false
+}
+
+// InitialPackages returns a new slice containing the set of initial
+// packages (Created + Imported) in unspecified order.
+func (prog *Program) InitialPackages() []*PackageInfo {
+ infos := make([]*PackageInfo, 0, len(prog.Created)+len(prog.Imported))
+ infos = append(infos, prog.Created...)
+ for _, info := range prog.Imported {
+ infos = append(infos, info)
+ }
+ return infos
+}
+
+// Package returns the ASTs and results of type checking for the
+// specified package.
+func (prog *Program) Package(path string) *PackageInfo {
+ if info, ok := prog.AllPackages[prog.importMap[path]]; ok {
+ return info
+ }
+ for _, info := range prog.Created {
+ if path == info.Pkg.Path() {
+ return info
+ }
+ }
+ return nil
+}
+
+// ---------- Implementation ----------
+
+// importer holds the working state of the algorithm.
+type importer struct {
+ conf *Config // the client configuration
+ start time.Time // for logging
+
+ progMu sync.Mutex // guards prog
+ prog *Program // the resulting program
+
+ // findpkg is a memoization of FindPackage.
+ findpkgMu sync.Mutex // guards findpkg
+ findpkg map[findpkgKey]*findpkgValue
+
+ importedMu sync.Mutex // guards imported
+ imported map[string]*importInfo // all imported packages (incl. failures) by import path
+
+ // import dependency graph: graph[x][y] => x imports y
+ //
+ // Since non-importable packages cannot be cyclic, we ignore
+ // their imports, thus we only need the subgraph over importable
+ // packages. Nodes are identified by their import paths.
+ graphMu sync.Mutex
+ graph map[string]map[string]bool
+}
+
+type findpkgKey struct {
+ importPath string
+ fromDir string
+ mode build.ImportMode
+}
+
+type findpkgValue struct {
+ ready chan struct{} // closed to broadcast readiness
+ bp *build.Package
+ err error
+}
+
+// importInfo tracks the success or failure of a single import.
+//
+// Upon completion, exactly one of info and err is non-nil:
+// info on successful creation of a package, err otherwise.
+// A successful package may still contain type errors.
+type importInfo struct {
+ path string // import path
+ info *PackageInfo // results of typechecking (including errors)
+ complete chan struct{} // closed to broadcast that info is set.
+}
+
+// awaitCompletion blocks until ii is complete,
+// i.e. the info field is safe to inspect.
+func (ii *importInfo) awaitCompletion() {
+ <-ii.complete // wait for close
+}
+
+// Complete marks ii as complete.
+// Its info and err fields will not be subsequently updated.
+func (ii *importInfo) Complete(info *PackageInfo) {
+ if info == nil {
+ panic("info == nil")
+ }
+ ii.info = info
+ close(ii.complete)
+}
+
+type importError struct {
+ path string // import path
+ err error // reason for failure to create a package
+}
+
+// Load creates the initial packages specified by conf.{Create,Import}Pkgs,
+// loading their dependency packages as needed.
+//
+// On success, Load returns a Program containing a PackageInfo for
+// each package. On failure, it returns an error.
+//
+// If AllowErrors is true, Load will return a Program even if some
+// packages contained I/O, parser or type errors, or if dependencies
+// were missing. (Such errors are accessible via PackageInfo.Errors.) If
+// false, Load will fail if any package had an error.
+//
+// It is an error if no packages were loaded.
+func (conf *Config) Load() (*Program, error) {
+ // Create a simple default error handler for parse/type errors.
+ if conf.TypeChecker.Error == nil {
+ conf.TypeChecker.Error = func(e error) { fmt.Fprintln(os.Stderr, e) }
+ }
+
+ // Set default working directory for relative package references.
+ if conf.Cwd == "" {
+ var err error
+ conf.Cwd, err = os.Getwd()
+ if err != nil {
+ return nil, err
+ }
+ }
+
+ // Install default FindPackage hook using go/build logic.
+ if conf.FindPackage == nil {
+ conf.FindPackage = (*build.Context).Import
+ }
+
+ prog := &Program{
+ Fset: conf.fset(),
+ Imported: make(map[string]*PackageInfo),
+ importMap: make(map[string]*types.Package),
+ AllPackages: make(map[*types.Package]*PackageInfo),
+ }
+
+ imp := importer{
+ conf: conf,
+ prog: prog,
+ findpkg: make(map[findpkgKey]*findpkgValue),
+ imported: make(map[string]*importInfo),
+ start: time.Now(),
+ graph: make(map[string]map[string]bool),
+ }
+
+ // -- loading proper (concurrent phase) --------------------------------
+
+ var errpkgs []string // packages that contained errors
+
+ // Load the initially imported packages and their dependencies,
+ // in parallel.
+ // No vendor check on packages imported from the command line.
+ infos, importErrors := imp.importAll("", conf.Cwd, conf.ImportPkgs, ignoreVendor)
+ for _, ie := range importErrors {
+ conf.TypeChecker.Error(ie.err) // failed to create package
+ errpkgs = append(errpkgs, ie.path)
+ }
+ for _, info := range infos {
+ prog.Imported[info.Pkg.Path()] = info
+ }
+
+ // Augment the designated initial packages by their tests.
+ // Dependencies are loaded in parallel.
+ var xtestPkgs []*build.Package
+ for importPath, augment := range conf.ImportPkgs {
+ if !augment {
+ continue
+ }
+
+ // No vendor check on packages imported from command line.
+ bp, err := imp.findPackage(importPath, conf.Cwd, ignoreVendor)
+ if err != nil {
+ // Package not found, or can't even parse package declaration.
+ // Already reported by previous loop; ignore it.
+ continue
+ }
+
+ // Needs external test package?
+ if len(bp.XTestGoFiles) > 0 {
+ xtestPkgs = append(xtestPkgs, bp)
+ }
+
+ // Consult the cache using the canonical package path.
+ path := bp.ImportPath
+ imp.importedMu.Lock() // (unnecessary, we're sequential here)
+ ii, ok := imp.imported[path]
+ // Paranoid checks added due to issue #11012.
+ if !ok {
+ // Unreachable.
+ // The previous loop called importAll and thus
+ // startLoad for each path in ImportPkgs, which
+ // populates imp.imported[path] with a non-zero value.
+ panic(fmt.Sprintf("imported[%q] not found", path))
+ }
+ if ii == nil {
+ // Unreachable.
+ // The ii values in this loop are the same as in
+ // the previous loop, which enforced the invariant
+ // that at least one of ii.err and ii.info is non-nil.
+ panic(fmt.Sprintf("imported[%q] == nil", path))
+ }
+ if ii.info == nil {
+ // Unreachable.
+ // awaitCompletion has the postcondition
+ // ii.info != nil.
+ panic(fmt.Sprintf("imported[%q].info = nil", path))
+ }
+ info := ii.info
+ imp.importedMu.Unlock()
+
+ // Parse the in-package test files.
+ files, errs := imp.conf.parsePackageFiles(bp, 't')
+ for _, err := range errs {
+ info.appendError(err)
+ }
+
+ // The test files augmenting package P cannot be imported,
+ // but may import packages that import P,
+ // so we must disable the cycle check.
+ imp.addFiles(info, files, false)
+ }
+
+ createPkg := func(path, dir string, files []*ast.File, errs []error) {
+ info := imp.newPackageInfo(path, dir)
+ for _, err := range errs {
+ info.appendError(err)
+ }
+
+ // Ad hoc packages are non-importable,
+ // so no cycle check is needed.
+ // addFiles loads dependencies in parallel.
+ imp.addFiles(info, files, false)
+ prog.Created = append(prog.Created, info)
+ }
+
+ // Create packages specified by conf.CreatePkgs.
+ for _, cp := range conf.CreatePkgs {
+ files, errs := parseFiles(conf.fset(), conf.build(), nil, conf.Cwd, cp.Filenames, conf.ParserMode)
+ files = append(files, cp.Files...)
+
+ path := cp.Path
+ if path == "" {
+ if len(files) > 0 {
+ path = files[0].Name.Name
+ } else {
+ path = "(unnamed)"
+ }
+ }
+
+ dir := conf.Cwd
+ if len(files) > 0 && files[0].Pos().IsValid() {
+ dir = filepath.Dir(conf.fset().File(files[0].Pos()).Name())
+ }
+ createPkg(path, dir, files, errs)
+ }
+
+ // Create external test packages.
+ sort.Sort(byImportPath(xtestPkgs))
+ for _, bp := range xtestPkgs {
+ files, errs := imp.conf.parsePackageFiles(bp, 'x')
+ createPkg(bp.ImportPath+"_test", bp.Dir, files, errs)
+ }
+
+ // -- finishing up (sequential) ----------------------------------------
+
+ if len(prog.Imported)+len(prog.Created) == 0 {
+ return nil, errors.New("no initial packages were loaded")
+ }
+
+ // Create infos for indirectly imported packages.
+ // e.g. incomplete packages without syntax, loaded from export data.
+ for _, obj := range prog.importMap {
+ info := prog.AllPackages[obj]
+ if info == nil {
+ prog.AllPackages[obj] = &PackageInfo{Pkg: obj, Importable: true}
+ } else {
+ // finished
+ info.checker = nil
+ info.errorFunc = nil
+ }
+ }
+
+ if !conf.AllowErrors {
+ // Report errors in indirectly imported packages.
+ for _, info := range prog.AllPackages {
+ if len(info.Errors) > 0 {
+ errpkgs = append(errpkgs, info.Pkg.Path())
+ }
+ }
+ if errpkgs != nil {
+ var more string
+ if len(errpkgs) > 3 {
+ more = fmt.Sprintf(" and %d more", len(errpkgs)-3)
+ errpkgs = errpkgs[:3]
+ }
+ return nil, fmt.Errorf("couldn't load packages due to errors: %s%s",
+ strings.Join(errpkgs, ", "), more)
+ }
+ }
+
+ markErrorFreePackages(prog.AllPackages)
+
+ return prog, nil
+}
+
+type byImportPath []*build.Package
+
+func (b byImportPath) Len() int { return len(b) }
+func (b byImportPath) Less(i, j int) bool { return b[i].ImportPath < b[j].ImportPath }
+func (b byImportPath) Swap(i, j int) { b[i], b[j] = b[j], b[i] }
+
+// markErrorFreePackages sets the TransitivelyErrorFree flag on all
+// applicable packages.
+func markErrorFreePackages(allPackages map[*types.Package]*PackageInfo) {
+ // Build the transpose of the import graph.
+ importedBy := make(map[*types.Package]map[*types.Package]bool)
+ for P := range allPackages {
+ for _, Q := range P.Imports() {
+ clients, ok := importedBy[Q]
+ if !ok {
+ clients = make(map[*types.Package]bool)
+ importedBy[Q] = clients
+ }
+ clients[P] = true
+ }
+ }
+
+ // Find all packages reachable from some error package.
+ reachable := make(map[*types.Package]bool)
+ var visit func(*types.Package)
+ visit = func(p *types.Package) {
+ if !reachable[p] {
+ reachable[p] = true
+ for q := range importedBy[p] {
+ visit(q)
+ }
+ }
+ }
+ for _, info := range allPackages {
+ if len(info.Errors) > 0 {
+ visit(info.Pkg)
+ }
+ }
+
+ // Mark the others as "transitively error-free".
+ for _, info := range allPackages {
+ if !reachable[info.Pkg] {
+ info.TransitivelyErrorFree = true
+ }
+ }
+}
+
+// build returns the effective build context.
+func (conf *Config) build() *build.Context {
+ if conf.Build != nil {
+ return conf.Build
+ }
+ return &build.Default
+}
+
+// parsePackageFiles enumerates the files belonging to package path,
+// then loads, parses and returns them, plus a list of I/O or parse
+// errors that were encountered.
+//
+// 'which' indicates which files to include:
+//
+// 'g': include non-test *.go source files (GoFiles + processed CgoFiles)
+// 't': include in-package *_test.go source files (TestGoFiles)
+// 'x': include external *_test.go source files. (XTestGoFiles)
+func (conf *Config) parsePackageFiles(bp *build.Package, which rune) ([]*ast.File, []error) {
+ if bp.ImportPath == "unsafe" {
+ return nil, nil
+ }
+ var filenames []string
+ switch which {
+ case 'g':
+ filenames = bp.GoFiles
+ case 't':
+ filenames = bp.TestGoFiles
+ case 'x':
+ filenames = bp.XTestGoFiles
+ default:
+ panic(which)
+ }
+
+ files, errs := parseFiles(conf.fset(), conf.build(), conf.DisplayPath, bp.Dir, filenames, conf.ParserMode)
+
+ // Preprocess CgoFiles and parse the outputs (sequentially).
+ if which == 'g' && bp.CgoFiles != nil {
+ cgofiles, err := cgo.ProcessFiles(bp, conf.fset(), conf.DisplayPath, conf.ParserMode)
+ if err != nil {
+ errs = append(errs, err)
+ } else {
+ files = append(files, cgofiles...)
+ }
+ }
+
+ return files, errs
+}
+
+// doImport imports the package denoted by path.
+// It implements the types.Importer signature.
+//
+// It returns an error if a package could not be created
+// (e.g. go/build or parse error), but type errors are reported via
+// the types.Config.Error callback (the first of which is also saved
+// in the package's PackageInfo).
+//
+// Idempotent.
+func (imp *importer) doImport(from *PackageInfo, to string) (*types.Package, error) {
+ if to == "C" {
+ // This should be unreachable, but ad hoc packages are
+ // not currently subject to cgo preprocessing.
+ // See https://golang.org/issue/11627.
+ return nil, fmt.Errorf(`the loader doesn't cgo-process ad hoc packages like %q; see Go issue 11627`,
+ from.Pkg.Path())
+ }
+
+ bp, err := imp.findPackage(to, from.dir, 0)
+ if err != nil {
+ return nil, err
+ }
+
+ // The standard unsafe package is handled specially,
+ // and has no PackageInfo.
+ if bp.ImportPath == "unsafe" {
+ return types.Unsafe, nil
+ }
+
+ // Look for the package in the cache using its canonical path.
+ path := bp.ImportPath
+ imp.importedMu.Lock()
+ ii := imp.imported[path]
+ imp.importedMu.Unlock()
+ if ii == nil {
+ panic("internal error: unexpected import: " + path)
+ }
+ if ii.info != nil {
+ return ii.info.Pkg, nil
+ }
+
+ // Import of incomplete package: this indicates a cycle.
+ fromPath := from.Pkg.Path()
+ if cycle := imp.findPath(path, fromPath); cycle != nil {
+ // Normalize cycle: start from alphabetically largest node.
+ pos, start := -1, ""
+ for i, s := range cycle {
+ if pos < 0 || s > start {
+ pos, start = i, s
+ }
+ }
+ cycle = append(cycle, cycle[:pos]...)[pos:] // rotate cycle to start from largest
+		cycle = append(cycle, cycle[0])             // add start node to end to close the cycle
+ return nil, fmt.Errorf("import cycle: %s", strings.Join(cycle, " -> "))
+ }
+
+ panic("internal error: import of incomplete (yet acyclic) package: " + fromPath)
+}
+
+// findPackage locates the package denoted by the importPath in the
+// specified directory.
+func (imp *importer) findPackage(importPath, fromDir string, mode build.ImportMode) (*build.Package, error) {
+ // We use a non-blocking duplicate-suppressing cache (gopl.io §9.7)
+ // to avoid holding the lock around FindPackage.
+ key := findpkgKey{importPath, fromDir, mode}
+ imp.findpkgMu.Lock()
+ v, ok := imp.findpkg[key]
+ if ok {
+ // cache hit
+ imp.findpkgMu.Unlock()
+
+ <-v.ready // wait for entry to become ready
+ } else {
+ // Cache miss: this goroutine becomes responsible for
+ // populating the map entry and broadcasting its readiness.
+ v = &findpkgValue{ready: make(chan struct{})}
+ imp.findpkg[key] = v
+ imp.findpkgMu.Unlock()
+
+ ioLimit <- true
+ v.bp, v.err = imp.conf.FindPackage(imp.conf.build(), importPath, fromDir, mode)
+ <-ioLimit
+
+ if _, ok := v.err.(*build.NoGoError); ok {
+ v.err = nil // empty directory is not an error
+ }
+
+ close(v.ready) // broadcast ready condition
+ }
+ return v.bp, v.err
+}
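findPackage above is an instance of the duplicate-suppressing, non-blocking cache described in gopl.io §9.7: the first goroutine to request a key computes it and broadcasts readiness by closing a channel. A self-contained sketch of the same pattern, with hypothetical names (entry, cache, get):

package main

import (
	"fmt"
	"sync"
	"time"
)

type entry struct {
	ready chan struct{} // closed to broadcast that value is set
	value string
}

type cache struct {
	mu sync.Mutex
	m  map[string]*entry
}

// get returns the value for key, computing it at most once even when
// called concurrently; later callers block until the value is ready.
func (c *cache) get(key string, compute func(string) string) string {
	c.mu.Lock()
	e, ok := c.m[key]
	if ok {
		c.mu.Unlock()
		<-e.ready // wait for the first caller to finish
		return e.value
	}
	// Cache miss: this goroutine becomes responsible for the computation.
	e = &entry{ready: make(chan struct{})}
	c.m[key] = e
	c.mu.Unlock()

	e.value = compute(key)
	close(e.ready) // broadcast readiness
	return e.value
}

func main() {
	c := &cache{m: make(map[string]*entry)}
	slow := func(k string) string { time.Sleep(10 * time.Millisecond); return "v:" + k }
	var wg sync.WaitGroup
	for i := 0; i < 4; i++ {
		wg.Add(1)
		go func() { defer wg.Done(); fmt.Println(c.get("key", slow)) }()
	}
	wg.Wait()
}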
+
+// importAll loads, parses, and type-checks the specified packages in
+// parallel and returns their completed importInfos in unspecified order.
+//
+// fromPath is the package path of the importing package, if it is
+// importable, "" otherwise. It is used for cycle detection.
+//
+// fromDir is the directory containing the import declaration that
+// caused these imports.
+func (imp *importer) importAll(fromPath, fromDir string, imports map[string]bool, mode build.ImportMode) (infos []*PackageInfo, errors []importError) {
+ if fromPath != "" {
+ // We're loading a set of imports.
+ //
+ // We must record graph edges from the importing package
+ // to its dependencies, and check for cycles.
+ imp.graphMu.Lock()
+ deps, ok := imp.graph[fromPath]
+ if !ok {
+ deps = make(map[string]bool)
+ imp.graph[fromPath] = deps
+ }
+ for importPath := range imports {
+ deps[importPath] = true
+ }
+ imp.graphMu.Unlock()
+ }
+
+ var pending []*importInfo
+ for importPath := range imports {
+ if fromPath != "" {
+ if cycle := imp.findPath(importPath, fromPath); cycle != nil {
+ // Cycle-forming import: we must not check it
+ // since it would deadlock.
+ if trace {
+ fmt.Fprintf(os.Stderr, "import cycle: %q\n", cycle)
+ }
+ continue
+ }
+ }
+ bp, err := imp.findPackage(importPath, fromDir, mode)
+ if err != nil {
+ errors = append(errors, importError{
+ path: importPath,
+ err: err,
+ })
+ continue
+ }
+ pending = append(pending, imp.startLoad(bp))
+ }
+
+ for _, ii := range pending {
+ ii.awaitCompletion()
+ infos = append(infos, ii.info)
+ }
+
+ return infos, errors
+}
+
+// findPath returns an arbitrary path from 'from' to 'to' in the import
+// graph, or nil if there was none.
+func (imp *importer) findPath(from, to string) []string {
+ imp.graphMu.Lock()
+ defer imp.graphMu.Unlock()
+
+ seen := make(map[string]bool)
+ var search func(stack []string, importPath string) []string
+ search = func(stack []string, importPath string) []string {
+ if !seen[importPath] {
+ seen[importPath] = true
+ stack = append(stack, importPath)
+ if importPath == to {
+ return stack
+ }
+ for x := range imp.graph[importPath] {
+ if p := search(stack, x); p != nil {
+ return p
+ }
+ }
+ }
+ return nil
+ }
+ return search(make([]string, 0, 20), from)
+}
+
+// startLoad initiates the loading, parsing and type-checking of the
+// specified package and its dependencies, if it has not already begun.
+//
+// It returns an importInfo, not necessarily in a completed state. The
+// caller must call awaitCompletion() before accessing its info field.
+//
+// startLoad is concurrency-safe and idempotent.
+func (imp *importer) startLoad(bp *build.Package) *importInfo {
+ path := bp.ImportPath
+ imp.importedMu.Lock()
+ ii, ok := imp.imported[path]
+ if !ok {
+ ii = &importInfo{path: path, complete: make(chan struct{})}
+ imp.imported[path] = ii
+ go func() {
+ info := imp.load(bp)
+ ii.Complete(info)
+ }()
+ }
+ imp.importedMu.Unlock()
+
+ return ii
+}
+
+// load implements package loading by parsing Go source files
+// located by go/build.
+func (imp *importer) load(bp *build.Package) *PackageInfo {
+ info := imp.newPackageInfo(bp.ImportPath, bp.Dir)
+ info.Importable = true
+ files, errs := imp.conf.parsePackageFiles(bp, 'g')
+ for _, err := range errs {
+ info.appendError(err)
+ }
+
+ imp.addFiles(info, files, true)
+
+ imp.progMu.Lock()
+ imp.prog.importMap[bp.ImportPath] = info.Pkg
+ imp.progMu.Unlock()
+
+ return info
+}
+
+// addFiles adds and type-checks the specified files to info, loading
+// their dependencies if needed. The order of files determines the
+// package initialization order. It may be called multiple times on the
+// same package. Errors are appended to the info.Errors field.
+//
+// cycleCheck determines whether the imports within files create
+// dependency edges that should be checked for potential cycles.
+func (imp *importer) addFiles(info *PackageInfo, files []*ast.File, cycleCheck bool) {
+ // Ensure the dependencies are loaded, in parallel.
+ var fromPath string
+ if cycleCheck {
+ fromPath = info.Pkg.Path()
+ }
+ // TODO(adonovan): opt: make the caller do scanImports.
+ // Callers with a build.Package can skip it.
+ imp.importAll(fromPath, info.dir, scanImports(files), 0)
+
+ if trace {
+ fmt.Fprintf(os.Stderr, "%s: start %q (%d)\n",
+ time.Since(imp.start), info.Pkg.Path(), len(files))
+ }
+
+ // Don't call checker.Files on Unsafe, even with zero files,
+ // because it would mutate the package, which is a global.
+ if info.Pkg == types.Unsafe {
+ if len(files) > 0 {
+ panic(`"unsafe" package contains unexpected files`)
+ }
+ } else {
+ // Ignore the returned (first) error since we
+ // already collect them all in the PackageInfo.
+ info.checker.Files(files)
+ info.Files = append(info.Files, files...)
+ }
+
+ if imp.conf.AfterTypeCheck != nil {
+ imp.conf.AfterTypeCheck(info, files)
+ }
+
+ if trace {
+ fmt.Fprintf(os.Stderr, "%s: stop %q\n",
+ time.Since(imp.start), info.Pkg.Path())
+ }
+}
+
+func (imp *importer) newPackageInfo(path, dir string) *PackageInfo {
+ var pkg *types.Package
+ if path == "unsafe" {
+ pkg = types.Unsafe
+ } else {
+ pkg = types.NewPackage(path, "")
+ }
+ info := &PackageInfo{
+ Pkg: pkg,
+ Info: types.Info{
+ Types: make(map[ast.Expr]types.TypeAndValue),
+ Defs: make(map[*ast.Ident]types.Object),
+ Uses: make(map[*ast.Ident]types.Object),
+ Implicits: make(map[ast.Node]types.Object),
+ Instances: make(map[*ast.Ident]types.Instance),
+ Scopes: make(map[ast.Node]*types.Scope),
+ Selections: make(map[*ast.SelectorExpr]*types.Selection),
+ },
+ errorFunc: imp.conf.TypeChecker.Error,
+ dir: dir,
+ }
+ versions.InitFileVersions(&info.Info)
+
+ // Copy the types.Config so we can vary it across PackageInfos.
+ tc := imp.conf.TypeChecker
+ tc.IgnoreFuncBodies = false
+ if f := imp.conf.TypeCheckFuncBodies; f != nil {
+ tc.IgnoreFuncBodies = !f(path)
+ }
+ tc.Importer = closure{imp, info}
+ tc.Error = info.appendError // appendError wraps the user's Error function
+
+ info.checker = types.NewChecker(&tc, imp.conf.fset(), pkg, &info.Info)
+ imp.progMu.Lock()
+ imp.prog.AllPackages[pkg] = info
+ imp.progMu.Unlock()
+ return info
+}
+
+type closure struct {
+ imp *importer
+ info *PackageInfo
+}
+
+func (c closure) Import(to string) (*types.Package, error) { return c.imp.doImport(c.info, to) }
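After a successful Load, the resulting Program can be inspected beyond the initial packages. A small sketch that imports one package (any importable path will do) and then lists every package the loader touched, using the flags documented on PackageInfo:

package main

import (
	"fmt"
	"log"

	"golang.org/x/tools/go/loader"
)

func main() {
	var conf loader.Config
	conf.Import("fmt") // any importable package will do
	prog, err := conf.Load()
	if err != nil {
		log.Fatal(err)
	}
	// One line per package encountered by Load: initial packages and
	// all of their dependencies, including incomplete ones.
	for pkg, info := range prog.AllPackages {
		fmt.Printf("%-30s importable=%v error-free=%v files=%d\n",
			pkg.Path(), info.Importable, info.TransitivelyErrorFree, len(info.Files))
	}
}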
diff --git a/vendor/golang.org/x/tools/go/loader/util.go b/vendor/golang.org/x/tools/go/loader/util.go
new file mode 100644
index 0000000..3a80aca
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/loader/util.go
@@ -0,0 +1,123 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package loader
+
+import (
+ "go/ast"
+ "go/build"
+ "go/parser"
+ "go/token"
+ "io"
+ "os"
+ "strconv"
+ "sync"
+
+ "golang.org/x/tools/go/buildutil"
+)
+
+// We use a counting semaphore to limit
+// the number of parallel I/O calls per process.
+var ioLimit = make(chan bool, 10)
+
+// parseFiles parses the Go source files within directory dir and
+// returns the ASTs of the ones that could be at least partially parsed,
+// along with a list of I/O and parse errors encountered.
+//
+// I/O is done via ctxt, which may specify a virtual file system.
+// displayPath is used to transform the filenames attached to the ASTs.
+func parseFiles(fset *token.FileSet, ctxt *build.Context, displayPath func(string) string, dir string, files []string, mode parser.Mode) ([]*ast.File, []error) {
+ if displayPath == nil {
+ displayPath = func(path string) string { return path }
+ }
+ var wg sync.WaitGroup
+ n := len(files)
+ parsed := make([]*ast.File, n)
+ errors := make([]error, n)
+ for i, file := range files {
+ if !buildutil.IsAbsPath(ctxt, file) {
+ file = buildutil.JoinPath(ctxt, dir, file)
+ }
+ wg.Add(1)
+ go func(i int, file string) {
+ ioLimit <- true // wait
+ defer func() {
+ wg.Done()
+ <-ioLimit // signal
+ }()
+ var rd io.ReadCloser
+ var err error
+ if ctxt.OpenFile != nil {
+ rd, err = ctxt.OpenFile(file)
+ } else {
+ rd, err = os.Open(file)
+ }
+ if err != nil {
+ errors[i] = err // open failed
+ return
+ }
+
+ // ParseFile may return both an AST and an error.
+ parsed[i], errors[i] = parser.ParseFile(fset, displayPath(file), rd, mode)
+ rd.Close()
+ }(i, file)
+ }
+ wg.Wait()
+
+ // Eliminate nils, preserving order.
+ var o int
+ for _, f := range parsed {
+ if f != nil {
+ parsed[o] = f
+ o++
+ }
+ }
+ parsed = parsed[:o]
+
+ o = 0
+ for _, err := range errors {
+ if err != nil {
+ errors[o] = err
+ o++
+ }
+ }
+ errors = errors[:o]
+
+ return parsed, errors
+}
+
+// scanImports returns the set of all import paths from all
+// import specs in the specified files.
+func scanImports(files []*ast.File) map[string]bool {
+ imports := make(map[string]bool)
+ for _, f := range files {
+ for _, decl := range f.Decls {
+ if decl, ok := decl.(*ast.GenDecl); ok && decl.Tok == token.IMPORT {
+ for _, spec := range decl.Specs {
+ spec := spec.(*ast.ImportSpec)
+
+ // NB: do not assume the program is well-formed!
+ path, err := strconv.Unquote(spec.Path.Value)
+ if err != nil {
+ continue // quietly ignore the error
+ }
+ if path == "C" {
+ continue // skip pseudopackage
+ }
+ imports[path] = true
+ }
+ }
+ }
+ }
+ return imports
+}
+
+// ---------- Internal helpers ----------
+
+// TODO(adonovan): make this a method: func (*token.File) Contains(token.Pos)
+func tokenFileContainsPos(f *token.File, pos token.Pos) bool {
+ p := int(pos)
+ base := f.Base()
+ return base <= p && p < base+f.Size()
+}
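The ioLimit channel above is a counting semaphore. A self-contained sketch of the same bounded-concurrency idiom, here applied to reading a list of files named on the command line:

package main

import (
	"fmt"
	"os"
	"sync"
)

var sem = make(chan bool, 10) // at most 10 concurrent reads

func main() {
	files := os.Args[1:]
	sizes := make([]int, len(files))
	var wg sync.WaitGroup
	for i, name := range files {
		wg.Add(1)
		go func(i int, name string) {
			sem <- true // acquire a slot
			defer func() {
				<-sem // release the slot
				wg.Done()
			}()
			data, err := os.ReadFile(name)
			if err != nil {
				return // leave size as zero on error
			}
			sizes[i] = len(data)
		}(i, name)
	}
	wg.Wait()
	for i, name := range files {
		fmt.Println(name, sizes[i])
	}
}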
diff --git a/vendor/golang.org/x/tools/go/packages/doc.go b/vendor/golang.org/x/tools/go/packages/doc.go
new file mode 100644
index 0000000..3531ac8
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/packages/doc.go
@@ -0,0 +1,242 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+/*
+Package packages loads Go packages for inspection and analysis.
+
+The [Load] function takes as input a list of patterns and returns a
+list of [Package] values describing individual packages matched by those
+patterns.
+A [Config] specifies configuration options, the most important of which is
+the [LoadMode], which controls the amount of detail in the loaded packages.
+
+Load passes most patterns directly to the underlying build tool.
+The default build tool is the go command.
+Its supported patterns are described at
+https://pkg.go.dev/cmd/go#hdr-Package_lists_and_patterns.
+Other build systems may be supported by providing a "driver";
+see [The driver protocol].
+
+All patterns with the prefix "query=", where query is a
+non-empty string of letters from [a-z], are reserved and may be
+interpreted as query operators.
+
+Two query operators are currently supported: "file" and "pattern".
+
+The query "file=path/to/file.go" matches the package or packages enclosing
+the Go source file path/to/file.go. For example "file=~/go/src/fmt/print.go"
+might return the packages "fmt" and "fmt [fmt.test]".
+
+The query "pattern=string" causes "string" to be passed directly to
+the underlying build tool. In most cases this is unnecessary,
+but an application can use Load("pattern=" + x) as an escaping mechanism
+to ensure that x is not interpreted as a query operator if it contains '='.
+
+All other query operators are reserved for future use and currently
+cause Load to report an error.
+
+The Package struct provides basic information about the package, including
+
+ - ID, a unique identifier for the package in the returned set;
+ - GoFiles, the names of the package's Go source files;
+ - Imports, a map from source import strings to the Packages they name;
+ - Types, the type information for the package's exported symbols;
+ - Syntax, the parsed syntax trees for the package's source code; and
+ - TypesInfo, the result of a complete type-check of the package syntax trees.
+
+(See the documentation for type Package for the complete list of fields
+and more detailed descriptions.)
+
+For example,
+
+ Load(nil, "bytes", "unicode...")
+
+returns four Package structs describing the standard library packages
+bytes, unicode, unicode/utf16, and unicode/utf8. Note that one pattern
+can match multiple packages and that a package might be matched by
+multiple patterns: in general it is not possible to determine which
+packages correspond to which patterns.
+
+Note that the list returned by Load contains only the packages matched
+by the patterns. Their dependencies can be found by walking the import
+graph using the Imports fields.
+
+The Load function can be configured by passing a pointer to a Config as
+the first argument. A nil Config is equivalent to the zero Config, which
+causes Load to run in LoadFiles mode, collecting minimal information.
+See the documentation for type Config for details.
+
+As noted earlier, the Config.Mode controls the amount of detail
+reported about the loaded packages. See the documentation for type LoadMode
+for details.
+
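+For example (a sketch only, with error handling reduced to the essentials),
+a tool that needs syntax trees and type information for the packages named
+on its command line might do:
+
+	cfg := &packages.Config{Mode: packages.NeedName | packages.NeedSyntax | packages.NeedTypes | packages.NeedTypesInfo}
+	pkgs, err := packages.Load(cfg, flag.Args()...)
+	if err != nil {
+		log.Fatal(err) // failure to start or run the underlying build tool
+	}
+	for _, pkg := range pkgs {
+		fmt.Println(pkg.ID, len(pkg.Syntax), "parsed files")
+	}
+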
+Most tools should pass their command-line arguments (after any flags)
+uninterpreted to [Load], so that it can interpret them
+according to the conventions of the underlying build system.
+
+See the Example function for typical usage.
+
+# The driver protocol
+
+[Load] may be used to load Go packages even in Go projects that use
+alternative build systems, by installing an appropriate "driver"
+program for the build system and specifying its location in the
+GOPACKAGESDRIVER environment variable.
+For example,
+https://github.com/bazelbuild/rules_go/wiki/Editor-and-tool-integration
+explains how to use the driver for Bazel.
+
+The driver program is responsible for interpreting patterns in its
+preferred notation and reporting information about the packages that
+those patterns identify. Drivers must also support the special "file="
+and "pattern=" patterns described above.
+
+The patterns are provided as positional command-line arguments. A
+JSON-encoded [DriverRequest] message providing additional information
+is written to the driver's standard input. The driver must write a
+JSON-encoded [DriverResponse] message to its standard output. (This
+message differs from the JSON schema produced by 'go list'.)
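+
+For example, assuming a driver binary has been installed as $HOME/bin/mydriver
+("mydriver" and "sometool" are illustrative names, not real tools), a user
+could run a go/packages-based tool against it with:
+
+	GOPACKAGESDRIVER=$HOME/bin/mydriver sometool ./...
+
+Setting GOPACKAGESDRIVER=off disables any external driver and forces use of
+the standard go list driver.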
+*/
+package packages // import "golang.org/x/tools/go/packages"
+
+/*
+
+Motivation and design considerations
+
+The new package's design solves problems addressed by two existing
+packages: go/build, which locates and describes packages, and
+golang.org/x/tools/go/loader, which loads, parses and type-checks them.
+The go/build.Package structure encodes too much of the 'go build' way
+of organizing projects, leaving us in need of a data type that describes a
+package of Go source code independent of the underlying build system.
+We wanted something that works equally well with go build and vgo, and
+also other build systems such as Bazel and Blaze, making it possible to
+construct analysis tools that work in all these environments.
+Tools such as errcheck and staticcheck were essentially unavailable to
+the Go community at Google, and some of Google's internal tools for Go
+are unavailable externally.
+This new package provides a uniform way to obtain package metadata by
+querying each of these build systems, optionally supporting their
+preferred command-line notations for packages, so that tools integrate
+neatly with users' build environments. The Metadata query function
+executes an external query tool appropriate to the current workspace.
+
+Loading packages always returns the complete import graph "all the way down",
+even if all you want is information about a single package, because the query
+mechanisms of all the build systems we currently support ({go,vgo} list, and
+blaze/bazel aspect-based query) cannot provide detailed information
+about one package without visiting all its dependencies too, so there is
+no additional asymptotic cost to providing transitive information.
+(This property might not be true of a hypothetical 5th build system.)
+
+In calls to TypeCheck, all initial packages, and any package that
+transitively depends on one of them, must be loaded from source.
+Consider A->B->C->D->E: if A,C are initial, A,B,C must be loaded from
+source; D may be loaded from export data, and E may not be loaded at all
+(though it's possible that D's export data mentions it, so a
+types.Package may be created for it and exposed.)
+
+The old loader had a feature to suppress type-checking of function
+bodies on a per-package basis, primarily intended to reduce the work of
+obtaining type information for imported packages. Now that imports are
+satisfied by export data, the optimization no longer seems necessary.
+
+Despite some early attempts, the old loader did not exploit export data,
+instead always using the equivalent of WholeProgram mode. This was due
+to the complexity of mixing source and export data packages (now
+resolved by the upward traversal mentioned above), and because export data
+files were nearly always missing or stale. Now that 'go build' supports
+caching, all the underlying build systems can guarantee to produce
+export data in a reasonable (amortized) time.
+
+Test "main" packages synthesized by the build system are now reported as
+first-class packages, avoiding the need for clients (such as go/ssa) to
+reinvent this generation logic.
+
+One way in which go/packages is simpler than the old loader is in its
+treatment of in-package tests. In-package tests are packages that
+consist of all the files of the library under test, plus the test files.
+The old loader constructed in-package tests by a two-phase process of
+mutation called "augmentation": first it would construct and type check
+all the ordinary library packages and type-check the packages that
+depend on them; then it would add more (test) files to the package and
+type-check again. This two-phase approach had four major problems:
+1) in processing the tests, the loader modified the library package,
+ leaving no way for a client application to see both the test
+ package and the library package; one would mutate into the other.
+2) because test files can declare additional methods on types defined in
+ the library portion of the package, the dispatch of method calls in
+ the library portion was affected by the presence of the test files.
+ This should have been a clue that the packages were logically
+ different.
+3) this model of "augmentation" assumed at most one in-package test
+ per library package, which is true of projects using 'go build',
+ but not other build systems.
+4) because of the two-phase nature of test processing, all packages that
+ import the library package had to be processed before augmentation,
+ forcing a "one-shot" API and preventing the client from calling Load
+ several times in sequence as is now possible in WholeProgram mode.
+ (TypeCheck mode has a similar one-shot restriction for a different reason.)
+
+Early drafts of this package supported "multi-shot" operation.
+Although it allowed clients to make a sequence of calls (or concurrent
+calls) to Load, building up the graph of Packages incrementally,
+it was of marginal value: it complicated the API
+(since it allowed some options to vary across calls but not others),
+it complicated the implementation,
+it cannot be made to work in Types mode, as explained above,
+and it was less efficient than making one combined call (when this is possible).
+Among the clients we have inspected, none made multiple calls to load
+but could not be easily and satisfactorily modified to make only a single call.
+However, application changes may be required.
+For example, the ssadump command loads the user-specified packages
+and in addition the runtime package. It is tempting to simply append
+"runtime" to the user-provided list, but that does not work if the user
+specified an ad-hoc package such as [a.go b.go].
+Instead, ssadump no longer requests the runtime package,
+but seeks it among the dependencies of the user-specified packages,
+and emits an error if it is not found.
+
+Questions & Tasks
+
+- Add GOARCH/GOOS?
+ They are not portable concepts, but could be made portable.
+ Our goal has been to allow users to express themselves using the conventions
+ of the underlying build system: if the build system honors GOARCH
+ during a build and during a metadata query, then so should
+ applications built atop that query mechanism.
+ Conversely, if the target architecture of the build is determined by
+ command-line flags, the application can pass the relevant
+ flags through to the build system using a command such as:
+ myapp -query_flag="--cpu=amd64" -query_flag="--os=darwin"
+ However, this approach is low-level, unwieldy, and non-portable.
+ GOOS and GOARCH seem important enough to warrant a dedicated option.
+
+- How should we handle partial failures such as a mixture of good and
+ malformed patterns, existing and non-existent packages, successful and
+ failed builds, import failures, import cycles, and so on, in a call to
+ Load?
+
+- Support bazel, blaze, and go1.10 list, not just go1.11 list.
+
+- Handle (and test) various partial success cases, e.g.
+ a mixture of good packages and:
+ invalid patterns
+ nonexistent packages
+ empty packages
+ packages with malformed package or import declarations
+ unreadable files
+ import cycles
+ other parse errors
+ type errors
+ Make sure we record errors at the correct place in the graph.
+
+- Missing packages among initial arguments are not reported.
+ Return bogus packages for them, like golist does.
+
+- "undeclared name" errors (for example) are reported out of source file
+ order. I suspect this is due to the breadth-first resolution now used
+ by go/types. Is that a bug? Discuss with gri.
+
+*/
diff --git a/vendor/golang.org/x/tools/go/packages/external.go b/vendor/golang.org/x/tools/go/packages/external.go
new file mode 100644
index 0000000..c2b4b71
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/packages/external.go
@@ -0,0 +1,156 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package packages
+
+// This file defines the protocol that enables an external "driver"
+// tool to supply package metadata in place of 'go list'.
+
+import (
+ "bytes"
+ "encoding/json"
+ "fmt"
+ "os"
+ "os/exec"
+ "strings"
+)
+
+// DriverRequest defines the schema of a request for package metadata
+// from an external driver program. The JSON-encoded DriverRequest
+// message is provided to the driver program's standard input. The
+// query patterns are provided as command-line arguments.
+//
+// See the package documentation for an overview.
+type DriverRequest struct {
+ Mode LoadMode `json:"mode"`
+
+ // Env specifies the environment the underlying build system should be run in.
+ Env []string `json:"env"`
+
+ // BuildFlags are flags that should be passed to the underlying build system.
+ BuildFlags []string `json:"build_flags"`
+
+ // Tests specifies whether the patterns should also return test packages.
+ Tests bool `json:"tests"`
+
+ // Overlay maps file paths (relative to the driver's working directory)
+ // to the contents of overlay files (see Config.Overlay).
+ Overlay map[string][]byte `json:"overlay"`
+}
+
+// DriverResponse defines the schema of a response from an external
+// driver program, providing the results of a query for package
+// metadata. The driver program must write a JSON-encoded
+// DriverResponse message to its standard output.
+//
+// See the package documentation for an overview.
+type DriverResponse struct {
+ // NotHandled is returned if the request can't be handled by the current
+ // driver. If an external driver returns a response with NotHandled, the
+ // rest of the DriverResponse is ignored, and go/packages will fall back
+ // to the next driver. If go/packages is extended in the future to support
+ // lists of multiple drivers, it will fall back to the next driver in the list.
+ NotHandled bool
+
+ // Compiler and Arch are the arguments to pass to types.SizesFor
+ // to get a types.Sizes to use when type checking.
+ Compiler string
+ Arch string
+
+ // Roots is the set of package IDs that make up the root packages.
+ // We have to encode this separately because when we encode a single package
+ // we cannot know if it is one of the roots as that requires knowledge of the
+ // graph it is part of.
+ Roots []string `json:",omitempty"`
+
+ // Packages is the full set of packages in the graph.
+ // The packages are not connected into a graph.
+ // The Imports if populated will be stubs that only have their ID set.
+ // Imports will be connected and then type and syntax information added in a
+ // later pass (see refine).
+ Packages []*Package
+
+ // GoVersion is the minor version number used by the driver
+ // (e.g. the go command on the PATH) when selecting .go files.
+ // Zero means unknown.
+ GoVersion int
+}
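+
+// Illustrative sketch (not part of this package's API): an external driver
+// is an ordinary executable that decodes a DriverRequest from its standard
+// input and encodes a DriverResponse to its standard output. A trivial
+// driver that always defers to 'go list' might look like:
+//
+//	func main() {
+//		var req packages.DriverRequest
+//		if err := json.NewDecoder(os.Stdin).Decode(&req); err != nil {
+//			log.Fatal(err)
+//		}
+//		// The query patterns arrive in os.Args[1:]; this driver ignores them
+//		// and reports NotHandled, so go/packages falls back to 'go list'.
+//		resp := packages.DriverResponse{NotHandled: true}
+//		if err := json.NewEncoder(os.Stdout).Encode(&resp); err != nil {
+//			log.Fatal(err)
+//		}
+//	}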
+
+// driver is the type for functions that query the build system for the
+// packages named by the patterns.
+type driver func(cfg *Config, patterns ...string) (*DriverResponse, error)
+
+// findExternalDriver returns a driver function backed by an external tool
+// that supplies the build system package structure, or nil if no such tool
+// is found.
+// If GOPACKAGESDRIVER is set in the environment, findExternalDriver uses its
+// value as that tool; otherwise it searches for a binary named
+// gopackagesdriver on the PATH.
+func findExternalDriver(cfg *Config) driver {
+ const toolPrefix = "GOPACKAGESDRIVER="
+ tool := ""
+ for _, env := range cfg.Env {
+ if val := strings.TrimPrefix(env, toolPrefix); val != env {
+ tool = val
+ }
+ }
+ if tool != "" && tool == "off" {
+ return nil
+ }
+ if tool == "" {
+ var err error
+ tool, err = exec.LookPath("gopackagesdriver")
+ if err != nil {
+ return nil
+ }
+ }
+ return func(cfg *Config, words ...string) (*DriverResponse, error) {
+ req, err := json.Marshal(DriverRequest{
+ Mode: cfg.Mode,
+ Env: cfg.Env,
+ BuildFlags: cfg.BuildFlags,
+ Tests: cfg.Tests,
+ Overlay: cfg.Overlay,
+ })
+ if err != nil {
+ return nil, fmt.Errorf("failed to encode message to driver tool: %v", err)
+ }
+
+ buf := new(bytes.Buffer)
+ stderr := new(bytes.Buffer)
+ cmd := exec.CommandContext(cfg.Context, tool, words...)
+ cmd.Dir = cfg.Dir
+ // The cwd gets resolved to the real path. On Darwin, where
+ // /tmp is a symlink, this breaks anything that expects the
+ // working directory to keep the original path, including the
+ // go command when dealing with modules.
+ //
+ // os.Getwd in the Go standard library has a special feature where if the
+ // cwd and the PWD are the same node then it trusts
+ // the PWD, so by setting it in the env for the child
+ // process we fix up all the paths returned by the go
+ // command.
+ //
+ // (See similar trick in Invocation.run in ../../internal/gocommand/invoke.go)
+ cmd.Env = append(slicesClip(cfg.Env), "PWD="+cfg.Dir)
+ cmd.Stdin = bytes.NewReader(req)
+ cmd.Stdout = buf
+ cmd.Stderr = stderr
+
+ if err := cmd.Run(); err != nil {
+ return nil, fmt.Errorf("%v: %v: %s", tool, err, cmd.Stderr)
+ }
+ if len(stderr.Bytes()) != 0 && os.Getenv("GOPACKAGESPRINTDRIVERERRORS") != "" {
+ fmt.Fprintf(os.Stderr, "%s stderr: <<%s>>\n", cmdDebugStr(cmd), stderr)
+ }
+
+ var response DriverResponse
+ if err := json.Unmarshal(buf.Bytes(), &response); err != nil {
+ return nil, err
+ }
+ return &response, nil
+ }
+}
+
+// slicesClip removes unused capacity from the slice, returning s[:len(s):len(s)].
+// TODO(adonovan): use go1.21 slices.Clip.
+func slicesClip[S ~[]E, E any](s S) S { return s[:len(s):len(s)] }
diff --git a/vendor/golang.org/x/tools/go/packages/golist.go b/vendor/golang.org/x/tools/go/packages/golist.go
new file mode 100644
index 0000000..1a3a5b4
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/packages/golist.go
@@ -0,0 +1,1066 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package packages
+
+import (
+ "bytes"
+ "context"
+ "encoding/json"
+ "fmt"
+ "log"
+ "os"
+ "os/exec"
+ "path"
+ "path/filepath"
+ "reflect"
+ "sort"
+ "strconv"
+ "strings"
+ "sync"
+ "unicode"
+
+ "golang.org/x/tools/internal/gocommand"
+ "golang.org/x/tools/internal/packagesinternal"
+)
+
+// debug controls verbose logging.
+var debug, _ = strconv.ParseBool(os.Getenv("GOPACKAGESDEBUG"))
+
+// A goTooOldError reports that the go command
+// found by exec.LookPath is too old to use the new go list behavior.
+type goTooOldError struct {
+ error
+}
+
+// responseDeduper wraps a DriverResponse, deduplicating its contents.
+type responseDeduper struct {
+ seenRoots map[string]bool
+ seenPackages map[string]*Package
+ dr *DriverResponse
+}
+
+func newDeduper() *responseDeduper {
+ return &responseDeduper{
+ dr: &DriverResponse{},
+ seenRoots: map[string]bool{},
+ seenPackages: map[string]*Package{},
+ }
+}
+
+// addAll fills in r with a DriverResponse.
+func (r *responseDeduper) addAll(dr *DriverResponse) {
+ for _, pkg := range dr.Packages {
+ r.addPackage(pkg)
+ }
+ for _, root := range dr.Roots {
+ r.addRoot(root)
+ }
+ r.dr.GoVersion = dr.GoVersion
+}
+
+func (r *responseDeduper) addPackage(p *Package) {
+ if r.seenPackages[p.ID] != nil {
+ return
+ }
+ r.seenPackages[p.ID] = p
+ r.dr.Packages = append(r.dr.Packages, p)
+}
+
+func (r *responseDeduper) addRoot(id string) {
+ if r.seenRoots[id] {
+ return
+ }
+ r.seenRoots[id] = true
+ r.dr.Roots = append(r.dr.Roots, id)
+}
+
+type golistState struct {
+ cfg *Config
+ ctx context.Context
+
+ envOnce sync.Once
+ goEnvError error
+ goEnv map[string]string
+
+ rootsOnce sync.Once
+ rootDirsError error
+ rootDirs map[string]string
+
+ goVersionOnce sync.Once
+ goVersionError error
+ goVersion int // The X in Go 1.X.
+
+ // vendorDirs caches the (non)existence of vendor directories.
+ vendorDirs map[string]bool
+}
+
+// getEnv returns Go environment variables. Only specific variables are
+// populated -- computing all of them is slow.
+func (state *golistState) getEnv() (map[string]string, error) {
+ state.envOnce.Do(func() {
+ var b *bytes.Buffer
+ b, state.goEnvError = state.invokeGo("env", "-json", "GOMOD", "GOPATH")
+ if state.goEnvError != nil {
+ return
+ }
+
+ state.goEnv = make(map[string]string)
+ decoder := json.NewDecoder(b)
+ if state.goEnvError = decoder.Decode(&state.goEnv); state.goEnvError != nil {
+ return
+ }
+ })
+ return state.goEnv, state.goEnvError
+}
+
+// mustGetEnv is a convenience function that can be used if getEnv has already succeeded.
+func (state *golistState) mustGetEnv() map[string]string {
+ env, err := state.getEnv()
+ if err != nil {
+ panic(fmt.Sprintf("mustGetEnv: %v", err))
+ }
+ return env
+}
+
+// goListDriver uses the go list command to interpret the patterns and produce
+// the build system package structure.
+// See driver for more details.
+func goListDriver(cfg *Config, patterns ...string) (_ *DriverResponse, err error) {
+ // Make sure that any asynchronous go commands are killed when we return.
+ parentCtx := cfg.Context
+ if parentCtx == nil {
+ parentCtx = context.Background()
+ }
+ ctx, cancel := context.WithCancel(parentCtx)
+ defer cancel()
+
+ response := newDeduper()
+
+ state := &golistState{
+ cfg: cfg,
+ ctx: ctx,
+ vendorDirs: map[string]bool{},
+ }
+
+ // Fill in response.Sizes asynchronously if necessary.
+ if cfg.Mode&NeedTypesSizes != 0 || cfg.Mode&NeedTypes != 0 {
+ errCh := make(chan error)
+ go func() {
+ compiler, arch, err := getSizesForArgs(ctx, state.cfgInvocation(), cfg.gocmdRunner)
+ response.dr.Compiler = compiler
+ response.dr.Arch = arch
+ errCh <- err
+ }()
+ defer func() {
+ if sizesErr := <-errCh; sizesErr != nil {
+ err = sizesErr
+ }
+ }()
+ }
+
+ // Determine files requested in contains patterns
+ var containFiles []string
+ restPatterns := make([]string, 0, len(patterns))
+ // Extract file= and other [querytype]= patterns. Report an error if querytype
+ // doesn't exist.
+extractQueries:
+ for _, pattern := range patterns {
+ eqidx := strings.Index(pattern, "=")
+ if eqidx < 0 {
+ restPatterns = append(restPatterns, pattern)
+ } else {
+ query, value := pattern[:eqidx], pattern[eqidx+len("="):]
+ switch query {
+ case "file":
+ containFiles = append(containFiles, value)
+ case "pattern":
+ restPatterns = append(restPatterns, value)
+ case "": // not a reserved query
+ restPatterns = append(restPatterns, pattern)
+ default:
+ for _, rune := range query {
+ if rune < 'a' || rune > 'z' { // not a reserved query
+ restPatterns = append(restPatterns, pattern)
+ continue extractQueries
+ }
+ }
+ // Reject all other patterns containing "="
+ return nil, fmt.Errorf("invalid query type %q in query pattern %q", query, pattern)
+ }
+ }
+ }
+
+ // See if we have any patterns to pass through to go list. Zero initial
+ // patterns also requires a go list call, since it's the equivalent of
+ // ".".
+ if len(restPatterns) > 0 || len(patterns) == 0 {
+ dr, err := state.createDriverResponse(restPatterns...)
+ if err != nil {
+ return nil, err
+ }
+ response.addAll(dr)
+ }
+
+ if len(containFiles) != 0 {
+ if err := state.runContainsQueries(response, containFiles); err != nil {
+ return nil, err
+ }
+ }
+
+ // (We may yet return an error due to defer.)
+ return response.dr, nil
+}
+
+func (state *golistState) runContainsQueries(response *responseDeduper, queries []string) error {
+ for _, query := range queries {
+ // TODO(matloob): Do only one query per directory.
+ fdir := filepath.Dir(query)
+ // Pass absolute path of directory to go list so that it knows to treat it as a directory,
+ // not a package path.
+ pattern, err := filepath.Abs(fdir)
+ if err != nil {
+ return fmt.Errorf("could not determine absolute path of file= query path %q: %v", query, err)
+ }
+ dirResponse, err := state.createDriverResponse(pattern)
+
+ // If there was an error loading the package, or no packages are returned,
+ // or the package is returned with errors, try to load the file as an
+ // ad-hoc package.
+ // Usually the error will appear in a returned package, but may not if we're
+ // in module mode and the ad-hoc package is located outside a module.
+ if err != nil || len(dirResponse.Packages) == 0 || len(dirResponse.Packages) == 1 && len(dirResponse.Packages[0].GoFiles) == 0 &&
+ len(dirResponse.Packages[0].Errors) == 1 {
+ var queryErr error
+ if dirResponse, queryErr = state.adhocPackage(pattern, query); queryErr != nil {
+ return err // return the original error
+ }
+ }
+ isRoot := make(map[string]bool, len(dirResponse.Roots))
+ for _, root := range dirResponse.Roots {
+ isRoot[root] = true
+ }
+ for _, pkg := range dirResponse.Packages {
+ // Add any new packages to the main set
+ // We don't bother to filter packages that will be dropped by the changes of roots,
+ // that will happen anyway during graph construction outside this function.
+ // Over-reporting packages is not a problem.
+ response.addPackage(pkg)
+ // if the package was not a root one, it cannot have the file
+ if !isRoot[pkg.ID] {
+ continue
+ }
+ for _, pkgFile := range pkg.GoFiles {
+ if filepath.Base(query) == filepath.Base(pkgFile) {
+ response.addRoot(pkg.ID)
+ break
+ }
+ }
+ }
+ }
+ return nil
+}
+
+// adhocPackage attempts to load or construct an ad-hoc package for a given
+// query, if the original call to the driver produced inadequate results.
+func (state *golistState) adhocPackage(pattern, query string) (*DriverResponse, error) {
+ response, err := state.createDriverResponse(query)
+ if err != nil {
+ return nil, err
+ }
+ // If we get nothing back from `go list`,
+ // try to make this file into its own ad-hoc package.
+ // TODO(rstambler): Should this check against the original response?
+ if len(response.Packages) == 0 {
+ response.Packages = append(response.Packages, &Package{
+ ID: "command-line-arguments",
+ PkgPath: query,
+ GoFiles: []string{query},
+ CompiledGoFiles: []string{query},
+ Imports: make(map[string]*Package),
+ })
+ response.Roots = append(response.Roots, "command-line-arguments")
+ }
+ // Handle special cases.
+ if len(response.Packages) == 1 {
+ // golang/go#33482: If this is a file= query for ad-hoc packages where
+ // the file only exists on an overlay, and exists outside of a module,
+ // add the file to the package and remove the errors.
+ if response.Packages[0].ID == "command-line-arguments" ||
+ filepath.ToSlash(response.Packages[0].PkgPath) == filepath.ToSlash(query) {
+ if len(response.Packages[0].GoFiles) == 0 {
+ filename := filepath.Join(pattern, filepath.Base(query)) // avoid recomputing abspath
+ // TODO(matloob): check if the file is outside of a root dir?
+ for path := range state.cfg.Overlay {
+ if path == filename {
+ response.Packages[0].Errors = nil
+ response.Packages[0].GoFiles = []string{path}
+ response.Packages[0].CompiledGoFiles = []string{path}
+ }
+ }
+ }
+ }
+ }
+ return response, nil
+}
+
+// Fields must match go list;
+// see $GOROOT/src/cmd/go/internal/load/pkg.go.
+type jsonPackage struct {
+ ImportPath string
+ Dir string
+ Name string
+ Export string
+ GoFiles []string
+ CompiledGoFiles []string
+ IgnoredGoFiles []string
+ IgnoredOtherFiles []string
+ EmbedPatterns []string
+ EmbedFiles []string
+ CFiles []string
+ CgoFiles []string
+ CXXFiles []string
+ MFiles []string
+ HFiles []string
+ FFiles []string
+ SFiles []string
+ SwigFiles []string
+ SwigCXXFiles []string
+ SysoFiles []string
+ Imports []string
+ ImportMap map[string]string
+ Deps []string
+ Module *Module
+ TestGoFiles []string
+ TestImports []string
+ XTestGoFiles []string
+ XTestImports []string
+ ForTest string // q in a "p [q.test]" package, else ""
+ DepOnly bool
+
+ Error *packagesinternal.PackageError
+ DepsErrors []*packagesinternal.PackageError
+}
+
+type jsonPackageError struct {
+ ImportStack []string
+ Pos string
+ Err string
+}
+
+func otherFiles(p *jsonPackage) [][]string {
+ return [][]string{p.CFiles, p.CXXFiles, p.MFiles, p.HFiles, p.FFiles, p.SFiles, p.SwigFiles, p.SwigCXXFiles, p.SysoFiles}
+}
+
+// createDriverResponse uses the "go list" command to expand the pattern
+// words and return a response for the specified packages.
+func (state *golistState) createDriverResponse(words ...string) (*DriverResponse, error) {
+ // go list uses the following identifiers in ImportPath and Imports:
+ //
+ // "p" -- importable package or main (command)
+ // "q.test" -- q's test executable
+ // "p [q.test]" -- variant of p as built for q's test executable
+ // "q_test [q.test]" -- q's external test package
+ //
+ // The packages p that are built differently for a test q.test
+ // are q itself, plus any helpers used by the external test q_test,
+ // typically including "testing" and all its dependencies.
+
+ // Run "go list" for complete
+ // information on the specified packages.
+ goVersion, err := state.getGoVersion()
+ if err != nil {
+ return nil, err
+ }
+ buf, err := state.invokeGo("list", golistargs(state.cfg, words, goVersion)...)
+ if err != nil {
+ return nil, err
+ }
+
+ seen := make(map[string]*jsonPackage)
+ pkgs := make(map[string]*Package)
+ additionalErrors := make(map[string][]Error)
+ // Decode the JSON and convert it to Package form.
+ response := &DriverResponse{
+ GoVersion: goVersion,
+ }
+ for dec := json.NewDecoder(buf); dec.More(); {
+ p := new(jsonPackage)
+ if err := dec.Decode(p); err != nil {
+ return nil, fmt.Errorf("JSON decoding failed: %v", err)
+ }
+
+ if p.ImportPath == "" {
+ // The documentation for go list says that “[e]rroneous packages will have
+ // a non-empty ImportPath”. If for some reason it comes back empty, we
+ // prefer to error out rather than silently discarding data or handing
+ // back a package without any way to refer to it.
+ if p.Error != nil {
+ return nil, Error{
+ Pos: p.Error.Pos,
+ Msg: p.Error.Err,
+ }
+ }
+ return nil, fmt.Errorf("package missing import path: %+v", p)
+ }
+
+ // Work around https://golang.org/issue/33157:
+ // go list -e, when given an absolute path, will find the package contained at
+ // that directory. But when no package exists there, it will return a fake package
+ // with an error and the ImportPath set to the absolute path provided to go list.
+ // Try to convert that absolute path to what its package path would be if it's
+ // contained in a known module or GOPATH entry. This will allow the package to be
+ // properly "reclaimed" when overlays are processed.
+ if filepath.IsAbs(p.ImportPath) && p.Error != nil {
+ pkgPath, ok, err := state.getPkgPath(p.ImportPath)
+ if err != nil {
+ return nil, err
+ }
+ if ok {
+ p.ImportPath = pkgPath
+ }
+ }
+
+ if old, found := seen[p.ImportPath]; found {
+ // If one version of the package has an error, and the other doesn't, assume
+ // that this is a case where go list is reporting a fake dependency variant
+ // of the imported package: When a package tries to invalidly import another
+ // package, go list emits a variant of the imported package (with the same
+ // import path, but with an error on it, and the package will have a
+ // DepError set on it). An example of when this can happen is for imports of
+ // main packages: main packages can not be imported, but they may be
+ // separately matched and listed by another pattern.
+ // See golang.org/issue/36188 for more details.
+
+ // The plan is that eventually, hopefully in Go 1.15, the error will be
+ // reported on the importing package rather than the duplicate "fake"
+ // version of the imported package. Once all supported versions of Go
+ // have the new behavior this logic can be deleted.
+ // TODO(matloob): delete the workaround logic once all supported versions of
+ // Go return the errors on the proper package.
+
+ // There should be exactly one version of a package that doesn't have an
+ // error.
+ if old.Error == nil && p.Error == nil {
+ if !reflect.DeepEqual(p, old) {
+ return nil, fmt.Errorf("internal error: go list gives conflicting information for package %v", p.ImportPath)
+ }
+ continue
+ }
+
+ // Determine if this package's error needs to be bubbled up.
+ // This is a hack, and we expect for go list to eventually set the error
+ // on the package.
+ if old.Error != nil {
+ var errkind string
+ if strings.Contains(old.Error.Err, "not an importable package") {
+ errkind = "not an importable package"
+ } else if strings.Contains(old.Error.Err, "use of internal package") && strings.Contains(old.Error.Err, "not allowed") {
+ errkind = "use of internal package not allowed"
+ }
+ if errkind != "" {
+ if len(old.Error.ImportStack) < 1 {
+ return nil, fmt.Errorf(`internal error: go list gave a %q error with empty import stack`, errkind)
+ }
+ importingPkg := old.Error.ImportStack[len(old.Error.ImportStack)-1]
+ if importingPkg == old.ImportPath {
+ // Using an older version of Go which put this package itself on top of import
+ // stack, instead of the importer. Look for importer in second from top
+ // position.
+ if len(old.Error.ImportStack) < 2 {
+ return nil, fmt.Errorf(`internal error: go list gave a %q error with an import stack without importing package`, errkind)
+ }
+ importingPkg = old.Error.ImportStack[len(old.Error.ImportStack)-2]
+ }
+ additionalErrors[importingPkg] = append(additionalErrors[importingPkg], Error{
+ Pos: old.Error.Pos,
+ Msg: old.Error.Err,
+ Kind: ListError,
+ })
+ }
+ }
+
+ // Make sure that if there's a version of the package without an error,
+ // that's the one reported to the user.
+ if old.Error == nil {
+ continue
+ }
+
+ // This package will replace the old one at the end of the loop.
+ }
+ seen[p.ImportPath] = p
+
+ pkg := &Package{
+ Name: p.Name,
+ ID: p.ImportPath,
+ GoFiles: absJoin(p.Dir, p.GoFiles, p.CgoFiles),
+ CompiledGoFiles: absJoin(p.Dir, p.CompiledGoFiles),
+ OtherFiles: absJoin(p.Dir, otherFiles(p)...),
+ EmbedFiles: absJoin(p.Dir, p.EmbedFiles),
+ EmbedPatterns: absJoin(p.Dir, p.EmbedPatterns),
+ IgnoredFiles: absJoin(p.Dir, p.IgnoredGoFiles, p.IgnoredOtherFiles),
+ forTest: p.ForTest,
+ depsErrors: p.DepsErrors,
+ Module: p.Module,
+ }
+
+ if (state.cfg.Mode&typecheckCgo) != 0 && len(p.CgoFiles) != 0 {
+ if len(p.CompiledGoFiles) > len(p.GoFiles) {
+ // We need the cgo definitions, which are in the first
+ // CompiledGoFile after the non-cgo ones. This is a hack but there
+ // isn't currently a better way to find it. We also need the pure
+ // Go files and unprocessed cgo files, all of which are already
+ // in pkg.GoFiles.
+ cgoTypes := p.CompiledGoFiles[len(p.GoFiles)]
+ pkg.CompiledGoFiles = append([]string{cgoTypes}, pkg.GoFiles...)
+ } else {
+ // golang/go#38990: go list silently fails to do cgo processing
+ pkg.CompiledGoFiles = nil
+ pkg.Errors = append(pkg.Errors, Error{
+ Msg: "go list failed to return CompiledGoFiles. This may indicate failure to perform cgo processing; try building at the command line. See https://golang.org/issue/38990.",
+ Kind: ListError,
+ })
+ }
+ }
+
+ // Work around https://golang.org/issue/28749:
+ // cmd/go puts assembly, C, and C++ files in CompiledGoFiles.
+ // Remove files from CompiledGoFiles that are non-go files
+ // (or are not files that look like they are from the cache).
+ if len(pkg.CompiledGoFiles) > 0 {
+ out := pkg.CompiledGoFiles[:0]
+ for _, f := range pkg.CompiledGoFiles {
+ if ext := filepath.Ext(f); ext != ".go" && ext != "" { // ext == "" means the file is from the cache, so probably cgo-processed file
+ continue
+ }
+ out = append(out, f)
+ }
+ pkg.CompiledGoFiles = out
+ }
+
+ // Extract the PkgPath from the package's ID.
+ if i := strings.IndexByte(pkg.ID, ' '); i >= 0 {
+ pkg.PkgPath = pkg.ID[:i]
+ } else {
+ pkg.PkgPath = pkg.ID
+ }
+
+ if pkg.PkgPath == "unsafe" {
+ pkg.CompiledGoFiles = nil // ignore fake unsafe.go file (#59929)
+ } else if len(pkg.CompiledGoFiles) == 0 {
+ // Workaround for pre-go1.11 versions of go list.
+ // TODO(matloob): they should be handled by the fallback.
+ // Can we delete this?
+ pkg.CompiledGoFiles = pkg.GoFiles
+ }
+
+ // Assume go list emits only absolute paths for Dir.
+ if p.Dir != "" && !filepath.IsAbs(p.Dir) {
+ log.Fatalf("internal error: go list returned non-absolute Package.Dir: %s", p.Dir)
+ }
+
+ if p.Export != "" && !filepath.IsAbs(p.Export) {
+ pkg.ExportFile = filepath.Join(p.Dir, p.Export)
+ } else {
+ pkg.ExportFile = p.Export
+ }
+
+ // imports
+ //
+ // Imports contains the IDs of all imported packages.
+ // ImportsMap records (path, ID) only where they differ.
+ ids := make(map[string]bool)
+ for _, id := range p.Imports {
+ ids[id] = true
+ }
+ pkg.Imports = make(map[string]*Package)
+ for path, id := range p.ImportMap {
+ pkg.Imports[path] = &Package{ID: id} // non-identity import
+ delete(ids, id)
+ }
+ for id := range ids {
+ if id == "C" {
+ continue
+ }
+
+ pkg.Imports[id] = &Package{ID: id} // identity import
+ }
+ if !p.DepOnly {
+ response.Roots = append(response.Roots, pkg.ID)
+ }
+
+ // Temporary work-around for golang/go#39986. Parse filenames out of
+ // error messages. This happens if there are unrecoverable syntax
+ // errors in the source, so we can't match on a specific error message.
+ //
+ // TODO(rfindley): remove this heuristic, in favor of considering
+ // InvalidGoFiles from the list driver.
+ if err := p.Error; err != nil && state.shouldAddFilenameFromError(p) {
+ addFilenameFromPos := func(pos string) bool {
+ split := strings.Split(pos, ":")
+ if len(split) < 1 {
+ return false
+ }
+ filename := strings.TrimSpace(split[0])
+ if filename == "" {
+ return false
+ }
+ if !filepath.IsAbs(filename) {
+ filename = filepath.Join(state.cfg.Dir, filename)
+ }
+ info, _ := os.Stat(filename)
+ if info == nil {
+ return false
+ }
+ pkg.CompiledGoFiles = append(pkg.CompiledGoFiles, filename)
+ pkg.GoFiles = append(pkg.GoFiles, filename)
+ return true
+ }
+ found := addFilenameFromPos(err.Pos)
+ // In some cases, go list only reports the error position in the
+ // error text, not the error position. One such case is when the
+ // file's package name is a keyword (see golang.org/issue/39763).
+ if !found {
+ addFilenameFromPos(err.Err)
+ }
+ }
+
+ if p.Error != nil {
+ msg := strings.TrimSpace(p.Error.Err) // Trim to work around golang.org/issue/32363.
+ // Address golang.org/issue/35964 by appending import stack to error message.
+ if msg == "import cycle not allowed" && len(p.Error.ImportStack) != 0 {
+ msg += fmt.Sprintf(": import stack: %v", p.Error.ImportStack)
+ }
+ pkg.Errors = append(pkg.Errors, Error{
+ Pos: p.Error.Pos,
+ Msg: msg,
+ Kind: ListError,
+ })
+ }
+
+ pkgs[pkg.ID] = pkg
+ }
+
+ for id, errs := range additionalErrors {
+ if p, ok := pkgs[id]; ok {
+ p.Errors = append(p.Errors, errs...)
+ }
+ }
+ for _, pkg := range pkgs {
+ response.Packages = append(response.Packages, pkg)
+ }
+ sort.Slice(response.Packages, func(i, j int) bool { return response.Packages[i].ID < response.Packages[j].ID })
+
+ return response, nil
+}
+
+func (state *golistState) shouldAddFilenameFromError(p *jsonPackage) bool {
+ if len(p.GoFiles) > 0 || len(p.CompiledGoFiles) > 0 {
+ return false
+ }
+
+ goV, err := state.getGoVersion()
+ if err != nil {
+ return false
+ }
+
+ // On Go 1.14 and earlier, only add filenames from errors if the import stack is empty.
+ // The import stack behaves differently for these versions than newer Go versions.
+ if goV < 15 {
+ return len(p.Error.ImportStack) == 0
+ }
+
+ // On Go 1.15 and later, only parse filenames out of error if there's no import stack,
+ // or the current package is at the top of the import stack. This is not guaranteed
+ // to work perfectly, but should avoid some cases where files in errors don't belong to this
+ // package.
+ return len(p.Error.ImportStack) == 0 || p.Error.ImportStack[len(p.Error.ImportStack)-1] == p.ImportPath
+}
+
+// getGoVersion returns the effective minor version of the go command.
+func (state *golistState) getGoVersion() (int, error) {
+ state.goVersionOnce.Do(func() {
+ state.goVersion, state.goVersionError = gocommand.GoVersion(state.ctx, state.cfgInvocation(), state.cfg.gocmdRunner)
+ })
+ return state.goVersion, state.goVersionError
+}
+
+// getPkgPath finds the package path of a directory if it's relative to a root
+// directory.
+func (state *golistState) getPkgPath(dir string) (string, bool, error) {
+ absDir, err := filepath.Abs(dir)
+ if err != nil {
+ return "", false, err
+ }
+ roots, err := state.determineRootDirs()
+ if err != nil {
+ return "", false, err
+ }
+
+ for rdir, rpath := range roots {
+ // Make sure that the directory is in the module,
+ // to avoid creating a path relative to another module.
+ if !strings.HasPrefix(absDir, rdir) {
+ continue
+ }
+ // TODO(matloob): This doesn't properly handle symlinks.
+ r, err := filepath.Rel(rdir, dir)
+ if err != nil {
+ continue
+ }
+ if rpath != "" {
+ // We choose only one root even though the directory can belong to multiple
+ // modules or GOPATH entries. This is okay because we only need to work with
+ // absolute dirs when a file is missing from disk, for instance when gopls
+ // calls go/packages in an overlay. Once the file is saved, gopls or the next
+ // invocation of the tool will get the correct result straight from go list.
+ // TODO(matloob): Implement module tiebreaking?
+ return path.Join(rpath, filepath.ToSlash(r)), true, nil
+ }
+ return filepath.ToSlash(r), true, nil
+ }
+ return "", false, nil
+}
+
+// absJoin absolutizes and flattens the lists of files.
+func absJoin(dir string, fileses ...[]string) (res []string) {
+ for _, files := range fileses {
+ for _, file := range files {
+ if !filepath.IsAbs(file) {
+ file = filepath.Join(dir, file)
+ }
+ res = append(res, file)
+ }
+ }
+ return res
+}
+
+func jsonFlag(cfg *Config, goVersion int) string {
+ if goVersion < 19 {
+ return "-json"
+ }
+ var fields []string
+ added := make(map[string]bool)
+ addFields := func(fs ...string) {
+ for _, f := range fs {
+ if !added[f] {
+ added[f] = true
+ fields = append(fields, f)
+ }
+ }
+ }
+ addFields("Name", "ImportPath", "Error") // These fields are always needed
+ if cfg.Mode&NeedFiles != 0 || cfg.Mode&NeedTypes != 0 {
+ addFields("Dir", "GoFiles", "IgnoredGoFiles", "IgnoredOtherFiles", "CFiles",
+ "CgoFiles", "CXXFiles", "MFiles", "HFiles", "FFiles", "SFiles",
+ "SwigFiles", "SwigCXXFiles", "SysoFiles")
+ if cfg.Tests {
+ addFields("TestGoFiles", "XTestGoFiles")
+ }
+ }
+ if cfg.Mode&NeedTypes != 0 {
+ // CompiledGoFiles seems to be required for the test case TestCgoNoSyntax,
+ // even when -compiled isn't passed in.
+ // TODO(#52435): Should we make the test ask for -compiled, or automatically
+ // request CompiledGoFiles in certain circumstances?
+ addFields("Dir", "CompiledGoFiles")
+ }
+ if cfg.Mode&NeedCompiledGoFiles != 0 {
+ addFields("Dir", "CompiledGoFiles", "Export")
+ }
+ if cfg.Mode&NeedImports != 0 {
+ // When imports are requested, DepOnly is used to distinguish between packages
+ // explicitly requested and transitive imports of those packages.
+ addFields("DepOnly", "Imports", "ImportMap")
+ if cfg.Tests {
+ addFields("TestImports", "XTestImports")
+ }
+ }
+ if cfg.Mode&NeedDeps != 0 {
+ addFields("DepOnly")
+ }
+ if usesExportData(cfg) {
+ // Request Dir in the unlikely case Export is not absolute.
+ addFields("Dir", "Export")
+ }
+ if cfg.Mode&needInternalForTest != 0 {
+ addFields("ForTest")
+ }
+ if cfg.Mode&needInternalDepsErrors != 0 {
+ addFields("DepsErrors")
+ }
+ if cfg.Mode&NeedModule != 0 {
+ addFields("Module")
+ }
+ if cfg.Mode&NeedEmbedFiles != 0 {
+ addFields("EmbedFiles")
+ }
+ if cfg.Mode&NeedEmbedPatterns != 0 {
+ addFields("EmbedPatterns")
+ }
+ return "-json=" + strings.Join(fields, ",")
+}
+
+func golistargs(cfg *Config, words []string, goVersion int) []string {
+ const findFlags = NeedImports | NeedTypes | NeedSyntax | NeedTypesInfo
+ fullargs := []string{
+ "-e", jsonFlag(cfg, goVersion),
+ fmt.Sprintf("-compiled=%t", cfg.Mode&(NeedCompiledGoFiles|NeedSyntax|NeedTypes|NeedTypesInfo|NeedTypesSizes) != 0),
+ fmt.Sprintf("-test=%t", cfg.Tests),
+ fmt.Sprintf("-export=%t", usesExportData(cfg)),
+ fmt.Sprintf("-deps=%t", cfg.Mode&NeedImports != 0),
+ // go list doesn't let you pass -test and -find together,
+ // probably because you'd just get the TestMain.
+ fmt.Sprintf("-find=%t", !cfg.Tests && cfg.Mode&findFlags == 0 && !usesExportData(cfg)),
+ }
+
+ // golang/go#60456: with go1.21 and later, go list serves pgo variants, which
+ // can be costly to compute and may result in redundant processing for the
+ // caller. Disable these variants. If someone wants to add e.g. a NeedPGO
+ // mode flag, that should be a separate proposal.
+ if goVersion >= 21 {
+ fullargs = append(fullargs, "-pgo=off")
+ }
+
+ fullargs = append(fullargs, cfg.BuildFlags...)
+ fullargs = append(fullargs, "--")
+ fullargs = append(fullargs, words...)
+ return fullargs
+}
+
+// cfgInvocation returns an Invocation that reflects cfg's settings.
+func (state *golistState) cfgInvocation() gocommand.Invocation {
+ cfg := state.cfg
+ return gocommand.Invocation{
+ BuildFlags: cfg.BuildFlags,
+ ModFile: cfg.modFile,
+ ModFlag: cfg.modFlag,
+ CleanEnv: cfg.Env != nil,
+ Env: cfg.Env,
+ Logf: cfg.Logf,
+ WorkingDir: cfg.Dir,
+ Overlay: cfg.goListOverlayFile,
+ }
+}
+
+// invokeGo returns the stdout of a go command invocation.
+func (state *golistState) invokeGo(verb string, args ...string) (*bytes.Buffer, error) {
+ cfg := state.cfg
+
+ inv := state.cfgInvocation()
+ inv.Verb = verb
+ inv.Args = args
+ gocmdRunner := cfg.gocmdRunner
+ if gocmdRunner == nil {
+ gocmdRunner = &gocommand.Runner{}
+ }
+ stdout, stderr, friendlyErr, err := gocmdRunner.RunRaw(cfg.Context, inv)
+ if err != nil {
+ // Check for 'go' executable not being found.
+ if ee, ok := err.(*exec.Error); ok && ee.Err == exec.ErrNotFound {
+ return nil, fmt.Errorf("'go list' driver requires 'go', but %s", exec.ErrNotFound)
+ }
+
+ exitErr, ok := err.(*exec.ExitError)
+ if !ok {
+ // Catastrophic error:
+ // - context cancellation
+ return nil, fmt.Errorf("couldn't run 'go': %w", err)
+ }
+
+ // Old go version?
+ if strings.Contains(stderr.String(), "flag provided but not defined") {
+ return nil, goTooOldError{fmt.Errorf("unsupported version of go: %s: %s", exitErr, stderr)}
+ }
+
+ // Related to #24854
+ if len(stderr.String()) > 0 && strings.Contains(stderr.String(), "unexpected directory layout") {
+ return nil, friendlyErr
+ }
+
+ // Is there an error running the C compiler in cgo? This will be reported in the "Error" field
+ // and should be suppressed by go list -e.
+ //
+ // This condition is not perfect yet because the error message can include other error messages than runtime/cgo.
+ isPkgPathRune := func(r rune) bool {
+ // From https://golang.org/ref/spec#Import_declarations:
+ // Implementation restriction: A compiler may restrict ImportPaths to non-empty strings
+ // using only characters belonging to Unicode's L, M, N, P, and S general categories
+ // (the Graphic characters without spaces) and may also exclude the
+ // characters !"#$%&'()*,:;<=>?[\]^`{|} and the Unicode replacement character U+FFFD.
+ return unicode.IsOneOf([]*unicode.RangeTable{unicode.L, unicode.M, unicode.N, unicode.P, unicode.S}, r) &&
+ !strings.ContainsRune("!\"#$%&'()*,:;<=>?[\\]^`{|}\uFFFD", r)
+ }
+ // golang/go#36770: Handle case where cmd/go prints module download messages before the error.
+ msg := stderr.String()
+ for strings.HasPrefix(msg, "go: downloading") {
+ msg = msg[strings.IndexRune(msg, '\n')+1:]
+ }
+ if len(stderr.String()) > 0 && strings.HasPrefix(stderr.String(), "# ") {
+ msg := msg[len("# "):]
+ if strings.HasPrefix(strings.TrimLeftFunc(msg, isPkgPathRune), "\n") {
+ return stdout, nil
+ }
+ // Treat pkg-config errors as a special case (golang.org/issue/36770).
+ if strings.HasPrefix(msg, "pkg-config") {
+ return stdout, nil
+ }
+ }
+
+ // This error only appears in stderr. See golang.org/cl/166398 for a fix in go list to show
+ // the error in the Err section of stdout in case -e option is provided.
+ // This fix is provided for backwards compatibility.
+ if len(stderr.String()) > 0 && strings.Contains(stderr.String(), "named files must be .go files") {
+ output := fmt.Sprintf(`{"ImportPath": "command-line-arguments","Incomplete": true,"Error": {"Pos": "","Err": %q}}`,
+ strings.Trim(stderr.String(), "\n"))
+ return bytes.NewBufferString(output), nil
+ }
+
+ // Similar to the previous error, but currently lacks a fix in Go.
+ if len(stderr.String()) > 0 && strings.Contains(stderr.String(), "named files must all be in one directory") {
+ output := fmt.Sprintf(`{"ImportPath": "command-line-arguments","Incomplete": true,"Error": {"Pos": "","Err": %q}}`,
+ strings.Trim(stderr.String(), "\n"))
+ return bytes.NewBufferString(output), nil
+ }
+
+ // Backwards compatibility for Go 1.11 because 1.12 and 1.13 put the directory in the ImportPath.
+ // If the package doesn't exist, put the absolute path of the directory into the error message,
+ // as Go 1.13 list does.
+ const noSuchDirectory = "no such directory"
+ if len(stderr.String()) > 0 && strings.Contains(stderr.String(), noSuchDirectory) {
+ errstr := stderr.String()
+ abspath := strings.TrimSpace(errstr[strings.Index(errstr, noSuchDirectory)+len(noSuchDirectory):])
+ output := fmt.Sprintf(`{"ImportPath": %q,"Incomplete": true,"Error": {"Pos": "","Err": %q}}`,
+ abspath, strings.Trim(stderr.String(), "\n"))
+ return bytes.NewBufferString(output), nil
+ }
+
+ // Workaround for #29280: go list -e has incorrect behavior when an ad-hoc package doesn't exist.
+ // Note that the error message we look for in this case is different from the one looked for above.
+ if len(stderr.String()) > 0 && strings.Contains(stderr.String(), "no such file or directory") {
+ output := fmt.Sprintf(`{"ImportPath": "command-line-arguments","Incomplete": true,"Error": {"Pos": "","Err": %q}}`,
+ strings.Trim(stderr.String(), "\n"))
+ return bytes.NewBufferString(output), nil
+ }
+
+ // Workaround for #34273. go list -e with GO111MODULE=on has incorrect behavior when listing a
+ // directory outside any module.
+ if len(stderr.String()) > 0 && strings.Contains(stderr.String(), "outside available modules") {
+ output := fmt.Sprintf(`{"ImportPath": %q,"Incomplete": true,"Error": {"Pos": "","Err": %q}}`,
+ // TODO(matloob): command-line-arguments isn't correct here.
+ "command-line-arguments", strings.Trim(stderr.String(), "\n"))
+ return bytes.NewBufferString(output), nil
+ }
+
+ // Another variation of the previous error
+ if len(stderr.String()) > 0 && strings.Contains(stderr.String(), "outside module root") {
+ output := fmt.Sprintf(`{"ImportPath": %q,"Incomplete": true,"Error": {"Pos": "","Err": %q}}`,
+ // TODO(matloob): command-line-arguments isn't correct here.
+ "command-line-arguments", strings.Trim(stderr.String(), "\n"))
+ return bytes.NewBufferString(output), nil
+ }
+
+ // Workaround for an instance of golang.org/issue/26755: go list -e will return a non-zero exit
+ // status if there's a dependency on a package that doesn't exist. But it should return
+ // a zero exit status and set an error on that package.
+ if len(stderr.String()) > 0 && strings.Contains(stderr.String(), "no Go files in") {
+ // Don't clobber stdout if `go list` actually returned something.
+ if len(stdout.String()) > 0 {
+ return stdout, nil
+ }
+ // try to extract package name from string
+ stderrStr := stderr.String()
+ var importPath string
+ colon := strings.Index(stderrStr, ":")
+ if colon > 0 && strings.HasPrefix(stderrStr, "go build ") {
+ importPath = stderrStr[len("go build "):colon]
+ }
+ output := fmt.Sprintf(`{"ImportPath": %q,"Incomplete": true,"Error": {"Pos": "","Err": %q}}`,
+ importPath, strings.Trim(stderrStr, "\n"))
+ return bytes.NewBufferString(output), nil
+ }
+
+ // Export mode entails a build.
+ // If that build fails, errors appear on stderr
+ // (despite the -e flag) and the Export field is blank.
+ // Do not fail in that case.
+ // The same is true if an ad-hoc package given to go list doesn't exist.
+ // TODO(matloob): Remove these once we can depend on go list to exit with a zero status with -e even when
+ // packages don't exist or a build fails.
+ if !usesExportData(cfg) && !containsGoFile(args) {
+ return nil, friendlyErr
+ }
+ }
+ return stdout, nil
+}
+
+func containsGoFile(s []string) bool {
+ for _, f := range s {
+ if strings.HasSuffix(f, ".go") {
+ return true
+ }
+ }
+ return false
+}
+
+func cmdDebugStr(cmd *exec.Cmd) string {
+ env := make(map[string]string)
+ for _, kv := range cmd.Env {
+ split := strings.SplitN(kv, "=", 2)
+ k, v := split[0], split[1]
+ env[k] = v
+ }
+
+ var args []string
+ for _, arg := range cmd.Args {
+ quoted := strconv.Quote(arg)
+ if quoted[1:len(quoted)-1] != arg || strings.Contains(arg, " ") {
+ args = append(args, quoted)
+ } else {
+ args = append(args, arg)
+ }
+ }
+ return fmt.Sprintf("GOROOT=%v GOPATH=%v GO111MODULE=%v GOPROXY=%v PWD=%v %v", env["GOROOT"], env["GOPATH"], env["GO111MODULE"], env["GOPROXY"], env["PWD"], strings.Join(args, " "))
+}
+
+// getSizesForArgs queries 'go list' for the appropriate
+// Compiler and GOARCH arguments to pass to [types.SizesFor].
+func getSizesForArgs(ctx context.Context, inv gocommand.Invocation, gocmdRunner *gocommand.Runner) (string, string, error) {
+ inv.Verb = "list"
+ inv.Args = []string{"-f", "{{context.GOARCH}} {{context.Compiler}}", "--", "unsafe"}
+ stdout, stderr, friendlyErr, rawErr := gocmdRunner.RunRaw(ctx, inv)
+ var goarch, compiler string
+ if rawErr != nil {
+ rawErrMsg := rawErr.Error()
+ if strings.Contains(rawErrMsg, "cannot find main module") ||
+ strings.Contains(rawErrMsg, "go.mod file not found") {
+ // User's running outside of a module.
+ // All bets are off. Get GOARCH and guess compiler is gc.
+ // TODO(matloob): Is this a problem in practice?
+ inv.Verb = "env"
+ inv.Args = []string{"GOARCH"}
+ envout, enverr := gocmdRunner.Run(ctx, inv)
+ if enverr != nil {
+ return "", "", enverr
+ }
+ goarch = strings.TrimSpace(envout.String())
+ compiler = "gc"
+ } else if friendlyErr != nil {
+ return "", "", friendlyErr
+ } else {
+ // This should be unreachable, but be defensive
+ // in case RunRaw's error results are inconsistent.
+ return "", "", rawErr
+ }
+ } else {
+ fields := strings.Fields(stdout.String())
+ if len(fields) < 2 {
+ return "", "", fmt.Errorf("could not parse GOARCH and Go compiler in format \"<GOARCH> <compiler>\":\nstdout: <<%s>>\nstderr: <<%s>>",
+ stdout.String(), stderr.String())
+ }
+ goarch = fields[0]
+ compiler = fields[1]
+ }
+ return compiler, goarch, nil
+}
diff --git a/vendor/golang.org/x/tools/go/packages/golist_overlay.go b/vendor/golang.org/x/tools/go/packages/golist_overlay.go
new file mode 100644
index 0000000..d823c47
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/packages/golist_overlay.go
@@ -0,0 +1,83 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package packages
+
+import (
+ "encoding/json"
+ "path/filepath"
+
+ "golang.org/x/tools/internal/gocommand"
+)
+
+// determineRootDirs returns a mapping from absolute directories that could
+// contain code to their corresponding import path prefixes.
+func (state *golistState) determineRootDirs() (map[string]string, error) {
+ env, err := state.getEnv()
+ if err != nil {
+ return nil, err
+ }
+ if env["GOMOD"] != "" {
+ state.rootsOnce.Do(func() {
+ state.rootDirs, state.rootDirsError = state.determineRootDirsModules()
+ })
+ } else {
+ state.rootsOnce.Do(func() {
+ state.rootDirs, state.rootDirsError = state.determineRootDirsGOPATH()
+ })
+ }
+ return state.rootDirs, state.rootDirsError
+}
+
+func (state *golistState) determineRootDirsModules() (map[string]string, error) {
+ // List all of the modules--the first will be the directory for the main
+ // module. Any replaced modules will also need to be treated as roots.
+ // Editing files in the module cache isn't a great idea, so we don't
+ // plan to ever support that.
+ out, err := state.invokeGo("list", "-m", "-json", "all")
+ if err != nil {
+ // 'go list all' will fail if we're outside of a module and
+ // GO111MODULE=on. Try falling back without 'all'.
+ var innerErr error
+ out, innerErr = state.invokeGo("list", "-m", "-json")
+ if innerErr != nil {
+ return nil, err
+ }
+ }
+ roots := map[string]string{}
+ modules := map[string]string{}
+ var i int
+ for dec := json.NewDecoder(out); dec.More(); {
+ mod := new(gocommand.ModuleJSON)
+ if err := dec.Decode(mod); err != nil {
+ return nil, err
+ }
+ if mod.Dir != "" && mod.Path != "" {
+ // This is a valid module; add it to the map.
+ absDir, err := filepath.Abs(mod.Dir)
+ if err != nil {
+ return nil, err
+ }
+ modules[absDir] = mod.Path
+ // The first result is the main module.
+ if i == 0 || mod.Replace != nil && mod.Replace.Path != "" {
+ roots[absDir] = mod.Path
+ }
+ }
+ i++
+ }
+ return roots, nil
+}
+
+func (state *golistState) determineRootDirsGOPATH() (map[string]string, error) {
+ m := map[string]string{}
+ for _, dir := range filepath.SplitList(state.mustGetEnv()["GOPATH"]) {
+ absDir, err := filepath.Abs(dir)
+ if err != nil {
+ return nil, err
+ }
+ m[filepath.Join(absDir, "src")] = ""
+ }
+ return m, nil
+}
diff --git a/vendor/golang.org/x/tools/go/packages/loadmode_string.go b/vendor/golang.org/x/tools/go/packages/loadmode_string.go
new file mode 100644
index 0000000..5c080d2
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/packages/loadmode_string.go
@@ -0,0 +1,57 @@
+// Copyright 2019 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package packages
+
+import (
+ "fmt"
+ "strings"
+)
+
+var allModes = []LoadMode{
+ NeedName,
+ NeedFiles,
+ NeedCompiledGoFiles,
+ NeedImports,
+ NeedDeps,
+ NeedExportFile,
+ NeedTypes,
+ NeedSyntax,
+ NeedTypesInfo,
+ NeedTypesSizes,
+}
+
+var modeStrings = []string{
+ "NeedName",
+ "NeedFiles",
+ "NeedCompiledGoFiles",
+ "NeedImports",
+ "NeedDeps",
+ "NeedExportFile",
+ "NeedTypes",
+ "NeedSyntax",
+ "NeedTypesInfo",
+ "NeedTypesSizes",
+}
+
+func (mod LoadMode) String() string {
+ m := mod
+ if m == 0 {
+ return "LoadMode(0)"
+ }
+ var out []string
+ for i, x := range allModes {
+ if x > m {
+ break
+ }
+ if (m & x) != 0 {
+ out = append(out, modeStrings[i])
+ m = m ^ x
+ }
+ }
+ if m != 0 {
+ out = append(out, "Unknown")
+ }
+ return fmt.Sprintf("LoadMode(%s)", strings.Join(out, "|"))
+}
diff --git a/vendor/golang.org/x/tools/go/packages/packages.go b/vendor/golang.org/x/tools/go/packages/packages.go
new file mode 100644
index 0000000..0b6bfaf
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/packages/packages.go
@@ -0,0 +1,1515 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package packages
+
+// See doc.go for package documentation and implementation notes.
+
+import (
+ "context"
+ "encoding/json"
+ "errors"
+ "fmt"
+ "go/ast"
+ "go/parser"
+ "go/scanner"
+ "go/token"
+ "go/types"
+ "io"
+ "log"
+ "os"
+ "path/filepath"
+ "runtime"
+ "strings"
+ "sync"
+ "time"
+
+ "golang.org/x/sync/errgroup"
+
+ "golang.org/x/tools/go/gcexportdata"
+ "golang.org/x/tools/internal/gocommand"
+ "golang.org/x/tools/internal/packagesinternal"
+ "golang.org/x/tools/internal/typesinternal"
+ "golang.org/x/tools/internal/versions"
+)
+
+// A LoadMode controls the amount of detail to return when loading.
+// The bits below can be combined to specify which fields should be
+// filled in the result packages.
+//
+// The zero value is a special case, equivalent to combining
+// the NeedName, NeedFiles, and NeedCompiledGoFiles bits.
+//
+// ID and Errors (if present) will always be filled.
+// [Load] may return more information than requested.
+//
+// Unfortunately there are a number of open bugs related to
+// interactions among the LoadMode bits:
+// - https://github.com/golang/go/issues/56633
+// - https://github.com/golang/go/issues/56677
+// - https://github.com/golang/go/issues/58726
+// - https://github.com/golang/go/issues/63517
+type LoadMode int
+
+const (
+ // NeedName adds Name and PkgPath.
+ NeedName LoadMode = 1 << iota
+
+ // NeedFiles adds GoFiles and OtherFiles.
+ NeedFiles
+
+ // NeedCompiledGoFiles adds CompiledGoFiles.
+ NeedCompiledGoFiles
+
+ // NeedImports adds Imports. If NeedDeps is not set, the Imports field will contain
+ // "placeholder" Packages with only the ID set.
+ NeedImports
+
+ // NeedDeps adds the fields requested by the LoadMode in the packages in Imports.
+ NeedDeps
+
+ // NeedExportFile adds ExportFile.
+ NeedExportFile
+
+ // NeedTypes adds Types, Fset, and IllTyped.
+ NeedTypes
+
+ // NeedSyntax adds Syntax and Fset.
+ NeedSyntax
+
+ // NeedTypesInfo adds TypesInfo.
+ NeedTypesInfo
+
+ // NeedTypesSizes adds TypesSizes.
+ NeedTypesSizes
+
+ // needInternalDepsErrors adds the internal deps errors field for use by gopls.
+ needInternalDepsErrors
+
+ // needInternalForTest adds the internal forTest field.
+ // Tests must also be set on the context for this field to be populated.
+ needInternalForTest
+
+ // typecheckCgo enables full support for type checking cgo. Requires Go 1.15+.
+ // Modifies CompiledGoFiles and Types, and has no effect on its own.
+ typecheckCgo
+
+ // NeedModule adds Module.
+ NeedModule
+
+ // NeedEmbedFiles adds EmbedFiles.
+ NeedEmbedFiles
+
+ // NeedEmbedPatterns adds EmbedPatterns.
+ NeedEmbedPatterns
+)
+
+const (
+ // Deprecated: LoadFiles exists for historical compatibility
+ // and should not be used. Please directly specify the needed fields using the Need values.
+ LoadFiles = NeedName | NeedFiles | NeedCompiledGoFiles
+
+ // Deprecated: LoadImports exists for historical compatibility
+ // and should not be used. Please directly specify the needed fields using the Need values.
+ LoadImports = LoadFiles | NeedImports
+
+ // Deprecated: LoadTypes exists for historical compatibility
+ // and should not be used. Please directly specify the needed fields using the Need values.
+ LoadTypes = LoadImports | NeedTypes | NeedTypesSizes
+
+ // Deprecated: LoadSyntax exists for historical compatibility
+ // and should not be used. Please directly specify the needed fields using the Need values.
+ LoadSyntax = LoadTypes | NeedSyntax | NeedTypesInfo
+
+ // Deprecated: LoadAllSyntax exists for historical compatibility
+ // and should not be used. Please directly specify the needed fields using the Need values.
+ LoadAllSyntax = LoadSyntax | NeedDeps
+
+ // Deprecated: NeedExportsFile is a historical misspelling of NeedExportFile.
+ NeedExportsFile = NeedExportFile
+)
+
+// A Config specifies details about how packages should be loaded.
+// The zero value is a valid configuration.
+//
+// Calls to Load do not modify this struct.
+//
+// TODO(adonovan): #67702: this is currently false: in fact,
+// calls to [Load] do not modify the public fields of this struct, but
+// may modify hidden fields, so concurrent calls to [Load] must not
+// use the same Config. But perhaps we should reestablish the
+// documented invariant.
+type Config struct {
+ // Mode controls the level of information returned for each package.
+ Mode LoadMode
+
+ // Context specifies the context for the load operation.
+ // Cancelling the context may cause [Load] to abort and
+ // return an error.
+ Context context.Context
+
+ // Logf is the logger for the config.
+ // If the user provides a logger, debug logging is enabled.
+ // If the GOPACKAGESDEBUG environment variable is set to true,
+ // but the logger is nil, default to log.Printf.
+ Logf func(format string, args ...interface{})
+
+ // Dir is the directory in which to run the build system's query tool
+ // that provides information about the packages.
+ // If Dir is empty, the tool is run in the current directory.
+ Dir string
+
+ // Env is the environment to use when invoking the build system's query tool.
+ // If Env is nil, the current environment is used.
+ // As in os/exec's Cmd, only the last value in the slice for
+ // each environment key is used. To specify the setting of only
+ // a few variables, append to the current environment, as in:
+ //
+ // opt.Env = append(os.Environ(), "GOOS=plan9", "GOARCH=386")
+ //
+ Env []string
+
+ // gocmdRunner guards go command calls from concurrency errors.
+ gocmdRunner *gocommand.Runner
+
+ // BuildFlags is a list of command-line flags to be passed through to
+ // the build system's query tool.
+ BuildFlags []string
+
+ // modFile will be used for -modfile in go command invocations.
+ modFile string
+
+ // modFlag will be used for -modfile in go command invocations.
+ modFlag string
+
+ // Fset provides source position information for syntax trees and types.
+ // If Fset is nil, Load will use a new fileset, but preserve Fset's value.
+	// If Fset is nil, Load will use a new FileSet and leave this field unchanged.
+
+ // ParseFile is called to read and parse each file
+ // when preparing a package's type-checked syntax tree.
+ // It must be safe to call ParseFile simultaneously from multiple goroutines.
+	// If ParseFile is nil, the loader will use parser.ParseFile.
+ //
+ // ParseFile should parse the source from src and use filename only for
+ // recording position information.
+ //
+ // An application may supply a custom implementation of ParseFile
+ // to change the effective file contents or the behavior of the parser,
+ // or to modify the syntax tree. For example, selectively eliminating
+ // unwanted function bodies can significantly accelerate type checking.
+ ParseFile func(fset *token.FileSet, filename string, src []byte) (*ast.File, error)
+
+ // If Tests is set, the loader includes not just the packages
+ // matching a particular pattern but also any related test packages,
+ // including test-only variants of the package and the test executable.
+ //
+ // For example, when using the go command, loading "fmt" with Tests=true
+ // returns four packages, with IDs "fmt" (the standard package),
+ // "fmt [fmt.test]" (the package as compiled for the test),
+ // "fmt_test" (the test functions from source files in package fmt_test),
+ // and "fmt.test" (the test binary).
+ //
+ // In build systems with explicit names for tests,
+ // setting Tests may have no effect.
+ Tests bool
+
+ // Overlay is a mapping from absolute file paths to file contents.
+ //
+ // For each map entry, [Load] uses the alternative file
+ // contents provided by the overlay mapping instead of reading
+ // from the file system. This mechanism can be used to enable
+ // editor-integrated tools to correctly analyze the contents
+ // of modified but unsaved buffers, for example.
+ //
+ // The overlay mapping is passed to the build system's driver
+ // (see "The driver protocol") so that it too can report
+ // consistent package metadata about unsaved files. However,
+ // drivers may vary in their level of support for overlays.
+ Overlay map[string][]byte
+
+ // goListOverlayFile is the JSON file that encodes the Overlay
+ // mapping, used by 'go list -overlay=...'
+ goListOverlayFile string
+}
+
+// Load loads and returns the Go packages named by the given patterns.
+//
+// Config specifies loading options;
+// nil behaves the same as an empty Config.
+//
+// The [Config.Mode] field is a set of bits that determine what kinds
+// of information should be computed and returned. Modes that require
+// more information tend to be slower. See [LoadMode] for details
+// and important caveats. Its zero value is equivalent to
+// NeedName | NeedFiles | NeedCompiledGoFiles.
+//
+// Each call to Load returns a new set of [Package] instances.
+// The Packages and their Imports form a directed acyclic graph.
+//
+// If the [NeedTypes] mode flag was set, each call to Load uses a new
+// [types.Importer], so [types.Object] and [types.Type] values from
+// different calls to Load must not be mixed as they will have
+// inconsistent notions of type identity.
+//
+// If any of the patterns was invalid as defined by the
+// underlying build system, Load returns an error.
+// It may return an empty list of packages without an error,
+// for instance for an empty expansion of a valid wildcard.
+// Errors associated with a particular package are recorded in the
+// corresponding Package's Errors list, and do not cause Load to
+// return an error. Clients may need to handle such errors before
+// proceeding with further analysis. The PrintErrors function is
+// provided for convenient display of all errors.
+func Load(cfg *Config, patterns ...string) ([]*Package, error) {
+ ld := newLoader(cfg)
+ response, external, err := defaultDriver(&ld.Config, patterns...)
+ if err != nil {
+ return nil, err
+ }
+
+ ld.sizes = types.SizesFor(response.Compiler, response.Arch)
+ if ld.sizes == nil && ld.Config.Mode&(NeedTypes|NeedTypesSizes|NeedTypesInfo) != 0 {
+ // Type size information is needed but unavailable.
+ if external {
+ // An external driver may fail to populate the Compiler/GOARCH fields,
+ // especially since they are relatively new (see #63700).
+ // Provide a sensible fallback in this case.
+ ld.sizes = types.SizesFor("gc", runtime.GOARCH)
+ if ld.sizes == nil { // gccgo-only arch
+ ld.sizes = types.SizesFor("gc", "amd64")
+ }
+ } else {
+ // Go list should never fail to deliver accurate size information.
+ // Reject the whole Load since the error is the same for every package.
+ return nil, fmt.Errorf("can't determine type sizes for compiler %q on GOARCH %q",
+ response.Compiler, response.Arch)
+ }
+ }
+
+ return ld.refine(response)
+}
+
+// defaultDriver is a driver that implements go/packages' fallback behavior.
+// It will try to request to an external driver, if one exists. If there's
+// no external driver, or the driver returns a response with NotHandled set,
+// defaultDriver will fall back to the go list driver.
+// The boolean result indicates that an external driver handled the request.
+func defaultDriver(cfg *Config, patterns ...string) (*DriverResponse, bool, error) {
+ const (
+ // windowsArgMax specifies the maximum command line length for
+ // the Windows' CreateProcess function.
+ windowsArgMax = 32767
+ // maxEnvSize is a very rough estimation of the maximum environment
+ // size of a user.
+ maxEnvSize = 16384
+ // safeArgMax specifies the maximum safe command line length to use
+ // by the underlying driver excl. the environment. We choose the Windows'
+ // ARG_MAX as the starting point because it's one of the lowest ARG_MAX
+ // constants out of the different supported platforms,
+ // e.g., https://www.in-ulm.de/~mascheck/various/argmax/#results.
+ safeArgMax = windowsArgMax - maxEnvSize
+ )
+ chunks, err := splitIntoChunks(patterns, safeArgMax)
+ if err != nil {
+ return nil, false, err
+ }
+
+ if driver := findExternalDriver(cfg); driver != nil {
+ response, err := callDriverOnChunks(driver, cfg, chunks)
+ if err != nil {
+ return nil, false, err
+ } else if !response.NotHandled {
+ return response, true, nil
+ }
+ // (fall through)
+ }
+
+ // go list fallback
+ //
+ // Write overlays once, as there are many calls
+ // to 'go list' (one per chunk plus others too).
+ overlay, cleanupOverlay, err := gocommand.WriteOverlays(cfg.Overlay)
+ if err != nil {
+ return nil, false, err
+ }
+ defer cleanupOverlay()
+ cfg.goListOverlayFile = overlay
+
+ response, err := callDriverOnChunks(goListDriver, cfg, chunks)
+ if err != nil {
+ return nil, false, err
+ }
+ return response, false, err
+}
+
+// splitIntoChunks chunks the slice so that the total number of characters
+// in a chunk is no longer than argMax.
+func splitIntoChunks(patterns []string, argMax int) ([][]string, error) {
+ if argMax <= 0 {
+		return nil, errors.New("failed to split patterns into chunks, non-positive argMax value")
+ }
+ var chunks [][]string
+ charsInChunk := 0
+ nextChunkStart := 0
+ for i, v := range patterns {
+ vChars := len(v)
+ if vChars > argMax {
+ // a single pattern is longer than the maximum safe ARG_MAX, hardly should happen
+ return nil, errors.New("failed to split patterns into chunks, a pattern is too long")
+ }
+ charsInChunk += vChars + 1 // +1 is for a whitespace between patterns that has to be counted too
+ if charsInChunk > argMax {
+ chunks = append(chunks, patterns[nextChunkStart:i])
+ nextChunkStart = i
+ charsInChunk = vChars
+ }
+ }
+ // add the last chunk
+ if nextChunkStart < len(patterns) {
+ chunks = append(chunks, patterns[nextChunkStart:])
+ }
+ return chunks, nil
+}
+
+func callDriverOnChunks(driver driver, cfg *Config, chunks [][]string) (*DriverResponse, error) {
+ if len(chunks) == 0 {
+ return driver(cfg)
+ }
+ responses := make([]*DriverResponse, len(chunks))
+ errNotHandled := errors.New("driver returned NotHandled")
+ var g errgroup.Group
+ for i, chunk := range chunks {
+ i := i
+ chunk := chunk
+ g.Go(func() (err error) {
+ responses[i], err = driver(cfg, chunk...)
+ if responses[i] != nil && responses[i].NotHandled {
+ err = errNotHandled
+ }
+ return err
+ })
+ }
+ if err := g.Wait(); err != nil {
+ if errors.Is(err, errNotHandled) {
+ return &DriverResponse{NotHandled: true}, nil
+ }
+ return nil, err
+ }
+ return mergeResponses(responses...), nil
+}
+
+func mergeResponses(responses ...*DriverResponse) *DriverResponse {
+ if len(responses) == 0 {
+ return nil
+ }
+ response := newDeduper()
+ response.dr.NotHandled = false
+ response.dr.Compiler = responses[0].Compiler
+ response.dr.Arch = responses[0].Arch
+ response.dr.GoVersion = responses[0].GoVersion
+ for _, v := range responses {
+ response.addAll(v)
+ }
+ return response.dr
+}
+
+// A Package describes a loaded Go package.
+//
+// It also defines part of the JSON schema of [DriverResponse].
+// See the package documentation for an overview.
+type Package struct {
+ // ID is a unique identifier for a package,
+ // in a syntax provided by the underlying build system.
+ //
+ // Because the syntax varies based on the build system,
+ // clients should treat IDs as opaque and not attempt to
+ // interpret them.
+ ID string
+
+ // Name is the package name as it appears in the package source code.
+ Name string
+
+ // PkgPath is the package path as used by the go/types package.
+ PkgPath string
+
+ // Errors contains any errors encountered querying the metadata
+ // of the package, or while parsing or type-checking its files.
+ Errors []Error
+
+ // TypeErrors contains the subset of errors produced during type checking.
+ TypeErrors []types.Error
+
+ // GoFiles lists the absolute file paths of the package's Go source files.
+ // It may include files that should not be compiled, for example because
+ // they contain non-matching build tags, are documentary pseudo-files such as
+ // unsafe/unsafe.go or builtin/builtin.go, or are subject to cgo preprocessing.
+ GoFiles []string
+
+ // CompiledGoFiles lists the absolute file paths of the package's source
+ // files that are suitable for type checking.
+ // This may differ from GoFiles if files are processed before compilation.
+ CompiledGoFiles []string
+
+ // OtherFiles lists the absolute file paths of the package's non-Go source files,
+ // including assembly, C, C++, Fortran, Objective-C, SWIG, and so on.
+ OtherFiles []string
+
+ // EmbedFiles lists the absolute file paths of the package's files
+ // embedded with go:embed.
+ EmbedFiles []string
+
+ // EmbedPatterns lists the absolute file patterns of the package's
+ // files embedded with go:embed.
+ EmbedPatterns []string
+
+ // IgnoredFiles lists source files that are not part of the package
+ // using the current build configuration but that might be part of
+ // the package using other build configurations.
+ IgnoredFiles []string
+
+ // ExportFile is the absolute path to a file containing type
+ // information for the package as provided by the build system.
+ ExportFile string
+
+ // Imports maps import paths appearing in the package's Go source files
+ // to corresponding loaded Packages.
+ Imports map[string]*Package
+
+ // Module is the module information for the package if it exists.
+ //
+ // Note: it may be missing for std and cmd; see Go issue #65816.
+ Module *Module
+
+ // -- The following fields are not part of the driver JSON schema. --
+
+ // Types provides type information for the package.
+ // The NeedTypes LoadMode bit sets this field for packages matching the
+ // patterns; type information for dependencies may be missing or incomplete,
+ // unless NeedDeps and NeedImports are also set.
+ //
+ // Each call to [Load] returns a consistent set of type
+ // symbols, as defined by the comment at [types.Identical].
+ // Avoid mixing type information from two or more calls to [Load].
+ Types *types.Package `json:"-"`
+
+ // Fset provides position information for Types, TypesInfo, and Syntax.
+ // It is set only when Types is set.
+ Fset *token.FileSet `json:"-"`
+
+ // IllTyped indicates whether the package or any dependency contains errors.
+ // It is set only when Types is set.
+ IllTyped bool `json:"-"`
+
+ // Syntax is the package's syntax trees, for the files listed in CompiledGoFiles.
+ //
+ // The NeedSyntax LoadMode bit populates this field for packages matching the patterns.
+ // If NeedDeps and NeedImports are also set, this field will also be populated
+ // for dependencies.
+ //
+ // Syntax is kept in the same order as CompiledGoFiles, with the caveat that nils are
+ // removed. If parsing returned nil, Syntax may be shorter than CompiledGoFiles.
+ Syntax []*ast.File `json:"-"`
+
+ // TypesInfo provides type information about the package's syntax trees.
+ // It is set only when Syntax is set.
+ TypesInfo *types.Info `json:"-"`
+
+ // TypesSizes provides the effective size function for types in TypesInfo.
+ TypesSizes types.Sizes `json:"-"`
+
+ // -- internal --
+
+ // forTest is the package under test, if any.
+ forTest string
+
+ // depsErrors is the DepsErrors field from the go list response, if any.
+ depsErrors []*packagesinternal.PackageError
+}
+
+// Module provides module information for a package.
+//
+// It also defines part of the JSON schema of [DriverResponse].
+// See the package documentation for an overview.
+type Module struct {
+ Path string // module path
+ Version string // module version
+ Replace *Module // replaced by this module
+ Time *time.Time // time version was created
+ Main bool // is this the main module?
+ Indirect bool // is this module only an indirect dependency of main module?
+ Dir string // directory holding files for this module, if any
+ GoMod string // path to go.mod file used when loading this module, if any
+ GoVersion string // go version used in module
+ Error *ModuleError // error loading module
+}
+
+// ModuleError holds errors loading a module.
+type ModuleError struct {
+ Err string // the error itself
+}
+
+func init() {
+ packagesinternal.GetForTest = func(p interface{}) string {
+ return p.(*Package).forTest
+ }
+ packagesinternal.GetDepsErrors = func(p interface{}) []*packagesinternal.PackageError {
+ return p.(*Package).depsErrors
+ }
+ packagesinternal.SetModFile = func(config interface{}, value string) {
+ config.(*Config).modFile = value
+ }
+ packagesinternal.SetModFlag = func(config interface{}, value string) {
+ config.(*Config).modFlag = value
+ }
+ packagesinternal.TypecheckCgo = int(typecheckCgo)
+ packagesinternal.DepsErrors = int(needInternalDepsErrors)
+ packagesinternal.ForTest = int(needInternalForTest)
+}
+
+// An Error describes a problem with a package's metadata, syntax, or types.
+type Error struct {
+ Pos string // "file:line:col" or "file:line" or "" or "-"
+ Msg string
+ Kind ErrorKind
+}
+
+// ErrorKind describes the source of the error, allowing the user to
+// differentiate between errors generated by the driver, the parser, or the
+// type-checker.
+type ErrorKind int
+
+const (
+ UnknownError ErrorKind = iota
+ ListError
+ ParseError
+ TypeError
+)
+
+func (err Error) Error() string {
+ pos := err.Pos
+ if pos == "" {
+ pos = "-" // like token.Position{}.String()
+ }
+ return pos + ": " + err.Msg
+}
+
+// flatPackage is the JSON form of Package.
+// It drops all the type and syntax fields, and transforms the Imports
+// into a map from import path to package ID.
+//
+// TODO(adonovan): identify this struct with Package, effectively
+// publishing the JSON protocol.
+type flatPackage struct {
+ ID string
+ Name string `json:",omitempty"`
+ PkgPath string `json:",omitempty"`
+ Errors []Error `json:",omitempty"`
+ GoFiles []string `json:",omitempty"`
+ CompiledGoFiles []string `json:",omitempty"`
+ OtherFiles []string `json:",omitempty"`
+ EmbedFiles []string `json:",omitempty"`
+ EmbedPatterns []string `json:",omitempty"`
+ IgnoredFiles []string `json:",omitempty"`
+ ExportFile string `json:",omitempty"`
+ Imports map[string]string `json:",omitempty"`
+}
+
+// MarshalJSON returns the Package in its JSON form.
+// For the most part, the structure fields are written out unmodified, and
+// the type and syntax fields are skipped.
+// The imports are written out as just a map of path to package id.
+// The errors are written using a custom type that tries to preserve the
+// structure of error types we know about.
+//
+// This method exists to enable support for additional build systems. It is
+// not intended for use by clients of the API and we may change the format.
+func (p *Package) MarshalJSON() ([]byte, error) {
+ flat := &flatPackage{
+ ID: p.ID,
+ Name: p.Name,
+ PkgPath: p.PkgPath,
+ Errors: p.Errors,
+ GoFiles: p.GoFiles,
+ CompiledGoFiles: p.CompiledGoFiles,
+ OtherFiles: p.OtherFiles,
+ EmbedFiles: p.EmbedFiles,
+ EmbedPatterns: p.EmbedPatterns,
+ IgnoredFiles: p.IgnoredFiles,
+ ExportFile: p.ExportFile,
+ }
+ if len(p.Imports) > 0 {
+ flat.Imports = make(map[string]string, len(p.Imports))
+ for path, ipkg := range p.Imports {
+ flat.Imports[path] = ipkg.ID
+ }
+ }
+ return json.Marshal(flat)
+}
+
+// UnmarshalJSON reads in a Package from its JSON format.
+// See MarshalJSON for details about the format accepted.
+func (p *Package) UnmarshalJSON(b []byte) error {
+ flat := &flatPackage{}
+ if err := json.Unmarshal(b, &flat); err != nil {
+ return err
+ }
+ *p = Package{
+ ID: flat.ID,
+ Name: flat.Name,
+ PkgPath: flat.PkgPath,
+ Errors: flat.Errors,
+ GoFiles: flat.GoFiles,
+ CompiledGoFiles: flat.CompiledGoFiles,
+ OtherFiles: flat.OtherFiles,
+ EmbedFiles: flat.EmbedFiles,
+ EmbedPatterns: flat.EmbedPatterns,
+ IgnoredFiles: flat.IgnoredFiles,
+ ExportFile: flat.ExportFile,
+ }
+ if len(flat.Imports) > 0 {
+ p.Imports = make(map[string]*Package, len(flat.Imports))
+ for path, id := range flat.Imports {
+ p.Imports[path] = &Package{ID: id}
+ }
+ }
+ return nil
+}
+
+func (p *Package) String() string { return p.ID }
+
+// loaderPackage augments Package with state used during the loading phase
+type loaderPackage struct {
+ *Package
+ importErrors map[string]error // maps each bad import to its error
+ loadOnce sync.Once
+ color uint8 // for cycle detection
+ needsrc bool // load from source (Mode >= LoadTypes)
+ needtypes bool // type information is either requested or depended on
+ initial bool // package was matched by a pattern
+ goVersion int // minor version number of go command on PATH
+}
+
+// loader holds the working state of a single call to load.
+type loader struct {
+ pkgs map[string]*loaderPackage
+ Config
+ sizes types.Sizes // non-nil if needed by mode
+ parseCache map[string]*parseValue
+ parseCacheMu sync.Mutex
+ exportMu sync.Mutex // enforces mutual exclusion of exportdata operations
+
+ // Config.Mode contains the implied mode (see impliedLoadMode).
+ // Implied mode contains all the fields we need the data for.
+	// requestedMode records the fields the caller actually asked for;
+	// implied-but-unrequested fields are zeroed out before packages are
+	// returned to the user.
+ // This makes it easier for us to get the conditions where
+ // we need certain modes right.
+ requestedMode LoadMode
+}
+
+type parseValue struct {
+ f *ast.File
+ err error
+ ready chan struct{}
+}
+
+func newLoader(cfg *Config) *loader {
+ ld := &loader{
+ parseCache: map[string]*parseValue{},
+ }
+ if cfg != nil {
+ ld.Config = *cfg
+ // If the user has provided a logger, use it.
+ ld.Config.Logf = cfg.Logf
+ }
+ if ld.Config.Logf == nil {
+ // If the GOPACKAGESDEBUG environment variable is set to true,
+ // but the user has not provided a logger, default to log.Printf.
+ if debug {
+ ld.Config.Logf = log.Printf
+ } else {
+ ld.Config.Logf = func(format string, args ...interface{}) {}
+ }
+ }
+ if ld.Config.Mode == 0 {
+ ld.Config.Mode = NeedName | NeedFiles | NeedCompiledGoFiles // Preserve zero behavior of Mode for backwards compatibility.
+ }
+ if ld.Config.Env == nil {
+ ld.Config.Env = os.Environ()
+ }
+ if ld.Config.gocmdRunner == nil {
+ ld.Config.gocmdRunner = &gocommand.Runner{}
+ }
+ if ld.Context == nil {
+ ld.Context = context.Background()
+ }
+ if ld.Dir == "" {
+ if dir, err := os.Getwd(); err == nil {
+ ld.Dir = dir
+ }
+ }
+
+ // Save the actually requested fields. We'll zero them out before returning packages to the user.
+ ld.requestedMode = ld.Mode
+ ld.Mode = impliedLoadMode(ld.Mode)
+
+ if ld.Mode&NeedTypes != 0 || ld.Mode&NeedSyntax != 0 {
+ if ld.Fset == nil {
+ ld.Fset = token.NewFileSet()
+ }
+
+ // ParseFile is required even in LoadTypes mode
+ // because we load source if export data is missing.
+ if ld.ParseFile == nil {
+ ld.ParseFile = func(fset *token.FileSet, filename string, src []byte) (*ast.File, error) {
+ const mode = parser.AllErrors | parser.ParseComments
+ return parser.ParseFile(fset, filename, src, mode)
+ }
+ }
+ }
+
+ return ld
+}
+
+// refine connects the supplied packages into a graph and then adds type
+// and syntax information as requested by the LoadMode.
+func (ld *loader) refine(response *DriverResponse) ([]*Package, error) {
+ roots := response.Roots
+ rootMap := make(map[string]int, len(roots))
+ for i, root := range roots {
+ rootMap[root] = i
+ }
+ ld.pkgs = make(map[string]*loaderPackage)
+ // first pass, fixup and build the map and roots
+ var initial = make([]*loaderPackage, len(roots))
+ for _, pkg := range response.Packages {
+ rootIndex := -1
+ if i, found := rootMap[pkg.ID]; found {
+ rootIndex = i
+ }
+
+ // Overlays can invalidate export data.
+ // TODO(matloob): make this check fine-grained based on dependencies on overlaid files
+ exportDataInvalid := len(ld.Overlay) > 0 || pkg.ExportFile == "" && pkg.PkgPath != "unsafe"
+ // This package needs type information if the caller requested types and the package is
+ // either a root, or it's a non-root and the user requested dependencies ...
+ needtypes := (ld.Mode&NeedTypes|NeedTypesInfo != 0 && (rootIndex >= 0 || ld.Mode&NeedDeps != 0))
+ // This package needs source if the call requested source (or types info, which implies source)
+		// and the package is either a root, or it is a non-root and the user requested dependencies ...
+ needsrc := ((ld.Mode&(NeedSyntax|NeedTypesInfo) != 0 && (rootIndex >= 0 || ld.Mode&NeedDeps != 0)) ||
+ // ... or if we need types and the exportData is invalid. We fall back to (incompletely)
+ // typechecking packages from source if they fail to compile.
+ (ld.Mode&(NeedTypes|NeedTypesInfo) != 0 && exportDataInvalid)) && pkg.PkgPath != "unsafe"
+ lpkg := &loaderPackage{
+ Package: pkg,
+ needtypes: needtypes,
+ needsrc: needsrc,
+ goVersion: response.GoVersion,
+ }
+ ld.pkgs[lpkg.ID] = lpkg
+ if rootIndex >= 0 {
+ initial[rootIndex] = lpkg
+ lpkg.initial = true
+ }
+ }
+ for i, root := range roots {
+ if initial[i] == nil {
+ return nil, fmt.Errorf("root package %v is missing", root)
+ }
+ }
+
+ if ld.Mode&NeedImports != 0 {
+ // Materialize the import graph.
+
+ const (
+ white = 0 // new
+ grey = 1 // in progress
+ black = 2 // complete
+ )
+
+ // visit traverses the import graph, depth-first,
+ // and materializes the graph as Packages.Imports.
+ //
+ // Valid imports are saved in the Packages.Import map.
+ // Invalid imports (cycles and missing nodes) are saved in the importErrors map.
+ // Thus, even in the presence of both kinds of errors,
+ // the Import graph remains a DAG.
+ //
+ // visit returns whether the package needs src or has a transitive
+ // dependency on a package that does. These are the only packages
+ // for which we load source code.
+ var stack []*loaderPackage
+ var visit func(lpkg *loaderPackage) bool
+ visit = func(lpkg *loaderPackage) bool {
+ switch lpkg.color {
+ case black:
+ return lpkg.needsrc
+ case grey:
+ panic("internal error: grey node")
+ }
+ lpkg.color = grey
+ stack = append(stack, lpkg) // push
+ stubs := lpkg.Imports // the structure form has only stubs with the ID in the Imports
+ lpkg.Imports = make(map[string]*Package, len(stubs))
+ for importPath, ipkg := range stubs {
+ var importErr error
+ imp := ld.pkgs[ipkg.ID]
+ if imp == nil {
+ // (includes package "C" when DisableCgo)
+ importErr = fmt.Errorf("missing package: %q", ipkg.ID)
+ } else if imp.color == grey {
+ importErr = fmt.Errorf("import cycle: %s", stack)
+ }
+ if importErr != nil {
+ if lpkg.importErrors == nil {
+ lpkg.importErrors = make(map[string]error)
+ }
+ lpkg.importErrors[importPath] = importErr
+ continue
+ }
+
+ if visit(imp) {
+ lpkg.needsrc = true
+ }
+ lpkg.Imports[importPath] = imp.Package
+ }
+
+ // Complete type information is required for the
+ // immediate dependencies of each source package.
+ if lpkg.needsrc && ld.Mode&NeedTypes != 0 {
+ for _, ipkg := range lpkg.Imports {
+ ld.pkgs[ipkg.ID].needtypes = true
+ }
+ }
+
+ // NeedTypeSizes causes TypeSizes to be set even
+ // on packages for which types aren't needed.
+ if ld.Mode&NeedTypesSizes != 0 {
+ lpkg.TypesSizes = ld.sizes
+ }
+ stack = stack[:len(stack)-1] // pop
+ lpkg.color = black
+
+ return lpkg.needsrc
+ }
+
+ // For each initial package, create its import DAG.
+ for _, lpkg := range initial {
+ visit(lpkg)
+ }
+
+ } else {
+ // !NeedImports: drop the stub (ID-only) import packages
+ // that we are not even going to try to resolve.
+ for _, lpkg := range initial {
+ lpkg.Imports = nil
+ }
+ }
+
+ // Load type data and syntax if needed, starting at
+ // the initial packages (roots of the import DAG).
+ if ld.Mode&NeedTypes != 0 || ld.Mode&NeedSyntax != 0 {
+ var wg sync.WaitGroup
+ for _, lpkg := range initial {
+ wg.Add(1)
+ go func(lpkg *loaderPackage) {
+ ld.loadRecursive(lpkg)
+ wg.Done()
+ }(lpkg)
+ }
+ wg.Wait()
+ }
+
+ // If the context is done, return its error and
+ // throw out [likely] incomplete packages.
+ if err := ld.Context.Err(); err != nil {
+ return nil, err
+ }
+
+ result := make([]*Package, len(initial))
+ for i, lpkg := range initial {
+ result[i] = lpkg.Package
+ }
+ for i := range ld.pkgs {
+ // Clear all unrequested fields,
+ // to catch programs that use more than they request.
+ if ld.requestedMode&NeedName == 0 {
+ ld.pkgs[i].Name = ""
+ ld.pkgs[i].PkgPath = ""
+ }
+ if ld.requestedMode&NeedFiles == 0 {
+ ld.pkgs[i].GoFiles = nil
+ ld.pkgs[i].OtherFiles = nil
+ ld.pkgs[i].IgnoredFiles = nil
+ }
+ if ld.requestedMode&NeedEmbedFiles == 0 {
+ ld.pkgs[i].EmbedFiles = nil
+ }
+ if ld.requestedMode&NeedEmbedPatterns == 0 {
+ ld.pkgs[i].EmbedPatterns = nil
+ }
+ if ld.requestedMode&NeedCompiledGoFiles == 0 {
+ ld.pkgs[i].CompiledGoFiles = nil
+ }
+ if ld.requestedMode&NeedImports == 0 {
+ ld.pkgs[i].Imports = nil
+ }
+ if ld.requestedMode&NeedExportFile == 0 {
+ ld.pkgs[i].ExportFile = ""
+ }
+ if ld.requestedMode&NeedTypes == 0 {
+ ld.pkgs[i].Types = nil
+ ld.pkgs[i].IllTyped = false
+ }
+ if ld.requestedMode&NeedSyntax == 0 {
+ ld.pkgs[i].Syntax = nil
+ }
+ if ld.requestedMode&NeedTypes == 0 && ld.requestedMode&NeedSyntax == 0 {
+ ld.pkgs[i].Fset = nil
+ }
+ if ld.requestedMode&NeedTypesInfo == 0 {
+ ld.pkgs[i].TypesInfo = nil
+ }
+ if ld.requestedMode&NeedTypesSizes == 0 {
+ ld.pkgs[i].TypesSizes = nil
+ }
+ if ld.requestedMode&NeedModule == 0 {
+ ld.pkgs[i].Module = nil
+ }
+ }
+
+ return result, nil
+}
+
+// loadRecursive loads the specified package and its dependencies,
+// recursively, in parallel, in topological order.
+// It is atomic and idempotent.
+// Precondition: ld.Mode&NeedTypes.
+func (ld *loader) loadRecursive(lpkg *loaderPackage) {
+ lpkg.loadOnce.Do(func() {
+ // Load the direct dependencies, in parallel.
+ var wg sync.WaitGroup
+ for _, ipkg := range lpkg.Imports {
+ imp := ld.pkgs[ipkg.ID]
+ wg.Add(1)
+ go func(imp *loaderPackage) {
+ ld.loadRecursive(imp)
+ wg.Done()
+ }(imp)
+ }
+ wg.Wait()
+ ld.loadPackage(lpkg)
+ })
+}
+
+// loadPackage loads the specified package.
+// It must be called only once per Package,
+// after immediate dependencies are loaded.
+// Precondition: ld.Mode & NeedTypes.
+func (ld *loader) loadPackage(lpkg *loaderPackage) {
+ if lpkg.PkgPath == "unsafe" {
+ // Fill in the blanks to avoid surprises.
+ lpkg.Types = types.Unsafe
+ lpkg.Fset = ld.Fset
+ lpkg.Syntax = []*ast.File{}
+ lpkg.TypesInfo = new(types.Info)
+ lpkg.TypesSizes = ld.sizes
+ return
+ }
+
+ // Call NewPackage directly with explicit name.
+ // This avoids skew between golist and go/types when the files'
+ // package declarations are inconsistent.
+ lpkg.Types = types.NewPackage(lpkg.PkgPath, lpkg.Name)
+ lpkg.Fset = ld.Fset
+
+ // Start shutting down if the context is done and do not load
+ // source or export data files.
+ // Packages that import this one will have ld.Context.Err() != nil.
+ // ld.Context.Err() will be returned later by refine.
+ if ld.Context.Err() != nil {
+ return
+ }
+
+ // Subtle: we populate all Types fields with an empty Package
+ // before loading export data so that export data processing
+ // never has to create a types.Package for an indirect dependency,
+ // which would then require that such created packages be explicitly
+ // inserted back into the Import graph as a final step after export data loading.
+ // (Hence this return is after the Types assignment.)
+ // The Diamond test exercises this case.
+ if !lpkg.needtypes && !lpkg.needsrc {
+ return
+ }
+ if !lpkg.needsrc {
+ if err := ld.loadFromExportData(lpkg); err != nil {
+ lpkg.Errors = append(lpkg.Errors, Error{
+ Pos: "-",
+ Msg: err.Error(),
+ Kind: UnknownError, // e.g. can't find/open/parse export data
+ })
+ }
+ return // not a source package, don't get syntax trees
+ }
+
+ appendError := func(err error) {
+ // Convert various error types into the one true Error.
+ var errs []Error
+ switch err := err.(type) {
+ case Error:
+ // from driver
+ errs = append(errs, err)
+
+ case *os.PathError:
+ // from parser
+ errs = append(errs, Error{
+ Pos: err.Path + ":1",
+ Msg: err.Err.Error(),
+ Kind: ParseError,
+ })
+
+ case scanner.ErrorList:
+ // from parser
+ for _, err := range err {
+ errs = append(errs, Error{
+ Pos: err.Pos.String(),
+ Msg: err.Msg,
+ Kind: ParseError,
+ })
+ }
+
+ case types.Error:
+ // from type checker
+ lpkg.TypeErrors = append(lpkg.TypeErrors, err)
+ errs = append(errs, Error{
+ Pos: err.Fset.Position(err.Pos).String(),
+ Msg: err.Msg,
+ Kind: TypeError,
+ })
+
+ default:
+ // unexpected impoverished error from parser?
+ errs = append(errs, Error{
+ Pos: "-",
+ Msg: err.Error(),
+ Kind: UnknownError,
+ })
+
+ // If you see this error message, please file a bug.
+ log.Printf("internal error: error %q (%T) without position", err, err)
+ }
+
+ lpkg.Errors = append(lpkg.Errors, errs...)
+ }
+
+ // If the go command on the PATH is newer than the runtime,
+ // then the go/{scanner,ast,parser,types} packages from the
+ // standard library may be unable to process the files
+ // selected by go list.
+ //
+ // There is currently no way to downgrade the effective
+ // version of the go command (see issue 52078), so we proceed
+ // with the newer go command but, in case of parse or type
+ // errors, we emit an additional diagnostic.
+ //
+ // See:
+ // - golang.org/issue/52078 (flag to set release tags)
+ // - golang.org/issue/50825 (gopls legacy version support)
+ // - golang.org/issue/55883 (go/packages confusing error)
+ //
+ // Should we assert a hard minimum of (currently) go1.16 here?
+ var runtimeVersion int
+ if _, err := fmt.Sscanf(runtime.Version(), "go1.%d", &runtimeVersion); err == nil && runtimeVersion < lpkg.goVersion {
+ defer func() {
+ if len(lpkg.Errors) > 0 {
+ appendError(Error{
+ Pos: "-",
+ Msg: fmt.Sprintf("This application uses version go1.%d of the source-processing packages but runs version go1.%d of 'go list'. It may fail to process source files that rely on newer language features. If so, rebuild the application using a newer version of Go.", runtimeVersion, lpkg.goVersion),
+ Kind: UnknownError,
+ })
+ }
+ }()
+ }
+
+ if ld.Config.Mode&NeedTypes != 0 && len(lpkg.CompiledGoFiles) == 0 && lpkg.ExportFile != "" {
+ // The config requested loading sources and types, but sources are missing.
+ // Add an error to the package and fall back to loading from export data.
+ appendError(Error{"-", fmt.Sprintf("sources missing for package %s", lpkg.ID), ParseError})
+ _ = ld.loadFromExportData(lpkg) // ignore any secondary errors
+
+ return // can't get syntax trees for this package
+ }
+
+ files, errs := ld.parseFiles(lpkg.CompiledGoFiles)
+ for _, err := range errs {
+ appendError(err)
+ }
+
+ lpkg.Syntax = files
+ if ld.Config.Mode&NeedTypes == 0 {
+ return
+ }
+
+ // Start shutting down if the context is done and do not type check.
+ // Packages that import this one will have ld.Context.Err() != nil.
+ // ld.Context.Err() will be returned later by refine.
+ if ld.Context.Err() != nil {
+ return
+ }
+
+ lpkg.TypesInfo = &types.Info{
+ Types: make(map[ast.Expr]types.TypeAndValue),
+ Defs: make(map[*ast.Ident]types.Object),
+ Uses: make(map[*ast.Ident]types.Object),
+ Implicits: make(map[ast.Node]types.Object),
+ Instances: make(map[*ast.Ident]types.Instance),
+ Scopes: make(map[ast.Node]*types.Scope),
+ Selections: make(map[*ast.SelectorExpr]*types.Selection),
+ }
+ versions.InitFileVersions(lpkg.TypesInfo)
+ lpkg.TypesSizes = ld.sizes
+
+ importer := importerFunc(func(path string) (*types.Package, error) {
+ if path == "unsafe" {
+ return types.Unsafe, nil
+ }
+
+ // The imports map is keyed by import path.
+ ipkg := lpkg.Imports[path]
+ if ipkg == nil {
+ if err := lpkg.importErrors[path]; err != nil {
+ return nil, err
+ }
+ // There was skew between the metadata and the
+ // import declarations, likely due to an edit
+ // race, or because the ParseFile feature was
+ // used to supply alternative file contents.
+ return nil, fmt.Errorf("no metadata for %s", path)
+ }
+
+ if ipkg.Types != nil && ipkg.Types.Complete() {
+ return ipkg.Types, nil
+ }
+ log.Fatalf("internal error: package %q without types was imported from %q", path, lpkg)
+ panic("unreachable")
+ })
+
+ // type-check
+ tc := &types.Config{
+ Importer: importer,
+
+ // Type-check bodies of functions only in initial packages.
+ // Example: for import graph A->B->C and initial packages {A,C},
+ // we can ignore function bodies in B.
+ IgnoreFuncBodies: ld.Mode&NeedDeps == 0 && !lpkg.initial,
+
+ Error: appendError,
+ Sizes: ld.sizes, // may be nil
+ }
+ if lpkg.Module != nil && lpkg.Module.GoVersion != "" {
+ tc.GoVersion = "go" + lpkg.Module.GoVersion
+ }
+ if (ld.Mode & typecheckCgo) != 0 {
+ if !typesinternal.SetUsesCgo(tc) {
+ appendError(Error{
+ Msg: "typecheckCgo requires Go 1.15+",
+ Kind: ListError,
+ })
+ return
+ }
+ }
+
+ typErr := types.NewChecker(tc, ld.Fset, lpkg.Types, lpkg.TypesInfo).Files(lpkg.Syntax)
+ lpkg.importErrors = nil // no longer needed
+
+	// In go/types go1.21 and go1.22, Checker.Files failed fast with
+	// a "too new" error, without calling tc.Error and without
+ // proceeding to type-check the package (#66525).
+ // We rely on the runtimeVersion error to give the suggested remedy.
+ if typErr != nil && len(lpkg.Errors) == 0 && len(lpkg.Syntax) > 0 {
+ if msg := typErr.Error(); strings.HasPrefix(msg, "package requires newer Go version") {
+ appendError(types.Error{
+ Fset: ld.Fset,
+ Pos: lpkg.Syntax[0].Package,
+ Msg: msg,
+ })
+ }
+ }
+
+ // If !Cgo, the type-checker uses FakeImportC mode, so
+ // it doesn't invoke the importer for import "C",
+ // nor report an error for the import,
+ // or for any undefined C.f reference.
+ // We must detect this explicitly and correctly
+ // mark the package as IllTyped (by reporting an error).
+ // TODO(adonovan): if these errors are annoying,
+ // we could just set IllTyped quietly.
+ if tc.FakeImportC {
+ outer:
+ for _, f := range lpkg.Syntax {
+ for _, imp := range f.Imports {
+ if imp.Path.Value == `"C"` {
+ err := types.Error{Fset: ld.Fset, Pos: imp.Pos(), Msg: `import "C" ignored`}
+ appendError(err)
+ break outer
+ }
+ }
+ }
+ }
+
+ // If types.Checker.Files had an error that was unreported,
+ // make sure to report the unknown error so the package is illTyped.
+ if typErr != nil && len(lpkg.Errors) == 0 {
+ appendError(typErr)
+ }
+
+ // Record accumulated errors.
+ illTyped := len(lpkg.Errors) > 0
+ if !illTyped {
+ for _, imp := range lpkg.Imports {
+ if imp.IllTyped {
+ illTyped = true
+ break
+ }
+ }
+ }
+ lpkg.IllTyped = illTyped
+}
+
+// An importFunc is an implementation of the single-method
+// types.Importer interface based on a function value.
+type importerFunc func(path string) (*types.Package, error)
+
+func (f importerFunc) Import(path string) (*types.Package, error) { return f(path) }
+
+// We use a counting semaphore to limit
+// the number of parallel I/O calls per process.
+var ioLimit = make(chan bool, 20)
+
+func (ld *loader) parseFile(filename string) (*ast.File, error) {
+ ld.parseCacheMu.Lock()
+ v, ok := ld.parseCache[filename]
+ if ok {
+ // cache hit
+ ld.parseCacheMu.Unlock()
+ <-v.ready
+ } else {
+ // cache miss
+ v = &parseValue{ready: make(chan struct{})}
+ ld.parseCache[filename] = v
+ ld.parseCacheMu.Unlock()
+
+ var src []byte
+ for f, contents := range ld.Config.Overlay {
+ if sameFile(f, filename) {
+ src = contents
+ }
+ }
+ var err error
+ if src == nil {
+ ioLimit <- true // wait
+ src, err = os.ReadFile(filename)
+ <-ioLimit // signal
+ }
+ if err != nil {
+ v.err = err
+ } else {
+ v.f, v.err = ld.ParseFile(ld.Fset, filename, src)
+ }
+
+ close(v.ready)
+ }
+ return v.f, v.err
+}
+
+// parseFiles reads and parses the Go source files and returns the ASTs
+// of the ones that could be at least partially parsed, along with a
+// list of I/O and parse errors encountered.
+//
+// Because files are scanned in parallel, the token.Pos
+// positions of the resulting ast.Files are not ordered.
+func (ld *loader) parseFiles(filenames []string) ([]*ast.File, []error) {
+ var wg sync.WaitGroup
+ n := len(filenames)
+ parsed := make([]*ast.File, n)
+ errors := make([]error, n)
+ for i, file := range filenames {
+ wg.Add(1)
+ go func(i int, filename string) {
+ parsed[i], errors[i] = ld.parseFile(filename)
+ wg.Done()
+ }(i, file)
+ }
+ wg.Wait()
+
+ // Eliminate nils, preserving order.
+ var o int
+ for _, f := range parsed {
+ if f != nil {
+ parsed[o] = f
+ o++
+ }
+ }
+ parsed = parsed[:o]
+
+ o = 0
+ for _, err := range errors {
+ if err != nil {
+ errors[o] = err
+ o++
+ }
+ }
+ errors = errors[:o]
+
+ return parsed, errors
+}
+
+// sameFile returns true if x and y have the same basename and denote
+// the same file.
+func sameFile(x, y string) bool {
+ if x == y {
+ // It could be the case that y doesn't exist.
+ // For instance, it may be an overlay file that
+ // hasn't been written to disk. To handle that case
+ // let x == y through. (We added the exact absolute path
+ // string to the CompiledGoFiles list, so the unwritten
+ // overlay case implies x==y.)
+ return true
+ }
+ if strings.EqualFold(filepath.Base(x), filepath.Base(y)) { // (optimisation)
+ if xi, err := os.Stat(x); err == nil {
+ if yi, err := os.Stat(y); err == nil {
+ return os.SameFile(xi, yi)
+ }
+ }
+ }
+ return false
+}
+
+// loadFromExportData ensures that type information is present for the specified
+// package, loading it from an export data file on the first request.
+// On success it sets lpkg.Types to a new Package.
+func (ld *loader) loadFromExportData(lpkg *loaderPackage) error {
+ if lpkg.PkgPath == "" {
+ log.Fatalf("internal error: Package %s has no PkgPath", lpkg)
+ }
+
+ // Because gcexportdata.Read has the potential to create or
+ // modify the types.Package for each node in the transitive
+ // closure of dependencies of lpkg, all exportdata operations
+ // must be sequential. (Finer-grained locking would require
+ // changes to the gcexportdata API.)
+ //
+ // The exportMu lock guards the lpkg.Types field and the
+ // types.Package it points to, for each loaderPackage in the graph.
+ //
+ // Not all accesses to Package.Pkg need to be protected by exportMu:
+ // graph ordering ensures that direct dependencies of source
+ // packages are fully loaded before the importer reads their Pkg field.
+ ld.exportMu.Lock()
+ defer ld.exportMu.Unlock()
+
+ if tpkg := lpkg.Types; tpkg != nil && tpkg.Complete() {
+ return nil // cache hit
+ }
+
+ lpkg.IllTyped = true // fail safe
+
+ if lpkg.ExportFile == "" {
+ // Errors while building export data will have been printed to stderr.
+ return fmt.Errorf("no export data file")
+ }
+ f, err := os.Open(lpkg.ExportFile)
+ if err != nil {
+ return err
+ }
+ defer f.Close()
+
+ // Read gc export data.
+ //
+ // We don't currently support gccgo export data because all
+ // underlying workspaces use the gc toolchain. (Even build
+ // systems that support gccgo don't use it for workspace
+ // queries.)
+ r, err := gcexportdata.NewReader(f)
+ if err != nil {
+ return fmt.Errorf("reading %s: %v", lpkg.ExportFile, err)
+ }
+
+ // Build the view.
+ //
+ // The gcexportdata machinery has no concept of package ID.
+ // It identifies packages by their PkgPath, which although not
+ // globally unique is unique within the scope of one invocation
+ // of the linker, type-checker, or gcexportdata.
+ //
+ // So, we must build a PkgPath-keyed view of the global
+ // (conceptually ID-keyed) cache of packages and pass it to
+ // gcexportdata. The view must contain every existing
+ // package that might possibly be mentioned by the
+ // current package---its transitive closure.
+ //
+ // In loadPackage, we unconditionally create a types.Package for
+ // each dependency so that export data loading does not
+ // create new ones.
+ //
+ // TODO(adonovan): it would be simpler and more efficient
+ // if the export data machinery invoked a callback to
+ // get-or-create a package instead of a map.
+ //
+ view := make(map[string]*types.Package) // view seen by gcexportdata
+ seen := make(map[*loaderPackage]bool) // all visited packages
+ var visit func(pkgs map[string]*Package)
+ visit = func(pkgs map[string]*Package) {
+ for _, p := range pkgs {
+ lpkg := ld.pkgs[p.ID]
+ if !seen[lpkg] {
+ seen[lpkg] = true
+ view[lpkg.PkgPath] = lpkg.Types
+ visit(lpkg.Imports)
+ }
+ }
+ }
+ visit(lpkg.Imports)
+
+ viewLen := len(view) + 1 // adding the self package
+ // Parse the export data.
+ // (May modify incomplete packages in view but not create new ones.)
+ tpkg, err := gcexportdata.Read(r, ld.Fset, view, lpkg.PkgPath)
+ if err != nil {
+ return fmt.Errorf("reading %s: %v", lpkg.ExportFile, err)
+ }
+ if _, ok := view["go.shape"]; ok {
+ // Account for the pseudopackage "go.shape" that gets
+ // created by generic code.
+ viewLen++
+ }
+ if viewLen != len(view) {
+ log.Panicf("golang.org/x/tools/go/packages: unexpected new packages during load of %s", lpkg.PkgPath)
+ }
+
+ lpkg.Types = tpkg
+ lpkg.IllTyped = false
+ return nil
+}
+
+// impliedLoadMode returns loadMode with its dependencies.
+func impliedLoadMode(loadMode LoadMode) LoadMode {
+ if loadMode&(NeedDeps|NeedTypes|NeedTypesInfo) != 0 {
+ // All these things require knowing the import graph.
+ loadMode |= NeedImports
+ }
+ if loadMode&NeedTypes != 0 {
+ // Types require the GoVersion from Module.
+ loadMode |= NeedModule
+ }
+
+ return loadMode
+}
+
+func usesExportData(cfg *Config) bool {
+ return cfg.Mode&NeedExportFile != 0 || cfg.Mode&NeedTypes != 0 && cfg.Mode&NeedDeps == 0
+}
+
+var _ interface{} = io.Discard // assert build toolchain is go1.16 or later
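
The Load and Config API documented above is typically driven roughly as follows; this is a minimal sketch, and the "./..." pattern and the particular mode bits are illustrative rather than prescribed:

package main

import (
	"fmt"
	"os"

	"golang.org/x/tools/go/packages"
)

func main() {
	cfg := &packages.Config{
		// Request names, file lists, the import graph, parsed syntax, and type information.
		Mode: packages.NeedName | packages.NeedFiles | packages.NeedImports |
			packages.NeedSyntax | packages.NeedTypes | packages.NeedTypesInfo,
	}
	pkgs, err := packages.Load(cfg, "./...")
	if err != nil {
		// A Load-level error: invalid patterns, broken go command, etc.
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	// Per-package errors are recorded on each Package, not returned by Load.
	if packages.PrintErrors(pkgs) > 0 {
		os.Exit(1)
	}
	for _, p := range pkgs {
		fmt.Printf("%s: %d files parsed, ill-typed: %v\n", p.PkgPath, len(p.Syntax), p.IllTyped)
	}
}
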
diff --git a/vendor/golang.org/x/tools/go/packages/visit.go b/vendor/golang.org/x/tools/go/packages/visit.go
new file mode 100644
index 0000000..df14ffd
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/packages/visit.go
@@ -0,0 +1,68 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package packages
+
+import (
+ "fmt"
+ "os"
+ "sort"
+)
+
+// Visit visits all the packages in the import graph whose roots are
+// pkgs, calling the optional pre function the first time each package
+// is encountered (preorder), and the optional post function after a
+// package's dependencies have been visited (postorder).
+// The boolean result of pre(pkg) determines whether
+// the imports of package pkg are visited.
+func Visit(pkgs []*Package, pre func(*Package) bool, post func(*Package)) {
+ seen := make(map[*Package]bool)
+ var visit func(*Package)
+ visit = func(pkg *Package) {
+ if !seen[pkg] {
+ seen[pkg] = true
+
+ if pre == nil || pre(pkg) {
+ paths := make([]string, 0, len(pkg.Imports))
+ for path := range pkg.Imports {
+ paths = append(paths, path)
+ }
+ sort.Strings(paths) // Imports is a map, this makes visit stable
+ for _, path := range paths {
+ visit(pkg.Imports[path])
+ }
+ }
+
+ if post != nil {
+ post(pkg)
+ }
+ }
+ }
+ for _, pkg := range pkgs {
+ visit(pkg)
+ }
+}
+
+// PrintErrors prints to os.Stderr the accumulated errors of all
+// packages in the import graph rooted at pkgs, dependencies first.
+// PrintErrors returns the number of errors printed.
+func PrintErrors(pkgs []*Package) int {
+ var n int
+ errModules := make(map[*Module]bool)
+ Visit(pkgs, nil, func(pkg *Package) {
+ for _, err := range pkg.Errors {
+ fmt.Fprintln(os.Stderr, err)
+ n++
+ }
+
+ // Print pkg.Module.Error once if present.
+ mod := pkg.Module
+ if mod != nil && mod.Error != nil && !errModules[mod] {
+ errModules[mod] = true
+ fmt.Fprintln(os.Stderr, mod.Error.Err)
+ n++
+ }
+ })
+ return n
+}
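
Visit yields a deterministic, dependency-first traversal (postorder when only the post callback is supplied), which suits analyses that must process a package's imports before the package itself. A brief sketch, assuming pkgs came from a Load call that requested NeedImports and NeedDeps:

package analysis // illustrative package name

import "golang.org/x/tools/go/packages"

// dependencyOrder returns package IDs in dependency-first (postorder) order.
func dependencyOrder(pkgs []*packages.Package) []string {
	var order []string
	packages.Visit(pkgs, nil, func(p *packages.Package) {
		order = append(order, p.ID)
	})
	return order
}
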
diff --git a/vendor/golang.org/x/tools/go/ssa/TODO b/vendor/golang.org/x/tools/go/ssa/TODO
new file mode 100644
index 0000000..6c35253
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/ssa/TODO
@@ -0,0 +1,16 @@
+-*- text -*-
+
+SSA Generics to-do list
+===========================
+
+DOCUMENTATION:
+- Read me for internals
+
+TYPE PARAMETERIZED GENERIC FUNCTIONS:
+- sanity.go updates.
+- Check source functions going to generics.
+- Tests, tests, tests...
+
+USAGE:
+- Back fill users for handling ssa.InstantiateGenerics being off.
+
diff --git a/vendor/golang.org/x/tools/go/ssa/block.go b/vendor/golang.org/x/tools/go/ssa/block.go
new file mode 100644
index 0000000..28170c7
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/ssa/block.go
@@ -0,0 +1,113 @@
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssa
+
+import "fmt"
+
+// This file implements the BasicBlock type.
+
+// addEdge adds a control-flow graph edge from from to to.
+func addEdge(from, to *BasicBlock) {
+ from.Succs = append(from.Succs, to)
+ to.Preds = append(to.Preds, from)
+}
+
+// Parent returns the function that contains block b.
+func (b *BasicBlock) Parent() *Function { return b.parent }
+
+// String returns a human-readable label of this block.
+// It is not guaranteed unique within the function.
+func (b *BasicBlock) String() string {
+ return fmt.Sprintf("%d", b.Index)
+}
+
+// emit appends an instruction to the current basic block.
+// If the instruction defines a Value, it is returned.
+func (b *BasicBlock) emit(i Instruction) Value {
+ i.setBlock(b)
+ b.Instrs = append(b.Instrs, i)
+ v, _ := i.(Value)
+ return v
+}
+
+// predIndex returns the i such that b.Preds[i] == c or panics if
+// there is none.
+func (b *BasicBlock) predIndex(c *BasicBlock) int {
+ for i, pred := range b.Preds {
+ if pred == c {
+ return i
+ }
+ }
+ panic(fmt.Sprintf("no edge %s -> %s", c, b))
+}
+
+// hasPhi returns true if b.Instrs contains φ-nodes.
+func (b *BasicBlock) hasPhi() bool {
+ _, ok := b.Instrs[0].(*Phi)
+ return ok
+}
+
+// phis returns the prefix of b.Instrs containing all the block's φ-nodes.
+func (b *BasicBlock) phis() []Instruction {
+ for i, instr := range b.Instrs {
+ if _, ok := instr.(*Phi); !ok {
+ return b.Instrs[:i]
+ }
+ }
+ return nil // unreachable in well-formed blocks
+}
+
+// replacePred replaces all occurrences of p in b's predecessor list with q.
+// Ordinarily there should be at most one.
+func (b *BasicBlock) replacePred(p, q *BasicBlock) {
+ for i, pred := range b.Preds {
+ if pred == p {
+ b.Preds[i] = q
+ }
+ }
+}
+
+// replaceSucc replaces all occurrences of p in b's successor list with q.
+// Ordinarily there should be at most one.
+func (b *BasicBlock) replaceSucc(p, q *BasicBlock) {
+ for i, succ := range b.Succs {
+ if succ == p {
+ b.Succs[i] = q
+ }
+ }
+}
+
+// removePred removes all occurrences of p in b's
+// predecessor list and φ-nodes.
+// Ordinarily there should be at most one.
+func (b *BasicBlock) removePred(p *BasicBlock) {
+ phis := b.phis()
+
+ // We must preserve edge order for φ-nodes.
+ j := 0
+ for i, pred := range b.Preds {
+ if pred != p {
+ b.Preds[j] = b.Preds[i]
+ // Strike out φ-edge too.
+ for _, instr := range phis {
+ phi := instr.(*Phi)
+ phi.Edges[j] = phi.Edges[i]
+ }
+ j++
+ }
+ }
+ // Nil out b.Preds[j:] and φ-edges[j:] to aid GC.
+ for i := j; i < len(b.Preds); i++ {
+ b.Preds[i] = nil
+ for _, instr := range phis {
+ instr.(*Phi).Edges[i] = nil
+ }
+ }
+ b.Preds = b.Preds[:j]
+ for _, instr := range phis {
+ phi := instr.(*Phi)
+ phi.Edges = phi.Edges[:j]
+ }
+}
diff --git a/vendor/golang.org/x/tools/go/ssa/blockopt.go b/vendor/golang.org/x/tools/go/ssa/blockopt.go
new file mode 100644
index 0000000..7dabce8
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/ssa/blockopt.go
@@ -0,0 +1,183 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssa
+
+// Simple block optimizations to simplify the control flow graph.
+
+// TODO(adonovan): opt: instead of creating several "unreachable" blocks
+// per function in the Builder, reuse a single one (e.g. at Blocks[1])
+// to reduce garbage.
+
+import (
+ "fmt"
+ "os"
+)
+
+// If true, perform sanity checking and show progress at each
+// successive iteration of optimizeBlocks. Very verbose.
+const debugBlockOpt = false
+
+// markReachable sets Index=-1 for all blocks reachable from b.
+func markReachable(b *BasicBlock) {
+ b.Index = -1
+ for _, succ := range b.Succs {
+ if succ.Index == 0 {
+ markReachable(succ)
+ }
+ }
+}
+
+// deleteUnreachableBlocks marks all reachable blocks of f and
+// eliminates (nils) all others, including possibly cyclic subgraphs.
+func deleteUnreachableBlocks(f *Function) {
+ const white, black = 0, -1
+ // We borrow b.Index temporarily as the mark bit.
+ for _, b := range f.Blocks {
+ b.Index = white
+ }
+ markReachable(f.Blocks[0])
+ if f.Recover != nil {
+ markReachable(f.Recover)
+ }
+ for i, b := range f.Blocks {
+ if b.Index == white {
+ for _, c := range b.Succs {
+ if c.Index == black {
+ c.removePred(b) // delete white->black edge
+ }
+ }
+ if debugBlockOpt {
+ fmt.Fprintln(os.Stderr, "unreachable", b)
+ }
+ f.Blocks[i] = nil // delete b
+ }
+ }
+ f.removeNilBlocks()
+}
+
+// jumpThreading attempts to apply simple jump-threading to block b,
+// in which a->b->c become a->c if b is just a Jump.
+// The result is true if the optimization was applied.
+func jumpThreading(f *Function, b *BasicBlock) bool {
+ if b.Index == 0 {
+ return false // don't apply to entry block
+ }
+ if b.Instrs == nil {
+ return false
+ }
+ if _, ok := b.Instrs[0].(*Jump); !ok {
+ return false // not just a jump
+ }
+ c := b.Succs[0]
+ if c == b {
+ return false // don't apply to degenerate jump-to-self.
+ }
+ if c.hasPhi() {
+ return false // not sound without more effort
+ }
+ for j, a := range b.Preds {
+ a.replaceSucc(b, c)
+
+ // If a now has two edges to c, replace its degenerate If by Jump.
+ if len(a.Succs) == 2 && a.Succs[0] == c && a.Succs[1] == c {
+ jump := new(Jump)
+ jump.setBlock(a)
+ a.Instrs[len(a.Instrs)-1] = jump
+ a.Succs = a.Succs[:1]
+ c.removePred(b)
+ } else {
+ if j == 0 {
+ c.replacePred(b, a)
+ } else {
+ c.Preds = append(c.Preds, a)
+ }
+ }
+
+ if debugBlockOpt {
+ fmt.Fprintln(os.Stderr, "jumpThreading", a, b, c)
+ }
+ }
+ f.Blocks[b.Index] = nil // delete b
+ return true
+}
+
+// fuseBlocks attempts to apply the block fusion optimization to block
+// a, in which a->b becomes ab if len(a.Succs)==len(b.Preds)==1.
+// The result is true if the optimization was applied.
+func fuseBlocks(f *Function, a *BasicBlock) bool {
+ if len(a.Succs) != 1 {
+ return false
+ }
+ b := a.Succs[0]
+ if len(b.Preds) != 1 {
+ return false
+ }
+
+ // Degenerate &&/|| ops may result in a straight-line CFG
+// containing φ-nodes. (Ideally we'd replace such phis with
+ // their sole operand but that requires Referrers, built later.)
+ if b.hasPhi() {
+ return false // not sound without further effort
+ }
+
+ // Eliminate jump at end of A, then copy all of B across.
+ a.Instrs = append(a.Instrs[:len(a.Instrs)-1], b.Instrs...)
+ for _, instr := range b.Instrs {
+ instr.setBlock(a)
+ }
+
+ // A inherits B's successors
+ a.Succs = append(a.succs2[:0], b.Succs...)
+
+ // Fix up Preds links of all successors of B.
+ for _, c := range b.Succs {
+ c.replacePred(b, a)
+ }
+
+ if debugBlockOpt {
+ fmt.Fprintln(os.Stderr, "fuseBlocks", a, b)
+ }
+
+ f.Blocks[b.Index] = nil // delete b
+ return true
+}
+
+// optimizeBlocks() performs some simple block optimizations on a
+// completed function: dead block elimination, block fusion, jump
+// threading.
+func optimizeBlocks(f *Function) {
+ deleteUnreachableBlocks(f)
+
+ // Loop until no further progress.
+ changed := true
+ for changed {
+ changed = false
+
+ if debugBlockOpt {
+ f.WriteTo(os.Stderr)
+ mustSanityCheck(f, nil)
+ }
+
+ for _, b := range f.Blocks {
+ // f.Blocks will temporarily contain nils to indicate
+ // deleted blocks; we remove them at the end.
+ if b == nil {
+ continue
+ }
+
+ // Fuse blocks. b->c becomes bc.
+ if fuseBlocks(f, b) {
+ changed = true
+ }
+
+ // a->b->c becomes a->c if b contains only a Jump.
+ if jumpThreading(f, b) {
+ changed = true
+ continue // (b was disconnected)
+ }
+ }
+ }
+ f.removeNilBlocks()
+}
diff --git a/vendor/golang.org/x/tools/go/ssa/builder.go b/vendor/golang.org/x/tools/go/ssa/builder.go
new file mode 100644
index 0000000..55943e4
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/ssa/builder.go
@@ -0,0 +1,3276 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssa
+
+// This file defines the builder, which builds SSA-form IR for function bodies.
+//
+// SSA construction has two phases, "create" and "build". First, one
+// or more packages are created in any order by a sequence of calls to
+// CreatePackage, either from syntax or from mere type information.
+// Each created package has a complete set of Members (const, var,
+// type, func) that can be accessed through methods like
+// Program.FuncValue.
+//
+// It is not necessary to call CreatePackage for all dependencies of
+// each syntax package, only for its direct imports. (In future
+// perhaps even this restriction may be lifted.)
+//
+// Second, packages created from syntax are built, by one or more
+// calls to Package.Build, which may be concurrent; or by a call to
+// Program.Build, which builds all packages in parallel. Building
+// traverses the type-annotated syntax tree of each function body and
+// creates SSA-form IR, a control-flow graph of instructions,
+// populating fields such as Function.Body, .Params, and others.
+//
+// Building may create additional methods, including:
+// - wrapper methods (e.g. for embedding, or implicit &recv)
+// - bound method closures (e.g. for use(recv.f))
+// - thunks (e.g. for use(I.f) or use(T.f))
+// - generic instances (e.g. to produce f[int] from f[any]).
+// As these methods are created, they are added to the build queue,
+// and then processed in turn, until a fixed point is reached.
+// Since these methods might belong to packages that were not
+// created (by a call to CreatePackage), their Pkg field is unset.
+//
+// Instances of generic functions may be either instantiated (f[int]
+// is a copy of f[T] with substitutions) or wrapped (f[int] delegates
+// to f[T]), depending on the availability of generic syntax and the
+// InstantiateGenerics mode flag.
+//
+// Each package has an initializer function named "init" that calls
+// the initializer functions of each direct import, computes and
+// assigns the initial value of each global variable, and calls each
+// source-level function named "init". (These generate SSA functions
+// named "init#1", "init#2", etc.)
+//
+// Runtime types
+//
+// Each MakeInterface operation is a conversion from a non-interface
+// type to an interface type. The semantics of this operation requires
+// a runtime type descriptor, which is the type portion of an
+// interface, and the value abstracted by reflect.Type.
+//
+// The program accumulates all non-parameterized types that are
+// encountered as MakeInterface operands, along with all types that
+// may be derived from them using reflection. This set is available as
+// Program.RuntimeTypes, and the methods of these types may be
+// reachable via interface calls or reflection even if they are never
+// referenced from the SSA IR. (In practice, algorithms such as RTA
+// that compute reachability from package main perform their own
+// tracking of runtime types at a finer grain, so this feature is not
+// very useful.)
+//
+// Function literals
+//
+// Anonymous functions must be built as soon as they are encountered,
+// as building them may affect locals of the enclosing function, but they are not
+// marked 'built' until the end of the outermost enclosing function.
+// (Among other things, this causes them to be logged in top-down order.)
+//
+// The Function.build field determines the algorithm for building the
+// function body. It is cleared to mark that building is complete.
+
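+// A minimal sketch of how a client drives the two phases described above
+// (fset and typedPkgs are illustrative placeholders, not names from this
+// package; NewProgram, SanityCheckFunctions, CreatePackage and Build are the
+// real entry points):
+//
+//   prog := ssa.NewProgram(fset, ssa.SanityCheckFunctions)
+//   for _, p := range typedPkgs { // syntax packages and their direct imports
+//       prog.CreatePackage(p.Pkg, p.Files, p.Info, true) // create phase
+//   }
+//   prog.Build() // build phase: builds all syntax packages, possibly in parallel
+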
+import (
+ "fmt"
+ "go/ast"
+ "go/constant"
+ "go/token"
+ "go/types"
+ "os"
+ "runtime"
+ "sync"
+
+ "golang.org/x/tools/internal/aliases"
+ "golang.org/x/tools/internal/typeparams"
+ "golang.org/x/tools/internal/versions"
+)
+
+type opaqueType struct{ name string }
+
+func (t *opaqueType) String() string { return t.name }
+func (t *opaqueType) Underlying() types.Type { return t }
+
+var (
+ varOk = newVar("ok", tBool)
+ varIndex = newVar("index", tInt)
+
+ // Type constants.
+ tBool = types.Typ[types.Bool]
+ tByte = types.Typ[types.Byte]
+ tInt = types.Typ[types.Int]
+ tInvalid = types.Typ[types.Invalid]
+ tString = types.Typ[types.String]
+ tUntypedNil = types.Typ[types.UntypedNil]
+
+ tRangeIter = &opaqueType{"iter"} // the type of all "range" iterators
+ tDeferStack = types.NewPointer(&opaqueType{"deferStack"}) // the type of a "deferStack" from ssa:deferstack()
+ tEface = types.NewInterfaceType(nil, nil).Complete()
+
+ // SSA Value constants.
+ vZero = intConst(0)
+ vOne = intConst(1)
+ vTrue = NewConst(constant.MakeBool(true), tBool)
+ vFalse = NewConst(constant.MakeBool(false), tBool)
+
+ jReady = intConst(0) // range-over-func jump is READY
+ jBusy = intConst(-1) // range-over-func jump is BUSY
+ jDone = intConst(-2) // range-over-func jump is DONE
+
+ // The ssa:deferstack intrinsic returns the current function's defer stack.
+ vDeferStack = &Builtin{
+ name: "ssa:deferstack",
+ sig: types.NewSignatureType(nil, nil, nil, nil, types.NewTuple(anonVar(tDeferStack)), false),
+ }
+)
+
+// builder holds state associated with the package currently being built.
+// Its methods contain all the logic for AST-to-SSA conversion.
+//
+// All Functions belong to the same Program.
+//
+// builders are not thread-safe.
+type builder struct {
+ fns []*Function // Functions that have finished their CREATE phases.
+
+ finished int // finished is the length of the prefix of fns containing built functions.
+
+ // The task of building shared functions within the builder.
+	// Shared functions are ones that the builder may either create or look up.
+ // These may be built by other builders in parallel.
+ // The task is done when the builder has finished iterating, and it
+ // waits for all shared functions to finish building.
+	// nil implies there are no shared functions to wait on.
+ buildshared *task
+}
+
+// shared is done when the builder has built all of the
+// enqueued functions to a fixed-point.
+func (b *builder) shared() *task {
+ if b.buildshared == nil { // lazily-initialize
+ b.buildshared = &task{done: make(chan unit)}
+ }
+ return b.buildshared
+}
+
+// enqueue fn to be built by the builder.
+func (b *builder) enqueue(fn *Function) {
+ b.fns = append(b.fns, fn)
+}
+
+// waitForSharedFunction indicates that the builder should wait until
+// the potentially shared function fn has finished building.
+//
+// This should include any functions that may be built by other
+// builders.
+func (b *builder) waitForSharedFunction(fn *Function) {
+ if fn.buildshared != nil { // maybe need to wait?
+ s := b.shared()
+ s.addEdge(fn.buildshared)
+ }
+}
+
+// cond emits to fn code to evaluate boolean condition e and jump
+// to t or f depending on its value, performing various simplifications.
+//
+// Postcondition: fn.currentBlock is nil.
+func (b *builder) cond(fn *Function, e ast.Expr, t, f *BasicBlock) {
+ switch e := e.(type) {
+ case *ast.ParenExpr:
+ b.cond(fn, e.X, t, f)
+ return
+
+ case *ast.BinaryExpr:
+ switch e.Op {
+ case token.LAND:
+ ltrue := fn.newBasicBlock("cond.true")
+ b.cond(fn, e.X, ltrue, f)
+ fn.currentBlock = ltrue
+ b.cond(fn, e.Y, t, f)
+ return
+
+ case token.LOR:
+ lfalse := fn.newBasicBlock("cond.false")
+ b.cond(fn, e.X, t, lfalse)
+ fn.currentBlock = lfalse
+ b.cond(fn, e.Y, t, f)
+ return
+ }
+
+ case *ast.UnaryExpr:
+ if e.Op == token.NOT {
+ b.cond(fn, e.X, f, t)
+ return
+ }
+ }
+
+ // A traditional compiler would simplify "if false" (etc) here
+ // but we do not, for better fidelity to the source code.
+ //
+ // The value of a constant condition may be platform-specific,
+ // and may cause blocks that are reachable in some configuration
+ // to be hidden from subsequent analyses such as bug-finding tools.
+ emitIf(fn, b.expr(fn, e), t, f)
+}
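+
+// For illustration (assumed source fragment, not from this file): lowering
+// "if x > 0 && y > 0 { T } else { F }" via cond yields roughly
+//
+//   entry:     if x > 0 goto cond.true else F
+//   cond.true: if y > 0 goto T else F
+//
+// and a negated condition "!c" simply recurses on c with t and f swapped.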
+
+// logicalBinop emits code to fn to evaluate e, a &&- or
+// ||-expression whose reified boolean value is wanted.
+// The value is returned.
+func (b *builder) logicalBinop(fn *Function, e *ast.BinaryExpr) Value {
+ rhs := fn.newBasicBlock("binop.rhs")
+ done := fn.newBasicBlock("binop.done")
+
+ // T(e) = T(e.X) = T(e.Y) after untyped constants have been
+ // eliminated.
+ // TODO(adonovan): not true; MyBool==MyBool yields UntypedBool.
+ t := fn.typeOf(e)
+
+ var short Value // value of the short-circuit path
+ switch e.Op {
+ case token.LAND:
+ b.cond(fn, e.X, rhs, done)
+ short = NewConst(constant.MakeBool(false), t)
+
+ case token.LOR:
+ b.cond(fn, e.X, done, rhs)
+ short = NewConst(constant.MakeBool(true), t)
+ }
+
+ // Is rhs unreachable?
+ if rhs.Preds == nil {
+ // Simplify false&&y to false, true||y to true.
+ fn.currentBlock = done
+ return short
+ }
+
+ // Is done unreachable?
+ if done.Preds == nil {
+ // Simplify true&&y (or false||y) to y.
+ fn.currentBlock = rhs
+ return b.expr(fn, e.Y)
+ }
+
+ // All edges from e.X to done carry the short-circuit value.
+ var edges []Value
+ for range done.Preds {
+ edges = append(edges, short)
+ }
+
+ // The edge from e.Y to done carries the value of e.Y.
+ fn.currentBlock = rhs
+ edges = append(edges, b.expr(fn, e.Y))
+ emitJump(fn, done)
+ fn.currentBlock = done
+
+ phi := &Phi{Edges: edges, Comment: e.Op.String()}
+ phi.pos = e.OpPos
+ phi.typ = t
+ return done.emit(phi)
+}
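+
+// For illustration (assumed example): "v := x || y" is lowered to roughly
+//
+//   entry:      if x goto binop.done else binop.rhs
+//   binop.rhs:  t0 = y; jump binop.done
+//   binop.done: v = phi [entry: true, binop.rhs: t0]
+//
+// "&&" is symmetric, with false as the short-circuit value.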
+
+// exprN lowers a multi-result expression e to SSA form, emitting code
+// to fn and returning a single Value whose type is a *types.Tuple.
+// The caller must access the components via Extract.
+//
+// Multi-result expressions include CallExprs in a multi-value
+// assignment or return statement, and "value,ok" uses of
+// TypeAssertExpr, IndexExpr (when X is a map), and UnaryExpr (when Op
+// is token.ARROW).
+func (b *builder) exprN(fn *Function, e ast.Expr) Value {
+ typ := fn.typeOf(e).(*types.Tuple)
+ switch e := e.(type) {
+ case *ast.ParenExpr:
+ return b.exprN(fn, e.X)
+
+ case *ast.CallExpr:
+ // Currently, no built-in function nor type conversion
+ // has multiple results, so we can avoid some of the
+ // cases for single-valued CallExpr.
+ var c Call
+ b.setCall(fn, e, &c.Call)
+ c.typ = typ
+ return fn.emit(&c)
+
+ case *ast.IndexExpr:
+ mapt := typeparams.CoreType(fn.typeOf(e.X)).(*types.Map) // ,ok must be a map.
+ lookup := &Lookup{
+ X: b.expr(fn, e.X),
+ Index: emitConv(fn, b.expr(fn, e.Index), mapt.Key()),
+ CommaOk: true,
+ }
+ lookup.setType(typ)
+ lookup.setPos(e.Lbrack)
+ return fn.emit(lookup)
+
+ case *ast.TypeAssertExpr:
+ return emitTypeTest(fn, b.expr(fn, e.X), typ.At(0).Type(), e.Lparen)
+
+ case *ast.UnaryExpr: // must be receive <-
+ unop := &UnOp{
+ Op: token.ARROW,
+ X: b.expr(fn, e.X),
+ CommaOk: true,
+ }
+ unop.setType(typ)
+ unop.setPos(e.OpPos)
+ return fn.emit(unop)
+ }
+ panic(fmt.Sprintf("exprN(%T) in %s", e, fn))
+}
+
+// builtin emits to fn SSA instructions to implement a call to the
+// built-in function obj with the specified arguments
+// and return type. It returns the value defined by the result.
+//
+// The result is nil if no special handling was required; in this case
+// the caller should treat this like an ordinary library function
+// call.
+func (b *builder) builtin(fn *Function, obj *types.Builtin, args []ast.Expr, typ types.Type, pos token.Pos) Value {
+ typ = fn.typ(typ)
+ switch obj.Name() {
+ case "make":
+ switch ct := typeparams.CoreType(typ).(type) {
+ case *types.Slice:
+ n := b.expr(fn, args[1])
+ m := n
+ if len(args) == 3 {
+ m = b.expr(fn, args[2])
+ }
+ if m, ok := m.(*Const); ok {
+ // treat make([]T, n, m) as new([m]T)[:n]
+ cap := m.Int64()
+ at := types.NewArray(ct.Elem(), cap)
+ v := &Slice{
+ X: emitNew(fn, at, pos, "makeslice"),
+ High: n,
+ }
+ v.setPos(pos)
+ v.setType(typ)
+ return fn.emit(v)
+ }
+ v := &MakeSlice{
+ Len: n,
+ Cap: m,
+ }
+ v.setPos(pos)
+ v.setType(typ)
+ return fn.emit(v)
+
+ case *types.Map:
+ var res Value
+ if len(args) == 2 {
+ res = b.expr(fn, args[1])
+ }
+ v := &MakeMap{Reserve: res}
+ v.setPos(pos)
+ v.setType(typ)
+ return fn.emit(v)
+
+ case *types.Chan:
+ var sz Value = vZero
+ if len(args) == 2 {
+ sz = b.expr(fn, args[1])
+ }
+ v := &MakeChan{Size: sz}
+ v.setPos(pos)
+ v.setType(typ)
+ return fn.emit(v)
+ }
+
+ case "new":
+ return emitNew(fn, typeparams.MustDeref(typ), pos, "new")
+
+ case "len", "cap":
+ // Special case: len or cap of an array or *array is
+ // based on the type, not the value which may be nil.
+ // We must still evaluate the value, though. (If it
+ // was side-effect free, the whole call would have
+ // been constant-folded.)
+ t := typeparams.Deref(fn.typeOf(args[0]))
+ if at, ok := typeparams.CoreType(t).(*types.Array); ok {
+ b.expr(fn, args[0]) // for effects only
+ return intConst(at.Len())
+ }
+ // Otherwise treat as normal.
+
+ case "panic":
+ fn.emit(&Panic{
+ X: emitConv(fn, b.expr(fn, args[0]), tEface),
+ pos: pos,
+ })
+ fn.currentBlock = fn.newBasicBlock("unreachable")
+ return vTrue // any non-nil Value will do
+ }
+ return nil // treat all others as a regular function call
+}
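+
+// For example (illustration only): "make([]byte, n, 8)" has a constant
+// capacity, so the case above lowers it to "new([8]byte)[:n]", whereas
+// "make([]byte, n, m)" with a non-constant m becomes a MakeSlice instruction.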
+
+// addr lowers a single-result addressable expression e to SSA form,
+// emitting code to fn and returning the location (an lvalue) defined
+// by the expression.
+//
+// If escaping is true, addr marks the base variable of the
+// addressable expression e as being a potentially escaping pointer
+// value. For example, in this code:
+//
+// a := A{
+// b: [1]B{B{c: 1}}
+// }
+// return &a.b[0].c
+//
+// the application of & causes a.b[0].c to have its address taken,
+// which means that ultimately the local variable a must be
+// heap-allocated. This is a simple but very conservative escape
+// analysis.
+//
+// Operations forming potentially escaping pointers include:
+// - &x, including when implicit in method call or composite literals.
+// - a[:] iff a is an array (not *array)
+// - references to variables in lexically enclosing functions.
+func (b *builder) addr(fn *Function, e ast.Expr, escaping bool) lvalue {
+ switch e := e.(type) {
+ case *ast.Ident:
+ if isBlankIdent(e) {
+ return blank{}
+ }
+ obj := fn.objectOf(e).(*types.Var)
+ var v Value
+ if g := fn.Prog.packageLevelMember(obj); g != nil {
+ v = g.(*Global) // var (address)
+ } else {
+ v = fn.lookup(obj, escaping)
+ }
+ return &address{addr: v, pos: e.Pos(), expr: e}
+
+ case *ast.CompositeLit:
+ typ := typeparams.Deref(fn.typeOf(e))
+ var v *Alloc
+ if escaping {
+ v = emitNew(fn, typ, e.Lbrace, "complit")
+ } else {
+ v = emitLocal(fn, typ, e.Lbrace, "complit")
+ }
+ var sb storebuf
+ b.compLit(fn, v, e, true, &sb)
+ sb.emit(fn)
+ return &address{addr: v, pos: e.Lbrace, expr: e}
+
+ case *ast.ParenExpr:
+ return b.addr(fn, e.X, escaping)
+
+ case *ast.SelectorExpr:
+ sel := fn.selection(e)
+ if sel == nil {
+ // qualified identifier
+ return b.addr(fn, e.Sel, escaping)
+ }
+ if sel.kind != types.FieldVal {
+ panic(sel)
+ }
+ wantAddr := true
+ v := b.receiver(fn, e.X, wantAddr, escaping, sel)
+ index := sel.index[len(sel.index)-1]
+ fld := fieldOf(typeparams.MustDeref(v.Type()), index) // v is an addr.
+
+ // Due to the two phases of resolving AssignStmt, a panic from x.f = p()
+ // when x is nil is required to come after the side-effects of
+ // evaluating x and p().
+ emit := func(fn *Function) Value {
+ return emitFieldSelection(fn, v, index, true, e.Sel)
+ }
+ return &lazyAddress{addr: emit, t: fld.Type(), pos: e.Sel.Pos(), expr: e.Sel}
+
+ case *ast.IndexExpr:
+ xt := fn.typeOf(e.X)
+ elem, mode := indexType(xt)
+ var x Value
+ var et types.Type
+ switch mode {
+ case ixArrVar: // array, array|slice, array|*array, or array|*array|slice.
+ x = b.addr(fn, e.X, escaping).address(fn)
+ et = types.NewPointer(elem)
+ case ixVar: // *array, slice, *array|slice
+ x = b.expr(fn, e.X)
+ et = types.NewPointer(elem)
+ case ixMap:
+ mt := typeparams.CoreType(xt).(*types.Map)
+ return &element{
+ m: b.expr(fn, e.X),
+ k: emitConv(fn, b.expr(fn, e.Index), mt.Key()),
+ t: mt.Elem(),
+ pos: e.Lbrack,
+ }
+ default:
+ panic("unexpected container type in IndexExpr: " + xt.String())
+ }
+ index := b.expr(fn, e.Index)
+ if isUntyped(index.Type()) {
+ index = emitConv(fn, index, tInt)
+ }
+ // Due to the two phases of resolving AssignStmt, a panic from x[i] = p()
+ // when x is nil or i is out-of-bounds is required to come after the
+ // side-effects of evaluating x, i and p().
+ emit := func(fn *Function) Value {
+ v := &IndexAddr{
+ X: x,
+ Index: index,
+ }
+ v.setPos(e.Lbrack)
+ v.setType(et)
+ return fn.emit(v)
+ }
+ return &lazyAddress{addr: emit, t: typeparams.MustDeref(et), pos: e.Lbrack, expr: e}
+
+ case *ast.StarExpr:
+ return &address{addr: b.expr(fn, e.X), pos: e.Star, expr: e}
+ }
+
+ panic(fmt.Sprintf("unexpected address expression: %T", e))
+}
+
+type store struct {
+ lhs lvalue
+ rhs Value
+}
+
+type storebuf struct{ stores []store }
+
+func (sb *storebuf) store(lhs lvalue, rhs Value) {
+ sb.stores = append(sb.stores, store{lhs, rhs})
+}
+
+func (sb *storebuf) emit(fn *Function) {
+ for _, s := range sb.stores {
+ s.lhs.store(fn, s.rhs)
+ }
+}
+
+// assign emits to fn code to initialize the lvalue loc with the value
+// of expression e. If isZero is true, assign assumes that loc holds
+// the zero value for its type.
+//
+// This is equivalent to loc.store(fn, b.expr(fn, e)), but may generate
+// better code in some cases, e.g., for composite literals in an
+// addressable location.
+//
+// If sb is not nil, assign generates code to evaluate expression e, but
+// not to update loc. Instead, the necessary stores are appended to the
+// storebuf sb so that they can be executed later. This allows correct
+// in-place update of existing variables when the RHS is a composite
+// literal that may reference parts of the LHS.
+func (b *builder) assign(fn *Function, loc lvalue, e ast.Expr, isZero bool, sb *storebuf) {
+ // Can we initialize it in place?
+ if e, ok := unparen(e).(*ast.CompositeLit); ok {
+ // A CompositeLit never evaluates to a pointer,
+ // so if the type of the location is a pointer,
+ // an &-operation is implied.
+ if !is[blank](loc) && isPointerCore(loc.typ()) { // avoid calling blank.typ()
+ ptr := b.addr(fn, e, true).address(fn)
+ // copy address
+ if sb != nil {
+ sb.store(loc, ptr)
+ } else {
+ loc.store(fn, ptr)
+ }
+ return
+ }
+
+ if _, ok := loc.(*address); ok {
+ if isNonTypeParamInterface(loc.typ()) {
+ // e.g. var x interface{} = T{...}
+ // Can't in-place initialize an interface value.
+ // Fall back to copying.
+ } else {
+ // x = T{...} or x := T{...}
+ addr := loc.address(fn)
+ if sb != nil {
+ b.compLit(fn, addr, e, isZero, sb)
+ } else {
+ var sb storebuf
+ b.compLit(fn, addr, e, isZero, &sb)
+ sb.emit(fn)
+ }
+
+ // Subtle: emit debug ref for aggregate types only;
+ // slice and map are handled by store ops in compLit.
+ switch typeparams.CoreType(loc.typ()).(type) {
+ case *types.Struct, *types.Array:
+ emitDebugRef(fn, e, addr, true)
+ }
+
+ return
+ }
+ }
+ }
+
+ // simple case: just copy
+ rhs := b.expr(fn, e)
+ if sb != nil {
+ sb.store(loc, rhs)
+ } else {
+ loc.store(fn, rhs)
+ }
+}
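+
+// For illustration (assumed example): in "p = Pair{a: p.b, b: p.a}" the loads
+// of p.b and p.a are emitted while the composite literal is built, but the
+// two field stores are buffered in sb and emitted only afterwards, so the old
+// field values are read before either field of p is overwritten.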
+
+// expr lowers a single-result expression e to SSA form, emitting code
+// to fn and returning the Value defined by the expression.
+func (b *builder) expr(fn *Function, e ast.Expr) Value {
+ e = unparen(e)
+
+ tv := fn.info.Types[e]
+
+ // Is expression a constant?
+ if tv.Value != nil {
+ return NewConst(tv.Value, fn.typ(tv.Type))
+ }
+
+ var v Value
+ if tv.Addressable() {
+ // Prefer pointer arithmetic ({Index,Field}Addr) followed
+ // by Load over subelement extraction (e.g. Index, Field),
+ // to avoid large copies.
+ v = b.addr(fn, e, false).load(fn)
+ } else {
+ v = b.expr0(fn, e, tv)
+ }
+ if fn.debugInfo() {
+ emitDebugRef(fn, e, v, false)
+ }
+ return v
+}
+
+func (b *builder) expr0(fn *Function, e ast.Expr, tv types.TypeAndValue) Value {
+ switch e := e.(type) {
+ case *ast.BasicLit:
+ panic("non-constant BasicLit") // unreachable
+
+ case *ast.FuncLit:
+ /* function literal */
+ anon := &Function{
+ name: fmt.Sprintf("%s$%d", fn.Name(), 1+len(fn.AnonFuncs)),
+ Signature: fn.typeOf(e.Type).(*types.Signature),
+ pos: e.Type.Func,
+ parent: fn,
+ anonIdx: int32(len(fn.AnonFuncs)),
+ Pkg: fn.Pkg,
+ Prog: fn.Prog,
+ syntax: e,
+ info: fn.info,
+ goversion: fn.goversion,
+ build: (*builder).buildFromSyntax,
+ topLevelOrigin: nil, // use anonIdx to lookup an anon instance's origin.
+ typeparams: fn.typeparams, // share the parent's type parameters.
+ typeargs: fn.typeargs, // share the parent's type arguments.
+ subst: fn.subst, // share the parent's type substitutions.
+ uniq: fn.uniq, // start from parent's unique values
+ }
+ fn.AnonFuncs = append(fn.AnonFuncs, anon)
+ // Build anon immediately, as it may cause fn's locals to escape.
+ // (It is not marked 'built' until the end of the enclosing FuncDecl.)
+ anon.build(b, anon)
+ fn.uniq = anon.uniq // resume after anon's unique values
+ if anon.FreeVars == nil {
+ return anon
+ }
+ v := &MakeClosure{Fn: anon}
+ v.setType(fn.typ(tv.Type))
+ for _, fv := range anon.FreeVars {
+ v.Bindings = append(v.Bindings, fv.outer)
+ fv.outer = nil
+ }
+ return fn.emit(v)
+
+ case *ast.TypeAssertExpr: // single-result form only
+ return emitTypeAssert(fn, b.expr(fn, e.X), fn.typ(tv.Type), e.Lparen)
+
+ case *ast.CallExpr:
+ if fn.info.Types[e.Fun].IsType() {
+ // Explicit type conversion, e.g. string(x) or big.Int(x)
+ x := b.expr(fn, e.Args[0])
+ y := emitConv(fn, x, fn.typ(tv.Type))
+ if y != x {
+ switch y := y.(type) {
+ case *Convert:
+ y.pos = e.Lparen
+ case *ChangeType:
+ y.pos = e.Lparen
+ case *MakeInterface:
+ y.pos = e.Lparen
+ case *SliceToArrayPointer:
+ y.pos = e.Lparen
+ case *UnOp: // conversion from slice to array.
+ y.pos = e.Lparen
+ }
+ }
+ return y
+ }
+ // Call to "intrinsic" built-ins, e.g. new, make, panic.
+ if id, ok := unparen(e.Fun).(*ast.Ident); ok {
+ if obj, ok := fn.info.Uses[id].(*types.Builtin); ok {
+ if v := b.builtin(fn, obj, e.Args, fn.typ(tv.Type), e.Lparen); v != nil {
+ return v
+ }
+ }
+ }
+ // Regular function call.
+ var v Call
+ b.setCall(fn, e, &v.Call)
+ v.setType(fn.typ(tv.Type))
+ return fn.emit(&v)
+
+ case *ast.UnaryExpr:
+ switch e.Op {
+ case token.AND: // &X --- potentially escaping.
+ addr := b.addr(fn, e.X, true)
+ if _, ok := unparen(e.X).(*ast.StarExpr); ok {
+ // &*p must panic if p is nil (http://golang.org/s/go12nil).
+ // For simplicity, we'll just (suboptimally) rely
+ // on the side effects of a load.
+ // TODO(adonovan): emit dedicated nilcheck.
+ addr.load(fn)
+ }
+ return addr.address(fn)
+ case token.ADD:
+ return b.expr(fn, e.X)
+ case token.NOT, token.ARROW, token.SUB, token.XOR: // ! <- - ^
+ v := &UnOp{
+ Op: e.Op,
+ X: b.expr(fn, e.X),
+ }
+ v.setPos(e.OpPos)
+ v.setType(fn.typ(tv.Type))
+ return fn.emit(v)
+ default:
+ panic(e.Op)
+ }
+
+ case *ast.BinaryExpr:
+ switch e.Op {
+ case token.LAND, token.LOR:
+ return b.logicalBinop(fn, e)
+ case token.SHL, token.SHR:
+ fallthrough
+ case token.ADD, token.SUB, token.MUL, token.QUO, token.REM, token.AND, token.OR, token.XOR, token.AND_NOT:
+ return emitArith(fn, e.Op, b.expr(fn, e.X), b.expr(fn, e.Y), fn.typ(tv.Type), e.OpPos)
+
+ case token.EQL, token.NEQ, token.GTR, token.LSS, token.LEQ, token.GEQ:
+ cmp := emitCompare(fn, e.Op, b.expr(fn, e.X), b.expr(fn, e.Y), e.OpPos)
+ // The type of x==y may be UntypedBool.
+ return emitConv(fn, cmp, types.Default(fn.typ(tv.Type)))
+ default:
+ panic("illegal op in BinaryExpr: " + e.Op.String())
+ }
+
+ case *ast.SliceExpr:
+ var low, high, max Value
+ var x Value
+ xtyp := fn.typeOf(e.X)
+ switch typeparams.CoreType(xtyp).(type) {
+ case *types.Array:
+ // Potentially escaping.
+ x = b.addr(fn, e.X, true).address(fn)
+ case *types.Basic, *types.Slice, *types.Pointer: // *array
+ x = b.expr(fn, e.X)
+ default:
+ // core type exception?
+ if isBytestring(xtyp) {
+ x = b.expr(fn, e.X) // bytestring is handled as string and []byte.
+ } else {
+ panic("unexpected sequence type in SliceExpr")
+ }
+ }
+ if e.Low != nil {
+ low = b.expr(fn, e.Low)
+ }
+ if e.High != nil {
+ high = b.expr(fn, e.High)
+ }
+ if e.Slice3 {
+ max = b.expr(fn, e.Max)
+ }
+ v := &Slice{
+ X: x,
+ Low: low,
+ High: high,
+ Max: max,
+ }
+ v.setPos(e.Lbrack)
+ v.setType(fn.typ(tv.Type))
+ return fn.emit(v)
+
+ case *ast.Ident:
+ obj := fn.info.Uses[e]
+ // Universal built-in or nil?
+ switch obj := obj.(type) {
+ case *types.Builtin:
+ return &Builtin{name: obj.Name(), sig: fn.instanceType(e).(*types.Signature)}
+ case *types.Nil:
+ return zeroConst(fn.instanceType(e))
+ }
+
+ // Package-level func or var?
+ // (obj must belong to same package or a direct import.)
+ if v := fn.Prog.packageLevelMember(obj); v != nil {
+ if g, ok := v.(*Global); ok {
+ return emitLoad(fn, g) // var (address)
+ }
+ callee := v.(*Function) // (func)
+ if callee.typeparams.Len() > 0 {
+ targs := fn.subst.types(instanceArgs(fn.info, e))
+ callee = callee.instance(targs, b)
+ }
+ return callee
+ }
+ // Local var.
+ return emitLoad(fn, fn.lookup(obj.(*types.Var), false)) // var (address)
+
+ case *ast.SelectorExpr:
+ sel := fn.selection(e)
+ if sel == nil {
+ // builtin unsafe.{Add,Slice}
+ if obj, ok := fn.info.Uses[e.Sel].(*types.Builtin); ok {
+ return &Builtin{name: obj.Name(), sig: fn.typ(tv.Type).(*types.Signature)}
+ }
+ // qualified identifier
+ return b.expr(fn, e.Sel)
+ }
+ switch sel.kind {
+ case types.MethodExpr:
+ // (*T).f or T.f, the method f from the method-set of type T.
+ // The result is a "thunk".
+ thunk := createThunk(fn.Prog, sel)
+ b.enqueue(thunk)
+ return emitConv(fn, thunk, fn.typ(tv.Type))
+
+ case types.MethodVal:
+ // e.f where e is an expression and f is a method.
+ // The result is a "bound".
+ obj := sel.obj.(*types.Func)
+ rt := fn.typ(recvType(obj))
+ wantAddr := isPointer(rt)
+ escaping := true
+ v := b.receiver(fn, e.X, wantAddr, escaping, sel)
+
+ if types.IsInterface(rt) {
+ // If v may be an interface type I (after instantiating),
+ // we must emit a check that v is non-nil.
+ if recv, ok := aliases.Unalias(sel.recv).(*types.TypeParam); ok {
+ // Emit a nil check if any possible instantiation of the
+ // type parameter is an interface type.
+ if typeSetOf(recv).Len() > 0 {
+						// recv has a concrete term in its typeset.
+ // So it cannot be instantiated as an interface.
+ //
+ // Example:
+ // func _[T interface{~int; Foo()}] () {
+ // var v T
+ // _ = v.Foo // <-- MethodVal
+ // }
+ } else {
+ // rt may be instantiated as an interface.
+ // Emit nil check: typeassert (any(v)).(any).
+ emitTypeAssert(fn, emitConv(fn, v, tEface), tEface, token.NoPos)
+ }
+ } else {
+ // non-type param interface
+ // Emit nil check: typeassert v.(I).
+ emitTypeAssert(fn, v, rt, e.Sel.Pos())
+ }
+ }
+ if targs := receiverTypeArgs(obj); len(targs) > 0 {
+ // obj is generic.
+ obj = fn.Prog.canon.instantiateMethod(obj, fn.subst.types(targs), fn.Prog.ctxt)
+ }
+ bound := createBound(fn.Prog, obj)
+ b.enqueue(bound)
+
+ c := &MakeClosure{
+ Fn: bound,
+ Bindings: []Value{v},
+ }
+ c.setPos(e.Sel.Pos())
+ c.setType(fn.typ(tv.Type))
+ return fn.emit(c)
+
+ case types.FieldVal:
+ indices := sel.index
+ last := len(indices) - 1
+ v := b.expr(fn, e.X)
+ v = emitImplicitSelections(fn, v, indices[:last], e.Pos())
+ v = emitFieldSelection(fn, v, indices[last], false, e.Sel)
+ return v
+ }
+
+ panic("unexpected expression-relative selector")
+
+ case *ast.IndexListExpr:
+ // f[X, Y] must be a generic function
+ if !instance(fn.info, e.X) {
+ panic("unexpected expression-could not match index list to instantiation")
+ }
+ return b.expr(fn, e.X) // Handle instantiation within the *Ident or *SelectorExpr cases.
+
+ case *ast.IndexExpr:
+ if instance(fn.info, e.X) {
+ return b.expr(fn, e.X) // Handle instantiation within the *Ident or *SelectorExpr cases.
+ }
+ // not a generic instantiation.
+ xt := fn.typeOf(e.X)
+ switch et, mode := indexType(xt); mode {
+ case ixVar:
+ // Addressable slice/array; use IndexAddr and Load.
+ return b.addr(fn, e, false).load(fn)
+
+ case ixArrVar, ixValue:
+ // An array in a register, a string or a combined type that contains
+ // either an [_]array (ixArrVar) or string (ixValue).
+
+			// Note: the ixArrVar cases (including when CoreType(xt)==nil) could also be compiled as an IndexAddr and Load.
+ index := b.expr(fn, e.Index)
+ if isUntyped(index.Type()) {
+ index = emitConv(fn, index, tInt)
+ }
+ v := &Index{
+ X: b.expr(fn, e.X),
+ Index: index,
+ }
+ v.setPos(e.Lbrack)
+ v.setType(et)
+ return fn.emit(v)
+
+ case ixMap:
+ ct := typeparams.CoreType(xt).(*types.Map)
+ v := &Lookup{
+ X: b.expr(fn, e.X),
+ Index: emitConv(fn, b.expr(fn, e.Index), ct.Key()),
+ }
+ v.setPos(e.Lbrack)
+ v.setType(ct.Elem())
+ return fn.emit(v)
+ default:
+ panic("unexpected container type in IndexExpr: " + xt.String())
+ }
+
+ case *ast.CompositeLit, *ast.StarExpr:
+ // Addressable types (lvalues)
+ return b.addr(fn, e, false).load(fn)
+ }
+
+ panic(fmt.Sprintf("unexpected expr: %T", e))
+}
+
+// stmtList emits to fn code for all statements in list.
+func (b *builder) stmtList(fn *Function, list []ast.Stmt) {
+ for _, s := range list {
+ b.stmt(fn, s)
+ }
+}
+
+// receiver emits to fn code for expression e in the "receiver"
+// position of selection e.f (where f may be a field or a method) and
+// returns the effective receiver after applying the implicit field
+// selections of sel.
+//
+// wantAddr requests that the result is an address. If
+// !sel.indirect, this may require that e be built in addr() mode; it
+// must thus be addressable.
+//
+// escaping is defined as per builder.addr().
+func (b *builder) receiver(fn *Function, e ast.Expr, wantAddr, escaping bool, sel *selection) Value {
+ var v Value
+ if wantAddr && !sel.indirect && !isPointerCore(fn.typeOf(e)) {
+ v = b.addr(fn, e, escaping).address(fn)
+ } else {
+ v = b.expr(fn, e)
+ }
+
+ last := len(sel.index) - 1
+ // The position of implicit selection is the position of the inducing receiver expression.
+ v = emitImplicitSelections(fn, v, sel.index[:last], e.Pos())
+ if types.IsInterface(v.Type()) {
+ // When v is an interface, sel.Kind()==MethodValue and v.f is invoked.
+ // So v is not loaded, even if v has a pointer core type.
+ } else if !wantAddr && isPointerCore(v.Type()) {
+ v = emitLoad(fn, v)
+ }
+ return v
+}
+
+// setCallFunc populates the function parts of a CallCommon structure
+// (Func, Method, Recv, Args[0]) based on the kind of invocation
+// occurring in e.
+func (b *builder) setCallFunc(fn *Function, e *ast.CallExpr, c *CallCommon) {
+ c.pos = e.Lparen
+
+ // Is this a method call?
+ if selector, ok := unparen(e.Fun).(*ast.SelectorExpr); ok {
+ sel := fn.selection(selector)
+ if sel != nil && sel.kind == types.MethodVal {
+ obj := sel.obj.(*types.Func)
+ recv := recvType(obj)
+
+ wantAddr := isPointer(recv)
+ escaping := true
+ v := b.receiver(fn, selector.X, wantAddr, escaping, sel)
+ if types.IsInterface(recv) {
+ // Invoke-mode call.
+ c.Value = v // possibly type param
+ c.Method = obj
+ } else {
+ // "Call"-mode call.
+ c.Value = fn.Prog.objectMethod(obj, b)
+ c.Args = append(c.Args, v)
+ }
+ return
+ }
+
+ // sel.kind==MethodExpr indicates T.f() or (*T).f():
+ // a statically dispatched call to the method f in the
+ // method-set of T or *T. T may be an interface.
+ //
+ // e.Fun would evaluate to a concrete method, interface
+ // wrapper function, or promotion wrapper.
+ //
+ // For now, we evaluate it in the usual way.
+ //
+ // TODO(adonovan): opt: inline expr() here, to make the
+ // call static and to avoid generation of wrappers.
+ // It's somewhat tricky as it may consume the first
+ // actual parameter if the call is "invoke" mode.
+ //
+ // Examples:
+ // type T struct{}; func (T) f() {} // "call" mode
+ // type T interface { f() } // "invoke" mode
+ //
+ // type S struct{ T }
+ //
+ // var s S
+ // S.f(s)
+ // (*S).f(&s)
+ //
+ // Suggested approach:
+ // - consume the first actual parameter expression
+ // and build it with b.expr().
+ // - apply implicit field selections.
+ // - use MethodVal logic to populate fields of c.
+ }
+
+ // Evaluate the function operand in the usual way.
+ c.Value = b.expr(fn, e.Fun)
+}
+
+// emitCallArgs emits to f code for the actual parameters of call e to
+// a (possibly built-in) function of effective type sig.
+// The argument values are appended to args, which is then returned.
+func (b *builder) emitCallArgs(fn *Function, sig *types.Signature, e *ast.CallExpr, args []Value) []Value {
+ // f(x, y, z...): pass slice z straight through.
+ if e.Ellipsis != 0 {
+ for i, arg := range e.Args {
+ v := emitConv(fn, b.expr(fn, arg), sig.Params().At(i).Type())
+ args = append(args, v)
+ }
+ return args
+ }
+
+ offset := len(args) // 1 if call has receiver, 0 otherwise
+
+ // Evaluate actual parameter expressions.
+ //
+ // If this is a chained call of the form f(g()) where g has
+ // multiple return values (MRV), they are flattened out into
+ // args; a suffix of them may end up in a varargs slice.
+ for _, arg := range e.Args {
+ v := b.expr(fn, arg)
+ if ttuple, ok := v.Type().(*types.Tuple); ok { // MRV chain
+ for i, n := 0, ttuple.Len(); i < n; i++ {
+ args = append(args, emitExtract(fn, v, i))
+ }
+ } else {
+ args = append(args, v)
+ }
+ }
+
+ // Actual->formal assignability conversions for normal parameters.
+ np := sig.Params().Len() // number of normal parameters
+ if sig.Variadic() {
+ np--
+ }
+ for i := 0; i < np; i++ {
+ args[offset+i] = emitConv(fn, args[offset+i], sig.Params().At(i).Type())
+ }
+
+ // Actual->formal assignability conversions for variadic parameter,
+ // and construction of slice.
+ if sig.Variadic() {
+ varargs := args[offset+np:]
+ st := sig.Params().At(np).Type().(*types.Slice)
+ vt := st.Elem()
+ if len(varargs) == 0 {
+ args = append(args, zeroConst(st))
+ } else {
+ // Replace a suffix of args with a slice containing it.
+ at := types.NewArray(vt, int64(len(varargs)))
+ a := emitNew(fn, at, token.NoPos, "varargs")
+ a.setPos(e.Rparen)
+ for i, arg := range varargs {
+ iaddr := &IndexAddr{
+ X: a,
+ Index: intConst(int64(i)),
+ }
+ iaddr.setType(types.NewPointer(vt))
+ fn.emit(iaddr)
+ emitStore(fn, iaddr, arg, arg.Pos())
+ }
+ s := &Slice{X: a}
+ s.setType(st)
+ args[offset+np] = fn.emit(s)
+ args = args[:offset+np+1]
+ }
+ }
+ return args
+}
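+
+// For illustration (assumed call): given "func f(x int, rest ...int)", the
+// call f(a, b, c) allocates a fresh *[2]int ("varargs"), stores b and c into
+// it via IndexAddr, and passes a Slice of it as rest; f(a, xs...) instead
+// passes the existing slice xs straight through (the Ellipsis case above).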
+
+// setCall emits to fn code to evaluate all the parameters of a function
+// call e, and populates *c with those values.
+func (b *builder) setCall(fn *Function, e *ast.CallExpr, c *CallCommon) {
+ // First deal with the f(...) part and optional receiver.
+ b.setCallFunc(fn, e, c)
+
+ // Then append the other actual parameters.
+ sig, _ := typeparams.CoreType(fn.typeOf(e.Fun)).(*types.Signature)
+ if sig == nil {
+ panic(fmt.Sprintf("no signature for call of %s", e.Fun))
+ }
+ c.Args = b.emitCallArgs(fn, sig, e, c.Args)
+}
+
+// assignOp emits to fn code to perform loc <op>= val.
+func (b *builder) assignOp(fn *Function, loc lvalue, val Value, op token.Token, pos token.Pos) {
+ loc.store(fn, emitArith(fn, op, loc.load(fn), val, loc.typ(), pos))
+}
+
+// localValueSpec emits to fn code to define all of the vars in the
+// function-local ValueSpec, spec.
+func (b *builder) localValueSpec(fn *Function, spec *ast.ValueSpec) {
+ switch {
+ case len(spec.Values) == len(spec.Names):
+ // e.g. var x, y = 0, 1
+ // 1:1 assignment
+ for i, id := range spec.Names {
+ if !isBlankIdent(id) {
+ emitLocalVar(fn, identVar(fn, id))
+ }
+ lval := b.addr(fn, id, false) // non-escaping
+ b.assign(fn, lval, spec.Values[i], true, nil)
+ }
+
+ case len(spec.Values) == 0:
+ // e.g. var x, y int
+ // Locals are implicitly zero-initialized.
+ for _, id := range spec.Names {
+ if !isBlankIdent(id) {
+ lhs := emitLocalVar(fn, identVar(fn, id))
+ if fn.debugInfo() {
+ emitDebugRef(fn, id, lhs, true)
+ }
+ }
+ }
+
+ default:
+ // e.g. var x, y = pos()
+ tuple := b.exprN(fn, spec.Values[0])
+ for i, id := range spec.Names {
+ if !isBlankIdent(id) {
+ emitLocalVar(fn, identVar(fn, id))
+ lhs := b.addr(fn, id, false) // non-escaping
+ lhs.store(fn, emitExtract(fn, tuple, i))
+ }
+ }
+ }
+}
+
+// assignStmt emits code to fn for a parallel assignment of rhss to lhss.
+// isDef is true if this is a short variable declaration (:=).
+//
+// Note the similarity with localValueSpec.
+func (b *builder) assignStmt(fn *Function, lhss, rhss []ast.Expr, isDef bool) {
+ // Side effects of all LHSs and RHSs must occur in left-to-right order.
+ lvals := make([]lvalue, len(lhss))
+ isZero := make([]bool, len(lhss))
+ for i, lhs := range lhss {
+ var lval lvalue = blank{}
+ if !isBlankIdent(lhs) {
+ if isDef {
+ if obj, ok := fn.info.Defs[lhs.(*ast.Ident)].(*types.Var); ok {
+ emitLocalVar(fn, obj)
+ isZero[i] = true
+ }
+ }
+ lval = b.addr(fn, lhs, false) // non-escaping
+ }
+ lvals[i] = lval
+ }
+ if len(lhss) == len(rhss) {
+ // Simple assignment: x = f() (!isDef)
+ // Parallel assignment: x, y = f(), g() (!isDef)
+ // or short var decl: x, y := f(), g() (isDef)
+ //
+ // In all cases, the RHSs may refer to the LHSs,
+ // so we need a storebuf.
+ var sb storebuf
+ for i := range rhss {
+ b.assign(fn, lvals[i], rhss[i], isZero[i], &sb)
+ }
+ sb.emit(fn)
+ } else {
+ // e.g. x, y = pos()
+ tuple := b.exprN(fn, rhss[0])
+ emitDebugRef(fn, rhss[0], tuple, false)
+ for i, lval := range lvals {
+ lval.store(fn, emitExtract(fn, tuple, i))
+ }
+ }
+}
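+
+// For example (illustration only): in "x, y = y, x" both RHS values are
+// loaded into the storebuf before either store is emitted, so the parallel
+// assignment swaps correctly even though each RHS names an LHS.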
+
+// arrayLen returns the length of the array whose composite literal elements are elts.
+func (b *builder) arrayLen(fn *Function, elts []ast.Expr) int64 {
+ var max int64 = -1
+ var i int64 = -1
+ for _, e := range elts {
+ if kv, ok := e.(*ast.KeyValueExpr); ok {
+ i = b.expr(fn, kv.Key).(*Const).Int64()
+ } else {
+ i++
+ }
+ if i > max {
+ max = i
+ }
+ }
+ return max + 1
+}
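+
+// For example (illustration only): for the elements of [...]int{7, 5: 8, 9}
+// the indices seen are 0, 5 and 6, so arrayLen reports a length of 7.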
+
+// compLit emits to fn code to initialize a composite literal e at
+// address addr with type typ.
+//
+// Nested composite literals are recursively initialized in place
+// where possible. If isZero is true, compLit assumes that addr
+// holds the zero value for typ.
+//
+// Because the elements of a composite literal may refer to the
+// variables being updated, as in the second line below,
+//
+// x := T{a: 1}
+// x = T{a: x.a}
+//
+// all the reads must occur before all the writes. Thus all stores to
+// loc are emitted to the storebuf sb for later execution.
+//
+// A CompositeLit may have pointer type only in the recursive (nested)
+// case when the type name is implicit, e.g. in []*T{{}}, the inner
+// literal has type *T and behaves like &T{}.
+// In that case, addr must hold a T, not a *T.
+func (b *builder) compLit(fn *Function, addr Value, e *ast.CompositeLit, isZero bool, sb *storebuf) {
+ typ := typeparams.Deref(fn.typeOf(e)) // retain the named/alias/param type, if any
+ switch t := typeparams.CoreType(typ).(type) {
+ case *types.Struct:
+ if !isZero && len(e.Elts) != t.NumFields() {
+ // memclear
+ zt := typeparams.MustDeref(addr.Type())
+ sb.store(&address{addr, e.Lbrace, nil}, zeroConst(zt))
+ isZero = true
+ }
+ for i, e := range e.Elts {
+ fieldIndex := i
+ pos := e.Pos()
+ if kv, ok := e.(*ast.KeyValueExpr); ok {
+ fname := kv.Key.(*ast.Ident).Name
+ for i, n := 0, t.NumFields(); i < n; i++ {
+ sf := t.Field(i)
+ if sf.Name() == fname {
+ fieldIndex = i
+ pos = kv.Colon
+ e = kv.Value
+ break
+ }
+ }
+ }
+ sf := t.Field(fieldIndex)
+ faddr := &FieldAddr{
+ X: addr,
+ Field: fieldIndex,
+ }
+ faddr.setPos(pos)
+ faddr.setType(types.NewPointer(sf.Type()))
+ fn.emit(faddr)
+ b.assign(fn, &address{addr: faddr, pos: pos, expr: e}, e, isZero, sb)
+ }
+
+ case *types.Array, *types.Slice:
+ var at *types.Array
+ var array Value
+ switch t := t.(type) {
+ case *types.Slice:
+ at = types.NewArray(t.Elem(), b.arrayLen(fn, e.Elts))
+ array = emitNew(fn, at, e.Lbrace, "slicelit")
+ case *types.Array:
+ at = t
+ array = addr
+
+ if !isZero && int64(len(e.Elts)) != at.Len() {
+ // memclear
+ zt := typeparams.MustDeref(array.Type())
+ sb.store(&address{array, e.Lbrace, nil}, zeroConst(zt))
+ }
+ }
+
+ var idx *Const
+ for _, e := range e.Elts {
+ pos := e.Pos()
+ if kv, ok := e.(*ast.KeyValueExpr); ok {
+ idx = b.expr(fn, kv.Key).(*Const)
+ pos = kv.Colon
+ e = kv.Value
+ } else {
+ var idxval int64
+ if idx != nil {
+ idxval = idx.Int64() + 1
+ }
+ idx = intConst(idxval)
+ }
+ iaddr := &IndexAddr{
+ X: array,
+ Index: idx,
+ }
+ iaddr.setType(types.NewPointer(at.Elem()))
+ fn.emit(iaddr)
+ if t != at { // slice
+ // backing array is unaliased => storebuf not needed.
+ b.assign(fn, &address{addr: iaddr, pos: pos, expr: e}, e, true, nil)
+ } else {
+ b.assign(fn, &address{addr: iaddr, pos: pos, expr: e}, e, true, sb)
+ }
+ }
+
+ if t != at { // slice
+ s := &Slice{X: array}
+ s.setPos(e.Lbrace)
+ s.setType(typ)
+ sb.store(&address{addr: addr, pos: e.Lbrace, expr: e}, fn.emit(s))
+ }
+
+ case *types.Map:
+ m := &MakeMap{Reserve: intConst(int64(len(e.Elts)))}
+ m.setPos(e.Lbrace)
+ m.setType(typ)
+ fn.emit(m)
+ for _, e := range e.Elts {
+ e := e.(*ast.KeyValueExpr)
+
+ // If a key expression in a map literal is itself a
+ // composite literal, the type may be omitted.
+ // For example:
+ // map[*struct{}]bool{{}: true}
+ // An &-operation may be implied:
+ // map[*struct{}]bool{&struct{}{}: true}
+ wantAddr := false
+ if _, ok := unparen(e.Key).(*ast.CompositeLit); ok {
+ wantAddr = isPointerCore(t.Key())
+ }
+
+ var key Value
+ if wantAddr {
+ // A CompositeLit never evaluates to a pointer,
+ // so if the type of the location is a pointer,
+ // an &-operation is implied.
+ key = b.addr(fn, e.Key, true).address(fn)
+ } else {
+ key = b.expr(fn, e.Key)
+ }
+
+ loc := element{
+ m: m,
+ k: emitConv(fn, key, t.Key()),
+ t: t.Elem(),
+ pos: e.Colon,
+ }
+
+ // We call assign() only because it takes care
+ // of any &-operation required in the recursive
+ // case, e.g.,
+ // map[int]*struct{}{0: {}} implies &struct{}{}.
+ // In-place update is of course impossible,
+ // and no storebuf is needed.
+ b.assign(fn, &loc, e.Value, true, nil)
+ }
+ sb.store(&address{addr: addr, pos: e.Lbrace, expr: e}, m)
+
+ default:
+ panic("unexpected CompositeLit type: " + typ.String())
+ }
+}
+
+// switchStmt emits to fn code for the switch statement s, optionally
+// labelled by label.
+func (b *builder) switchStmt(fn *Function, s *ast.SwitchStmt, label *lblock) {
+ // We treat SwitchStmt like a sequential if-else chain.
+ // Multiway dispatch can be recovered later by ssautil.Switches()
+ // to those cases that are free of side effects.
+ if s.Init != nil {
+ b.stmt(fn, s.Init)
+ }
+ var tag Value = vTrue
+ if s.Tag != nil {
+ tag = b.expr(fn, s.Tag)
+ }
+ done := fn.newBasicBlock("switch.done")
+ if label != nil {
+ label._break = done
+ }
+ // We pull the default case (if present) down to the end.
+ // But each fallthrough label must point to the next
+ // body block in source order, so we preallocate a
+ // body block (fallthru) for the next case.
+ // Unfortunately this makes for a confusing block order.
+ var dfltBody *[]ast.Stmt
+ var dfltFallthrough *BasicBlock
+ var fallthru, dfltBlock *BasicBlock
+ ncases := len(s.Body.List)
+ for i, clause := range s.Body.List {
+ body := fallthru
+ if body == nil {
+ body = fn.newBasicBlock("switch.body") // first case only
+ }
+
+ // Preallocate body block for the next case.
+ fallthru = done
+ if i+1 < ncases {
+ fallthru = fn.newBasicBlock("switch.body")
+ }
+
+ cc := clause.(*ast.CaseClause)
+ if cc.List == nil {
+ // Default case.
+ dfltBody = &cc.Body
+ dfltFallthrough = fallthru
+ dfltBlock = body
+ continue
+ }
+
+ var nextCond *BasicBlock
+ for _, cond := range cc.List {
+ nextCond = fn.newBasicBlock("switch.next")
+ // TODO(adonovan): opt: when tag==vTrue, we'd
+ // get better code if we use b.cond(cond)
+ // instead of BinOp(EQL, tag, b.expr(cond))
+ // followed by If. Don't forget conversions
+ // though.
+ cond := emitCompare(fn, token.EQL, tag, b.expr(fn, cond), cond.Pos())
+ emitIf(fn, cond, body, nextCond)
+ fn.currentBlock = nextCond
+ }
+ fn.currentBlock = body
+ fn.targets = &targets{
+ tail: fn.targets,
+ _break: done,
+ _fallthrough: fallthru,
+ }
+ b.stmtList(fn, cc.Body)
+ fn.targets = fn.targets.tail
+ emitJump(fn, done)
+ fn.currentBlock = nextCond
+ }
+ if dfltBlock != nil {
+ emitJump(fn, dfltBlock)
+ fn.currentBlock = dfltBlock
+ fn.targets = &targets{
+ tail: fn.targets,
+ _break: done,
+ _fallthrough: dfltFallthrough,
+ }
+ b.stmtList(fn, *dfltBody)
+ fn.targets = fn.targets.tail
+ }
+ emitJump(fn, done)
+ fn.currentBlock = done
+}
+
+// typeSwitchStmt emits to fn code for the type switch statement s, optionally
+// labelled by label.
+func (b *builder) typeSwitchStmt(fn *Function, s *ast.TypeSwitchStmt, label *lblock) {
+ // We treat TypeSwitchStmt like a sequential if-else chain.
+ // Multiway dispatch can be recovered later by ssautil.Switches().
+
+ // Typeswitch lowering:
+ //
+ // var x X
+ // switch y := x.(type) {
+ // case T1, T2: S1 // >1 (y := x)
+ // case nil: SN // nil (y := x)
+ // default: SD // 0 types (y := x)
+ // case T3: S3 // 1 type (y := x.(T3))
+ // }
+ //
+ // ...s.Init...
+ // x := eval x
+ // .caseT1:
+ // t1, ok1 := typeswitch,ok x <T1>
+ // if ok1 then goto S1 else goto .caseT2
+ // .caseT2:
+ // t2, ok2 := typeswitch,ok x <T2>
+ // if ok2 then goto S1 else goto .caseNil
+ // .S1:
+ // y := x
+ // ...S1...
+ // goto done
+ // .caseNil:
+ // if x == nil then goto SN else goto .caseT3
+ // .SN:
+ // y := x
+ // ...SN...
+ // goto done
+ // .caseT3:
+ // t3, ok3 := typeswitch,ok x <T3>
+ // if ok3 then goto S3 else goto default
+ // .S3:
+ // y := t3
+ // ...S3...
+ // goto done
+ // .default:
+ // y := x
+ // ...SD...
+ // goto done
+ // .done:
+ if s.Init != nil {
+ b.stmt(fn, s.Init)
+ }
+
+ var x Value
+ switch ass := s.Assign.(type) {
+ case *ast.ExprStmt: // x.(type)
+ x = b.expr(fn, unparen(ass.X).(*ast.TypeAssertExpr).X)
+ case *ast.AssignStmt: // y := x.(type)
+ x = b.expr(fn, unparen(ass.Rhs[0]).(*ast.TypeAssertExpr).X)
+ }
+
+ done := fn.newBasicBlock("typeswitch.done")
+ if label != nil {
+ label._break = done
+ }
+ var default_ *ast.CaseClause
+ for _, clause := range s.Body.List {
+ cc := clause.(*ast.CaseClause)
+ if cc.List == nil {
+ default_ = cc
+ continue
+ }
+ body := fn.newBasicBlock("typeswitch.body")
+ var next *BasicBlock
+ var casetype types.Type
+ var ti Value // ti, ok := typeassert,ok x <Ti>
+ for _, cond := range cc.List {
+ next = fn.newBasicBlock("typeswitch.next")
+ casetype = fn.typeOf(cond)
+ var condv Value
+ if casetype == tUntypedNil {
+ condv = emitCompare(fn, token.EQL, x, zeroConst(x.Type()), cond.Pos())
+ ti = x
+ } else {
+ yok := emitTypeTest(fn, x, casetype, cc.Case)
+ ti = emitExtract(fn, yok, 0)
+ condv = emitExtract(fn, yok, 1)
+ }
+ emitIf(fn, condv, body, next)
+ fn.currentBlock = next
+ }
+ if len(cc.List) != 1 {
+ ti = x
+ }
+ fn.currentBlock = body
+ b.typeCaseBody(fn, cc, ti, done)
+ fn.currentBlock = next
+ }
+ if default_ != nil {
+ b.typeCaseBody(fn, default_, x, done)
+ } else {
+ emitJump(fn, done)
+ }
+ fn.currentBlock = done
+}
+
+func (b *builder) typeCaseBody(fn *Function, cc *ast.CaseClause, x Value, done *BasicBlock) {
+ if obj, ok := fn.info.Implicits[cc].(*types.Var); ok {
+ // In a switch y := x.(type), each case clause
+ // implicitly declares a distinct object y.
+ // In a single-type case, y has that type.
+ // In multi-type cases, 'case nil' and default,
+ // y has the same type as the interface operand.
+ emitStore(fn, emitLocalVar(fn, obj), x, obj.Pos())
+ }
+ fn.targets = &targets{
+ tail: fn.targets,
+ _break: done,
+ }
+ b.stmtList(fn, cc.Body)
+ fn.targets = fn.targets.tail
+ emitJump(fn, done)
+}
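+
+// For illustration (assumed example): in
+//
+//   switch y := x.(type) {
+//   case int:          // the y seen here is the extracted int (ti)
+//   case string, bool: // the y seen here is a copy of the interface value x
+//   }
+//
+// each clause stores the appropriate value into its own implicit y.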
+
+// selectStmt emits to fn code for the select statement s, optionally
+// labelled by label.
+func (b *builder) selectStmt(fn *Function, s *ast.SelectStmt, label *lblock) {
+ // A blocking select of a single case degenerates to a
+ // simple send or receive.
+ // TODO(adonovan): opt: is this optimization worth its weight?
+ if len(s.Body.List) == 1 {
+ clause := s.Body.List[0].(*ast.CommClause)
+ if clause.Comm != nil {
+ b.stmt(fn, clause.Comm)
+ done := fn.newBasicBlock("select.done")
+ if label != nil {
+ label._break = done
+ }
+ fn.targets = &targets{
+ tail: fn.targets,
+ _break: done,
+ }
+ b.stmtList(fn, clause.Body)
+ fn.targets = fn.targets.tail
+ emitJump(fn, done)
+ fn.currentBlock = done
+ return
+ }
+ }
+
+ // First evaluate all channels in all cases, and find
+ // the directions of each state.
+ var states []*SelectState
+ blocking := true
+ debugInfo := fn.debugInfo()
+ for _, clause := range s.Body.List {
+ var st *SelectState
+ switch comm := clause.(*ast.CommClause).Comm.(type) {
+ case nil: // default case
+ blocking = false
+ continue
+
+ case *ast.SendStmt: // ch<- i
+ ch := b.expr(fn, comm.Chan)
+ chtyp := typeparams.CoreType(fn.typ(ch.Type())).(*types.Chan)
+ st = &SelectState{
+ Dir: types.SendOnly,
+ Chan: ch,
+ Send: emitConv(fn, b.expr(fn, comm.Value), chtyp.Elem()),
+ Pos: comm.Arrow,
+ }
+ if debugInfo {
+ st.DebugNode = comm
+ }
+
+ case *ast.AssignStmt: // x := <-ch
+ recv := unparen(comm.Rhs[0]).(*ast.UnaryExpr)
+ st = &SelectState{
+ Dir: types.RecvOnly,
+ Chan: b.expr(fn, recv.X),
+ Pos: recv.OpPos,
+ }
+ if debugInfo {
+ st.DebugNode = recv
+ }
+
+ case *ast.ExprStmt: // <-ch
+ recv := unparen(comm.X).(*ast.UnaryExpr)
+ st = &SelectState{
+ Dir: types.RecvOnly,
+ Chan: b.expr(fn, recv.X),
+ Pos: recv.OpPos,
+ }
+ if debugInfo {
+ st.DebugNode = recv
+ }
+ }
+ states = append(states, st)
+ }
+
+ // We dispatch on the (fair) result of Select using a
+ // sequential if-else chain, in effect:
+ //
+ // idx, recvOk, r0...r_n-1 := select(...)
+ // if idx == 0 { // receive on channel 0 (first receive => r0)
+ // x, ok := r0, recvOk
+ // ...state0...
+	// } else if idx == 1 { // send on channel 1
+ // ...state1...
+ // } else {
+ // ...default...
+ // }
+ sel := &Select{
+ States: states,
+ Blocking: blocking,
+ }
+ sel.setPos(s.Select)
+ var vars []*types.Var
+ vars = append(vars, varIndex, varOk)
+ for _, st := range states {
+ if st.Dir == types.RecvOnly {
+ chtyp := typeparams.CoreType(fn.typ(st.Chan.Type())).(*types.Chan)
+ vars = append(vars, anonVar(chtyp.Elem()))
+ }
+ }
+ sel.setType(types.NewTuple(vars...))
+
+ fn.emit(sel)
+ idx := emitExtract(fn, sel, 0)
+
+ done := fn.newBasicBlock("select.done")
+ if label != nil {
+ label._break = done
+ }
+
+ var defaultBody *[]ast.Stmt
+ state := 0
+ r := 2 // index in 'sel' tuple of value; increments if st.Dir==RECV
+ for _, cc := range s.Body.List {
+ clause := cc.(*ast.CommClause)
+ if clause.Comm == nil {
+ defaultBody = &clause.Body
+ continue
+ }
+ body := fn.newBasicBlock("select.body")
+ next := fn.newBasicBlock("select.next")
+ emitIf(fn, emitCompare(fn, token.EQL, idx, intConst(int64(state)), token.NoPos), body, next)
+ fn.currentBlock = body
+ fn.targets = &targets{
+ tail: fn.targets,
+ _break: done,
+ }
+ switch comm := clause.Comm.(type) {
+ case *ast.ExprStmt: // <-ch
+ if debugInfo {
+ v := emitExtract(fn, sel, r)
+ emitDebugRef(fn, states[state].DebugNode.(ast.Expr), v, false)
+ }
+ r++
+
+ case *ast.AssignStmt: // x := <-states[state].Chan
+ if comm.Tok == token.DEFINE {
+ emitLocalVar(fn, identVar(fn, comm.Lhs[0].(*ast.Ident)))
+ }
+ x := b.addr(fn, comm.Lhs[0], false) // non-escaping
+ v := emitExtract(fn, sel, r)
+ if debugInfo {
+ emitDebugRef(fn, states[state].DebugNode.(ast.Expr), v, false)
+ }
+ x.store(fn, v)
+
+ if len(comm.Lhs) == 2 { // x, ok := ...
+ if comm.Tok == token.DEFINE {
+ emitLocalVar(fn, identVar(fn, comm.Lhs[1].(*ast.Ident)))
+ }
+ ok := b.addr(fn, comm.Lhs[1], false) // non-escaping
+ ok.store(fn, emitExtract(fn, sel, 1))
+ }
+ r++
+ }
+ b.stmtList(fn, clause.Body)
+ fn.targets = fn.targets.tail
+ emitJump(fn, done)
+ fn.currentBlock = next
+ state++
+ }
+ if defaultBody != nil {
+ fn.targets = &targets{
+ tail: fn.targets,
+ _break: done,
+ }
+ b.stmtList(fn, *defaultBody)
+ fn.targets = fn.targets.tail
+ } else {
+ // A blocking select must match some case.
+ // (This should really be a runtime.errorString, not a string.)
+ fn.emit(&Panic{
+ X: emitConv(fn, stringConst("blocking select matched no case"), tEface),
+ })
+ fn.currentBlock = fn.newBasicBlock("unreachable")
+ }
+ emitJump(fn, done)
+ fn.currentBlock = done
+}
+
+// forStmt emits to fn code for the for statement s, optionally
+// labelled by label.
+func (b *builder) forStmt(fn *Function, s *ast.ForStmt, label *lblock) {
+ // Use forStmtGo122 instead if it applies.
+ if s.Init != nil {
+ if assign, ok := s.Init.(*ast.AssignStmt); ok && assign.Tok == token.DEFINE {
+ if versions.AtLeast(fn.goversion, versions.Go1_22) {
+ b.forStmtGo122(fn, s, label)
+ return
+ }
+ }
+ }
+
+ // ...init...
+ // jump loop
+ // loop:
+ // if cond goto body else done
+ // body:
+ // ...body...
+ // jump post
+ // post: (target of continue)
+ // ...post...
+ // jump loop
+ // done: (target of break)
+ if s.Init != nil {
+ b.stmt(fn, s.Init)
+ }
+
+ body := fn.newBasicBlock("for.body")
+ done := fn.newBasicBlock("for.done") // target of 'break'
+ loop := body // target of back-edge
+ if s.Cond != nil {
+ loop = fn.newBasicBlock("for.loop")
+ }
+ cont := loop // target of 'continue'
+ if s.Post != nil {
+ cont = fn.newBasicBlock("for.post")
+ }
+ if label != nil {
+ label._break = done
+ label._continue = cont
+ }
+ emitJump(fn, loop)
+ fn.currentBlock = loop
+ if loop != body {
+ b.cond(fn, s.Cond, body, done)
+ fn.currentBlock = body
+ }
+ fn.targets = &targets{
+ tail: fn.targets,
+ _break: done,
+ _continue: cont,
+ }
+ b.stmt(fn, s.Body)
+ fn.targets = fn.targets.tail
+ emitJump(fn, cont)
+
+ if s.Post != nil {
+ fn.currentBlock = cont
+ b.stmt(fn, s.Post)
+ emitJump(fn, loop) // back-edge
+ }
+ fn.currentBlock = done
+}
+
+// forStmtGo122 emits to fn code for the for statement s, optionally
+// labelled by label. s must define its variables.
+//
+// This allocates once per loop iteration. This is only correct in
+// Go versions >= go1.22.
+func (b *builder) forStmtGo122(fn *Function, s *ast.ForStmt, label *lblock) {
+ // i_outer = alloc[T]
+ // *i_outer = ...init... // under objects[i] = i_outer
+ // jump loop
+ // loop:
+ // i = phi [head: i_outer, loop: i_next]
+ // ...cond... // under objects[i] = i
+ // if cond goto body else done
+ // body:
+ // ...body... // under objects[i] = i (same as loop)
+ // jump post
+ // post:
+ // tmp = *i
+ // i_next = alloc[T]
+ // *i_next = tmp
+ // ...post... // under objects[i] = i_next
+ // goto loop
+ // done:
+
+ init := s.Init.(*ast.AssignStmt)
+ startingBlocks := len(fn.Blocks)
+
+ pre := fn.currentBlock // current block before starting
+ loop := fn.newBasicBlock("for.loop") // target of back-edge
+ body := fn.newBasicBlock("for.body")
+ post := fn.newBasicBlock("for.post") // target of 'continue'
+ done := fn.newBasicBlock("for.done") // target of 'break'
+
+ // For each of the n loop variables, we create five SSA values,
+ // outer, phi, next, load, and store in pre, loop, and post.
+ // There is no limit on n.
+ type loopVar struct {
+ obj *types.Var
+ outer *Alloc
+ phi *Phi
+ load *UnOp
+ next *Alloc
+ store *Store
+ }
+ vars := make([]loopVar, len(init.Lhs))
+ for i, lhs := range init.Lhs {
+ v := identVar(fn, lhs.(*ast.Ident))
+ typ := fn.typ(v.Type())
+
+ fn.currentBlock = pre
+ outer := emitLocal(fn, typ, v.Pos(), v.Name())
+
+ fn.currentBlock = loop
+ phi := &Phi{Comment: v.Name()}
+ phi.pos = v.Pos()
+ phi.typ = outer.Type()
+ fn.emit(phi)
+
+ fn.currentBlock = post
+ // If next is local, it reuses the address and zeroes the old value so
+ // load before allocating next.
+ load := emitLoad(fn, phi)
+ next := emitLocal(fn, typ, v.Pos(), v.Name())
+ store := emitStore(fn, next, load, token.NoPos)
+
+ phi.Edges = []Value{outer, next} // pre edge is emitted before post edge.
+
+ vars[i] = loopVar{v, outer, phi, load, next, store}
+ }
+
+ // ...init... under fn.objects[v] = i_outer
+ fn.currentBlock = pre
+ for _, v := range vars {
+ fn.vars[v.obj] = v.outer
+ }
+ const isDef = false // assign to already-allocated outers
+ b.assignStmt(fn, init.Lhs, init.Rhs, isDef)
+ if label != nil {
+ label._break = done
+ label._continue = post
+ }
+ emitJump(fn, loop)
+
+ // ...cond... under fn.objects[v] = i
+ fn.currentBlock = loop
+ for _, v := range vars {
+ fn.vars[v.obj] = v.phi
+ }
+ if s.Cond != nil {
+ b.cond(fn, s.Cond, body, done)
+ } else {
+ emitJump(fn, body)
+ }
+
+ // ...body... under fn.objects[v] = i
+ fn.currentBlock = body
+ fn.targets = &targets{
+ tail: fn.targets,
+ _break: done,
+ _continue: post,
+ }
+ b.stmt(fn, s.Body)
+ fn.targets = fn.targets.tail
+ emitJump(fn, post)
+
+ // ...post... under fn.objects[v] = i_next
+ for _, v := range vars {
+ fn.vars[v.obj] = v.next
+ }
+ fn.currentBlock = post
+ if s.Post != nil {
+ b.stmt(fn, s.Post)
+ }
+ emitJump(fn, loop) // back-edge
+ fn.currentBlock = done
+
+ // For each loop variable that does not escape,
+ // (the common case), fuse its next cells into its
+ // (local) outer cell as they have disjoint live ranges.
+ //
+ // It is sufficient to test whether i_next escapes,
+ // because its Heap flag will be marked true if either
+ // the cond or post expression causes i to escape
+ // (because escape distributes over phi).
+ var nlocals int
+ for _, v := range vars {
+ if !v.next.Heap {
+ nlocals++
+ }
+ }
+ if nlocals > 0 {
+ replace := make(map[Value]Value, 2*nlocals)
+ dead := make(map[Instruction]bool, 4*nlocals)
+ for _, v := range vars {
+ if !v.next.Heap {
+ replace[v.next] = v.outer
+ replace[v.phi] = v.outer
+ dead[v.phi], dead[v.next], dead[v.load], dead[v.store] = true, true, true, true
+ }
+ }
+
+ // Replace all uses of i_next and phi with i_outer.
+ // Referrers have not been built for fn yet so only update Instruction operands.
+ // We need only look within the blocks added by the loop.
+ var operands []*Value // recycle storage
+ for _, b := range fn.Blocks[startingBlocks:] {
+ for _, instr := range b.Instrs {
+ operands = instr.Operands(operands[:0])
+ for _, ptr := range operands {
+ k := *ptr
+ if v := replace[k]; v != nil {
+ *ptr = v
+ }
+ }
+ }
+ }
+
+ // Remove instructions for phi, load, and store.
+ // lift() will remove the unused i_next *Alloc.
+ isDead := func(i Instruction) bool { return dead[i] }
+ loop.Instrs = removeInstrsIf(loop.Instrs, isDead)
+ post.Instrs = removeInstrsIf(post.Instrs, isDead)
+ }
+}
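+
+// The per-iteration allocation is what gives go1.22 loop variables their
+// semantics. In a loop such as
+//
+//	var fns []func() int
+//	for i := 0; i < 3; i++ {
+//		fns = append(fns, func() int { return i })
+//	}
+//
+// each iteration binds a fresh i, so the three closures return 0, 1 and 2.
+// The builder models this with a new cell (i_next) per iteration and, as an
+// optimization, fuses the cells back into a single local when i does not
+// escape.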
+
+// rangeIndexed emits to fn the header for an integer-indexed loop
+// over array, *array or slice value x.
+// The v result is defined only if tv is non-nil.
+// pos is the position of the "for" token.
+func (b *builder) rangeIndexed(fn *Function, x Value, tv types.Type, pos token.Pos) (k, v Value, loop, done *BasicBlock) {
+ //
+ // length = len(x)
+ // index = -1
+ // loop: (target of continue)
+ // index++
+ // if index < length goto body else done
+ // body:
+ // k = index
+ // v = x[index]
+ // ...body...
+ // jump loop
+ // done: (target of break)
+
+ // Determine number of iterations.
+ var length Value
+ dt := typeparams.Deref(x.Type())
+ if arr, ok := typeparams.CoreType(dt).(*types.Array); ok {
+ // For array or *array, the number of iterations is
+ // known statically thanks to the type. We avoid a
+ // data dependence upon x, permitting later dead-code
+ // elimination if x is pure, static unrolling, etc.
+ // Ranging over a nil *array may have >0 iterations.
+ // We still generate code for x, in case it has effects.
+ length = intConst(arr.Len())
+ } else {
+ // length = len(x).
+ var c Call
+ c.Call.Value = makeLen(x.Type())
+ c.Call.Args = []Value{x}
+ c.setType(tInt)
+ length = fn.emit(&c)
+ }
+
+ index := emitLocal(fn, tInt, token.NoPos, "rangeindex")
+ emitStore(fn, index, intConst(-1), pos)
+
+ loop = fn.newBasicBlock("rangeindex.loop")
+ emitJump(fn, loop)
+ fn.currentBlock = loop
+
+ incr := &BinOp{
+ Op: token.ADD,
+ X: emitLoad(fn, index),
+ Y: vOne,
+ }
+ incr.setType(tInt)
+ emitStore(fn, index, fn.emit(incr), pos)
+
+ body := fn.newBasicBlock("rangeindex.body")
+ done = fn.newBasicBlock("rangeindex.done")
+ emitIf(fn, emitCompare(fn, token.LSS, incr, length, token.NoPos), body, done)
+ fn.currentBlock = body
+
+ k = emitLoad(fn, index)
+ if tv != nil {
+ switch t := typeparams.CoreType(x.Type()).(type) {
+ case *types.Array:
+ instr := &Index{
+ X: x,
+ Index: k,
+ }
+ instr.setType(t.Elem())
+ instr.setPos(x.Pos())
+ v = fn.emit(instr)
+
+ case *types.Pointer: // *array
+ instr := &IndexAddr{
+ X: x,
+ Index: k,
+ }
+ instr.setType(types.NewPointer(t.Elem().Underlying().(*types.Array).Elem()))
+ instr.setPos(x.Pos())
+ v = emitLoad(fn, fn.emit(instr))
+
+ case *types.Slice:
+ instr := &IndexAddr{
+ X: x,
+ Index: k,
+ }
+ instr.setType(types.NewPointer(t.Elem()))
+ instr.setPos(x.Pos())
+ v = emitLoad(fn, fn.emit(instr))
+
+ default:
+ panic("rangeIndexed x:" + t.String())
+ }
+ }
+ return
+}
+
+// rangeIter emits to fn the header for a loop using
+// Range/Next/Extract to iterate over map or string value x.
+// tk and tv are the types of the key/value results k and v, or nil
+// if the respective component is not wanted.
+func (b *builder) rangeIter(fn *Function, x Value, tk, tv types.Type, pos token.Pos) (k, v Value, loop, done *BasicBlock) {
+ //
+ // it = range x
+ // loop: (target of continue)
+ // okv = next it (ok, key, value)
+ // ok = extract okv #0
+ // if ok goto body else done
+ // body:
+ // k = extract okv #1
+ // v = extract okv #2
+ // ...body...
+ // jump loop
+ // done: (target of break)
+ //
+
+ if tk == nil {
+ tk = tInvalid
+ }
+ if tv == nil {
+ tv = tInvalid
+ }
+
+ rng := &Range{X: x}
+ rng.setPos(pos)
+ rng.setType(tRangeIter)
+ it := fn.emit(rng)
+
+ loop = fn.newBasicBlock("rangeiter.loop")
+ emitJump(fn, loop)
+ fn.currentBlock = loop
+
+ okv := &Next{
+ Iter: it,
+ IsString: isBasic(typeparams.CoreType(x.Type())),
+ }
+ okv.setType(types.NewTuple(
+ varOk,
+ newVar("k", tk),
+ newVar("v", tv),
+ ))
+ fn.emit(okv)
+
+ body := fn.newBasicBlock("rangeiter.body")
+ done = fn.newBasicBlock("rangeiter.done")
+ emitIf(fn, emitExtract(fn, okv, 0), body, done)
+ fn.currentBlock = body
+
+ if tk != tInvalid {
+ k = emitExtract(fn, okv, 1)
+ }
+ if tv != tInvalid {
+ v = emitExtract(fn, okv, 2)
+ }
+ return
+}
+
+// rangeChan emits to fn the header for a loop that receives from
+// channel x until it fails.
+// tk is the channel's element type, or nil if the k result is
+// not wanted.
+// pos is the position of the '=' or ':=' token.
+func (b *builder) rangeChan(fn *Function, x Value, tk types.Type, pos token.Pos) (k Value, loop, done *BasicBlock) {
+ //
+ // loop: (target of continue)
+ // ko = <-x (key, ok)
+ // ok = extract ko #1
+ // if ok goto body else done
+ // body:
+ // k = extract ko #0
+ // ...body...
+ // goto loop
+ // done: (target of break)
+
+ loop = fn.newBasicBlock("rangechan.loop")
+ emitJump(fn, loop)
+ fn.currentBlock = loop
+ recv := &UnOp{
+ Op: token.ARROW,
+ X: x,
+ CommaOk: true,
+ }
+ recv.setPos(pos)
+ recv.setType(types.NewTuple(
+ newVar("k", typeparams.CoreType(x.Type()).(*types.Chan).Elem()),
+ varOk,
+ ))
+ ko := fn.emit(recv)
+ body := fn.newBasicBlock("rangechan.body")
+ done = fn.newBasicBlock("rangechan.done")
+ emitIf(fn, emitExtract(fn, ko, 1), body, done)
+ fn.currentBlock = body
+ if tk != nil {
+ k = emitExtract(fn, ko, 0)
+ }
+ return
+}
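+
+// The emitted header corresponds to the usual expansion of a channel range:
+// a loop such as
+//
+//	for v := range ch {
+//		use(v)
+//	}
+//
+// behaves like
+//
+//	for {
+//		v, ok := <-ch
+//		if !ok {
+//			break
+//		}
+//		use(v)
+//	}
+//
+// which is the comma-ok receive and extract sequence built above.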
+
+// rangeInt emits to fn the header for a range loop with an integer operand.
+// tk is the key value's type, or nil if the k result is not wanted.
+// pos is the position of the "for" token.
+func (b *builder) rangeInt(fn *Function, x Value, tk types.Type, pos token.Pos) (k Value, loop, done *BasicBlock) {
+ //
+ // iter = 0
+ // if 0 < x goto body else done
+ // loop: (target of continue)
+ // iter++
+ // if iter < x goto body else done
+ // body:
+ // k = x
+ // ...body...
+ // jump loop
+ // done: (target of break)
+
+ if isUntyped(x.Type()) {
+ x = emitConv(fn, x, tInt)
+ }
+
+ T := x.Type()
+ iter := emitLocal(fn, T, token.NoPos, "rangeint.iter")
+ // x may be unsigned. Avoid initializing x to -1.
+
+ body := fn.newBasicBlock("rangeint.body")
+ done = fn.newBasicBlock("rangeint.done")
+ emitIf(fn, emitCompare(fn, token.LSS, zeroConst(T), x, token.NoPos), body, done)
+
+ loop = fn.newBasicBlock("rangeint.loop")
+ fn.currentBlock = loop
+
+ incr := &BinOp{
+ Op: token.ADD,
+ X: emitLoad(fn, iter),
+ Y: emitConv(fn, vOne, T),
+ }
+ incr.setType(T)
+ emitStore(fn, iter, fn.emit(incr), pos)
+ emitIf(fn, emitCompare(fn, token.LSS, incr, x, token.NoPos), body, done)
+ fn.currentBlock = body
+
+ if tk != nil {
+ // Integer types (int, uint8, etc.) are named and
+ // we know that k is assignable to x when tk != nil.
+ // This implies tk and T are identical so no conversion is needed.
+ k = emitLoad(fn, iter)
+ }
+
+ return
+}
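+
+// This header implements the go1.22 "range over int" form, e.g.
+//
+//	for i := range n {
+//		...
+//	}
+//
+// which iterates i = 0, 1, ..., n-1 and runs zero times when n <= 0, hence
+// the extra 0 < x test before the loop is entered.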
+
+// rangeStmt emits to fn code for the range statement s, optionally
+// labelled by label.
+func (b *builder) rangeStmt(fn *Function, s *ast.RangeStmt, label *lblock) {
+ var tk, tv types.Type
+ if s.Key != nil && !isBlankIdent(s.Key) {
+ tk = fn.typeOf(s.Key)
+ }
+ if s.Value != nil && !isBlankIdent(s.Value) {
+ tv = fn.typeOf(s.Value)
+ }
+
+ // create locals for s.Key and s.Value.
+ createVars := func() {
+ // Unlike a short variable declaration, a RangeStmt
+ // using := never redeclares an existing variable; it
+ // always creates a new one.
+ if tk != nil {
+ emitLocalVar(fn, identVar(fn, s.Key.(*ast.Ident)))
+ }
+ if tv != nil {
+ emitLocalVar(fn, identVar(fn, s.Value.(*ast.Ident)))
+ }
+ }
+
+ afterGo122 := versions.AtLeast(fn.goversion, versions.Go1_22)
+ if s.Tok == token.DEFINE && !afterGo122 {
+ // pre-go1.22: If iteration variables are defined (:=), this
+ // occurs once outside the loop.
+ createVars()
+ }
+
+ x := b.expr(fn, s.X)
+
+ var k, v Value
+ var loop, done *BasicBlock
+ switch rt := typeparams.CoreType(x.Type()).(type) {
+ case *types.Slice, *types.Array, *types.Pointer: // *array
+ k, v, loop, done = b.rangeIndexed(fn, x, tv, s.For)
+
+ case *types.Chan:
+ k, loop, done = b.rangeChan(fn, x, tk, s.For)
+
+ case *types.Map:
+ k, v, loop, done = b.rangeIter(fn, x, tk, tv, s.For)
+
+ case *types.Basic:
+ switch {
+ case rt.Info()&types.IsString != 0:
+ k, v, loop, done = b.rangeIter(fn, x, tk, tv, s.For)
+
+ case rt.Info()&types.IsInteger != 0:
+ k, loop, done = b.rangeInt(fn, x, tk, s.For)
+
+ default:
+ panic("Cannot range over basic type: " + rt.String())
+ }
+
+ case *types.Signature:
+ // Special case rewrite (fn.goversion >= go1.23):
+ // for x := range f { ... }
+ // into
+ // f(func(x T) bool { ... })
+ b.rangeFunc(fn, x, tk, tv, s, label)
+ return
+
+ default:
+ panic("Cannot range over: " + rt.String())
+ }
+
+ if s.Tok == token.DEFINE && afterGo122 {
+ // go1.22: If iteration variables are defined (:=), this occurs inside the loop.
+ createVars()
+ }
+
+ // Evaluate both LHS expressions before we update either.
+ var kl, vl lvalue
+ if tk != nil {
+ kl = b.addr(fn, s.Key, false) // non-escaping
+ }
+ if tv != nil {
+ vl = b.addr(fn, s.Value, false) // non-escaping
+ }
+ if tk != nil {
+ kl.store(fn, k)
+ }
+ if tv != nil {
+ vl.store(fn, v)
+ }
+
+ if label != nil {
+ label._break = done
+ label._continue = loop
+ }
+
+ fn.targets = &targets{
+ tail: fn.targets,
+ _break: done,
+ _continue: loop,
+ }
+ b.stmt(fn, s.Body)
+ fn.targets = fn.targets.tail
+ emitJump(fn, loop) // back-edge
+ fn.currentBlock = done
+}
+
+// rangeFunc emits to fn code for the range-over-func rng.Body of the iterator
+// function x, optionally labelled by label. It creates a new anonymous function
+// yield for rng and builds the function.
+func (b *builder) rangeFunc(fn *Function, x Value, tk, tv types.Type, rng *ast.RangeStmt, label *lblock) {
+ // Consider the SSA code for the outermost range-over-func in fn:
+ //
+ // func fn(...) (ret R) {
+ // ...
+ // for k, v = range x {
+ // ...
+ // }
+ // ...
+ // }
+ //
+ // The code emitted into fn will look something like this.
+ //
+ // loop:
+ // jump := READY
+ // y := make closure yield [ret, deferstack, jump, k, v]
+ // x(y)
+ // switch jump {
+ // [see resuming execution]
+ // }
+ // goto done
+ // done:
+ // ...
+ //
+ // where yield is a new synthetic yield function:
+ //
+ // func yield(_k tk, _v tv) bool
+ // free variables: [ret, stack, jump, k, v]
+ // {
+ // entry:
+ // if jump != READY then goto invalid else valid
+ // invalid:
+ // panic("iterator called when it is not in a ready state")
+ // valid:
+ // jump = BUSY
+ // k = _k
+ // v = _v
+ // ...
+ // cont:
+ // jump = READY
+ // return true
+ // }
+ //
+ // Yield state:
+ //
+ // Each range loop has an associated jump variable that records
+ // the state of the iterator. A yield function is initially
+ // in a READY (0) and callable state. If the yield function is called
+ // and is not in READY state, it panics. When it is called in a callable
+ // state, it becomes BUSY. When execution reaches the end of the body
+ // of the loop (or a continue statement targeting the loop is executed),
+ // the yield function returns true and resumes being in a READY state.
+ // After the iterator function x(y) returns, then if the yield function
+ // is in a READY state, the yield enters the DONE state.
+ //
+ // Each lowered control statement (break X, continue X, goto Z, or return)
+ // that exits the loop sets the variable to a unique positive EXIT value,
+ // before returning false from the yield function.
+ //
+ // If the yield function returns abruptly due to a panic or GoExit,
+ // it remains in a BUSY state. The generated code asserts that, after
+ // the iterator call x(y) returns normally, the jump variable state
+ // is DONE.
+ //
+ // Resuming execution:
+ //
+ // The code generated for the range statement checks the jump
+ // variable to determine how to resume execution.
+ //
+ // switch jump {
+ // case BUSY: panic("...")
+ // case DONE: goto done
+ // case READY: state = DONE; goto done
+ // case 123: ... // action for exit 123.
+ // case 456: ... // action for exit 456.
+ // ...
+ // }
+ //
+ // Forward goto statements within a yield are jumps to labels that
+ // have not yet been traversed in fn. They may be in the Body of the
+ // function. What we emit for these is:
+ //
+ // goto target
+ // target:
+ // ...
+ //
+ // We leave an unresolved exit in yield.exits to check at the end
+ // of building yield if it encountered target in the body. If it
+ // encountered target, no additional work is required. Otherwise,
+ // the yield emits a new early exit in the basic block for target.
+ // We expect that blockopt will fuse the early exit into the case
+ // block later. The unresolved exit is then added to yield.parent.exits.
+
+ loop := fn.newBasicBlock("rangefunc.loop")
+ done := fn.newBasicBlock("rangefunc.done")
+
+ // These are targets within y.
+ fn.targets = &targets{
+ tail: fn.targets,
+ _break: done,
+ // _continue is within y.
+ }
+ if label != nil {
+ label._break = done
+ // _continue is within y
+ }
+
+ emitJump(fn, loop)
+ fn.currentBlock = loop
+
+ // loop:
+ // jump := READY
+
+ anonIdx := len(fn.AnonFuncs)
+
+ jump := newVar(fmt.Sprintf("jump$%d", anonIdx+1), tInt)
+ emitLocalVar(fn, jump) // zero value is READY
+
+ xsig := typeparams.CoreType(x.Type()).(*types.Signature)
+ ysig := typeparams.CoreType(xsig.Params().At(0).Type()).(*types.Signature)
+
+ /* synthetic yield function for body of range-over-func loop */
+ y := &Function{
+ name: fmt.Sprintf("%s$%d", fn.Name(), anonIdx+1),
+ Signature: ysig,
+ Synthetic: "range-over-func yield",
+ pos: rangePosition(rng),
+ parent: fn,
+ anonIdx: int32(len(fn.AnonFuncs)),
+ Pkg: fn.Pkg,
+ Prog: fn.Prog,
+ syntax: rng,
+ info: fn.info,
+ goversion: fn.goversion,
+ build: (*builder).buildYieldFunc,
+ topLevelOrigin: nil,
+ typeparams: fn.typeparams,
+ typeargs: fn.typeargs,
+ subst: fn.subst,
+ jump: jump,
+ deferstack: fn.deferstack,
+ returnVars: fn.returnVars, // use the parent's return variables
+ uniq: fn.uniq, // start from parent's unique values
+ }
+
+ // If the RangeStmt has a label, this is how it is passed to buildYieldFunc.
+ if label != nil {
+ y.lblocks = map[*types.Label]*lblock{label.label: nil}
+ }
+ fn.AnonFuncs = append(fn.AnonFuncs, y)
+
+ // Build y immediately. It may:
+ // * cause fn's locals to escape, and
+ // * create new exit nodes in exits.
+ // (y is not marked 'built' until the end of the enclosing FuncDecl.)
+ unresolved := len(fn.exits)
+ y.build(b, y)
+ fn.uniq = y.uniq // resume after y's unique values
+
+ // Emit the call of y.
+ // c := MakeClosure y
+ // x(c)
+ c := &MakeClosure{Fn: y}
+ c.setType(ysig)
+ for _, fv := range y.FreeVars {
+ c.Bindings = append(c.Bindings, fv.outer)
+ fv.outer = nil
+ }
+ fn.emit(c)
+ call := Call{
+ Call: CallCommon{
+ Value: x,
+ Args: []Value{c},
+ pos: token.NoPos,
+ },
+ }
+ call.setType(xsig.Results())
+ fn.emit(&call)
+
+ exits := fn.exits[unresolved:]
+ b.buildYieldResume(fn, jump, exits, done)
+
+ emitJump(fn, done)
+ fn.currentBlock = done
+}
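+
+// As a concrete example of the rewrite, a go1.23 loop such as
+//
+//	for x := range seq {
+//		use(x)
+//	}
+//
+// where seq has type func(yield func(int) bool), is built approximately as
+//
+//	seq(func(x int) bool {
+//		use(x)
+//		return true
+//	})
+//
+// with the jump variable recording whether the body exited via break,
+// continue, goto or return, so that buildYieldResume can resume fn correctly
+// after the call to seq returns.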
+
+// buildYieldResume emits to fn code for how to resume execution once a call to
+// the iterator function over the yield function returns x(y). It does this by building
+// a switch over the value of jump for when it is READY, BUSY, or EXIT(id).
+func (b *builder) buildYieldResume(fn *Function, jump *types.Var, exits []*exit, done *BasicBlock) {
+ // v := *jump
+ // switch v {
+ // case BUSY: panic("...")
+ // case READY: jump = DONE; goto done
+ // case EXIT(a): ...
+ // case EXIT(b): ...
+ // ...
+ // }
+ v := emitLoad(fn, fn.lookup(jump, false))
+
+ // case BUSY: panic("...")
+ isbusy := fn.newBasicBlock("rangefunc.resume.busy")
+ ifready := fn.newBasicBlock("rangefunc.resume.ready.check")
+ emitIf(fn, emitCompare(fn, token.EQL, v, jBusy, token.NoPos), isbusy, ifready)
+ fn.currentBlock = isbusy
+ fn.emit(&Panic{
+ X: emitConv(fn, stringConst("iterator call did not preserve panic"), tEface),
+ })
+ fn.currentBlock = ifready
+
+ // case READY: jump = DONE; goto done
+ isready := fn.newBasicBlock("rangefunc.resume.ready")
+ ifexit := fn.newBasicBlock("rangefunc.resume.exits")
+ emitIf(fn, emitCompare(fn, token.EQL, v, jReady, token.NoPos), isready, ifexit)
+ fn.currentBlock = isready
+ storeVar(fn, jump, jDone, token.NoPos)
+ emitJump(fn, done)
+ fn.currentBlock = ifexit
+
+ for _, e := range exits {
+ id := intConst(e.id)
+
+ // case EXIT(id): { /* do e */ }
+ cond := emitCompare(fn, token.EQL, v, id, e.pos)
+ matchb := fn.newBasicBlock("rangefunc.resume.match")
+ cndb := fn.newBasicBlock("rangefunc.resume.cnd")
+ emitIf(fn, cond, matchb, cndb)
+ fn.currentBlock = matchb
+
+ // Cases to fill in the { /* do e */ } bit.
+ switch {
+ case e.label != nil: // forward goto?
+ // case EXIT(id): goto lb // label
+ lb := fn.lblockOf(e.label)
+ // Do not mark lb as resolved.
+ // If fn does not contain label, lb remains unresolved and
+ // fn must itself be a range-over-func function. lb will be:
+ // lb:
+ // fn.jump = id
+ // return false
+ emitJump(fn, lb._goto)
+
+ case e.to != fn: // e jumps to an ancestor of fn?
+ // case EXIT(id): { fn.jump = id; return false }
+ // fn is a range-over-func function.
+ storeVar(fn, fn.jump, id, token.NoPos)
+ fn.emit(&Return{Results: []Value{vFalse}, pos: e.pos})
+
+ case e.block == nil && e.label == nil: // return from fn?
+ // case EXIT(id): { return ... }
+ fn.emit(new(RunDefers))
+ results := make([]Value, len(fn.results))
+ for i, r := range fn.results {
+ results[i] = emitLoad(fn, r)
+ }
+ fn.emit(&Return{Results: results, pos: e.pos})
+
+ case e.block != nil:
+ // case EXIT(id): goto block
+ emitJump(fn, e.block)
+
+ default:
+ panic("unreachable")
+ }
+ fn.currentBlock = cndb
+ }
+}
+
+// stmt lowers statement s to SSA form, emitting code to fn.
+func (b *builder) stmt(fn *Function, _s ast.Stmt) {
+ // The label of the current statement. If non-nil, its _goto
+ // target is always set; its _break and _continue are set only
+ // within the body of switch/typeswitch/select/for/range.
+ // It is effectively an additional default-nil parameter of stmt().
+ var label *lblock
+start:
+ switch s := _s.(type) {
+ case *ast.EmptyStmt:
+ // ignore. (Usually removed by gofmt.)
+
+ case *ast.DeclStmt: // Con, Var or Typ
+ d := s.Decl.(*ast.GenDecl)
+ if d.Tok == token.VAR {
+ for _, spec := range d.Specs {
+ if vs, ok := spec.(*ast.ValueSpec); ok {
+ b.localValueSpec(fn, vs)
+ }
+ }
+ }
+
+ case *ast.LabeledStmt:
+ if s.Label.Name == "_" {
+ // Blank labels can't be the target of a goto, break,
+ // or continue statement, so we don't need a new block.
+ _s = s.Stmt
+ goto start
+ }
+ label = fn.lblockOf(fn.label(s.Label))
+ label.resolved = true
+ emitJump(fn, label._goto)
+ fn.currentBlock = label._goto
+ _s = s.Stmt
+ goto start // effectively: tailcall stmt(fn, s.Stmt, label)
+
+ case *ast.ExprStmt:
+ b.expr(fn, s.X)
+
+ case *ast.SendStmt:
+ chtyp := typeparams.CoreType(fn.typeOf(s.Chan)).(*types.Chan)
+ fn.emit(&Send{
+ Chan: b.expr(fn, s.Chan),
+ X: emitConv(fn, b.expr(fn, s.Value), chtyp.Elem()),
+ pos: s.Arrow,
+ })
+
+ case *ast.IncDecStmt:
+ op := token.ADD
+ if s.Tok == token.DEC {
+ op = token.SUB
+ }
+ loc := b.addr(fn, s.X, false)
+ b.assignOp(fn, loc, NewConst(constant.MakeInt64(1), loc.typ()), op, s.Pos())
+
+ case *ast.AssignStmt:
+ switch s.Tok {
+ case token.ASSIGN, token.DEFINE:
+ b.assignStmt(fn, s.Lhs, s.Rhs, s.Tok == token.DEFINE)
+
+ default: // +=, etc.
+ op := s.Tok + token.ADD - token.ADD_ASSIGN
+ b.assignOp(fn, b.addr(fn, s.Lhs[0], false), b.expr(fn, s.Rhs[0]), op, s.Pos())
+ }
+
+ case *ast.GoStmt:
+ // The "intrinsics" new/make/len/cap are forbidden here.
+ // panic is treated like an ordinary function call.
+ v := Go{pos: s.Go}
+ b.setCall(fn, s.Call, &v.Call)
+ fn.emit(&v)
+
+ case *ast.DeferStmt:
+ // The "intrinsics" new/make/len/cap are forbidden here.
+ // panic is treated like an ordinary function call.
+ deferstack := emitLoad(fn, fn.lookup(fn.deferstack, false))
+ v := Defer{pos: s.Defer, DeferStack: deferstack}
+ b.setCall(fn, s.Call, &v.Call)
+ fn.emit(&v)
+
+ // A deferred call can cause recovery from panic,
+ // and control resumes at the Recover block.
+ createRecoverBlock(fn.source)
+
+ case *ast.ReturnStmt:
+ b.returnStmt(fn, s)
+
+ case *ast.BranchStmt:
+ b.branchStmt(fn, s)
+
+ case *ast.BlockStmt:
+ b.stmtList(fn, s.List)
+
+ case *ast.IfStmt:
+ if s.Init != nil {
+ b.stmt(fn, s.Init)
+ }
+ then := fn.newBasicBlock("if.then")
+ done := fn.newBasicBlock("if.done")
+ els := done
+ if s.Else != nil {
+ els = fn.newBasicBlock("if.else")
+ }
+ b.cond(fn, s.Cond, then, els)
+ fn.currentBlock = then
+ b.stmt(fn, s.Body)
+ emitJump(fn, done)
+
+ if s.Else != nil {
+ fn.currentBlock = els
+ b.stmt(fn, s.Else)
+ emitJump(fn, done)
+ }
+
+ fn.currentBlock = done
+
+ case *ast.SwitchStmt:
+ b.switchStmt(fn, s, label)
+
+ case *ast.TypeSwitchStmt:
+ b.typeSwitchStmt(fn, s, label)
+
+ case *ast.SelectStmt:
+ b.selectStmt(fn, s, label)
+
+ case *ast.ForStmt:
+ b.forStmt(fn, s, label)
+
+ case *ast.RangeStmt:
+ b.rangeStmt(fn, s, label)
+
+ default:
+ panic(fmt.Sprintf("unexpected statement kind: %T", s))
+ }
+}
+
+func (b *builder) branchStmt(fn *Function, s *ast.BranchStmt) {
+ var block *BasicBlock
+ if s.Label == nil {
+ block = targetedBlock(fn, s.Tok)
+ } else {
+ target := fn.label(s.Label)
+ block = labelledBlock(fn, target, s.Tok)
+ if block == nil { // forward goto
+ lb := fn.lblockOf(target)
+ block = lb._goto // jump to lb._goto
+ if fn.jump != nil {
+ // fn is a range-over-func and the goto may exit fn.
+ // Create an exit and resolve it at the end of
+ // builder.buildYieldFunc.
+ labelExit(fn, target, s.Pos())
+ }
+ }
+ }
+ to := block.parent
+
+ if to == fn {
+ emitJump(fn, block)
+ } else { // break outside of fn.
+ // fn must be a range-over-func
+ e := blockExit(fn, block, s.Pos())
+ storeVar(fn, fn.jump, intConst(e.id), e.pos)
+ fn.emit(&Return{Results: []Value{vFalse}, pos: e.pos})
+ }
+ fn.currentBlock = fn.newBasicBlock("unreachable")
+}
+
+func (b *builder) returnStmt(fn *Function, s *ast.ReturnStmt) {
+ var results []Value
+
+ sig := fn.source.Signature // signature of the enclosing source function
+
+ // Convert return operands to result type.
+ if len(s.Results) == 1 && sig.Results().Len() > 1 {
+ // Return of one expression in a multi-valued function.
+ tuple := b.exprN(fn, s.Results[0])
+ ttuple := tuple.Type().(*types.Tuple)
+ for i, n := 0, ttuple.Len(); i < n; i++ {
+ results = append(results,
+ emitConv(fn, emitExtract(fn, tuple, i),
+ sig.Results().At(i).Type()))
+ }
+ } else {
+ // 1:1 return, or no-arg return in non-void function.
+ for i, r := range s.Results {
+ v := emitConv(fn, b.expr(fn, r), sig.Results().At(i).Type())
+ results = append(results, v)
+ }
+ }
+
+ // Store the results.
+ for i, r := range results {
+ var result Value // fn.source.result[i] conceptually
+ if fn == fn.source {
+ result = fn.results[i]
+ } else { // lookup needed?
+ result = fn.lookup(fn.returnVars[i], false)
+ }
+ emitStore(fn, result, r, s.Return)
+ }
+
+ if fn.jump != nil {
+ // Return from body of a range-over-func.
+ // The return statement is syntactically within the loop,
+ // but the generated code is in the 'switch jump {...}' after it.
+ e := returnExit(fn, s.Pos())
+ storeVar(fn, fn.jump, intConst(e.id), e.pos)
+ fn.emit(&Return{Results: []Value{vFalse}, pos: e.pos})
+ fn.currentBlock = fn.newBasicBlock("unreachable")
+ return
+ }
+
+ // Run function calls deferred in this
+ // function when explicitly returning from it.
+ fn.emit(new(RunDefers))
+ // Reload (potentially) named result variables to form the result tuple.
+ results = results[:0]
+ for _, nr := range fn.results {
+ results = append(results, emitLoad(fn, nr))
+ }
+ fn.emit(&Return{Results: results, pos: s.Return})
+ fn.currentBlock = fn.newBasicBlock("unreachable")
+}
+
+// A buildFunc is a strategy for building the SSA body for a function.
+type buildFunc = func(*builder, *Function)
+
+// iterate causes all created but unbuilt functions to be built. As
+// this may create new methods, the process is iterated until it
+// converges.
+//
+// Waits for any dependencies to finish building.
+func (b *builder) iterate() {
+ for ; b.finished < len(b.fns); b.finished++ {
+ fn := b.fns[b.finished]
+ b.buildFunction(fn)
+ }
+
+ b.buildshared.markDone()
+ b.buildshared.wait()
+}
+
+// buildFunction builds SSA code for the body of function fn. Idempotent.
+func (b *builder) buildFunction(fn *Function) {
+ if fn.build != nil {
+ assert(fn.parent == nil, "anonymous functions should not be built by buildFunction()")
+
+ if fn.Prog.mode&LogSource != 0 {
+ defer logStack("build %s @ %s", fn, fn.Prog.Fset.Position(fn.pos))()
+ }
+ fn.build(b, fn)
+ fn.done()
+ }
+}
+
+// buildParamsOnly builds fn.Params from fn.Signature, but does not build fn.Body.
+func (b *builder) buildParamsOnly(fn *Function) {
+ // For external (C, asm) functions or functions loaded from
+ // export data, we must set fn.Params even though there is no
+ // body code to reference them.
+ if recv := fn.Signature.Recv(); recv != nil {
+ fn.addParamVar(recv)
+ }
+ params := fn.Signature.Params()
+ for i, n := 0, params.Len(); i < n; i++ {
+ fn.addParamVar(params.At(i))
+ }
+}
+
+// buildFromSyntax builds fn.Body from fn.syntax, which must be non-nil.
+func (b *builder) buildFromSyntax(fn *Function) {
+ var (
+ recvField *ast.FieldList
+ body *ast.BlockStmt
+ functype *ast.FuncType
+ )
+ switch syntax := fn.syntax.(type) {
+ case *ast.FuncDecl:
+ functype = syntax.Type
+ recvField = syntax.Recv
+ body = syntax.Body
+ if body == nil {
+ b.buildParamsOnly(fn) // no body (non-Go function)
+ return
+ }
+ case *ast.FuncLit:
+ functype = syntax.Type
+ body = syntax.Body
+ case nil:
+ panic("no syntax")
+ default:
+ panic(syntax) // unexpected syntax
+ }
+ fn.source = fn
+ fn.startBody()
+ fn.createSyntacticParams(recvField, functype)
+ fn.createDeferStack()
+ b.stmt(fn, body)
+ if cb := fn.currentBlock; cb != nil && (cb == fn.Blocks[0] || cb == fn.Recover || cb.Preds != nil) {
+ // Control fell off the end of the function's body block.
+ //
+ // Block optimizations eliminate the current block, if
+ // unreachable. It is a builder invariant that
+ // if this no-arg return is ill-typed for
+ // fn.Signature.Results, this block must be
+ // unreachable. The sanity checker checks this.
+ fn.emit(new(RunDefers))
+ fn.emit(new(Return))
+ }
+ fn.finishBody()
+}
+
+// buildYieldFunc builds the body of the yield function created
+// from a range-over-func *ast.RangeStmt.
+func (b *builder) buildYieldFunc(fn *Function) {
+ // See builder.rangeFunc for detailed documentation on how fn is set up.
+ //
+	// In pseudo-Go this roughly builds:
+ // func yield(_k tk, _v tv) bool {
+ // if jump != READY { panic("yield function called after range loop exit") }
+ // jump = BUSY
+ // k, v = _k, _v // assign the iterator variable (if needed)
+ // ... // rng.Body
+ // continue:
+ // jump = READY
+ // return true
+ // }
+ s := fn.syntax.(*ast.RangeStmt)
+ fn.source = fn.parent.source
+ fn.startBody()
+ params := fn.Signature.Params()
+ for i := 0; i < params.Len(); i++ {
+ fn.addParamVar(params.At(i))
+ }
+
+ // Initial targets
+ ycont := fn.newBasicBlock("yield-continue")
+ // lblocks is either {} or is {label: nil} where label is the label of syntax.
+ for label := range fn.lblocks {
+ fn.lblocks[label] = &lblock{
+ label: label,
+ resolved: true,
+ _goto: ycont,
+ _continue: ycont,
+ // `break label` statement targets fn.parent.targets._break
+ }
+ }
+ fn.targets = &targets{
+ _continue: ycont,
+ // `break` statement targets fn.parent.targets._break.
+ }
+
+ // continue:
+ // jump = READY
+ // return true
+ saved := fn.currentBlock
+ fn.currentBlock = ycont
+ storeVar(fn, fn.jump, jReady, s.Body.Rbrace)
+ // A yield function's own deferstack is always empty, so rundefers is not needed.
+ fn.emit(&Return{Results: []Value{vTrue}, pos: token.NoPos})
+
+ // Emit header:
+ //
+	//	if jump != READY { panic("yield function called after range loop exit") }
+ // jump = BUSY
+ // k, v = _k, _v
+ fn.currentBlock = saved
+ yloop := fn.newBasicBlock("yield-loop")
+ invalid := fn.newBasicBlock("yield-invalid")
+
+ jumpVal := emitLoad(fn, fn.lookup(fn.jump, true))
+ emitIf(fn, emitCompare(fn, token.EQL, jumpVal, jReady, token.NoPos), yloop, invalid)
+ fn.currentBlock = invalid
+ fn.emit(&Panic{
+ X: emitConv(fn, stringConst("yield function called after range loop exit"), tEface),
+ })
+
+ fn.currentBlock = yloop
+ storeVar(fn, fn.jump, jBusy, s.Body.Rbrace)
+
+ // Initialize k and v from params.
+ var tk, tv types.Type
+ if s.Key != nil && !isBlankIdent(s.Key) {
+ tk = fn.typeOf(s.Key) // fn.parent.typeOf is identical
+ }
+ if s.Value != nil && !isBlankIdent(s.Value) {
+ tv = fn.typeOf(s.Value)
+ }
+ if s.Tok == token.DEFINE {
+ if tk != nil {
+ emitLocalVar(fn, identVar(fn, s.Key.(*ast.Ident)))
+ }
+ if tv != nil {
+ emitLocalVar(fn, identVar(fn, s.Value.(*ast.Ident)))
+ }
+ }
+ var k, v Value
+ if len(fn.Params) > 0 {
+ k = fn.Params[0]
+ }
+ if len(fn.Params) > 1 {
+ v = fn.Params[1]
+ }
+ var kl, vl lvalue
+ if tk != nil {
+ kl = b.addr(fn, s.Key, false) // non-escaping
+ }
+ if tv != nil {
+ vl = b.addr(fn, s.Value, false) // non-escaping
+ }
+ if tk != nil {
+ kl.store(fn, k)
+ }
+ if tv != nil {
+ vl.store(fn, v)
+ }
+
+ // Build the body of the range loop.
+ b.stmt(fn, s.Body)
+ if cb := fn.currentBlock; cb != nil && (cb == fn.Blocks[0] || cb == fn.Recover || cb.Preds != nil) {
+ // Control fell off the end of the function's body block.
+ // Block optimizations eliminate the current block, if
+ // unreachable.
+ emitJump(fn, ycont)
+ }
+
+ // Clean up exits and promote any unresolved exits to fn.parent.
+ for _, e := range fn.exits {
+ if e.label != nil {
+ lb := fn.lblocks[e.label]
+ if lb.resolved {
+ // label was resolved. Do not turn lb into an exit.
+ // e does not need to be handled by the parent.
+ continue
+ }
+
+ // _goto becomes an exit.
+ // _goto:
+ // jump = id
+ // return false
+ fn.currentBlock = lb._goto
+ id := intConst(e.id)
+ storeVar(fn, fn.jump, id, e.pos)
+ fn.emit(&Return{Results: []Value{vFalse}, pos: e.pos})
+ }
+
+ if e.to != fn { // e needs to be handled by the parent too.
+ fn.parent.exits = append(fn.parent.exits, e)
+ }
+ }
+
+ fn.finishBody()
+}
+
+// addRuntimeType records t as a runtime type,
+// along with all types derivable from it using reflection.
+//
+// Acquires prog.runtimeTypesMu.
+func addRuntimeType(prog *Program, t types.Type) {
+ prog.runtimeTypesMu.Lock()
+ defer prog.runtimeTypesMu.Unlock()
+ forEachReachable(&prog.MethodSets, t, func(t types.Type) bool {
+ prev, _ := prog.runtimeTypes.Set(t, true).(bool)
+ return !prev // already seen?
+ })
+}
+
+// Build calls Package.Build for each package in prog.
+// Building occurs in parallel unless the BuildSerially mode flag was set.
+//
+// Build is intended for whole-program analysis; a typical compiler
+// need only build a single package.
+//
+// Build is idempotent and thread-safe.
+func (prog *Program) Build() {
+ var wg sync.WaitGroup
+ for _, p := range prog.packages {
+ if prog.mode&BuildSerially != 0 {
+ p.Build()
+ } else {
+ wg.Add(1)
+ cpuLimit <- unit{} // acquire a token
+ go func(p *Package) {
+ p.Build()
+ wg.Done()
+ <-cpuLimit // release a token
+ }(p)
+ }
+ }
+ wg.Wait()
+}
+
+// cpuLimit is a counting semaphore to limit CPU parallelism.
+var cpuLimit = make(chan unit, runtime.GOMAXPROCS(0))
+
+// Build builds SSA code for all functions and vars in package p.
+//
+// CreatePackage must have been called for all of p's direct imports
+// (and hence its direct imports must have been error-free). It is not
+// necessary to call CreatePackage for indirect dependencies.
+// Functions will be created for all necessary methods in those
+// packages on demand.
+//
+// Build is idempotent and thread-safe.
+func (p *Package) Build() { p.buildOnce.Do(p.build) }
+
+func (p *Package) build() {
+ if p.info == nil {
+ return // synthetic package, e.g. "testmain"
+ }
+ if p.Prog.mode&LogSource != 0 {
+ defer logStack("build %s", p)()
+ }
+
+ b := builder{fns: p.created}
+ b.iterate()
+
+ // We no longer need transient information: ASTs or go/types deductions.
+ p.info = nil
+ p.created = nil
+ p.files = nil
+ p.initVersion = nil
+
+ if p.Prog.mode&SanityCheckFunctions != 0 {
+ sanityCheckPackage(p)
+ }
+}
+
+// buildPackageInit builds fn.Body for the synthetic package initializer.
+func (b *builder) buildPackageInit(fn *Function) {
+ p := fn.Pkg
+ fn.startBody()
+
+ var done *BasicBlock
+
+ if p.Prog.mode&BareInits == 0 {
+ // Make init() skip if package is already initialized.
+ initguard := p.Var("init$guard")
+ doinit := fn.newBasicBlock("init.start")
+ done = fn.newBasicBlock("init.done")
+ emitIf(fn, emitLoad(fn, initguard), done, doinit)
+ fn.currentBlock = doinit
+ emitStore(fn, initguard, vTrue, token.NoPos)
+
+ // Call the init() function of each package we import.
+ for _, pkg := range p.Pkg.Imports() {
+ prereq := p.Prog.packages[pkg]
+ if prereq == nil {
+ panic(fmt.Sprintf("Package(%q).Build(): unsatisfied import: Program.CreatePackage(%q) was not called", p.Pkg.Path(), pkg.Path()))
+ }
+ var v Call
+ v.Call.Value = prereq.init
+ v.Call.pos = fn.pos
+ v.setType(types.NewTuple())
+ fn.emit(&v)
+ }
+ }
+
+ // Initialize package-level vars in correct order.
+ if len(p.info.InitOrder) > 0 && len(p.files) == 0 {
+ panic("no source files provided for package. cannot initialize globals")
+ }
+
+ for _, varinit := range p.info.InitOrder {
+ if fn.Prog.mode&LogSource != 0 {
+ fmt.Fprintf(os.Stderr, "build global initializer %v @ %s\n",
+ varinit.Lhs, p.Prog.Fset.Position(varinit.Rhs.Pos()))
+ }
+ // Initializers for global vars are evaluated in dependency
+ // order, but may come from arbitrary files of the package
+ // with different versions, so we transiently update
+ // fn.goversion for each one. (Since init is a synthetic
+ // function it has no syntax of its own that needs a version.)
+ fn.goversion = p.initVersion[varinit.Rhs]
+ if len(varinit.Lhs) == 1 {
+ // 1:1 initialization: var x, y = a(), b()
+ var lval lvalue
+ if v := varinit.Lhs[0]; v.Name() != "_" {
+ lval = &address{addr: p.objects[v].(*Global), pos: v.Pos()}
+ } else {
+ lval = blank{}
+ }
+ b.assign(fn, lval, varinit.Rhs, true, nil)
+ } else {
+ // n:1 initialization: var x, y := f()
+ tuple := b.exprN(fn, varinit.Rhs)
+ for i, v := range varinit.Lhs {
+ if v.Name() == "_" {
+ continue
+ }
+ emitStore(fn, p.objects[v].(*Global), emitExtract(fn, tuple, i), v.Pos())
+ }
+ }
+ }
+
+ // The rest of the init function is synthetic:
+ // no syntax, info, goversion.
+ fn.info = nil
+ fn.goversion = ""
+
+ // Call all of the declared init() functions in source order.
+ for _, file := range p.files {
+ for _, decl := range file.Decls {
+ if decl, ok := decl.(*ast.FuncDecl); ok {
+ id := decl.Name
+ if !isBlankIdent(id) && id.Name == "init" && decl.Recv == nil {
+ declaredInit := p.objects[p.info.Defs[id]].(*Function)
+ var v Call
+ v.Call.Value = declaredInit
+ v.setType(types.NewTuple())
+ p.init.emit(&v)
+ }
+ }
+ }
+ }
+
+ // Finish up init().
+ if p.Prog.mode&BareInits == 0 {
+ emitJump(fn, done)
+ fn.currentBlock = done
+ }
+ fn.emit(new(Return))
+ fn.finishBody()
+}
diff --git a/vendor/golang.org/x/tools/go/ssa/const.go b/vendor/golang.org/x/tools/go/ssa/const.go
new file mode 100644
index 0000000..2a4e0dd
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/ssa/const.go
@@ -0,0 +1,232 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssa
+
+// This file defines the Const SSA value type.
+
+import (
+ "fmt"
+ "go/constant"
+ "go/token"
+ "go/types"
+ "strconv"
+ "strings"
+
+ "golang.org/x/tools/internal/aliases"
+ "golang.org/x/tools/internal/typeparams"
+)
+
+// NewConst returns a new constant of the specified value and type.
+// val must be valid according to the specification of Const.Value.
+func NewConst(val constant.Value, typ types.Type) *Const {
+ if val == nil {
+ switch soleTypeKind(typ) {
+ case types.IsBoolean:
+ val = constant.MakeBool(false)
+ case types.IsInteger:
+ val = constant.MakeInt64(0)
+ case types.IsString:
+ val = constant.MakeString("")
+ }
+ }
+ return &Const{typ, val}
+}
+
+// soleTypeKind returns a BasicInfo for which constant.Value can
+// represent all zero values for the types in the type set.
+//
+// types.IsBoolean for false is a representative.
+// types.IsInteger for 0
+// types.IsString for ""
+// 0 otherwise.
+func soleTypeKind(typ types.Type) types.BasicInfo {
+ // State records the set of possible zero values (false, 0, "").
+ // Candidates (perhaps all) are eliminated during the type-set
+ // iteration, which executes at least once.
+ state := types.IsBoolean | types.IsInteger | types.IsString
+ underIs(typeSetOf(typ), func(ut types.Type) bool {
+ var c types.BasicInfo
+ if t, ok := ut.(*types.Basic); ok {
+ c = t.Info()
+ }
+ if c&types.IsNumeric != 0 { // int/float/complex
+ c = types.IsInteger
+ }
+ state = state & c
+ return state != 0
+ })
+ return state
+}
+
+// intConst returns an 'int' constant that evaluates to i.
+// (i is an int64 in case the host is narrower than the target.)
+func intConst(i int64) *Const {
+ return NewConst(constant.MakeInt64(i), tInt)
+}
+
+// stringConst returns a 'string' constant that evaluates to s.
+func stringConst(s string) *Const {
+ return NewConst(constant.MakeString(s), tString)
+}
+
+// zeroConst returns a new "zero" constant of the specified type.
+func zeroConst(t types.Type) *Const {
+ return NewConst(nil, t)
+}
+
+func (c *Const) RelString(from *types.Package) string {
+ var s string
+ if c.Value == nil {
+ s = zeroString(c.typ, from)
+ } else if c.Value.Kind() == constant.String {
+ s = constant.StringVal(c.Value)
+ const max = 20
+ // TODO(adonovan): don't cut a rune in half.
+ if len(s) > max {
+ s = s[:max-3] + "..." // abbreviate
+ }
+ s = strconv.Quote(s)
+ } else {
+ s = c.Value.String()
+ }
+ return s + ":" + relType(c.Type(), from)
+}
+
+// zeroString returns the string representation of the "zero" value of the type t.
+func zeroString(t types.Type, from *types.Package) string {
+ switch t := t.(type) {
+ case *types.Basic:
+ switch {
+ case t.Info()&types.IsBoolean != 0:
+ return "false"
+ case t.Info()&types.IsNumeric != 0:
+ return "0"
+ case t.Info()&types.IsString != 0:
+ return `""`
+ case t.Kind() == types.UnsafePointer:
+ fallthrough
+ case t.Kind() == types.UntypedNil:
+ return "nil"
+ default:
+ panic(fmt.Sprint("zeroString for unexpected type:", t))
+ }
+ case *types.Pointer, *types.Slice, *types.Interface, *types.Chan, *types.Map, *types.Signature:
+ return "nil"
+ case *types.Named, *aliases.Alias:
+ return zeroString(t.Underlying(), from)
+ case *types.Array, *types.Struct:
+ return relType(t, from) + "{}"
+ case *types.Tuple:
+ // Tuples are not normal values.
+		// We currently format this as "(t[0], ..., t[n])". Could be something else.
+ components := make([]string, t.Len())
+ for i := 0; i < t.Len(); i++ {
+ components[i] = zeroString(t.At(i).Type(), from)
+ }
+ return "(" + strings.Join(components, ", ") + ")"
+ case *types.TypeParam:
+ return "*new(" + relType(t, from) + ")"
+ }
+ panic(fmt.Sprint("zeroString: unexpected ", t))
+}
+
+func (c *Const) Name() string {
+ return c.RelString(nil)
+}
+
+func (c *Const) String() string {
+ return c.Name()
+}
+
+func (c *Const) Type() types.Type {
+ return c.typ
+}
+
+func (c *Const) Referrers() *[]Instruction {
+ return nil
+}
+
+func (c *Const) Parent() *Function { return nil }
+
+func (c *Const) Pos() token.Pos {
+ return token.NoPos
+}
+
+// IsNil returns true if this constant is a nil value of
+// a nillable reference type (pointer, slice, channel, map, or function),
+// a basic interface type, or
+// a type parameter all of whose possible instantiations are themselves nillable.
+func (c *Const) IsNil() bool {
+ return c.Value == nil && nillable(c.typ)
+}
+
+// nillable reports whether *new(T) == nil is legal for type T.
+func nillable(t types.Type) bool {
+ if typeparams.IsTypeParam(t) {
+ return underIs(typeSetOf(t), func(u types.Type) bool {
+ // empty type set (u==nil) => any underlying types => not nillable
+ return u != nil && nillable(u)
+ })
+ }
+ switch t.Underlying().(type) {
+ case *types.Pointer, *types.Slice, *types.Chan, *types.Map, *types.Signature:
+ return true
+ case *types.Interface:
+ return true // basic interface.
+ default:
+ return false
+ }
+}
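+
+// For example, a type parameter constrained by *int | map[string]int is
+// nillable (every term in its type set is), whereas one constrained by
+// int | *int is not, so only the former can be the type of a nil Const.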
+
+// TODO(adonovan): move everything below into golang.org/x/tools/go/ssa/interp.
+
+// Int64 returns the numeric value of this constant truncated to fit
+// a signed 64-bit integer.
+func (c *Const) Int64() int64 {
+ switch x := constant.ToInt(c.Value); x.Kind() {
+ case constant.Int:
+ if i, ok := constant.Int64Val(x); ok {
+ return i
+ }
+ return 0
+ case constant.Float:
+ f, _ := constant.Float64Val(x)
+ return int64(f)
+ }
+ panic(fmt.Sprintf("unexpected constant value: %T", c.Value))
+}
+
+// Uint64 returns the numeric value of this constant truncated to fit
+// an unsigned 64-bit integer.
+func (c *Const) Uint64() uint64 {
+ switch x := constant.ToInt(c.Value); x.Kind() {
+ case constant.Int:
+ if u, ok := constant.Uint64Val(x); ok {
+ return u
+ }
+ return 0
+ case constant.Float:
+ f, _ := constant.Float64Val(x)
+ return uint64(f)
+ }
+ panic(fmt.Sprintf("unexpected constant value: %T", c.Value))
+}
+
+// Float64 returns the numeric value of this constant truncated to fit
+// a float64.
+func (c *Const) Float64() float64 {
+ x := constant.ToFloat(c.Value) // (c.Value == nil) => x.Kind() == Unknown
+ f, _ := constant.Float64Val(x)
+ return f
+}
+
+// Complex128 returns the complex value of this constant truncated to
+// fit a complex128.
+func (c *Const) Complex128() complex128 {
+ x := constant.ToComplex(c.Value) // (c.Value == nil) => x.Kind() == Unknown
+ re, _ := constant.Float64Val(constant.Real(x))
+ im, _ := constant.Float64Val(constant.Imag(x))
+ return complex(re, im)
+}
diff --git a/vendor/golang.org/x/tools/go/ssa/coretype.go b/vendor/golang.org/x/tools/go/ssa/coretype.go
new file mode 100644
index 0000000..8c218f9
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/ssa/coretype.go
@@ -0,0 +1,161 @@
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssa
+
+import (
+ "go/types"
+
+ "golang.org/x/tools/internal/aliases"
+ "golang.org/x/tools/internal/typeparams"
+)
+
+// Utilities for dealing with core types.
+
+// isBytestring returns true if T has the same terms as interface{[]byte | string}.
+// These act like a core type for some operations: slice expressions, append and copy.
+//
+// See https://go.dev/ref/spec#Core_types for the details on bytestring.
+func isBytestring(T types.Type) bool {
+ U := T.Underlying()
+ if _, ok := U.(*types.Interface); !ok {
+ return false
+ }
+
+ tset := typeSetOf(U)
+ if tset.Len() != 2 {
+ return false
+ }
+ hasBytes, hasString := false, false
+ underIs(tset, func(t types.Type) bool {
+ switch {
+ case isString(t):
+ hasString = true
+ case isByteSlice(t):
+ hasBytes = true
+ }
+ return hasBytes || hasString
+ })
+ return hasBytes && hasString
+}
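+
+// For instance, interface{ []byte | string } is a bytestring (exactly two
+// terms, one byte slice and one string), while interface{ string } or
+// interface{ []byte | string | []rune } are not, since their term sets are
+// not exactly {[]byte, string}.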
+
+// termList is a list of types.
+type termList []*types.Term // type terms of the type set
+func (s termList) Len() int { return len(s) }
+func (s termList) At(i int) types.Type { return s[i].Type() }
+
+// typeSetOf returns the type set of typ. Returns an empty typeset on an error.
+func typeSetOf(typ types.Type) termList {
+	// This is an adaptation of x/exp/typeparams.NormalTerms, which x/tools cannot depend on.
+ var terms []*types.Term
+ var err error
+ // typeSetOf(t) == typeSetOf(Unalias(t))
+ switch typ := aliases.Unalias(typ).(type) {
+ case *types.TypeParam:
+ terms, err = typeparams.StructuralTerms(typ)
+ case *types.Union:
+ terms, err = typeparams.UnionTermSet(typ)
+ case *types.Interface:
+ terms, err = typeparams.InterfaceTermSet(typ)
+ default:
+ // Common case.
+ // Specializing the len=1 case to avoid a slice
+ // had no measurable space/time benefit.
+ terms = []*types.Term{types.NewTerm(false, typ)}
+ }
+
+ if err != nil {
+ return termList(nil)
+ }
+ return termList(terms)
+}
+
+// underIs calls f with the underlying types of the specific type terms
+// of s and reports whether all calls to f returned true. If there are
+// no specific terms, underIs returns the result of f(nil).
+func underIs(s termList, f func(types.Type) bool) bool {
+ if s.Len() == 0 {
+ return f(nil)
+ }
+ for i := 0; i < s.Len(); i++ {
+ u := s.At(i).Underlying()
+ if !f(u) {
+ return false
+ }
+ }
+ return true
+}
+
+// indexType returns the element type and index mode of a IndexExpr over a type.
+// It returns (nil, invalid) if the type is not indexable; this should never occur in a well-typed program.
+func indexType(typ types.Type) (types.Type, indexMode) {
+ switch U := typ.Underlying().(type) {
+ case *types.Array:
+ return U.Elem(), ixArrVar
+ case *types.Pointer:
+ if arr, ok := U.Elem().Underlying().(*types.Array); ok {
+ return arr.Elem(), ixVar
+ }
+ case *types.Slice:
+ return U.Elem(), ixVar
+ case *types.Map:
+ return U.Elem(), ixMap
+ case *types.Basic:
+ return tByte, ixValue // must be a string
+ case *types.Interface:
+ tset := typeSetOf(U)
+ if tset.Len() == 0 {
+			return nil, ixInvalid // no underlying terms, or an error left the type set empty.
+ }
+
+ elem, mode := indexType(tset.At(0))
+ for i := 1; i < tset.Len() && mode != ixInvalid; i++ {
+ e, m := indexType(tset.At(i))
+ if !types.Identical(elem, e) { // if type checked, just a sanity check
+ return nil, ixInvalid
+ }
+ // Update the mode to the most constrained address type.
+ mode = mode.meet(m)
+ }
+ if mode != ixInvalid {
+ return elem, mode
+ }
+ }
+ return nil, ixInvalid
+}
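+
+// Informally, for concrete operand types this yields:
+//
+//	[4]int          -> (int,  ixArrVar)
+//	*[4]int         -> (int,  ixVar)
+//	[]int           -> (int,  ixVar)
+//	map[string]int  -> (int,  ixMap)
+//	string          -> (byte, ixValue)
+//
+// For a type-parameter operand the modes of all terms are combined with
+// meet, so mixing a map term with any other kind of term yields ixInvalid.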
+
+// An indexMode specifies the (addressing) mode of an index operand.
+//
+// Addressing mode of an index operation is based on the set of
+// underlying types.
+// Hasse diagram of the indexMode meet semi-lattice:
+//
+// ixVar ixMap
+// | |
+// ixArrVar |
+// | |
+// ixValue |
+// \ /
+// ixInvalid
+type indexMode byte
+
+const (
+ ixInvalid indexMode = iota // index is invalid
+ ixValue // index is a computed value (not addressable)
+ ixArrVar // like ixVar, but index operand contains an array
+ ixVar // index is an addressable variable
+ ixMap // index is a map index expression (acts like a variable on lhs, commaok on rhs of an assignment)
+)
+
+// meet is the address type that is constrained by both x and y.
+func (x indexMode) meet(y indexMode) indexMode {
+ if (x == ixMap || y == ixMap) && x != y {
+ return ixInvalid
+ }
+ // Use int representation and return min.
+	if x < y {
+		return x
+	}
+	return y
+}
diff --git a/vendor/golang.org/x/tools/go/ssa/create.go b/vendor/golang.org/x/tools/go/ssa/create.go
new file mode 100644
index 0000000..423bce8
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/ssa/create.go
@@ -0,0 +1,318 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssa
+
+// This file implements the CREATE phase of SSA construction.
+// See builder.go for explanation.
+
+import (
+ "fmt"
+ "go/ast"
+ "go/token"
+ "go/types"
+ "os"
+ "sync"
+
+ "golang.org/x/tools/internal/versions"
+)
+
+// NewProgram returns a new SSA Program.
+//
+// mode controls diagnostics and checking during SSA construction.
+//
+// To construct an SSA program:
+//
+// - Call NewProgram to create an empty Program.
+// - Call CreatePackage providing typed syntax for each package
+// you want to build, and call it with types but not
+// syntax for each of those package's direct dependencies.
+// - Call [Package.Build] on each syntax package you wish to build,
+// or [Program.Build] to build all of them.
+//
+// See the Example tests for simple examples.
+func NewProgram(fset *token.FileSet, mode BuilderMode) *Program {
+ return &Program{
+ Fset: fset,
+ imported: make(map[string]*Package),
+ packages: make(map[*types.Package]*Package),
+ mode: mode,
+ canon: newCanonizer(),
+ ctxt: types.NewContext(),
+ }
+}
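+
+// A minimal client-side sketch of this flow (assuming fset, pkg, files and
+// info were produced by parsing and type-checking in the usual way):
+//
+//	prog := ssa.NewProgram(fset, ssa.SanityCheckFunctions)
+//	ssaPkg := prog.CreatePackage(pkg, files, info, true)
+//	// CreatePackage must also be called (types only is fine) for each of
+//	// pkg's direct dependencies before building.
+//	prog.Build() // or ssaPkg.Build() to build just this package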
+
+// memberFromObject populates package pkg with a member for the
+// typechecker object obj.
+//
+// For objects from Go source code, syntax is the associated syntax
+// tree (for funcs and vars only) and goversion defines the
+// appropriate interpretation; they will be used during the build
+// phase.
+func memberFromObject(pkg *Package, obj types.Object, syntax ast.Node, goversion string) {
+ name := obj.Name()
+ switch obj := obj.(type) {
+ case *types.Builtin:
+ if pkg.Pkg != types.Unsafe {
+ panic("unexpected builtin object: " + obj.String())
+ }
+
+ case *types.TypeName:
+ if name != "_" {
+ pkg.Members[name] = &Type{
+ object: obj,
+ pkg: pkg,
+ }
+ }
+
+ case *types.Const:
+ c := &NamedConst{
+ object: obj,
+ Value: NewConst(obj.Val(), obj.Type()),
+ pkg: pkg,
+ }
+ pkg.objects[obj] = c
+ if name != "_" {
+ pkg.Members[name] = c
+ }
+
+ case *types.Var:
+ g := &Global{
+ Pkg: pkg,
+ name: name,
+ object: obj,
+ typ: types.NewPointer(obj.Type()), // address
+ pos: obj.Pos(),
+ }
+ pkg.objects[obj] = g
+ if name != "_" {
+ pkg.Members[name] = g
+ }
+
+ case *types.Func:
+ sig := obj.Type().(*types.Signature)
+ if sig.Recv() == nil && name == "init" {
+ pkg.ninit++
+ name = fmt.Sprintf("init#%d", pkg.ninit)
+ }
+ fn := createFunction(pkg.Prog, obj, name, syntax, pkg.info, goversion)
+ fn.Pkg = pkg
+ pkg.created = append(pkg.created, fn)
+ pkg.objects[obj] = fn
+ if name != "_" && sig.Recv() == nil {
+ pkg.Members[name] = fn // package-level function
+ }
+
+ default: // (incl. *types.Package)
+ panic("unexpected Object type: " + obj.String())
+ }
+}
+
+// createFunction creates a function or method. It supports both
+// CreatePackage (with or without syntax) and the on-demand creation
+// of methods in non-created packages based on their types.Func.
+func createFunction(prog *Program, obj *types.Func, name string, syntax ast.Node, info *types.Info, goversion string) *Function {
+ sig := obj.Type().(*types.Signature)
+
+ // Collect type parameters.
+ var tparams *types.TypeParamList
+ if rtparams := sig.RecvTypeParams(); rtparams.Len() > 0 {
+ tparams = rtparams // method of generic type
+ } else if sigparams := sig.TypeParams(); sigparams.Len() > 0 {
+ tparams = sigparams // generic function
+ }
+
+ /* declared function/method (from syntax or export data) */
+ fn := &Function{
+ name: name,
+ object: obj,
+ Signature: sig,
+ build: (*builder).buildFromSyntax,
+ syntax: syntax,
+ info: info,
+ goversion: goversion,
+ pos: obj.Pos(),
+ Pkg: nil, // may be set by caller
+ Prog: prog,
+ typeparams: tparams,
+ }
+ if fn.syntax == nil {
+ fn.Synthetic = "from type information"
+ fn.build = (*builder).buildParamsOnly
+ }
+ if tparams.Len() > 0 {
+ fn.generic = new(generic)
+ }
+ return fn
+}
+
+// membersFromDecl populates package pkg with members for each
+// typechecker object (var, func, const or type) associated with the
+// specified decl.
+func membersFromDecl(pkg *Package, decl ast.Decl, goversion string) {
+ switch decl := decl.(type) {
+ case *ast.GenDecl: // import, const, type or var
+ switch decl.Tok {
+ case token.CONST:
+ for _, spec := range decl.Specs {
+ for _, id := range spec.(*ast.ValueSpec).Names {
+ memberFromObject(pkg, pkg.info.Defs[id], nil, "")
+ }
+ }
+
+ case token.VAR:
+ for _, spec := range decl.Specs {
+ for _, rhs := range spec.(*ast.ValueSpec).Values {
+ pkg.initVersion[rhs] = goversion
+ }
+ for _, id := range spec.(*ast.ValueSpec).Names {
+ memberFromObject(pkg, pkg.info.Defs[id], spec, goversion)
+ }
+ }
+
+ case token.TYPE:
+ for _, spec := range decl.Specs {
+ id := spec.(*ast.TypeSpec).Name
+ memberFromObject(pkg, pkg.info.Defs[id], nil, "")
+ }
+ }
+
+ case *ast.FuncDecl:
+ id := decl.Name
+ memberFromObject(pkg, pkg.info.Defs[id], decl, goversion)
+ }
+}
+
+// CreatePackage creates and returns an SSA Package from the
+// specified type-checked, error-free file ASTs, and populates its
+// Members mapping.
+//
+// importable determines whether this package should be returned by a
+// subsequent call to ImportedPackage(pkg.Path()).
+//
+// The real work of building SSA form for each function is not done
+// until a subsequent call to Package.Build.
+//
+// CreatePackage should not be called after building any package in
+// the program.
+func (prog *Program) CreatePackage(pkg *types.Package, files []*ast.File, info *types.Info, importable bool) *Package {
+ // TODO(adonovan): assert that no package has yet been built.
+ if pkg == nil {
+ panic("nil pkg") // otherwise pkg.Scope below returns types.Universe!
+ }
+ p := &Package{
+ Prog: prog,
+ Members: make(map[string]Member),
+ objects: make(map[types.Object]Member),
+ Pkg: pkg,
+ syntax: info != nil,
+ // transient values (cleared after Package.Build)
+ info: info,
+ files: files,
+ initVersion: make(map[ast.Expr]string),
+ }
+
+ /* synthesized package initializer */
+ p.init = &Function{
+ name: "init",
+ Signature: new(types.Signature),
+ Synthetic: "package initializer",
+ Pkg: p,
+ Prog: prog,
+ build: (*builder).buildPackageInit,
+ info: p.info,
+ goversion: "", // See Package.build for details.
+ }
+ p.Members[p.init.name] = p.init
+ p.created = append(p.created, p.init)
+
+ // Allocate all package members: vars, funcs, consts and types.
+ if len(files) > 0 {
+ // Go source package.
+ for _, file := range files {
+ goversion := versions.Lang(versions.FileVersion(p.info, file))
+ for _, decl := range file.Decls {
+ membersFromDecl(p, decl, goversion)
+ }
+ }
+ } else {
+ // GC-compiled binary package (or "unsafe")
+ // No code.
+ // No position information.
+ scope := p.Pkg.Scope()
+ for _, name := range scope.Names() {
+ obj := scope.Lookup(name)
+ memberFromObject(p, obj, nil, "")
+ if obj, ok := obj.(*types.TypeName); ok {
+ // No Unalias: aliases should not duplicate methods.
+ if named, ok := obj.Type().(*types.Named); ok {
+ for i, n := 0, named.NumMethods(); i < n; i++ {
+ memberFromObject(p, named.Method(i), nil, "")
+ }
+ }
+ }
+ }
+ }
+
+ if prog.mode&BareInits == 0 {
+ // Add initializer guard variable.
+ initguard := &Global{
+ Pkg: p,
+ name: "init$guard",
+ typ: types.NewPointer(tBool),
+ }
+ p.Members[initguard.Name()] = initguard
+ }
+
+ if prog.mode&GlobalDebug != 0 {
+ p.SetDebugMode(true)
+ }
+
+ if prog.mode&PrintPackages != 0 {
+ printMu.Lock()
+ p.WriteTo(os.Stdout)
+ printMu.Unlock()
+ }
+
+ if importable {
+ prog.imported[p.Pkg.Path()] = p
+ }
+ prog.packages[p.Pkg] = p
+
+ return p
+}
+
+// printMu serializes printing of Packages/Functions to stdout.
+var printMu sync.Mutex
+
+// AllPackages returns a new slice containing all packages created by
+// prog.CreatePackage in unspecified order.
+func (prog *Program) AllPackages() []*Package {
+ pkgs := make([]*Package, 0, len(prog.packages))
+ for _, pkg := range prog.packages {
+ pkgs = append(pkgs, pkg)
+ }
+ return pkgs
+}
+
+// ImportedPackage returns the importable Package whose PkgPath
+// is path, or nil if no such Package has been created.
+//
+// A parameter to CreatePackage determines whether a package should be
+// considered importable. For example, no import declaration can resolve
+// to the ad-hoc main package created by 'go build foo.go'.
+//
+// TODO(adonovan): rethink this function and the "importable" concept;
+// most packages are importable. This function assumes that all
+// types.Package.Path values are unique within the ssa.Program, which is
+// false---yet this function remains very convenient.
+// Clients should use (*Program).Package instead where possible.
+// SSA doesn't really need a string-keyed map of packages.
+//
+// Furthermore, the graph of packages may contain multiple variants
+// (e.g. "p" vs "p as compiled for q.test"), and each has a different
+// view of its dependencies.
+func (prog *Program) ImportedPackage(path string) *Package {
+ return prog.imported[path]
+}
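
For orientation, a minimal sketch of the lower-level create-then-build flow that CreatePackage above takes part in. This is not part of the patch: the source string, package path, and builder mode are placeholders, and it assumes ssa.NewProgram and (*Package).Func with their usual signatures; the exact set of types.Info maps the builder consults may vary by version.

package main

import (
	"go/ast"
	"go/parser"
	"go/token"
	"go/types"
	"os"

	"golang.org/x/tools/go/ssa"
)

func main() {
	const src = `package p
func Hello() string { return "hi" }`

	// Parse a single file into an AST.
	fset := token.NewFileSet()
	file, err := parser.ParseFile(fset, "p.go", src, 0)
	if err != nil {
		panic(err)
	}
	files := []*ast.File{file}

	// Type-check it, filling in the types.Info maps that the SSA builder reads.
	info := &types.Info{
		Types:      make(map[ast.Expr]types.TypeAndValue),
		Defs:       make(map[*ast.Ident]types.Object),
		Uses:       make(map[*ast.Ident]types.Object),
		Implicits:  make(map[ast.Node]types.Object),
		Instances:  make(map[*ast.Ident]types.Instance),
		Selections: make(map[*ast.SelectorExpr]*types.Selection),
		Scopes:     make(map[ast.Node]*types.Scope),
	}
	var conf types.Config
	pkg, err := conf.Check("example.org/p", fset, files, info)
	if err != nil {
		panic(err)
	}

	// Create the SSA package, then build function bodies.
	prog := ssa.NewProgram(fset, ssa.SanityCheckFunctions)
	ssaPkg := prog.CreatePackage(pkg, files, info, false)
	ssaPkg.Build()

	// Print the SSA form of the one declared function.
	ssaPkg.Func("Hello").WriteTo(os.Stdout)
}
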
diff --git a/vendor/golang.org/x/tools/go/ssa/doc.go b/vendor/golang.org/x/tools/go/ssa/doc.go
new file mode 100644
index 0000000..3310b55
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/ssa/doc.go
@@ -0,0 +1,122 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package ssa defines a representation of the elements of Go programs
+// (packages, types, functions, variables and constants) using a
+// static single-assignment (SSA) form intermediate representation
+// (IR) for the bodies of functions.
+//
+// For an introduction to SSA form, see
+// http://en.wikipedia.org/wiki/Static_single_assignment_form.
+// This page provides a broader reading list:
+// http://www.dcs.gla.ac.uk/~jsinger/ssa.html.
+//
+// The level of abstraction of the SSA form is intentionally close to
+// the source language to facilitate construction of source analysis
+// tools. It is not intended for machine code generation.
+//
+// All looping, branching and switching constructs are replaced with
+// unstructured control flow. Higher-level control flow constructs
+// such as multi-way branch can be reconstructed as needed; see
+// [golang.org/x/tools/go/ssa/ssautil.Switches] for an example.
+//
+// The simplest way to create the SSA representation of a package is
+// to load typed syntax trees using [golang.org/x/tools/go/packages], then
+// invoke the [golang.org/x/tools/go/ssa/ssautil.Packages] helper function.
+// (See the package-level Examples named LoadPackages and LoadWholeProgram.)
+// The resulting [ssa.Program] contains all the packages and their
+// members, but SSA code is not created for function bodies until a
+// subsequent call to [Package.Build] or [Program.Build].
+//
+// The builder initially builds a naive SSA form in which all local
+// variables are addresses of stack locations with explicit loads and
+// stores. Registerisation of eligible locals and φ-node insertion
+// using dominance and dataflow are then performed as a second pass
+// called "lifting" to improve the accuracy and performance of
+// subsequent analyses; this pass can be skipped by setting the
+// NaiveForm builder flag.
+//
+// The primary interfaces of this package are:
+//
+// - [Member]: a named member of a Go package.
+// - [Value]: an expression that yields a value.
+// - [Instruction]: a statement that consumes values and performs computation.
+// - [Node]: a [Value] or [Instruction] (emphasizing its membership in the SSA value graph)
+//
+// A computation that yields a result implements both the [Value] and
+// [Instruction] interfaces. The following table shows for each
+// concrete type which of these interfaces it implements.
+//
+// Value? Instruction? Member?
+// *Alloc ✔ ✔
+// *BinOp ✔ ✔
+// *Builtin ✔
+// *Call ✔ ✔
+// *ChangeInterface ✔ ✔
+// *ChangeType ✔ ✔
+// *Const ✔
+// *Convert ✔ ✔
+// *DebugRef ✔
+// *Defer ✔
+// *Extract ✔ ✔
+// *Field ✔ ✔
+// *FieldAddr ✔ ✔
+// *FreeVar ✔
+// *Function ✔ ✔ (func)
+// *Global ✔ ✔ (var)
+// *Go ✔
+// *If ✔
+// *Index ✔ ✔
+// *IndexAddr ✔ ✔
+// *Jump ✔
+// *Lookup ✔ ✔
+// *MakeChan ✔ ✔
+// *MakeClosure ✔ ✔
+// *MakeInterface ✔ ✔
+// *MakeMap ✔ ✔
+// *MakeSlice ✔ ✔
+// *MapUpdate ✔
+// *MultiConvert ✔ ✔
+// *NamedConst ✔ (const)
+// *Next ✔ ✔
+// *Panic ✔
+// *Parameter ✔
+// *Phi ✔ ✔
+// *Range ✔ ✔
+// *Return ✔
+// *RunDefers ✔
+// *Select ✔ ✔
+// *Send ✔
+// *Slice ✔ ✔
+// *SliceToArrayPointer ✔ ✔
+// *Store ✔
+// *Type ✔ (type)
+// *TypeAssert ✔ ✔
+// *UnOp ✔ ✔
+//
+// Other key types in this package include: [Program], [Package], [Function]
+// and [BasicBlock].
+//
+// The program representation constructed by this package is fully
+// resolved internally, i.e. it does not rely on the names of Values,
+// Packages, Functions, Types or BasicBlocks for the correct
+// interpretation of the program. Only the identities of objects and
+// the topology of the SSA and type graphs are semantically
+// significant. (There is one exception: [types.Id] values, which identify field
+// and method names, contain strings.) Avoidance of name-based
+// operations simplifies the implementation of subsequent passes and
+// can make them very efficient. Many objects are nonetheless named
+// to aid in debugging, but it is not essential that the names be
+// either accurate or unambiguous. The public API exposes a number of
+// name-based maps for client convenience.
+//
+// The [golang.org/x/tools/go/ssa/ssautil] package provides various
+// helper functions, for example to simplify loading a Go program into
+// SSA form.
+//
+// TODO(adonovan): write a how-to document for all the various cases
+// of trying to determine corresponding elements across the four
+// domains of source locations, ast.Nodes, types.Objects,
+// ssa.Values/Instructions.
+package ssa // import "golang.org/x/tools/go/ssa"
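
As a companion to the loading route the package documentation above recommends, here is a small sketch using go/packages and the ssautil.Packages helper. The "./..." load pattern is a placeholder and error handling is minimal.

package main

import (
	"fmt"

	"golang.org/x/tools/go/packages"
	"golang.org/x/tools/go/ssa"
	"golang.org/x/tools/go/ssa/ssautil"
)

func main() {
	// Load typed syntax for the packages matching the pattern.
	cfg := &packages.Config{Mode: packages.LoadAllSyntax}
	initial, err := packages.Load(cfg, "./...")
	if err != nil {
		panic(err)
	}

	// Create SSA packages for all well-typed packages, then build bodies.
	prog, pkgs := ssautil.Packages(initial, ssa.InstantiateGenerics)
	prog.Build()

	for _, p := range pkgs {
		if p != nil { // nil for packages with type errors
			fmt.Println(p.Pkg.Path(), "has", len(p.Members), "members")
		}
	}
}
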
diff --git a/vendor/golang.org/x/tools/go/ssa/dom.go b/vendor/golang.org/x/tools/go/ssa/dom.go
new file mode 100644
index 0000000..02c1ae8
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/ssa/dom.go
@@ -0,0 +1,340 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssa
+
+// This file defines algorithms related to dominance.
+
+// Dominator tree construction ----------------------------------------
+//
+// We use the algorithm described in Lengauer & Tarjan. 1979. A fast
+// algorithm for finding dominators in a flowgraph.
+// http://doi.acm.org/10.1145/357062.357071
+//
+// We also apply the optimizations to SLT described in Georgiadis et
+// al, Finding Dominators in Practice, JGAA 2006,
+// http://jgaa.info/accepted/2006/GeorgiadisTarjanWerneck2006.10.1.pdf
+// to avoid the need for buckets of size > 1.
+
+import (
+ "bytes"
+ "fmt"
+ "math/big"
+ "os"
+ "sort"
+)
+
+// Idom returns the block that immediately dominates b:
+// its parent in the dominator tree, if any.
+// Neither the entry node (b.Index==0) nor the recover node
+// (b==b.Parent().Recover()) has a parent.
+func (b *BasicBlock) Idom() *BasicBlock { return b.dom.idom }
+
+// Dominees returns the list of blocks that b immediately dominates:
+// its children in the dominator tree.
+func (b *BasicBlock) Dominees() []*BasicBlock { return b.dom.children }
+
+// Dominates reports whether b dominates c.
+func (b *BasicBlock) Dominates(c *BasicBlock) bool {
+ return b.dom.pre <= c.dom.pre && c.dom.post <= b.dom.post
+}
+
+// DomPreorder returns a new slice containing the blocks of f
+// in a preorder traversal of the dominator tree.
+func (f *Function) DomPreorder() []*BasicBlock {
+ slice := append([]*BasicBlock(nil), f.Blocks...)
+ sort.Slice(slice, func(i, j int) bool {
+ return slice[i].dom.pre < slice[j].dom.pre
+ })
+ return slice
+}
+
+// DomPostorder returns a new slice containing the blocks of f
+// in a postorder traversal of the dominator tree.
+// (This is not the same as a postdominance order.)
+func (f *Function) DomPostorder() []*BasicBlock {
+ slice := append([]*BasicBlock(nil), f.Blocks...)
+ sort.Slice(slice, func(i, j int) bool {
+ return slice[i].dom.post < slice[j].dom.post
+ })
+ return slice
+}
+
+// domInfo contains a BasicBlock's dominance information.
+type domInfo struct {
+ idom *BasicBlock // immediate dominator (parent in domtree)
+ children []*BasicBlock // nodes immediately dominated by this one
+ pre, post int32 // pre- and post-order numbering within domtree
+}
+
+// ltState holds the working state for the Lengauer-Tarjan algorithm
+// (during which domInfo.pre is repurposed for CFG DFS preorder number).
+type ltState struct {
+ // Each slice is indexed by b.Index.
+ sdom []*BasicBlock // b's semidominator
+ parent []*BasicBlock // b's parent in DFS traversal of CFG
+ ancestor []*BasicBlock // b's ancestor with least sdom
+}
+
+// dfs implements the depth-first search part of the LT algorithm.
+func (lt *ltState) dfs(v *BasicBlock, i int32, preorder []*BasicBlock) int32 {
+ preorder[i] = v
+ v.dom.pre = i // For now: DFS preorder of spanning tree of CFG
+ i++
+ lt.sdom[v.Index] = v
+ lt.link(nil, v)
+ for _, w := range v.Succs {
+ if lt.sdom[w.Index] == nil {
+ lt.parent[w.Index] = v
+ i = lt.dfs(w, i, preorder)
+ }
+ }
+ return i
+}
+
+// eval implements the EVAL part of the LT algorithm.
+func (lt *ltState) eval(v *BasicBlock) *BasicBlock {
+ // TODO(adonovan): opt: do path compression per simple LT.
+ u := v
+ for ; lt.ancestor[v.Index] != nil; v = lt.ancestor[v.Index] {
+ if lt.sdom[v.Index].dom.pre < lt.sdom[u.Index].dom.pre {
+ u = v
+ }
+ }
+ return u
+}
+
+// link implements the LINK part of the LT algorithm.
+func (lt *ltState) link(v, w *BasicBlock) {
+ lt.ancestor[w.Index] = v
+}
+
+// buildDomTree computes the dominator tree of f using the LT algorithm.
+// Precondition: all blocks are reachable (e.g. optimizeBlocks has been run).
+func buildDomTree(f *Function) {
+ // The step numbers refer to the original LT paper; the
+ // reordering is due to Georgiadis.
+
+ // Clear any previous domInfo.
+ for _, b := range f.Blocks {
+ b.dom = domInfo{}
+ }
+
+ n := len(f.Blocks)
+ // Allocate space for 5 contiguous [n]*BasicBlock arrays:
+ // sdom, parent, ancestor, preorder, buckets.
+ space := make([]*BasicBlock, 5*n)
+ lt := ltState{
+ sdom: space[0:n],
+ parent: space[n : 2*n],
+ ancestor: space[2*n : 3*n],
+ }
+
+ // Step 1. Number vertices by depth-first preorder.
+ preorder := space[3*n : 4*n]
+ root := f.Blocks[0]
+ prenum := lt.dfs(root, 0, preorder)
+ recover := f.Recover
+ if recover != nil {
+ lt.dfs(recover, prenum, preorder)
+ }
+
+ buckets := space[4*n : 5*n]
+ copy(buckets, preorder)
+
+ // In reverse preorder...
+ for i := int32(n) - 1; i > 0; i-- {
+ w := preorder[i]
+
+ // Step 3. Implicitly define the immediate dominator of each node.
+ for v := buckets[i]; v != w; v = buckets[v.dom.pre] {
+ u := lt.eval(v)
+ if lt.sdom[u.Index].dom.pre < i {
+ v.dom.idom = u
+ } else {
+ v.dom.idom = w
+ }
+ }
+
+ // Step 2. Compute the semidominators of all nodes.
+ lt.sdom[w.Index] = lt.parent[w.Index]
+ for _, v := range w.Preds {
+ u := lt.eval(v)
+ if lt.sdom[u.Index].dom.pre < lt.sdom[w.Index].dom.pre {
+ lt.sdom[w.Index] = lt.sdom[u.Index]
+ }
+ }
+
+ lt.link(lt.parent[w.Index], w)
+
+ if lt.parent[w.Index] == lt.sdom[w.Index] {
+ w.dom.idom = lt.parent[w.Index]
+ } else {
+ buckets[i] = buckets[lt.sdom[w.Index].dom.pre]
+ buckets[lt.sdom[w.Index].dom.pre] = w
+ }
+ }
+
+ // The final 'Step 3' is now outside the loop.
+ for v := buckets[0]; v != root; v = buckets[v.dom.pre] {
+ v.dom.idom = root
+ }
+
+ // Step 4. Explicitly define the immediate dominator of each
+ // node, in preorder.
+ for _, w := range preorder[1:] {
+ if w == root || w == recover {
+ w.dom.idom = nil
+ } else {
+ if w.dom.idom != lt.sdom[w.Index] {
+ w.dom.idom = w.dom.idom.dom.idom
+ }
+ // Calculate Children relation as inverse of Idom.
+ w.dom.idom.dom.children = append(w.dom.idom.dom.children, w)
+ }
+ }
+
+ pre, post := numberDomTree(root, 0, 0)
+ if recover != nil {
+ numberDomTree(recover, pre, post)
+ }
+
+ // printDomTreeDot(os.Stderr, f) // debugging
+ // printDomTreeText(os.Stderr, root, 0) // debugging
+
+ if f.Prog.mode&SanityCheckFunctions != 0 {
+ sanityCheckDomTree(f)
+ }
+}
+
+// numberDomTree sets the pre- and post-order numbers of a depth-first
+// traversal of the dominator tree rooted at v. These are used to
+// answer dominance queries in constant time.
+func numberDomTree(v *BasicBlock, pre, post int32) (int32, int32) {
+ v.dom.pre = pre
+ pre++
+ for _, child := range v.dom.children {
+ pre, post = numberDomTree(child, pre, post)
+ }
+ v.dom.post = post
+ post++
+ return pre, post
+}
+
+// Testing utilities ----------------------------------------
+
+// sanityCheckDomTree checks the correctness of the dominator tree
+// computed by the LT algorithm by comparing against the dominance
+// relation computed by a naive Kildall-style forward dataflow
+// analysis (Algorithm 10.16 from the "Dragon" book).
+func sanityCheckDomTree(f *Function) {
+ n := len(f.Blocks)
+
+ // D[i] is the set of blocks that dominate f.Blocks[i],
+ // represented as a bit-set of block indices.
+ D := make([]big.Int, n)
+
+ one := big.NewInt(1)
+
+ // all is the set of all blocks; constant.
+ var all big.Int
+ all.Set(one).Lsh(&all, uint(n)).Sub(&all, one)
+
+ // Initialization.
+ for i, b := range f.Blocks {
+ if i == 0 || b == f.Recover {
+ // A root is dominated only by itself.
+ D[i].SetBit(&D[0], 0, 1)
+ } else {
+ // All other blocks are (initially) dominated
+ // by every block.
+ D[i].Set(&all)
+ }
+ }
+
+ // Iteration until fixed point.
+ for changed := true; changed; {
+ changed = false
+ for i, b := range f.Blocks {
+ if i == 0 || b == f.Recover {
+ continue
+ }
+ // Compute intersection across predecessors.
+ var x big.Int
+ x.Set(&all)
+ for _, pred := range b.Preds {
+ x.And(&x, &D[pred.Index])
+ }
+ x.SetBit(&x, i, 1) // a block always dominates itself.
+ if D[i].Cmp(&x) != 0 {
+ D[i].Set(&x)
+ changed = true
+ }
+ }
+ }
+
+ // Check the entire relation. O(n^2).
+ // The Recover block (if any) must be treated specially so we skip it.
+ ok := true
+ for i := 0; i < n; i++ {
+ for j := 0; j < n; j++ {
+ b, c := f.Blocks[i], f.Blocks[j]
+ if c == f.Recover {
+ continue
+ }
+ actual := b.Dominates(c)
+ expected := D[j].Bit(i) == 1
+ if actual != expected {
+ fmt.Fprintf(os.Stderr, "dominates(%s, %s)==%t, want %t\n", b, c, actual, expected)
+ ok = false
+ }
+ }
+ }
+
+ preorder := f.DomPreorder()
+ for _, b := range f.Blocks {
+ if got := preorder[b.dom.pre]; got != b {
+ fmt.Fprintf(os.Stderr, "preorder[%d]==%s, want %s\n", b.dom.pre, got, b)
+ ok = false
+ }
+ }
+
+ if !ok {
+ panic("sanityCheckDomTree failed for " + f.String())
+ }
+
+}
+
+// Printing functions ----------------------------------------
+
+// printDomTreeText prints the dominator tree as text, using indentation.
+func printDomTreeText(buf *bytes.Buffer, v *BasicBlock, indent int) {
+ fmt.Fprintf(buf, "%*s%s\n", 4*indent, "", v)
+ for _, child := range v.dom.children {
+ printDomTreeText(buf, child, indent+1)
+ }
+}
+
+// printDomTreeDot prints the dominator tree of f in AT&T GraphViz
+// (.dot) format.
+func printDomTreeDot(buf *bytes.Buffer, f *Function) {
+ fmt.Fprintln(buf, "//", f)
+ fmt.Fprintln(buf, "digraph domtree {")
+ for i, b := range f.Blocks {
+ v := b.dom
+ fmt.Fprintf(buf, "\tn%d [label=\"%s (%d, %d)\",shape=\"rectangle\"];\n", v.pre, b, v.pre, v.post)
+ // TODO(adonovan): improve appearance of edges
+ // belonging to both dominator tree and CFG.
+
+ // Dominator tree edge.
+ if i != 0 {
+ fmt.Fprintf(buf, "\tn%d -> n%d [style=\"solid\",weight=100];\n", v.idom.dom.pre, v.pre)
+ }
+ // CFG edges.
+ for _, pred := range b.Preds {
+ fmt.Fprintf(buf, "\tn%d -> n%d [style=\"dotted\",weight=0];\n", pred.dom.pre, v.pre)
+ }
+ }
+ fmt.Fprintln(buf, "}")
+}
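
To make the exported dominance API above concrete, a short sketch of how a client might walk a function's dominator tree; it assumes fn is an *ssa.Function whose package has already been built.

package domdump

import (
	"fmt"

	"golang.org/x/tools/go/ssa"
)

// PrintDomTree prints each block of a built function together with its
// immediate dominator and the blocks it immediately dominates.
func PrintDomTree(fn *ssa.Function) {
	for _, b := range fn.DomPreorder() {
		if idom := b.Idom(); idom != nil {
			fmt.Printf("block %d (idom %d) dominates:", b.Index, idom.Index)
		} else {
			fmt.Printf("block %d (root) dominates:", b.Index)
		}
		for _, c := range b.Dominees() {
			fmt.Printf(" %d", c.Index)
		}
		fmt.Println()
	}
}
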
diff --git a/vendor/golang.org/x/tools/go/ssa/emit.go b/vendor/golang.org/x/tools/go/ssa/emit.go
new file mode 100644
index 0000000..c664ff8
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/ssa/emit.go
@@ -0,0 +1,614 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssa
+
+// Helpers for emitting SSA instructions.
+
+import (
+ "fmt"
+ "go/ast"
+ "go/token"
+ "go/types"
+
+ "golang.org/x/tools/internal/typeparams"
+)
+
+// emitAlloc emits to f a new Alloc instruction allocating a variable
+// of type typ.
+//
+// The caller must set Alloc.Heap=true (for a heap-allocated variable)
+// or add the Alloc to f.Locals (for a frame-allocated variable).
+//
+// During building, a variable in f.Locals may have its Heap flag
+// set when it is discovered that its address is taken.
+// These Allocs are removed from f.Locals at the end.
+//
+// The builder should generally call one of the emit{New,Local,LocalVar} wrappers instead.
+func emitAlloc(f *Function, typ types.Type, pos token.Pos, comment string) *Alloc {
+ v := &Alloc{Comment: comment}
+ v.setType(types.NewPointer(typ))
+ v.setPos(pos)
+ f.emit(v)
+ return v
+}
+
+// emitNew emits to f a new Alloc instruction heap-allocating a
+// variable of type typ. pos is the optional source location.
+func emitNew(f *Function, typ types.Type, pos token.Pos, comment string) *Alloc {
+ alloc := emitAlloc(f, typ, pos, comment)
+ alloc.Heap = true
+ return alloc
+}
+
+// emitLocal creates a local var for (t, pos, comment) and
+// emits an Alloc instruction for it.
+//
+// (Use this function or emitNew for synthetic variables;
+// for source-level variables in the same function, use emitLocalVar.)
+func emitLocal(f *Function, t types.Type, pos token.Pos, comment string) *Alloc {
+ local := emitAlloc(f, t, pos, comment)
+ f.Locals = append(f.Locals, local)
+ return local
+}
+
+// emitLocalVar creates a local var for v and emits an Alloc instruction for it.
+// Subsequent calls to f.lookup(v) return it.
+// It applies the appropriate generic instantiation to the type.
+func emitLocalVar(f *Function, v *types.Var) *Alloc {
+ alloc := emitLocal(f, f.typ(v.Type()), v.Pos(), v.Name())
+ f.vars[v] = alloc
+ return alloc
+}
+
+// emitLoad emits to f an instruction to load the value at address addr
+// into a new temporary, and returns the value so defined.
+func emitLoad(f *Function, addr Value) *UnOp {
+ v := &UnOp{Op: token.MUL, X: addr}
+ v.setType(typeparams.MustDeref(addr.Type()))
+ f.emit(v)
+ return v
+}
+
+// emitDebugRef emits to f a DebugRef pseudo-instruction associating
+// expression e with value v.
+func emitDebugRef(f *Function, e ast.Expr, v Value, isAddr bool) {
+ if !f.debugInfo() {
+ return // debugging not enabled
+ }
+ if v == nil || e == nil {
+ panic("nil")
+ }
+ var obj types.Object
+ e = unparen(e)
+ if id, ok := e.(*ast.Ident); ok {
+ if isBlankIdent(id) {
+ return
+ }
+ obj = f.objectOf(id)
+ switch obj.(type) {
+ case *types.Nil, *types.Const, *types.Builtin:
+ return
+ }
+ }
+ f.emit(&DebugRef{
+ X: v,
+ Expr: e,
+ IsAddr: isAddr,
+ object: obj,
+ })
+}
+
+// emitArith emits to f code to compute the binary operation op(x, y)
+// where op is an eager shift, logical or arithmetic operation.
+// (Use emitCompare() for comparisons and Builder.logicalBinop() for
+// non-eager operations.)
+func emitArith(f *Function, op token.Token, x, y Value, t types.Type, pos token.Pos) Value {
+ switch op {
+ case token.SHL, token.SHR:
+ x = emitConv(f, x, t)
+ // y may be signed or an 'untyped' constant.
+
+ // There is a runtime panic if y is signed and <0. Instead of inserting a check for y<0
+ // and converting to an unsigned value (like the compiler) leave y as is.
+
+ if isUntyped(y.Type().Underlying()) {
+ // Untyped conversion:
+ // Spec https://go.dev/ref/spec#Operators:
+ // The right operand in a shift expression must have integer type or be an untyped constant
+ // representable by a value of type uint.
+ y = emitConv(f, y, types.Typ[types.Uint])
+ }
+
+ case token.ADD, token.SUB, token.MUL, token.QUO, token.REM, token.AND, token.OR, token.XOR, token.AND_NOT:
+ x = emitConv(f, x, t)
+ y = emitConv(f, y, t)
+
+ default:
+ panic("illegal op in emitArith: " + op.String())
+
+ }
+ v := &BinOp{
+ Op: op,
+ X: x,
+ Y: y,
+ }
+ v.setPos(pos)
+ v.setType(t)
+ return f.emit(v)
+}
+
+// emitCompare emits to f code to compute the boolean result of
+// comparison 'x op y'.
+func emitCompare(f *Function, op token.Token, x, y Value, pos token.Pos) Value {
+ xt := x.Type().Underlying()
+ yt := y.Type().Underlying()
+
+ // Special case to optimise a tagless SwitchStmt so that
+ // these are equivalent
+ // switch { case e: ...}
+ // switch true { case e: ... }
+ // if e==true { ... }
+ // even in the case when e's type is an interface.
+ // TODO(adonovan): opt: generalise to x==true, false!=y, etc.
+ if x == vTrue && op == token.EQL {
+ if yt, ok := yt.(*types.Basic); ok && yt.Info()&types.IsBoolean != 0 {
+ return y
+ }
+ }
+
+ if types.Identical(xt, yt) {
+ // no conversion necessary
+ } else if isNonTypeParamInterface(x.Type()) {
+ y = emitConv(f, y, x.Type())
+ } else if isNonTypeParamInterface(y.Type()) {
+ x = emitConv(f, x, y.Type())
+ } else if _, ok := x.(*Const); ok {
+ x = emitConv(f, x, y.Type())
+ } else if _, ok := y.(*Const); ok {
+ y = emitConv(f, y, x.Type())
+ } else {
+ // other cases, e.g. channels. No-op.
+ }
+
+ v := &BinOp{
+ Op: op,
+ X: x,
+ Y: y,
+ }
+ v.setPos(pos)
+ v.setType(tBool)
+ return f.emit(v)
+}
+
+// isValuePreserving returns true if a conversion from ut_src to
+// ut_dst is value-preserving, i.e. just a change of type.
+// Precondition: neither argument is a named or alias type.
+func isValuePreserving(ut_src, ut_dst types.Type) bool {
+ // Identical underlying types?
+ if types.IdenticalIgnoreTags(ut_dst, ut_src) {
+ return true
+ }
+
+ switch ut_dst.(type) {
+ case *types.Chan:
+ // Conversion between channel types?
+ _, ok := ut_src.(*types.Chan)
+ return ok
+
+ case *types.Pointer:
+ // Conversion between pointers with identical base types?
+ _, ok := ut_src.(*types.Pointer)
+ return ok
+ }
+ return false
+}
+
+// emitConv emits to f code to convert Value val to exactly type typ,
+// and returns the converted value. Implicit conversions are required
+// by language assignability rules in assignments, parameter passing,
+// etc.
+func emitConv(f *Function, val Value, typ types.Type) Value {
+ t_src := val.Type()
+
+ // Identical types? Conversion is a no-op.
+ if types.Identical(t_src, typ) {
+ return val
+ }
+ ut_dst := typ.Underlying()
+ ut_src := t_src.Underlying()
+
+ // Conversion to, or construction of a value of, an interface type?
+ if isNonTypeParamInterface(typ) {
+ // Interface name change?
+ if isValuePreserving(ut_src, ut_dst) {
+ c := &ChangeType{X: val}
+ c.setType(typ)
+ return f.emit(c)
+ }
+
+ // Assignment from one interface type to another?
+ if isNonTypeParamInterface(t_src) {
+ c := &ChangeInterface{X: val}
+ c.setType(typ)
+ return f.emit(c)
+ }
+
+ // Untyped nil constant? Return interface-typed nil constant.
+ if ut_src == tUntypedNil {
+ return zeroConst(typ)
+ }
+
+ // Convert (non-nil) "untyped" literals to their default type.
+ if t, ok := ut_src.(*types.Basic); ok && t.Info()&types.IsUntyped != 0 {
+ val = emitConv(f, val, types.Default(ut_src))
+ }
+
+ // Record the types of operands to MakeInterface, if
+ // non-parameterized, as they are the set of runtime types.
+ t := val.Type()
+ if f.typeparams.Len() == 0 || !f.Prog.isParameterized(t) {
+ addRuntimeType(f.Prog, t)
+ }
+
+ mi := &MakeInterface{X: val}
+ mi.setType(typ)
+ return f.emit(mi)
+ }
+
+ // In the common case, the typesets of src and dst are singletons
+ // and we emit an appropriate conversion. But if either contains
+ // a type parameter, the conversion may represent a cross product,
+// in which case we emit a MultiConvert.
+ dst_terms := typeSetOf(ut_dst)
+ src_terms := typeSetOf(ut_src)
+
+ // conversionCase describes an instruction pattern that may be emitted to
+ // model d <- s for d in dst_terms and s in src_terms.
+ // Multiple conversions can match the same pattern.
+ type conversionCase uint8
+ const (
+ changeType conversionCase = 1 << iota
+ sliceToArray
+ sliceToArrayPtr
+ sliceTo0Array
+ sliceTo0ArrayPtr
+ convert
+ )
+ // classify the conversion case of a source type us to a destination type ud.
+ // us and ud are underlying types (not *Named or *Alias)
+ classify := func(us, ud types.Type) conversionCase {
+ // Just a change of type, but not value or representation?
+ if isValuePreserving(us, ud) {
+ return changeType
+ }
+
+ // Conversion from slice to array or slice to array pointer?
+ if slice, ok := us.(*types.Slice); ok {
+ var arr *types.Array
+ var ptr bool
+ // Conversion from slice to array pointer?
+ switch d := ud.(type) {
+ case *types.Array:
+ arr = d
+ case *types.Pointer:
+ arr, _ = d.Elem().Underlying().(*types.Array)
+ ptr = true
+ }
+ if arr != nil && types.Identical(slice.Elem(), arr.Elem()) {
+ if arr.Len() == 0 {
+ if ptr {
+ return sliceTo0ArrayPtr
+ } else {
+ return sliceTo0Array
+ }
+ }
+ if ptr {
+ return sliceToArrayPtr
+ } else {
+ return sliceToArray
+ }
+ }
+ }
+
+ // The only remaining case in well-typed code is a representation-
+ // changing conversion of basic types (possibly with []byte/[]rune).
+ if !isBasic(us) && !isBasic(ud) {
+ panic(fmt.Sprintf("in %s: cannot convert term %s (%s [within %s]) to type %s [within %s]", f, val, val.Type(), us, typ, ud))
+ }
+ return convert
+ }
+
+ var classifications conversionCase
+ for _, s := range src_terms {
+ us := s.Type().Underlying()
+ for _, d := range dst_terms {
+ ud := d.Type().Underlying()
+ classifications |= classify(us, ud)
+ }
+ }
+ if classifications == 0 {
+ panic(fmt.Sprintf("in %s: cannot convert %s (%s) to %s", f, val, val.Type(), typ))
+ }
+
+ // Conversion of a compile-time constant value?
+ if c, ok := val.(*Const); ok {
+ // Conversion to a basic type?
+ if isBasic(ut_dst) {
+ // Conversion of a compile-time constant to
+ // another constant type results in a new
+ // constant of the destination type and
+ // (initially) the same abstract value.
+ // We don't truncate the value yet.
+ return NewConst(c.Value, typ)
+ }
+ // Can we always convert from zero value without panicking?
+ const mayPanic = sliceToArray | sliceToArrayPtr
+ if c.Value == nil && classifications&mayPanic == 0 {
+ return NewConst(nil, typ)
+ }
+
+ // We're converting from constant to non-constant type,
+ // e.g. string -> []byte/[]rune.
+ }
+
+ switch classifications {
+ case changeType: // representation-preserving change
+ c := &ChangeType{X: val}
+ c.setType(typ)
+ return f.emit(c)
+
+ case sliceToArrayPtr, sliceTo0ArrayPtr: // slice to array pointer
+ c := &SliceToArrayPointer{X: val}
+ c.setType(typ)
+ return f.emit(c)
+
+ case sliceToArray: // slice to arrays (not zero-length)
+ ptype := types.NewPointer(typ)
+ p := &SliceToArrayPointer{X: val}
+ p.setType(ptype)
+ x := f.emit(p)
+ unOp := &UnOp{Op: token.MUL, X: x}
+ unOp.setType(typ)
+ return f.emit(unOp)
+
+ case sliceTo0Array: // slice to zero-length arrays (constant)
+ return zeroConst(typ)
+
+ case convert: // representation-changing conversion
+ c := &Convert{X: val}
+ c.setType(typ)
+ return f.emit(c)
+
+ default: // multiple conversion
+ c := &MultiConvert{X: val, from: src_terms, to: dst_terms}
+ c.setType(typ)
+ return f.emit(c)
+ }
+}
+
+// emitTypeCoercion emits to f code to coerce the type of a
+// Value v to exactly type typ, and returns the coerced value.
+//
+// Requires that coercing v.Type() to typ is a value-preserving change.
+//
+// Currently used only when v.Type() is a type instance of typ or vice versa.
+// A type v is a type instance of a type t if there exists a
+// type parameter substitution σ s.t. σ(v) == t. Example:
+//
+// σ(func(T) T) == func(int) int for σ == [T ↦ int]
+//
+// This happens in instantiation wrappers for conversion
+// from an instantiation to a parameterized type (and vice versa)
+// with σ substituting f.typeparams by f.typeargs.
+func emitTypeCoercion(f *Function, v Value, typ types.Type) Value {
+ if types.Identical(v.Type(), typ) {
+ return v // no coercion needed
+ }
+ // TODO(taking): for instances should we record which side is the instance?
+ c := &ChangeType{
+ X: v,
+ }
+ c.setType(typ)
+ f.emit(c)
+ return c
+}
+
+// emitStore emits to f an instruction to store value val at location
+// addr, applying implicit conversions as required by assignability rules.
+func emitStore(f *Function, addr, val Value, pos token.Pos) *Store {
+ typ := typeparams.MustDeref(addr.Type())
+ s := &Store{
+ Addr: addr,
+ Val: emitConv(f, val, typ),
+ pos: pos,
+ }
+ f.emit(s)
+ return s
+}
+
+// emitJump emits to f a jump to target, and updates the control-flow graph.
+// Postcondition: f.currentBlock is nil.
+func emitJump(f *Function, target *BasicBlock) {
+ b := f.currentBlock
+ b.emit(new(Jump))
+ addEdge(b, target)
+ f.currentBlock = nil
+}
+
+// emitIf emits to f a conditional jump to tblock or fblock based on
+// cond, and updates the control-flow graph.
+// Postcondition: f.currentBlock is nil.
+func emitIf(f *Function, cond Value, tblock, fblock *BasicBlock) {
+ b := f.currentBlock
+ b.emit(&If{Cond: cond})
+ addEdge(b, tblock)
+ addEdge(b, fblock)
+ f.currentBlock = nil
+}
+
+// emitExtract emits to f an instruction to extract the index'th
+// component of tuple. It returns the extracted value.
+func emitExtract(f *Function, tuple Value, index int) Value {
+ e := &Extract{Tuple: tuple, Index: index}
+ e.setType(tuple.Type().(*types.Tuple).At(index).Type())
+ return f.emit(e)
+}
+
+// emitTypeAssert emits to f a type assertion value := x.(t) and
+// returns the value. x.Type() must be an interface.
+func emitTypeAssert(f *Function, x Value, t types.Type, pos token.Pos) Value {
+ a := &TypeAssert{X: x, AssertedType: t}
+ a.setPos(pos)
+ a.setType(t)
+ return f.emit(a)
+}
+
+// emitTypeTest emits to f a type test value,ok := x.(t) and returns
+// a (value, ok) tuple. x.Type() must be an interface.
+func emitTypeTest(f *Function, x Value, t types.Type, pos token.Pos) Value {
+ a := &TypeAssert{
+ X: x,
+ AssertedType: t,
+ CommaOk: true,
+ }
+ a.setPos(pos)
+ a.setType(types.NewTuple(
+ newVar("value", t),
+ varOk,
+ ))
+ return f.emit(a)
+}
+
+// emitTailCall emits to f a function call in tail position. The
+// caller is responsible for all fields of 'call' except its type.
+// Intended for wrapper methods.
+// Precondition: f does/will not use deferred procedure calls.
+// Postcondition: f.currentBlock is nil.
+func emitTailCall(f *Function, call *Call) {
+ tresults := f.Signature.Results()
+ nr := tresults.Len()
+ if nr == 1 {
+ call.typ = tresults.At(0).Type()
+ } else {
+ call.typ = tresults
+ }
+ tuple := f.emit(call)
+ var ret Return
+ switch nr {
+ case 0:
+ // no-op
+ case 1:
+ ret.Results = []Value{tuple}
+ default:
+ for i := 0; i < nr; i++ {
+ v := emitExtract(f, tuple, i)
+ // TODO(adonovan): in principle, this is required:
+ // v = emitConv(f, o.Type, f.Signature.Results[i].Type)
+ // but in practice emitTailCall is only used when
+ // the types exactly match.
+ ret.Results = append(ret.Results, v)
+ }
+ }
+ f.emit(&ret)
+ f.currentBlock = nil
+}
+
+// emitImplicitSelections emits to f code to apply the sequence of
+// implicit field selections specified by indices to base value v, and
+// returns the selected value.
+//
+// If v is the address of a struct, the result will be the address of
+// a field; if it is the value of a struct, the result will be the
+// value of a field.
+func emitImplicitSelections(f *Function, v Value, indices []int, pos token.Pos) Value {
+ for _, index := range indices {
+ if isPointerCore(v.Type()) {
+ fld := fieldOf(typeparams.MustDeref(v.Type()), index)
+ instr := &FieldAddr{
+ X: v,
+ Field: index,
+ }
+ instr.setPos(pos)
+ instr.setType(types.NewPointer(fld.Type()))
+ v = f.emit(instr)
+ // Load the field's value iff indirectly embedded.
+ if isPointerCore(fld.Type()) {
+ v = emitLoad(f, v)
+ }
+ } else {
+ fld := fieldOf(v.Type(), index)
+ instr := &Field{
+ X: v,
+ Field: index,
+ }
+ instr.setPos(pos)
+ instr.setType(fld.Type())
+ v = f.emit(instr)
+ }
+ }
+ return v
+}
+
+// emitFieldSelection emits to f code to select the index'th field of v.
+//
+// If wantAddr, the input must be a pointer-to-struct and the result
+// will be the field's address; otherwise the result will be the
+// field's value.
+// Ident id is used for position and debug info.
+func emitFieldSelection(f *Function, v Value, index int, wantAddr bool, id *ast.Ident) Value {
+ if isPointerCore(v.Type()) {
+ fld := fieldOf(typeparams.MustDeref(v.Type()), index)
+ instr := &FieldAddr{
+ X: v,
+ Field: index,
+ }
+ instr.setPos(id.Pos())
+ instr.setType(types.NewPointer(fld.Type()))
+ v = f.emit(instr)
+ // Load the field's value iff we don't want its address.
+ if !wantAddr {
+ v = emitLoad(f, v)
+ }
+ } else {
+ fld := fieldOf(v.Type(), index)
+ instr := &Field{
+ X: v,
+ Field: index,
+ }
+ instr.setPos(id.Pos())
+ instr.setType(fld.Type())
+ v = f.emit(instr)
+ }
+ emitDebugRef(f, id, v, wantAddr)
+ return v
+}
+
+// createRecoverBlock emits to f a block of code to return after a
+// recovered panic, and sets f.Recover to it.
+//
+// If f's result parameters are named, the code loads and returns
+// their current values; otherwise it returns the zero values of their
+// types.
+//
+// Idempotent.
+func createRecoverBlock(f *Function) {
+ if f.Recover != nil {
+ return // already created
+ }
+ saved := f.currentBlock
+
+ f.Recover = f.newBasicBlock("recover")
+ f.currentBlock = f.Recover
+
+ var results []Value
+ // Reload NRPs to form value tuple.
+ for _, nr := range f.results {
+ results = append(results, emitLoad(f, nr))
+ }
+
+ f.emit(&Return{Results: results})
+
+ f.currentBlock = saved
+}
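
For reference, a small illustrative Go fragment annotated with the conversion instruction emitConv above would typically select for each source-level conversion. The annotations are illustrative only; constant operands and builder mode can change the exact output.

package p

type MyInt int

func conversions(n int, s string, xs []int) {
	_ = MyInt(n)          // ChangeType: value-preserving change to a named type
	_ = float64(n)        // Convert: representation-changing conversion
	_ = []byte(s)         // Convert: string -> []byte
	var i interface{} = n // MakeInterface: non-interface value boxed in an interface
	_ = i
	_ = (*[2]int)(xs)     // SliceToArrayPointer: slice -> pointer-to-array (Go 1.17+)
}
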
diff --git a/vendor/golang.org/x/tools/go/ssa/func.go b/vendor/golang.org/x/tools/go/ssa/func.go
new file mode 100644
index 0000000..2ed63bf
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/ssa/func.go
@@ -0,0 +1,816 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssa
+
+// This file implements the Function type.
+
+import (
+ "bytes"
+ "fmt"
+ "go/ast"
+ "go/token"
+ "go/types"
+ "io"
+ "os"
+ "strings"
+
+ "golang.org/x/tools/internal/typeparams"
+)
+
+// Like ObjectOf, but panics instead of returning nil.
+// Only valid during f's create and build phases.
+func (f *Function) objectOf(id *ast.Ident) types.Object {
+ if o := f.info.ObjectOf(id); o != nil {
+ return o
+ }
+ panic(fmt.Sprintf("no types.Object for ast.Ident %s @ %s",
+ id.Name, f.Prog.Fset.Position(id.Pos())))
+}
+
+// Like TypeOf, but panics instead of returning nil.
+// Only valid during f's create and build phases.
+func (f *Function) typeOf(e ast.Expr) types.Type {
+ if T := f.info.TypeOf(e); T != nil {
+ return f.typ(T)
+ }
+ panic(fmt.Sprintf("no type for %T @ %s", e, f.Prog.Fset.Position(e.Pos())))
+}
+
+// typ is the locally instantiated type of T.
+// If f is not an instantiation, then f.typ(T)==T.
+func (f *Function) typ(T types.Type) types.Type {
+ return f.subst.typ(T)
+}
+
+// If id is an Instance, returns info.Instances[id].Type.
+// Otherwise returns f.typeOf(id).
+func (f *Function) instanceType(id *ast.Ident) types.Type {
+ if t, ok := f.info.Instances[id]; ok {
+ return t.Type
+ }
+ return f.typeOf(id)
+}
+
+// selection returns a *selection corresponding to f.info.Selections[selector]
+// with potential updates for type substitution.
+func (f *Function) selection(selector *ast.SelectorExpr) *selection {
+ sel := f.info.Selections[selector]
+ if sel == nil {
+ return nil
+ }
+
+ switch sel.Kind() {
+ case types.MethodExpr, types.MethodVal:
+ if recv := f.typ(sel.Recv()); recv != sel.Recv() {
+ // recv changed during type substitution.
+ pkg := f.declaredPackage().Pkg
+ obj, index, indirect := types.LookupFieldOrMethod(recv, true, pkg, sel.Obj().Name())
+
+ // sig replaces sel.Type(). See (types.Selection).Typ() for details.
+ sig := obj.Type().(*types.Signature)
+ sig = changeRecv(sig, newVar(sig.Recv().Name(), recv))
+ if sel.Kind() == types.MethodExpr {
+ sig = recvAsFirstArg(sig)
+ }
+ return &selection{
+ kind: sel.Kind(),
+ recv: recv,
+ typ: sig,
+ obj: obj,
+ index: index,
+ indirect: indirect,
+ }
+ }
+ }
+ return toSelection(sel)
+}
+
+// Destinations associated with unlabelled for/switch/select stmts.
+// We push/pop one of these as we enter/leave each construct and for
+// each BranchStmt we scan for the innermost target of the right type.
+type targets struct {
+ tail *targets // rest of stack
+ _break *BasicBlock
+ _continue *BasicBlock
+ _fallthrough *BasicBlock
+}
+
+// Destinations associated with a labelled block.
+// We populate these as labels are encountered in forward gotos or
+// labelled statements.
+// Forward gotos are resolved once it is known which statement they
+// are associated with inside the Function.
+type lblock struct {
+ label *types.Label // Label targeted by the blocks.
+ resolved bool // _goto block encountered (back jump or resolved fwd jump)
+ _goto *BasicBlock
+ _break *BasicBlock
+ _continue *BasicBlock
+}
+
+// label returns the symbol denoted by a label identifier.
+//
+// label should be a non-blank identifier (label.Name != "_").
+func (f *Function) label(label *ast.Ident) *types.Label {
+ return f.objectOf(label).(*types.Label)
+}
+
+// lblockOf returns the branch target associated with the
+// specified label, creating it if needed.
+func (f *Function) lblockOf(label *types.Label) *lblock {
+ lb := f.lblocks[label]
+ if lb == nil {
+ lb = &lblock{
+ label: label,
+ _goto: f.newBasicBlock(label.Name()),
+ }
+ if f.lblocks == nil {
+ f.lblocks = make(map[*types.Label]*lblock)
+ }
+ f.lblocks[label] = lb
+ }
+ return lb
+}
+
+// labelledBlock searches f for the block of the specified label.
+//
+// If f is a yield function, it additionally searches ancestor Functions
+// corresponding to enclosing range-over-func statements within the
+// same source function, so the returned block may belong to a different Function.
+func labelledBlock(f *Function, label *types.Label, tok token.Token) *BasicBlock {
+ if lb := f.lblocks[label]; lb != nil {
+ var block *BasicBlock
+ switch tok {
+ case token.BREAK:
+ block = lb._break
+ case token.CONTINUE:
+ block = lb._continue
+ case token.GOTO:
+ block = lb._goto
+ }
+ if block != nil {
+ return block
+ }
+ }
+ // Search ancestors if this is a yield function.
+ if f.jump != nil {
+ return labelledBlock(f.parent, label, tok)
+ }
+ return nil
+}
+
+// targetedBlock looks for the nearest block in f.targets
+// (and f's ancestors) that matches tok's type, and returns
+// the block, which may belong to an ancestor function.
+func targetedBlock(f *Function, tok token.Token) *BasicBlock {
+ if f == nil {
+ return nil
+ }
+ for t := f.targets; t != nil; t = t.tail {
+ var block *BasicBlock
+ switch tok {
+ case token.BREAK:
+ block = t._break
+ case token.CONTINUE:
+ block = t._continue
+ case token.FALLTHROUGH:
+ block = t._fallthrough
+ }
+ if block != nil {
+ return block
+ }
+ }
+ // Search f's ancestors (in case f is a yield function).
+ return targetedBlock(f.parent, tok)
+}
+
+// addResultVar adds a result for a variable v to f.results and v to f.returnVars.
+func (f *Function) addResultVar(v *types.Var) {
+ result := emitLocalVar(f, v)
+ f.results = append(f.results, result)
+ f.returnVars = append(f.returnVars, v)
+}
+
+// addParamVar adds a parameter to f.Params.
+func (f *Function) addParamVar(v *types.Var) *Parameter {
+ name := v.Name()
+ if name == "" {
+ name = fmt.Sprintf("arg%d", len(f.Params))
+ }
+ param := &Parameter{
+ name: name,
+ object: v,
+ typ: f.typ(v.Type()),
+ parent: f,
+ }
+ f.Params = append(f.Params, param)
+ return param
+}
+
+// addSpilledParam declares a parameter that is pre-spilled to the
+// stack; the function body will load/store the spilled location.
+// Subsequent lifting will eliminate spills where possible.
+func (f *Function) addSpilledParam(obj *types.Var) {
+ param := f.addParamVar(obj)
+ spill := emitLocalVar(f, obj)
+ f.emit(&Store{Addr: spill, Val: param})
+}
+
+// startBody initializes the function prior to generating SSA code for its body.
+// Precondition: f.Type() already set.
+func (f *Function) startBody() {
+ f.currentBlock = f.newBasicBlock("entry")
+ f.vars = make(map[*types.Var]Value) // needed for some synthetics, e.g. init
+}
+
+// createSyntacticParams populates f.Params and generates code (spills
+// and named result locals) for all the parameters declared in the
+// syntax. In addition it populates the f.objects mapping.
+//
+// Preconditions:
+// f.startBody() was called. f.info != nil.
+// Postcondition:
+// len(f.Params) == len(f.Signature.Params) + (f.Signature.Recv() ? 1 : 0)
+func (f *Function) createSyntacticParams(recv *ast.FieldList, functype *ast.FuncType) {
+ // Receiver (at most one inner iteration).
+ if recv != nil {
+ for _, field := range recv.List {
+ for _, n := range field.Names {
+ f.addSpilledParam(identVar(f, n))
+ }
+ // Anonymous receiver? No need to spill.
+ if field.Names == nil {
+ f.addParamVar(f.Signature.Recv())
+ }
+ }
+ }
+
+ // Parameters.
+ if functype.Params != nil {
+ n := len(f.Params) // 1 if has recv, 0 otherwise
+ for _, field := range functype.Params.List {
+ for _, n := range field.Names {
+ f.addSpilledParam(identVar(f, n))
+ }
+ // Anonymous parameter? No need to spill.
+ if field.Names == nil {
+ f.addParamVar(f.Signature.Params().At(len(f.Params) - n))
+ }
+ }
+ }
+
+ // Results.
+ if functype.Results != nil {
+ for _, field := range functype.Results.List {
+ // Implicit "var" decl of locals for named results.
+ for _, n := range field.Names {
+ v := identVar(f, n)
+ f.addResultVar(v)
+ }
+ // Implicit "var" decl of local for an unnamed result.
+ if field.Names == nil {
+ v := f.Signature.Results().At(len(f.results))
+ f.addResultVar(v)
+ }
+ }
+ }
+}
+
+// createDeferStack initializes fn.deferstack to a local variable that
+// is initialized by an ssa:deferstack() call.
+func (fn *Function) createDeferStack() {
+ // Each syntactic function makes a call to ssa:deferstack,
+ // which is spilled to a local. Unused ones are later removed.
+ fn.deferstack = newVar("defer$stack", tDeferStack)
+ call := &Call{Call: CallCommon{Value: vDeferStack}}
+ call.setType(tDeferStack)
+ deferstack := fn.emit(call)
+ spill := emitLocalVar(fn, fn.deferstack)
+ emitStore(fn, spill, deferstack, token.NoPos)
+}
+
+type setNumable interface {
+ setNum(int)
+}
+
+// numberRegisters assigns numbers to all SSA registers
+// (value-defining Instructions) in f, to aid debugging.
+// (Non-Instruction Values are named at construction.)
+func numberRegisters(f *Function) {
+ v := 0
+ for _, b := range f.Blocks {
+ for _, instr := range b.Instrs {
+ switch instr.(type) {
+ case Value:
+ instr.(setNumable).setNum(v)
+ v++
+ }
+ }
+ }
+}
+
+// buildReferrers populates the def/use information in all non-nil
+// Value.Referrers slices.
+// Precondition: all such slices are initially empty.
+func buildReferrers(f *Function) {
+ var rands []*Value
+ for _, b := range f.Blocks {
+ for _, instr := range b.Instrs {
+ rands = instr.Operands(rands[:0]) // recycle storage
+ for _, rand := range rands {
+ if r := *rand; r != nil {
+ if ref := r.Referrers(); ref != nil {
+ *ref = append(*ref, instr)
+ }
+ }
+ }
+ }
+ }
+}
+
+// finishBody() finalizes the contents of the function after SSA code generation of its body.
+//
+// The function is not done being built until done() is called.
+func (f *Function) finishBody() {
+ f.currentBlock = nil
+ f.lblocks = nil
+ f.returnVars = nil
+ f.jump = nil
+ f.source = nil
+ f.exits = nil
+
+ // Remove from f.Locals any Allocs that escape to the heap.
+ j := 0
+ for _, l := range f.Locals {
+ if !l.Heap {
+ f.Locals[j] = l
+ j++
+ }
+ }
+ // Nil out f.Locals[j:] to aid GC.
+ for i := j; i < len(f.Locals); i++ {
+ f.Locals[i] = nil
+ }
+ f.Locals = f.Locals[:j]
+
+ optimizeBlocks(f)
+
+ buildReferrers(f)
+
+ buildDomTree(f)
+
+ if f.Prog.mode&NaiveForm == 0 {
+ // For debugging pre-state of lifting pass:
+ // numberRegisters(f)
+ // f.WriteTo(os.Stderr)
+ lift(f)
+ }
+
+ // clear remaining builder state
+ f.results = nil // (used by lifting)
+ f.deferstack = nil // (used by lifting)
+ f.vars = nil // (used by lifting)
+ f.subst = nil
+
+ numberRegisters(f) // uses f.namedRegisters
+}
+
+// done marks the building of f's SSA body complete,
+// along with any nested functions, and optionally prints them.
+func (f *Function) done() {
+ assert(f.parent == nil, "done called on an anonymous function")
+
+ var visit func(*Function)
+ visit = func(f *Function) {
+ for _, anon := range f.AnonFuncs {
+ visit(anon) // anon is done building before f.
+ }
+
+ f.uniq = 0 // done with uniq
+ f.build = nil // function is built
+
+ if f.Prog.mode&PrintFunctions != 0 {
+ printMu.Lock()
+ f.WriteTo(os.Stdout)
+ printMu.Unlock()
+ }
+
+ if f.Prog.mode&SanityCheckFunctions != 0 {
+ mustSanityCheck(f, nil)
+ }
+ }
+ visit(f)
+}
+
+// removeNilBlocks eliminates nils from f.Blocks and updates each
+// BasicBlock.Index. Use this after any pass that may delete blocks.
+func (f *Function) removeNilBlocks() {
+ j := 0
+ for _, b := range f.Blocks {
+ if b != nil {
+ b.Index = j
+ f.Blocks[j] = b
+ j++
+ }
+ }
+ // Nil out f.Blocks[j:] to aid GC.
+ for i := j; i < len(f.Blocks); i++ {
+ f.Blocks[i] = nil
+ }
+ f.Blocks = f.Blocks[:j]
+}
+
+// SetDebugMode sets the debug mode for package pkg. If true, all its
+// functions will include full debug info. This greatly increases the
+// size of the instruction stream, and causes Functions to depend upon
+// the ASTs, potentially keeping them live in memory for longer.
+func (pkg *Package) SetDebugMode(debug bool) {
+ pkg.debug = debug
+}
+
+// debugInfo reports whether debug info is wanted for this function.
+func (f *Function) debugInfo() bool {
+ // debug info for instantiations follows the debug info of their origin.
+ p := f.declaredPackage()
+ return p != nil && p.debug
+}
+
+// lookup returns the address of the named variable identified by obj
+// that is local to function f or one of its enclosing functions.
+// If escaping, the reference comes from a potentially escaping pointer
+// expression and the referent must be heap-allocated.
+// We assume the referent is a *Alloc or *Phi.
+// (The only Phis at this stage are those created directly by go1.22 "for" loops.)
+func (f *Function) lookup(obj *types.Var, escaping bool) Value {
+ if v, ok := f.vars[obj]; ok {
+ if escaping {
+ switch v := v.(type) {
+ case *Alloc:
+ v.Heap = true
+ case *Phi:
+ for _, edge := range v.Edges {
+ if alloc, ok := edge.(*Alloc); ok {
+ alloc.Heap = true
+ }
+ }
+ }
+ }
+ return v // function-local var (address)
+ }
+
+ // Definition must be in an enclosing function;
+ // plumb it through intervening closures.
+ if f.parent == nil {
+ panic("no ssa.Value for " + obj.String())
+ }
+ outer := f.parent.lookup(obj, true) // escaping
+ v := &FreeVar{
+ name: obj.Name(),
+ typ: outer.Type(),
+ pos: outer.Pos(),
+ outer: outer,
+ parent: f,
+ }
+ f.vars[obj] = v
+ f.FreeVars = append(f.FreeVars, v)
+ return v
+}
+
+// emit emits the specified instruction to function f.
+func (f *Function) emit(instr Instruction) Value {
+ return f.currentBlock.emit(instr)
+}
+
+// RelString returns the full name of this function, qualified by
+// package name, receiver type, etc.
+//
+// The specific formatting rules are not guaranteed and may change.
+//
+// Examples:
+//
+// "math.IsNaN" // a package-level function
+// "(*bytes.Buffer).Bytes" // a declared method or a wrapper
+// "(*bytes.Buffer).Bytes$thunk" // thunk (func wrapping method; receiver is param 0)
+// "(*bytes.Buffer).Bytes$bound" // bound (func wrapping method; receiver supplied by closure)
+// "main.main$1" // an anonymous function in main
+// "main.init#1" // a declared init function
+// "main.init" // the synthesized package initializer
+//
+// When these functions are referred to from within the same package
+// (i.e. from == f.Pkg.Object), they are rendered without the package path.
+// For example: "IsNaN", "(*Buffer).Bytes", etc.
+//
+// All non-synthetic functions have distinct package-qualified names.
+// (But two methods may have the same name "(T).f" if one is a synthetic
+// wrapper promoting a non-exported method "f" from another package; in
+// that case, the strings are equal but the identifiers "f" are distinct.)
+func (f *Function) RelString(from *types.Package) string {
+ // Anonymous?
+ if f.parent != nil {
+ // An anonymous function's Name() looks like "parentName$1",
+ // but its String() should include the type/package/etc.
+ parent := f.parent.RelString(from)
+ for i, anon := range f.parent.AnonFuncs {
+ if anon == f {
+ return fmt.Sprintf("%s$%d", parent, 1+i)
+ }
+ }
+
+ return f.name // should never happen
+ }
+
+ // Method (declared or wrapper)?
+ if recv := f.Signature.Recv(); recv != nil {
+ return f.relMethod(from, recv.Type())
+ }
+
+ // Thunk?
+ if f.method != nil {
+ return f.relMethod(from, f.method.recv)
+ }
+
+ // Bound?
+ if len(f.FreeVars) == 1 && strings.HasSuffix(f.name, "$bound") {
+ return f.relMethod(from, f.FreeVars[0].Type())
+ }
+
+ // Package-level function?
+ // Prefix with package name for cross-package references only.
+ if p := f.relPkg(); p != nil && p != from {
+ return fmt.Sprintf("%s.%s", p.Path(), f.name)
+ }
+
+ // Unknown.
+ return f.name
+}
+
+func (f *Function) relMethod(from *types.Package, recv types.Type) string {
+ return fmt.Sprintf("(%s).%s", relType(recv, from), f.name)
+}
+
+// writeSignature writes to buf the signature sig in declaration syntax.
+func writeSignature(buf *bytes.Buffer, from *types.Package, name string, sig *types.Signature) {
+ buf.WriteString("func ")
+ if recv := sig.Recv(); recv != nil {
+ buf.WriteString("(")
+ if name := recv.Name(); name != "" {
+ buf.WriteString(name)
+ buf.WriteString(" ")
+ }
+ types.WriteType(buf, recv.Type(), types.RelativeTo(from))
+ buf.WriteString(") ")
+ }
+ buf.WriteString(name)
+ types.WriteSignature(buf, sig, types.RelativeTo(from))
+}
+
+// declaredPackage returns the package fn is declared in or nil if the
+// function is not declared in a package.
+func (fn *Function) declaredPackage() *Package {
+ switch {
+ case fn.Pkg != nil:
+ return fn.Pkg // non-generic function (does that follow??)
+ case fn.topLevelOrigin != nil:
+ return fn.topLevelOrigin.Pkg // instance of a named generic function
+ case fn.parent != nil:
+ return fn.parent.declaredPackage() // instance of an anonymous [generic] function
+ default:
+ return nil // function is not declared in a package, e.g. a wrapper.
+ }
+}
+
+// relPkg returns the types.Package that fn is printed relative to.
+func (fn *Function) relPkg() *types.Package {
+ if p := fn.declaredPackage(); p != nil {
+ return p.Pkg
+ }
+ return nil
+}
+
+var _ io.WriterTo = (*Function)(nil) // *Function implements io.WriterTo
+
+func (f *Function) WriteTo(w io.Writer) (int64, error) {
+ var buf bytes.Buffer
+ WriteFunction(&buf, f)
+ n, err := w.Write(buf.Bytes())
+ return int64(n), err
+}
+
+// WriteFunction writes to buf a human-readable "disassembly" of f.
+func WriteFunction(buf *bytes.Buffer, f *Function) {
+ fmt.Fprintf(buf, "# Name: %s\n", f.String())
+ if f.Pkg != nil {
+ fmt.Fprintf(buf, "# Package: %s\n", f.Pkg.Pkg.Path())
+ }
+ if syn := f.Synthetic; syn != "" {
+ fmt.Fprintln(buf, "# Synthetic:", syn)
+ }
+ if pos := f.Pos(); pos.IsValid() {
+ fmt.Fprintf(buf, "# Location: %s\n", f.Prog.Fset.Position(pos))
+ }
+
+ if f.parent != nil {
+ fmt.Fprintf(buf, "# Parent: %s\n", f.parent.Name())
+ }
+
+ if f.Recover != nil {
+ fmt.Fprintf(buf, "# Recover: %s\n", f.Recover)
+ }
+
+ from := f.relPkg()
+
+ if f.FreeVars != nil {
+ buf.WriteString("# Free variables:\n")
+ for i, fv := range f.FreeVars {
+ fmt.Fprintf(buf, "# % 3d:\t%s %s\n", i, fv.Name(), relType(fv.Type(), from))
+ }
+ }
+
+ if len(f.Locals) > 0 {
+ buf.WriteString("# Locals:\n")
+ for i, l := range f.Locals {
+ fmt.Fprintf(buf, "# % 3d:\t%s %s\n", i, l.Name(), relType(typeparams.MustDeref(l.Type()), from))
+ }
+ }
+ writeSignature(buf, from, f.Name(), f.Signature)
+ buf.WriteString(":\n")
+
+ if f.Blocks == nil {
+ buf.WriteString("\t(external)\n")
+ }
+
+ // NB. column calculations are confused by non-ASCII
+ // characters and assume 8-space tabs.
+ const punchcard = 80 // for old time's sake.
+ const tabwidth = 8
+ for _, b := range f.Blocks {
+ if b == nil {
+ // Corrupt CFG.
+ fmt.Fprintf(buf, ".nil:\n")
+ continue
+ }
+ n, _ := fmt.Fprintf(buf, "%d:", b.Index)
+ bmsg := fmt.Sprintf("%s P:%d S:%d", b.Comment, len(b.Preds), len(b.Succs))
+ fmt.Fprintf(buf, "%*s%s\n", punchcard-1-n-len(bmsg), "", bmsg)
+
+ if false { // CFG debugging
+ fmt.Fprintf(buf, "\t# CFG: %s --> %s --> %s\n", b.Preds, b, b.Succs)
+ }
+ for _, instr := range b.Instrs {
+ buf.WriteString("\t")
+ switch v := instr.(type) {
+ case Value:
+ l := punchcard - tabwidth
+ // Left-align the instruction.
+ if name := v.Name(); name != "" {
+ n, _ := fmt.Fprintf(buf, "%s = ", name)
+ l -= n
+ }
+ n, _ := buf.WriteString(instr.String())
+ l -= n
+ // Right-align the type if there's space.
+ if t := v.Type(); t != nil {
+ buf.WriteByte(' ')
+ ts := relType(t, from)
+ l -= len(ts) + len(" ") // (spaces before and after type)
+ if l > 0 {
+ fmt.Fprintf(buf, "%*s", l, "")
+ }
+ buf.WriteString(ts)
+ }
+ case nil:
+ // Be robust against bad transforms.
+ buf.WriteString("<deleted>")
+ default:
+ buf.WriteString(instr.String())
+ }
+ // -mode=S: show line numbers
+ if f.Prog.mode&LogSource != 0 {
+ if pos := instr.Pos(); pos.IsValid() {
+ fmt.Fprintf(buf, " L%d", f.Prog.Fset.Position(pos).Line)
+ }
+ }
+ buf.WriteString("\n")
+ }
+ }
+ fmt.Fprintf(buf, "\n")
+}
+
+// newBasicBlock adds to f a new basic block and returns it. It does
+// not automatically become the current block for subsequent calls to emit.
+// comment is an optional string for more readable debugging output.
+func (f *Function) newBasicBlock(comment string) *BasicBlock {
+ b := &BasicBlock{
+ Index: len(f.Blocks),
+ Comment: comment,
+ parent: f,
+ }
+ b.Succs = b.succs2[:0]
+ f.Blocks = append(f.Blocks, b)
+ return b
+}
+
+// NewFunction returns a new synthetic Function instance belonging to
+// prog, with its name and signature fields set as specified.
+//
+// The caller is responsible for initializing the remaining fields of
+// the function object, e.g. Pkg, Params, Blocks.
+//
+// It is practically impossible for clients to construct well-formed
+// SSA functions/packages/programs directly, so we assume this is the
+// job of the Builder alone. NewFunction exists to provide clients a
+// little flexibility. For example, analysis tools may wish to
+// construct fake Functions for the root of the callgraph, a fake
+// "reflect" package, etc.
+//
+// TODO(adonovan): think harder about the API here.
+func (prog *Program) NewFunction(name string, sig *types.Signature, provenance string) *Function {
+ return &Function{Prog: prog, name: name, Signature: sig, Synthetic: provenance}
+}
+
+// Syntax returns the function's syntax (*ast.Func{Decl,Lit})
+// if it was produced from syntax, or an *ast.RangeStmt if
+// it is a range-over-func yield function.
+func (f *Function) Syntax() ast.Node { return f.syntax }
+
+// identVar returns the variable defined by id.
+func identVar(fn *Function, id *ast.Ident) *types.Var {
+ return fn.info.Defs[id].(*types.Var)
+}
+
+// unique returns a unique positive int within the source tree of f.
+// The source tree of f includes all of f's ancestors by parent and all
+// of the AnonFuncs contained within these.
+func unique(f *Function) int64 {
+ f.uniq++
+ return f.uniq
+}
+
+// exit is a change of control flow going from a range-over-func
+// yield function to an ancestor function caused by a break, continue,
+// goto, or return statement.
+//
+// There are 3 types of exits:
+// * return from the source function (from ReturnStmt),
+// * jump to a block (from break and continue statements [labelled/unlabelled]),
+// * go to a label (from goto statements).
+//
+// As the builder does one pass over the ast, it is unclear whether
+// a forward goto statement will leave a range-over-func body.
+// The function being exited to is unresolved until the end
+// of building the range-over-func body.
+type exit struct {
+ id int64 // unique value for exit within from and to
+ from *Function // the function the exit starts from
+ to *Function // the function being exited to (nil if unresolved)
+ pos token.Pos
+
+ block *BasicBlock // basic block within to being jumped to.
+ label *types.Label // forward label being jumped to via goto.
+ // block == nil && label == nil => return
+}
+
+// storeVar emits to function f code to store a value v to a *types.Var x.
+func storeVar(f *Function, x *types.Var, v Value, pos token.Pos) {
+ emitStore(f, f.lookup(x, true), v, pos)
+}
+
+// labelExit creates a new exit to a yield fn that exits the function using a label.
+func labelExit(fn *Function, label *types.Label, pos token.Pos) *exit {
+ e := &exit{
+ id: unique(fn),
+ from: fn,
+ to: nil,
+ pos: pos,
+ label: label,
+ }
+ fn.exits = append(fn.exits, e)
+ return e
+}
+
+// blockExit creates a new exit to a yield fn that jumps to a basic block.
+func blockExit(fn *Function, block *BasicBlock, pos token.Pos) *exit {
+ e := &exit{
+ id: unique(fn),
+ from: fn,
+ to: block.parent,
+ pos: pos,
+ block: block,
+ }
+ fn.exits = append(fn.exits, e)
+ return e
+}
+
+// returnExit creates a new exit to a yield fn that returns from the source function.
+func returnExit(fn *Function, pos token.Pos) *exit {
+ e := &exit{
+ id: unique(fn),
+ from: fn,
+ to: fn.source,
+ pos: pos,
+ }
+ fn.exits = append(fn.exits, e)
+ return e
+}
diff --git a/vendor/golang.org/x/tools/go/ssa/instantiate.go b/vendor/golang.org/x/tools/go/ssa/instantiate.go
new file mode 100644
index 0000000..2512f32
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/ssa/instantiate.go
@@ -0,0 +1,131 @@
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssa
+
+import (
+ "fmt"
+ "go/types"
+ "sync"
+)
+
+// A generic records information about a generic origin function,
+// including a cache of existing instantiations.
+type generic struct {
+ instancesMu sync.Mutex
+ instances map[*typeList]*Function // canonical type arguments to an instance.
+}
+
+// instance returns a Function that is the instantiation of generic
+// origin function fn with the type arguments targs.
+//
+// Any created instance is enqueued in b for building.
+//
+// Acquires fn.generic.instancesMu.
+func (fn *Function) instance(targs []types.Type, b *builder) *Function {
+ key := fn.Prog.canon.List(targs)
+
+ gen := fn.generic
+
+ gen.instancesMu.Lock()
+ defer gen.instancesMu.Unlock()
+ inst, ok := gen.instances[key]
+ if !ok {
+ inst = createInstance(fn, targs)
+ inst.buildshared = b.shared()
+ b.enqueue(inst)
+
+ if gen.instances == nil {
+ gen.instances = make(map[*typeList]*Function)
+ }
+ gen.instances[key] = inst
+ } else {
+ b.waitForSharedFunction(inst)
+ }
+ return inst
+}
+
+// createInstance returns the instantiation of generic function fn using targs.
+//
+// Requires fn.generic.instancesMu.
+func createInstance(fn *Function, targs []types.Type) *Function {
+ prog := fn.Prog
+
+ // Compute signature.
+ var sig *types.Signature
+ var obj *types.Func
+ if recv := fn.Signature.Recv(); recv != nil {
+ // method
+ obj = prog.canon.instantiateMethod(fn.object, targs, prog.ctxt)
+ sig = obj.Type().(*types.Signature)
+ } else {
+ // function
+ instSig, err := types.Instantiate(prog.ctxt, fn.Signature, targs, false)
+ if err != nil {
+ panic(err)
+ }
+ instance, ok := instSig.(*types.Signature)
+ if !ok {
+ panic("Instantiate of a Signature returned a non-signature")
+ }
+ obj = fn.object // instantiation does not exist yet
+ sig = prog.canon.Type(instance).(*types.Signature)
+ }
+
+ // Choose strategy (instance or wrapper).
+ var (
+ synthetic string
+ subst *subster
+ build buildFunc
+ )
+ if prog.mode&InstantiateGenerics != 0 && !prog.isParameterized(targs...) {
+ synthetic = fmt.Sprintf("instance of %s", fn.Name())
+ if fn.syntax != nil {
+ subst = makeSubster(prog.ctxt, obj, fn.typeparams, targs, false)
+ build = (*builder).buildFromSyntax
+ } else {
+ build = (*builder).buildParamsOnly
+ }
+ } else {
+ synthetic = fmt.Sprintf("instantiation wrapper of %s", fn.Name())
+ build = (*builder).buildInstantiationWrapper
+ }
+
+ /* generic instance or instantiation wrapper */
+ return &Function{
+ name: fmt.Sprintf("%s%s", fn.Name(), targs), // may not be unique
+ object: obj,
+ Signature: sig,
+ Synthetic: synthetic,
+ syntax: fn.syntax, // \
+ info: fn.info, // } empty for non-created packages
+ goversion: fn.goversion, // /
+ build: build,
+ topLevelOrigin: fn,
+ pos: obj.Pos(),
+ Pkg: nil,
+ Prog: fn.Prog,
+ typeparams: fn.typeparams, // share with origin
+ typeargs: targs,
+ subst: subst,
+ }
+}
+
+// isParameterized reports whether any of the specified types contains
+// a free type parameter. It is safe to call concurrently.
+func (prog *Program) isParameterized(ts ...types.Type) bool {
+ prog.hasParamsMu.Lock()
+ defer prog.hasParamsMu.Unlock()
+
+ // TODO(adonovan): profile. If this operation is expensive,
+ // handle the most common but shallow cases such as T, pkg.T,
+ // *T without consulting the cache under the lock.
+
+ for _, t := range ts {
+ if prog.hasParams.Has(t) {
+ return true
+ }
+ }
+ return false
+}
diff --git a/vendor/golang.org/x/tools/go/ssa/lift.go b/vendor/golang.org/x/tools/go/ssa/lift.go
new file mode 100644
index 0000000..aada3dc
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/ssa/lift.go
@@ -0,0 +1,688 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssa
+
+// This file defines the lifting pass which tries to "lift" Alloc
+// cells (new/local variables) into SSA registers, replacing loads
+// with the dominating stored value, eliminating loads and stores, and
+// inserting φ-nodes as needed.
+
+// Cited papers and resources:
+//
+// Ron Cytron et al. 1991. Efficiently computing SSA form...
+// http://doi.acm.org/10.1145/115372.115320
+//
+// Cooper, Harvey, Kennedy. 2001. A Simple, Fast Dominance Algorithm.
+// Software Practice and Experience 2001, 4:1-10.
+// http://www.hipersoft.rice.edu/grads/publications/dom14.pdf
+//
+// Daniel Berlin, llvmdev mailing list, 2012.
+// http://lists.cs.uiuc.edu/pipermail/llvmdev/2012-January/046638.html
+// (Be sure to expand the whole thread.)
+
+// TODO(adonovan): opt: there are many optimizations worth evaluating, and
+// the conventional wisdom for SSA construction is that a simple
+// algorithm well engineered often beats those of better asymptotic
+// complexity on all but the most egregious inputs.
+//
+// Danny Berlin suggests that the Cooper et al. algorithm for
+// computing the dominance frontier is superior to Cytron et al.
+// Furthermore he recommends that rather than computing the DF for the
+// whole function then renaming all alloc cells, it may be cheaper to
+// compute the DF for each alloc cell separately and throw it away.
+//
+// Consider exploiting liveness information to avoid creating dead
+// φ-nodes which we then immediately remove.
+//
+// Also see many other "TODO: opt" suggestions in the code.
+
+import (
+ "fmt"
+ "go/token"
+ "math/big"
+ "os"
+
+ "golang.org/x/tools/internal/typeparams"
+)
+
+// If true, show diagnostic information at each step of lifting.
+// Very verbose.
+const debugLifting = false
+
+// domFrontier maps each block to the set of blocks in its dominance
+// frontier. The outer slice is conceptually a map keyed by
+// Block.Index. The inner slice is conceptually a set, possibly
+// containing duplicates.
+//
+// TODO(adonovan): opt: measure impact of dups; consider a packed bit
+// representation, e.g. big.Int, and bitwise parallel operations for
+// the union step in the Children loop.
+//
+// domFrontier's methods mutate the slice's elements but not its
+// length, so their receivers needn't be pointers.
+type domFrontier [][]*BasicBlock
+
+func (df domFrontier) add(u, v *BasicBlock) {
+ p := &df[u.Index]
+ *p = append(*p, v)
+}
+
+// build builds the dominance frontier df for the dominator (sub)tree
+// rooted at u, using the Cytron et al. algorithm.
+//
+// TODO(adonovan): opt: consider Berlin approach, computing pruned SSA
+// by pruning the entire IDF computation, rather than merely pruning
+// the DF -> IDF step.
+func (df domFrontier) build(u *BasicBlock) {
+ // Encounter each node u in postorder of dom tree.
+ for _, child := range u.dom.children {
+ df.build(child)
+ }
+ for _, vb := range u.Succs {
+ if v := vb.dom; v.idom != u {
+ df.add(u, vb)
+ }
+ }
+ for _, w := range u.dom.children {
+ for _, vb := range df[w.Index] {
+ // TODO(adonovan): opt: use word-parallel bitwise union.
+ if v := vb.dom; v.idom != u {
+ df.add(u, vb)
+ }
+ }
+ }
+}
+
+func buildDomFrontier(fn *Function) domFrontier {
+ df := make(domFrontier, len(fn.Blocks))
+ df.build(fn.Blocks[0])
+ if fn.Recover != nil {
+ df.build(fn.Recover)
+ }
+ return df
+}
+
+func removeInstr(refs []Instruction, instr Instruction) []Instruction {
+ return removeInstrsIf(refs, func(i Instruction) bool { return i == instr })
+}
+
+func removeInstrsIf(refs []Instruction, p func(Instruction) bool) []Instruction {
+ // TODO(taking): replace with go1.22 slices.DeleteFunc.
+ i := 0
+ for _, ref := range refs {
+ if p(ref) {
+ continue
+ }
+ refs[i] = ref
+ i++
+ }
+ for j := i; j != len(refs); j++ {
+ refs[j] = nil // aid GC
+ }
+ return refs[:i]
+}
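The TODO above maps directly onto the go1.22 standard library; a hedged in-package sketch of that replacement (the function name is illustrative only):

package ssa

import "slices"

// removeInstrsIfDeleteFunc is a sketch of the go1.22 replacement hinted
// at by the TODO: slices.DeleteFunc drops the elements for which p
// reports true and (since go1.22) zeroes the vacated tail, preserving
// the "aid GC" behaviour of the manual loop above.
func removeInstrsIfDeleteFunc(refs []Instruction, p func(Instruction) bool) []Instruction {
	return slices.DeleteFunc(refs, p)
}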
+
+// lift replaces local and new Allocs accessed only with
+// load/store by SSA registers, inserting φ-nodes where necessary.
+// The result is a program in classical pruned SSA form.
+//
+// Preconditions:
+// - fn has no dead blocks (blockopt has run).
+// - Def/use info (Operands and Referrers) is up-to-date.
+// - The dominator tree is up-to-date.
+func lift(fn *Function) {
+ // TODO(adonovan): opt: lots of little optimizations may be
+ // worthwhile here, especially if they cause us to avoid
+ // buildDomFrontier. For example:
+ //
+ // - Alloc never loaded? Eliminate.
+ // - Alloc never stored? Replace all loads with a zero constant.
+ // - Alloc stored once? Replace loads with dominating store;
+ // don't forget that an Alloc is itself an effective store
+ // of zero.
+ // - Alloc used only within a single block?
+ // Use degenerate algorithm avoiding φ-nodes.
+ // - Consider synergy with scalar replacement of aggregates (SRA).
+ // e.g. *(&x.f) where x is an Alloc.
+ // Perhaps we'd get better results if we generated this as x.f
+ // i.e. Field(x, .f) instead of Load(FieldIndex(x, .f)).
+ // Unclear.
+ //
+ // But we will start with the simplest correct code.
+ df := buildDomFrontier(fn)
+
+ if debugLifting {
+ title := false
+ for i, blocks := range df {
+ if blocks != nil {
+ if !title {
+ fmt.Fprintf(os.Stderr, "Dominance frontier of %s:\n", fn)
+ title = true
+ }
+ fmt.Fprintf(os.Stderr, "\t%s: %s\n", fn.Blocks[i], blocks)
+ }
+ }
+ }
+
+ newPhis := make(newPhiMap)
+
+ // During this pass we will replace some BasicBlock.Instrs
+ // (allocs, loads and stores) with nil, keeping a count in
+ // BasicBlock.gaps. At the end we will reset Instrs to the
+ // concatenation of all non-dead newPhis and non-nil Instrs
+ // for the block, reusing the original array if space permits.
+
+ // While we're here, we also eliminate 'rundefers'
+ // instructions and ssa:deferstack() in functions that contain no
+ // 'defer' instructions. For now, we also eliminate
+ // 's = ssa:deferstack()' calls if s doesn't escape, replacing s
+ // with nil in Defer{DeferStack: s}. This has the same meaning,
+ // but allows eliminating the intrinsic function `ssa:deferstack()`
+ // (unless it is needed due to range-over-func instances). This gives
+ // ssa users more time to support range-over-func.
+ usesDefer := false
+ deferstackAlloc, deferstackCall := deferstackPreamble(fn)
+ eliminateDeferStack := deferstackAlloc != nil && !deferstackAlloc.Heap
+
+ // A counter used to generate ~unique ids for Phi nodes, as an
+ // aid to debugging. We use large numbers to make them highly
+ // visible. All nodes are renumbered later.
+ fresh := 1000
+
+ // Determine which allocs we can lift and number them densely.
+ // The renaming phase uses this numbering for compact maps.
+ numAllocs := 0
+ for _, b := range fn.Blocks {
+ b.gaps = 0
+ b.rundefers = 0
+ for _, instr := range b.Instrs {
+ switch instr := instr.(type) {
+ case *Alloc:
+ index := -1
+ if liftAlloc(df, instr, newPhis, &fresh) {
+ index = numAllocs
+ numAllocs++
+ }
+ instr.index = index
+ case *Defer:
+ usesDefer = true
+ if eliminateDeferStack {
+ // Clear DeferStack and remove references to loads
+ if instr.DeferStack != nil {
+ if refs := instr.DeferStack.Referrers(); refs != nil {
+ *refs = removeInstr(*refs, instr)
+ }
+ instr.DeferStack = nil
+ }
+ }
+ case *RunDefers:
+ b.rundefers++
+ }
+ }
+ }
+
+ // renaming maps an alloc (keyed by index) to its replacement
+ // value. Initially the renaming contains nil, signifying the
+ // zero constant of the appropriate type; we construct the
+ // Const lazily at most once on each path through the domtree.
+ // TODO(adonovan): opt: cache per-function not per subtree.
+ renaming := make([]Value, numAllocs)
+
+ // Renaming.
+ rename(fn.Blocks[0], renaming, newPhis)
+
+ // Eliminate dead φ-nodes.
+ removeDeadPhis(fn.Blocks, newPhis)
+
+ // Eliminate ssa:deferstack() call.
+ if eliminateDeferStack {
+ b := deferstackCall.block
+ for i, instr := range b.Instrs {
+ if instr == deferstackCall {
+ b.Instrs[i] = nil
+ b.gaps++
+ break
+ }
+ }
+ }
+
+ // Prepend remaining live φ-nodes to each block.
+ for _, b := range fn.Blocks {
+ nps := newPhis[b]
+ j := len(nps)
+
+ rundefersToKill := b.rundefers
+ if usesDefer {
+ rundefersToKill = 0
+ }
+
+ if j+b.gaps+rundefersToKill == 0 {
+ continue // fast path: no new phis or gaps
+ }
+
+ // Compact nps + non-nil Instrs into a new slice.
+ // TODO(adonovan): opt: compact in situ (rightwards)
+ // if Instrs has sufficient space or slack.
+ dst := make([]Instruction, len(b.Instrs)+j-b.gaps-rundefersToKill)
+ for i, np := range nps {
+ dst[i] = np.phi
+ }
+ for _, instr := range b.Instrs {
+ if instr == nil {
+ continue
+ }
+ if !usesDefer {
+ if _, ok := instr.(*RunDefers); ok {
+ continue
+ }
+ }
+ dst[j] = instr
+ j++
+ }
+ b.Instrs = dst
+ }
+
+ // Remove any fn.Locals that were lifted.
+ j := 0
+ for _, l := range fn.Locals {
+ if l.index < 0 {
+ fn.Locals[j] = l
+ j++
+ }
+ }
+ // Nil out fn.Locals[j:] to aid GC.
+ for i := j; i < len(fn.Locals); i++ {
+ fn.Locals[i] = nil
+ }
+ fn.Locals = fn.Locals[:j]
+}
+
+// removeDeadPhis removes φ-nodes not transitively needed by a
+// non-Phi, non-DebugRef instruction.
+func removeDeadPhis(blocks []*BasicBlock, newPhis newPhiMap) {
+ // First pass: find the set of "live" φ-nodes: those reachable
+ // from some non-Phi instruction.
+ //
+ // We compute reachability in reverse, starting from each φ,
+ // rather than forwards, starting from each live non-Phi
+ // instruction, because this way visits much less of the
+ // Value graph.
+ livePhis := make(map[*Phi]bool)
+ for _, npList := range newPhis {
+ for _, np := range npList {
+ phi := np.phi
+ if !livePhis[phi] && phiHasDirectReferrer(phi) {
+ markLivePhi(livePhis, phi)
+ }
+ }
+ }
+
+ // Existing φ-nodes due to && and || operators
+ // are all considered live (see Go issue 19622).
+ for _, b := range blocks {
+ for _, phi := range b.phis() {
+ markLivePhi(livePhis, phi.(*Phi))
+ }
+ }
+
+ // Second pass: eliminate unused phis from newPhis.
+ for block, npList := range newPhis {
+ j := 0
+ for _, np := range npList {
+ if livePhis[np.phi] {
+ npList[j] = np
+ j++
+ } else {
+ // discard it, first removing it from referrers
+ for _, val := range np.phi.Edges {
+ if refs := val.Referrers(); refs != nil {
+ *refs = removeInstr(*refs, np.phi)
+ }
+ }
+ np.phi.block = nil
+ }
+ }
+ newPhis[block] = npList[:j]
+ }
+}
+
+// markLivePhi marks phi, and all φ-nodes transitively reachable via
+// its Operands, live.
+func markLivePhi(livePhis map[*Phi]bool, phi *Phi) {
+ livePhis[phi] = true
+ for _, rand := range phi.Operands(nil) {
+ if q, ok := (*rand).(*Phi); ok {
+ if !livePhis[q] {
+ markLivePhi(livePhis, q)
+ }
+ }
+ }
+}
+
+// phiHasDirectReferrer reports whether phi is directly referred to by
+// a non-Phi instruction. Such instructions are the
+// roots of the liveness traversal.
+func phiHasDirectReferrer(phi *Phi) bool {
+ for _, instr := range *phi.Referrers() {
+ if _, ok := instr.(*Phi); !ok {
+ return true
+ }
+ }
+ return false
+}
+
+type blockSet struct{ big.Int } // (inherit methods from Int)
+
+// add adds b to the set and returns true if the set changed.
+func (s *blockSet) add(b *BasicBlock) bool {
+ i := b.Index
+ if s.Bit(i) != 0 {
+ return false
+ }
+ s.SetBit(&s.Int, i, 1)
+ return true
+}
+
+// take removes an arbitrary element from a set s and
+// returns its index, or returns -1 if empty.
+func (s *blockSet) take() int {
+ l := s.BitLen()
+ for i := 0; i < l; i++ {
+ if s.Bit(i) == 1 {
+ s.SetBit(&s.Int, i, 0)
+ return i
+ }
+ }
+ return -1
+}
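A hedged in-package sketch of the add/take worklist idiom this type exists for, mirroring how liftAlloc drains its W set below (the helper name and seed parameter are assumptions):

package ssa

// drainWorklist visits each block index placed in the set exactly once;
// the loop shape matches the iterated-dominance-frontier walk in liftAlloc.
func drainWorklist(fn *Function, seed ...*BasicBlock) {
	var w blockSet
	for _, b := range seed {
		w.add(b)
	}
	for i := w.take(); i != -1; i = w.take() {
		_ = fn.Blocks[i] // process the block whose Index is i
	}
}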
+
+// newPhi is a pair of a newly introduced φ-node and the lifted Alloc
+// it replaces.
+type newPhi struct {
+ phi *Phi
+ alloc *Alloc
+}
+
+// newPhiMap records for each basic block, the set of newPhis that
+// must be prepended to the block.
+type newPhiMap map[*BasicBlock][]newPhi
+
+// liftAlloc determines whether alloc can be lifted into registers,
+// and if so, it populates newPhis with all the φ-nodes it may require
+// and returns true.
+//
+// fresh is a source of fresh ids for phi nodes.
+func liftAlloc(df domFrontier, alloc *Alloc, newPhis newPhiMap, fresh *int) bool {
+ // Don't lift result values in functions that defer
+ // calls that may recover from panic.
+ if fn := alloc.Parent(); fn.Recover != nil {
+ for _, nr := range fn.results {
+ if nr == alloc {
+ return false
+ }
+ }
+ }
+
+ // Compute defblocks, the set of blocks containing a
+ // definition of the alloc cell.
+ var defblocks blockSet
+ for _, instr := range *alloc.Referrers() {
+ // Bail out if we discover the alloc is not liftable;
+ // the only operations permitted to use the alloc are
+ // loads/stores into the cell, and DebugRef.
+ switch instr := instr.(type) {
+ case *Store:
+ if instr.Val == alloc {
+ return false // address used as value
+ }
+ if instr.Addr != alloc {
+ panic("Alloc.Referrers is inconsistent")
+ }
+ defblocks.add(instr.Block())
+ case *UnOp:
+ if instr.Op != token.MUL {
+ return false // not a load
+ }
+ if instr.X != alloc {
+ panic("Alloc.Referrers is inconsistent")
+ }
+ case *DebugRef:
+ // ok
+ default:
+ return false // some other instruction
+ }
+ }
+ // The Alloc itself counts as a (zero) definition of the cell.
+ defblocks.add(alloc.Block())
+
+ if debugLifting {
+ fmt.Fprintln(os.Stderr, "\tlifting ", alloc, alloc.Name())
+ }
+
+ fn := alloc.Parent()
+
+ // Φ-insertion.
+ //
+ // What follows is the body of the main loop of the insert-φ
+ // function described by Cytron et al, but instead of using
+ // counter tricks, we just reset the 'hasAlready' and 'work'
+ // sets each iteration. These are bitmaps so it's pretty cheap.
+ //
+ // TODO(adonovan): opt: recycle slice storage for W,
+ // hasAlready, defBlocks across liftAlloc calls.
+ var hasAlready blockSet
+
+ // Initialize W and work to defblocks.
+ var work blockSet = defblocks // blocks seen
+ var W blockSet // blocks to do
+ W.Set(&defblocks.Int)
+
+ // Traverse iterated dominance frontier, inserting φ-nodes.
+ for i := W.take(); i != -1; i = W.take() {
+ u := fn.Blocks[i]
+ for _, v := range df[u.Index] {
+ if hasAlready.add(v) {
+ // Create φ-node.
+ // It will be prepended to v.Instrs later, if needed.
+ phi := &Phi{
+ Edges: make([]Value, len(v.Preds)),
+ Comment: alloc.Comment,
+ }
+ // This is merely a debugging aid:
+ phi.setNum(*fresh)
+ *fresh++
+
+ phi.pos = alloc.Pos()
+ phi.setType(typeparams.MustDeref(alloc.Type()))
+ phi.block = v
+ if debugLifting {
+ fmt.Fprintf(os.Stderr, "\tplace %s = %s at block %s\n", phi.Name(), phi, v)
+ }
+ newPhis[v] = append(newPhis[v], newPhi{phi, alloc})
+
+ if work.add(v) {
+ W.add(v)
+ }
+ }
+ }
+ }
+
+ return true
+}
+
+// replaceAll replaces all intraprocedural uses of x with y,
+// updating x.Referrers and y.Referrers.
+// Precondition: x.Referrers() != nil, i.e. x must be local to some function.
+func replaceAll(x, y Value) {
+ var rands []*Value
+ pxrefs := x.Referrers()
+ pyrefs := y.Referrers()
+ for _, instr := range *pxrefs {
+ rands = instr.Operands(rands[:0]) // recycle storage
+ for _, rand := range rands {
+ if *rand != nil {
+ if *rand == x {
+ *rand = y
+ }
+ }
+ }
+ if pyrefs != nil {
+ *pyrefs = append(*pyrefs, instr) // dups ok
+ }
+ }
+ *pxrefs = nil // x is now unreferenced
+}
+
+// renamed returns the value to which alloc is being renamed,
+// constructing it lazily if it's the implicit zero initialization.
+func renamed(renaming []Value, alloc *Alloc) Value {
+ v := renaming[alloc.index]
+ if v == nil {
+ v = zeroConst(typeparams.MustDeref(alloc.Type()))
+ renaming[alloc.index] = v
+ }
+ return v
+}
+
+// rename implements the (Cytron et al) SSA renaming algorithm, a
+// preorder traversal of the dominator tree replacing all loads of
+// Alloc cells with the value stored to that cell by the dominating
+// store instruction. For lifting, we need only consider loads,
+// stores and φ-nodes.
+//
+// renaming is a map from *Alloc (keyed by index number) to its
+// dominating stored value; newPhis[x] is the set of new φ-nodes to be
+// prepended to block x.
+func rename(u *BasicBlock, renaming []Value, newPhis newPhiMap) {
+ // Each φ-node becomes the new name for its associated Alloc.
+ for _, np := range newPhis[u] {
+ phi := np.phi
+ alloc := np.alloc
+ renaming[alloc.index] = phi
+ }
+
+ // Rename loads and stores of allocs.
+ for i, instr := range u.Instrs {
+ switch instr := instr.(type) {
+ case *Alloc:
+ if instr.index >= 0 { // store of zero to Alloc cell
+ // Replace dominated loads by the zero value.
+ renaming[instr.index] = nil
+ if debugLifting {
+ fmt.Fprintf(os.Stderr, "\tkill alloc %s\n", instr)
+ }
+ // Delete the Alloc.
+ u.Instrs[i] = nil
+ u.gaps++
+ }
+
+ case *Store:
+ if alloc, ok := instr.Addr.(*Alloc); ok && alloc.index >= 0 { // store to Alloc cell
+ // Replace dominated loads by the stored value.
+ renaming[alloc.index] = instr.Val
+ if debugLifting {
+ fmt.Fprintf(os.Stderr, "\tkill store %s; new value: %s\n",
+ instr, instr.Val.Name())
+ }
+ // Remove the store from the referrer list of the stored value.
+ if refs := instr.Val.Referrers(); refs != nil {
+ *refs = removeInstr(*refs, instr)
+ }
+ // Delete the Store.
+ u.Instrs[i] = nil
+ u.gaps++
+ }
+
+ case *UnOp:
+ if instr.Op == token.MUL {
+ if alloc, ok := instr.X.(*Alloc); ok && alloc.index >= 0 { // load of Alloc cell
+ newval := renamed(renaming, alloc)
+ if debugLifting {
+ fmt.Fprintf(os.Stderr, "\tupdate load %s = %s with %s\n",
+ instr.Name(), instr, newval.Name())
+ }
+ // Replace all references to
+ // the loaded value by the
+ // dominating stored value.
+ replaceAll(instr, newval)
+ // Delete the Load.
+ u.Instrs[i] = nil
+ u.gaps++
+ }
+ }
+
+ case *DebugRef:
+ if alloc, ok := instr.X.(*Alloc); ok && alloc.index >= 0 { // ref of Alloc cell
+ if instr.IsAddr {
+ instr.X = renamed(renaming, alloc)
+ instr.IsAddr = false
+
+ // Add DebugRef to instr.X's referrers.
+ if refs := instr.X.Referrers(); refs != nil {
+ *refs = append(*refs, instr)
+ }
+ } else {
+ // A source expression denotes the address
+ // of an Alloc that was optimized away.
+ instr.X = nil
+
+ // Delete the DebugRef.
+ u.Instrs[i] = nil
+ u.gaps++
+ }
+ }
+ }
+ }
+
+ // For each φ-node in a CFG successor, rename the edge.
+ for _, v := range u.Succs {
+ phis := newPhis[v]
+ if len(phis) == 0 {
+ continue
+ }
+ i := v.predIndex(u)
+ for _, np := range phis {
+ phi := np.phi
+ alloc := np.alloc
+ newval := renamed(renaming, alloc)
+ if debugLifting {
+ fmt.Fprintf(os.Stderr, "\tsetphi %s edge %s -> %s (#%d) (alloc=%s) := %s\n",
+ phi.Name(), u, v, i, alloc.Name(), newval.Name())
+ }
+ phi.Edges[i] = newval
+ if prefs := newval.Referrers(); prefs != nil {
+ *prefs = append(*prefs, phi)
+ }
+ }
+ }
+
+ // Continue depth-first recursion over domtree, pushing a
+ // fresh copy of the renaming map for each subtree.
+ for i, v := range u.dom.children {
+ r := renaming
+ if i < len(u.dom.children)-1 {
+ // On all but the final iteration, we must make
+ // a copy to avoid destructive update.
+ r = make([]Value, len(renaming))
+ copy(r, renaming)
+ }
+ rename(v, r, newPhis)
+ }
+
+}
+
+// deferstackPreamble returns the *Alloc and ssa:deferstack() call for fn.deferstack.
+func deferstackPreamble(fn *Function) (*Alloc, *Call) {
+ if alloc, _ := fn.vars[fn.deferstack].(*Alloc); alloc != nil {
+ for _, ref := range *alloc.Referrers() {
+ if ref, _ := ref.(*Store); ref != nil && ref.Addr == alloc {
+ if call, _ := ref.Val.(*Call); call != nil {
+ return alloc, call
+ }
+ }
+ }
+ }
+ return nil, nil
+}
diff --git a/vendor/golang.org/x/tools/go/ssa/lvalue.go b/vendor/golang.org/x/tools/go/ssa/lvalue.go
new file mode 100644
index 0000000..eede307
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/ssa/lvalue.go
@@ -0,0 +1,155 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssa
+
+// lvalues are the union of addressable expressions and map-index
+// expressions.
+
+import (
+ "go/ast"
+ "go/token"
+ "go/types"
+
+ "golang.org/x/tools/internal/typeparams"
+)
+
+// An lvalue represents an assignable location that may appear on the
+// left-hand side of an assignment. This is a generalization of a
+// pointer to permit updates to elements of maps.
+type lvalue interface {
+ store(fn *Function, v Value) // stores v into the location
+ load(fn *Function) Value // loads the contents of the location
+ address(fn *Function) Value // address of the location
+ typ() types.Type // returns the type of the location
+}
+
+// An address is an lvalue represented by a true pointer.
+type address struct {
+ addr Value // must have a pointer core type.
+ pos token.Pos // source position
+ expr ast.Expr // source syntax of the value (not address) [debug mode]
+}
+
+func (a *address) load(fn *Function) Value {
+ load := emitLoad(fn, a.addr)
+ load.pos = a.pos
+ return load
+}
+
+func (a *address) store(fn *Function, v Value) {
+ store := emitStore(fn, a.addr, v, a.pos)
+ if a.expr != nil {
+ // store.Val is v, converted for assignability.
+ emitDebugRef(fn, a.expr, store.Val, false)
+ }
+}
+
+func (a *address) address(fn *Function) Value {
+ if a.expr != nil {
+ emitDebugRef(fn, a.expr, a.addr, true)
+ }
+ return a.addr
+}
+
+func (a *address) typ() types.Type {
+ return typeparams.MustDeref(a.addr.Type())
+}
+
+// An element is an lvalue represented by m[k], the location of an
+// element of a map. These locations are not addressable
+// since pointers cannot be formed from them, but they do support
+// load() and store().
+type element struct {
+ m, k Value // map
+ t types.Type // map element type
+ pos token.Pos // source position of colon ({k:v}) or lbrack (m[k]=v)
+}
+
+func (e *element) load(fn *Function) Value {
+ l := &Lookup{
+ X: e.m,
+ Index: e.k,
+ }
+ l.setPos(e.pos)
+ l.setType(e.t)
+ return fn.emit(l)
+}
+
+func (e *element) store(fn *Function, v Value) {
+ up := &MapUpdate{
+ Map: e.m,
+ Key: e.k,
+ Value: emitConv(fn, v, e.t),
+ }
+ up.pos = e.pos
+ fn.emit(up)
+}
+
+func (e *element) address(fn *Function) Value {
+ panic("map elements are not addressable")
+}
+
+func (e *element) typ() types.Type {
+ return e.t
+}
+
+// A lazyAddress is an lvalue whose address is the result of an instruction.
+// It works like an *address, except that a fresh address Value is
+// computed on each load, store, and address call.
+// A lazyAddress can be used to control when a side effect (nil pointer
+// dereference, index out of bounds) of using a location happens.
+type lazyAddress struct {
+ addr func(fn *Function) Value // emit to fn the computation of the address
+ t types.Type // type of the location
+ pos token.Pos // source position
+ expr ast.Expr // source syntax of the value (not address) [debug mode]
+}
+
+func (l *lazyAddress) load(fn *Function) Value {
+ load := emitLoad(fn, l.addr(fn))
+ load.pos = l.pos
+ return load
+}
+
+func (l *lazyAddress) store(fn *Function, v Value) {
+ store := emitStore(fn, l.addr(fn), v, l.pos)
+ if l.expr != nil {
+ // store.Val is v, converted for assignability.
+ emitDebugRef(fn, l.expr, store.Val, false)
+ }
+}
+
+func (l *lazyAddress) address(fn *Function) Value {
+ addr := l.addr(fn)
+ if l.expr != nil {
+ emitDebugRef(fn, l.expr, addr, true)
+ }
+ return addr
+}
+
+func (l *lazyAddress) typ() types.Type { return l.t }
+
+// A blank is a dummy variable whose name is "_".
+// It is not reified: loads are illegal and stores are ignored.
+type blank struct{}
+
+func (bl blank) load(fn *Function) Value {
+ panic("blank.load is illegal")
+}
+
+func (bl blank) store(fn *Function, v Value) {
+ // no-op
+}
+
+func (bl blank) address(fn *Function) Value {
+ panic("blank var is not addressable")
+}
+
+func (bl blank) typ() types.Type {
+ // This should be the type of the blank Ident; the typechecker
+ // doesn't provide this yet, but fortunately, we don't need it
+ // yet either.
+ panic("blank.typ is unimplemented")
+}
diff --git a/vendor/golang.org/x/tools/go/ssa/methods.go b/vendor/golang.org/x/tools/go/ssa/methods.go
new file mode 100644
index 0000000..b956018
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/ssa/methods.go
@@ -0,0 +1,281 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssa
+
+// This file defines utilities for population of method sets.
+
+import (
+ "fmt"
+ "go/types"
+
+ "golang.org/x/tools/go/types/typeutil"
+ "golang.org/x/tools/internal/aliases"
+)
+
+// MethodValue returns the Function implementing method sel, building
+// wrapper methods on demand. It returns nil if sel denotes an
+// interface or generic method.
+//
+// Precondition: sel.Kind() == MethodVal.
+//
+// Thread-safe.
+//
+// Acquires prog.methodsMu.
+func (prog *Program) MethodValue(sel *types.Selection) *Function {
+ if sel.Kind() != types.MethodVal {
+ panic(fmt.Sprintf("MethodValue(%s) kind != MethodVal", sel))
+ }
+ T := sel.Recv()
+ if types.IsInterface(T) {
+ return nil // interface method or type parameter
+ }
+
+ if prog.isParameterized(T) {
+ return nil // generic method
+ }
+
+ if prog.mode&LogSource != 0 {
+ defer logStack("MethodValue %s %v", T, sel)()
+ }
+
+ var b builder
+
+ m := func() *Function {
+ prog.methodsMu.Lock()
+ defer prog.methodsMu.Unlock()
+
+ // Get or create SSA method set.
+ mset, ok := prog.methodSets.At(T).(*methodSet)
+ if !ok {
+ mset = &methodSet{mapping: make(map[string]*Function)}
+ prog.methodSets.Set(T, mset)
+ }
+
+ // Get or create SSA method.
+ id := sel.Obj().Id()
+ fn, ok := mset.mapping[id]
+ if !ok {
+ obj := sel.Obj().(*types.Func)
+ needsPromotion := len(sel.Index()) > 1
+ needsIndirection := !isPointer(recvType(obj)) && isPointer(T)
+ if needsPromotion || needsIndirection {
+ fn = createWrapper(prog, toSelection(sel))
+ fn.buildshared = b.shared()
+ b.enqueue(fn)
+ } else {
+ fn = prog.objectMethod(obj, &b)
+ }
+ if fn.Signature.Recv() == nil {
+ panic(fn)
+ }
+ mset.mapping[id] = fn
+ } else {
+ b.waitForSharedFunction(fn)
+ }
+
+ return fn
+ }()
+
+ b.iterate()
+
+ return m
+}
+
+// objectMethod returns the Function for a given method symbol.
+// The symbol may be an instance of a generic function. It need not
+// belong to an existing SSA package created by a call to
+// prog.CreatePackage.
+//
+// objectMethod panics if the function is not a method.
+//
+// Acquires prog.objectMethodsMu.
+func (prog *Program) objectMethod(obj *types.Func, b *builder) *Function {
+ sig := obj.Type().(*types.Signature)
+ if sig.Recv() == nil {
+ panic("not a method: " + obj.String())
+ }
+
+ // Belongs to a created package?
+ if fn := prog.FuncValue(obj); fn != nil {
+ return fn
+ }
+
+ // Instantiation of generic?
+ if originObj := obj.Origin(); originObj != obj {
+ origin := prog.objectMethod(originObj, b)
+ assert(origin.typeparams.Len() > 0, "origin is not generic")
+ targs := receiverTypeArgs(obj)
+ return origin.instance(targs, b)
+ }
+
+ // Consult/update cache of methods created from types.Func.
+ prog.objectMethodsMu.Lock()
+ defer prog.objectMethodsMu.Unlock()
+ fn, ok := prog.objectMethods[obj]
+ if !ok {
+ fn = createFunction(prog, obj, obj.Name(), nil, nil, "")
+ fn.Synthetic = "from type information (on demand)"
+ fn.buildshared = b.shared()
+ b.enqueue(fn)
+
+ if prog.objectMethods == nil {
+ prog.objectMethods = make(map[*types.Func]*Function)
+ }
+ prog.objectMethods[obj] = fn
+ } else {
+ b.waitForSharedFunction(fn)
+ }
+ return fn
+}
+
+// LookupMethod returns the implementation of the method of type T
+// identified by (pkg, name). It returns nil if the method exists but
+// is an interface method or generic method, and panics if T has no such method.
+func (prog *Program) LookupMethod(T types.Type, pkg *types.Package, name string) *Function {
+ sel := prog.MethodSets.MethodSet(T).Lookup(pkg, name)
+ if sel == nil {
+ panic(fmt.Sprintf("%s has no method %s", T, types.Id(pkg, name)))
+ }
+ return prog.MethodValue(sel)
+}
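For illustration, a hedged client-side sketch of LookupMethod (the package layout and the "T"/"String" names are assumptions, not part of the vendored code):

package example

import (
	"fmt"
	"go/types"

	"golang.org/x/tools/go/ssa"
)

// printStringMethod looks up the concrete implementation of (*T).String.
func printStringMethod(prog *ssa.Program, pkg *ssa.Package) {
	T := pkg.Pkg.Scope().Lookup("T").Type()
	fn := prog.LookupMethod(types.NewPointer(T), pkg.Pkg, "String")
	fmt.Println(fn) // nil if interface/generic method; panics if T has no such method
}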
+
+// methodSet contains the (concrete) methods of a concrete type (non-interface, non-parameterized).
+type methodSet struct {
+ mapping map[string]*Function // populated lazily
+}
+
+// RuntimeTypes returns a new unordered slice containing all types in
+// the program for which a runtime type is required.
+//
+// A runtime type is required for any non-parameterized, non-interface
+// type that is converted to an interface, or for any type (including
+// interface types) derivable from one through reflection.
+//
+// The methods of such types may be reachable through reflection or
+// interface calls even if they are never called directly.
+//
+// Thread-safe.
+//
+// Acquires prog.runtimeTypesMu.
+func (prog *Program) RuntimeTypes() []types.Type {
+ prog.runtimeTypesMu.Lock()
+ defer prog.runtimeTypesMu.Unlock()
+ return prog.runtimeTypes.Keys()
+}
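A hedged client sketch of the API above (package and helper name are assumptions), e.g. as a seed set for a reflection-aware analysis:

package example

import (
	"fmt"

	"golang.org/x/tools/go/ssa"
)

// listRuntimeTypes prints every type for which the program requires a
// runtime type descriptor.
func listRuntimeTypes(prog *ssa.Program) {
	for _, t := range prog.RuntimeTypes() {
		fmt.Println(t)
	}
}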
+
+// forEachReachable calls f for type T and each type reachable from
+// its type through reflection.
+//
+// The function f must use memoization to break cycles and
+// return false when the type has already been visited.
+//
+// TODO(adonovan): publish in typeutil and share with go/callgraph/rta.
+func forEachReachable(msets *typeutil.MethodSetCache, T types.Type, f func(types.Type) bool) {
+ var visit func(T types.Type, skip bool)
+ visit = func(T types.Type, skip bool) {
+ if !skip {
+ if !f(T) {
+ return
+ }
+ }
+
+ // Recursion over signatures of each method.
+ tmset := msets.MethodSet(T)
+ for i := 0; i < tmset.Len(); i++ {
+ sig := tmset.At(i).Type().(*types.Signature)
+ // It is tempting to call visit(sig, false)
+ // but, as noted in golang.org/cl/65450043,
+ // the Signature.Recv field is ignored by
+ // types.Identical and typeutil.Map, which
+ // is confusing at best.
+ //
+ // More importantly, the true signature rtype
+ // reachable from a method using reflection
+ // has no receiver but an extra ordinary parameter.
+ // For the Read method of io.Reader we want:
+ // func(Reader, []byte) (int, error)
+ // but here sig is:
+ // func([]byte) (int, error)
+ // with .Recv = Reader (though it is hard to
+ // notice because it doesn't affect Signature.String
+ // or types.Identical).
+ //
+ // TODO(adonovan): construct and visit the correct
+ // non-method signature with an extra parameter
+ // (though since unnamed func types have no methods
+ // there is essentially no actual demand for this).
+ //
+ // TODO(adonovan): document whether or not it is
+ // safe to skip non-exported methods (as RTA does).
+ visit(sig.Params(), true) // skip the Tuple
+ visit(sig.Results(), true) // skip the Tuple
+ }
+
+ switch T := T.(type) {
+ case *aliases.Alias:
+ visit(aliases.Unalias(T), skip) // emulates the pre-Alias behavior
+
+ case *types.Basic:
+ // nop
+
+ case *types.Interface:
+ // nop---handled by recursion over method set.
+
+ case *types.Pointer:
+ visit(T.Elem(), false)
+
+ case *types.Slice:
+ visit(T.Elem(), false)
+
+ case *types.Chan:
+ visit(T.Elem(), false)
+
+ case *types.Map:
+ visit(T.Key(), false)
+ visit(T.Elem(), false)
+
+ case *types.Signature:
+ if T.Recv() != nil {
+ panic(fmt.Sprintf("Signature %s has Recv %s", T, T.Recv()))
+ }
+ visit(T.Params(), true) // skip the Tuple
+ visit(T.Results(), true) // skip the Tuple
+
+ case *types.Named:
+ // A pointer-to-named type can be derived from a named
+ // type via reflection. It may have methods too.
+ visit(types.NewPointer(T), false)
+
+ // Consider 'type T struct{S}' where S has methods.
+ // Reflection provides no way to get from T to struct{S},
+ // only to S, so the method set of struct{S} is unwanted,
+ // so set 'skip' flag during recursion.
+ visit(T.Underlying(), true) // skip the unnamed type
+
+ case *types.Array:
+ visit(T.Elem(), false)
+
+ case *types.Struct:
+ for i, n := 0, T.NumFields(); i < n; i++ {
+ // TODO(adonovan): document whether or not
+ // it is safe to skip non-exported fields.
+ visit(T.Field(i).Type(), false)
+ }
+
+ case *types.Tuple:
+ for i, n := 0, T.Len(); i < n; i++ {
+ visit(T.At(i).Type(), false)
+ }
+
+ case *types.TypeParam, *types.Union:
+ // forEachReachable must not be called on parameterized types.
+ panic(T)
+
+ default:
+ panic(T)
+ }
+ }
+ visit(T, false)
+}
diff --git a/vendor/golang.org/x/tools/go/ssa/mode.go b/vendor/golang.org/x/tools/go/ssa/mode.go
new file mode 100644
index 0000000..8381639
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/ssa/mode.go
@@ -0,0 +1,111 @@
+// Copyright 2015 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssa
+
+// This file defines the BuilderMode type and its command-line flag.
+
+import (
+ "bytes"
+ "fmt"
+)
+
+// BuilderMode is a bitmask of options for diagnostics and checking.
+//
+// *BuilderMode satisfies the flag.Value interface. Example:
+//
+// var mode = ssa.BuilderMode(0)
+// func init() { flag.Var(&mode, "build", ssa.BuilderModeDoc) }
+type BuilderMode uint
+
+const (
+ PrintPackages BuilderMode = 1 << iota // Print package inventory to stdout
+ PrintFunctions // Print function SSA code to stdout
+ LogSource // Log source locations as SSA builder progresses
+ SanityCheckFunctions // Perform sanity checking of function bodies
+ NaiveForm // Build naïve SSA form: don't replace local loads/stores with registers
+ BuildSerially // Build packages serially, not in parallel.
+ GlobalDebug // Enable debug info for all packages
+ BareInits // Build init functions without guards or calls to dependent inits
+ InstantiateGenerics // Instantiate generics functions (monomorphize) while building
+)
+
+const BuilderModeDoc = `Options controlling the SSA builder.
+The value is a sequence of zero or more of these letters:
+C perform sanity [C]hecking of the SSA form.
+D include [D]ebug info for every function.
+P print [P]ackage inventory.
+F print [F]unction SSA code.
+S log [S]ource locations as SSA builder progresses.
+L build distinct packages seria[L]ly instead of in parallel.
+N build [N]aive SSA form: don't replace local loads/stores with registers.
+I build bare [I]nit functions: no init guards or calls to dependent inits.
+G instantiate [G]eneric function bodies via monomorphization
+`
+
+func (m BuilderMode) String() string {
+ var buf bytes.Buffer
+ if m&GlobalDebug != 0 {
+ buf.WriteByte('D')
+ }
+ if m&PrintPackages != 0 {
+ buf.WriteByte('P')
+ }
+ if m&PrintFunctions != 0 {
+ buf.WriteByte('F')
+ }
+ if m&LogSource != 0 {
+ buf.WriteByte('S')
+ }
+ if m&SanityCheckFunctions != 0 {
+ buf.WriteByte('C')
+ }
+ if m&NaiveForm != 0 {
+ buf.WriteByte('N')
+ }
+ if m&BuildSerially != 0 {
+ buf.WriteByte('L')
+ }
+ if m&BareInits != 0 {
+ buf.WriteByte('I')
+ }
+ if m&InstantiateGenerics != 0 {
+ buf.WriteByte('G')
+ }
+ return buf.String()
+}
+
+// Set parses the flag characters in s and updates *m.
+func (m *BuilderMode) Set(s string) error {
+ var mode BuilderMode
+ for _, c := range s {
+ switch c {
+ case 'D':
+ mode |= GlobalDebug
+ case 'P':
+ mode |= PrintPackages
+ case 'F':
+ mode |= PrintFunctions
+ case 'S':
+ mode |= LogSource | BuildSerially
+ case 'C':
+ mode |= SanityCheckFunctions
+ case 'N':
+ mode |= NaiveForm
+ case 'L':
+ mode |= BuildSerially
+ case 'I':
+ mode |= BareInits
+ case 'G':
+ mode |= InstantiateGenerics
+ default:
+ return fmt.Errorf("unknown BuilderMode option: %q", c)
+ }
+ }
+ *m = mode
+ return nil
+}
+
+// Get returns m.
+func (m BuilderMode) Get() interface{} { return m }
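Complementing the flag.Var example in the BuilderMode doc comment, a hedged sketch of parsing the letter flags directly and printing the canonical form back (package and helper name are assumptions):

package example

import (
	"fmt"

	"golang.org/x/tools/go/ssa"
)

// demoBuilderMode parses "NFS": naive form, print functions, log source.
func demoBuilderMode() {
	var mode ssa.BuilderMode
	if err := mode.Set("NFS"); err != nil {
		panic(err)
	}
	fmt.Println(mode) // prints "FSNL"; note 'S' also implies serial building ('L')
}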
diff --git a/vendor/golang.org/x/tools/go/ssa/print.go b/vendor/golang.org/x/tools/go/ssa/print.go
new file mode 100644
index 0000000..c890d7e
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/ssa/print.go
@@ -0,0 +1,470 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssa
+
+// This file implements the String() methods for all Value and
+// Instruction types.
+
+import (
+ "bytes"
+ "fmt"
+ "go/types"
+ "io"
+ "reflect"
+ "sort"
+ "strings"
+
+ "golang.org/x/tools/go/types/typeutil"
+ "golang.org/x/tools/internal/typeparams"
+)
+
+// relName returns the name of v relative to i.
+// In most cases, this is identical to v.Name(), but references to
+// Functions (including methods) and Globals use RelString and
+// all types are displayed with relType, so that only cross-package
+// references are package-qualified.
+func relName(v Value, i Instruction) string {
+ var from *types.Package
+ if i != nil {
+ from = i.Parent().relPkg()
+ }
+ switch v := v.(type) {
+ case Member: // *Function or *Global
+ return v.RelString(from)
+ case *Const:
+ return v.RelString(from)
+ }
+ return v.Name()
+}
+
+// normalizeAnyForTesting controls whether we replace occurrences of
+// interface{} with any. It is only used for normalizing test output.
+var normalizeAnyForTesting bool
+
+func relType(t types.Type, from *types.Package) string {
+ s := types.TypeString(t, types.RelativeTo(from))
+ if normalizeAnyForTesting {
+ s = strings.ReplaceAll(s, "interface{}", "any")
+ }
+ return s
+}
+
+func relTerm(term *types.Term, from *types.Package) string {
+ s := relType(term.Type(), from)
+ if term.Tilde() {
+ return "~" + s
+ }
+ return s
+}
+
+func relString(m Member, from *types.Package) string {
+ // NB: not all globals have an Object (e.g. init$guard),
+ // so use Package().Object not Object.Package().
+ if pkg := m.Package().Pkg; pkg != nil && pkg != from {
+ return fmt.Sprintf("%s.%s", pkg.Path(), m.Name())
+ }
+ return m.Name()
+}
+
+// Value.String()
+//
+// This method is provided only for debugging.
+// It never appears in disassembly, which uses Value.Name().
+
+func (v *Parameter) String() string {
+ from := v.Parent().relPkg()
+ return fmt.Sprintf("parameter %s : %s", v.Name(), relType(v.Type(), from))
+}
+
+func (v *FreeVar) String() string {
+ from := v.Parent().relPkg()
+ return fmt.Sprintf("freevar %s : %s", v.Name(), relType(v.Type(), from))
+}
+
+func (v *Builtin) String() string {
+ return fmt.Sprintf("builtin %s", v.Name())
+}
+
+// Instruction.String()
+
+func (v *Alloc) String() string {
+ op := "local"
+ if v.Heap {
+ op = "new"
+ }
+ from := v.Parent().relPkg()
+ return fmt.Sprintf("%s %s (%s)", op, relType(typeparams.MustDeref(v.Type()), from), v.Comment)
+}
+
+func (v *Phi) String() string {
+ var b bytes.Buffer
+ b.WriteString("phi [")
+ for i, edge := range v.Edges {
+ if i > 0 {
+ b.WriteString(", ")
+ }
+ // Be robust against malformed CFG.
+ if v.block == nil {
+ b.WriteString("??")
+ continue
+ }
+ block := -1
+ if i < len(v.block.Preds) {
+ block = v.block.Preds[i].Index
+ }
+ fmt.Fprintf(&b, "%d: ", block)
+ edgeVal := "<nil>" // be robust
+ if edge != nil {
+ edgeVal = relName(edge, v)
+ }
+ b.WriteString(edgeVal)
+ }
+ b.WriteString("]")
+ if v.Comment != "" {
+ b.WriteString(" #")
+ b.WriteString(v.Comment)
+ }
+ return b.String()
+}
+
+func printCall(v *CallCommon, prefix string, instr Instruction) string {
+ var b bytes.Buffer
+ b.WriteString(prefix)
+ if !v.IsInvoke() {
+ b.WriteString(relName(v.Value, instr))
+ } else {
+ fmt.Fprintf(&b, "invoke %s.%s", relName(v.Value, instr), v.Method.Name())
+ }
+ b.WriteString("(")
+ for i, arg := range v.Args {
+ if i > 0 {
+ b.WriteString(", ")
+ }
+ b.WriteString(relName(arg, instr))
+ }
+ if v.Signature().Variadic() {
+ b.WriteString("...")
+ }
+ b.WriteString(")")
+ return b.String()
+}
+
+func (c *CallCommon) String() string {
+ return printCall(c, "", nil)
+}
+
+func (v *Call) String() string {
+ return printCall(&v.Call, "", v)
+}
+
+func (v *BinOp) String() string {
+ return fmt.Sprintf("%s %s %s", relName(v.X, v), v.Op.String(), relName(v.Y, v))
+}
+
+func (v *UnOp) String() string {
+ return fmt.Sprintf("%s%s%s", v.Op, relName(v.X, v), commaOk(v.CommaOk))
+}
+
+func printConv(prefix string, v, x Value) string {
+ from := v.Parent().relPkg()
+ return fmt.Sprintf("%s %s <- %s (%s)",
+ prefix,
+ relType(v.Type(), from),
+ relType(x.Type(), from),
+ relName(x, v.(Instruction)))
+}
+
+func (v *ChangeType) String() string { return printConv("changetype", v, v.X) }
+func (v *Convert) String() string { return printConv("convert", v, v.X) }
+func (v *ChangeInterface) String() string { return printConv("change interface", v, v.X) }
+func (v *SliceToArrayPointer) String() string { return printConv("slice to array pointer", v, v.X) }
+func (v *MakeInterface) String() string { return printConv("make", v, v.X) }
+
+func (v *MultiConvert) String() string {
+ from := v.Parent().relPkg()
+
+ var b strings.Builder
+ b.WriteString(printConv("multiconvert", v, v.X))
+ b.WriteString(" [")
+ for i, s := range v.from {
+ for j, d := range v.to {
+ if i != 0 || j != 0 {
+ b.WriteString(" | ")
+ }
+ fmt.Fprintf(&b, "%s <- %s", relTerm(d, from), relTerm(s, from))
+ }
+ }
+ b.WriteString("]")
+ return b.String()
+}
+
+func (v *MakeClosure) String() string {
+ var b bytes.Buffer
+ fmt.Fprintf(&b, "make closure %s", relName(v.Fn, v))
+ if v.Bindings != nil {
+ b.WriteString(" [")
+ for i, c := range v.Bindings {
+ if i > 0 {
+ b.WriteString(", ")
+ }
+ b.WriteString(relName(c, v))
+ }
+ b.WriteString("]")
+ }
+ return b.String()
+}
+
+func (v *MakeSlice) String() string {
+ from := v.Parent().relPkg()
+ return fmt.Sprintf("make %s %s %s",
+ relType(v.Type(), from),
+ relName(v.Len, v),
+ relName(v.Cap, v))
+}
+
+func (v *Slice) String() string {
+ var b bytes.Buffer
+ b.WriteString("slice ")
+ b.WriteString(relName(v.X, v))
+ b.WriteString("[")
+ if v.Low != nil {
+ b.WriteString(relName(v.Low, v))
+ }
+ b.WriteString(":")
+ if v.High != nil {
+ b.WriteString(relName(v.High, v))
+ }
+ if v.Max != nil {
+ b.WriteString(":")
+ b.WriteString(relName(v.Max, v))
+ }
+ b.WriteString("]")
+ return b.String()
+}
+
+func (v *MakeMap) String() string {
+ res := ""
+ if v.Reserve != nil {
+ res = relName(v.Reserve, v)
+ }
+ from := v.Parent().relPkg()
+ return fmt.Sprintf("make %s %s", relType(v.Type(), from), res)
+}
+
+func (v *MakeChan) String() string {
+ from := v.Parent().relPkg()
+ return fmt.Sprintf("make %s %s", relType(v.Type(), from), relName(v.Size, v))
+}
+
+func (v *FieldAddr) String() string {
+ // Be robust against a bad index.
+ name := "?"
+ if fld := fieldOf(typeparams.MustDeref(v.X.Type()), v.Field); fld != nil {
+ name = fld.Name()
+ }
+ return fmt.Sprintf("&%s.%s [#%d]", relName(v.X, v), name, v.Field)
+}
+
+func (v *Field) String() string {
+ // Be robust against a bad index.
+ name := "?"
+ if fld := fieldOf(v.X.Type(), v.Field); fld != nil {
+ name = fld.Name()
+ }
+ return fmt.Sprintf("%s.%s [#%d]", relName(v.X, v), name, v.Field)
+}
+
+func (v *IndexAddr) String() string {
+ return fmt.Sprintf("&%s[%s]", relName(v.X, v), relName(v.Index, v))
+}
+
+func (v *Index) String() string {
+ return fmt.Sprintf("%s[%s]", relName(v.X, v), relName(v.Index, v))
+}
+
+func (v *Lookup) String() string {
+ return fmt.Sprintf("%s[%s]%s", relName(v.X, v), relName(v.Index, v), commaOk(v.CommaOk))
+}
+
+func (v *Range) String() string {
+ return "range " + relName(v.X, v)
+}
+
+func (v *Next) String() string {
+ return "next " + relName(v.Iter, v)
+}
+
+func (v *TypeAssert) String() string {
+ from := v.Parent().relPkg()
+ return fmt.Sprintf("typeassert%s %s.(%s)", commaOk(v.CommaOk), relName(v.X, v), relType(v.AssertedType, from))
+}
+
+func (v *Extract) String() string {
+ return fmt.Sprintf("extract %s #%d", relName(v.Tuple, v), v.Index)
+}
+
+func (s *Jump) String() string {
+ // Be robust against malformed CFG.
+ block := -1
+ if s.block != nil && len(s.block.Succs) == 1 {
+ block = s.block.Succs[0].Index
+ }
+ return fmt.Sprintf("jump %d", block)
+}
+
+func (s *If) String() string {
+ // Be robust against malformed CFG.
+ tblock, fblock := -1, -1
+ if s.block != nil && len(s.block.Succs) == 2 {
+ tblock = s.block.Succs[0].Index
+ fblock = s.block.Succs[1].Index
+ }
+ return fmt.Sprintf("if %s goto %d else %d", relName(s.Cond, s), tblock, fblock)
+}
+
+func (s *Go) String() string {
+ return printCall(&s.Call, "go ", s)
+}
+
+func (s *Panic) String() string {
+ return "panic " + relName(s.X, s)
+}
+
+func (s *Return) String() string {
+ var b bytes.Buffer
+ b.WriteString("return")
+ for i, r := range s.Results {
+ if i == 0 {
+ b.WriteString(" ")
+ } else {
+ b.WriteString(", ")
+ }
+ b.WriteString(relName(r, s))
+ }
+ return b.String()
+}
+
+func (*RunDefers) String() string {
+ return "rundefers"
+}
+
+func (s *Send) String() string {
+ return fmt.Sprintf("send %s <- %s", relName(s.Chan, s), relName(s.X, s))
+}
+
+func (s *Defer) String() string {
+ prefix := "defer "
+ if s.DeferStack != nil {
+ prefix += "[" + relName(s.DeferStack, s) + "] "
+ }
+ c := printCall(&s.Call, prefix, s)
+ return c
+}
+
+func (s *Select) String() string {
+ var b bytes.Buffer
+ for i, st := range s.States {
+ if i > 0 {
+ b.WriteString(", ")
+ }
+ if st.Dir == types.RecvOnly {
+ b.WriteString("<-")
+ b.WriteString(relName(st.Chan, s))
+ } else {
+ b.WriteString(relName(st.Chan, s))
+ b.WriteString("<-")
+ b.WriteString(relName(st.Send, s))
+ }
+ }
+ non := ""
+ if !s.Blocking {
+ non = "non"
+ }
+ return fmt.Sprintf("select %sblocking [%s]", non, b.String())
+}
+
+func (s *Store) String() string {
+ return fmt.Sprintf("*%s = %s", relName(s.Addr, s), relName(s.Val, s))
+}
+
+func (s *MapUpdate) String() string {
+ return fmt.Sprintf("%s[%s] = %s", relName(s.Map, s), relName(s.Key, s), relName(s.Value, s))
+}
+
+func (s *DebugRef) String() string {
+ p := s.Parent().Prog.Fset.Position(s.Pos())
+ var descr interface{}
+ if s.object != nil {
+ descr = s.object // e.g. "var x int"
+ } else {
+ descr = reflect.TypeOf(s.Expr) // e.g. "*ast.CallExpr"
+ }
+ var addr string
+ if s.IsAddr {
+ addr = "address of "
+ }
+ return fmt.Sprintf("; %s%s @ %d:%d is %s", addr, descr, p.Line, p.Column, s.X.Name())
+}
+
+func (p *Package) String() string {
+ return "package " + p.Pkg.Path()
+}
+
+var _ io.WriterTo = (*Package)(nil) // *Package implements io.WriterTo
+
+func (p *Package) WriteTo(w io.Writer) (int64, error) {
+ var buf bytes.Buffer
+ WritePackage(&buf, p)
+ n, err := w.Write(buf.Bytes())
+ return int64(n), err
+}
+
+// WritePackage writes to buf a human-readable summary of p.
+func WritePackage(buf *bytes.Buffer, p *Package) {
+ fmt.Fprintf(buf, "%s:\n", p)
+
+ var names []string
+ maxname := 0
+ for name := range p.Members {
+ if l := len(name); l > maxname {
+ maxname = l
+ }
+ names = append(names, name)
+ }
+
+ from := p.Pkg
+ sort.Strings(names)
+ for _, name := range names {
+ switch mem := p.Members[name].(type) {
+ case *NamedConst:
+ fmt.Fprintf(buf, " const %-*s %s = %s\n",
+ maxname, name, mem.Name(), mem.Value.RelString(from))
+
+ case *Function:
+ fmt.Fprintf(buf, " func %-*s %s\n",
+ maxname, name, relType(mem.Type(), from))
+
+ case *Type:
+ fmt.Fprintf(buf, " type %-*s %s\n",
+ maxname, name, relType(mem.Type().Underlying(), from))
+ for _, meth := range typeutil.IntuitiveMethodSet(mem.Type(), &p.Prog.MethodSets) {
+ fmt.Fprintf(buf, " %s\n", types.SelectionString(meth, types.RelativeTo(from)))
+ }
+
+ case *Global:
+ fmt.Fprintf(buf, " var %-*s %s\n",
+ maxname, name, relType(typeparams.MustDeref(mem.Type()), from))
+ }
+ }
+
+ fmt.Fprintf(buf, "\n")
+}
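A hedged usage sketch of the io.WriterTo implementation above (package and helper name are assumptions):

package example

import (
	"os"

	"golang.org/x/tools/go/ssa"
)

// dumpPackage writes the human-readable member summary produced by
// WritePackage to stdout.
func dumpPackage(p *ssa.Package) error {
	_, err := p.WriteTo(os.Stdout)
	return err
}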
+
+func commaOk(x bool) string {
+ if x {
+ return ",ok"
+ }
+ return ""
+}
diff --git a/vendor/golang.org/x/tools/go/ssa/sanity.go b/vendor/golang.org/x/tools/go/ssa/sanity.go
new file mode 100644
index 0000000..285cba0
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/ssa/sanity.go
@@ -0,0 +1,560 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssa
+
+// An optional pass for sanity-checking invariants of the SSA representation.
+// Currently it checks CFG invariants but little at the instruction level.
+
+import (
+ "bytes"
+ "fmt"
+ "go/ast"
+ "go/types"
+ "io"
+ "os"
+ "strings"
+)
+
+type sanity struct {
+ reporter io.Writer
+ fn *Function
+ block *BasicBlock
+ instrs map[Instruction]unit
+ insane bool
+}
+
+// sanityCheck performs integrity checking of the SSA representation
+// of the function fn and returns true if it was valid. Diagnostics
+// are written to reporter if non-nil, os.Stderr otherwise. Some
+// diagnostics are only warnings and do not imply a negative result.
+//
+// Sanity-checking is intended to facilitate the debugging of code
+// transformation passes.
+func sanityCheck(fn *Function, reporter io.Writer) bool {
+ if reporter == nil {
+ reporter = os.Stderr
+ }
+ return (&sanity{reporter: reporter}).checkFunction(fn)
+}
+
+// mustSanityCheck is like sanityCheck but panics instead of returning
+// a negative result.
+func mustSanityCheck(fn *Function, reporter io.Writer) {
+ if !sanityCheck(fn, reporter) {
+ fn.WriteTo(os.Stderr)
+ panic("SanityCheck failed")
+ }
+}
+
+func (s *sanity) diagnostic(prefix, format string, args ...interface{}) {
+ fmt.Fprintf(s.reporter, "%s: function %s", prefix, s.fn)
+ if s.block != nil {
+ fmt.Fprintf(s.reporter, ", block %s", s.block)
+ }
+ io.WriteString(s.reporter, ": ")
+ fmt.Fprintf(s.reporter, format, args...)
+ io.WriteString(s.reporter, "\n")
+}
+
+func (s *sanity) errorf(format string, args ...interface{}) {
+ s.insane = true
+ s.diagnostic("Error", format, args...)
+}
+
+func (s *sanity) warnf(format string, args ...interface{}) {
+ s.diagnostic("Warning", format, args...)
+}
+
+// findDuplicate returns an arbitrary basic block that appeared more
+// than once in blocks, or nil if all were unique.
+func findDuplicate(blocks []*BasicBlock) *BasicBlock {
+ if len(blocks) < 2 {
+ return nil
+ }
+ if blocks[0] == blocks[1] {
+ return blocks[0]
+ }
+ // Slow path:
+ m := make(map[*BasicBlock]bool)
+ for _, b := range blocks {
+ if m[b] {
+ return b
+ }
+ m[b] = true
+ }
+ return nil
+}
+
+func (s *sanity) checkInstr(idx int, instr Instruction) {
+ switch instr := instr.(type) {
+ case *If, *Jump, *Return, *Panic:
+ s.errorf("control flow instruction not at end of block")
+ case *Phi:
+ if idx == 0 {
+ // It suffices to apply this check to just the first phi node.
+ if dup := findDuplicate(s.block.Preds); dup != nil {
+ s.errorf("phi node in block with duplicate predecessor %s", dup)
+ }
+ } else {
+ prev := s.block.Instrs[idx-1]
+ if _, ok := prev.(*Phi); !ok {
+ s.errorf("Phi instruction follows a non-Phi: %T", prev)
+ }
+ }
+ if ne, np := len(instr.Edges), len(s.block.Preds); ne != np {
+ s.errorf("phi node has %d edges but %d predecessors", ne, np)
+
+ } else {
+ for i, e := range instr.Edges {
+ if e == nil {
+ s.errorf("phi node '%s' has no value for edge #%d from %s", instr.Comment, i, s.block.Preds[i])
+ } else if !types.Identical(instr.typ, e.Type()) {
+ s.errorf("phi node '%s' has a different type (%s) for edge #%d from %s (%s)",
+ instr.Comment, instr.Type(), i, s.block.Preds[i], e.Type())
+ }
+ }
+ }
+
+ case *Alloc:
+ if !instr.Heap {
+ found := false
+ for _, l := range s.fn.Locals {
+ if l == instr {
+ found = true
+ break
+ }
+ }
+ if !found {
+ s.errorf("local alloc %s = %s does not appear in Function.Locals", instr.Name(), instr)
+ }
+ }
+
+ case *BinOp:
+ case *Call:
+ if common := instr.Call; common.IsInvoke() {
+ if !types.IsInterface(common.Value.Type()) {
+ s.errorf("invoke on %s (%s) which is not an interface type (or type param)", common.Value, common.Value.Type())
+ }
+ }
+ case *ChangeInterface:
+ case *ChangeType:
+ case *SliceToArrayPointer:
+ case *Convert:
+ if from := instr.X.Type(); !isBasicConvTypes(typeSetOf(from)) {
+ if to := instr.Type(); !isBasicConvTypes(typeSetOf(to)) {
+ s.errorf("convert %s -> %s: at least one type must be basic (or all basic, []byte, or []rune)", from, to)
+ }
+ }
+ case *MultiConvert:
+ case *Defer:
+ case *Extract:
+ case *Field:
+ case *FieldAddr:
+ case *Go:
+ case *Index:
+ case *IndexAddr:
+ case *Lookup:
+ case *MakeChan:
+ case *MakeClosure:
+ numFree := len(instr.Fn.(*Function).FreeVars)
+ numBind := len(instr.Bindings)
+ if numFree != numBind {
+ s.errorf("MakeClosure has %d Bindings for function %s with %d free vars",
+ numBind, instr.Fn, numFree)
+
+ }
+ if recv := instr.Type().(*types.Signature).Recv(); recv != nil {
+ s.errorf("MakeClosure's type includes receiver %s", recv.Type())
+ }
+
+ case *MakeInterface:
+ case *MakeMap:
+ case *MakeSlice:
+ case *MapUpdate:
+ case *Next:
+ case *Range:
+ case *RunDefers:
+ case *Select:
+ case *Send:
+ case *Slice:
+ case *Store:
+ case *TypeAssert:
+ case *UnOp:
+ case *DebugRef:
+ // TODO(adonovan): implement checks.
+ default:
+ panic(fmt.Sprintf("Unknown instruction type: %T", instr))
+ }
+
+ if call, ok := instr.(CallInstruction); ok {
+ if call.Common().Signature() == nil {
+ s.errorf("nil signature: %s", call)
+ }
+ }
+
+ // Check that value-defining instructions have valid types
+ // and a valid referrer list.
+ if v, ok := instr.(Value); ok {
+ t := v.Type()
+ if t == nil {
+ s.errorf("no type: %s = %s", v.Name(), v)
+ } else if t == tRangeIter || t == tDeferStack {
+ // not a proper type; ignore.
+ } else if b, ok := t.Underlying().(*types.Basic); ok && b.Info()&types.IsUntyped != 0 {
+ s.errorf("instruction has 'untyped' result: %s = %s : %s", v.Name(), v, t)
+ }
+ s.checkReferrerList(v)
+ }
+
+ // Untyped constants are legal as instruction Operands(),
+ // for example:
+ // _ = "foo"[0]
+ // or:
+ // if wordsize==64 {...}
+
+ // All other non-Instruction Values can be found via their
+ // enclosing Function or Package.
+}
+
+func (s *sanity) checkFinalInstr(instr Instruction) {
+ switch instr := instr.(type) {
+ case *If:
+ if nsuccs := len(s.block.Succs); nsuccs != 2 {
+ s.errorf("If-terminated block has %d successors; expected 2", nsuccs)
+ return
+ }
+ if s.block.Succs[0] == s.block.Succs[1] {
+ s.errorf("If-instruction has same True, False target blocks: %s", s.block.Succs[0])
+ return
+ }
+
+ case *Jump:
+ if nsuccs := len(s.block.Succs); nsuccs != 1 {
+ s.errorf("Jump-terminated block has %d successors; expected 1", nsuccs)
+ return
+ }
+
+ case *Return:
+ if nsuccs := len(s.block.Succs); nsuccs != 0 {
+ s.errorf("Return-terminated block has %d successors; expected none", nsuccs)
+ return
+ }
+ if na, nf := len(instr.Results), s.fn.Signature.Results().Len(); nf != na {
+ s.errorf("%d-ary return in %d-ary function", na, nf)
+ }
+
+ case *Panic:
+ if nsuccs := len(s.block.Succs); nsuccs != 0 {
+ s.errorf("Panic-terminated block has %d successors; expected none", nsuccs)
+ return
+ }
+
+ default:
+ s.errorf("non-control flow instruction at end of block")
+ }
+}
+
+func (s *sanity) checkBlock(b *BasicBlock, index int) {
+ s.block = b
+
+ if b.Index != index {
+ s.errorf("block has incorrect Index %d", b.Index)
+ }
+ if b.parent != s.fn {
+ s.errorf("block has incorrect parent %s", b.parent)
+ }
+
+ // Check all blocks are reachable.
+ // (The entry block is always implicitly reachable,
+ // as is the Recover block, if any.)
+ if (index > 0 && b != b.parent.Recover) && len(b.Preds) == 0 {
+ s.warnf("unreachable block")
+ if b.Instrs == nil {
+ // Since this block is about to be pruned,
+ // tolerating transient problems in it
+ // simplifies other optimizations.
+ return
+ }
+ }
+
+ // Check predecessor and successor relations are dual,
+ // and that all blocks in CFG belong to same function.
+ for _, a := range b.Preds {
+ found := false
+ for _, bb := range a.Succs {
+ if bb == b {
+ found = true
+ break
+ }
+ }
+ if !found {
+ s.errorf("expected successor edge in predecessor %s; found only: %s", a, a.Succs)
+ }
+ if a.parent != s.fn {
+ s.errorf("predecessor %s belongs to different function %s", a, a.parent)
+ }
+ }
+ for _, c := range b.Succs {
+ found := false
+ for _, bb := range c.Preds {
+ if bb == b {
+ found = true
+ break
+ }
+ }
+ if !found {
+ s.errorf("expected predecessor edge in successor %s; found only: %s", c, c.Preds)
+ }
+ if c.parent != s.fn {
+ s.errorf("successor %s belongs to different function %s", c, c.parent)
+ }
+ }
+
+ // Check each instruction is sane.
+ n := len(b.Instrs)
+ if n == 0 {
+ s.errorf("basic block contains no instructions")
+ }
+ var rands [10]*Value // reuse storage
+ for j, instr := range b.Instrs {
+ if instr == nil {
+ s.errorf("nil instruction at index %d", j)
+ continue
+ }
+ if b2 := instr.Block(); b2 == nil {
+ s.errorf("nil Block() for instruction at index %d", j)
+ continue
+ } else if b2 != b {
+ s.errorf("wrong Block() (%s) for instruction at index %d ", b2, j)
+ continue
+ }
+ if j < n-1 {
+ s.checkInstr(j, instr)
+ } else {
+ s.checkFinalInstr(instr)
+ }
+
+ // Check Instruction.Operands.
+ operands:
+ for i, op := range instr.Operands(rands[:0]) {
+ if op == nil {
+ s.errorf("nil operand pointer %d of %s", i, instr)
+ continue
+ }
+ val := *op
+ if val == nil {
+ continue // a nil operand is ok
+ }
+
+ // Check that "untyped" types only appear on constant operands.
+ if _, ok := (*op).(*Const); !ok {
+ if basic, ok := (*op).Type().Underlying().(*types.Basic); ok {
+ if basic.Info()&types.IsUntyped != 0 {
+ s.errorf("operand #%d of %s is untyped: %s", i, instr, basic)
+ }
+ }
+ }
+
+ // Check that Operands that are also Instructions belong to same function.
+ // TODO(adonovan): also check their block dominates block b.
+ if val, ok := val.(Instruction); ok {
+ if val.Block() == nil {
+ s.errorf("operand %d of %s is an instruction (%s) that belongs to no block", i, instr, val)
+ } else if val.Parent() != s.fn {
+ s.errorf("operand %d of %s is an instruction (%s) from function %s", i, instr, val, val.Parent())
+ }
+ }
+
+ // Check that each function-local operand of
+ // instr refers back to instr. (NB: quadratic)
+ switch val := val.(type) {
+ case *Const, *Global, *Builtin:
+ continue // not local
+ case *Function:
+ if val.parent == nil {
+ continue // only anon functions are local
+ }
+ }
+
+ // TODO(adonovan): check val.Parent() != nil <=> val.Referrers() is defined.
+
+ if refs := val.Referrers(); refs != nil {
+ for _, ref := range *refs {
+ if ref == instr {
+ continue operands
+ }
+ }
+ s.errorf("operand %d of %s (%s) does not refer to us", i, instr, val)
+ } else {
+ s.errorf("operand %d of %s (%s) has no referrers", i, instr, val)
+ }
+ }
+ }
+}
+
+func (s *sanity) checkReferrerList(v Value) {
+ refs := v.Referrers()
+ if refs == nil {
+ s.errorf("%s has missing referrer list", v.Name())
+ return
+ }
+ for i, ref := range *refs {
+ if _, ok := s.instrs[ref]; !ok {
+ s.errorf("%s.Referrers()[%d] = %s is not an instruction belonging to this function", v.Name(), i, ref)
+ }
+ }
+}
+
+func (s *sanity) checkFunction(fn *Function) bool {
+ // TODO(adonovan): check Function invariants:
+ // - check params match signature
+ // - check transient fields are nil
+ // - warn if any fn.Locals do not appear among block instructions.
+
+ // TODO(taking): Sanity check origin, typeparams, and typeargs.
+ s.fn = fn
+ if fn.Prog == nil {
+ s.errorf("nil Prog")
+ }
+
+ var buf bytes.Buffer
+ _ = fn.String() // must not crash
+ _ = fn.RelString(fn.relPkg()) // must not crash
+ WriteFunction(&buf, fn) // must not crash
+
+ // All functions have a package, except delegates (which are
+ // shared across packages, or duplicated as weak symbols in a
+ // separate-compilation model), and error.Error.
+ if fn.Pkg == nil {
+ if strings.HasPrefix(fn.Synthetic, "from type information (on demand)") ||
+ strings.HasPrefix(fn.Synthetic, "wrapper ") ||
+ strings.HasPrefix(fn.Synthetic, "bound ") ||
+ strings.HasPrefix(fn.Synthetic, "thunk ") ||
+ strings.HasSuffix(fn.name, "Error") ||
+ strings.HasPrefix(fn.Synthetic, "instance ") ||
+ strings.HasPrefix(fn.Synthetic, "instantiation ") ||
+ (fn.parent != nil && len(fn.typeargs) > 0) /* anon fun in instance */ {
+ // ok
+ } else {
+ s.errorf("nil Pkg")
+ }
+ }
+ if src, syn := fn.Synthetic == "", fn.Syntax() != nil; src != syn {
+ if len(fn.typeargs) > 0 && fn.Prog.mode&InstantiateGenerics != 0 {
+ // ok (instantiation with InstantiateGenerics on)
+ } else if fn.topLevelOrigin != nil && len(fn.typeargs) > 0 {
+ // ok (we always have the syntax set for instantiation)
+ } else if _, rng := fn.syntax.(*ast.RangeStmt); rng && fn.Synthetic == "range-over-func yield" {
+ // ok (range-func-yields are both synthetic and keep syntax)
+ } else {
+ s.errorf("got fromSource=%t, hasSyntax=%t; want same values", src, syn)
+ }
+ }
+ for i, l := range fn.Locals {
+ if l.Parent() != fn {
+ s.errorf("Local %s at index %d has wrong parent", l.Name(), i)
+ }
+ if l.Heap {
+ s.errorf("Local %s at index %d has Heap flag set", l.Name(), i)
+ }
+ }
+ // Build the set of valid referrers.
+ s.instrs = make(map[Instruction]unit)
+ for _, b := range fn.Blocks {
+ for _, instr := range b.Instrs {
+ s.instrs[instr] = unit{}
+ }
+ }
+ for i, p := range fn.Params {
+ if p.Parent() != fn {
+ s.errorf("Param %s at index %d has wrong parent", p.Name(), i)
+ }
+ // Check common suffix of Signature and Params match type.
+ if sig := fn.Signature; sig != nil {
+ j := i - len(fn.Params) + sig.Params().Len() // index within sig.Params
+ if j < 0 {
+ continue
+ }
+ if !types.Identical(p.Type(), sig.Params().At(j).Type()) {
+ s.errorf("Param %s at index %d has wrong type (%s, versus %s in Signature)", p.Name(), i, p.Type(), sig.Params().At(j).Type())
+
+ }
+ }
+ s.checkReferrerList(p)
+ }
+ for i, fv := range fn.FreeVars {
+ if fv.Parent() != fn {
+ s.errorf("FreeVar %s at index %d has wrong parent", fv.Name(), i)
+ }
+ s.checkReferrerList(fv)
+ }
+
+ if fn.Blocks != nil && len(fn.Blocks) == 0 {
+ // Function _had_ blocks (so it's not external) but
+ // they were "optimized" away, even the entry block.
+ s.errorf("Blocks slice is non-nil but empty")
+ }
+ for i, b := range fn.Blocks {
+ if b == nil {
+ s.warnf("nil *BasicBlock at f.Blocks[%d]", i)
+ continue
+ }
+ s.checkBlock(b, i)
+ }
+ if fn.Recover != nil && fn.Blocks[fn.Recover.Index] != fn.Recover {
+ s.errorf("Recover block is not in Blocks slice")
+ }
+
+ s.block = nil
+ for i, anon := range fn.AnonFuncs {
+ if anon.Parent() != fn {
+ s.errorf("AnonFuncs[%d]=%s but %s.Parent()=%s", i, anon, anon, anon.Parent())
+ }
+ if i != int(anon.anonIdx) {
+ s.errorf("AnonFuncs[%d]=%s but %s.anonIdx=%d", i, anon, anon, anon.anonIdx)
+ }
+ }
+ s.fn = nil
+ return !s.insane
+}
+
+// sanityCheckPackage checks invariants of packages upon creation.
+// It does not require that the package is built.
+// Unlike sanityCheck (for functions), it just panics at the first error.
+func sanityCheckPackage(pkg *Package) {
+ if pkg.Pkg == nil {
+ panic(fmt.Sprintf("Package %s has no Object", pkg))
+ }
+ _ = pkg.String() // must not crash
+
+ for name, mem := range pkg.Members {
+ if name != mem.Name() {
+ panic(fmt.Sprintf("%s: %T.Name() = %s, want %s",
+ pkg.Pkg.Path(), mem, mem.Name(), name))
+ }
+ obj := mem.Object()
+ if obj == nil {
+ // This check is sound because fields
+ // {Global,Function}.object have type
+ // types.Object. (If they were declared as
+ // *types.{Var,Func}, we'd have a non-empty
+ // interface containing a nil pointer.)
+
+ continue // not all members have typechecker objects
+ }
+ if obj.Name() != name {
+ if obj.Name() == "init" && strings.HasPrefix(mem.Name(), "init#") {
+ // Ok. The name of a declared init function varies between
+ // its types.Func ("init") and its ssa.Function ("init#%d").
+ } else {
+ panic(fmt.Sprintf("%s: %T.Object().Name() = %s, want %s",
+ pkg.Pkg.Path(), mem, obj.Name(), name))
+ }
+ }
+ if obj.Pos() != mem.Pos() {
+ panic(fmt.Sprintf("%s Pos=%d obj.Pos=%d", mem, mem.Pos(), obj.Pos()))
+ }
+ }
+}
diff --git a/vendor/golang.org/x/tools/go/ssa/source.go b/vendor/golang.org/x/tools/go/ssa/source.go
new file mode 100644
index 0000000..7b71c88
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/ssa/source.go
@@ -0,0 +1,288 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssa
+
+// This file defines utilities for working with source positions
+// or source-level named entities ("objects").
+
+// TODO(adonovan): test that {Value,Instruction}.Pos() positions match
+// the originating syntax, as specified.
+
+import (
+ "go/ast"
+ "go/token"
+ "go/types"
+)
+
+// EnclosingFunction returns the function that contains the syntax
+// node denoted by path.
+//
+// Syntax associated with package-level variable specifications is
+// enclosed by the package's init() function.
+//
+// Returns nil if not found; reasons might include:
+// - the node is not enclosed by any function.
+// - the node is within an anonymous function (FuncLit) and
+// its SSA function has not been created yet
+// (pkg.Build() has not yet been called).
+func EnclosingFunction(pkg *Package, path []ast.Node) *Function {
+ // Start with package-level function...
+ fn := findEnclosingPackageLevelFunction(pkg, path)
+ if fn == nil {
+ return nil // not in any function
+ }
+
+ // ...then walk down the nested anonymous functions.
+ n := len(path)
+outer:
+ for i := range path {
+ if lit, ok := path[n-1-i].(*ast.FuncLit); ok {
+ for _, anon := range fn.AnonFuncs {
+ if anon.Pos() == lit.Type.Func {
+ fn = anon
+ continue outer
+ }
+ }
+ // SSA function not found:
+ // - package not yet built, or maybe
+ // - builder skipped FuncLit in dead block
+ // (in principle; but currently the Builder
+ // generates even dead FuncLits).
+ return nil
+ }
+ }
+ return fn
+}
+
+// HasEnclosingFunction returns true if the AST node denoted by path
+// is contained within the declaration of some function or
+// package-level variable.
+//
+// Unlike EnclosingFunction, the behaviour of this function does not
+// depend on whether SSA code for pkg has been built, so it can be
+// used to quickly reject check inputs that will cause
+// EnclosingFunction to fail, prior to SSA building.
+func HasEnclosingFunction(pkg *Package, path []ast.Node) bool {
+ return findEnclosingPackageLevelFunction(pkg, path) != nil
+}
+
+// findEnclosingPackageLevelFunction returns the Function
+// corresponding to the package-level function enclosing path.
+func findEnclosingPackageLevelFunction(pkg *Package, path []ast.Node) *Function {
+ if n := len(path); n >= 2 { // [... {Gen,Func}Decl File]
+ switch decl := path[n-2].(type) {
+ case *ast.GenDecl:
+ if decl.Tok == token.VAR && n >= 3 {
+ // Package-level 'var' initializer.
+ return pkg.init
+ }
+
+ case *ast.FuncDecl:
+ if decl.Recv == nil && decl.Name.Name == "init" {
+ // Explicit init() function.
+ for _, b := range pkg.init.Blocks {
+ for _, instr := range b.Instrs {
+ if instr, ok := instr.(*Call); ok {
+ if callee, ok := instr.Call.Value.(*Function); ok && callee.Pkg == pkg && callee.Pos() == decl.Name.NamePos {
+ return callee
+ }
+ }
+ }
+ }
+ // Hack: return non-nil when SSA is not yet
+ // built so that HasEnclosingFunction works.
+ return pkg.init
+ }
+ // Declared function/method.
+ return findNamedFunc(pkg, decl.Name.NamePos)
+ }
+ }
+ return nil // not in any function
+}
+
+// findNamedFunc returns the named function whose FuncDecl.Ident is at
+// position pos.
+func findNamedFunc(pkg *Package, pos token.Pos) *Function {
+ // Look at all package members and method sets of named types.
+ // Not very efficient.
+ for _, mem := range pkg.Members {
+ switch mem := mem.(type) {
+ case *Function:
+ if mem.Pos() == pos {
+ return mem
+ }
+ case *Type:
+ mset := pkg.Prog.MethodSets.MethodSet(types.NewPointer(mem.Type()))
+ for i, n := 0, mset.Len(); i < n; i++ {
+ // Don't call Program.Method: avoid creating wrappers.
+ obj := mset.At(i).Obj().(*types.Func)
+ if obj.Pos() == pos {
+ // obj from MethodSet may not be the origin type.
+ m := obj.Origin()
+ return pkg.objects[m].(*Function)
+ }
+ }
+ }
+ }
+ return nil
+}
+
+// ValueForExpr returns the SSA Value that corresponds to non-constant
+// expression e.
+//
+// It returns nil if no value was found, e.g.
+// - the expression is not lexically contained within f;
+// - f was not built with debug information; or
+// - e is a constant expression. (For efficiency, no debug
+// information is stored for constants. Use
+// go/types.Info.Types[e].Value instead.)
+// - e is a reference to nil or a built-in function.
+// - the value was optimised away.
+//
+// If e is an addressable expression used in an lvalue context,
+// value is the address denoted by e, and isAddr is true.
+//
+// The types of e (or &e, if isAddr) and the result are equal
+// (modulo "untyped" bools resulting from comparisons).
+//
+// (Tip: to find the ssa.Value given a source position, use
+// astutil.PathEnclosingInterval to locate the ast.Node, then
+// EnclosingFunction to locate the Function, then ValueForExpr to find
+// the ssa.Value.)
+func (f *Function) ValueForExpr(e ast.Expr) (value Value, isAddr bool) {
+ if f.debugInfo() { // (opt)
+ e = unparen(e)
+ for _, b := range f.Blocks {
+ for _, instr := range b.Instrs {
+ if ref, ok := instr.(*DebugRef); ok {
+ if ref.Expr == e {
+ return ref.X, ref.IsAddr
+ }
+ }
+ }
+ }
+ }
+ return
+}
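+
+// valueAtPath is an illustrative sketch, not part of the upstream API,
+// of the recipe in the comment above: given the path of enclosing AST
+// nodes for an expression (for example as produced by
+// astutil.PathEnclosingInterval), it locates the enclosing Function and
+// then the corresponding ssa.Value. pkg must have been built with debug
+// information for ValueForExpr to find anything.
+func valueAtPath(pkg *Package, path []ast.Node) (Value, bool) {
+	fn := EnclosingFunction(pkg, path)
+	if fn == nil {
+		return nil, false // path is not within any built function
+	}
+	expr, ok := path[0].(ast.Expr)
+	if !ok {
+		return nil, false // innermost node is not an expression
+	}
+	v, _ := fn.ValueForExpr(expr)
+	return v, v != nil
+}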
+
+// --- Lookup functions for source-level named entities (types.Objects) ---
+
+// Package returns the SSA Package corresponding to the specified
+// type-checker package. It returns nil if no such Package was
+// created by a prior call to prog.CreatePackage.
+func (prog *Program) Package(pkg *types.Package) *Package {
+ return prog.packages[pkg]
+}
+
+// packageLevelMember returns the package-level member corresponding
+// to the specified symbol, which may be a package-level const
+// (*NamedConst), var (*Global) or func/method (*Function) of some
+// package in prog.
+//
+// It returns nil if the object belongs to a package that has not been
+// created by prog.CreatePackage.
+func (prog *Program) packageLevelMember(obj types.Object) Member {
+ if pkg, ok := prog.packages[obj.Pkg()]; ok {
+ return pkg.objects[obj]
+ }
+ return nil
+}
+
+// FuncValue returns the SSA function or (non-interface) method
+// denoted by the specified func symbol. It returns nil id the symbol
+// denotes an interface method, or belongs to a package that was not
+// created by prog.CreatePackage.
+func (prog *Program) FuncValue(obj *types.Func) *Function {
+ fn, _ := prog.packageLevelMember(obj).(*Function)
+ return fn
+}
+
+// ConstValue returns the SSA constant denoted by the specified const symbol.
+func (prog *Program) ConstValue(obj *types.Const) *Const {
+ // TODO(adonovan): opt: share (don't reallocate)
+ // Consts for const objects and constant ast.Exprs.
+
+ // Universal constant? {true,false,nil}
+ if obj.Parent() == types.Universe {
+ return NewConst(obj.Val(), obj.Type())
+ }
+ // Package-level named constant?
+ if v := prog.packageLevelMember(obj); v != nil {
+ return v.(*NamedConst).Value
+ }
+ return NewConst(obj.Val(), obj.Type())
+}
+
+// VarValue returns the SSA Value that corresponds to a specific
+// identifier denoting the specified var symbol.
+//
+// VarValue returns nil if a local variable was not found, perhaps
+// because its package was not built, the debug information was not
+// requested during SSA construction, or the value was optimized away.
+//
+// ref is the path to an ast.Ident (e.g. from PathEnclosingInterval),
+// and that ident must resolve to obj.
+//
+// pkg is the package enclosing the reference. (A reference to a var
+// always occurs within a function, so we need to know where to find it.)
+//
+// If the identifier is a field selector and its base expression is
+// non-addressable, then VarValue returns the value of that field.
+// For example:
+//
+// func f() struct {x int}
+// f().x // VarValue(x) returns a *Field instruction of type int
+//
+// All other identifiers denote addressable locations (variables).
+// For them, VarValue may return either the variable's address or its
+// value, even when the expression is evaluated only for its value; the
+// situation is reported by isAddr, the second component of the result.
+//
+// If !isAddr, the returned value is the one associated with the
+// specific identifier. For example,
+//
+// var x int // VarValue(x) returns Const 0 here
+// x = 1 // VarValue(x) returns Const 1 here
+//
+// It is not specified whether the value or the address is returned in
+// any particular case, as it may depend upon optimizations performed
+// during SSA code generation, such as registerization, constant
+// folding, avoidance of materialization of subexpressions, etc.
+func (prog *Program) VarValue(obj *types.Var, pkg *Package, ref []ast.Node) (value Value, isAddr bool) {
+ // All references to a var are local to some function, possibly init.
+ fn := EnclosingFunction(pkg, ref)
+ if fn == nil {
+ return // e.g. def of struct field; SSA not built?
+ }
+
+ id := ref[0].(*ast.Ident)
+
+ // Defining ident of a parameter?
+ if id.Pos() == obj.Pos() {
+ for _, param := range fn.Params {
+ if param.Object() == obj {
+ return param, false
+ }
+ }
+ }
+
+ // Other ident?
+ for _, b := range fn.Blocks {
+ for _, instr := range b.Instrs {
+ if dr, ok := instr.(*DebugRef); ok {
+ if dr.Pos() == id.Pos() {
+ return dr.X, dr.IsAddr
+ }
+ }
+ }
+ }
+
+ // Defining ident of package-level var?
+ if v := prog.packageLevelMember(obj); v != nil {
+ return v.(*Global), true
+ }
+
+ return // e.g. debug info not requested, or var optimized away
+}
diff --git a/vendor/golang.org/x/tools/go/ssa/ssa.go b/vendor/golang.org/x/tools/go/ssa/ssa.go
new file mode 100644
index 0000000..1231afd
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/ssa/ssa.go
@@ -0,0 +1,1871 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssa
+
+// This package defines a high-level intermediate representation for
+// Go programs using static single-assignment (SSA) form.
+
+import (
+ "fmt"
+ "go/ast"
+ "go/constant"
+ "go/token"
+ "go/types"
+ "sync"
+
+ "golang.org/x/tools/go/types/typeutil"
+ "golang.org/x/tools/internal/typeparams"
+)
+
+// A Program is a partial or complete Go program converted to SSA form.
+type Program struct {
+ Fset *token.FileSet // position information for the files of this Program
+ imported map[string]*Package // all importable Packages, keyed by import path
+ packages map[*types.Package]*Package // all created Packages
+ mode BuilderMode // set of mode bits for SSA construction
+ MethodSets typeutil.MethodSetCache // cache of type-checker's method-sets
+
+ canon *canonizer // type canonicalization map
+ ctxt *types.Context // cache for type checking instantiations
+
+ methodsMu sync.Mutex
+ methodSets typeutil.Map // maps type to its concrete *methodSet
+
+ // memoization of whether a type refers to type parameters
+ hasParamsMu sync.Mutex
+ hasParams typeparams.Free
+
+ runtimeTypesMu sync.Mutex
+ runtimeTypes typeutil.Map // set of runtime types (from MakeInterface)
+
+ // objectMethods is a memoization of objectMethod
+ // to avoid creation of duplicate methods from type information.
+ objectMethodsMu sync.Mutex
+ objectMethods map[*types.Func]*Function
+}
+
+// A Package is a single analyzed Go package containing Members for
+// all package-level functions, variables, constants and types it
+// declares. These may be accessed directly via Members, or via the
+// type-specific accessor methods Func, Type, Var and Const.
+//
+// Members also contains entries for "init" (the synthetic package
+// initializer) and "init#%d", the nth declared init function,
+// and unspecified other things too.
+type Package struct {
+ Prog *Program // the owning program
+ Pkg *types.Package // the corresponding go/types.Package
+ Members map[string]Member // all package members keyed by name (incl. init and init#%d)
+ objects map[types.Object]Member // mapping of package objects to members (incl. methods). Contains *NamedConst, *Global, *Function (values but not types)
+ init *Function // Func("init"); the package's init function
+ debug bool // include full debug info in this package
+ syntax bool // package was loaded from syntax
+
+ // The following fields are set transiently, then cleared
+ // after building.
+ buildOnce sync.Once // ensures package building occurs once
+ ninit int32 // number of init functions
+ info *types.Info // package type information
+ files []*ast.File // package ASTs
+ created []*Function // members created as a result of building this package (includes declared functions, wrappers)
+ initVersion map[ast.Expr]string // goversion to use for each global var init expr
+}
+
+// A Member is a member of a Go package, implemented by *NamedConst,
+// *Global, *Function, or *Type; they are created by package-level
+// const, var, func and type declarations respectively.
+type Member interface {
+ Name() string // declared name of the package member
+ String() string // package-qualified name of the package member
+ RelString(*types.Package) string // like String, but relative refs are unqualified
+ Object() types.Object // typechecker's object for this member, if any
+ Pos() token.Pos // position of member's declaration, if known
+ Type() types.Type // type of the package member
+ Token() token.Token // token.{VAR,FUNC,CONST,TYPE}
+ Package() *Package // the containing package
+}
+
+// A Type is a Member of a Package representing a package-level named type.
+type Type struct {
+ object *types.TypeName
+ pkg *Package
+}
+
+// A NamedConst is a Member of a Package representing a package-level
+// named constant.
+//
+// Pos() returns the position of the declaring ast.ValueSpec.Names[*]
+// identifier.
+//
+// NB: a NamedConst is not a Value; it contains a constant Value, which
+// it augments with the name and position of its 'const' declaration.
+type NamedConst struct {
+ object *types.Const
+ Value *Const
+ pkg *Package
+}
+
+// A Value is an SSA value that can be referenced by an instruction.
+type Value interface {
+ // Name returns the name of this value, and determines how
+ // this Value appears when used as an operand of an
+ // Instruction.
+ //
+ // This is the same as the source name for Parameters,
+ // Builtins, Functions, FreeVars, Globals.
+ // For constants, it is a representation of the constant's value
+ // and type. For all other Values this is the name of the
+ // virtual register defined by the instruction.
+ //
+ // The name of an SSA Value is not semantically significant,
+ // and may not even be unique within a function.
+ Name() string
+
+ // If this value is an Instruction, String returns its
+ // disassembled form; otherwise it returns unspecified
+ // human-readable information about the Value, such as its
+ // kind, name and type.
+ String() string
+
+ // Type returns the type of this value. Many instructions
+ // (e.g. IndexAddr) change their behaviour depending on the
+ // types of their operands.
+ Type() types.Type
+
+ // Parent returns the function to which this Value belongs.
+ // It returns nil for named Functions, Builtin, Const and Global.
+ Parent() *Function
+
+ // Referrers returns the list of instructions that have this
+ // value as one of their operands; it may contain duplicates
+ // if an instruction has a repeated operand.
+ //
+ // Referrers actually returns a pointer through which the
+ // caller may perform mutations to the object's state.
+ //
+ // Referrers is currently only defined if Parent()!=nil,
+ // i.e. for the function-local values FreeVar, Parameter,
+ // Functions (iff anonymous) and all value-defining instructions.
+ // It returns nil for named Functions, Builtin, Const and Global.
+ //
+ // Instruction.Operands contains the inverse of this relation.
+ Referrers() *[]Instruction
+
+ // Pos returns the location of the AST token most closely
+ // associated with the operation that gave rise to this value,
+ // or token.NoPos if it was not explicit in the source.
+ //
+ // For each ast.Node type, a particular token is designated as
+ // the closest location for the expression, e.g. the Lparen
+ // for an *ast.CallExpr. This permits a compact but
+ // approximate mapping from Values to source positions for use
+ // in diagnostic messages, for example.
+ //
+ // (Do not use this position to determine which Value
+ // corresponds to an ast.Expr; use Function.ValueForExpr
+ // instead. NB: it requires that the function was built with
+ // debug information.)
+ Pos() token.Pos
+}
+
+// An Instruction is an SSA instruction that computes a new Value or
+// has some effect.
+//
+// An Instruction that defines a value (e.g. BinOp) also implements
+// the Value interface; an Instruction that only has an effect (e.g. Store)
+// does not.
+type Instruction interface {
+ // String returns the disassembled form of this value.
+ //
+ // Examples of Instructions that are Values:
+ // "x + y" (BinOp)
+ // "len([])" (Call)
+ // Note that the name of the Value is not printed.
+ //
+ // Examples of Instructions that are not Values:
+ // "return x" (Return)
+ // "*y = x" (Store)
+ //
+ // (The separation Value.Name() from Value.String() is useful
+ // for some analyses which distinguish the operation from the
+ // value it defines, e.g., 'y = local int' is both an allocation
+ // of memory 'local int' and a definition of a pointer y.)
+ String() string
+
+ // Parent returns the function to which this instruction
+ // belongs.
+ Parent() *Function
+
+ // Block returns the basic block to which this instruction
+ // belongs.
+ Block() *BasicBlock
+
+ // setBlock sets the basic block to which this instruction belongs.
+ setBlock(*BasicBlock)
+
+ // Operands returns the operands of this instruction: the
+ // set of Values it references.
+ //
+ // Specifically, it appends their addresses to rands, a
+ // user-provided slice, and returns the resulting slice,
+ // permitting avoidance of memory allocation.
+ //
+ // The operands are appended in undefined order, but the order
+ // is consistent for a given Instruction; the addresses are
+ // always non-nil but may point to a nil Value. Clients may
+ // store through the pointers, e.g. to effect a value
+ // renaming.
+ //
+ // Value.Referrers is a subset of the inverse of this
+ // relation. (Referrers are not tracked for all types of
+ // Values.)
+ Operands(rands []*Value) []*Value
+
+ // Pos returns the location of the AST token most closely
+ // associated with the operation that gave rise to this
+ // instruction, or token.NoPos if it was not explicit in the
+ // source.
+ //
+ // For each ast.Node type, a particular token is designated as
+ // the closest location for the expression, e.g. the Go token
+ // for an *ast.GoStmt. This permits a compact but approximate
+ // mapping from Instructions to source positions for use in
+ // diagnostic messages, for example.
+ //
+ // (Do not use this position to determine which Instruction
+ // corresponds to an ast.Expr; see the notes for Value.Pos.
+ // This position may be used to determine which non-Value
+ // Instruction corresponds to some ast.Stmts, but not all: If
+ // and Jump instructions have no Pos(), for example.)
+ Pos() token.Pos
+}
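+
+// forEachOperand is an illustrative sketch, not part of the upstream
+// API, of the Operands idiom described above: a single small backing
+// array is reused across instructions so that gathering operands does
+// not allocate, and nil operand slots are skipped.
+func forEachOperand(fn *Function, visit func(instr Instruction, v Value)) {
+	var rands [10]*Value // reusable operand buffer
+	for _, b := range fn.Blocks {
+		for _, instr := range b.Instrs {
+			for _, op := range instr.Operands(rands[:0]) {
+				if v := *op; v != nil {
+					visit(instr, v)
+				}
+			}
+		}
+	}
+}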
+
+// A Node is a node in the SSA value graph. Every concrete type that
+// implements Node is also either a Value, an Instruction, or both.
+//
+// Node contains the methods common to Value and Instruction, plus the
+// Operands and Referrers methods generalized to return nil for
+// non-Instructions and non-Values, respectively.
+//
+// Node is provided to simplify SSA graph algorithms. Clients should
+// use the more specific and informative Value or Instruction
+// interfaces where appropriate.
+type Node interface {
+ // Common methods:
+ String() string
+ Pos() token.Pos
+ Parent() *Function
+
+ // Partial methods:
+ Operands(rands []*Value) []*Value // nil for non-Instructions
+ Referrers() *[]Instruction // nil for non-Values
+}
+
+// Function represents the parameters, results, and code of a function
+// or method.
+//
+// If Blocks is nil, this indicates an external function for which no
+// Go source code is available. In this case, FreeVars, Locals, and
+// Params are nil too. Clients performing whole-program analysis must
+// handle external functions specially.
+//
+// Blocks contains the function's control-flow graph (CFG).
+// Blocks[0] is the function entry point; block order is not otherwise
+// semantically significant, though it may affect the readability of
+// the disassembly.
+// To iterate over the blocks in dominance order, use DomPreorder().
+//
+// Recover is an optional second entry point to which control resumes
+// after a recovered panic. The Recover block may contain only a return
+// statement, preceded by a load of the function's named return
+// parameters, if any.
+//
+// A nested function (Parent()!=nil) that refers to one or more
+// lexically enclosing local variables ("free variables") has FreeVars.
+// Such functions cannot be called directly but require a
+// value created by MakeClosure which, via its Bindings, supplies
+// values for these parameters.
+//
+// If the function is a method (Signature.Recv() != nil) then the first
+// element of Params is the receiver parameter.
+//
+// A Go package may declare many functions called "init".
+// For each one, Object().Name() returns "init" but Name() returns
+// "init#1", etc, in declaration order.
+//
+// Pos() returns the declaring ast.FuncLit.Type.Func or the position
+// of the ast.FuncDecl.Name, if the function was explicit in the
+// source. Synthetic wrappers, for which Synthetic != "", may share
+// the same position as the function they wrap.
+// Syntax.Pos() always returns the position of the declaring "func" token.
+//
+// When the operand of a range statement is an iterator function,
+// the loop body is transformed into a synthetic anonymous function
+// that is passed as the yield argument in a call to the iterator.
+// In that case, Function.Pos is the position of the "range" token,
+// and Function.Syntax is the ast.RangeStmt.
+//
+// Synthetic functions, for which Synthetic != "", are functions
+// that do not appear in the source AST. These include:
+// - method wrappers,
+// - thunks,
+// - bound functions,
+// - empty functions built from loaded type information,
+// - yield functions created from range-over-func loops,
+// - package init functions, and
+// - instantiations of generic functions.
+//
+// Synthetic wrapper functions may share the same position
+// as the function they wrap.
+//
+// Type() returns the function's Signature.
+//
+// A generic function is a function or method that has uninstantiated type
+// parameters (TypeParams() != nil). Consider a hypothetical generic
+// method, (*Map[K,V]).Get. It may be instantiated with all
+// non-parameterized types as (*Map[string,int]).Get or with
+// parameterized types as (*Map[string,U]).Get, where U is a type parameter.
+// In both instantiations, Origin() refers to the instantiated generic
+// method, (*Map[K,V]).Get, TypeParams() refers to the parameters [K,V] of
+// the generic method. TypeArgs() refers to [string,U] or [string,int],
+// respectively, and is nil in the generic method.
+type Function struct {
+ name string
+ object *types.Func // symbol for declared function (nil for FuncLit or synthetic init)
+ method *selection // info about provenance of synthetic methods; thunk => non-nil
+ Signature *types.Signature
+ pos token.Pos
+
+ // source information
+ Synthetic string // provenance of synthetic function; "" for true source functions
+ syntax ast.Node // *ast.Func{Decl,Lit}, if from syntax (incl. generic instances) or (*ast.RangeStmt if a yield function)
+ info *types.Info // type annotations (iff syntax != nil)
+ goversion string // Go version of syntax (NB: init is special)
+
+ parent *Function // enclosing function if anon; nil if global
+ Pkg *Package // enclosing package; nil for shared funcs (wrappers and error.Error)
+ Prog *Program // enclosing program
+
+ buildshared *task // wait for a shared function to be done building (may be nil if <=1 builder ever needs to wait)
+
+ // These fields are populated only when the function body is built:
+
+ Params []*Parameter // function parameters; for methods, includes receiver
+ FreeVars []*FreeVar // free variables whose values must be supplied by closure
+ Locals []*Alloc // frame-allocated variables of this function
+ Blocks []*BasicBlock // basic blocks of the function; nil => external
+ Recover *BasicBlock // optional; control transfers here after recovered panic
+ AnonFuncs []*Function // anonymous functions (from FuncLit,RangeStmt) directly beneath this one
+ referrers []Instruction // referring instructions (iff Parent() != nil)
+	anonIdx int32 // position of a nested function in parent's AnonFuncs. fn.Parent()!=nil => fn.Parent().AnonFuncs[fn.anonIdx] == fn.
+
+ typeparams *types.TypeParamList // type parameters of this function. typeparams.Len() > 0 => generic or instance of generic function
+ typeargs []types.Type // type arguments that instantiated typeparams. len(typeargs) > 0 => instance of generic function
+ topLevelOrigin *Function // the origin function if this is an instance of a source function. nil if Parent()!=nil.
+ generic *generic // instances of this function, if generic
+
+ // The following fields are cleared after building.
+ build buildFunc // algorithm to build function body (nil => built)
+ currentBlock *BasicBlock // where to emit code
+ vars map[*types.Var]Value // addresses of local variables
+ results []*Alloc // result allocations of the current function
+ returnVars []*types.Var // variables for a return statement. Either results or for range-over-func a parent's results
+ targets *targets // linked stack of branch targets
+ lblocks map[*types.Label]*lblock // labelled blocks
+ subst *subster // type parameter substitutions (if non-nil)
+ jump *types.Var // synthetic variable for the yield state (non-nil => range-over-func)
+ deferstack *types.Var // synthetic variable holding enclosing ssa:deferstack()
+ source *Function // nearest enclosing source function
+ exits []*exit // exits of the function that need to be resolved
+ uniq int64 // source of unique ints within the source tree while building
+}
+
+// BasicBlock represents an SSA basic block.
+//
+// The final element of Instrs is always an explicit transfer of
+// control (If, Jump, Return, or Panic).
+//
+// A block may contain no Instructions only if it is unreachable,
+// i.e., Preds is nil. Empty blocks are typically pruned.
+//
+// BasicBlocks and their Preds/Succs relation form a (possibly cyclic)
+// graph independent of the SSA Value graph: the control-flow graph or
+// CFG. It is illegal for multiple edges to exist between the same
+// pair of blocks.
+//
+// Each BasicBlock is also a node in the dominator tree of the CFG.
+// The tree may be navigated using Idom()/Dominees() and queried using
+// Dominates().
+//
+// The order of Preds and Succs is significant (to Phi and If
+// instructions, respectively).
+type BasicBlock struct {
+ Index int // index of this block within Parent().Blocks
+ Comment string // optional label; no semantic significance
+ parent *Function // parent function
+ Instrs []Instruction // instructions in order
+ Preds, Succs []*BasicBlock // predecessors and successors
+ succs2 [2]*BasicBlock // initial space for Succs
+ dom domInfo // dominator tree info
+ gaps int // number of nil Instrs (transient)
+ rundefers int // number of rundefers (transient)
+}
+
+// Pure values ----------------------------------------
+
+// A FreeVar represents a free variable of the function to which it
+// belongs.
+//
+// FreeVars are used to implement anonymous functions, whose free
+// variables are lexically captured in a closure formed by
+// MakeClosure. The value of such a free var is an Alloc or another
+// FreeVar and is considered a potentially escaping heap address, with
+// pointer type.
+//
+// FreeVars are also used to implement bound method closures. Such a
+// free var represents the receiver value and may be of any type that
+// has concrete methods.
+//
+// Pos() returns the position of the value that was captured, which
+// belongs to an enclosing function.
+type FreeVar struct {
+ name string
+ typ types.Type
+ pos token.Pos
+ parent *Function
+ referrers []Instruction
+
+ // Transiently needed during building.
+ outer Value // the Value captured from the enclosing context.
+}
+
+// A Parameter represents an input parameter of a function.
+type Parameter struct {
+ name string
+ object *types.Var // non-nil
+ typ types.Type
+ parent *Function
+ referrers []Instruction
+}
+
+// A Const represents a value known at build time.
+//
+// Consts include true constants of boolean, numeric, and string types, as
+// defined by the Go spec; these are represented by a non-nil Value field.
+//
+// Consts also include the "zero" value of any type, of which the nil values
+// of various pointer-like types are a special case; these are represented
+// by a nil Value field.
+//
+// Pos() returns token.NoPos.
+//
+// Example printed forms:
+//
+// 42:int
+// "hello":untyped string
+// 3+4i:MyComplex
+// nil:*int
+// nil:[]string
+// [3]int{}:[3]int
+// struct{x string}{}:struct{x string}
+// 0:interface{int|int64}
+// nil:interface{bool|int} // no go/constant representation
+type Const struct {
+ typ types.Type
+ Value constant.Value
+}
+
+// A Global is a named Value holding the address of a package-level
+// variable.
+//
+// Pos() returns the position of the ast.ValueSpec.Names[*]
+// identifier.
+type Global struct {
+ name string
+ object types.Object // a *types.Var; may be nil for synthetics e.g. init$guard
+ typ types.Type
+ pos token.Pos
+
+ Pkg *Package
+}
+
+// A Builtin represents a specific use of a built-in function, e.g. len.
+//
+// Builtins are immutable values. Builtins do not have addresses.
+// Builtins can only appear in CallCommon.Value.
+//
+// Name() indicates the function: one of the built-in functions from the
+// Go spec (excluding "make" and "new") or one of these ssa-defined
+// intrinsics:
+//
+// // wrapnilchk returns ptr if non-nil, panics otherwise.
+// // (For use in indirection wrappers.)
+// func ssa:wrapnilchk(ptr *T, recvType, methodName string) *T
+//
+// Object() returns a *types.Builtin for built-ins defined by the spec,
+// nil for others.
+//
+// Type() returns a *types.Signature representing the effective
+// signature of the built-in for this call.
+type Builtin struct {
+ name string
+ sig *types.Signature
+}
+
+// Value-defining instructions ----------------------------------------
+
+// The Alloc instruction reserves space for a variable of the given type,
+// zero-initializes it, and yields its address.
+//
+// Alloc values are always addresses, and have pointer types, so the
+// type of the allocated variable is actually
+// Type().Underlying().(*types.Pointer).Elem().
+//
+// If Heap is false, Alloc zero-initializes the same local variable in
+// the call frame and returns its address; in this case the Alloc must
+// be present in Function.Locals. We call this a "local" alloc.
+//
+// If Heap is true, Alloc allocates a new zero-initialized variable
+// each time the instruction is executed. We call this a "new" alloc.
+//
+// When Alloc is applied to a channel, map or slice type, it returns
+// the address of an uninitialized (nil) reference of that kind; store
+// the result of MakeSlice, MakeMap or MakeChan in that location to
+// instantiate these types.
+//
+// Pos() returns the ast.CompositeLit.Lbrace for a composite literal,
+// or the ast.CallExpr.Rparen for a call to new() or for a call that
+// allocates a varargs slice.
+//
+// Example printed form:
+//
+// t0 = local int
+// t1 = new int
+type Alloc struct {
+ register
+ Comment string
+ Heap bool
+ index int // dense numbering; for lifting
+}
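+
+// heapAllocs is an illustrative sketch, not part of the upstream API:
+// it collects the "new" (heap) allocs of a function, i.e. the Alloc
+// instructions with Heap set, per the local/new distinction described
+// above. (Local allocs are instead listed in Function.Locals.)
+func heapAllocs(fn *Function) []*Alloc {
+	var out []*Alloc
+	for _, b := range fn.Blocks {
+		for _, instr := range b.Instrs {
+			if a, ok := instr.(*Alloc); ok && a.Heap {
+				out = append(out, a)
+			}
+		}
+	}
+	return out
+}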
+
+// The Phi instruction represents an SSA φ-node, which combines values
+// that differ across incoming control-flow edges and yields a new
+// value. Within a block, all φ-nodes must appear before all non-φ
+// nodes.
+//
+// Pos() returns the position of the && or || for short-circuit
+// control-flow joins, or that of the *Alloc for φ-nodes inserted
+// during SSA renaming.
+//
+// Example printed form:
+//
+// t2 = phi [0: t0, 1: t1]
+type Phi struct {
+ register
+ Comment string // a hint as to its purpose
+ Edges []Value // Edges[i] is value for Block().Preds[i]
+}
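+
+// phiIncoming is an illustrative sketch, not part of the upstream API:
+// it pairs each incoming value of a φ-node with the predecessor block
+// that supplies it, following the Edges[i] <-> Block().Preds[i]
+// correspondence documented above.
+func phiIncoming(phi *Phi) map[*BasicBlock]Value {
+	m := make(map[*BasicBlock]Value, len(phi.Edges))
+	for i, e := range phi.Edges {
+		m[phi.Block().Preds[i]] = e
+	}
+	return m
+}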
+
+// The Call instruction represents a function or method call.
+//
+// The Call instruction yields the function result if there is exactly
+// one. Otherwise it returns a tuple, the components of which are
+// accessed via Extract.
+//
+// See CallCommon for generic function call documentation.
+//
+// Pos() returns the ast.CallExpr.Lparen, if explicit in the source.
+//
+// Example printed form:
+//
+// t2 = println(t0, t1)
+// t4 = t3()
+// t7 = invoke t5.Println(...t6)
+type Call struct {
+ register
+ Call CallCommon
+}
+
+// The BinOp instruction yields the result of binary operation X Op Y.
+//
+// Pos() returns the ast.BinaryExpr.OpPos, if explicit in the source.
+//
+// Example printed form:
+//
+// t1 = t0 + 1:int
+type BinOp struct {
+ register
+ // One of:
+ // ADD SUB MUL QUO REM + - * / %
+ // AND OR XOR SHL SHR AND_NOT & | ^ << >> &^
+	// EQL NEQ LSS LEQ GTR GEQ      == != < <= > >=
+ Op token.Token
+ X, Y Value
+}
+
+// The UnOp instruction yields the result of Op X.
+// ARROW is channel receive.
+// MUL is pointer indirection (load).
+// XOR is bitwise complement.
+// SUB is negation.
+// NOT is logical negation.
+//
+// If CommaOk and Op=ARROW, the result is a 2-tuple of the value above
+// and a boolean indicating the success of the receive. The
+// components of the tuple are accessed using Extract.
+//
+// Pos() returns the ast.UnaryExpr.OpPos, if explicit in the source.
+// For receive operations (ARROW) implicit in ranging over a channel,
+// Pos() returns the ast.RangeStmt.For.
+// For implicit memory loads (MUL), Pos() returns the position of the
+// most closely associated source-level construct; the details are not
+// specified.
+//
+// Example printed form:
+//
+// t0 = *x
+// t2 = <-t1,ok
+type UnOp struct {
+ register
+ Op token.Token // One of: NOT SUB ARROW MUL XOR ! - <- * ^
+ X Value
+ CommaOk bool
+}
+
+// The ChangeType instruction applies to X a value-preserving type
+// change to Type().
+//
+// Type changes are permitted:
+// - between a named type and its underlying type.
+// - between two named types of the same underlying type.
+// - between (possibly named) pointers to identical base types.
+// - from a bidirectional channel to a read- or write-channel,
+// optionally adding/removing a name.
+// - between a type (t) and an instance of the type (tσ), i.e.
+// Type() == σ(X.Type()) (or X.Type()== σ(Type())) where
+// σ is the type substitution of Parent().TypeParams by
+// Parent().TypeArgs.
+//
+// This operation cannot fail dynamically.
+//
+// Type changes may be to or from a type parameter (or both). All
+// types in the type set of X.Type() have a value-preserving type
+// change to all types in the type set of Type().
+//
+// Pos() returns the ast.CallExpr.Lparen, if the instruction arose
+// from an explicit conversion in the source.
+//
+// Example printed form:
+//
+// t1 = changetype *int <- IntPtr (t0)
+type ChangeType struct {
+ register
+ X Value
+}
+
+// The Convert instruction yields the conversion of value X to type
+// Type(). One or both of those types is basic (but possibly named).
+//
+// A conversion may change the value and representation of its operand.
+// Conversions are permitted:
+// - between real numeric types.
+// - between complex numeric types.
+// - between string and []byte or []rune.
+// - between pointers and unsafe.Pointer.
+// - between unsafe.Pointer and uintptr.
+// - from (Unicode) integer to (UTF-8) string.
+//
+// A conversion may imply a type name change also.
+//
+// Conversions may be to or from a type parameter. All types in
+// the type set of X.Type() can be converted to all types in the type
+// set of Type().
+//
+// This operation cannot fail dynamically.
+//
+// Conversions of untyped string/number/bool constants to a specific
+// representation are eliminated during SSA construction.
+//
+// Pos() returns the ast.CallExpr.Lparen, if the instruction arose
+// from an explicit conversion in the source.
+//
+// Example printed form:
+//
+// t1 = convert []byte <- string (t0)
+type Convert struct {
+ register
+ X Value
+}
+
+// The MultiConvert instruction yields the conversion of value X to type
+// Type(). Either X.Type() or Type() must be a type parameter. Each
+// type in the type set of X.Type() can be converted to each type in the
+// type set of Type().
+//
+// See the documentation for Convert, ChangeType, and SliceToArrayPointer
+// for the conversions that are permitted. Additionally conversions of
+// slices to arrays are permitted.
+//
+// This operation can fail dynamically (see SliceToArrayPointer).
+//
+// Pos() returns the ast.CallExpr.Lparen, if the instruction arose
+// from an explicit conversion in the source.
+//
+// Example printed form:
+//
+// t1 = multiconvert D <- S (t0) [*[2]rune <- []rune | string <- []rune]
+type MultiConvert struct {
+ register
+ X Value
+ from []*types.Term
+ to []*types.Term
+}
+
+// ChangeInterface constructs a value of one interface type from a
+// value of another interface type known to be assignable to it.
+// This operation cannot fail.
+//
+// Pos() returns the ast.CallExpr.Lparen if the instruction arose from
+// an explicit T(e) conversion; the ast.TypeAssertExpr.Lparen if the
+// instruction arose from an explicit e.(T) operation; or token.NoPos
+// otherwise.
+//
+// Example printed form:
+//
+// t1 = change interface interface{} <- I (t0)
+type ChangeInterface struct {
+ register
+ X Value
+}
+
+// The SliceToArrayPointer instruction yields the conversion of slice X to
+// array pointer.
+//
+// Pos() returns the ast.CallExpr.Lparen, if the instruction arose
+// from an explicit conversion in the source.
+//
+// The conversion may be to or from a type parameter. All types in
+// the type set of X.Type() must be slice types that can be converted to
+// all types in the type set of Type(), which must all be pointer-to-array
+// types.
+//
+// This operation can fail dynamically if the length of the slice is less
+// than the length of the array.
+//
+// Example printed form:
+//
+// t1 = slice to array pointer *[4]byte <- []byte (t0)
+type SliceToArrayPointer struct {
+ register
+ X Value
+}
+
+// MakeInterface constructs an instance of an interface type from a
+// value of a concrete type.
+//
+// Use Program.MethodSets.MethodSet(X.Type()) to find the method-set
+// of X, and Program.MethodValue(m) to find the implementation of a method.
+//
+// To construct the zero value of an interface type T, use:
+//
+// NewConst(constant.MakeNil(), T, pos)
+//
+// Pos() returns the ast.CallExpr.Lparen, if the instruction arose
+// from an explicit conversion in the source.
+//
+// Example printed form:
+//
+// t1 = make interface{} <- int (42:int)
+// t2 = make Stringer <- t0
+type MakeInterface struct {
+ register
+ X Value
+}
+
+// The MakeClosure instruction yields a closure value whose code is
+// Fn and whose free variables' values are supplied by Bindings.
+//
+// Type() returns a (possibly named) *types.Signature.
+//
+// Pos() returns the ast.FuncLit.Type.Func for a function literal
+// closure or the ast.SelectorExpr.Sel for a bound method closure.
+//
+// Example printed form:
+//
+// t0 = make closure anon@1.2 [x y z]
+// t1 = make closure bound$(main.I).add [i]
+type MakeClosure struct {
+ register
+ Fn Value // always a *Function
+ Bindings []Value // values for each free variable in Fn.FreeVars
+}
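+
+// closureBindings is an illustrative sketch, not part of the upstream
+// API: it pairs each free variable of the closure's function with the
+// value MakeClosure supplies for it, relying on the documented
+// invariant that Bindings parallels Fn's FreeVars.
+func closureBindings(mc *MakeClosure) map[*FreeVar]Value {
+	fn := mc.Fn.(*Function) // Fn is always a *Function
+	m := make(map[*FreeVar]Value, len(fn.FreeVars))
+	for i, fv := range fn.FreeVars {
+		m[fv] = mc.Bindings[i]
+	}
+	return m
+}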
+
+// The MakeMap instruction creates a new hash-table-based map object
+// and yields a value of kind map.
+//
+// Type() returns a (possibly named) *types.Map.
+//
+// Pos() returns the ast.CallExpr.Lparen, if created by make(map), or
+// the ast.CompositeLit.Lbrack if created by a literal.
+//
+// Example printed form:
+//
+// t1 = make map[string]int t0
+// t1 = make StringIntMap t0
+type MakeMap struct {
+ register
+ Reserve Value // initial space reservation; nil => default
+}
+
+// The MakeChan instruction creates a new channel object and yields a
+// value of kind chan.
+//
+// Type() returns a (possibly named) *types.Chan.
+//
+// Pos() returns the ast.CallExpr.Lparen for the make(chan) that
+// created it.
+//
+// Example printed form:
+//
+// t0 = make chan int 0
+// t0 = make IntChan 0
+type MakeChan struct {
+ register
+ Size Value // int; size of buffer; zero => synchronous.
+}
+
+// The MakeSlice instruction yields a slice of length Len backed by a
+// newly allocated array of length Cap.
+//
+// Both Len and Cap must be non-nil Values of integer type.
+//
+// (Alloc(types.Array) followed by Slice will not suffice because
+// Alloc can only create arrays of constant length.)
+//
+// Type() returns a (possibly named) *types.Slice.
+//
+// Pos() returns the ast.CallExpr.Lparen for the make([]T) that
+// created it.
+//
+// Example printed form:
+//
+// t1 = make []string 1:int t0
+// t1 = make StringSlice 1:int t0
+type MakeSlice struct {
+ register
+ Len Value
+ Cap Value
+}
+
+// The Slice instruction yields a slice of an existing string, slice
+// or *array X between optional integer bounds Low and High.
+//
+// Dynamically, this instruction panics if X evaluates to a nil *array
+// pointer.
+//
+// Type() returns string if the type of X was string, otherwise a
+// *types.Slice with the same element type as X.
+//
+// Pos() returns the ast.SliceExpr.Lbrack if created by a x[:] slice
+// operation, the ast.CompositeLit.Lbrace if created by a literal, or
+// NoPos if not explicit in the source (e.g. a variadic argument slice).
+//
+// Example printed form:
+//
+// t1 = slice t0[1:]
+type Slice struct {
+ register
+ X Value // slice, string, or *array
+ Low, High, Max Value // each may be nil
+}
+
+// The FieldAddr instruction yields the address of Field of *struct X.
+//
+// The field is identified by its index within the field list of the
+// struct type of X.
+//
+// Dynamically, this instruction panics if X evaluates to a nil
+// pointer.
+//
+// Type() returns a (possibly named) *types.Pointer.
+//
+// Pos() returns the position of the ast.SelectorExpr.Sel for the
+// field, if explicit in the source. For implicit selections, returns
+// the position of the inducing explicit selection. If produced for a
+// struct literal S{f: e}, it returns the position of the colon; for
+// S{e} it returns the start of expression e.
+//
+// Example printed form:
+//
+// t1 = &t0.name [#1]
+type FieldAddr struct {
+ register
+ X Value // *struct
+ Field int // index into CoreType(CoreType(X.Type()).(*types.Pointer).Elem()).(*types.Struct).Fields
+}
+
+// The Field instruction yields the Field of struct X.
+//
+// The field is identified by its index within the field list of the
+// struct type of X; by using numeric indices we avoid ambiguity of
+// package-local identifiers and permit compact representations.
+//
+// Pos() returns the position of the ast.SelectorExpr.Sel for the
+// field, if explicit in the source. For implicit selections, returns
+// the position of the inducing explicit selection.
+//
+// Example printed form:
+//
+// t1 = t0.name [#1]
+type Field struct {
+ register
+ X Value // struct
+ Field int // index into CoreType(X.Type()).(*types.Struct).Fields
+}
+
+// The IndexAddr instruction yields the address of the element at
+// index Index of collection X. Index is an integer expression.
+//
+// The elements of maps and strings are not addressable; use Lookup (map),
+// Index (string), or MapUpdate instead.
+//
+// Dynamically, this instruction panics if X evaluates to a nil *array
+// pointer.
+//
+// Type() returns a (possibly named) *types.Pointer.
+//
+// Pos() returns the ast.IndexExpr.Lbrack for the index operation, if
+// explicit in the source.
+//
+// Example printed form:
+//
+// t2 = &t0[t1]
+type IndexAddr struct {
+ register
+ X Value // *array, slice or type parameter with types array, *array, or slice.
+ Index Value // numeric index
+}
+
+// The Index instruction yields element Index of collection X, an array,
+// string or type parameter containing an array, a string, a pointer to an
+// array, or a slice.
+//
+// Pos() returns the ast.IndexExpr.Lbrack for the index operation, if
+// explicit in the source.
+//
+// Example printed form:
+//
+// t2 = t0[t1]
+type Index struct {
+ register
+ X Value // array, string or type parameter with types array, *array, slice, or string.
+ Index Value // integer index
+}
+
+// The Lookup instruction yields element Index of collection map X.
+// Index is the appropriate key type.
+//
+// If CommaOk, the result is a 2-tuple of the value above and a
+// boolean indicating the result of a map membership test for the key.
+// The components of the tuple are accessed using Extract.
+//
+// Pos() returns the ast.IndexExpr.Lbrack, if explicit in the source.
+//
+// Example printed form:
+//
+// t2 = t0[t1]
+// t5 = t3[t4],ok
+type Lookup struct {
+ register
+ X Value // map
+ Index Value // key-typed index
+ CommaOk bool // return a value,ok pair
+}
+
+// SelectState is a helper for Select.
+// It represents one goal state and its corresponding communication.
+type SelectState struct {
+ Dir types.ChanDir // direction of case (SendOnly or RecvOnly)
+ Chan Value // channel to use (for send or receive)
+ Send Value // value to send (for send)
+ Pos token.Pos // position of token.ARROW
+ DebugNode ast.Node // ast.SendStmt or ast.UnaryExpr(<-) [debug mode]
+}
+
+// The Select instruction tests whether (or blocks until) one
+// of the specified send or receive states is entered.
+//
+// Let n be the number of States for which Dir==RECV and T_i (0<=i<n)
+// be the element type of each such state's Chan.
+// Select returns an n+2-tuple
+//
+// (index int, recvOk bool, r_0 T_0, ... r_n-1 T_n-1)
+//
+// The tuple's components, described below, must be accessed via the
+// Extract instruction.
+//
+// If Blocking, select waits until exactly one state holds, i.e. a
+// channel becomes ready for the designated operation of sending or
+// receiving; select chooses one among the ready states
+// pseudorandomly, performs the send or receive operation, and sets
+// 'index' to the index of the chosen channel.
+//
+// If !Blocking, select doesn't block if no states hold; instead it
+// returns immediately with index equal to -1.
+//
+// If the chosen channel was used for a receive, the r_i component is
+// set to the received value, where i is the index of that state among
+// all n receive states; otherwise r_i has the zero value of type T_i.
+// Note that the receive index i is not the same as the state
+// index, 'index'.
+//
+// The second component of the tuple, recvOk, is a boolean whose value
+// is true iff the selected operation was a receive and the receive
+// successfully yielded a value.
+//
+// Pos() returns the ast.SelectStmt.Select.
+//
+// Example printed form:
+//
+// t3 = select nonblocking [<-t0, t1<-t2]
+// t4 = select blocking []
+type Select struct {
+ register
+ States []*SelectState
+ Blocking bool
+}
+
+// The Range instruction yields an iterator over the domain and range
+// of X, which must be a string or map.
+//
+// Elements are accessed via Next.
+//
+// Type() returns an opaque and degenerate "rangeIter" type.
+//
+// Pos() returns the ast.RangeStmt.For.
+//
+// Example printed form:
+//
+// t0 = range "hello":string
+type Range struct {
+ register
+ X Value // string or map
+}
+
+// The Next instruction reads and advances the (map or string)
+// iterator Iter and returns a 3-tuple value (ok, k, v). If the
+// iterator is not exhausted, ok is true and k and v are the next
+// elements of the domain and range, respectively. Otherwise ok is
+// false and k and v are undefined.
+//
+// Components of the tuple are accessed using Extract.
+//
+// The IsString field distinguishes iterators over strings from those
+// over maps, as the Type() alone is insufficient: consider
+// map[int]rune.
+//
+// Type() returns a *types.Tuple for the triple (ok, k, v).
+// The types of k and/or v may be types.Invalid.
+//
+// Example printed form:
+//
+// t1 = next t0
+type Next struct {
+ register
+ Iter Value
+ IsString bool // true => string iterator; false => map iterator.
+}
+
+// The TypeAssert instruction tests whether interface value X has type
+// AssertedType.
+//
+// If !CommaOk, on success it returns v, the result of the conversion
+// (defined below); on failure it panics.
+//
+// If CommaOk: on success it returns a pair (v, true) where v is the
+// result of the conversion; on failure it returns (z, false) where z
+// is AssertedType's zero value. The components of the pair must be
+// accessed using the Extract instruction.
+//
+// If Underlying: tests whether interface value X has the underlying
+// type AssertedType.
+//
+// If AssertedType is a concrete type, TypeAssert checks whether the
+// dynamic type in interface X is equal to it, and if so, the result
+// of the conversion is a copy of the value in the interface.
+//
+// If AssertedType is an interface, TypeAssert checks whether the
+// dynamic type of the interface is assignable to it, and if so, the
+// result of the conversion is a copy of the interface value X.
+// If AssertedType is a superinterface of X.Type(), the operation will
+// fail iff the operand is nil. (Contrast with ChangeInterface, which
+// performs no nil-check.)
+//
+// Type() reflects the actual type of the result, possibly a
+// 2-types.Tuple; AssertedType is the asserted type.
+//
+// Depending on the TypeAssert's purpose, Pos may return:
+// - the ast.CallExpr.Lparen of an explicit T(e) conversion;
+// - the ast.TypeAssertExpr.Lparen of an explicit e.(T) operation;
+// - the ast.CaseClause.Case of a case of a type-switch statement;
+// - the Ident(m).NamePos of an interface method value i.m
+// (for which TypeAssert may be used to effect the nil check).
+//
+// Example printed form:
+//
+// t1 = typeassert t0.(int)
+// t3 = typeassert,ok t2.(T)
+type TypeAssert struct {
+ register
+ X Value
+ AssertedType types.Type
+ CommaOk bool
+}
+
+// The Extract instruction yields component Index of Tuple.
+//
+// This is used to access the results of instructions with multiple
+// return values, such as Call, TypeAssert, Next, UnOp(ARROW) and
+// IndexExpr(Map).
+//
+// Example printed form:
+//
+// t1 = extract t0 #1
+type Extract struct {
+ register
+ Tuple Value
+ Index int
+}
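+
+// For example (an illustrative sketch of the usual lowering, not a
+// normative specification), the Go statement
+//
+//	v, ok := m[key]
+//
+// is compiled to a CommaOk Lookup whose two components are then
+// obtained by a pair of Extracts:
+//
+//	t1 = t0[key],ok
+//	t2 = extract t1 #0
+//	t3 = extract t1 #1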
+
+// Instructions executed for effect. They do not yield a value. --------------------
+
+// The Jump instruction transfers control to the sole successor of its
+// owning block.
+//
+// A Jump must be the last instruction of its containing BasicBlock.
+//
+// Pos() returns NoPos.
+//
+// Example printed form:
+//
+// jump done
+type Jump struct {
+ anInstruction
+}
+
+// The If instruction transfers control to one of the two successors
+// of its owning block, depending on the boolean Cond: the first if
+// true, the second if false.
+//
+// An If instruction must be the last instruction of its containing
+// BasicBlock.
+//
+// Pos() returns NoPos.
+//
+// Example printed form:
+//
+// if t0 goto done else body
+type If struct {
+ anInstruction
+ Cond Value
+}
+
+// The Return instruction returns values and control back to the calling
+// function.
+//
+// len(Results) is always equal to the number of results in the
+// function's signature.
+//
+// If len(Results) > 1, Return returns a tuple value with the specified
+// components which the caller must access using Extract instructions.
+//
+// There is no instruction to return a ready-made tuple like those
+// returned by a "value,ok"-mode TypeAssert, Lookup or UnOp(ARROW) or
+// a tail-call to a function with multiple result parameters.
+//
+// Return must be the last instruction of its containing BasicBlock.
+// Such a block has no successors.
+//
+// Pos() returns the ast.ReturnStmt.Return, if explicit in the source.
+//
+// Example printed form:
+//
+// return
+// return nil:I, 2:int
+type Return struct {
+ anInstruction
+ Results []Value
+ pos token.Pos
+}
+
+// The RunDefers instruction pops and invokes the entire stack of
+// procedure calls pushed by Defer instructions in this function.
+//
+// It is legal to encounter multiple 'rundefers' instructions in a
+// single control-flow path through a function; this is useful in
+// the combined init() function, for example.
+//
+// Pos() returns NoPos.
+//
+// Example printed form:
+//
+// rundefers
+type RunDefers struct {
+ anInstruction
+}
+
+// The Panic instruction initiates a panic with value X.
+//
+// A Panic instruction must be the last instruction of its containing
+// BasicBlock, which must have no successors.
+//
+// NB: 'go panic(x)' and 'defer panic(x)' do not use this instruction;
+// they are treated as calls to a built-in function.
+//
+// Pos() returns the ast.CallExpr.Lparen if this panic was explicit
+// in the source.
+//
+// Example printed form:
+//
+// panic t0
+type Panic struct {
+ anInstruction
+ X Value // an interface{}
+ pos token.Pos
+}
+
+// The Go instruction creates a new goroutine and calls the specified
+// function within it.
+//
+// See CallCommon for generic function call documentation.
+//
+// Pos() returns the ast.GoStmt.Go.
+//
+// Example printed form:
+//
+// go println(t0, t1)
+// go t3()
+// go invoke t5.Println(...t6)
+type Go struct {
+ anInstruction
+ Call CallCommon
+ pos token.Pos
+}
+
+// The Defer instruction pushes the specified call onto a stack of
+// functions to be called by a RunDefers instruction or by a panic.
+//
+// If DeferStack != nil, it indicates the defer list that the defer is
+// added to. Defer list values come from the Builtin function
+// ssa:deferstack. Calls to ssa:deferstack() produce the defer stack
+// of the current function frame. DeferStack allows deferring onto a
+// defer stack other than that of the current function frame.
+//
+// See CallCommon for generic function call documentation.
+//
+// Pos() returns the ast.DeferStmt.Defer.
+//
+// Example printed form:
+//
+// defer println(t0, t1)
+// defer t3()
+// defer invoke t5.Println(...t6)
+type Defer struct {
+ anInstruction
+ Call CallCommon
+ DeferStack Value // stack of deferred functions (from ssa:deferstack() intrinsic) onto which this function is pushed
+ pos token.Pos
+}
+
+// The Send instruction sends X on channel Chan.
+//
+// Pos() returns the ast.SendStmt.Arrow, if explicit in the source.
+//
+// Example printed form:
+//
+// send t0 <- t1
+type Send struct {
+ anInstruction
+ Chan, X Value
+ pos token.Pos
+}
+
+// The Store instruction stores Val at address Addr.
+// Stores can be of arbitrary types.
+//
+// Pos() returns the position of the source-level construct most closely
+// associated with the memory store operation.
+// Since implicit memory stores are numerous and varied and depend upon
+// implementation choices, the details are not specified.
+//
+// Example printed form:
+//
+// *x = y
+type Store struct {
+ anInstruction
+ Addr Value
+ Val Value
+ pos token.Pos
+}
+
+// The MapUpdate instruction updates the association of Map[Key] to
+// Value.
+//
+// Pos() returns the ast.KeyValueExpr.Colon or ast.IndexExpr.Lbrack,
+// if explicit in the source.
+//
+// Example printed form:
+//
+// t0[t1] = t2
+type MapUpdate struct {
+ anInstruction
+ Map Value
+ Key Value
+ Value Value
+ pos token.Pos
+}
+
+// A DebugRef instruction maps a source-level expression Expr to the
+// SSA value X that represents the value (!IsAddr) or address (IsAddr)
+// of that expression.
+//
+// DebugRef is a pseudo-instruction: it has no dynamic effect.
+//
+// Pos() returns Expr.Pos(), the start position of the source-level
+// expression. This is not the same as the "designated" token as
+// documented at Value.Pos(). e.g. CallExpr.Pos() does not return the
+// position of the ("designated") Lparen token.
+//
+// If Expr is an *ast.Ident denoting a var or func, Object() returns
+// the object; though this information can be obtained from the type
+// checker, including it here greatly facilitates debugging.
+// For non-Ident expressions, Object() returns nil.
+//
+// DebugRefs are generated only for functions built with debugging
+// enabled; see Package.SetDebugMode() and the GlobalDebug builder
+// mode flag.
+//
+// DebugRefs are not emitted for ast.Idents referring to constants or
+// predeclared identifiers, since they are trivial and numerous.
+// Nor are they emitted for ast.ParenExprs.
+//
+// (By representing these as instructions, rather than out-of-band,
+// consistency is maintained during transformation passes by the
+// ordinary SSA renaming machinery.)
+//
+// Example printed form:
+//
+// ; *ast.CallExpr @ 102:9 is t5
+// ; var x float64 @ 109:72 is x
+// ; address of *ast.CompositeLit @ 216:10 is t0
+type DebugRef struct {
+ // TODO(generics): Reconsider what DebugRefs are for generics.
+ anInstruction
+ Expr ast.Expr // the referring expression (never *ast.ParenExpr)
+ object types.Object // the identity of the source var/func
+ IsAddr bool // Expr is addressable and X is the address it denotes
+ X Value // the value or address of Expr
+}
+
+// Embeddable mix-ins and helpers for common parts of other structs. -----------
+
+// register is a mix-in embedded by all SSA values that are also
+// instructions, i.e. virtual registers, and provides a uniform
+// implementation of most of the Value interface: Value.Name() is a
+// numbered register (e.g. "t0"); the other methods are field accessors.
+//
+// Temporary names are automatically assigned to each register on
+// completion of building a function in SSA form.
+//
+// Clients must not assume that the 'num' value (and the Name() derived
+// from it) is unique within a function. As always in this API,
+// semantics are determined only by identity; names exist only to
+// facilitate debugging.
+type register struct {
+ anInstruction
+ num int // "name" of virtual register, e.g. "t0". Not guaranteed unique.
+ typ types.Type // type of virtual register
+ pos token.Pos // position of source expression, or NoPos
+ referrers []Instruction
+}
+
+// anInstruction is a mix-in embedded by all Instructions.
+// It provides the implementations of the Block and setBlock methods.
+type anInstruction struct {
+ block *BasicBlock // the basic block of this instruction
+}
+
+// CallCommon is contained by Go, Defer and Call to hold the
+// common parts of a function or method call.
+//
+// Each CallCommon exists in one of two modes, function call and
+// interface method invocation, or "call" and "invoke" for short.
+//
+// 1. "call" mode: when Method is nil (!IsInvoke), a CallCommon
+// represents an ordinary function call of the value in Value,
+// which may be a *Builtin, a *Function or any other value of kind
+// 'func'.
+//
+// Value may be one of:
+//
+// (a) a *Function, indicating a statically dispatched call
+// to a package-level function, an anonymous function, or
+// a method of a named type.
+// (b) a *MakeClosure, indicating an immediately applied
+// function literal with free variables.
+// (c) a *Builtin, indicating a statically dispatched call
+// to a built-in function.
+// (d) any other value, indicating a dynamically dispatched
+// function call.
+//
+// StaticCallee returns the identity of the callee in cases
+// (a) and (b), nil otherwise.
+//
+// Args contains the arguments to the call. If Value is a method,
+// Args[0] contains the receiver parameter.
+//
+// Example printed form:
+//
+// t2 = println(t0, t1)
+// go t3()
+// defer t5(...t6)
+//
+// 2. "invoke" mode: when Method is non-nil (IsInvoke), a CallCommon
+// represents a dynamically dispatched call to an interface method.
+// In this mode, Value is the interface value and Method is the
+// interface's abstract method. The interface value may be a type
+// parameter. Note: an interface method may be shared by multiple
+// interfaces due to embedding; Value.Type() provides the specific
+// interface used for this call.
+//
+// Value is implicitly supplied to the concrete method implementation
+// as the receiver parameter; in other words, Args[0] holds not the
+// receiver but the first true argument.
+//
+// Example printed form:
+//
+// t1 = invoke t0.String()
+// go invoke t3.Run(t2)
+// defer invoke t4.Handle(...t5)
+//
+// For all calls to variadic functions (Signature().Variadic()),
+// the last element of Args is a slice.
+type CallCommon struct {
+ Value Value // receiver (invoke mode) or func value (call mode)
+ Method *types.Func // interface method (invoke mode)
+ Args []Value // actual parameters (in static method call, includes receiver)
+ pos token.Pos // position of CallExpr.Lparen, iff explicit in source
+}
+
+// IsInvoke returns true if this call has "invoke" (not "call") mode.
+func (c *CallCommon) IsInvoke() bool {
+ return c.Method != nil
+}
+
+func (c *CallCommon) Pos() token.Pos { return c.pos }
+
+// Signature returns the signature of the called function.
+//
+// For an "invoke"-mode call, the signature of the interface method is
+// returned.
+//
+// In either "call" or "invoke" mode, if the callee is a method, its
+// receiver is represented by sig.Recv, not sig.Params().At(0).
+func (c *CallCommon) Signature() *types.Signature {
+ if c.Method != nil {
+ return c.Method.Type().(*types.Signature)
+ }
+ return typeparams.CoreType(c.Value.Type()).(*types.Signature)
+}
+
+// StaticCallee returns the callee if this is a trivially static
+// "call"-mode call to a function.
+func (c *CallCommon) StaticCallee() *Function {
+ switch fn := c.Value.(type) {
+ case *Function:
+ return fn
+ case *MakeClosure:
+ return fn.Fn.(*Function)
+ }
+ return nil
+}
+
+// Description returns a description of the mode of this call suitable
+// for a user interface, e.g., "static method call".
+func (c *CallCommon) Description() string {
+ switch fn := c.Value.(type) {
+ case *Builtin:
+ return "built-in function call"
+ case *MakeClosure:
+ return "static function closure call"
+ case *Function:
+ if fn.Signature.Recv() != nil {
+ return "static method call"
+ }
+ return "static function call"
+ }
+ if c.IsInvoke() {
+ return "dynamic method call" // ("invoke" mode)
+ }
+ return "dynamic function call"
+}
+
+// The CallInstruction interface, implemented by *Go, *Defer and *Call,
+// exposes the common parts of function-calling instructions,
+// yet provides a way back to the Value defined by *Call alone.
+type CallInstruction interface {
+ Instruction
+ Common() *CallCommon // returns the common parts of the call
+ Value() *Call // returns the result value of the call (*Call) or nil (*Go, *Defer)
+}
+
+func (s *Call) Common() *CallCommon { return &s.Call }
+func (s *Defer) Common() *CallCommon { return &s.Call }
+func (s *Go) Common() *CallCommon { return &s.Call }
+
+func (s *Call) Value() *Call { return s }
+func (s *Defer) Value() *Call { return nil }
+func (s *Go) Value() *Call { return nil }
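+
+// Example (an illustrative sketch from a client package; visitDynamic is
+// a hypothetical callback): classifying the call sites of a function fn.
+//
+//	for _, b := range fn.Blocks {
+//		for _, instr := range b.Instrs {
+//			call, ok := instr.(ssa.CallInstruction)
+//			if !ok {
+//				continue
+//			}
+//			common := call.Common()
+//			if !common.IsInvoke() && common.StaticCallee() == nil {
+//				visitDynamic(call) // dynamically dispatched function call
+//			}
+//		}
+//	}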
+
+func (v *Builtin) Type() types.Type { return v.sig }
+func (v *Builtin) Name() string { return v.name }
+func (*Builtin) Referrers() *[]Instruction { return nil }
+func (v *Builtin) Pos() token.Pos { return token.NoPos }
+func (v *Builtin) Object() types.Object { return types.Universe.Lookup(v.name) }
+func (v *Builtin) Parent() *Function { return nil }
+
+func (v *FreeVar) Type() types.Type { return v.typ }
+func (v *FreeVar) Name() string { return v.name }
+func (v *FreeVar) Referrers() *[]Instruction { return &v.referrers }
+func (v *FreeVar) Pos() token.Pos { return v.pos }
+func (v *FreeVar) Parent() *Function { return v.parent }
+
+func (v *Global) Type() types.Type { return v.typ }
+func (v *Global) Name() string { return v.name }
+func (v *Global) Parent() *Function { return nil }
+func (v *Global) Pos() token.Pos { return v.pos }
+func (v *Global) Referrers() *[]Instruction { return nil }
+func (v *Global) Token() token.Token { return token.VAR }
+func (v *Global) Object() types.Object { return v.object }
+func (v *Global) String() string { return v.RelString(nil) }
+func (v *Global) Package() *Package { return v.Pkg }
+func (v *Global) RelString(from *types.Package) string { return relString(v, from) }
+
+func (v *Function) Name() string { return v.name }
+func (v *Function) Type() types.Type { return v.Signature }
+func (v *Function) Pos() token.Pos { return v.pos }
+func (v *Function) Token() token.Token { return token.FUNC }
+func (v *Function) Object() types.Object {
+ if v.object != nil {
+ return types.Object(v.object)
+ }
+ return nil
+}
+func (v *Function) String() string { return v.RelString(nil) }
+func (v *Function) Package() *Package { return v.Pkg }
+func (v *Function) Parent() *Function { return v.parent }
+func (v *Function) Referrers() *[]Instruction {
+ if v.parent != nil {
+ return &v.referrers
+ }
+ return nil
+}
+
+// TypeParams are the function's type parameters if generic or the
+// type parameters that were instantiated if fn is an instantiation.
+func (fn *Function) TypeParams() *types.TypeParamList {
+ return fn.typeparams
+}
+
+// TypeArgs are the types that TypeParams() were instantiated by to create fn
+// from fn.Origin().
+func (fn *Function) TypeArgs() []types.Type { return fn.typeargs }
+
+// Origin returns the generic function from which fn was instantiated,
+// or nil if fn is not an instantiation.
+func (fn *Function) Origin() *Function {
+ if fn.parent != nil && len(fn.typeargs) > 0 {
+ // Nested functions are BUILT at a different time than their instances.
+ // Build the declared package if it is not yet BUILT. This is not an
+ // expected use case, but it is simple and robust.
+ fn.declaredPackage().Build()
+ }
+ return origin(fn)
+}
+
+// origin is the function that fn is an instantiation of. Returns nil if fn is
+// not an instantiation.
+//
+// Precondition: fn and the origin function are done building.
+func origin(fn *Function) *Function {
+ if fn.parent != nil && len(fn.typeargs) > 0 {
+ return origin(fn.parent).AnonFuncs[fn.anonIdx]
+ }
+ return fn.topLevelOrigin
+}
+
+func (v *Parameter) Type() types.Type { return v.typ }
+func (v *Parameter) Name() string { return v.name }
+func (v *Parameter) Object() types.Object { return v.object }
+func (v *Parameter) Referrers() *[]Instruction { return &v.referrers }
+func (v *Parameter) Pos() token.Pos { return v.object.Pos() }
+func (v *Parameter) Parent() *Function { return v.parent }
+
+func (v *Alloc) Type() types.Type { return v.typ }
+func (v *Alloc) Referrers() *[]Instruction { return &v.referrers }
+func (v *Alloc) Pos() token.Pos { return v.pos }
+
+func (v *register) Type() types.Type { return v.typ }
+func (v *register) setType(typ types.Type) { v.typ = typ }
+func (v *register) Name() string { return fmt.Sprintf("t%d", v.num) }
+func (v *register) setNum(num int) { v.num = num }
+func (v *register) Referrers() *[]Instruction { return &v.referrers }
+func (v *register) Pos() token.Pos { return v.pos }
+func (v *register) setPos(pos token.Pos) { v.pos = pos }
+
+func (v *anInstruction) Parent() *Function { return v.block.parent }
+func (v *anInstruction) Block() *BasicBlock { return v.block }
+func (v *anInstruction) setBlock(block *BasicBlock) { v.block = block }
+func (v *anInstruction) Referrers() *[]Instruction { return nil }
+
+func (t *Type) Name() string { return t.object.Name() }
+func (t *Type) Pos() token.Pos { return t.object.Pos() }
+func (t *Type) Type() types.Type { return t.object.Type() }
+func (t *Type) Token() token.Token { return token.TYPE }
+func (t *Type) Object() types.Object { return t.object }
+func (t *Type) String() string { return t.RelString(nil) }
+func (t *Type) Package() *Package { return t.pkg }
+func (t *Type) RelString(from *types.Package) string { return relString(t, from) }
+
+func (c *NamedConst) Name() string { return c.object.Name() }
+func (c *NamedConst) Pos() token.Pos { return c.object.Pos() }
+func (c *NamedConst) String() string { return c.RelString(nil) }
+func (c *NamedConst) Type() types.Type { return c.object.Type() }
+func (c *NamedConst) Token() token.Token { return token.CONST }
+func (c *NamedConst) Object() types.Object { return c.object }
+func (c *NamedConst) Package() *Package { return c.pkg }
+func (c *NamedConst) RelString(from *types.Package) string { return relString(c, from) }
+
+func (d *DebugRef) Object() types.Object { return d.object }
+
+// Func returns the package-level function of the specified name,
+// or nil if not found.
+func (p *Package) Func(name string) (f *Function) {
+ f, _ = p.Members[name].(*Function)
+ return
+}
+
+// Var returns the package-level variable of the specified name,
+// or nil if not found.
+func (p *Package) Var(name string) (g *Global) {
+ g, _ = p.Members[name].(*Global)
+ return
+}
+
+// Const returns the package-level constant of the specified name,
+// or nil if not found.
+func (p *Package) Const(name string) (c *NamedConst) {
+ c, _ = p.Members[name].(*NamedConst)
+ return
+}
+
+// Type returns the package-level type of the specified name,
+// or nil if not found.
+func (p *Package) Type(name string) (t *Type) {
+ t, _ = p.Members[name].(*Type)
+ return
+}
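+
+// For example (a minimal sketch, assuming p is a built *Package):
+//
+//	if fn := p.Func("main"); fn != nil {
+//		fmt.Println(fn.Name()) // prints "main"
+//	}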
+
+func (v *Call) Pos() token.Pos { return v.Call.pos }
+func (s *Defer) Pos() token.Pos { return s.pos }
+func (s *Go) Pos() token.Pos { return s.pos }
+func (s *MapUpdate) Pos() token.Pos { return s.pos }
+func (s *Panic) Pos() token.Pos { return s.pos }
+func (s *Return) Pos() token.Pos { return s.pos }
+func (s *Send) Pos() token.Pos { return s.pos }
+func (s *Store) Pos() token.Pos { return s.pos }
+func (s *If) Pos() token.Pos { return token.NoPos }
+func (s *Jump) Pos() token.Pos { return token.NoPos }
+func (s *RunDefers) Pos() token.Pos { return token.NoPos }
+func (s *DebugRef) Pos() token.Pos { return s.Expr.Pos() }
+
+// Operands.
+
+func (v *Alloc) Operands(rands []*Value) []*Value {
+ return rands
+}
+
+func (v *BinOp) Operands(rands []*Value) []*Value {
+ return append(rands, &v.X, &v.Y)
+}
+
+func (c *CallCommon) Operands(rands []*Value) []*Value {
+ rands = append(rands, &c.Value)
+ for i := range c.Args {
+ rands = append(rands, &c.Args[i])
+ }
+ return rands
+}
+
+func (s *Go) Operands(rands []*Value) []*Value {
+ return s.Call.Operands(rands)
+}
+
+func (s *Call) Operands(rands []*Value) []*Value {
+ return s.Call.Operands(rands)
+}
+
+func (s *Defer) Operands(rands []*Value) []*Value {
+ return append(s.Call.Operands(rands), &s.DeferStack)
+}
+
+func (v *ChangeInterface) Operands(rands []*Value) []*Value {
+ return append(rands, &v.X)
+}
+
+func (v *ChangeType) Operands(rands []*Value) []*Value {
+ return append(rands, &v.X)
+}
+
+func (v *Convert) Operands(rands []*Value) []*Value {
+ return append(rands, &v.X)
+}
+
+func (v *MultiConvert) Operands(rands []*Value) []*Value {
+ return append(rands, &v.X)
+}
+
+func (v *SliceToArrayPointer) Operands(rands []*Value) []*Value {
+ return append(rands, &v.X)
+}
+
+func (s *DebugRef) Operands(rands []*Value) []*Value {
+ return append(rands, &s.X)
+}
+
+func (v *Extract) Operands(rands []*Value) []*Value {
+ return append(rands, &v.Tuple)
+}
+
+func (v *Field) Operands(rands []*Value) []*Value {
+ return append(rands, &v.X)
+}
+
+func (v *FieldAddr) Operands(rands []*Value) []*Value {
+ return append(rands, &v.X)
+}
+
+func (s *If) Operands(rands []*Value) []*Value {
+ return append(rands, &s.Cond)
+}
+
+func (v *Index) Operands(rands []*Value) []*Value {
+ return append(rands, &v.X, &v.Index)
+}
+
+func (v *IndexAddr) Operands(rands []*Value) []*Value {
+ return append(rands, &v.X, &v.Index)
+}
+
+func (*Jump) Operands(rands []*Value) []*Value {
+ return rands
+}
+
+func (v *Lookup) Operands(rands []*Value) []*Value {
+ return append(rands, &v.X, &v.Index)
+}
+
+func (v *MakeChan) Operands(rands []*Value) []*Value {
+ return append(rands, &v.Size)
+}
+
+func (v *MakeClosure) Operands(rands []*Value) []*Value {
+ rands = append(rands, &v.Fn)
+ for i := range v.Bindings {
+ rands = append(rands, &v.Bindings[i])
+ }
+ return rands
+}
+
+func (v *MakeInterface) Operands(rands []*Value) []*Value {
+ return append(rands, &v.X)
+}
+
+func (v *MakeMap) Operands(rands []*Value) []*Value {
+ return append(rands, &v.Reserve)
+}
+
+func (v *MakeSlice) Operands(rands []*Value) []*Value {
+ return append(rands, &v.Len, &v.Cap)
+}
+
+func (v *MapUpdate) Operands(rands []*Value) []*Value {
+ return append(rands, &v.Map, &v.Key, &v.Value)
+}
+
+func (v *Next) Operands(rands []*Value) []*Value {
+ return append(rands, &v.Iter)
+}
+
+func (s *Panic) Operands(rands []*Value) []*Value {
+ return append(rands, &s.X)
+}
+
+func (v *Phi) Operands(rands []*Value) []*Value {
+ for i := range v.Edges {
+ rands = append(rands, &v.Edges[i])
+ }
+ return rands
+}
+
+func (v *Range) Operands(rands []*Value) []*Value {
+ return append(rands, &v.X)
+}
+
+func (s *Return) Operands(rands []*Value) []*Value {
+ for i := range s.Results {
+ rands = append(rands, &s.Results[i])
+ }
+ return rands
+}
+
+func (*RunDefers) Operands(rands []*Value) []*Value {
+ return rands
+}
+
+func (v *Select) Operands(rands []*Value) []*Value {
+ for i := range v.States {
+ rands = append(rands, &v.States[i].Chan, &v.States[i].Send)
+ }
+ return rands
+}
+
+func (s *Send) Operands(rands []*Value) []*Value {
+ return append(rands, &s.Chan, &s.X)
+}
+
+func (v *Slice) Operands(rands []*Value) []*Value {
+ return append(rands, &v.X, &v.Low, &v.High, &v.Max)
+}
+
+func (s *Store) Operands(rands []*Value) []*Value {
+ return append(rands, &s.Addr, &s.Val)
+}
+
+func (v *TypeAssert) Operands(rands []*Value) []*Value {
+ return append(rands, &v.X)
+}
+
+func (v *UnOp) Operands(rands []*Value) []*Value {
+ return append(rands, &v.X)
+}
+
+// Non-Instruction Values:
+func (v *Builtin) Operands(rands []*Value) []*Value { return rands }
+func (v *FreeVar) Operands(rands []*Value) []*Value { return rands }
+func (v *Const) Operands(rands []*Value) []*Value { return rands }
+func (v *Function) Operands(rands []*Value) []*Value { return rands }
+func (v *Global) Operands(rands []*Value) []*Value { return rands }
+func (v *Parameter) Operands(rands []*Value) []*Value { return rands }
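+
+// Example (an illustrative sketch; visit is a hypothetical callback):
+// enumerating the operands of every instruction in a function fn by
+// means of the Operands methods defined above.
+//
+//	var rands [8]*Value // scratch buffer reused across instructions
+//	for _, b := range fn.Blocks {
+//		for _, instr := range b.Instrs {
+//			for _, op := range instr.Operands(rands[:0]) {
+//				if *op != nil { // some operands (e.g. Slice bounds) may be nil
+//					visit(*op)
+//				}
+//			}
+//		}
+//	}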
diff --git a/vendor/golang.org/x/tools/go/ssa/ssautil/load.go b/vendor/golang.org/x/tools/go/ssa/ssautil/load.go
new file mode 100644
index 0000000..3daa67a
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/ssa/ssautil/load.go
@@ -0,0 +1,214 @@
+// Copyright 2015 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssautil
+
+// This file defines utility functions for constructing programs in SSA form.
+
+import (
+ "go/ast"
+ "go/token"
+ "go/types"
+
+ "golang.org/x/tools/go/loader"
+ "golang.org/x/tools/go/packages"
+ "golang.org/x/tools/go/ssa"
+ "golang.org/x/tools/internal/versions"
+)
+
+// Packages creates an SSA program for a set of packages.
+//
+// The packages must have been loaded from source syntax using the
+// [packages.Load] function in [packages.LoadSyntax] or
+// [packages.LoadAllSyntax] mode.
+//
+// Packages creates an SSA package for each well-typed package in the
+// initial list, plus all their dependencies. The resulting list of
+// packages corresponds to the list of initial packages, and may contain
+// a nil if SSA code could not be constructed for the corresponding initial
+// package due to type errors.
+//
+// Code for bodies of functions is not built until [Program.Build] is
+// called on the resulting Program. SSA code is constructed only for
+// the initial packages with well-typed syntax trees.
+//
+// The mode parameter controls diagnostics and checking during SSA construction.
+func Packages(initial []*packages.Package, mode ssa.BuilderMode) (*ssa.Program, []*ssa.Package) {
+ // TODO(adonovan): opt: this calls CreatePackage far more than
+ // necessary: for all dependencies, not just the (non-initial)
+ // direct dependencies of the initial packages.
+ //
+ // But can it reasonably be changed without breaking the
+ // spirit and/or letter of the law above? Clients may notice
+ // if we call CreatePackage less, as methods like
+ // Program.FuncValue will return nil. Or must we provide a new
+ // function (and perhaps deprecate this one)? Is it worth it?
+ //
+ // Tim King makes the interesting point that it would be
+ // possible to entirely alleviate the client from the burden
+ // of calling CreatePackage for non-syntax packages, if we
+ // were to treat vars and funcs lazily in the same way we now
+ // treat methods. (In essence, try to move away from the
+ // notion of ssa.Packages, and make the Program answer
+ // all reasonable questions about any types.Object.)
+
+ return doPackages(initial, mode, false)
+}
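+
+// A hedged usage sketch (the "./..." pattern and the error handling are
+// illustrative only):
+//
+//	cfg := &packages.Config{Mode: packages.LoadAllSyntax}
+//	initial, err := packages.Load(cfg, "./...")
+//	if err != nil {
+//		log.Fatal(err)
+//	}
+//	prog, pkgs := Packages(initial, ssa.SanityCheckFunctions)
+//	prog.Build()
+//	_ = pkgs // one *ssa.Package (possibly nil) per initial package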
+
+// AllPackages creates an SSA program for a set of packages plus all
+// their dependencies.
+//
+// The packages must have been loaded from source syntax using the
+// [packages.Load] function in [packages.LoadAllSyntax] mode.
+//
+// AllPackages creates an SSA package for each well-typed package in the
+// initial list, plus all their dependencies. The resulting list of
+// packages corresponds to the list of initial packages, and may contain
+// a nil if SSA code could not be constructed for the corresponding
+// initial package due to type errors.
+//
+// Code for bodies of functions is not built until Build is called on
+// the resulting Program. SSA code is constructed for all packages with
+// well-typed syntax trees.
+//
+// The mode parameter controls diagnostics and checking during SSA construction.
+func AllPackages(initial []*packages.Package, mode ssa.BuilderMode) (*ssa.Program, []*ssa.Package) {
+ return doPackages(initial, mode, true)
+}
+
+func doPackages(initial []*packages.Package, mode ssa.BuilderMode, deps bool) (*ssa.Program, []*ssa.Package) {
+
+ var fset *token.FileSet
+ if len(initial) > 0 {
+ fset = initial[0].Fset
+ }
+
+ prog := ssa.NewProgram(fset, mode)
+
+ isInitial := make(map[*packages.Package]bool, len(initial))
+ for _, p := range initial {
+ isInitial[p] = true
+ }
+
+ ssamap := make(map[*packages.Package]*ssa.Package)
+ packages.Visit(initial, nil, func(p *packages.Package) {
+ if p.Types != nil && !p.IllTyped {
+ var files []*ast.File
+ var info *types.Info
+ if deps || isInitial[p] {
+ files = p.Syntax
+ info = p.TypesInfo
+ }
+ ssamap[p] = prog.CreatePackage(p.Types, files, info, true)
+ }
+ })
+
+ var ssapkgs []*ssa.Package
+ for _, p := range initial {
+ ssapkgs = append(ssapkgs, ssamap[p]) // may be nil
+ }
+ return prog, ssapkgs
+}
+
+// CreateProgram returns a new program in SSA form, given a program
+// loaded from source. An SSA package is created for each transitively
+// error-free package of lprog.
+//
+// Code for bodies of functions is not built until Build is called
+// on the result.
+//
+// The mode parameter controls diagnostics and checking during SSA construction.
+//
+// Deprecated: Use [golang.org/x/tools/go/packages] and the [Packages]
+// function instead; see ssa.Example_loadPackages.
+func CreateProgram(lprog *loader.Program, mode ssa.BuilderMode) *ssa.Program {
+ prog := ssa.NewProgram(lprog.Fset, mode)
+
+ for _, info := range lprog.AllPackages {
+ if info.TransitivelyErrorFree {
+ prog.CreatePackage(info.Pkg, info.Files, &info.Info, info.Importable)
+ }
+ }
+
+ return prog
+}
+
+// BuildPackage builds an SSA program with SSA intermediate
+// representation (IR) for all functions of a single package.
+//
+// It populates pkg by type-checking the specified file syntax trees. All
+// dependencies are loaded using the importer specified by tc, which
+// typically loads compiler export data; SSA code cannot be built for
+// those packages. BuildPackage then constructs an [ssa.Program] with all
+// dependency packages created, and builds and returns the SSA package
+// corresponding to pkg.
+//
+// The caller must have set pkg.Path to the import path.
+//
+// The operation fails if there were any type-checking or import errors.
+//
+// See ../example_test.go for an example.
+func BuildPackage(tc *types.Config, fset *token.FileSet, pkg *types.Package, files []*ast.File, mode ssa.BuilderMode) (*ssa.Package, *types.Info, error) {
+ if fset == nil {
+ panic("no token.FileSet")
+ }
+ if pkg.Path() == "" {
+ panic("package has no import path")
+ }
+
+ info := &types.Info{
+ Types: make(map[ast.Expr]types.TypeAndValue),
+ Defs: make(map[*ast.Ident]types.Object),
+ Uses: make(map[*ast.Ident]types.Object),
+ Implicits: make(map[ast.Node]types.Object),
+ Instances: make(map[*ast.Ident]types.Instance),
+ Scopes: make(map[ast.Node]*types.Scope),
+ Selections: make(map[*ast.SelectorExpr]*types.Selection),
+ }
+ versions.InitFileVersions(info)
+ if err := types.NewChecker(tc, fset, pkg, info).Files(files); err != nil {
+ return nil, nil, err
+ }
+
+ prog := ssa.NewProgram(fset, mode)
+
+ // Create SSA packages for all imports.
+ // Order is not significant.
+ created := make(map[*types.Package]bool)
+ var createAll func(pkgs []*types.Package)
+ createAll = func(pkgs []*types.Package) {
+ for _, p := range pkgs {
+ if !created[p] {
+ created[p] = true
+ prog.CreatePackage(p, nil, nil, true)
+ createAll(p.Imports())
+ }
+ }
+ }
+ createAll(pkg.Imports())
+
+ // TODO(adonovan): we could replace createAll with just:
+ //
+ // // Create SSA packages for all imports.
+ // for _, p := range pkg.Imports() {
+ // prog.CreatePackage(p, nil, nil, true)
+ // }
+ //
+ // (with minor changes to ../builder_test.go as shown in
+ // CL 511715 PS 10.) But this would strictly violate
+ // the letter of the doc comment above, which says "all
+ // dependencies created".
+ //
+ // Tim makes the good point that with some extra work we could
+ // remove the need for any CreatePackage calls except the
+ // ones with syntax (i.e. primary packages). Of course you
+ // wouldn't have ssa.Packages and Members for as many things,
+ // but no-one really uses that anyway.
+ // I wish I had done this from the outset.
+
+ // Create and build the primary package.
+ ssapkg := prog.CreatePackage(pkg, files, info, false)
+ ssapkg.Build()
+ return ssapkg, info, nil
+}
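+
+// A hedged sketch of typical use (the file name "hello.go" and the src
+// variable are placeholders):
+//
+//	fset := token.NewFileSet()
+//	f, err := parser.ParseFile(fset, "hello.go", src, parser.ParseComments)
+//	if err != nil {
+//		log.Fatal(err)
+//	}
+//	pkg := types.NewPackage("hello", "")
+//	conf := &types.Config{Importer: importer.Default()}
+//	hello, _, err := BuildPackage(conf, fset, pkg, []*ast.File{f}, ssa.SanityCheckFunctions)
+//	if err != nil {
+//		log.Fatal(err)
+//	}
+//	hello.Build()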
diff --git a/vendor/golang.org/x/tools/go/ssa/ssautil/switch.go b/vendor/golang.org/x/tools/go/ssa/ssautil/switch.go
new file mode 100644
index 0000000..dd4b04e
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/ssa/ssautil/switch.go
@@ -0,0 +1,230 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssautil
+
+// This file implements discovery of switch and type-switch constructs
+// from low-level control flow.
+//
+// Many techniques exist for compiling a high-level switch with
+// constant cases to efficient machine code. The optimal choice will
+// depend on the data type, the specific case values, the code in the
+// body of each case, and the hardware.
+// Some examples:
+// - a lookup table (for a switch that maps constants to constants)
+// - a computed goto
+// - a binary tree
+// - a perfect hash
+// - a two-level switch (to partition constant strings by their first byte).
+
+import (
+ "bytes"
+ "fmt"
+ "go/token"
+ "go/types"
+
+ "golang.org/x/tools/go/ssa"
+)
+
+// A ConstCase represents a single constant comparison.
+// It is part of a Switch.
+type ConstCase struct {
+ Block *ssa.BasicBlock // block performing the comparison
+ Body *ssa.BasicBlock // body of the case
+ Value *ssa.Const // case comparand
+}
+
+// A TypeCase represents a single type assertion.
+// It is part of a Switch.
+type TypeCase struct {
+ Block *ssa.BasicBlock // block performing the type assert
+ Body *ssa.BasicBlock // body of the case
+ Type types.Type // case type
+ Binding ssa.Value // value bound by this case
+}
+
+// A Switch is a logical high-level control flow operation
+// (a multiway branch) discovered by analysis of a CFG containing
+// only if/else chains. It is not part of the ssa.Instruction set.
+//
+// One of ConstCases and TypeCases has length >= 2;
+// the other is nil.
+//
+// In a value switch, the list of cases may contain duplicate constants.
+// A type switch may contain duplicate types, or types assignable
+// to an interface type also in the list.
+// TODO(adonovan): eliminate such duplicates.
+type Switch struct {
+ Start *ssa.BasicBlock // block containing start of if/else chain
+ X ssa.Value // the switch operand
+ ConstCases []ConstCase // ordered list of constant comparisons
+ TypeCases []TypeCase // ordered list of type assertions
+ Default *ssa.BasicBlock // successor if all comparisons fail
+}
+
+func (sw *Switch) String() string {
+ // We represent each block by the String() of its
+ // first Instruction, e.g. "print(42:int)".
+ var buf bytes.Buffer
+ if sw.ConstCases != nil {
+ fmt.Fprintf(&buf, "switch %s {\n", sw.X.Name())
+ for _, c := range sw.ConstCases {
+ fmt.Fprintf(&buf, "case %s: %s\n", c.Value, c.Body.Instrs[0])
+ }
+ } else {
+ fmt.Fprintf(&buf, "switch %s.(type) {\n", sw.X.Name())
+ for _, c := range sw.TypeCases {
+ fmt.Fprintf(&buf, "case %s %s: %s\n",
+ c.Binding.Name(), c.Type, c.Body.Instrs[0])
+ }
+ }
+ if sw.Default != nil {
+ fmt.Fprintf(&buf, "default: %s\n", sw.Default.Instrs[0])
+ }
+ fmt.Fprintf(&buf, "}")
+ return buf.String()
+}
+
+// Switches examines the control-flow graph of fn and returns the
+// set of inferred value and type switches. A value switch tests an
+// ssa.Value for equality against two or more compile-time constant
+// values. Switches involving link-time constants (addresses) are
+// ignored. A type switch type-asserts an ssa.Value against two or
+// more types.
+//
+// The switches are returned in dominance order.
+//
+// The resulting switches do not necessarily correspond to uses of the
+// 'switch' keyword in the source: for example, a single source-level
+// switch statement with non-constant cases may result in zero, one or
+// many Switches, one per plural sequence of constant cases.
+// Switches may even be inferred from if/else- or goto-based control flow.
+// (In general, the control flow constructs of the source program
+// cannot be faithfully reproduced from the SSA representation.)
+func Switches(fn *ssa.Function) []Switch {
+ // Traverse the CFG in dominance order, so we don't
+ // enter an if/else-chain in the middle.
+ var switches []Switch
+ seen := make(map[*ssa.BasicBlock]bool) // TODO(adonovan): opt: use ssa.blockSet
+ for _, b := range fn.DomPreorder() {
+ if x, k := isComparisonBlock(b); x != nil {
+ // Block b starts a switch.
+ sw := Switch{Start: b, X: x}
+ valueSwitch(&sw, k, seen)
+ if len(sw.ConstCases) > 1 {
+ switches = append(switches, sw)
+ }
+ }
+
+ if y, x, T := isTypeAssertBlock(b); y != nil {
+ // Block b starts a type switch.
+ sw := Switch{Start: b, X: x}
+ typeSwitch(&sw, y, T, seen)
+ if len(sw.TypeCases) > 1 {
+ switches = append(switches, sw)
+ }
+ }
+ }
+ return switches
+}
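+
+// A hedged usage sketch: printing every switch discovered in a built
+// function fn.
+//
+//	for _, sw := range Switches(fn) {
+//		fmt.Println(sw.String())
+//	}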
+
+func valueSwitch(sw *Switch, k *ssa.Const, seen map[*ssa.BasicBlock]bool) {
+ b := sw.Start
+ x := sw.X
+ for x == sw.X {
+ if seen[b] {
+ break
+ }
+ seen[b] = true
+
+ sw.ConstCases = append(sw.ConstCases, ConstCase{
+ Block: b,
+ Body: b.Succs[0],
+ Value: k,
+ })
+ b = b.Succs[1]
+ if len(b.Instrs) > 2 {
+ // Block b contains not just 'if x == k',
+ // so it may have side effects that
+ // make it unsafe to elide.
+ break
+ }
+ if len(b.Preds) != 1 {
+ // Block b has multiple predecessors,
+ // so it cannot be treated as a case.
+ break
+ }
+ x, k = isComparisonBlock(b)
+ }
+ sw.Default = b
+}
+
+func typeSwitch(sw *Switch, y ssa.Value, T types.Type, seen map[*ssa.BasicBlock]bool) {
+ b := sw.Start
+ x := sw.X
+ for x == sw.X {
+ if seen[b] {
+ break
+ }
+ seen[b] = true
+
+ sw.TypeCases = append(sw.TypeCases, TypeCase{
+ Block: b,
+ Body: b.Succs[0],
+ Type: T,
+ Binding: y,
+ })
+ b = b.Succs[1]
+ if len(b.Instrs) > 4 {
+ // Block b contains not just
+ // {TypeAssert; Extract #0; Extract #1; If}
+ // so it may have side effects that
+ // make it unsafe to elide.
+ break
+ }
+ if len(b.Preds) != 1 {
+ // Block b has multiple predecessors,
+ // so it cannot be treated as a case.
+ break
+ }
+ y, x, T = isTypeAssertBlock(b)
+ }
+ sw.Default = b
+}
+
+// isComparisonBlock returns the operands (v, k) if a block ends with
+// a comparison v==k, where k is a compile-time constant.
+func isComparisonBlock(b *ssa.BasicBlock) (v ssa.Value, k *ssa.Const) {
+ if n := len(b.Instrs); n >= 2 {
+ if i, ok := b.Instrs[n-1].(*ssa.If); ok {
+ if binop, ok := i.Cond.(*ssa.BinOp); ok && binop.Block() == b && binop.Op == token.EQL {
+ if k, ok := binop.Y.(*ssa.Const); ok {
+ return binop.X, k
+ }
+ if k, ok := binop.X.(*ssa.Const); ok {
+ return binop.Y, k
+ }
+ }
+ }
+ }
+ return
+}
+
+// isTypeAssertBlock returns the operands (y, x, T) if a block ends with
+// a type assertion "if y, ok := x.(T); ok {".
+func isTypeAssertBlock(b *ssa.BasicBlock) (y, x ssa.Value, T types.Type) {
+ if n := len(b.Instrs); n >= 4 {
+ if i, ok := b.Instrs[n-1].(*ssa.If); ok {
+ if ext1, ok := i.Cond.(*ssa.Extract); ok && ext1.Block() == b && ext1.Index == 1 {
+ if ta, ok := ext1.Tuple.(*ssa.TypeAssert); ok && ta.Block() == b {
+ // hack: relies upon instruction ordering.
+ if ext0, ok := b.Instrs[n-3].(*ssa.Extract); ok {
+ return ext0, ta.X, ta.AssertedType
+ }
+ }
+ }
+ }
+ }
+ return
+}
diff --git a/vendor/golang.org/x/tools/go/ssa/ssautil/visit.go b/vendor/golang.org/x/tools/go/ssa/ssautil/visit.go
new file mode 100644
index 0000000..b4feb42
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/ssa/ssautil/visit.go
@@ -0,0 +1,157 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssautil // import "golang.org/x/tools/go/ssa/ssautil"
+
+import (
+ "go/ast"
+ "go/types"
+
+ "golang.org/x/tools/go/ssa"
+
+ _ "unsafe" // for linkname hack
+)
+
+// This file defines utilities for visiting the SSA representation of
+// a Program.
+//
+// TODO(adonovan): test coverage.
+
+// AllFunctions finds and returns the set of functions potentially
+// needed by program prog, as determined by a simple linker-style
+// reachability algorithm starting from the members and method-sets of
+// each package. The result may include anonymous functions and
+// synthetic wrappers.
+//
+// Precondition: all packages are built.
+//
+// TODO(adonovan): this function is underspecified. It doesn't
+// actually work like a linker, which computes reachability from main
+// using something like go/callgraph/cha (without materializing the
+// call graph). In fact, it treats all public functions and all
+// methods of public non-parameterized types as roots, even though
+// they may be unreachable--but only in packages created from syntax.
+//
+// I think we should deprecate the AllFunctions function in favor of
+// two clearly defined ones:
+//
+// 1. The first would efficiently compute CHA reachability from a set
+// of main packages, making it suitable for a whole-program
+// analysis context with InstantiateGenerics, in conjunction with
+// Program.Build.
+//
+// 2. The second would return only the set of functions corresponding
+// to source Func{Decl,Lit} syntax, like SrcFunctions in
+// go/analysis/passes/buildssa; this is suitable for
+// package-at-a-time (or handful of packages) context.
+// ssa.Package could easily expose it as a field.
+//
+// We could add them unexported for now and use them via the linkname hack.
+func AllFunctions(prog *ssa.Program) map[*ssa.Function]bool {
+ seen := make(map[*ssa.Function]bool)
+
+ var function func(fn *ssa.Function)
+ function = func(fn *ssa.Function) {
+ if !seen[fn] {
+ seen[fn] = true
+ var buf [10]*ssa.Value // avoid alloc in common case
+ for _, b := range fn.Blocks {
+ for _, instr := range b.Instrs {
+ for _, op := range instr.Operands(buf[:0]) {
+ if fn, ok := (*op).(*ssa.Function); ok {
+ function(fn)
+ }
+ }
+ }
+ }
+ }
+ }
+
+ // TODO(adonovan): opt: provide a way to share a builder
+ // across a sequence of MethodValue calls.
+
+ methodsOf := func(T types.Type) {
+ if !types.IsInterface(T) {
+ mset := prog.MethodSets.MethodSet(T)
+ for i := 0; i < mset.Len(); i++ {
+ function(prog.MethodValue(mset.At(i)))
+ }
+ }
+ }
+
+ // Historically, Program.RuntimeTypes used to include the type
+ // of any exported member of a package loaded from syntax that
+ // has a non-parameterized type, plus all types
+ // reachable from that type using reflection, even though
+ // these runtime types may not be required for them.
+ //
+ // Rather than break existing programs that rely on
+ // AllFunctions visiting extra methods that are unreferenced
+ // by IR and unreachable via reflection, we moved the logic
+ // here, unprincipled though it is.
+ // (See doc comment for better ideas.)
+ //
+ // Nonetheless, after the move, we no longer visit every
+ // method of any type recursively reachable from T, only the
+ // methods of T and *T themselves, and we only apply this to
+ // named types T, and not to the type of every exported
+ // package member.
+ exportedTypeHack := func(t *ssa.Type) {
+ if isSyntactic(t.Package()) &&
+ ast.IsExported(t.Name()) &&
+ !types.IsInterface(t.Type()) {
+ // Consider only named types.
+ // (Ignore aliases and unsafe.Pointer.)
+ if named, ok := t.Type().(*types.Named); ok {
+ if named.TypeParams() == nil {
+ methodsOf(named) // T
+ methodsOf(types.NewPointer(named)) // *T
+ }
+ }
+ }
+ }
+
+ for _, pkg := range prog.AllPackages() {
+ for _, mem := range pkg.Members {
+ switch mem := mem.(type) {
+ case *ssa.Function:
+ // Visit all package-level declared functions.
+ function(mem)
+
+ case *ssa.Type:
+ exportedTypeHack(mem)
+ }
+ }
+ }
+
+ // Visit all methods of types for which runtime types were
+ // materialized, as they are reachable through reflection.
+ for _, T := range prog.RuntimeTypes() {
+ methodsOf(T)
+ }
+
+ return seen
+}
+
+// MainPackages returns the subset of the specified packages
+// named "main" that define a main function.
+// The result may include synthetic "testmain" packages.
+func MainPackages(pkgs []*ssa.Package) []*ssa.Package {
+ var mains []*ssa.Package
+ for _, pkg := range pkgs {
+ if pkg.Pkg.Name() == "main" && pkg.Func("main") != nil {
+ mains = append(mains, pkg)
+ }
+ }
+ return mains
+}
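+
+// A hedged sketch combining the helpers in this package (initial is a
+// previously loaded []*packages.Package):
+//
+//	prog, pkgs := Packages(initial, ssa.InstantiateGenerics)
+//	prog.Build()
+//	mains := MainPackages(pkgs)
+//	allFns := AllFunctions(prog)
+//	_, _ = mains, allFns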
+
+// TODO(adonovan): propose a principled API for this. One possibility
+// is a new field, Package.SrcFunctions []*Function, which would
+// contain the list of SrcFunctions described in point 2 of the
+// AllFunctions doc comment, or nil if the package is not from syntax.
+// But perhaps overloading nil vs empty slice is too subtle.
+//
+//go:linkname isSyntactic golang.org/x/tools/go/ssa.isSyntactic
+func isSyntactic(pkg *ssa.Package) bool
diff --git a/vendor/golang.org/x/tools/go/ssa/subst.go b/vendor/golang.org/x/tools/go/ssa/subst.go
new file mode 100644
index 0000000..4dcb871
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/ssa/subst.go
@@ -0,0 +1,642 @@
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssa
+
+import (
+ "go/types"
+
+ "golang.org/x/tools/go/types/typeutil"
+ "golang.org/x/tools/internal/aliases"
+)
+
+// subster defines a type substitution operation of a set of type parameters
+// to type parameter free replacement types. Substitution is done within
+// the context of a package-level function instantiation. *Named types
+// declared in the function are unique to the instantiation.
+//
+// For example, given a parameterized function F
+//
+// func F[S, T any]() any {
+// type X struct{ s S; next *X }
+// var p *X
+// return p
+// }
+//
+// calling the instantiation F[string, int]() returns an interface
+// value (*X[string,int], nil) where the underlying value of
+// X[string,int] is a struct{s string; next *X[string,int]}.
+//
+// A nil *subster is a valid, empty substitution map. It always acts as
+// the identity function. This allows for treating parameterized and
+// non-parameterized functions identically while compiling to ssa.
+//
+// Not concurrency-safe.
+//
+// Note: Some may find it helpful to think through some of the most
+// complex substitution cases using lambda-calculus-inspired notation.
+// subst.typ() solves the problem of evaluating a type expression E
+// within the body of a function Fn[m] with type parameters m
+// once the type arguments N have been applied.
+// We can succinctly write this as a function application:
+//
+// ((λm. E) N)
+//
+// go/types does not provide this interface directly.
+// So what subster provides is a type substitution operation
+//
+// E[m:=N]
+type subster struct {
+ replacements map[*types.TypeParam]types.Type // values should contain no type params
+ cache map[types.Type]types.Type // cache of subst results
+ origin *types.Func // types.Objects declared within this origin function are unique within this context
+ ctxt *types.Context // speeds up repeated instantiations
+ uniqueness typeutil.Map // determines the uniqueness of the instantiations within the function
+ // TODO(taking): consider adding Pos
+}
+
+// makeSubster returns a subster that replaces tparams[i] with targs[i], using ctxt as a cache.
+// targs should not contain any types in tparams.
+// fn is the generic function for which we are substituting.
+func makeSubster(ctxt *types.Context, fn *types.Func, tparams *types.TypeParamList, targs []types.Type, debug bool) *subster {
+ assert(tparams.Len() == len(targs), "makeSubster argument count must match")
+
+ subst := &subster{
+ replacements: make(map[*types.TypeParam]types.Type, tparams.Len()),
+ cache: make(map[types.Type]types.Type),
+ origin: fn.Origin(),
+ ctxt: ctxt,
+ }
+ for i := 0; i < tparams.Len(); i++ {
+ subst.replacements[tparams.At(i)] = targs[i]
+ }
+ return subst
+}
+
+// typ returns the type of t with the type parameter tparams[i] substituted
+// for the type targs[i] where subst was created using tparams and targs.
+func (subst *subster) typ(t types.Type) (res types.Type) {
+ if subst == nil {
+ return t // A nil subst is type preserving.
+ }
+ if r, ok := subst.cache[t]; ok {
+ return r
+ }
+ defer func() {
+ subst.cache[t] = res
+ }()
+
+ switch t := t.(type) {
+ case *types.TypeParam:
+ if r := subst.replacements[t]; r != nil {
+ return r
+ }
+ return t
+
+ case *types.Basic:
+ return t
+
+ case *types.Array:
+ if r := subst.typ(t.Elem()); r != t.Elem() {
+ return types.NewArray(r, t.Len())
+ }
+ return t
+
+ case *types.Slice:
+ if r := subst.typ(t.Elem()); r != t.Elem() {
+ return types.NewSlice(r)
+ }
+ return t
+
+ case *types.Pointer:
+ if r := subst.typ(t.Elem()); r != t.Elem() {
+ return types.NewPointer(r)
+ }
+ return t
+
+ case *types.Tuple:
+ return subst.tuple(t)
+
+ case *types.Struct:
+ return subst.struct_(t)
+
+ case *types.Map:
+ key := subst.typ(t.Key())
+ elem := subst.typ(t.Elem())
+ if key != t.Key() || elem != t.Elem() {
+ return types.NewMap(key, elem)
+ }
+ return t
+
+ case *types.Chan:
+ if elem := subst.typ(t.Elem()); elem != t.Elem() {
+ return types.NewChan(t.Dir(), elem)
+ }
+ return t
+
+ case *types.Signature:
+ return subst.signature(t)
+
+ case *types.Union:
+ return subst.union(t)
+
+ case *types.Interface:
+ return subst.interface_(t)
+
+ case *aliases.Alias:
+ return subst.alias(t)
+
+ case *types.Named:
+ return subst.named(t)
+
+ case *opaqueType:
+ return t // opaque types are never substituted
+
+ default:
+ panic("unreachable")
+ }
+}
+
+// types returns the result of {subst.typ(ts[i])}.
+func (subst *subster) types(ts []types.Type) []types.Type {
+ res := make([]types.Type, len(ts))
+ for i := range ts {
+ res[i] = subst.typ(ts[i])
+ }
+ return res
+}
+
+func (subst *subster) tuple(t *types.Tuple) *types.Tuple {
+ if t != nil {
+ if vars := subst.varlist(t); vars != nil {
+ return types.NewTuple(vars...)
+ }
+ }
+ return t
+}
+
+type varlist interface {
+ At(i int) *types.Var
+ Len() int
+}
+
+// fieldlist is an adapter for structs for the varlist interface.
+type fieldlist struct {
+ str *types.Struct
+}
+
+func (fl fieldlist) At(i int) *types.Var { return fl.str.Field(i) }
+func (fl fieldlist) Len() int { return fl.str.NumFields() }
+
+func (subst *subster) struct_(t *types.Struct) *types.Struct {
+ if t != nil {
+ if fields := subst.varlist(fieldlist{t}); fields != nil {
+ tags := make([]string, t.NumFields())
+ for i, n := 0, t.NumFields(); i < n; i++ {
+ tags[i] = t.Tag(i)
+ }
+ return types.NewStruct(fields, tags)
+ }
+ }
+ return t
+}
+
+// varlist returns a slice containing subst(in[i]) for each i, or nil if
+// subst(in[i]) == in[i] for all i.
+func (subst *subster) varlist(in varlist) []*types.Var {
+ var out []*types.Var // nil => no updates
+ for i, n := 0, in.Len(); i < n; i++ {
+ v := in.At(i)
+ w := subst.var_(v)
+ if v != w && out == nil {
+ out = make([]*types.Var, n)
+ for j := 0; j < i; j++ {
+ out[j] = in.At(j)
+ }
+ }
+ if out != nil {
+ out[i] = w
+ }
+ }
+ return out
+}
+
+func (subst *subster) var_(v *types.Var) *types.Var {
+ if v != nil {
+ if typ := subst.typ(v.Type()); typ != v.Type() {
+ if v.IsField() {
+ return types.NewField(v.Pos(), v.Pkg(), v.Name(), typ, v.Embedded())
+ }
+ return types.NewVar(v.Pos(), v.Pkg(), v.Name(), typ)
+ }
+ }
+ return v
+}
+
+func (subst *subster) union(u *types.Union) *types.Union {
+ var out []*types.Term // nil => no updates
+
+ for i, n := 0, u.Len(); i < n; i++ {
+ t := u.Term(i)
+ r := subst.typ(t.Type())
+ if r != t.Type() && out == nil {
+ out = make([]*types.Term, n)
+ for j := 0; j < i; j++ {
+ out[j] = u.Term(j)
+ }
+ }
+ if out != nil {
+ out[i] = types.NewTerm(t.Tilde(), r)
+ }
+ }
+
+ if out != nil {
+ return types.NewUnion(out)
+ }
+ return u
+}
+
+func (subst *subster) interface_(iface *types.Interface) *types.Interface {
+ if iface == nil {
+ return nil
+ }
+
+	// methods holds the interface's explicit methods; it stays nil while no change is needed.
+	// Method signatures are built with a nil receiver; NewInterfaceType fills in the receivers.
+ var methods []*types.Func
+ initMethods := func(n int) { // copy first n explicit methods
+ methods = make([]*types.Func, iface.NumExplicitMethods())
+ for i := 0; i < n; i++ {
+ f := iface.ExplicitMethod(i)
+ norecv := changeRecv(f.Type().(*types.Signature), nil)
+ methods[i] = types.NewFunc(f.Pos(), f.Pkg(), f.Name(), norecv)
+ }
+ }
+ for i := 0; i < iface.NumExplicitMethods(); i++ {
+ f := iface.ExplicitMethod(i)
+		// On interfaces we need to break cycles: an anonymous interface type can be
+		// in a cycle with its method signatures, whose receivers refer back to the
+		// interface without going through a Named type.
+ norecv := changeRecv(f.Type().(*types.Signature), nil)
+ sig := subst.typ(norecv)
+ if sig != norecv && methods == nil {
+ initMethods(i)
+ }
+ if methods != nil {
+ methods[i] = types.NewFunc(f.Pos(), f.Pkg(), f.Name(), sig.(*types.Signature))
+ }
+ }
+
+ var embeds []types.Type
+ initEmbeds := func(n int) { // copy first n embedded types
+ embeds = make([]types.Type, iface.NumEmbeddeds())
+ for i := 0; i < n; i++ {
+ embeds[i] = iface.EmbeddedType(i)
+ }
+ }
+ for i := 0; i < iface.NumEmbeddeds(); i++ {
+ e := iface.EmbeddedType(i)
+ r := subst.typ(e)
+ if e != r && embeds == nil {
+ initEmbeds(i)
+ }
+ if embeds != nil {
+ embeds[i] = r
+ }
+ }
+
+ if methods == nil && embeds == nil {
+ return iface
+ }
+ if methods == nil {
+ initMethods(iface.NumExplicitMethods())
+ }
+ if embeds == nil {
+ initEmbeds(iface.NumEmbeddeds())
+ }
+ return types.NewInterfaceType(methods, embeds).Complete()
+}
+
+func (subst *subster) alias(t *aliases.Alias) types.Type {
+ // See subster.named. This follows the same strategy.
+ tparams := aliases.TypeParams(t)
+ targs := aliases.TypeArgs(t)
+ tname := t.Obj()
+ torigin := aliases.Origin(t)
+
+ if !declaredWithin(tname, subst.origin) {
+ // t is declared outside of the function origin. So t is a package level type alias.
+ if targs.Len() == 0 {
+ // No type arguments so no instantiation needed.
+ return t
+ }
+
+ // Instantiate with the substituted type arguments.
+ newTArgs := subst.typelist(targs)
+ return subst.instantiate(torigin, newTArgs)
+ }
+
+ if targs.Len() == 0 {
+ // t is declared within the function origin and has no type arguments.
+ //
+ // Example: This corresponds to A or B in F, but not A[int]:
+ //
+ // func F[T any]() {
+ // type A[S any] = struct{t T, s S}
+ // type B = T
+ // var x A[int]
+ // ...
+ // }
+ //
+ // This is somewhat different than *Named as *Alias cannot be created recursively.
+
+ // Copy and substitute type params.
+ var newTParams []*types.TypeParam
+ for i := 0; i < tparams.Len(); i++ {
+ cur := tparams.At(i)
+ cobj := cur.Obj()
+ cname := types.NewTypeName(cobj.Pos(), cobj.Pkg(), cobj.Name(), nil)
+ ntp := types.NewTypeParam(cname, nil)
+ subst.cache[cur] = ntp // See the comment "Note: Subtle" in subster.named.
+ newTParams = append(newTParams, ntp)
+ }
+
+ // Substitute rhs.
+ rhs := subst.typ(aliases.Rhs(t))
+
+ // Create the fresh alias.
+ obj := aliases.NewAlias(true, tname.Pos(), tname.Pkg(), tname.Name(), rhs)
+ fresh := obj.Type()
+ if fresh, ok := fresh.(*aliases.Alias); ok {
+ // TODO: assume ok when aliases are always materialized (go1.27).
+ aliases.SetTypeParams(fresh, newTParams)
+ }
+
+ // Substitute into all of the constraints after they are created.
+ for i, ntp := range newTParams {
+ bound := tparams.At(i).Constraint()
+ ntp.SetConstraint(subst.typ(bound))
+ }
+ return fresh
+ }
+
+ // t is declared within the function origin and has type arguments.
+ //
+ // Example: This corresponds to A[int] in F. Cases A and B are handled above.
+ // func F[T any]() {
+ // type A[S any] = struct{t T, s S}
+ // type B = T
+ // var x A[int]
+ // ...
+ // }
+ subOrigin := subst.typ(torigin)
+ subTArgs := subst.typelist(targs)
+ return subst.instantiate(subOrigin, subTArgs)
+}
+
+func (subst *subster) named(t *types.Named) types.Type {
+ // A Named type is a user defined type.
+ // Ignoring generics, Named types are canonical: they are identical if
+ // and only if they have the same defining symbol.
+ // Generics complicate things, both if the type definition itself is
+ // parameterized, and if the type is defined within the scope of a
+ // parameterized function. In this case, two named types are identical if
+ // and only if their identifying symbols are identical, and all type
+ // arguments bindings in scope of the named type definition (including the
+ // type parameters of the definition itself) are equivalent.
+ //
+ // Notably:
+ // 1. For type definition type T[P1 any] struct{}, T[A] and T[B] are identical
+ // only if A and B are identical.
+ // 2. Inside the generic func Fn[m any]() any { type T struct{}; return T{} },
+ // the result of Fn[A] and Fn[B] have identical type if and only if A and
+ // B are identical.
+ // 3. Both 1 and 2 could apply, such as in
+ // func F[m any]() any { type T[x any] struct{}; return T{} }
+ //
+ // A subster replaces type parameters within a function scope, and therefore must
+ // also replace free type parameters in the definitions of local types.
+ //
+ // Note: There are some detailed notes sprinkled throughout that borrow from
+	// lambda calculus notation. These contain some oversimplified math.
+ //
+ // LC: One way to think about subster is that it is a way of evaluating
+ // ((λm. E) N) as E[m:=N].
+ // Each Named type t has an object *TypeName within a scope S that binds an
+ // underlying type expression U. U can refer to symbols within S (+ S's ancestors).
+ // Let x = t.TypeParams() and A = t.TypeArgs().
+ // Each Named type t is then either:
+ // U where len(x) == 0 && len(A) == 0
+ // λx. U where len(x) != 0 && len(A) == 0
+ // ((λx. U) A) where len(x) == len(A)
+ // In each case, we will evaluate t[m:=N].
+ tparams := t.TypeParams() // x
+ targs := t.TypeArgs() // A
+
+ if !declaredWithin(t.Obj(), subst.origin) {
+ // t is declared outside of Fn[m].
+ //
+ // In this case, we can skip substituting t.Underlying().
+ // The underlying type cannot refer to the type parameters.
+ //
+ // LC: Let free(E) be the set of free type parameters in an expression E.
+ // Then whenever m ∉ free(E), then E = E[m:=N].
+ // t ∉ Scope(fn) so therefore m ∉ free(U) and m ∩ x = ∅.
+ if targs.Len() == 0 {
+ // t has no type arguments. So it does not need to be instantiated.
+ //
+ // This is the normal case in real Go code, where t is not parameterized,
+ // declared at some package scope, and m is a TypeParam from a parameterized
+ // function F[m] or method.
+ //
+ // LC: m ∉ free(A) lets us conclude m ∉ free(t). So t=t[m:=N].
+ return t
+ }
+
+ // t is declared outside of Fn[m] and has type arguments.
+ // The type arguments may contain type parameters m so
+ // substitute the type arguments, and instantiate the substituted
+ // type arguments.
+ //
+ // LC: Evaluate this as ((λx. U) A') where A' = A[m := N].
+ newTArgs := subst.typelist(targs)
+ return subst.instantiate(t.Origin(), newTArgs)
+ }
+
+ // t is declared within Fn[m].
+
+ if targs.Len() == 0 { // no type arguments?
+ assert(t == t.Origin(), "local parameterized type abstraction must be an origin type")
+
+ // t has no type arguments.
+ // The underlying type of t may contain the function's type parameters,
+ // replace these, and create a new type.
+ //
+ // Subtle: We short circuit substitution and use a newly created type in
+ // subst, i.e. cache[t]=fresh, to preemptively replace t with fresh
+ // in recursive types during traversal. This both breaks infinite cycles
+ // and allows for constructing types with the replacement applied in
+ // subst.typ(U).
+ //
+ // A new copy of the Named and Typename (and constraints) per function
+ // instantiation matches the semantics of Go, which treats all function
+ // instantiations F[N] as having distinct local types.
+ //
+ // LC: x.Len()=0 can be thought of as a special case of λx. U.
+ // LC: Evaluate (λx. U)[m:=N] as (λx'. U') where U'=U[x:=x',m:=N].
+ tname := t.Obj()
+ obj := types.NewTypeName(tname.Pos(), tname.Pkg(), tname.Name(), nil)
+ fresh := types.NewNamed(obj, nil, nil)
+ var newTParams []*types.TypeParam
+ for i := 0; i < tparams.Len(); i++ {
+ cur := tparams.At(i)
+ cobj := cur.Obj()
+ cname := types.NewTypeName(cobj.Pos(), cobj.Pkg(), cobj.Name(), nil)
+ ntp := types.NewTypeParam(cname, nil)
+ subst.cache[cur] = ntp
+ newTParams = append(newTParams, ntp)
+ }
+ fresh.SetTypeParams(newTParams)
+ subst.cache[t] = fresh
+ subst.cache[fresh] = fresh
+ fresh.SetUnderlying(subst.typ(t.Underlying()))
+ // Substitute into all of the constraints after they are created.
+ for i, ntp := range newTParams {
+ bound := tparams.At(i).Constraint()
+ ntp.SetConstraint(subst.typ(bound))
+ }
+ return fresh
+ }
+
+ // t is defined within Fn[m] and t has type arguments (an instantiation).
+ // We reduce this to the two cases above:
+ // (1) substitute the function's type parameters into t.Origin().
+ // (2) substitute t's type arguments A and instantiate the updated t.Origin() with these.
+ //
+ // LC: Evaluate ((λx. U) A)[m:=N] as (t' A') where t' = (λx. U)[m:=N] and A'=A [m:=N]
+ subOrigin := subst.typ(t.Origin())
+ subTArgs := subst.typelist(targs)
+ return subst.instantiate(subOrigin, subTArgs)
+}
+
+func (subst *subster) instantiate(orig types.Type, targs []types.Type) types.Type {
+ i, err := types.Instantiate(subst.ctxt, orig, targs, false)
+ assert(err == nil, "failed to Instantiate named (Named or Alias) type")
+ if c, _ := subst.uniqueness.At(i).(types.Type); c != nil {
+ return c.(types.Type)
+ }
+ subst.uniqueness.Set(i, i)
+ return i
+}
+
+func (subst *subster) typelist(l *types.TypeList) []types.Type {
+ res := make([]types.Type, l.Len())
+ for i := 0; i < l.Len(); i++ {
+ res[i] = subst.typ(l.At(i))
+ }
+ return res
+}
+
+func (subst *subster) signature(t *types.Signature) types.Type {
+ tparams := t.TypeParams()
+
+ // We are choosing not to support tparams.Len() > 0 until a need has been observed in practice.
+ //
+ // There are some known usages for types.Types coming from types.{Eval,CheckExpr}.
+	// To support tparams.Len() > 0, we just need to do the following [pseudocode]:
+ // targs := {subst.replacements[tparams[i]]]}; Instantiate(ctxt, t, targs, false)
+
+	assert(tparams.Len() == 0, "Substituting types.Signatures with generic functions is currently unsupported.")
+
+ // Either:
+	//   (1) a non-generic function: no type parameters to substitute, or
+	//   (2) a generic method whose receiver needs to be substituted.
+
+ // Receivers can be either:
+ // named
+ // pointer to named
+ // interface
+ // nil
+	//   The interface case is the problematic one: we need to break cycles there.
+ recv := subst.var_(t.Recv())
+ params := subst.tuple(t.Params())
+ results := subst.tuple(t.Results())
+ if recv != t.Recv() || params != t.Params() || results != t.Results() {
+ return types.NewSignatureType(recv, nil, nil, params, results, t.Variadic())
+ }
+ return t
+}
+
+// reaches returns true if a type t reaches any type t' s.t. c[t'] == true.
+// It updates c to cache results.
+//
+// reaches is currently only part of the wellFormed debug logic, and
+// in practice c is initially only type parameters. It is not currently
+// relied on in production.
+func reaches(t types.Type, c map[types.Type]bool) (res bool) {
+ if c, ok := c[t]; ok {
+ return c
+ }
+
+ // c is populated with temporary false entries as types are visited.
+	// This avoids repeat visits and breaks cycles.
+ c[t] = false
+ defer func() {
+ c[t] = res
+ }()
+
+ switch t := t.(type) {
+ case *types.TypeParam, *types.Basic:
+ return false
+ case *types.Array:
+ return reaches(t.Elem(), c)
+ case *types.Slice:
+ return reaches(t.Elem(), c)
+ case *types.Pointer:
+ return reaches(t.Elem(), c)
+ case *types.Tuple:
+ for i := 0; i < t.Len(); i++ {
+ if reaches(t.At(i).Type(), c) {
+ return true
+ }
+ }
+ case *types.Struct:
+ for i := 0; i < t.NumFields(); i++ {
+ if reaches(t.Field(i).Type(), c) {
+ return true
+ }
+ }
+ case *types.Map:
+ return reaches(t.Key(), c) || reaches(t.Elem(), c)
+ case *types.Chan:
+ return reaches(t.Elem(), c)
+ case *types.Signature:
+ if t.Recv() != nil && reaches(t.Recv().Type(), c) {
+ return true
+ }
+ return reaches(t.Params(), c) || reaches(t.Results(), c)
+ case *types.Union:
+ for i := 0; i < t.Len(); i++ {
+ if reaches(t.Term(i).Type(), c) {
+ return true
+ }
+ }
+ case *types.Interface:
+ for i := 0; i < t.NumEmbeddeds(); i++ {
+ if reaches(t.Embedded(i), c) {
+ return true
+ }
+ }
+ for i := 0; i < t.NumExplicitMethods(); i++ {
+ if reaches(t.ExplicitMethod(i).Type(), c) {
+ return true
+ }
+ }
+ case *types.Named, *aliases.Alias:
+ return reaches(t.Underlying(), c)
+ default:
+ panic("unreachable")
+ }
+ return false
+}
diff --git a/vendor/golang.org/x/tools/go/ssa/task.go b/vendor/golang.org/x/tools/go/ssa/task.go
new file mode 100644
index 0000000..5024985
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/ssa/task.go
@@ -0,0 +1,103 @@
+// Copyright 2024 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssa
+
+import (
+ "sync/atomic"
+)
+
+// Each task has two states: it is initially "active",
+// and transitions to "done".
+//
+// tasks form a directed graph. An edge from x to y (with y in x.edges)
+// indicates that the task x waits on the task y to be done.
+// Cycles are permitted.
+//
+// Calling x.wait() blocks the calling goroutine until task x,
+// and all the tasks transitively reachable from x are done.
+//
+// The nil *task is always considered done.
+type task struct {
+ done chan unit // close when the task is done.
+ edges map[*task]unit // set of predecessors of this task.
+ transitive atomic.Bool // true once it is known all predecessors are done.
+}
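+
+// A minimal usage sketch (illustrative only; the package constructs tasks
+// elsewhere):
+//
+// x := &task{done: make(chan unit)}
+// y := &task{done: make(chan unit)}
+// x.addEdge(y) // x.wait() must not return before y is done
+// y.markDone()
+// x.markDone()
+// x.wait() // returns once x and everything reachable from x is done
+//
+// Calling x.wait() before both markDone calls would block the calling goroutine.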
+
+func (x *task) isTransitivelyDone() bool { return x == nil || x.transitive.Load() }
+
+// addEdge creates an edge from x to y, indicating that
+// x.wait() will not return before y is done.
+// All calls to x.addEdge(...) should happen before x.markDone().
+func (x *task) addEdge(y *task) {
+ if x == y || y.isTransitivelyDone() {
+ return // no work remaining
+ }
+
+ // heuristic done check
+ select {
+ case <-x.done:
+ panic("cannot add an edge to a done task")
+ default:
+ }
+
+ if x.edges == nil {
+ x.edges = make(map[*task]unit)
+ }
+ x.edges[y] = unit{}
+}
+
+// markDone changes the task's state to done.
+func (x *task) markDone() {
+ if x != nil {
+ close(x.done)
+ }
+}
+
+// wait blocks until x and all the tasks it can reach through edges are done.
+func (x *task) wait() {
+ if x.isTransitivelyDone() {
+ return // already known to be done. Skip allocations.
+ }
+
+ // Use BFS to wait on u.done to be closed, for all u transitively
+ // reachable from x via edges.
+ //
+ // This work can be repeated by multiple workers doing wait().
+ //
+ // Note: Tarjan's SCC algorithm is able to mark SCCs as transitively done
+ // as soon as the SCC has been visited. This is theoretically faster, but is
+	// a more complex algorithm. Until we have evidence that we need the more
+	// complex algorithm, the simpler BFS algorithm is used.
+ //
+	// In Go 1.23, ssa/TestStdlib reaches <=3 *tasks per wait() in most schedules.
+ // On some schedules, there is a cycle building net/http and internal/trace/testtrace
+ // due to slices functions.
+ work := []*task{x}
+ enqueued := map[*task]unit{x: {}}
+ for i := 0; i < len(work); i++ {
+ u := work[i]
+ if u.isTransitivelyDone() { // already transitively done
+ work[i] = nil
+ continue
+ }
+ <-u.done // wait for u to be marked done.
+
+ for v := range u.edges {
+ if _, ok := enqueued[v]; !ok {
+ enqueued[v] = unit{}
+ work = append(work, v)
+ }
+ }
+ }
+
+	// work is transitively closed over dependencies.
+	// Every u in work is done (or transitively done and was skipped),
+	// so every u in work is transitively done.
+	for _, u := range work {
+		if u != nil {
+			u.transitive.Store(true)
+ }
+ }
+}
diff --git a/vendor/golang.org/x/tools/go/ssa/util.go b/vendor/golang.org/x/tools/go/ssa/util.go
new file mode 100644
index 0000000..549c9c8
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/ssa/util.go
@@ -0,0 +1,430 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssa
+
+// This file defines a number of miscellaneous utility functions.
+
+import (
+ "fmt"
+ "go/ast"
+ "go/token"
+ "go/types"
+ "io"
+ "os"
+ "sync"
+
+ "golang.org/x/tools/go/ast/astutil"
+ "golang.org/x/tools/go/types/typeutil"
+ "golang.org/x/tools/internal/aliases"
+ "golang.org/x/tools/internal/typeparams"
+ "golang.org/x/tools/internal/typesinternal"
+)
+
+type unit struct{}
+
+//// Sanity checking utilities
+
+// assert panics with the message msg if p is false.
+// Avoid combining with expensive string formatting.
+func assert(p bool, msg string) {
+ if !p {
+ panic(msg)
+ }
+}
+
+//// AST utilities
+
+func unparen(e ast.Expr) ast.Expr { return astutil.Unparen(e) }
+
+// isBlankIdent returns true iff e is an Ident with name "_".
+// They have no associated types.Object, and thus no type.
+func isBlankIdent(e ast.Expr) bool {
+ id, ok := e.(*ast.Ident)
+ return ok && id.Name == "_"
+}
+
+// rangePosition is the position to give for the `range` token in a RangeStmt.
+var rangePosition = func(rng *ast.RangeStmt) token.Pos {
+ // Before 1.20, this is unreachable.
+ // rng.For is a close, but incorrect position.
+ return rng.For
+}
+
+//// Type utilities. Some of these belong in go/types.
+
+// isNonTypeParamInterface reports whether t is an interface type but not a type parameter.
+func isNonTypeParamInterface(t types.Type) bool {
+ return !typeparams.IsTypeParam(t) && types.IsInterface(t)
+}
+
+// isBasic reports whether t is a basic type.
+// t is assumed to be an Underlying type (not Named or Alias).
+func isBasic(t types.Type) bool {
+ _, ok := t.(*types.Basic)
+ return ok
+}
+
+// isString reports whether t is exactly a string type.
+// t is assumed to be an Underlying type (not Named or Alias).
+func isString(t types.Type) bool {
+ basic, ok := t.(*types.Basic)
+ return ok && basic.Info()&types.IsString != 0
+}
+
+// isByteSlice reports whether t is of the form []~bytes.
+// t is assumed to be an Underlying type (not Named or Alias).
+func isByteSlice(t types.Type) bool {
+ if b, ok := t.(*types.Slice); ok {
+ e, _ := b.Elem().Underlying().(*types.Basic)
+ return e != nil && e.Kind() == types.Byte
+ }
+ return false
+}
+
+// isRuneSlice reports whether t is of the form []~runes.
+// t is assumed to be an Underlying type (not Named or Alias).
+func isRuneSlice(t types.Type) bool {
+ if b, ok := t.(*types.Slice); ok {
+ e, _ := b.Elem().Underlying().(*types.Basic)
+ return e != nil && e.Kind() == types.Rune
+ }
+ return false
+}
+
+// isBasicConvTypes returns true when a type set can be
+// one side of a Convert operation. This is when:
+// - All are basic, []byte, or []rune.
+// - At least 1 is basic.
+// - At most 1 is []byte or []rune.
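+// For instance, a term list containing string and []byte satisfies all three
+// rules, while one containing only []byte and []rune does not (no basic term).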
+func isBasicConvTypes(tset termList) bool {
+ basics := 0
+ all := underIs(tset, func(t types.Type) bool {
+ if isBasic(t) {
+ basics++
+ return true
+ }
+ return isByteSlice(t) || isRuneSlice(t)
+ })
+ return all && basics >= 1 && tset.Len()-basics <= 1
+}
+
+// isPointer reports whether t's underlying type is a pointer.
+func isPointer(t types.Type) bool {
+ return is[*types.Pointer](t.Underlying())
+}
+
+// isPointerCore reports whether t's core type is a pointer.
+//
+// (Most pointer manipulation is related to receivers, in which case
+// isPointer is appropriate; those callers can use isPointer(t).)
+func isPointerCore(t types.Type) bool {
+ return is[*types.Pointer](typeparams.CoreType(t))
+}
+
+func is[T any](x any) bool {
+ _, ok := x.(T)
+ return ok
+}
+
+// recvType returns the receiver type of method obj.
+func recvType(obj *types.Func) types.Type {
+ return obj.Type().(*types.Signature).Recv().Type()
+}
+
+// fieldOf returns the index'th field of the (core type of) a struct type;
+// otherwise returns nil.
+func fieldOf(typ types.Type, index int) *types.Var {
+ if st, ok := typeparams.CoreType(typ).(*types.Struct); ok {
+ if 0 <= index && index < st.NumFields() {
+ return st.Field(index)
+ }
+ }
+ return nil
+}
+
+// isUntyped reports whether typ is the type of an untyped constant.
+func isUntyped(typ types.Type) bool {
+ // No Underlying/Unalias: untyped constant types cannot be Named or Alias.
+ b, ok := typ.(*types.Basic)
+ return ok && b.Info()&types.IsUntyped != 0
+}
+
+// declaredWithin reports whether an object is declared within a function.
+//
+// obj must not be a method or a field.
+func declaredWithin(obj types.Object, fn *types.Func) bool {
+ if obj.Pos() != token.NoPos {
+ return fn.Scope().Contains(obj.Pos()) // trust the positions if they exist.
+ }
+ if fn.Pkg() != obj.Pkg() {
+ return false // fast path for different packages
+ }
+
+ // Traverse Parent() scopes for fn.Scope().
+ for p := obj.Parent(); p != nil; p = p.Parent() {
+ if p == fn.Scope() {
+ return true
+ }
+ }
+ return false
+}
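+
+// For example, given
+//
+// func F() { type T int }
+//
+// declaredWithin reports true for the *types.TypeName of T relative to F,
+// and false for any package-level object.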
+
+// logStack prints the formatted "start" message to stderr and
+// returns a closure that prints the corresponding "end" message.
+// Call using 'defer logStack(...)()' to show builder stack on panic.
+// Don't forget trailing parens!
+func logStack(format string, args ...interface{}) func() {
+ msg := fmt.Sprintf(format, args...)
+ io.WriteString(os.Stderr, msg)
+ io.WriteString(os.Stderr, "\n")
+ return func() {
+ io.WriteString(os.Stderr, msg)
+ io.WriteString(os.Stderr, " end\n")
+ }
+}
+
+// newVar creates a 'var' for use in a types.Tuple.
+func newVar(name string, typ types.Type) *types.Var {
+ return types.NewParam(token.NoPos, nil, name, typ)
+}
+
+// anonVar creates an anonymous 'var' for use in a types.Tuple.
+func anonVar(typ types.Type) *types.Var {
+ return newVar("", typ)
+}
+
+var lenResults = types.NewTuple(anonVar(tInt))
+
+// makeLen returns the len builtin specialized to type func(T)int.
+func makeLen(T types.Type) *Builtin {
+ lenParams := types.NewTuple(anonVar(T))
+ return &Builtin{
+ name: "len",
+ sig: types.NewSignature(nil, lenParams, lenResults, false),
+ }
+}
+
+// receiverTypeArgs returns the type arguments to a method's receiver.
+// Returns an empty list if the receiver does not have type arguments.
+func receiverTypeArgs(method *types.Func) []types.Type {
+ recv := method.Type().(*types.Signature).Recv()
+ _, named := typesinternal.ReceiverNamed(recv)
+ if named == nil {
+ return nil // recv is anonymous struct/interface
+ }
+ ts := named.TypeArgs()
+ if ts.Len() == 0 {
+ return nil
+ }
+ targs := make([]types.Type, ts.Len())
+ for i := 0; i < ts.Len(); i++ {
+ targs[i] = ts.At(i)
+ }
+ return targs
+}
+
+// recvAsFirstArg takes a method signature and returns a function
+// signature with receiver as the first parameter.
+func recvAsFirstArg(sig *types.Signature) *types.Signature {
+ params := make([]*types.Var, 0, 1+sig.Params().Len())
+ params = append(params, sig.Recv())
+ for i := 0; i < sig.Params().Len(); i++ {
+ params = append(params, sig.Params().At(i))
+ }
+ return types.NewSignatureType(nil, nil, nil, types.NewTuple(params...), sig.Results(), sig.Variadic())
+}
+
+// instance returns whether an expression is a simple or qualified identifier
+// that is a generic instantiation.
+func instance(info *types.Info, expr ast.Expr) bool {
+ // Compare the logic here against go/types.instantiatedIdent,
+ // which also handles *IndexExpr and *IndexListExpr.
+ var id *ast.Ident
+ switch x := expr.(type) {
+ case *ast.Ident:
+ id = x
+ case *ast.SelectorExpr:
+ id = x.Sel
+ default:
+ return false
+ }
+ _, ok := info.Instances[id]
+ return ok
+}
+
+// instanceArgs returns the Instance[id].TypeArgs as a slice.
+func instanceArgs(info *types.Info, id *ast.Ident) []types.Type {
+ targList := info.Instances[id].TypeArgs
+ if targList == nil {
+ return nil
+ }
+
+ targs := make([]types.Type, targList.Len())
+ for i, n := 0, targList.Len(); i < n; i++ {
+ targs[i] = targList.At(i)
+ }
+ return targs
+}
+
+// Mapping of a type T to a canonical instance C s.t. types.Identical(T, C).
+// Thread-safe.
+type canonizer struct {
+ mu sync.Mutex
+ types typeutil.Map // map from type to a canonical instance
+ lists typeListMap // map from a list of types to a canonical instance
+}
+
+func newCanonizer() *canonizer {
+ c := &canonizer{}
+ h := typeutil.MakeHasher()
+ c.types.SetHasher(h)
+ c.lists.hasher = h
+ return c
+}
+
+// List returns a canonical representative of a list of types.
+// The representative of the empty list is nil.
+func (c *canonizer) List(ts []types.Type) *typeList {
+ if len(ts) == 0 {
+ return nil
+ }
+
+ unaliasAll := func(ts []types.Type) []types.Type {
+ // Is there some top level alias?
+ var found bool
+ for _, t := range ts {
+ if _, ok := t.(*aliases.Alias); ok {
+ found = true
+ break
+ }
+ }
+ if !found {
+ return ts // no top level alias
+ }
+
+ cp := make([]types.Type, len(ts)) // copy with top level aliases removed.
+ for i, t := range ts {
+ cp[i] = aliases.Unalias(t)
+ }
+ return cp
+ }
+ l := unaliasAll(ts)
+
+ c.mu.Lock()
+ defer c.mu.Unlock()
+ return c.lists.rep(l)
+}
+
+// Type returns a canonical representative of type T.
+// Removes top-level aliases.
+//
+// For performance reasons, the canonical instance is order-dependent,
+// and may contain deeply nested aliases.
+func (c *canonizer) Type(T types.Type) types.Type {
+ T = aliases.Unalias(T) // remove the top level alias.
+
+ c.mu.Lock()
+ defer c.mu.Unlock()
+
+ if r := c.types.At(T); r != nil {
+ return r.(types.Type)
+ }
+ c.types.Set(T, T)
+ return T
+}
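+
+// A rough sketch of the invariant this provides (illustrative only): two
+// structurally identical but distinct type values collapse to a single
+// representative, so pointer equality can then stand in for types.Identical.
+//
+// c := newCanonizer()
+// a := types.NewSlice(types.Typ[types.Int])
+// b := types.NewSlice(types.Typ[types.Int])
+// r1, r2 := c.Type(a), c.Type(b)
+// // types.Identical(a, b) holds, and r1 == r2 (both are the representative
+// // recorded first, here a).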
+
+// A type for representing a canonized list of types.
+type typeList []types.Type
+
+func (l *typeList) identical(ts []types.Type) bool {
+ if l == nil {
+ return len(ts) == 0
+ }
+ n := len(*l)
+ if len(ts) != n {
+ return false
+ }
+ for i, left := range *l {
+ right := ts[i]
+ if !types.Identical(left, right) {
+ return false
+ }
+ }
+ return true
+}
+
+type typeListMap struct {
+ hasher typeutil.Hasher
+ buckets map[uint32][]*typeList
+}
+
+// rep returns a canonical representative of a slice of types.
+func (m *typeListMap) rep(ts []types.Type) *typeList {
+ if m == nil || len(ts) == 0 {
+ return nil
+ }
+
+ if m.buckets == nil {
+ m.buckets = make(map[uint32][]*typeList)
+ }
+
+ h := m.hash(ts)
+ bucket := m.buckets[h]
+ for _, l := range bucket {
+ if l.identical(ts) {
+ return l
+ }
+ }
+
+ // not present. create a representative.
+ cp := make(typeList, len(ts))
+ copy(cp, ts)
+ rep := &cp
+
+ m.buckets[h] = append(bucket, rep)
+ return rep
+}
+
+func (m *typeListMap) hash(ts []types.Type) uint32 {
+ if m == nil {
+ return 0
+ }
+ // Some smallish prime far away from typeutil.Hash.
+ n := len(ts)
+ h := uint32(13619) + 2*uint32(n)
+ for i := 0; i < n; i++ {
+ h += 3 * m.hasher.Hash(ts[i])
+ }
+ return h
+}
+
+// instantiateMethod instantiates m with targs and returns a canonical representative for this method.
+func (canon *canonizer) instantiateMethod(m *types.Func, targs []types.Type, ctxt *types.Context) *types.Func {
+ recv := recvType(m)
+ if p, ok := aliases.Unalias(recv).(*types.Pointer); ok {
+ recv = p.Elem()
+ }
+ named := aliases.Unalias(recv).(*types.Named)
+ inst, err := types.Instantiate(ctxt, named.Origin(), targs, false)
+ if err != nil {
+ panic(err)
+ }
+ rep := canon.Type(inst)
+ obj, _, _ := types.LookupFieldOrMethod(rep, true, m.Pkg(), m.Name())
+ return obj.(*types.Func)
+}
+
+// Exposed to ssautil using the linkname hack.
+func isSyntactic(pkg *Package) bool { return pkg.syntax }
+
+// mapValues returns a new unordered slice of the map's values.
+func mapValues[K comparable, V any](m map[K]V) []V {
+ vals := make([]V, 0, len(m))
+ for _, fn := range m {
+ vals = append(vals, fn)
+ }
+	return vals
+}
diff --git a/vendor/golang.org/x/tools/go/ssa/util_go120.go b/vendor/golang.org/x/tools/go/ssa/util_go120.go
new file mode 100644
index 0000000..9e8ea87
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/ssa/util_go120.go
@@ -0,0 +1,17 @@
+// Copyright 2024 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build go1.20
+// +build go1.20
+
+package ssa
+
+import (
+ "go/ast"
+ "go/token"
+)
+
+func init() {
+ rangePosition = func(rng *ast.RangeStmt) token.Pos { return rng.Range }
+}
diff --git a/vendor/golang.org/x/tools/go/ssa/wrappers.go b/vendor/golang.org/x/tools/go/ssa/wrappers.go
new file mode 100644
index 0000000..d09b4f2
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/ssa/wrappers.go
@@ -0,0 +1,348 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssa
+
+// This file defines synthesis of Functions that delegate to declared
+// methods; they come in three kinds:
+//
+// (1) wrappers: methods that wrap declared methods, performing
+// implicit pointer indirections and embedded field selections.
+//
+// (2) thunks: funcs that wrap declared methods. Like wrappers,
+// thunks perform indirections and field selections. The thunk's
+// first parameter is used as the receiver for the method call.
+//
+// (3) bounds: funcs that wrap declared methods. The bound's sole
+// free variable, supplied by a closure, is used as the receiver
+// for the method call. No indirections or field selections are
+// performed since they can be done before the call.
+
+import (
+ "fmt"
+
+ "go/token"
+ "go/types"
+
+ "golang.org/x/tools/internal/typeparams"
+)
+
+// -- wrappers -----------------------------------------------------------
+
+// createWrapper returns a synthetic method that delegates to the
+// declared method denoted by meth.Obj(), first performing any
+// necessary pointer indirections or field selections implied by meth.
+//
+// The resulting method's receiver type is meth.Recv().
+//
+// This function is versatile but quite subtle! Consider the
+// following axes of variation when making changes:
+// - optional receiver indirection
+// - optional implicit field selections
+// - meth.Obj() may denote a concrete or an interface method
+// - the result may be a thunk or a wrapper.
+func createWrapper(prog *Program, sel *selection) *Function {
+ obj := sel.obj.(*types.Func) // the declared function
+ sig := sel.typ.(*types.Signature) // type of this wrapper
+
+ var recv *types.Var // wrapper's receiver or thunk's params[0]
+ name := obj.Name()
+ var description string
+ if sel.kind == types.MethodExpr {
+ name += "$thunk"
+ description = "thunk"
+ recv = sig.Params().At(0)
+ } else {
+ description = "wrapper"
+ recv = sig.Recv()
+ }
+
+ description = fmt.Sprintf("%s for %s", description, sel.obj)
+ if prog.mode&LogSource != 0 {
+ defer logStack("create %s to (%s)", description, recv.Type())()
+ }
+ /* method wrapper */
+ return &Function{
+ name: name,
+ method: sel,
+ object: obj,
+ Signature: sig,
+ Synthetic: description,
+ Prog: prog,
+ pos: obj.Pos(),
+ // wrappers have no syntax
+ build: (*builder).buildWrapper,
+ syntax: nil,
+ info: nil,
+ goversion: "",
+ }
+}
+
+// buildWrapper builds fn.Body for a method wrapper.
+func (b *builder) buildWrapper(fn *Function) {
+ var recv *types.Var // wrapper's receiver or thunk's params[0]
+ var start int // first regular param
+ if fn.method.kind == types.MethodExpr {
+ recv = fn.Signature.Params().At(0)
+ start = 1
+ } else {
+ recv = fn.Signature.Recv()
+ }
+
+ fn.startBody()
+ fn.addSpilledParam(recv)
+ createParams(fn, start)
+
+ indices := fn.method.index
+
+ var v Value = fn.Locals[0] // spilled receiver
+ if isPointer(fn.method.recv) {
+ v = emitLoad(fn, v)
+
+ // For simple indirection wrappers, perform an informative nil-check:
+ // "value method (T).f called using nil *T pointer"
+ if len(indices) == 1 && !isPointer(recvType(fn.object)) {
+ var c Call
+ c.Call.Value = &Builtin{
+ name: "ssa:wrapnilchk",
+ sig: types.NewSignature(nil,
+ types.NewTuple(anonVar(fn.method.recv), anonVar(tString), anonVar(tString)),
+ types.NewTuple(anonVar(fn.method.recv)), false),
+ }
+ c.Call.Args = []Value{
+ v,
+ stringConst(typeparams.MustDeref(fn.method.recv).String()),
+ stringConst(fn.method.obj.Name()),
+ }
+ c.setType(v.Type())
+ v = fn.emit(&c)
+ }
+ }
+
+ // Invariant: v is a pointer, either
+ // value of *A receiver param, or
+ // address of A spilled receiver.
+
+ // We use pointer arithmetic (FieldAddr possibly followed by
+ // Load) in preference to value extraction (Field possibly
+ // preceded by Load).
+
+ v = emitImplicitSelections(fn, v, indices[:len(indices)-1], token.NoPos)
+
+ // Invariant: v is a pointer, either
+ // value of implicit *C field, or
+ // address of implicit C field.
+
+ var c Call
+ if r := recvType(fn.object); !types.IsInterface(r) { // concrete method
+ if !isPointer(r) {
+ v = emitLoad(fn, v)
+ }
+ c.Call.Value = fn.Prog.objectMethod(fn.object, b)
+ c.Call.Args = append(c.Call.Args, v)
+ } else {
+ c.Call.Method = fn.object
+ c.Call.Value = emitLoad(fn, v) // interface (possibly a typeparam)
+ }
+ for _, arg := range fn.Params[1:] {
+ c.Call.Args = append(c.Call.Args, arg)
+ }
+ emitTailCall(fn, &c)
+ fn.finishBody()
+}
+
+// createParams creates parameters for wrapper method fn based on its
+// Signature.Params, which do not include the receiver.
+// start is the index of the first regular parameter to use.
+func createParams(fn *Function, start int) {
+ tparams := fn.Signature.Params()
+ for i, n := start, tparams.Len(); i < n; i++ {
+ fn.addParamVar(tparams.At(i))
+ }
+}
+
+// -- bounds -----------------------------------------------------------
+
+// createBound returns a bound method wrapper (or "bound"), a synthetic
+// function that delegates to a concrete or interface method denoted
+// by obj. The resulting function has no receiver, but has one free
+// variable which will be used as the method's receiver in the
+// tail-call.
+//
+// Use MakeClosure with such a wrapper to construct a bound method
+// closure. e.g.:
+//
+// type T int or: type T interface { meth() }
+// func (t T) meth()
+// var t T
+// f := t.meth
+// f() // calls t.meth()
+//
+// f is a closure of a synthetic wrapper defined as if by:
+//
+// f := func() { return t.meth() }
+//
+// Unlike createWrapper, createBound need perform no indirection or field
+// selections because that can be done before the closure is
+// constructed.
+func createBound(prog *Program, obj *types.Func) *Function {
+ description := fmt.Sprintf("bound method wrapper for %s", obj)
+ if prog.mode&LogSource != 0 {
+ defer logStack("%s", description)()
+ }
+ /* bound method wrapper */
+ fn := &Function{
+ name: obj.Name() + "$bound",
+ object: obj,
+ Signature: changeRecv(obj.Type().(*types.Signature), nil), // drop receiver
+ Synthetic: description,
+ Prog: prog,
+ pos: obj.Pos(),
+ // wrappers have no syntax
+ build: (*builder).buildBound,
+ syntax: nil,
+ info: nil,
+ goversion: "",
+ }
+ fn.FreeVars = []*FreeVar{{name: "recv", typ: recvType(obj), parent: fn}} // (cyclic)
+ return fn
+}
+
+// buildBound builds fn.Body for a bound method closure.
+func (b *builder) buildBound(fn *Function) {
+ fn.startBody()
+ createParams(fn, 0)
+ var c Call
+
+ recv := fn.FreeVars[0]
+ if !types.IsInterface(recvType(fn.object)) { // concrete
+ c.Call.Value = fn.Prog.objectMethod(fn.object, b)
+ c.Call.Args = []Value{recv}
+ } else {
+ c.Call.Method = fn.object
+ c.Call.Value = recv // interface (possibly a typeparam)
+ }
+ for _, arg := range fn.Params {
+ c.Call.Args = append(c.Call.Args, arg)
+ }
+ emitTailCall(fn, &c)
+ fn.finishBody()
+}
+
+// -- thunks -----------------------------------------------------------
+
+// createThunk returns a thunk, a synthetic function that delegates to a
+// concrete or interface method denoted by sel.obj. The resulting
+// function has no receiver, but has an additional (first) regular
+// parameter.
+//
+// Precondition: sel.kind == types.MethodExpr.
+//
+// type T int or: type T interface { meth() }
+// func (t T) meth()
+// f := T.meth
+// var t T
+// f(t) // calls t.meth()
+//
+// f is a synthetic wrapper defined as if by:
+//
+// f := func(t T) { return t.meth() }
+func createThunk(prog *Program, sel *selection) *Function {
+ if sel.kind != types.MethodExpr {
+ panic(sel)
+ }
+
+ fn := createWrapper(prog, sel)
+ if fn.Signature.Recv() != nil {
+ panic(fn) // unexpected receiver
+ }
+
+ return fn
+}
+
+func changeRecv(s *types.Signature, recv *types.Var) *types.Signature {
+ return types.NewSignature(recv, s.Params(), s.Results(), s.Variadic())
+}
+
+// A local version of *types.Selection.
+// Needed for some additional control, such as creating a MethodExpr for an instantiation.
+type selection struct {
+ kind types.SelectionKind
+ recv types.Type
+ typ types.Type
+ obj types.Object
+ index []int
+ indirect bool
+}
+
+func toSelection(sel *types.Selection) *selection {
+ return &selection{
+ kind: sel.Kind(),
+ recv: sel.Recv(),
+ typ: sel.Type(),
+ obj: sel.Obj(),
+ index: sel.Index(),
+ indirect: sel.Indirect(),
+ }
+}
+
+// -- instantiations --------------------------------------------------
+
+// buildInstantiationWrapper builds the body of an instantiation
+// wrapper fn. The body calls the original generic function,
+// bracketed by ChangeType conversions on its arguments and results.
+func (b *builder) buildInstantiationWrapper(fn *Function) {
+ orig := fn.topLevelOrigin
+ sig := fn.Signature
+
+ fn.startBody()
+ if sig.Recv() != nil {
+ fn.addParamVar(sig.Recv())
+ }
+ createParams(fn, 0)
+
+	// Create the body: add a call to the original generic function,
+	// inserting type conversions between the wrapper's arguments/results
+	// and the generic function's parameters/results as needed.
+ var c Call
+ c.Call.Value = orig
+ if res := orig.Signature.Results(); res.Len() == 1 {
+ c.typ = res.At(0).Type()
+ } else {
+ c.typ = res
+ }
+
+ // parameter of instance becomes an argument to the call
+ // to the original generic function.
+ argOffset := 0
+ for i, arg := range fn.Params {
+ var typ types.Type
+ if i == 0 && sig.Recv() != nil {
+ typ = orig.Signature.Recv().Type()
+ argOffset = 1
+ } else {
+ typ = orig.Signature.Params().At(i - argOffset).Type()
+ }
+ c.Call.Args = append(c.Call.Args, emitTypeCoercion(fn, arg, typ))
+ }
+
+ results := fn.emit(&c)
+ var ret Return
+ switch res := sig.Results(); res.Len() {
+ case 0:
+ // no results, do nothing.
+ case 1:
+ ret.Results = []Value{emitTypeCoercion(fn, results, res.At(0).Type())}
+ default:
+ for i := 0; i < sig.Results().Len(); i++ {
+ v := emitExtract(fn, results, i)
+ ret.Results = append(ret.Results, emitTypeCoercion(fn, v, res.At(i).Type()))
+ }
+ }
+
+ fn.emit(&ret)
+ fn.currentBlock = nil
+
+ fn.finishBody()
+}
diff --git a/vendor/golang.org/x/tools/go/types/objectpath/objectpath.go b/vendor/golang.org/x/tools/go/types/objectpath/objectpath.go
new file mode 100644
index 0000000..9ada177
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/types/objectpath/objectpath.go
@@ -0,0 +1,788 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package objectpath defines a naming scheme for types.Objects
+// (that is, named entities in Go programs) relative to their enclosing
+// package.
+//
+// Type-checker objects are canonical, so they are usually identified by
+// their address in memory (a pointer), but a pointer has meaning only
+// within one address space. By contrast, objectpath names allow the
+// identity of an object to be sent from one program to another,
+// establishing a correspondence between types.Object variables that are
+// distinct but logically equivalent.
+//
+// A single object may have multiple paths. In this example,
+//
+// type A struct{ X int }
+// type B A
+//
+// the field X has two paths due to its membership of both A and B.
+// The For(obj) function always returns one of these paths, arbitrarily
+// but consistently.
+package objectpath
+
+import (
+ "fmt"
+ "go/types"
+ "strconv"
+ "strings"
+
+ "golang.org/x/tools/internal/aliases"
+ "golang.org/x/tools/internal/typesinternal"
+)
+
+// TODO(adonovan): think about generic aliases.
+
+// A Path is an opaque name that identifies a types.Object
+// relative to its package. Conceptually, the name consists of a
+// sequence of destructuring operations applied to the package scope
+// to obtain the original object.
+// The name does not include the package itself.
+type Path string
+
+// Encoding
+//
+// An object path is a textual and (with training) human-readable encoding
+// of a sequence of destructuring operators, starting from a types.Package.
+// The sequences represent a path through the package/object/type graph.
+// We classify these operators by their type:
+//
+// PO package->object Package.Scope.Lookup
+// OT object->type Object.Type
+// TT type->type Type.{Elem,Key,{,{,Recv}Type}Params,Results,Underlying,Rhs} [EKPRUTrCa]
+// TO type->object Type.{At,Field,Method,Obj} [AFMO]
+//
+// All valid paths start with a package and end at an object
+// and thus may be defined by the regular language:
+//
+// objectpath = PO (OT TT* TO)*
+//
+// The concrete encoding follows directly:
+// - The only PO operator is Package.Scope.Lookup, which requires an identifier.
+// - The only OT operator is Object.Type,
+// which we encode as '.' because dot cannot appear in an identifier.
+// - The TT operators are encoded as [EKPRUTrCa];
+// two of these ({,Recv}TypeParams) require an integer operand,
+// which is encoded as a string of decimal digits.
+// - The TO operators are encoded as [AFMO];
+// three of these (At,Field,Method) require an integer operand,
+// which is encoded as a string of decimal digits.
+// These indices are stable across different representations
+// of the same package, even source and export data.
+// The indices used are implementation specific and may not correspond to
+// the argument to the go/types function.
+//
+// In the example below,
+//
+// package p
+//
+// type T interface {
+// f() (a string, b struct{ X int })
+// }
+//
+// field X has the path "T.UM0.RA1.F0",
+// representing the following sequence of operations:
+//
+// p.Lookup("T") T
+// .Type().Underlying().Method(0). f
+// .Type().Results().At(1) b
+// .Type().Field(0) X
+//
+// The encoding is not maximally compact---every R or P is
+// followed by an A, for example---but this simplifies the
+// encoder and decoder.
+const (
+ // object->type operators
+ opType = '.' // .Type() (Object)
+
+ // type->type operators
+ opElem = 'E' // .Elem() (Pointer, Slice, Array, Chan, Map)
+ opKey = 'K' // .Key() (Map)
+ opParams = 'P' // .Params() (Signature)
+ opResults = 'R' // .Results() (Signature)
+ opUnderlying = 'U' // .Underlying() (Named)
+ opTypeParam = 'T' // .TypeParams.At(i) (Named, Signature)
+ opRecvTypeParam = 'r' // .RecvTypeParams.At(i) (Signature)
+ opConstraint = 'C' // .Constraint() (TypeParam)
+ opRhs = 'a' // .Rhs() (Alias)
+
+ // type->object operators
+ opAt = 'A' // .At(i) (Tuple)
+ opField = 'F' // .Field(i) (Struct)
+ opMethod = 'M' // .Method(i) (Named or Interface; not Struct: "promoted" names are ignored)
+ opObj = 'O' // .Obj() (Named, TypeParam)
+)
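+
+// As an informal sketch of the round trip (the exact path string depends on
+// the package's declarations):
+//
+// p, _ := objectpath.For(obj) // e.g. Path("T.UM0.RA1.F0") for the field X above
+// o, _ := objectpath.Object(pkg, p) // an object logically equivalent to obj
+//
+// where pkg is a *types.Package describing the same package as obj.Pkg().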
+
+// For is equivalent to new(Encoder).For(obj).
+//
+// It may be more efficient to reuse a single Encoder across several calls.
+func For(obj types.Object) (Path, error) {
+ return new(Encoder).For(obj)
+}
+
+// An Encoder amortizes the cost of encoding the paths of multiple objects.
+// The zero value of an Encoder is ready to use.
+type Encoder struct {
+ scopeMemo map[*types.Scope][]types.Object // memoization of scopeObjects
+}
+
+// For returns the path to an object relative to its package,
+// or an error if the object is not accessible from the package's Scope.
+//
+// The For function guarantees to return a path only for the following objects:
+// - package-level types
+// - exported package-level non-types
+// - methods
+// - parameter and result variables
+// - struct fields
+// These objects are sufficient to define the API of their package.
+// The objects described by a package's export data are drawn from this set.
+//
+// The set of objects accessible from a package's Scope depends on
+// whether the package was produced by type-checking syntax, or
+// reading export data; the latter may have a smaller Scope since
+// export data trims objects that are not reachable from an exported
+// declaration. For example, the For function will return a path for
+// an exported method of an unexported type that is not reachable
+// from any public declaration; this path will cause the Object
+// function to fail if called on a package loaded from export data.
+// TODO(adonovan): is this a bug or feature? Should this package
+// compute accessibility in the same way?
+//
+// For does not return a path for predeclared names, imported package
+// names, local names, and unexported package-level names (except
+// types).
+//
+// Example: given this definition,
+//
+// package p
+//
+// type T interface {
+// f() (a string, b struct{ X int })
+// }
+//
+// For(X) would return a path that denotes the following sequence of operations:
+//
+// p.Scope().Lookup("T") (TypeName T)
+// .Type().Underlying().Method(0). (method Func f)
+// .Type().Results().At(1) (field Var b)
+// .Type().Field(0) (field Var X)
+//
+// where p is the package (*types.Package) to which X belongs.
+func (enc *Encoder) For(obj types.Object) (Path, error) {
+ pkg := obj.Pkg()
+
+ // This table lists the cases of interest.
+ //
+ // Object Action
+ // ------ ------
+ // nil reject
+ // builtin reject
+ // pkgname reject
+ // label reject
+ // var
+ // package-level accept
+ // func param/result accept
+ // local reject
+ // struct field accept
+ // const
+ // package-level accept
+ // local reject
+ // func
+ // package-level accept
+ // init functions reject
+ // concrete method accept
+ // interface method accept
+ // type
+ // package-level accept
+ // local reject
+ //
+ // The only accessible package-level objects are members of pkg itself.
+ //
+ // The cases are handled in four steps:
+ //
+ // 1. reject nil and builtin
+ // 2. accept package-level objects
+ // 3. reject obviously invalid objects
+ // 4. search the API for the path to the param/result/field/method.
+
+ // 1. reference to nil or builtin?
+ if pkg == nil {
+ return "", fmt.Errorf("predeclared %s has no path", obj)
+ }
+ scope := pkg.Scope()
+
+ // 2. package-level object?
+ if scope.Lookup(obj.Name()) == obj {
+ // Only exported objects (and non-exported types) have a path.
+ // Non-exported types may be referenced by other objects.
+ if _, ok := obj.(*types.TypeName); !ok && !obj.Exported() {
+ return "", fmt.Errorf("no path for non-exported %v", obj)
+ }
+ return Path(obj.Name()), nil
+ }
+
+ // 3. Not a package-level object.
+ // Reject obviously non-viable cases.
+ switch obj := obj.(type) {
+ case *types.TypeName:
+ if _, ok := aliases.Unalias(obj.Type()).(*types.TypeParam); !ok {
+ // With the exception of type parameters, only package-level type names
+ // have a path.
+ return "", fmt.Errorf("no path for %v", obj)
+ }
+ case *types.Const, // Only package-level constants have a path.
+ *types.Label, // Labels are function-local.
+ *types.PkgName: // PkgNames are file-local.
+ return "", fmt.Errorf("no path for %v", obj)
+
+ case *types.Var:
+ // Could be:
+ // - a field (obj.IsField())
+ // - a func parameter or result
+ // - a local var.
+ // Sadly there is no way to distinguish
+ // a param/result from a local
+ // so we must proceed to the find.
+
+ case *types.Func:
+ // A func, if not package-level, must be a method.
+ if recv := obj.Type().(*types.Signature).Recv(); recv == nil {
+ return "", fmt.Errorf("func is not a method: %v", obj)
+ }
+
+ if path, ok := enc.concreteMethod(obj); ok {
+ // Fast path for concrete methods that avoids looping over scope.
+ return path, nil
+ }
+
+ default:
+ panic(obj)
+ }
+
+ // 4. Search the API for the path to the var (field/param/result) or method.
+
+ // First inspect package-level named types.
+ // In the presence of path aliases, these give
+ // the best paths because non-types may
+ // refer to types, but not the reverse.
+ empty := make([]byte, 0, 48) // initial space
+ objs := enc.scopeObjects(scope)
+ for _, o := range objs {
+ tname, ok := o.(*types.TypeName)
+ if !ok {
+ continue // handle non-types in second pass
+ }
+
+ path := append(empty, o.Name()...)
+ path = append(path, opType)
+
+ T := o.Type()
+ if alias, ok := T.(*aliases.Alias); ok {
+ if r := findTypeParam(obj, aliases.TypeParams(alias), path, opTypeParam, nil); r != nil {
+ return Path(r), nil
+ }
+ if r := find(obj, aliases.Rhs(alias), append(path, opRhs), nil); r != nil {
+ return Path(r), nil
+ }
+
+ } else if tname.IsAlias() {
+ // legacy alias
+ if r := find(obj, T, path, nil); r != nil {
+ return Path(r), nil
+ }
+
+ } else if named, ok := T.(*types.Named); ok {
+ // defined (named) type
+ if r := findTypeParam(obj, named.TypeParams(), path, opTypeParam, nil); r != nil {
+ return Path(r), nil
+ }
+ if r := find(obj, named.Underlying(), append(path, opUnderlying), nil); r != nil {
+ return Path(r), nil
+ }
+ }
+ }
+
+ // Then inspect everything else:
+ // non-types, and declared methods of defined types.
+ for _, o := range objs {
+ path := append(empty, o.Name()...)
+ if _, ok := o.(*types.TypeName); !ok {
+ if o.Exported() {
+ // exported non-type (const, var, func)
+ if r := find(obj, o.Type(), append(path, opType), nil); r != nil {
+ return Path(r), nil
+ }
+ }
+ continue
+ }
+
+ // Inspect declared methods of defined types.
+ if T, ok := aliases.Unalias(o.Type()).(*types.Named); ok {
+ path = append(path, opType)
+ // The method index here is always with respect
+ // to the underlying go/types data structures,
+ // which ultimately derives from source order
+ // and must be preserved by export data.
+ for i := 0; i < T.NumMethods(); i++ {
+ m := T.Method(i)
+ path2 := appendOpArg(path, opMethod, i)
+ if m == obj {
+ return Path(path2), nil // found declared method
+ }
+ if r := find(obj, m.Type(), append(path2, opType), nil); r != nil {
+ return Path(r), nil
+ }
+ }
+ }
+ }
+
+ return "", fmt.Errorf("can't find path for %v in %s", obj, pkg.Path())
+}
+
+func appendOpArg(path []byte, op byte, arg int) []byte {
+ path = append(path, op)
+ path = strconv.AppendInt(path, int64(arg), 10)
+ return path
+}
+
+// concreteMethod returns the path for meth, which must have a non-nil receiver.
+// The second return value indicates success and may be false if the method is
+// an interface method or if it is an instantiated method.
+//
+// This function is just an optimization that avoids the general scope walking
+// approach. You are expected to fall back to the general approach if this
+// function fails.
+func (enc *Encoder) concreteMethod(meth *types.Func) (Path, bool) {
+ // Concrete methods can only be declared on package-scoped named types. For
+ // that reason we can skip the expensive walk over the package scope: the
+ // path will always be package -> named type -> method. We can trivially get
+ // the type name from the receiver, and only have to look over the type's
+ // methods to find the method index.
+ //
+ // Methods on generic types require special consideration, however. Consider
+ // the following package:
+ //
+ // L1: type S[T any] struct{}
+ // L2: func (recv S[A]) Foo() { recv.Bar() }
+ // L3: func (recv S[B]) Bar() { }
+ // L4: type Alias = S[int]
+ // L5: func _[T any]() { var s S[int]; s.Foo() }
+ //
+ // The receivers of methods on generic types are instantiations. L2 and L3
+ // instantiate S with the type-parameters A and B, which are scoped to the
+ // respective methods. L4 and L5 each instantiate S with int. Each of these
+ // instantiations has its own method set, full of methods (and thus objects)
+ // with receivers whose types are the respective instantiations. In other
+ // words, we have
+ //
+ // S[A].Foo, S[A].Bar
+ // S[B].Foo, S[B].Bar
+ // S[int].Foo, S[int].Bar
+ //
+ // We may thus be trying to produce object paths for any of these objects.
+ //
+ // S[A].Foo and S[B].Bar are the origin methods, and their paths are S.Foo
+ // and S.Bar, which are the paths that this function naturally produces.
+ //
+ // S[A].Bar, S[B].Foo, and both methods on S[int] are instantiations that
+ // don't correspond to the origin methods. For S[int], this is significant.
+ // The most precise object path for S[int].Foo, for example, is Alias.Foo,
+ // not S.Foo. Our function, however, would produce S.Foo, which would
+ // resolve to a different object.
+ //
+ // For S[A].Bar and S[B].Foo it could be argued that S.Bar and S.Foo are
+ // still the correct paths, since only the origin methods have meaningful
+ // paths. But this is likely only true for trivial cases and has edge cases.
+ // Since this function is only an optimization, we err on the side of giving
+ // up, deferring to the slower but definitely correct algorithm. Most users
+ // of objectpath will only be giving us origin methods, anyway, as referring
+ // to instantiated methods is usually not useful.
+
+ if meth.Origin() != meth {
+ return "", false
+ }
+
+ _, named := typesinternal.ReceiverNamed(meth.Type().(*types.Signature).Recv())
+ if named == nil {
+ return "", false
+ }
+
+ if types.IsInterface(named) {
+ // Named interfaces don't have to be package-scoped
+ //
+ // TODO(dominikh): opt: if scope.Lookup(name) == named, then we can apply this optimization to interface
+ // methods, too, I think.
+ return "", false
+ }
+
+ // Preallocate space for the name, opType, opMethod, and some digits.
+ name := named.Obj().Name()
+ path := make([]byte, 0, len(name)+8)
+ path = append(path, name...)
+ path = append(path, opType)
+
+ // Method indices are w.r.t. the go/types data structures,
+ // ultimately deriving from source order,
+ // which is preserved by export data.
+ for i := 0; i < named.NumMethods(); i++ {
+ if named.Method(i) == meth {
+ path = appendOpArg(path, opMethod, i)
+ return Path(path), true
+ }
+ }
+
+ // Due to golang/go#59944, go/types fails to associate the receiver with
+ // certain methods on cgo types.
+ //
+ // TODO(rfindley): replace this panic once golang/go#59944 is fixed in all Go
+ // versions gopls supports.
+ return "", false
+ // panic(fmt.Sprintf("couldn't find method %s on type %s; methods: %#v", meth, named, enc.namedMethods(named)))
+}
+
+// find finds obj within type T, returning the path to it, or nil if not found.
+//
+// The seen map is used to short circuit cycles through type parameters. If
+// nil, it will be allocated as necessary.
+func find(obj types.Object, T types.Type, path []byte, seen map[*types.TypeName]bool) []byte {
+ switch T := T.(type) {
+ case *aliases.Alias:
+ return find(obj, aliases.Unalias(T), path, seen)
+ case *types.Basic, *types.Named:
+ // Named types belonging to pkg were handled already,
+ // so T must belong to another package. No path.
+ return nil
+ case *types.Pointer:
+ return find(obj, T.Elem(), append(path, opElem), seen)
+ case *types.Slice:
+ return find(obj, T.Elem(), append(path, opElem), seen)
+ case *types.Array:
+ return find(obj, T.Elem(), append(path, opElem), seen)
+ case *types.Chan:
+ return find(obj, T.Elem(), append(path, opElem), seen)
+ case *types.Map:
+ if r := find(obj, T.Key(), append(path, opKey), seen); r != nil {
+ return r
+ }
+ return find(obj, T.Elem(), append(path, opElem), seen)
+ case *types.Signature:
+ if r := findTypeParam(obj, T.RecvTypeParams(), path, opRecvTypeParam, nil); r != nil {
+ return r
+ }
+ if r := findTypeParam(obj, T.TypeParams(), path, opTypeParam, seen); r != nil {
+ return r
+ }
+ if r := find(obj, T.Params(), append(path, opParams), seen); r != nil {
+ return r
+ }
+ return find(obj, T.Results(), append(path, opResults), seen)
+ case *types.Struct:
+ for i := 0; i < T.NumFields(); i++ {
+ fld := T.Field(i)
+ path2 := appendOpArg(path, opField, i)
+ if fld == obj {
+ return path2 // found field var
+ }
+ if r := find(obj, fld.Type(), append(path2, opType), seen); r != nil {
+ return r
+ }
+ }
+ return nil
+ case *types.Tuple:
+ for i := 0; i < T.Len(); i++ {
+ v := T.At(i)
+ path2 := appendOpArg(path, opAt, i)
+ if v == obj {
+ return path2 // found param/result var
+ }
+ if r := find(obj, v.Type(), append(path2, opType), seen); r != nil {
+ return r
+ }
+ }
+ return nil
+ case *types.Interface:
+ for i := 0; i < T.NumMethods(); i++ {
+ m := T.Method(i)
+ path2 := appendOpArg(path, opMethod, i)
+ if m == obj {
+ return path2 // found interface method
+ }
+ if r := find(obj, m.Type(), append(path2, opType), seen); r != nil {
+ return r
+ }
+ }
+ return nil
+ case *types.TypeParam:
+ name := T.Obj()
+ if name == obj {
+ return append(path, opObj)
+ }
+ if seen[name] {
+ return nil
+ }
+ if seen == nil {
+ seen = make(map[*types.TypeName]bool)
+ }
+ seen[name] = true
+ if r := find(obj, T.Constraint(), append(path, opConstraint), seen); r != nil {
+ return r
+ }
+ return nil
+ }
+ panic(T)
+}
+
+func findTypeParam(obj types.Object, list *types.TypeParamList, path []byte, op byte, seen map[*types.TypeName]bool) []byte {
+ for i := 0; i < list.Len(); i++ {
+ tparam := list.At(i)
+ path2 := appendOpArg(path, op, i)
+ if r := find(obj, tparam, path2, seen); r != nil {
+ return r
+ }
+ }
+ return nil
+}
+
+// Object returns the object denoted by path p within the package pkg.
+func Object(pkg *types.Package, p Path) (types.Object, error) {
+ pathstr := string(p)
+ if pathstr == "" {
+ return nil, fmt.Errorf("empty path")
+ }
+
+ var pkgobj, suffix string
+ if dot := strings.IndexByte(pathstr, opType); dot < 0 {
+ pkgobj = pathstr
+ } else {
+ pkgobj = pathstr[:dot]
+ suffix = pathstr[dot:] // suffix starts with "."
+ }
+
+ obj := pkg.Scope().Lookup(pkgobj)
+ if obj == nil {
+ return nil, fmt.Errorf("package %s does not contain %q", pkg.Path(), pkgobj)
+ }
+
+ // abstraction of *types.{Pointer,Slice,Array,Chan,Map}
+ type hasElem interface {
+ Elem() types.Type
+ }
+ // abstraction of *types.{Named,Signature}
+ type hasTypeParams interface {
+ TypeParams() *types.TypeParamList
+ }
+ // abstraction of *types.{Named,TypeParam}
+ type hasObj interface {
+ Obj() *types.TypeName
+ }
+
+ // The loop state is the pair (t, obj),
+ // exactly one of which is non-nil, initially obj.
+ // All suffixes start with '.' (the only object->type operation),
+ // followed by optional type->type operations,
+ // then a type->object operation.
+ // The cycle then repeats.
+ var t types.Type
+ for suffix != "" {
+ code := suffix[0]
+ suffix = suffix[1:]
+
+ // Codes [AFMTr] have an integer operand.
+ var index int
+ switch code {
+ case opAt, opField, opMethod, opTypeParam, opRecvTypeParam:
+ rest := strings.TrimLeft(suffix, "0123456789")
+ numerals := suffix[:len(suffix)-len(rest)]
+ suffix = rest
+ i, err := strconv.Atoi(numerals)
+ if err != nil {
+ return nil, fmt.Errorf("invalid path: bad numeric operand %q for code %q", numerals, code)
+ }
+ index = int(i)
+ case opObj:
+ // no operand
+ default:
+ // The suffix must end with a type->object operation.
+ if suffix == "" {
+ return nil, fmt.Errorf("invalid path: ends with %q, want [AFMO]", code)
+ }
+ }
+
+ if code == opType {
+ if t != nil {
+ return nil, fmt.Errorf("invalid path: unexpected %q in type context", opType)
+ }
+ t = obj.Type()
+ obj = nil
+ continue
+ }
+
+ if t == nil {
+ return nil, fmt.Errorf("invalid path: code %q in object context", code)
+ }
+
+ // Inv: t != nil, obj == nil
+
+ t = aliases.Unalias(t)
+ switch code {
+ case opElem:
+ hasElem, ok := t.(hasElem) // Pointer, Slice, Array, Chan, Map
+ if !ok {
+ return nil, fmt.Errorf("cannot apply %q to %s (got %T, want pointer, slice, array, chan or map)", code, t, t)
+ }
+ t = hasElem.Elem()
+
+ case opKey:
+ mapType, ok := t.(*types.Map)
+ if !ok {
+ return nil, fmt.Errorf("cannot apply %q to %s (got %T, want map)", code, t, t)
+ }
+ t = mapType.Key()
+
+ case opParams:
+ sig, ok := t.(*types.Signature)
+ if !ok {
+ return nil, fmt.Errorf("cannot apply %q to %s (got %T, want signature)", code, t, t)
+ }
+ t = sig.Params()
+
+ case opResults:
+ sig, ok := t.(*types.Signature)
+ if !ok {
+ return nil, fmt.Errorf("cannot apply %q to %s (got %T, want signature)", code, t, t)
+ }
+ t = sig.Results()
+
+ case opUnderlying:
+ named, ok := t.(*types.Named)
+ if !ok {
+ return nil, fmt.Errorf("cannot apply %q to %s (got %T, want named)", code, t, t)
+ }
+ t = named.Underlying()
+
+ case opRhs:
+ if alias, ok := t.(*aliases.Alias); ok {
+ t = aliases.Rhs(alias)
+ } else if false && aliases.Enabled() {
+ // The Enabled check is too expensive, so for now we
+ // simply assume that aliases are not enabled.
+ // TODO(adonovan): replace with "if true {" when go1.24 is assured.
+ return nil, fmt.Errorf("cannot apply %q to %s (got %T, want alias)", code, t, t)
+ }
+
+ case opTypeParam:
+ hasTypeParams, ok := t.(hasTypeParams) // Named, Signature
+ if !ok {
+ return nil, fmt.Errorf("cannot apply %q to %s (got %T, want named or signature)", code, t, t)
+ }
+ tparams := hasTypeParams.TypeParams()
+ if n := tparams.Len(); index >= n {
+ return nil, fmt.Errorf("tuple index %d out of range [0-%d)", index, n)
+ }
+ t = tparams.At(index)
+
+ case opRecvTypeParam:
+ sig, ok := t.(*types.Signature) // Signature
+ if !ok {
+ return nil, fmt.Errorf("cannot apply %q to %s (got %T, want signature)", code, t, t)
+ }
+ rtparams := sig.RecvTypeParams()
+ if n := rtparams.Len(); index >= n {
+ return nil, fmt.Errorf("tuple index %d out of range [0-%d)", index, n)
+ }
+ t = rtparams.At(index)
+
+ case opConstraint:
+ tparam, ok := t.(*types.TypeParam)
+ if !ok {
+ return nil, fmt.Errorf("cannot apply %q to %s (got %T, want type parameter)", code, t, t)
+ }
+ t = tparam.Constraint()
+
+ case opAt:
+ tuple, ok := t.(*types.Tuple)
+ if !ok {
+ return nil, fmt.Errorf("cannot apply %q to %s (got %T, want tuple)", code, t, t)
+ }
+ if n := tuple.Len(); index >= n {
+ return nil, fmt.Errorf("tuple index %d out of range [0-%d)", index, n)
+ }
+ obj = tuple.At(index)
+ t = nil
+
+ case opField:
+ structType, ok := t.(*types.Struct)
+ if !ok {
+ return nil, fmt.Errorf("cannot apply %q to %s (got %T, want struct)", code, t, t)
+ }
+ if n := structType.NumFields(); index >= n {
+ return nil, fmt.Errorf("field index %d out of range [0-%d)", index, n)
+ }
+ obj = structType.Field(index)
+ t = nil
+
+ case opMethod:
+ switch t := t.(type) {
+ case *types.Interface:
+ if index >= t.NumMethods() {
+ return nil, fmt.Errorf("method index %d out of range [0-%d)", index, t.NumMethods())
+ }
+ obj = t.Method(index) // Id-ordered
+
+ case *types.Named:
+ if index >= t.NumMethods() {
+ return nil, fmt.Errorf("method index %d out of range [0-%d)", index, t.NumMethods())
+ }
+ obj = t.Method(index)
+
+ default:
+ return nil, fmt.Errorf("cannot apply %q to %s (got %T, want interface or named)", code, t, t)
+ }
+ t = nil
+
+ case opObj:
+ hasObj, ok := t.(hasObj)
+ if !ok {
+ return nil, fmt.Errorf("cannot apply %q to %s (got %T, want named or type param)", code, t, t)
+ }
+ obj = hasObj.Obj()
+ t = nil
+
+ default:
+ return nil, fmt.Errorf("invalid path: unknown code %q", code)
+ }
+ }
+
+ if obj == nil {
+ panic(p) // path does not end in an object-valued operator
+ }
+
+ if obj.Pkg() != pkg {
+ return nil, fmt.Errorf("path denotes %s, which belongs to a different package", obj)
+ }
+
+ return obj, nil // success
+}
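+
+// Illustrative sketch of a For/Object round trip. The tiny source snippet
+// and variable names below are invented for the example; For and Object are
+// the exported functions of this package, and the printed path is only a
+// plausible encoding, not a guarantee.
+//
+// fset := token.NewFileSet()
+// f, _ := parser.ParseFile(fset, "p.go", "package p; type T struct{ F int }", 0)
+// pkg, _ := new(types.Config).Check("p", fset, []*ast.File{f}, nil)
+// strct := pkg.Scope().Lookup("T").Type().Underlying().(*types.Struct)
+// fld := strct.Field(0)                  // the field F
+// path, _ := objectpath.For(fld)         // e.g. "T.UF0"
+// obj, _ := objectpath.Object(pkg, path) // resolves back to fld
+// fmt.Println(obj == fld)                // true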
+
+// scopeObjects is a memoization of scope objects.
+// Callers must not modify the result.
+func (enc *Encoder) scopeObjects(scope *types.Scope) []types.Object {
+ m := enc.scopeMemo
+ if m == nil {
+ m = make(map[*types.Scope][]types.Object)
+ enc.scopeMemo = m
+ }
+ objs, ok := m[scope]
+ if !ok {
+ names := scope.Names() // allocates and sorts
+ objs = make([]types.Object, len(names))
+ for i, name := range names {
+ objs[i] = scope.Lookup(name)
+ }
+ m[scope] = objs
+ }
+ return objs
+}
diff --git a/vendor/golang.org/x/tools/go/types/typeutil/callee.go b/vendor/golang.org/x/tools/go/types/typeutil/callee.go
new file mode 100644
index 0000000..90dc541
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/types/typeutil/callee.go
@@ -0,0 +1,69 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package typeutil
+
+import (
+ "go/ast"
+ "go/types"
+
+ "golang.org/x/tools/go/ast/astutil"
+ "golang.org/x/tools/internal/typeparams"
+)
+
+// Callee returns the named target of a function call, if any:
+// a function, method, builtin, or variable.
+//
+// Functions and methods may potentially have type parameters.
+func Callee(info *types.Info, call *ast.CallExpr) types.Object {
+ fun := astutil.Unparen(call.Fun)
+
+ // Look through type instantiation if necessary.
+ isInstance := false
+ switch fun.(type) {
+ case *ast.IndexExpr, *ast.IndexListExpr:
+ // When extracting the callee from an *IndexExpr, we need to check that
+ // it is a *types.Func and not a *types.Var.
+ // Example: Don't match a slice m within the expression `m[0]()`.
+ isInstance = true
+ fun, _, _, _ = typeparams.UnpackIndexExpr(fun)
+ }
+
+ var obj types.Object
+ switch fun := fun.(type) {
+ case *ast.Ident:
+ obj = info.Uses[fun] // type, var, builtin, or declared func
+ case *ast.SelectorExpr:
+ if sel, ok := info.Selections[fun]; ok {
+ obj = sel.Obj() // method or field
+ } else {
+ obj = info.Uses[fun.Sel] // qualified identifier?
+ }
+ }
+ if _, ok := obj.(*types.TypeName); ok {
+ return nil // T(x) is a conversion, not a call
+ }
+ // A Func is required to match instantiations.
+ if _, ok := obj.(*types.Func); isInstance && !ok {
+ return nil // Was not a Func.
+ }
+ return obj
+}
+
+// StaticCallee returns the target (function or method) of a static function
+// call, if any. It returns nil for calls to builtins.
+//
+// Note: for calls of instantiated functions and methods, StaticCallee returns
+// the corresponding generic function or method on the generic type.
+func StaticCallee(info *types.Info, call *ast.CallExpr) *types.Func {
+ if f, ok := Callee(info, call).(*types.Func); ok && !interfaceMethod(f) {
+ return f
+ }
+ return nil
+}
+
+func interfaceMethod(f *types.Func) bool {
+ recv := f.Type().(*types.Signature).Recv()
+ return recv != nil && types.IsInterface(recv.Type())
+}
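+
+// Illustrative sketch: resolving the target of a call expression after type
+// checking. The identifiers info and call are assumptions standing in for a
+// *types.Info (with Uses and Selections populated) and an *ast.CallExpr from
+// the checked files.
+//
+// if fn := typeutil.StaticCallee(info, call); fn != nil {
+//   fmt.Println("static call to", fn.FullName())
+// } else if obj := typeutil.Callee(info, call); obj != nil {
+//   fmt.Println("dynamic or builtin call to", obj.Name())
+// }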
diff --git a/vendor/golang.org/x/tools/go/types/typeutil/imports.go b/vendor/golang.org/x/tools/go/types/typeutil/imports.go
new file mode 100644
index 0000000..b81ce0c
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/types/typeutil/imports.go
@@ -0,0 +1,30 @@
+// Copyright 2014 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package typeutil
+
+import "go/types"
+
+// Dependencies returns all dependencies of the specified packages.
+//
+// Dependent packages appear in topological order: if package P imports
+// package Q, Q appears earlier than P in the result.
+// The algorithm follows import statements in the order they
+// appear in the source code, so the result is a total order.
+func Dependencies(pkgs ...*types.Package) []*types.Package {
+ var result []*types.Package
+ seen := make(map[*types.Package]bool)
+ var visit func(pkgs []*types.Package)
+ visit = func(pkgs []*types.Package) {
+ for _, p := range pkgs {
+ if !seen[p] {
+ seen[p] = true
+ visit(p.Imports())
+ result = append(result, p)
+ }
+ }
+ }
+ visit(pkgs)
+ return result
+}
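+
+// Illustrative sketch: printing a package's import graph in topological
+// order, dependencies first. pkg is an assumed *types.Package obtained from
+// the type checker or an importer.
+//
+// for _, dep := range typeutil.Dependencies(pkg) {
+//   fmt.Println(dep.Path())
+// }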
diff --git a/vendor/golang.org/x/tools/go/types/typeutil/map.go b/vendor/golang.org/x/tools/go/types/typeutil/map.go
new file mode 100644
index 0000000..a92f80d
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/types/typeutil/map.go
@@ -0,0 +1,518 @@
+// Copyright 2014 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package typeutil defines various utilities for types, such as Map,
+// a mapping from types.Type to any values.
+package typeutil // import "golang.org/x/tools/go/types/typeutil"
+
+import (
+ "bytes"
+ "fmt"
+ "go/types"
+ "reflect"
+
+ "golang.org/x/tools/internal/aliases"
+ "golang.org/x/tools/internal/typeparams"
+)
+
+// Map is a hash-table-based mapping from types (types.Type) to
+// arbitrary values (of type any). The concrete types that implement
+// the Type interface are pointers. Since they are not canonicalized,
+// == cannot be used to check for equivalence, and thus we cannot
+// simply use a Go map.
+//
+// Just as with map[K]V, a nil *Map is a valid empty map.
+//
+// Not thread-safe.
+type Map struct {
+ hasher Hasher // shared by many Maps
+ table map[uint32][]entry // maps hash to bucket; entry.key==nil means unused
+ length int // number of map entries
+}
+
+// entry is an entry (key/value association) in a hash bucket.
+type entry struct {
+ key types.Type
+ value any
+}
+
+// SetHasher sets the hasher used by Map.
+//
+// All Hashers are functionally equivalent but contain internal state
+// used to cache the results of hashing previously seen types.
+//
+// A single Hasher created by MakeHasher() may be shared among many
+// Maps. This is recommended if the instances have many keys in
+// common, as it will amortize the cost of hash computation.
+//
+// A Hasher may grow without bound as new types are seen. Even when a
+// type is deleted from the map, the Hasher never shrinks, since other
+// types in the map may reference the deleted type indirectly.
+//
+// Hashers are not thread-safe, and read-only operations such as
+// Map.At require updates to the hasher, so a full Mutex lock (not a
+// read-lock) is required around all Map operations if a shared
+// hasher is accessed from multiple threads.
+//
+// If SetHasher is not called, the Map will create a private hasher at
+// the first call to Insert.
+func (m *Map) SetHasher(hasher Hasher) {
+ m.hasher = hasher
+}
+
+// Delete removes the entry with the given key, if any.
+// It returns true if the entry was found.
+func (m *Map) Delete(key types.Type) bool {
+ if m != nil && m.table != nil {
+ hash := m.hasher.Hash(key)
+ bucket := m.table[hash]
+ for i, e := range bucket {
+ if e.key != nil && types.Identical(key, e.key) {
+ // We can't compact the bucket as it
+ // would disturb iterators.
+ bucket[i] = entry{}
+ m.length--
+ return true
+ }
+ }
+ }
+ return false
+}
+
+// At returns the map entry for the given key.
+// The result is nil if the entry is not present.
+func (m *Map) At(key types.Type) any {
+ if m != nil && m.table != nil {
+ for _, e := range m.table[m.hasher.Hash(key)] {
+ if e.key != nil && types.Identical(key, e.key) {
+ return e.value
+ }
+ }
+ }
+ return nil
+}
+
+// Set sets the map entry for key to val,
+// and returns the previous entry, if any.
+func (m *Map) Set(key types.Type, value any) (prev any) {
+ if m.table != nil {
+ hash := m.hasher.Hash(key)
+ bucket := m.table[hash]
+ var hole *entry
+ for i, e := range bucket {
+ if e.key == nil {
+ hole = &bucket[i]
+ } else if types.Identical(key, e.key) {
+ prev = e.value
+ bucket[i].value = value
+ return
+ }
+ }
+
+ if hole != nil {
+ *hole = entry{key, value} // overwrite deleted entry
+ } else {
+ m.table[hash] = append(bucket, entry{key, value})
+ }
+ } else {
+ if m.hasher.memo == nil {
+ m.hasher = MakeHasher()
+ }
+ hash := m.hasher.Hash(key)
+ m.table = map[uint32][]entry{hash: {entry{key, value}}}
+ }
+
+ m.length++
+ return
+}
+
+// Len returns the number of map entries.
+func (m *Map) Len() int {
+ if m != nil {
+ return m.length
+ }
+ return 0
+}
+
+// Iterate calls function f on each entry in the map in unspecified order.
+//
+// If f should mutate the map, Iterate provides the same guarantees as
+// Go maps: if f deletes a map entry that Iterate has not yet reached,
+// f will not be invoked for it, but if f inserts a map entry that
+// Iterate has not yet reached, whether or not f will be invoked for
+// it is unspecified.
+func (m *Map) Iterate(f func(key types.Type, value any)) {
+ if m != nil {
+ for _, bucket := range m.table {
+ for _, e := range bucket {
+ if e.key != nil {
+ f(e.key, e.value)
+ }
+ }
+ }
+ }
+}
+
+// Keys returns a new slice containing the set of map keys.
+// The order is unspecified.
+func (m *Map) Keys() []types.Type {
+ keys := make([]types.Type, 0, m.Len())
+ m.Iterate(func(key types.Type, _ any) {
+ keys = append(keys, key)
+ })
+ return keys
+}
+
+func (m *Map) toString(values bool) string {
+ if m == nil {
+ return "{}"
+ }
+ var buf bytes.Buffer
+ fmt.Fprint(&buf, "{")
+ sep := ""
+ m.Iterate(func(key types.Type, value any) {
+ fmt.Fprint(&buf, sep)
+ sep = ", "
+ fmt.Fprint(&buf, key)
+ if values {
+ fmt.Fprintf(&buf, ": %q", value)
+ }
+ })
+ fmt.Fprint(&buf, "}")
+ return buf.String()
+}
+
+// String returns a string representation of the map's entries.
+// Values are printed using fmt.Sprintf("%v", v).
+// Order is unspecified.
+func (m *Map) String() string {
+ return m.toString(true)
+}
+
+// KeysString returns a string representation of the map's key set.
+// Order is unspecified.
+func (m *Map) KeysString() string {
+ return m.toString(false)
+}
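+
+// Illustrative sketch of basic Map usage; the value string is invented for
+// the example, and any types.Type works as a key.
+//
+// var m typeutil.Map // the zero value is ready to use
+// tSliceInt := types.NewSlice(types.Typ[types.Int])
+// m.Set(tSliceInt, "a slice of int")
+// fmt.Println(m.At(tSliceInt)) // "a slice of int"
+// fmt.Println(m.Len())         // 1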
+
+////////////////////////////////////////////////////////////////////////
+// Hasher
+
+// A Hasher maps each type to its hash value.
+// For efficiency, a hasher uses memoization; thus its memory
+// footprint grows monotonically over time.
+// Hashers are not thread-safe.
+// Hashers have reference semantics.
+// Call MakeHasher to create a Hasher.
+type Hasher struct {
+ memo map[types.Type]uint32
+
+ // ptrMap records pointer identity.
+ ptrMap map[any]uint32
+
+ // sigTParams holds type parameters from the signature being hashed.
+ // Signatures are considered identical modulo renaming of type parameters, so
+ // within the scope of a signature type the identity of the signature's type
+ // parameters is just their index.
+ //
+ // Since the language does not currently support referring to uninstantiated
+ // generic types or functions, and instantiated signatures do not have type
+ // parameter lists, we should never encounter a second non-empty type
+ // parameter list when hashing a generic signature.
+ sigTParams *types.TypeParamList
+}
+
+// MakeHasher returns a new Hasher instance.
+func MakeHasher() Hasher {
+ return Hasher{
+ memo: make(map[types.Type]uint32),
+ ptrMap: make(map[any]uint32),
+ sigTParams: nil,
+ }
+}
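+
+// Illustrative sketch: sharing one Hasher across Maps that are expected to
+// hold many of the same keys, amortizing hash computation. The map names are
+// invented for the example.
+//
+// hasher := typeutil.MakeHasher()
+// var sizes, names typeutil.Map
+// sizes.SetHasher(hasher)
+// names.SetHasher(hasher)
+// // Subsequent Set/At calls on either map reuse the memoized hashes.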
+
+// Hash computes a hash value for the given type t such that
+// Identical(t, t') => Hash(t) == Hash(t').
+func (h Hasher) Hash(t types.Type) uint32 {
+ hash, ok := h.memo[t]
+ if !ok {
+ hash = h.hashFor(t)
+ h.memo[t] = hash
+ }
+ return hash
+}
+
+// hashString computes the Fowler–Noll–Vo hash of s.
+func hashString(s string) uint32 {
+ var h uint32
+ for i := 0; i < len(s); i++ {
+ h ^= uint32(s[i])
+ h *= 16777619
+ }
+ return h
+}
+
+// hashFor computes the hash of t.
+func (h Hasher) hashFor(t types.Type) uint32 {
+ // See Identical for rationale.
+ switch t := t.(type) {
+ case *types.Basic:
+ return uint32(t.Kind())
+
+ case *aliases.Alias:
+ return h.Hash(aliases.Unalias(t))
+
+ case *types.Array:
+ return 9043 + 2*uint32(t.Len()) + 3*h.Hash(t.Elem())
+
+ case *types.Slice:
+ return 9049 + 2*h.Hash(t.Elem())
+
+ case *types.Struct:
+ var hash uint32 = 9059
+ for i, n := 0, t.NumFields(); i < n; i++ {
+ f := t.Field(i)
+ if f.Anonymous() {
+ hash += 8861
+ }
+ hash += hashString(t.Tag(i))
+ hash += hashString(f.Name()) // (ignore f.Pkg)
+ hash += h.Hash(f.Type())
+ }
+ return hash
+
+ case *types.Pointer:
+ return 9067 + 2*h.Hash(t.Elem())
+
+ case *types.Signature:
+ var hash uint32 = 9091
+ if t.Variadic() {
+ hash *= 8863
+ }
+
+ // Use a separate hasher for types inside of the signature, where type
+ // parameter identity is modified to be (index, constraint). We must use a
+ // new memo for this hasher as type identity may be affected by this
+ // masking. For example, in func[T any](*T), the identity of *T depends on
+ // whether we are mapping the argument in isolation, or recursively as part
+ // of hashing the signature.
+ //
+ // We should never encounter a generic signature while hashing another
+ // generic signature, but defensively set sigTParams only if it is
+ // not already set.
+ tparams := t.TypeParams()
+ if h.sigTParams == nil && tparams.Len() != 0 {
+ h = Hasher{
+ // There may be something more efficient than discarding the existing
+ // memo, but it would require detecting whether types are 'tainted' by
+ // references to type parameters.
+ memo: make(map[types.Type]uint32),
+ // Re-using ptrMap ensures that pointer identity is preserved in this
+ // hasher.
+ ptrMap: h.ptrMap,
+ sigTParams: tparams,
+ }
+ }
+
+ for i := 0; i < tparams.Len(); i++ {
+ tparam := tparams.At(i)
+ hash += 7 * h.Hash(tparam.Constraint())
+ }
+
+ return hash + 3*h.hashTuple(t.Params()) + 5*h.hashTuple(t.Results())
+
+ case *types.Union:
+ return h.hashUnion(t)
+
+ case *types.Interface:
+ // Interfaces are identical if they have the same set of methods, with
+ // identical names and types, and they have the same set of type
+ // restrictions. See go/types.identical for more details.
+ var hash uint32 = 9103
+
+ // Hash methods.
+ for i, n := 0, t.NumMethods(); i < n; i++ {
+ // Method order is not significant.
+ // Ignore m.Pkg().
+ m := t.Method(i)
+ // Use shallow hash on method signature to
+ // avoid anonymous interface cycles.
+ hash += 3*hashString(m.Name()) + 5*h.shallowHash(m.Type())
+ }
+
+ // Hash type restrictions.
+ terms, err := typeparams.InterfaceTermSet(t)
+ // If err != nil, t has invalid type restrictions.
+ if err == nil {
+ hash += h.hashTermSet(terms)
+ }
+
+ return hash
+
+ case *types.Map:
+ return 9109 + 2*h.Hash(t.Key()) + 3*h.Hash(t.Elem())
+
+ case *types.Chan:
+ return 9127 + 2*uint32(t.Dir()) + 3*h.Hash(t.Elem())
+
+ case *types.Named:
+ hash := h.hashPtr(t.Obj())
+ targs := t.TypeArgs()
+ for i := 0; i < targs.Len(); i++ {
+ targ := targs.At(i)
+ hash += 2 * h.Hash(targ)
+ }
+ return hash
+
+ case *types.TypeParam:
+ return h.hashTypeParam(t)
+
+ case *types.Tuple:
+ return h.hashTuple(t)
+ }
+
+ panic(fmt.Sprintf("%T: %v", t, t))
+}
+
+func (h Hasher) hashTuple(tuple *types.Tuple) uint32 {
+ // See go/types.identicalTypes for rationale.
+ n := tuple.Len()
+ hash := 9137 + 2*uint32(n)
+ for i := 0; i < n; i++ {
+ hash += 3 * h.Hash(tuple.At(i).Type())
+ }
+ return hash
+}
+
+func (h Hasher) hashUnion(t *types.Union) uint32 {
+ // Hash type restrictions.
+ terms, err := typeparams.UnionTermSet(t)
+ // If err != nil, t has invalid type restrictions. Fall back on a non-zero
+ // hash.
+ if err != nil {
+ return 9151
+ }
+ return h.hashTermSet(terms)
+}
+
+func (h Hasher) hashTermSet(terms []*types.Term) uint32 {
+ hash := 9157 + 2*uint32(len(terms))
+ for _, term := range terms {
+ // term order is not significant.
+ termHash := h.Hash(term.Type())
+ if term.Tilde() {
+ termHash *= 9161
+ }
+ hash += 3 * termHash
+ }
+ return hash
+}
+
+// hashTypeParam returns a hash of the type parameter t, with a hash value
+// depending on whether t is contained in h.sigTParams.
+//
+// If h.sigTParams is set and contains t, then we are in the process of hashing
+// a signature, and the hash value of t must depend only on t's index and
+// constraint: signatures are considered identical modulo type parameter
+// renaming. To avoid infinite recursion, we only hash the type parameter
+// index, and rely on types.Identical to handle signatures where constraints
+// are not identical.
+//
+// Otherwise the hash of t depends only on t's pointer identity.
+func (h Hasher) hashTypeParam(t *types.TypeParam) uint32 {
+ if h.sigTParams != nil {
+ i := t.Index()
+ if i >= 0 && i < h.sigTParams.Len() && t == h.sigTParams.At(i) {
+ return 9173 + 3*uint32(i)
+ }
+ }
+ return h.hashPtr(t.Obj())
+}
+
+// hashPtr hashes the pointer identity of ptr. It uses h.ptrMap to ensure that
+// pointer values are not dependent on the GC.
+func (h Hasher) hashPtr(ptr any) uint32 {
+ if hash, ok := h.ptrMap[ptr]; ok {
+ return hash
+ }
+ hash := uint32(reflect.ValueOf(ptr).Pointer())
+ h.ptrMap[ptr] = hash
+ return hash
+}
+
+// shallowHash computes a hash of t without looking at any of its
+// element Types, to avoid potential anonymous cycles in the types of
+// interface methods.
+//
+// When an unnamed non-empty interface type appears anywhere among the
+// arguments or results of an interface method, there is a potential
+// for endless recursion. Consider:
+//
+// type X interface { m() []*interface { X } }
+//
+// The problem is that the Methods of the interface in m's result type
+// include m itself; there is no mention of the named type X that
+// might help us break the cycle.
+// (See comment in go/types.identical, case *Interface, for more.)
+func (h Hasher) shallowHash(t types.Type) uint32 {
+ // t is the type of an interface method (Signature),
+ // its params or results (Tuples), or their immediate
+ // elements (mostly Slice, Pointer, Basic, Named),
+ // so there's no need to optimize anything else.
+ switch t := t.(type) {
+ case *aliases.Alias:
+ return h.shallowHash(aliases.Unalias(t))
+
+ case *types.Signature:
+ var hash uint32 = 604171
+ if t.Variadic() {
+ hash *= 971767
+ }
+ // The Signature/Tuple recursion is always finite
+ // and invariably shallow.
+ return hash + 1062599*h.shallowHash(t.Params()) + 1282529*h.shallowHash(t.Results())
+
+ case *types.Tuple:
+ n := t.Len()
+ hash := 9137 + 2*uint32(n)
+ for i := 0; i < n; i++ {
+ hash += 53471161 * h.shallowHash(t.At(i).Type())
+ }
+ return hash
+
+ case *types.Basic:
+ return 45212177 * uint32(t.Kind())
+
+ case *types.Array:
+ return 1524181 + 2*uint32(t.Len())
+
+ case *types.Slice:
+ return 2690201
+
+ case *types.Struct:
+ return 3326489
+
+ case *types.Pointer:
+ return 4393139
+
+ case *types.Union:
+ return 562448657
+
+ case *types.Interface:
+ return 2124679 // no recursion here
+
+ case *types.Map:
+ return 9109
+
+ case *types.Chan:
+ return 9127
+
+ case *types.Named:
+ return h.hashPtr(t.Obj())
+
+ case *types.TypeParam:
+ return h.hashPtr(t.Obj())
+ }
+ panic(fmt.Sprintf("shallowHash: %T: %v", t, t))
+}
diff --git a/vendor/golang.org/x/tools/go/types/typeutil/methodsetcache.go b/vendor/golang.org/x/tools/go/types/typeutil/methodsetcache.go
new file mode 100644
index 0000000..bd71aaf
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/types/typeutil/methodsetcache.go
@@ -0,0 +1,73 @@
+// Copyright 2014 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// This file implements a cache of method sets.
+
+package typeutil
+
+import (
+ "go/types"
+ "sync"
+
+ "golang.org/x/tools/internal/aliases"
+)
+
+// A MethodSetCache records the method set of each type T for which
+// MethodSet(T) is called so that repeat queries are fast.
+// The zero value is a ready-to-use cache instance.
+type MethodSetCache struct {
+ mu sync.Mutex
+ named map[*types.Named]struct{ value, pointer *types.MethodSet } // method sets for named N and *N
+ others map[types.Type]*types.MethodSet // all other types
+}
+
+// MethodSet returns the method set of type T. It is thread-safe.
+//
+// If cache is nil, this function is equivalent to types.NewMethodSet(T).
+// Utility functions can thus expose an optional *MethodSetCache
+// parameter to clients that care about performance.
+func (cache *MethodSetCache) MethodSet(T types.Type) *types.MethodSet {
+ if cache == nil {
+ return types.NewMethodSet(T)
+ }
+ cache.mu.Lock()
+ defer cache.mu.Unlock()
+
+ switch T := aliases.Unalias(T).(type) {
+ case *types.Named:
+ return cache.lookupNamed(T).value
+
+ case *types.Pointer:
+ if N, ok := aliases.Unalias(T.Elem()).(*types.Named); ok {
+ return cache.lookupNamed(N).pointer
+ }
+ }
+
+ // all other types
+ // (The map uses pointer equivalence, not type identity.)
+ mset := cache.others[T]
+ if mset == nil {
+ mset = types.NewMethodSet(T)
+ if cache.others == nil {
+ cache.others = make(map[types.Type]*types.MethodSet)
+ }
+ cache.others[T] = mset
+ }
+ return mset
+}
+
+func (cache *MethodSetCache) lookupNamed(named *types.Named) struct{ value, pointer *types.MethodSet } {
+ if cache.named == nil {
+ cache.named = make(map[*types.Named]struct{ value, pointer *types.MethodSet })
+ }
+ // Avoid recomputing mset(*T) for each distinct Pointer
+ // instance whose underlying type is a named type.
+ msets, ok := cache.named[named]
+ if !ok {
+ msets.value = types.NewMethodSet(named)
+ msets.pointer = types.NewMethodSet(types.NewPointer(named))
+ cache.named[named] = msets
+ }
+ return msets
+}
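+
+// Illustrative sketch: reusing a MethodSetCache across repeated queries. T is
+// an assumed types.Type from the surrounding program.
+//
+// var cache typeutil.MethodSetCache // the zero value is ready to use
+// mset := cache.MethodSet(T)        // computed once, then served from the cache
+// for i := 0; i < mset.Len(); i++ {
+//   fmt.Println(mset.At(i).Obj().Name())
+// }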
diff --git a/vendor/golang.org/x/tools/go/types/typeutil/ui.go b/vendor/golang.org/x/tools/go/types/typeutil/ui.go
new file mode 100644
index 0000000..a0c1a60
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/types/typeutil/ui.go
@@ -0,0 +1,55 @@
+// Copyright 2014 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package typeutil
+
+// This file defines utilities for user interfaces that display types.
+
+import (
+ "go/types"
+
+ "golang.org/x/tools/internal/aliases"
+)
+
+// IntuitiveMethodSet returns the intuitive method set of a type T,
+// which is the set of methods you can call on an addressable value of
+// that type.
+//
+// The result always contains MethodSet(T), and is exactly MethodSet(T)
+// for interface types and for pointer-to-concrete types.
+// For all other concrete types T, the result additionally
+// contains each method belonging to *T if there is no identically
+// named method on T itself.
+//
+// This corresponds to user intuition about method sets;
+// this function is intended only for user interfaces.
+//
+// The order of the result is as for types.MethodSet(T).
+func IntuitiveMethodSet(T types.Type, msets *MethodSetCache) []*types.Selection {
+ isPointerToConcrete := func(T types.Type) bool {
+ ptr, ok := aliases.Unalias(T).(*types.Pointer)
+ return ok && !types.IsInterface(ptr.Elem())
+ }
+
+ var result []*types.Selection
+ mset := msets.MethodSet(T)
+ if types.IsInterface(T) || isPointerToConcrete(T) {
+ for i, n := 0, mset.Len(); i < n; i++ {
+ result = append(result, mset.At(i))
+ }
+ } else {
+ // T is some other concrete type.
+ // Report methods of T and *T, preferring those of T.
+ pmset := msets.MethodSet(types.NewPointer(T))
+ for i, n := 0, pmset.Len(); i < n; i++ {
+ meth := pmset.At(i)
+ if m := mset.Lookup(meth.Obj().Pkg(), meth.Obj().Name()); m != nil {
+ meth = m
+ }
+ result = append(result, meth)
+ }
+
+ }
+ return result
+}
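+
+// Illustrative sketch: listing the methods a user would expect to call on an
+// addressable value of type T. T and cache are assumed to come from the
+// surrounding program; a nil *MethodSetCache also works, it simply disables
+// caching.
+//
+// for _, sel := range typeutil.IntuitiveMethodSet(T, &cache) {
+//   fmt.Println(sel.Obj().Name(), sel.Obj().Type())
+// }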
diff --git a/vendor/golang.org/x/tools/internal/aliases/aliases.go b/vendor/golang.org/x/tools/internal/aliases/aliases.go
new file mode 100644
index 0000000..c24c2ee
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/aliases/aliases.go
@@ -0,0 +1,32 @@
+// Copyright 2024 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package aliases
+
+import (
+ "go/token"
+ "go/types"
+)
+
+// Package aliases defines backward compatible shims
+// for the types.Alias type representation added in 1.22.
+// This defines placeholders for x/tools until 1.26.
+
+// NewAlias creates a new TypeName in Package pkg that
+// is an alias for the type rhs.
+//
+// The enabled parameter determines whether the resulting [TypeName]'s
+// type is a [types.Alias]. Its value must be the result of a call to
+// [Enabled], which computes the effective value of
+// GODEBUG=gotypesalias=... by invoking the type checker. The Enabled
+// function is expensive and should be called once per task (e.g.
+// package import), not once per call to NewAlias.
+func NewAlias(enabled bool, pos token.Pos, pkg *types.Package, name string, rhs types.Type) *types.TypeName {
+ if enabled {
+ tname := types.NewTypeName(pos, pkg, name, nil)
+ newAlias(tname, rhs)
+ return tname
+ }
+ return types.NewTypeName(pos, pkg, name, rhs)
+}
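+
+// Illustrative sketch: creating an alias TypeName. pkg and rhs are assumed to
+// come from the caller; Enabled is called once and its result reused, as the
+// comment above recommends.
+//
+// enabled := aliases.Enabled() // expensive; call once per task
+// tname := aliases.NewAlias(enabled, token.NoPos, pkg, "A", rhs)
+// _ = tname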
diff --git a/vendor/golang.org/x/tools/internal/aliases/aliases_go121.go b/vendor/golang.org/x/tools/internal/aliases/aliases_go121.go
new file mode 100644
index 0000000..6652f7d
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/aliases/aliases_go121.go
@@ -0,0 +1,35 @@
+// Copyright 2024 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build !go1.22
+// +build !go1.22
+
+package aliases
+
+import (
+ "go/types"
+)
+
+// Alias is a placeholder for a go/types.Alias for <=1.21.
+// It will never be created by go/types.
+type Alias struct{}
+
+func (*Alias) String() string { panic("unreachable") }
+func (*Alias) Underlying() types.Type { panic("unreachable") }
+func (*Alias) Obj() *types.TypeName { panic("unreachable") }
+func Rhs(alias *Alias) types.Type { panic("unreachable") }
+func TypeParams(alias *Alias) *types.TypeParamList { panic("unreachable") }
+func SetTypeParams(alias *Alias, tparams []*types.TypeParam) { panic("unreachable") }
+func TypeArgs(alias *Alias) *types.TypeList { panic("unreachable") }
+func Origin(alias *Alias) *Alias { panic("unreachable") }
+
+// Unalias returns the type t for go <=1.21.
+func Unalias(t types.Type) types.Type { return t }
+
+func newAlias(name *types.TypeName, rhs types.Type) *Alias { panic("unreachable") }
+
+// Enabled reports whether [NewAlias] should create [types.Alias] types.
+//
+// Before go1.22, this function always returns false.
+func Enabled() bool { return false }
diff --git a/vendor/golang.org/x/tools/internal/aliases/aliases_go122.go b/vendor/golang.org/x/tools/internal/aliases/aliases_go122.go
new file mode 100644
index 0000000..3ef1afe
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/aliases/aliases_go122.go
@@ -0,0 +1,99 @@
+// Copyright 2024 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build go1.22
+// +build go1.22
+
+package aliases
+
+import (
+ "go/ast"
+ "go/parser"
+ "go/token"
+ "go/types"
+)
+
+// Alias is an alias of types.Alias.
+type Alias = types.Alias
+
+// Rhs returns the type on the right-hand side of the alias declaration.
+func Rhs(alias *Alias) types.Type {
+ if alias, ok := any(alias).(interface{ Rhs() types.Type }); ok {
+ return alias.Rhs() // go1.23+
+ }
+
+ // go1.22's Alias didn't have the Rhs method,
+ // so Unalias is the best we can do.
+ return Unalias(alias)
+}
+
+// TypeParams returns the type parameter list of the alias.
+func TypeParams(alias *Alias) *types.TypeParamList {
+ if alias, ok := any(alias).(interface{ TypeParams() *types.TypeParamList }); ok {
+ return alias.TypeParams() // go1.23+
+ }
+ return nil
+}
+
+// SetTypeParams sets the type parameters of the alias type.
+func SetTypeParams(alias *Alias, tparams []*types.TypeParam) {
+ if alias, ok := any(alias).(interface {
+ SetTypeParams(tparams []*types.TypeParam)
+ }); ok {
+ alias.SetTypeParams(tparams) // go1.23+
+ } else if len(tparams) > 0 {
+ panic("cannot set type parameters of an Alias type in go1.22")
+ }
+}
+
+// TypeArgs returns the type arguments used to instantiate the Alias type.
+func TypeArgs(alias *Alias) *types.TypeList {
+ if alias, ok := any(alias).(interface{ TypeArgs() *types.TypeList }); ok {
+ return alias.TypeArgs() // go1.23+
+ }
+ return nil // empty (go1.22)
+}
+
+// Origin returns the generic Alias type of which alias is an instance.
+// If alias is not an instance of a generic alias, Origin returns alias.
+func Origin(alias *Alias) *Alias {
+ if alias, ok := any(alias).(interface{ Origin() *types.Alias }); ok {
+ return alias.Origin() // go1.23+
+ }
+ return alias // not an instance of a generic alias (go1.22)
+}
+
+// Unalias is a wrapper of types.Unalias.
+func Unalias(t types.Type) types.Type { return types.Unalias(t) }
+
+// newAlias is an internal wrapper around types.NewAlias.
+// Direct usage is discouraged at the moment.
+// Try to use NewAlias instead.
+func newAlias(tname *types.TypeName, rhs types.Type) *Alias {
+ a := types.NewAlias(tname, rhs)
+ // TODO(go.dev/issue/65455): Remove kludgy workaround to set a.actual as a side-effect.
+ Unalias(a)
+ return a
+}
+
+// Enabled reports whether [NewAlias] should create [types.Alias] types.
+//
+// This function is expensive! Call it sparingly.
+func Enabled() bool {
+ // The only reliable way to compute the answer is to invoke go/types.
+ // We don't parse the GODEBUG environment variable, because
+ // (a) it's tricky to do so in a manner that is consistent
+ // with the godebug package; in particular, a simple
+ // substring check is not good enough. The value is a
+ // rightmost-wins list of options. But more importantly:
+ // (b) it is impossible to detect changes to the effective
+ // setting caused by os.Setenv("GODEBUG"), as happens in
+ // many tests. Therefore any attempt to cache the result
+ // is just incorrect.
+ fset := token.NewFileSet()
+ f, _ := parser.ParseFile(fset, "a.go", "package p; type A = int", 0)
+ pkg, _ := new(types.Config).Check("p", fset, []*ast.File{f}, nil)
+ _, enabled := pkg.Scope().Lookup("A").Type().(*types.Alias)
+ return enabled
+}
diff --git a/vendor/golang.org/x/tools/internal/event/core/event.go b/vendor/golang.org/x/tools/internal/event/core/event.go
new file mode 100644
index 0000000..a6cf0e6
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/event/core/event.go
@@ -0,0 +1,85 @@
+// Copyright 2019 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package core provides support for event based telemetry.
+package core
+
+import (
+ "fmt"
+ "time"
+
+ "golang.org/x/tools/internal/event/label"
+)
+
+// Event holds the information about an event of note that occurred.
+type Event struct {
+ at time.Time
+
+ // As events are often on the stack, storing the first few labels directly
+ // in the event can avoid an allocation at all for the very common cases of
+ // simple events.
+ // The length needs to be large enough to cope with the majority of events
+// but not so large as to cause undue stack pressure.
+ // A log message with two values will use 3 labels (one for each value and
+ // one for the message itself).
+
+ static [3]label.Label // inline storage for the first few labels
+ dynamic []label.Label // dynamically sized storage for remaining labels
+}
+
+// eventLabelMap implements label.Map for the labels of an Event.
+type eventLabelMap struct {
+ event Event
+}
+
+func (ev Event) At() time.Time { return ev.at }
+
+func (ev Event) Format(f fmt.State, r rune) {
+ if !ev.at.IsZero() {
+ fmt.Fprint(f, ev.at.Format("2006/01/02 15:04:05 "))
+ }
+ for index := 0; ev.Valid(index); index++ {
+ if l := ev.Label(index); l.Valid() {
+ fmt.Fprintf(f, "\n\t%v", l)
+ }
+ }
+}
+
+func (ev Event) Valid(index int) bool {
+ return index >= 0 && index < len(ev.static)+len(ev.dynamic)
+}
+
+func (ev Event) Label(index int) label.Label {
+ if index < len(ev.static) {
+ return ev.static[index]
+ }
+ return ev.dynamic[index-len(ev.static)]
+}
+
+func (ev Event) Find(key label.Key) label.Label {
+ for _, l := range ev.static {
+ if l.Key() == key {
+ return l
+ }
+ }
+ for _, l := range ev.dynamic {
+ if l.Key() == key {
+ return l
+ }
+ }
+ return label.Label{}
+}
+
+func MakeEvent(static [3]label.Label, labels []label.Label) Event {
+ return Event{
+ static: static,
+ dynamic: labels,
+ }
+}
+
+// CloneEvent event returns a copy of the event with the time adjusted to at.
+func CloneEvent(ev Event, at time.Time) Event {
+ ev.at = at
+ return ev
+}
diff --git a/vendor/golang.org/x/tools/internal/event/core/export.go b/vendor/golang.org/x/tools/internal/event/core/export.go
new file mode 100644
index 0000000..05f3a9a
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/event/core/export.go
@@ -0,0 +1,70 @@
+// Copyright 2019 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package core
+
+import (
+ "context"
+ "sync/atomic"
+ "time"
+ "unsafe"
+
+ "golang.org/x/tools/internal/event/label"
+)
+
+// Exporter is a function that handles events.
+// It may return a modified context and event.
+type Exporter func(context.Context, Event, label.Map) context.Context
+
+var (
+ exporter unsafe.Pointer
+)
+
+// SetExporter sets the global exporter function that handles all events.
+// The exporter is called synchronously from the event call site, so it should
+// return quickly so as not to hold up user code.
+func SetExporter(e Exporter) {
+ p := unsafe.Pointer(&e)
+ if e == nil {
+ // &e is always valid, and so p is always valid, but for the early abort
+ // of ProcessEvent to be efficient it needs to make the nil check on the
+ // pointer without having to dereference it, so we make the nil function
+ // also a nil pointer
+ p = nil
+ }
+ atomic.StorePointer(&exporter, p)
+}
+
+// deliver is called to deliver an event to the supplied exporter.
+// it will fill in the time.
+func deliver(ctx context.Context, exporter Exporter, ev Event) context.Context {
+ // add the current time to the event
+ ev.at = time.Now()
+ // hand the event off to the current exporter
+ return exporter(ctx, ev, ev)
+}
+
+// Export is called to deliver an event to the global exporter if set.
+func Export(ctx context.Context, ev Event) context.Context {
+ // get the global exporter and abort early if there is not one
+ exporterPtr := (*Exporter)(atomic.LoadPointer(&exporter))
+ if exporterPtr == nil {
+ return ctx
+ }
+ return deliver(ctx, *exporterPtr, ev)
+}
+
+// ExportPair is called to deliver a start event to the supplied exporter.
+// It also returns a function that will deliver the end event to the same
+// exporter.
+// It will fill in the time.
+func ExportPair(ctx context.Context, begin, end Event) (context.Context, func()) {
+ // get the global exporter and abort early if there is not one
+ exporterPtr := (*Exporter)(atomic.LoadPointer(&exporter))
+ if exporterPtr == nil {
+ return ctx, func() {}
+ }
+ ctx = deliver(ctx, *exporterPtr, begin)
+ return ctx, func() { deliver(ctx, *exporterPtr, end) }
+}
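+
+// Illustrative sketch: registering a minimal exporter that prints every
+// event. The function body is invented for the example; SetExporter, Event,
+// and label.Map are the API defined in this package.
+//
+// core.SetExporter(func(ctx context.Context, ev core.Event, lm label.Map) context.Context {
+//   fmt.Printf("%v\n", ev)
+//   return ctx
+// })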
diff --git a/vendor/golang.org/x/tools/internal/event/core/fast.go b/vendor/golang.org/x/tools/internal/event/core/fast.go
new file mode 100644
index 0000000..06c1d46
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/event/core/fast.go
@@ -0,0 +1,77 @@
+// Copyright 2019 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package core
+
+import (
+ "context"
+
+ "golang.org/x/tools/internal/event/keys"
+ "golang.org/x/tools/internal/event/label"
+)
+
+// Log1 takes a message and one label and delivers a log event to the exporter.
+// It is a customized version of Print that is faster and does no allocation.
+func Log1(ctx context.Context, message string, t1 label.Label) {
+ Export(ctx, MakeEvent([3]label.Label{
+ keys.Msg.Of(message),
+ t1,
+ }, nil))
+}
+
+// Log2 takes a message and two labels and delivers a log event to the exporter.
+// It is a customized version of Print that is faster and does no allocation.
+func Log2(ctx context.Context, message string, t1 label.Label, t2 label.Label) {
+ Export(ctx, MakeEvent([3]label.Label{
+ keys.Msg.Of(message),
+ t1,
+ t2,
+ }, nil))
+}
+
+// Metric1 sends a label event to the exporter with the supplied labels.
+func Metric1(ctx context.Context, t1 label.Label) context.Context {
+ return Export(ctx, MakeEvent([3]label.Label{
+ keys.Metric.New(),
+ t1,
+ }, nil))
+}
+
+// Metric2 sends a label event to the exporter with the supplied labels.
+func Metric2(ctx context.Context, t1, t2 label.Label) context.Context {
+ return Export(ctx, MakeEvent([3]label.Label{
+ keys.Metric.New(),
+ t1,
+ t2,
+ }, nil))
+}
+
+// Start1 sends a span start event with the supplied label list to the exporter.
+// It also returns a function that will end the span, which should normally be
+// deferred.
+func Start1(ctx context.Context, name string, t1 label.Label) (context.Context, func()) {
+ return ExportPair(ctx,
+ MakeEvent([3]label.Label{
+ keys.Start.Of(name),
+ t1,
+ }, nil),
+ MakeEvent([3]label.Label{
+ keys.End.New(),
+ }, nil))
+}
+
+// Start2 sends a span start event with the supplied label list to the exporter.
+// It also returns a function that will end the span, which should normally be
+// deferred.
+func Start2(ctx context.Context, name string, t1, t2 label.Label) (context.Context, func()) {
+ return ExportPair(ctx,
+ MakeEvent([3]label.Label{
+ keys.Start.Of(name),
+ t1,
+ t2,
+ }, nil),
+ MakeEvent([3]label.Label{
+ keys.End.New(),
+ }, nil))
+}
diff --git a/vendor/golang.org/x/tools/internal/event/doc.go b/vendor/golang.org/x/tools/internal/event/doc.go
new file mode 100644
index 0000000..5dc6e6b
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/event/doc.go
@@ -0,0 +1,7 @@
+// Copyright 2019 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package event provides a set of packages that cover the main
+// concepts of telemetry in an implementation agnostic way.
+package event
diff --git a/vendor/golang.org/x/tools/internal/event/event.go b/vendor/golang.org/x/tools/internal/event/event.go
new file mode 100644
index 0000000..4d55e57
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/event/event.go
@@ -0,0 +1,127 @@
+// Copyright 2019 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package event
+
+import (
+ "context"
+
+ "golang.org/x/tools/internal/event/core"
+ "golang.org/x/tools/internal/event/keys"
+ "golang.org/x/tools/internal/event/label"
+)
+
+// Exporter is a function that handles events.
+// It may return a modified context and event.
+type Exporter func(context.Context, core.Event, label.Map) context.Context
+
+// SetExporter sets the global exporter function that handles all events.
+// The exporter is called synchronously from the event call site, so it should
+// return quickly so as not to hold up user code.
+func SetExporter(e Exporter) {
+ core.SetExporter(core.Exporter(e))
+}
+
+// Log takes a message and a label list and combines them into a single event
+// before delivering them to the exporter.
+func Log(ctx context.Context, message string, labels ...label.Label) {
+ core.Export(ctx, core.MakeEvent([3]label.Label{
+ keys.Msg.Of(message),
+ }, labels))
+}
+
+// IsLog returns true if the event was built by the Log function.
+// It is intended to be used in exporters to identify the semantics of the
+// event when deciding what to do with it.
+func IsLog(ev core.Event) bool {
+ return ev.Label(0).Key() == keys.Msg
+}
+
+// Error takes a message and a label list and combines them into a single event
+// before delivering them to the exporter. It captures the error in the
+// delivered event.
+func Error(ctx context.Context, message string, err error, labels ...label.Label) {
+ core.Export(ctx, core.MakeEvent([3]label.Label{
+ keys.Msg.Of(message),
+ keys.Err.Of(err),
+ }, labels))
+}
+
+// IsError returns true if the event was built by the Error function.
+// It is intended to be used in exporters to identify the semantics of the
+// event when deciding what to do with it.
+func IsError(ev core.Event) bool {
+ return ev.Label(0).Key() == keys.Msg &&
+ ev.Label(1).Key() == keys.Err
+}
+
+// Metric sends a label event to the exporter with the supplied labels.
+func Metric(ctx context.Context, labels ...label.Label) {
+ core.Export(ctx, core.MakeEvent([3]label.Label{
+ keys.Metric.New(),
+ }, labels))
+}
+
+// IsMetric returns true if the event was built by the Metric function.
+// It is intended to be used in exporters to identify the semantics of the
+// event when deciding what to do with it.
+func IsMetric(ev core.Event) bool {
+ return ev.Label(0).Key() == keys.Metric
+}
+
+// Label sends a label event to the exporter with the supplied labels.
+func Label(ctx context.Context, labels ...label.Label) context.Context {
+ return core.Export(ctx, core.MakeEvent([3]label.Label{
+ keys.Label.New(),
+ }, labels))
+}
+
+// IsLabel returns true if the event was built by the Label function.
+// It is intended to be used in exporters to identify the semantics of the
+// event when deciding what to do with it.
+func IsLabel(ev core.Event) bool {
+ return ev.Label(0).Key() == keys.Label
+}
+
+// Start sends a span start event with the supplied label list to the exporter.
+// It also returns a function that will end the span, which should normally be
+// deferred.
+func Start(ctx context.Context, name string, labels ...label.Label) (context.Context, func()) {
+ return core.ExportPair(ctx,
+ core.MakeEvent([3]label.Label{
+ keys.Start.Of(name),
+ }, labels),
+ core.MakeEvent([3]label.Label{
+ keys.End.New(),
+ }, nil))
+}
+
+// IsStart returns true if the event was built by the Start function.
+// It is intended to be used in exporters to identify the semantics of the
+// event when deciding what to do with it.
+func IsStart(ev core.Event) bool {
+ return ev.Label(0).Key() == keys.Start
+}
+
+// IsEnd returns true if the event was built by the End function.
+// It is intended to be used in exporters to identify the semantics of the
+// event when deciding what to do with it.
+func IsEnd(ev core.Event) bool {
+ return ev.Label(0).Key() == keys.End
+}
+
+// Detach returns a context without an associated span.
+// This allows the creation of spans that are not children of the current span.
+func Detach(ctx context.Context) context.Context {
+ return core.Export(ctx, core.MakeEvent([3]label.Label{
+ keys.Detach.New(),
+ }, nil))
+}
+
+// IsDetach returns true if the event was built by the Detach function.
+// It is intended to be used in exporters to identify the semantics of the
+// event when deciding what to do with it.
+func IsDetach(ev core.Event) bool {
+ return ev.Label(0).Key() == keys.Detach
+}
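+
+// Illustrative sketch: instrumenting a function with a span and a log event.
+// The function and operation name are invented for the example.
+//
+// func doWork(ctx context.Context) {
+//   ctx, done := event.Start(ctx, "doWork")
+//   defer done()
+//   event.Log(ctx, "starting work")
+//   // ... the actual work ...
+// }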
diff --git a/vendor/golang.org/x/tools/internal/event/keys/keys.go b/vendor/golang.org/x/tools/internal/event/keys/keys.go
new file mode 100644
index 0000000..a02206e
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/event/keys/keys.go
@@ -0,0 +1,564 @@
+// Copyright 2019 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package keys
+
+import (
+ "fmt"
+ "io"
+ "math"
+ "strconv"
+
+ "golang.org/x/tools/internal/event/label"
+)
+
+// Value represents a key for untyped values.
+type Value struct {
+ name string
+ description string
+}
+
+// New creates a new Key for untyped values.
+func New(name, description string) *Value {
+ return &Value{name: name, description: description}
+}
+
+func (k *Value) Name() string { return k.name }
+func (k *Value) Description() string { return k.description }
+
+func (k *Value) Format(w io.Writer, buf []byte, l label.Label) {
+ fmt.Fprint(w, k.From(l))
+}
+
+// Get can be used to get a label for the key from a label.Map.
+func (k *Value) Get(lm label.Map) interface{} {
+ if t := lm.Find(k); t.Valid() {
+ return k.From(t)
+ }
+ return nil
+}
+
+// From can be used to get a value from a Label.
+func (k *Value) From(t label.Label) interface{} { return t.UnpackValue() }
+
+// Of creates a new Label with this key and the supplied value.
+func (k *Value) Of(value interface{}) label.Label { return label.OfValue(k, value) }
+
+// Tag represents a key for tagging labels that have no value.
+// These are used when the existence of the label is the entire information it
+// carries, such as marking events to be of a specific kind, or from a specific
+// package.
+type Tag struct {
+ name string
+ description string
+}
+
+// NewTag creates a new Key for tagging labels.
+func NewTag(name, description string) *Tag {
+ return &Tag{name: name, description: description}
+}
+
+func (k *Tag) Name() string { return k.name }
+func (k *Tag) Description() string { return k.description }
+
+func (k *Tag) Format(w io.Writer, buf []byte, l label.Label) {}
+
+// New creates a new Label with this key.
+func (k *Tag) New() label.Label { return label.OfValue(k, nil) }
+
+// Int represents a key for int values.
+type Int struct {
+ name string
+ description string
+}
+
+// NewInt creates a new Key for int values.
+func NewInt(name, description string) *Int {
+ return &Int{name: name, description: description}
+}
+
+func (k *Int) Name() string { return k.name }
+func (k *Int) Description() string { return k.description }
+
+func (k *Int) Format(w io.Writer, buf []byte, l label.Label) {
+ w.Write(strconv.AppendInt(buf, int64(k.From(l)), 10))
+}
+
+// Of creates a new Label with this key and the supplied value.
+func (k *Int) Of(v int) label.Label { return label.Of64(k, uint64(v)) }
+
+// Get can be used to get a label for the key from a label.Map.
+func (k *Int) Get(lm label.Map) int {
+ if t := lm.Find(k); t.Valid() {
+ return k.From(t)
+ }
+ return 0
+}
+
+// From can be used to get a value from a Label.
+func (k *Int) From(t label.Label) int { return int(t.Unpack64()) }
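+
+// Illustrative sketch: defining an Int key, packing it into a label, and
+// reading the value back. The key name and value are invented for the
+// example.
+//
+// bytesKey := keys.NewInt("bytes", "number of bytes processed")
+// l := bytesKey.Of(42)          // a label.Label carrying 42
+// fmt.Println(bytesKey.From(l)) // 42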
+
+// Int8 represents a key for int8 values.
+type Int8 struct {
+ name string
+ description string
+}
+
+// NewInt8 creates a new Key for int8 values.
+func NewInt8(name, description string) *Int8 {
+ return &Int8{name: name, description: description}
+}
+
+func (k *Int8) Name() string { return k.name }
+func (k *Int8) Description() string { return k.description }
+
+func (k *Int8) Format(w io.Writer, buf []byte, l label.Label) {
+ w.Write(strconv.AppendInt(buf, int64(k.From(l)), 10))
+}
+
+// Of creates a new Label with this key and the supplied value.
+func (k *Int8) Of(v int8) label.Label { return label.Of64(k, uint64(v)) }
+
+// Get can be used to get a label for the key from a label.Map.
+func (k *Int8) Get(lm label.Map) int8 {
+ if t := lm.Find(k); t.Valid() {
+ return k.From(t)
+ }
+ return 0
+}
+
+// From can be used to get a value from a Label.
+func (k *Int8) From(t label.Label) int8 { return int8(t.Unpack64()) }
+
+// Int16 represents a key for int16 values.
+type Int16 struct {
+ name string
+ description string
+}
+
+// NewInt16 creates a new Key for int16 values.
+func NewInt16(name, description string) *Int16 {
+ return &Int16{name: name, description: description}
+}
+
+func (k *Int16) Name() string { return k.name }
+func (k *Int16) Description() string { return k.description }
+
+func (k *Int16) Format(w io.Writer, buf []byte, l label.Label) {
+ w.Write(strconv.AppendInt(buf, int64(k.From(l)), 10))
+}
+
+// Of creates a new Label with this key and the supplied value.
+func (k *Int16) Of(v int16) label.Label { return label.Of64(k, uint64(v)) }
+
+// Get can be used to get a label for the key from a label.Map.
+func (k *Int16) Get(lm label.Map) int16 {
+ if t := lm.Find(k); t.Valid() {
+ return k.From(t)
+ }
+ return 0
+}
+
+// From can be used to get a value from a Label.
+func (k *Int16) From(t label.Label) int16 { return int16(t.Unpack64()) }
+
+// Int32 represents a key for int32 values.
+type Int32 struct {
+ name string
+ description string
+}
+
+// NewInt32 creates a new Key for int32 values.
+func NewInt32(name, description string) *Int32 {
+ return &Int32{name: name, description: description}
+}
+
+func (k *Int32) Name() string { return k.name }
+func (k *Int32) Description() string { return k.description }
+
+func (k *Int32) Format(w io.Writer, buf []byte, l label.Label) {
+ w.Write(strconv.AppendInt(buf, int64(k.From(l)), 10))
+}
+
+// Of creates a new Label with this key and the supplied value.
+func (k *Int32) Of(v int32) label.Label { return label.Of64(k, uint64(v)) }
+
+// Get can be used to get a label for the key from a label.Map.
+func (k *Int32) Get(lm label.Map) int32 {
+ if t := lm.Find(k); t.Valid() {
+ return k.From(t)
+ }
+ return 0
+}
+
+// From can be used to get a value from a Label.
+func (k *Int32) From(t label.Label) int32 { return int32(t.Unpack64()) }
+
+// Int64 represents a key for int64 values.
+type Int64 struct {
+ name string
+ description string
+}
+
+// NewInt64 creates a new Key for int64 values.
+func NewInt64(name, description string) *Int64 {
+ return &Int64{name: name, description: description}
+}
+
+func (k *Int64) Name() string { return k.name }
+func (k *Int64) Description() string { return k.description }
+
+func (k *Int64) Format(w io.Writer, buf []byte, l label.Label) {
+ w.Write(strconv.AppendInt(buf, k.From(l), 10))
+}
+
+// Of creates a new Label with this key and the supplied value.
+func (k *Int64) Of(v int64) label.Label { return label.Of64(k, uint64(v)) }
+
+// Get can be used to get a label for the key from a label.Map.
+func (k *Int64) Get(lm label.Map) int64 {
+ if t := lm.Find(k); t.Valid() {
+ return k.From(t)
+ }
+ return 0
+}
+
+// From can be used to get a value from a Label.
+func (k *Int64) From(t label.Label) int64 { return int64(t.Unpack64()) }
+
+// UInt represents a key for uint values.
+type UInt struct {
+ name string
+ description string
+}
+
+// NewUInt creates a new Key for uint values.
+func NewUInt(name, description string) *UInt {
+ return &UInt{name: name, description: description}
+}
+
+func (k *UInt) Name() string { return k.name }
+func (k *UInt) Description() string { return k.description }
+
+func (k *UInt) Format(w io.Writer, buf []byte, l label.Label) {
+ w.Write(strconv.AppendUint(buf, uint64(k.From(l)), 10))
+}
+
+// Of creates a new Label with this key and the supplied value.
+func (k *UInt) Of(v uint) label.Label { return label.Of64(k, uint64(v)) }
+
+// Get can be used to get a label for the key from a label.Map.
+func (k *UInt) Get(lm label.Map) uint {
+ if t := lm.Find(k); t.Valid() {
+ return k.From(t)
+ }
+ return 0
+}
+
+// From can be used to get a value from a Label.
+func (k *UInt) From(t label.Label) uint { return uint(t.Unpack64()) }
+
+// UInt8 represents a key for uint8 values.
+type UInt8 struct {
+ name string
+ description string
+}
+
+// NewUInt8 creates a new Key for uint8 values.
+func NewUInt8(name, description string) *UInt8 {
+ return &UInt8{name: name, description: description}
+}
+
+func (k *UInt8) Name() string { return k.name }
+func (k *UInt8) Description() string { return k.description }
+
+func (k *UInt8) Format(w io.Writer, buf []byte, l label.Label) {
+ w.Write(strconv.AppendUint(buf, uint64(k.From(l)), 10))
+}
+
+// Of creates a new Label with this key and the supplied value.
+func (k *UInt8) Of(v uint8) label.Label { return label.Of64(k, uint64(v)) }
+
+// Get can be used to get a label for the key from a label.Map.
+func (k *UInt8) Get(lm label.Map) uint8 {
+ if t := lm.Find(k); t.Valid() {
+ return k.From(t)
+ }
+ return 0
+}
+
+// From can be used to get a value from a Label.
+func (k *UInt8) From(t label.Label) uint8 { return uint8(t.Unpack64()) }
+
+// UInt16 represents a key for uint16 values.
+type UInt16 struct {
+ name string
+ description string
+}
+
+// NewUInt16 creates a new Key for uint16 values.
+func NewUInt16(name, description string) *UInt16 {
+ return &UInt16{name: name, description: description}
+}
+
+func (k *UInt16) Name() string { return k.name }
+func (k *UInt16) Description() string { return k.description }
+
+func (k *UInt16) Format(w io.Writer, buf []byte, l label.Label) {
+ w.Write(strconv.AppendUint(buf, uint64(k.From(l)), 10))
+}
+
+// Of creates a new Label with this key and the supplied value.
+func (k *UInt16) Of(v uint16) label.Label { return label.Of64(k, uint64(v)) }
+
+// Get can be used to get a label for the key from a label.Map.
+func (k *UInt16) Get(lm label.Map) uint16 {
+ if t := lm.Find(k); t.Valid() {
+ return k.From(t)
+ }
+ return 0
+}
+
+// From can be used to get a value from a Label.
+func (k *UInt16) From(t label.Label) uint16 { return uint16(t.Unpack64()) }
+
+// UInt32 represents a key for uint32 values.
+type UInt32 struct {
+ name string
+ description string
+}
+
+// NewUInt32 creates a new Key for uint32 values.
+func NewUInt32(name, description string) *UInt32 {
+ return &UInt32{name: name, description: description}
+}
+
+func (k *UInt32) Name() string { return k.name }
+func (k *UInt32) Description() string { return k.description }
+
+func (k *UInt32) Format(w io.Writer, buf []byte, l label.Label) {
+ w.Write(strconv.AppendUint(buf, uint64(k.From(l)), 10))
+}
+
+// Of creates a new Label with this key and the supplied value.
+func (k *UInt32) Of(v uint32) label.Label { return label.Of64(k, uint64(v)) }
+
+// Get can be used to get a label for the key from a label.Map.
+func (k *UInt32) Get(lm label.Map) uint32 {
+ if t := lm.Find(k); t.Valid() {
+ return k.From(t)
+ }
+ return 0
+}
+
+// From can be used to get a value from a Label.
+func (k *UInt32) From(t label.Label) uint32 { return uint32(t.Unpack64()) }
+
+// UInt64 represents a key for uint64 values.
+type UInt64 struct {
+ name string
+ description string
+}
+
+// NewUInt64 creates a new Key for uint64 values.
+func NewUInt64(name, description string) *UInt64 {
+ return &UInt64{name: name, description: description}
+}
+
+func (k *UInt64) Name() string { return k.name }
+func (k *UInt64) Description() string { return k.description }
+
+func (k *UInt64) Format(w io.Writer, buf []byte, l label.Label) {
+ w.Write(strconv.AppendUint(buf, k.From(l), 10))
+}
+
+// Of creates a new Label with this key and the supplied value.
+func (k *UInt64) Of(v uint64) label.Label { return label.Of64(k, v) }
+
+// Get can be used to get a label for the key from a label.Map.
+func (k *UInt64) Get(lm label.Map) uint64 {
+ if t := lm.Find(k); t.Valid() {
+ return k.From(t)
+ }
+ return 0
+}
+
+// From can be used to get a value from a Label.
+func (k *UInt64) From(t label.Label) uint64 { return t.Unpack64() }
+
+// Float32 represents a key for float32 values.
+type Float32 struct {
+ name string
+ description string
+}
+
+// NewFloat32 creates a new Key for float32 values.
+func NewFloat32(name, description string) *Float32 {
+ return &Float32{name: name, description: description}
+}
+
+func (k *Float32) Name() string { return k.name }
+func (k *Float32) Description() string { return k.description }
+
+func (k *Float32) Format(w io.Writer, buf []byte, l label.Label) {
+ w.Write(strconv.AppendFloat(buf, float64(k.From(l)), 'E', -1, 32))
+}
+
+// Of creates a new Label with this key and the supplied value.
+func (k *Float32) Of(v float32) label.Label {
+ return label.Of64(k, uint64(math.Float32bits(v)))
+}
+
+// Get can be used to get a label for the key from a label.Map.
+func (k *Float32) Get(lm label.Map) float32 {
+ if t := lm.Find(k); t.Valid() {
+ return k.From(t)
+ }
+ return 0
+}
+
+// From can be used to get a value from a Label.
+func (k *Float32) From(t label.Label) float32 {
+ return math.Float32frombits(uint32(t.Unpack64()))
+}
+
+// Float64 represents a key for float64 values.
+type Float64 struct {
+ name string
+ description string
+}
+
+// NewFloat64 creates a new Key for float64 values.
+func NewFloat64(name, description string) *Float64 {
+ return &Float64{name: name, description: description}
+}
+
+func (k *Float64) Name() string { return k.name }
+func (k *Float64) Description() string { return k.description }
+
+func (k *Float64) Format(w io.Writer, buf []byte, l label.Label) {
+ w.Write(strconv.AppendFloat(buf, k.From(l), 'E', -1, 64))
+}
+
+// Of creates a new Label with this key and the supplied value.
+func (k *Float64) Of(v float64) label.Label {
+ return label.Of64(k, math.Float64bits(v))
+}
+
+// Get can be used to get a label for the key from a label.Map.
+func (k *Float64) Get(lm label.Map) float64 {
+ if t := lm.Find(k); t.Valid() {
+ return k.From(t)
+ }
+ return 0
+}
+
+// From can be used to get a value from a Label.
+func (k *Float64) From(t label.Label) float64 {
+ return math.Float64frombits(t.Unpack64())
+}
+
+// String represents a key for string values.
+type String struct {
+ name string
+ description string
+}
+
+// NewString creates a new Key for string values.
+func NewString(name, description string) *String {
+ return &String{name: name, description: description}
+}
+
+func (k *String) Name() string { return k.name }
+func (k *String) Description() string { return k.description }
+
+func (k *String) Format(w io.Writer, buf []byte, l label.Label) {
+ w.Write(strconv.AppendQuote(buf, k.From(l)))
+}
+
+// Of creates a new Label with this key and the supplied value.
+func (k *String) Of(v string) label.Label { return label.OfString(k, v) }
+
+// Get can be used to get a label for the key from a label.Map.
+func (k *String) Get(lm label.Map) string {
+ if t := lm.Find(k); t.Valid() {
+ return k.From(t)
+ }
+ return ""
+}
+
+// From can be used to get a value from a Label.
+func (k *String) From(t label.Label) string { return t.UnpackString() }
+
+// Boolean represents a key for bool values.
+type Boolean struct {
+ name string
+ description string
+}
+
+// NewBoolean creates a new Key for bool values.
+func NewBoolean(name, description string) *Boolean {
+ return &Boolean{name: name, description: description}
+}
+
+func (k *Boolean) Name() string { return k.name }
+func (k *Boolean) Description() string { return k.description }
+
+func (k *Boolean) Format(w io.Writer, buf []byte, l label.Label) {
+ w.Write(strconv.AppendBool(buf, k.From(l)))
+}
+
+// Of creates a new Label with this key and the supplied value.
+func (k *Boolean) Of(v bool) label.Label {
+ if v {
+ return label.Of64(k, 1)
+ }
+ return label.Of64(k, 0)
+}
+
+// Get can be used to get a label for the key from a label.Map.
+func (k *Boolean) Get(lm label.Map) bool {
+ if t := lm.Find(k); t.Valid() {
+ return k.From(t)
+ }
+ return false
+}
+
+// From can be used to get a value from a Label.
+func (k *Boolean) From(t label.Label) bool { return t.Unpack64() > 0 }
+
+// Error represents a key for error values.
+type Error struct {
+ name string
+ description string
+}
+
+// NewError creates a new Key for error values.
+func NewError(name, description string) *Error {
+ return &Error{name: name, description: description}
+}
+
+func (k *Error) Name() string { return k.name }
+func (k *Error) Description() string { return k.description }
+
+func (k *Error) Format(w io.Writer, buf []byte, l label.Label) {
+ io.WriteString(w, k.From(l).Error())
+}
+
+// Of creates a new Label with this key and the supplied value.
+func (k *Error) Of(v error) label.Label { return label.OfValue(k, v) }
+
+// Get can be used to get a label for the key from a label.Map.
+func (k *Error) Get(lm label.Map) error {
+ if t := lm.Find(k); t.Valid() {
+ return k.From(t)
+ }
+ return nil
+}
+
+// From can be used to get a value from a Label.
+func (k *Error) From(t label.Label) error {
+ err, _ := t.UnpackValue().(error)
+ return err
+}
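+
+// Usage sketch (illustrative only; reqKey, l, and lm are hypothetical names,
+// not part of this package):
+//
+//	reqKey := NewInt64("requests", "number of requests handled")
+//	l := reqKey.Of(42)    // pack the value into a label.Label
+//	lm := label.NewMap(l) // label.NewMap is defined in the label package
+//	n := reqKey.Get(lm)   // n == 42; a missing key yields the zero value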
diff --git a/vendor/golang.org/x/tools/internal/event/keys/standard.go b/vendor/golang.org/x/tools/internal/event/keys/standard.go
new file mode 100644
index 0000000..7e95866
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/event/keys/standard.go
@@ -0,0 +1,22 @@
+// Copyright 2020 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package keys
+
+var (
+ // Msg is a key used to add message strings to label lists.
+ Msg = NewString("message", "a readable message")
+ // Label is a key used to indicate an event adds labels to the context.
+ Label = NewTag("label", "a label context marker")
+ // Start is used for things like traces that have a name.
+ Start = NewString("start", "span start")
+ // End is a key used to indicate an event ends a span.
+ End = NewTag("end", "a span end marker")
+ // Detach is a key used to indicate an event detaches a span.
+ Detach = NewTag("detach", "a span detach marker")
+ // Err is a key used to add error values to label lists.
+ Err = NewError("error", "an error that occurred")
+ // Metric is a key used to indicate an event records metrics.
+ Metric = NewTag("metric", "a metric event marker")
+)
diff --git a/vendor/golang.org/x/tools/internal/event/keys/util.go b/vendor/golang.org/x/tools/internal/event/keys/util.go
new file mode 100644
index 0000000..c0e8e73
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/event/keys/util.go
@@ -0,0 +1,21 @@
+// Copyright 2023 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package keys
+
+import (
+ "sort"
+ "strings"
+)
+
+// Join returns a canonical join of the keys in S:
+// a sorted comma-separated string list.
+func Join[S ~[]T, T ~string](s S) string {
+ strs := make([]string, 0, len(s))
+ for _, v := range s {
+ strs = append(strs, string(v))
+ }
+ sort.Strings(strs)
+ return strings.Join(strs, ",")
+}
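+
+// For example (illustrative): Join([]string{"linux", "amd64", "cgo"}) returns
+// "amd64,cgo,linux"; the elements are sorted before being joined.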
diff --git a/vendor/golang.org/x/tools/internal/event/label/label.go b/vendor/golang.org/x/tools/internal/event/label/label.go
new file mode 100644
index 0000000..0f526e1
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/event/label/label.go
@@ -0,0 +1,215 @@
+// Copyright 2019 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package label
+
+import (
+ "fmt"
+ "io"
+ "reflect"
+ "unsafe"
+)
+
+// Key is used as the identity of a Label.
+// Keys are intended to be compared by pointer only; the name should be unique
+// for communicating with external systems, but that is not required or enforced.
+type Key interface {
+ // Name returns the key name.
+ Name() string
+ // Description returns a string that can be used to describe the value.
+ Description() string
+
+ // Format is used in formatting to append the value of the label to the
+ // supplied buffer.
+ // The formatter may use the supplied buf as a scratch area to avoid
+ // allocations.
+ Format(w io.Writer, buf []byte, l Label)
+}
+
+// Label holds a key and value pair.
+// It is normally used when passing around lists of labels.
+type Label struct {
+ key Key
+ packed uint64
+ untyped interface{}
+}
+
+// Map is the interface to a collection of Labels indexed by key.
+type Map interface {
+ // Find returns the label that matches the supplied key.
+ Find(key Key) Label
+}
+
+// List is the interface to something that provides an iterable
+// list of labels.
+// Iteration should start from 0 and continue until Valid returns false.
+type List interface {
+ // Valid returns true if the index is within range for the list.
+ // It does not imply the label at that index will itself be valid.
+ Valid(index int) bool
+ // Label returns the label at the given index.
+ Label(index int) Label
+}
+
+// list implements LabelList for a list of Labels.
+type list struct {
+ labels []Label
+}
+
+// filter wraps a LabelList filtering out specific labels.
+type filter struct {
+ keys []Key
+ underlying List
+}
+
+// listMap implements LabelMap for a simple list of labels.
+type listMap struct {
+ labels []Label
+}
+
+// mapChain implements LabelMap for a list of underlying LabelMap.
+type mapChain struct {
+ maps []Map
+}
+
+// OfValue creates a new label from the key and value.
+// This method is for implementing new key types, label creation should
+// normally be done with the Of method of the key.
+func OfValue(k Key, value interface{}) Label { return Label{key: k, untyped: value} }
+
+// UnpackValue assumes the label was built using OfValue and returns the value
+// that was passed to that constructor.
+// This method is for implementing new key types, for type safety normal
+// access should be done with the From method of the key.
+func (t Label) UnpackValue() interface{} { return t.untyped }
+
+// Of64 creates a new label from a key and a uint64. This is often
+// used for non uint64 values that can be packed into a uint64.
+// This method is for implementing new key types, label creation should
+// normally be done with the Of method of the key.
+func Of64(k Key, v uint64) Label { return Label{key: k, packed: v} }
+
+// Unpack64 assumes the label was built using Of64 and returns the value that
+// was passed to that constructor.
+// This method is for implementing new key types, for type safety normal
+// access should be done with the From method of the key.
+func (t Label) Unpack64() uint64 { return t.packed }
+
+type stringptr unsafe.Pointer
+
+// OfString creates a new label from a key and a string.
+// This method is for implementing new key types, label creation should
+// normally be done with the Of method of the key.
+func OfString(k Key, v string) Label {
+ hdr := (*reflect.StringHeader)(unsafe.Pointer(&v))
+ return Label{
+ key: k,
+ packed: uint64(hdr.Len),
+ untyped: stringptr(hdr.Data),
+ }
+}
+
+// UnpackString assumes the label was built using OfString and returns the
+// value that was passed to that constructor.
+// This method is for implementing new key types, for type safety normal
+// access should be done with the From method of the key.
+func (t Label) UnpackString() string {
+ var v string
+ hdr := (*reflect.StringHeader)(unsafe.Pointer(&v))
+ hdr.Data = uintptr(t.untyped.(stringptr))
+ hdr.Len = int(t.packed)
+ return v
+}
+
+// Valid returns true if the Label is a valid one (it has a key).
+func (t Label) Valid() bool { return t.key != nil }
+
+// Key returns the key of this Label.
+func (t Label) Key() Key { return t.key }
+
+// Format is used for debug printing of labels.
+func (t Label) Format(f fmt.State, r rune) {
+ if !t.Valid() {
+ io.WriteString(f, `nil`)
+ return
+ }
+ io.WriteString(f, t.Key().Name())
+ io.WriteString(f, "=")
+ var buf [128]byte
+ t.Key().Format(f, buf[:0], t)
+}
+
+func (l *list) Valid(index int) bool {
+ return index >= 0 && index < len(l.labels)
+}
+
+func (l *list) Label(index int) Label {
+ return l.labels[index]
+}
+
+func (f *filter) Valid(index int) bool {
+ return f.underlying.Valid(index)
+}
+
+func (f *filter) Label(index int) Label {
+ l := f.underlying.Label(index)
+ for _, f := range f.keys {
+ if l.Key() == f {
+ return Label{}
+ }
+ }
+ return l
+}
+
+func (lm listMap) Find(key Key) Label {
+ for _, l := range lm.labels {
+ if l.Key() == key {
+ return l
+ }
+ }
+ return Label{}
+}
+
+func (c mapChain) Find(key Key) Label {
+ for _, src := range c.maps {
+ l := src.Find(key)
+ if l.Valid() {
+ return l
+ }
+ }
+ return Label{}
+}
+
+var emptyList = &list{}
+
+func NewList(labels ...Label) List {
+ if len(labels) == 0 {
+ return emptyList
+ }
+ return &list{labels: labels}
+}
+
+func Filter(l List, keys ...Key) List {
+ if len(keys) == 0 {
+ return l
+ }
+ return &filter{keys: keys, underlying: l}
+}
+
+func NewMap(labels ...Label) Map {
+ return listMap{labels: labels}
+}
+
+func MergeMaps(srcs ...Map) Map {
+ var nonNil []Map
+ for _, src := range srcs {
+ if src != nil {
+ nonNil = append(nonNil, src)
+ }
+ }
+ if len(nonNil) == 1 {
+ return nonNil[0]
+ }
+ return mapChain{maps: nonNil}
+}
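+
+// Usage sketch (illustrative only; k is a hypothetical key such as one created
+// by the keys package):
+//
+//	a := NewMap(k.Of(1))
+//	b := NewMap(k.Of(2))
+//	m := MergeMaps(a, b) // maps are searched in order, so m.Find(k) returns a's label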
diff --git a/vendor/golang.org/x/tools/internal/gcimporter/bimport.go b/vendor/golang.org/x/tools/internal/gcimporter/bimport.go
new file mode 100644
index 0000000..d98b0db
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/gcimporter/bimport.go
@@ -0,0 +1,150 @@
+// Copyright 2015 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// This file contains the remaining vestiges of
+// $GOROOT/src/go/internal/gcimporter/bimport.go.
+
+package gcimporter
+
+import (
+ "fmt"
+ "go/token"
+ "go/types"
+ "sync"
+)
+
+func errorf(format string, args ...interface{}) {
+ panic(fmt.Sprintf(format, args...))
+}
+
+const deltaNewFile = -64 // see cmd/compile/internal/gc/bexport.go
+
+// Synthesize a token.Pos
+type fakeFileSet struct {
+ fset *token.FileSet
+ files map[string]*fileInfo
+}
+
+type fileInfo struct {
+ file *token.File
+ lastline int
+}
+
+const maxlines = 64 * 1024
+
+func (s *fakeFileSet) pos(file string, line, column int) token.Pos {
+ // TODO(mdempsky): Make use of column.
+
+ // Since we don't know the set of needed file positions, we reserve maxlines
+ // positions per file. We delay calling token.File.SetLines until all
+ // positions have been calculated (by way of fakeFileSet.setLines), so that
+ // we can avoid setting unnecessary lines. See also golang/go#46586.
+ f := s.files[file]
+ if f == nil {
+ f = &fileInfo{file: s.fset.AddFile(file, -1, maxlines)}
+ s.files[file] = f
+ }
+ if line > maxlines {
+ line = 1
+ }
+ if line > f.lastline {
+ f.lastline = line
+ }
+
+ // Return a fake position assuming that f.file consists only of newlines.
+ return token.Pos(f.file.Base() + line - 1)
+}
+
+func (s *fakeFileSet) setLines() {
+ fakeLinesOnce.Do(func() {
+ fakeLines = make([]int, maxlines)
+ for i := range fakeLines {
+ fakeLines[i] = i
+ }
+ })
+ for _, f := range s.files {
+ f.file.SetLines(fakeLines[:f.lastline])
+ }
+}
+
+var (
+ fakeLines []int
+ fakeLinesOnce sync.Once
+)
+
+func chanDir(d int) types.ChanDir {
+ // tag values must match the constants in cmd/compile/internal/gc/go.go
+ switch d {
+ case 1 /* Crecv */ :
+ return types.RecvOnly
+ case 2 /* Csend */ :
+ return types.SendOnly
+ case 3 /* Cboth */ :
+ return types.SendRecv
+ default:
+ errorf("unexpected channel dir %d", d)
+ return 0
+ }
+}
+
+var predeclOnce sync.Once
+var predecl []types.Type // initialized lazily
+
+func predeclared() []types.Type {
+ predeclOnce.Do(func() {
+ // initialize lazily to be sure that all
+ // elements have been initialized before
+ predecl = []types.Type{ // basic types
+ types.Typ[types.Bool],
+ types.Typ[types.Int],
+ types.Typ[types.Int8],
+ types.Typ[types.Int16],
+ types.Typ[types.Int32],
+ types.Typ[types.Int64],
+ types.Typ[types.Uint],
+ types.Typ[types.Uint8],
+ types.Typ[types.Uint16],
+ types.Typ[types.Uint32],
+ types.Typ[types.Uint64],
+ types.Typ[types.Uintptr],
+ types.Typ[types.Float32],
+ types.Typ[types.Float64],
+ types.Typ[types.Complex64],
+ types.Typ[types.Complex128],
+ types.Typ[types.String],
+
+ // basic type aliases
+ types.Universe.Lookup("byte").Type(),
+ types.Universe.Lookup("rune").Type(),
+
+ // error
+ types.Universe.Lookup("error").Type(),
+
+ // untyped types
+ types.Typ[types.UntypedBool],
+ types.Typ[types.UntypedInt],
+ types.Typ[types.UntypedRune],
+ types.Typ[types.UntypedFloat],
+ types.Typ[types.UntypedComplex],
+ types.Typ[types.UntypedString],
+ types.Typ[types.UntypedNil],
+
+ // package unsafe
+ types.Typ[types.UnsafePointer],
+
+ // invalid type
+ types.Typ[types.Invalid], // only appears in packages with errors
+
+ // used internally by gc; never used by this package or in .a files
+ anyType{},
+ }
+ predecl = append(predecl, additionalPredeclared()...)
+ })
+ return predecl
+}
+
+type anyType struct{}
+
+func (t anyType) Underlying() types.Type { return t }
+func (t anyType) String() string { return "any" }
diff --git a/vendor/golang.org/x/tools/internal/gcimporter/exportdata.go b/vendor/golang.org/x/tools/internal/gcimporter/exportdata.go
new file mode 100644
index 0000000..f6437fe
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/gcimporter/exportdata.go
@@ -0,0 +1,99 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// This file is a copy of $GOROOT/src/go/internal/gcimporter/exportdata.go.
+
+// This file implements FindExportData.
+
+package gcimporter
+
+import (
+ "bufio"
+ "fmt"
+ "io"
+ "strconv"
+ "strings"
+)
+
+func readGopackHeader(r *bufio.Reader) (name string, size int64, err error) {
+ // See $GOROOT/include/ar.h.
+ hdr := make([]byte, 16+12+6+6+8+10+2)
+ _, err = io.ReadFull(r, hdr)
+ if err != nil {
+ return
+ }
+ // leave for debugging
+ if false {
+ fmt.Printf("header: %s", hdr)
+ }
+ s := strings.TrimSpace(string(hdr[16+12+6+6+8:][:10]))
+ length, err := strconv.Atoi(s)
+ size = int64(length)
+ if err != nil || hdr[len(hdr)-2] != '`' || hdr[len(hdr)-1] != '\n' {
+ err = fmt.Errorf("invalid archive header")
+ return
+ }
+ name = strings.TrimSpace(string(hdr[:16]))
+ return
+}
+
+// FindExportData positions the reader r at the beginning of the
+// export data section of an underlying GC-created object/archive
+// file by reading from it. The reader must be positioned at the
+// start of the file before calling this function. The hdr result
+// is the string before the export data, either "$$" or "$$B".
+// The size result is the length of the export data in bytes, or -1 if not known.
+func FindExportData(r *bufio.Reader) (hdr string, size int64, err error) {
+ // Read first line to make sure this is an object file.
+ line, err := r.ReadSlice('\n')
+ if err != nil {
+ err = fmt.Errorf("can't find export data (%v)", err)
+ return
+ }
+
+ if string(line) == "!<arch>\n" {
+ // Archive file. Scan to __.PKGDEF.
+ var name string
+ if name, size, err = readGopackHeader(r); err != nil {
+ return
+ }
+
+ // First entry should be __.PKGDEF.
+ if name != "__.PKGDEF" {
+ err = fmt.Errorf("go archive is missing __.PKGDEF")
+ return
+ }
+
+ // Read first line of __.PKGDEF data, so that line
+ // is once again the first line of the input.
+ if line, err = r.ReadSlice('\n'); err != nil {
+ err = fmt.Errorf("can't find export data (%v)", err)
+ return
+ }
+ size -= int64(len(line))
+ }
+
+ // Now at __.PKGDEF in archive or still at beginning of file.
+ // Either way, line should begin with "go object ".
+ if !strings.HasPrefix(string(line), "go object ") {
+ err = fmt.Errorf("not a Go object file")
+ return
+ }
+
+ // Skip over object header to export data.
+ // Begins after first line starting with $$.
+ for line[0] != '$' {
+ if line, err = r.ReadSlice('\n'); err != nil {
+ err = fmt.Errorf("can't find export data (%v)", err)
+ return
+ }
+ size -= int64(len(line))
+ }
+ hdr = string(line)
+ if size < 0 {
+ size = -1
+ }
+
+ return
+}
diff --git a/vendor/golang.org/x/tools/internal/gcimporter/gcimporter.go b/vendor/golang.org/x/tools/internal/gcimporter/gcimporter.go
new file mode 100644
index 0000000..39df911
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/gcimporter/gcimporter.go
@@ -0,0 +1,266 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// This file is a reduced copy of $GOROOT/src/go/internal/gcimporter/gcimporter.go.
+
+// Package gcimporter provides various functions for reading
+// gc-generated object files that can be used to implement the
+// Importer interface defined by the Go 1.5 standard library package.
+//
+// The encoding is deterministic: if the encoder is applied twice to
+// the same types.Package data structure, both encodings are equal.
+// This property may be important to avoid spurious changes in
+// applications such as build systems.
+//
+// However, the encoder is not necessarily idempotent. Importing an
+// exported package may yield a types.Package that, while it
+// represents the same set of Go types as the original, may differ in
+// the details of its internal representation. Because of these
+// differences, re-encoding the imported package may yield a
+// different, but equally valid, encoding of the package.
+package gcimporter // import "golang.org/x/tools/internal/gcimporter"
+
+import (
+ "bufio"
+ "bytes"
+ "fmt"
+ "go/build"
+ "go/token"
+ "go/types"
+ "io"
+ "os"
+ "os/exec"
+ "path/filepath"
+ "strings"
+ "sync"
+)
+
+const (
+ // Enable debug during development: it adds some additional checks, and
+ // prevents errors from being recovered.
+ debug = false
+
+ // If trace is set, debugging output is printed to std out.
+ trace = false
+)
+
+var exportMap sync.Map // package dir → func() (string, bool)
+
+// lookupGorootExport returns the location of the export data
+// (normally found in the build cache, but located in GOROOT/pkg
+// in prior Go releases) for the package located in pkgDir.
+//
+// (We use the package's directory instead of its import path
+// mainly to simplify handling of the packages in src/vendor
+// and cmd/vendor.)
+func lookupGorootExport(pkgDir string) (string, bool) {
+ f, ok := exportMap.Load(pkgDir)
+ if !ok {
+ var (
+ listOnce sync.Once
+ exportPath string
+ )
+ f, _ = exportMap.LoadOrStore(pkgDir, func() (string, bool) {
+ listOnce.Do(func() {
+ cmd := exec.Command("go", "list", "-export", "-f", "{{.Export}}", pkgDir)
+ cmd.Dir = build.Default.GOROOT
+ var output []byte
+ output, err := cmd.Output()
+ if err != nil {
+ return
+ }
+
+ exports := strings.Split(string(bytes.TrimSpace(output)), "\n")
+ if len(exports) != 1 {
+ return
+ }
+
+ exportPath = exports[0]
+ })
+
+ return exportPath, exportPath != ""
+ })
+ }
+
+ return f.(func() (string, bool))()
+}
+
+var pkgExts = [...]string{".a", ".o"}
+
+// FindPkg returns the filename and unique package id for an import
+// path based on package information provided by build.Import (using
+// the build.Default build.Context). A relative srcDir is interpreted
+// relative to the current working directory.
+// If no file was found, an empty filename is returned.
+func FindPkg(path, srcDir string) (filename, id string) {
+ if path == "" {
+ return
+ }
+
+ var noext string
+ switch {
+ default:
+ // "x" -> "$GOPATH/pkg/$GOOS_$GOARCH/x.ext", "x"
+ // Don't require the source files to be present.
+ if abs, err := filepath.Abs(srcDir); err == nil { // see issue 14282
+ srcDir = abs
+ }
+ bp, _ := build.Import(path, srcDir, build.FindOnly|build.AllowBinary)
+ if bp.PkgObj == "" {
+ var ok bool
+ if bp.Goroot && bp.Dir != "" {
+ filename, ok = lookupGorootExport(bp.Dir)
+ }
+ if !ok {
+ id = path // make sure we have an id to print in error message
+ return
+ }
+ } else {
+ noext = strings.TrimSuffix(bp.PkgObj, ".a")
+ id = bp.ImportPath
+ }
+
+ case build.IsLocalImport(path):
+ // "./x" -> "/this/directory/x.ext", "/this/directory/x"
+ noext = filepath.Join(srcDir, path)
+ id = noext
+
+ case filepath.IsAbs(path):
+ // for completeness only - go/build.Import
+ // does not support absolute imports
+ // "/x" -> "/x.ext", "/x"
+ noext = path
+ id = path
+ }
+
+ if false { // for debugging
+ if path != id {
+ fmt.Printf("%s -> %s\n", path, id)
+ }
+ }
+
+ if filename != "" {
+ if f, err := os.Stat(filename); err == nil && !f.IsDir() {
+ return
+ }
+ }
+
+ // try extensions
+ for _, ext := range pkgExts {
+ filename = noext + ext
+ if f, err := os.Stat(filename); err == nil && !f.IsDir() {
+ return
+ }
+ }
+
+ filename = "" // not found
+ return
+}
+
+// Import imports a gc-generated package given its import path and srcDir, adds
+// the corresponding package object to the packages map, and returns the object.
+// The packages map must contain all packages already imported.
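+//
+// Illustrative call (the import path and srcDir here are hypothetical):
+//
+//	pkgs := make(map[string]*types.Package)
+//	pkg, err := Import(pkgs, "fmt", ".", nil)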
+func Import(packages map[string]*types.Package, path, srcDir string, lookup func(path string) (io.ReadCloser, error)) (pkg *types.Package, err error) {
+ var rc io.ReadCloser
+ var filename, id string
+ if lookup != nil {
+ // With custom lookup specified, assume that caller has
+ // converted path to a canonical import path for use in the map.
+ if path == "unsafe" {
+ return types.Unsafe, nil
+ }
+ id = path
+
+ // No need to re-import if the package was imported completely before.
+ if pkg = packages[id]; pkg != nil && pkg.Complete() {
+ return
+ }
+ f, err := lookup(path)
+ if err != nil {
+ return nil, err
+ }
+ rc = f
+ } else {
+ filename, id = FindPkg(path, srcDir)
+ if filename == "" {
+ if path == "unsafe" {
+ return types.Unsafe, nil
+ }
+ return nil, fmt.Errorf("can't find import: %q", id)
+ }
+
+ // no need to re-import if the package was imported completely before
+ if pkg = packages[id]; pkg != nil && pkg.Complete() {
+ return
+ }
+
+ // open file
+ f, err := os.Open(filename)
+ if err != nil {
+ return nil, err
+ }
+ defer func() {
+ if err != nil {
+ // add file name to error
+ err = fmt.Errorf("%s: %v", filename, err)
+ }
+ }()
+ rc = f
+ }
+ defer rc.Close()
+
+ var hdr string
+ var size int64
+ buf := bufio.NewReader(rc)
+ if hdr, size, err = FindExportData(buf); err != nil {
+ return
+ }
+
+ switch hdr {
+ case "$$B\n":
+ var data []byte
+ data, err = io.ReadAll(buf)
+ if err != nil {
+ break
+ }
+
+ // TODO(gri): allow clients of go/importer to provide a FileSet.
+ // Or, define a new standard go/types/gcexportdata package.
+ fset := token.NewFileSet()
+
+ // Select appropriate importer.
+ if len(data) > 0 {
+ switch data[0] {
+ case 'v', 'c', 'd': // binary, till go1.10
+ return nil, fmt.Errorf("binary (%c) import format is no longer supported", data[0])
+
+ case 'i': // indexed, till go1.19
+ _, pkg, err := IImportData(fset, packages, data[1:], id)
+ return pkg, err
+
+ case 'u': // unified, from go1.20
+ _, pkg, err := UImportData(fset, packages, data[1:size], id)
+ return pkg, err
+
+ default:
+ l := len(data)
+ if l > 10 {
+ l = 10
+ }
+ return nil, fmt.Errorf("unexpected export data with prefix %q for path %s", string(data[:l]), id)
+ }
+ }
+
+ default:
+ err = fmt.Errorf("unknown export data header: %q", hdr)
+ }
+
+ return
+}
+
+type byPath []*types.Package
+
+func (a byPath) Len() int { return len(a) }
+func (a byPath) Swap(i, j int) { a[i], a[j] = a[j], a[i] }
+func (a byPath) Less(i, j int) bool { return a[i].Path() < a[j].Path() }
diff --git a/vendor/golang.org/x/tools/internal/gcimporter/iexport.go b/vendor/golang.org/x/tools/internal/gcimporter/iexport.go
new file mode 100644
index 0000000..deeb67f
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/gcimporter/iexport.go
@@ -0,0 +1,1332 @@
+// Copyright 2019 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Indexed binary package export.
+// This file was derived from $GOROOT/src/cmd/compile/internal/gc/iexport.go;
+// see that file for specification of the format.
+
+package gcimporter
+
+import (
+ "bytes"
+ "encoding/binary"
+ "fmt"
+ "go/constant"
+ "go/token"
+ "go/types"
+ "io"
+ "math/big"
+ "reflect"
+ "sort"
+ "strconv"
+ "strings"
+
+ "golang.org/x/tools/go/types/objectpath"
+ "golang.org/x/tools/internal/aliases"
+ "golang.org/x/tools/internal/tokeninternal"
+)
+
+// IExportShallow encodes "shallow" export data for the specified package.
+//
+// No promises are made about the encoding other than that it can be decoded by
+// the same version of IImportShallow. If you plan to save export data in the
+// file system, be sure to include a cryptographic digest of the executable in
+// the key to avoid version skew.
+//
+// If the provided reportf func is non-nil, it will be used for reporting bugs
+// encountered during export.
+// TODO(rfindley): remove reportf when we are confident enough in the new
+// objectpath encoding.
+func IExportShallow(fset *token.FileSet, pkg *types.Package, reportf ReportFunc) ([]byte, error) {
+ // In principle this operation can only fail if out.Write fails,
+ // but that's impossible for bytes.Buffer---and as a matter of
+ // fact iexportCommon doesn't even check for I/O errors.
+ // TODO(adonovan): handle I/O errors properly.
+ // TODO(adonovan): use byte slices throughout, avoiding copying.
+ const bundle, shallow = false, true
+ var out bytes.Buffer
+ err := iexportCommon(&out, fset, bundle, shallow, iexportVersion, []*types.Package{pkg})
+ return out.Bytes(), err
+}
+
+// IImportShallow decodes "shallow" types.Package data encoded by
+// IExportShallow in the same executable. This function cannot import data from
+// cmd/compile or gcexportdata.Write.
+//
+// The importer calls getPackages to obtain package symbols for all
+// packages mentioned in the export data, including the one being
+// decoded.
+//
+// If the provided reportf func is non-nil, it will be used for reporting bugs
+// encountered during import.
+// TODO(rfindley): remove reportf when we are confident enough in the new
+// objectpath encoding.
+func IImportShallow(fset *token.FileSet, getPackages GetPackagesFunc, data []byte, path string, reportf ReportFunc) (*types.Package, error) {
+ const bundle = false
+ const shallow = true
+ pkgs, err := iimportCommon(fset, getPackages, data, bundle, path, shallow, reportf)
+ if err != nil {
+ return nil, err
+ }
+ return pkgs[0], nil
+}
+
+// ReportFunc is the type of a function used to report formatted bugs.
+type ReportFunc = func(string, ...interface{})
+
+// Current bundled export format version. Increase with each format change.
+// 0: initial implementation
+const bundleVersion = 0
+
+// IExportData writes indexed export data for pkg to out.
+//
+// If no file set is provided, position info will be missing.
+// The package path of the top-level package will not be recorded,
+// so that calls to IImportData can override with a provided package path.
+func IExportData(out io.Writer, fset *token.FileSet, pkg *types.Package) error {
+ const bundle, shallow = false, false
+ return iexportCommon(out, fset, bundle, shallow, iexportVersion, []*types.Package{pkg})
+}
+
+// IExportBundle writes an indexed export bundle for pkgs to out.
+func IExportBundle(out io.Writer, fset *token.FileSet, pkgs []*types.Package) error {
+ const bundle, shallow = true, false
+ return iexportCommon(out, fset, bundle, shallow, iexportVersion, pkgs)
+}
+
+func iexportCommon(out io.Writer, fset *token.FileSet, bundle, shallow bool, version int, pkgs []*types.Package) (err error) {
+ if !debug {
+ defer func() {
+ if e := recover(); e != nil {
+ if ierr, ok := e.(internalError); ok {
+ err = ierr
+ return
+ }
+ // Not an internal error; panic again.
+ panic(e)
+ }
+ }()
+ }
+
+ p := iexporter{
+ fset: fset,
+ version: version,
+ shallow: shallow,
+ allPkgs: map[*types.Package]bool{},
+ stringIndex: map[string]uint64{},
+ declIndex: map[types.Object]uint64{},
+ tparamNames: map[types.Object]string{},
+ typIndex: map[types.Type]uint64{},
+ }
+ if !bundle {
+ p.localpkg = pkgs[0]
+ }
+
+ for i, pt := range predeclared() {
+ p.typIndex[pt] = uint64(i)
+ }
+ if len(p.typIndex) > predeclReserved {
+ panic(internalErrorf("too many predeclared types: %d > %d", len(p.typIndex), predeclReserved))
+ }
+
+ // Initialize work queue with exported declarations.
+ for _, pkg := range pkgs {
+ scope := pkg.Scope()
+ for _, name := range scope.Names() {
+ if token.IsExported(name) {
+ p.pushDecl(scope.Lookup(name))
+ }
+ }
+
+ if bundle {
+ // Ensure pkg and its imports are included in the index.
+ p.allPkgs[pkg] = true
+ for _, imp := range pkg.Imports() {
+ p.allPkgs[imp] = true
+ }
+ }
+ }
+
+ // Loop until no more work.
+ for !p.declTodo.empty() {
+ p.doDecl(p.declTodo.popHead())
+ }
+
+ // Produce index of offset of each file record in files.
+ var files intWriter
+ var fileOffset []uint64 // fileOffset[i] is offset in files of file encoded as i
+ if p.shallow {
+ fileOffset = make([]uint64, len(p.fileInfos))
+ for i, info := range p.fileInfos {
+ fileOffset[i] = uint64(files.Len())
+ p.encodeFile(&files, info.file, info.needed)
+ }
+ }
+
+ // Append indices to data0 section.
+ dataLen := uint64(p.data0.Len())
+ w := p.newWriter()
+ w.writeIndex(p.declIndex)
+
+ if bundle {
+ w.uint64(uint64(len(pkgs)))
+ for _, pkg := range pkgs {
+ w.pkg(pkg)
+ imps := pkg.Imports()
+ w.uint64(uint64(len(imps)))
+ for _, imp := range imps {
+ w.pkg(imp)
+ }
+ }
+ }
+ w.flush()
+
+ // Assemble header.
+ var hdr intWriter
+ if bundle {
+ hdr.uint64(bundleVersion)
+ }
+ hdr.uint64(uint64(p.version))
+ hdr.uint64(uint64(p.strings.Len()))
+ if p.shallow {
+ hdr.uint64(uint64(files.Len()))
+ hdr.uint64(uint64(len(fileOffset)))
+ for _, offset := range fileOffset {
+ hdr.uint64(offset)
+ }
+ }
+ hdr.uint64(dataLen)
+
+ // Flush output.
+ io.Copy(out, &hdr)
+ io.Copy(out, &p.strings)
+ if p.shallow {
+ io.Copy(out, &files)
+ }
+ io.Copy(out, &p.data0)
+
+ return nil
+}
+
+// encodeFile writes to w a representation of the file sufficient to
+// faithfully restore position information about all needed offsets.
+// Mutates the needed array.
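+//
+// For example (illustrative): if the needed offsets all fall on lines 3 and 7,
+// only those two line starts are recorded as (index, offset) pairs, rather
+// than the file's full line table.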
+func (p *iexporter) encodeFile(w *intWriter, file *token.File, needed []uint64) {
+ _ = needed[0] // precondition: needed is non-empty
+
+ w.uint64(p.stringOff(file.Name()))
+
+ size := uint64(file.Size())
+ w.uint64(size)
+
+ // Sort the set of needed offsets. Duplicates are harmless.
+ sort.Slice(needed, func(i, j int) bool { return needed[i] < needed[j] })
+
+ lines := tokeninternal.GetLines(file) // byte offset of each line start
+ w.uint64(uint64(len(lines)))
+
+ // Rather than record the entire array of line start offsets,
+ // we save only a sparse list of (index, offset) pairs for
+ // the start of each line that contains a needed position.
+ var sparse [][2]int // (index, offset) pairs
+outer:
+ for i, lineStart := range lines {
+ lineEnd := size
+ if i < len(lines)-1 {
+ lineEnd = uint64(lines[i+1])
+ }
+ // Does this line contain a needed offset?
+ if needed[0] < lineEnd {
+ sparse = append(sparse, [2]int{i, lineStart})
+ for needed[0] < lineEnd {
+ needed = needed[1:]
+ if len(needed) == 0 {
+ break outer
+ }
+ }
+ }
+ }
+
+ // Delta-encode the sparse (index, offset) pairs.
+ w.uint64(uint64(len(sparse)))
+ var prev [2]int
+ for _, pair := range sparse {
+ w.uint64(uint64(pair[0] - prev[0]))
+ w.uint64(uint64(pair[1] - prev[1]))
+ prev = pair
+ }
+}
+
+// writeIndex writes out the object index, which is also read by
+// non-compiler tools and includes a complete package description
+// (i.e., name and height).
+func (w *exportWriter) writeIndex(index map[types.Object]uint64) {
+ type pkgObj struct {
+ obj types.Object
+ name string // qualified name; differs from obj.Name for type params
+ }
+ // Build a map from packages to objects from that package.
+ pkgObjs := map[*types.Package][]pkgObj{}
+
+ // For the main index, make sure to include every package that
+ // we reference, even if we're not exporting (or reexporting)
+ // any symbols from it.
+ if w.p.localpkg != nil {
+ pkgObjs[w.p.localpkg] = nil
+ }
+ for pkg := range w.p.allPkgs {
+ pkgObjs[pkg] = nil
+ }
+
+ for obj := range index {
+ name := w.p.exportName(obj)
+ pkgObjs[obj.Pkg()] = append(pkgObjs[obj.Pkg()], pkgObj{obj, name})
+ }
+
+ var pkgs []*types.Package
+ for pkg, objs := range pkgObjs {
+ pkgs = append(pkgs, pkg)
+
+ sort.Slice(objs, func(i, j int) bool {
+ return objs[i].name < objs[j].name
+ })
+ }
+
+ sort.Slice(pkgs, func(i, j int) bool {
+ return w.exportPath(pkgs[i]) < w.exportPath(pkgs[j])
+ })
+
+ w.uint64(uint64(len(pkgs)))
+ for _, pkg := range pkgs {
+ w.string(w.exportPath(pkg))
+ w.string(pkg.Name())
+ w.uint64(uint64(0)) // package height is not needed for go/types
+
+ objs := pkgObjs[pkg]
+ w.uint64(uint64(len(objs)))
+ for _, obj := range objs {
+ w.string(obj.name)
+ w.uint64(index[obj.obj])
+ }
+ }
+}
+
+// exportName returns the 'exported' name of an object. It differs from
+// obj.Name() only for type parameters (see tparamExportName for details).
+func (p *iexporter) exportName(obj types.Object) (res string) {
+ if name := p.tparamNames[obj]; name != "" {
+ return name
+ }
+ return obj.Name()
+}
+
+type iexporter struct {
+ fset *token.FileSet
+ out *bytes.Buffer
+ version int
+
+ shallow bool // don't put types from other packages in the index
+ objEncoder *objectpath.Encoder // encodes objects from other packages in shallow mode; lazily allocated
+ localpkg *types.Package // (nil in bundle mode)
+
+ // allPkgs tracks all packages that have been referenced by
+ // the export data, so we can ensure to include them in the
+ // main index.
+ allPkgs map[*types.Package]bool
+
+ declTodo objQueue
+
+ strings intWriter
+ stringIndex map[string]uint64
+
+ // In shallow mode, object positions are encoded as (file, offset).
+ // Each file is recorded as a line-number table.
+ // Only the lines of needed positions are saved faithfully.
+ fileInfo map[*token.File]uint64 // value is index in fileInfos
+ fileInfos []*filePositions
+
+ data0 intWriter
+ declIndex map[types.Object]uint64
+ tparamNames map[types.Object]string // typeparam->exported name
+ typIndex map[types.Type]uint64
+
+ indent int // for tracing support
+}
+
+type filePositions struct {
+ file *token.File
+ needed []uint64 // unordered list of needed file offsets
+}
+
+func (p *iexporter) trace(format string, args ...interface{}) {
+ if !trace {
+ // Call sites should also be guarded, but having this check here allows
+ // easily enabling/disabling debug trace statements.
+ return
+ }
+ fmt.Printf(strings.Repeat("..", p.indent)+format+"\n", args...)
+}
+
+// objectpathEncoder returns the lazily allocated objectpath.Encoder to use
+// when encoding objects in other packages during shallow export.
+//
+// Using a shared Encoder amortizes some of cost of objectpath search.
+func (p *iexporter) objectpathEncoder() *objectpath.Encoder {
+ if p.objEncoder == nil {
+ p.objEncoder = new(objectpath.Encoder)
+ }
+ return p.objEncoder
+}
+
+// stringOff returns the offset of s within the string section.
+// If not already present, it's added to the end.
+func (p *iexporter) stringOff(s string) uint64 {
+ off, ok := p.stringIndex[s]
+ if !ok {
+ off = uint64(p.strings.Len())
+ p.stringIndex[s] = off
+
+ p.strings.uint64(uint64(len(s)))
+ p.strings.WriteString(s)
+ }
+ return off
+}
+
+// fileIndexAndOffset returns the index of the token.File and the byte offset of pos within it.
+func (p *iexporter) fileIndexAndOffset(file *token.File, pos token.Pos) (uint64, uint64) {
+ index, ok := p.fileInfo[file]
+ if !ok {
+ index = uint64(len(p.fileInfo))
+ p.fileInfos = append(p.fileInfos, &filePositions{file: file})
+ if p.fileInfo == nil {
+ p.fileInfo = make(map[*token.File]uint64)
+ }
+ p.fileInfo[file] = index
+ }
+ // Record each needed offset.
+ info := p.fileInfos[index]
+ offset := uint64(file.Offset(pos))
+ info.needed = append(info.needed, offset)
+
+ return index, offset
+}
+
+// pushDecl adds obj to the declaration work queue, if not already present.
+func (p *iexporter) pushDecl(obj types.Object) {
+ // Package unsafe is known to the compiler and predeclared.
+ // Callers should not ask us to export it.
+ if obj.Pkg() == types.Unsafe {
+ panic("cannot export package unsafe")
+ }
+
+ // Shallow export data: don't index decls from other packages.
+ if p.shallow && obj.Pkg() != p.localpkg {
+ return
+ }
+
+ if _, ok := p.declIndex[obj]; ok {
+ return
+ }
+
+ p.declIndex[obj] = ^uint64(0) // mark obj present in work queue
+ p.declTodo.pushTail(obj)
+}
+
+// exportWriter handles writing out individual data section chunks.
+type exportWriter struct {
+ p *iexporter
+
+ data intWriter
+ prevFile string
+ prevLine int64
+ prevColumn int64
+}
+
+func (w *exportWriter) exportPath(pkg *types.Package) string {
+ if pkg == w.p.localpkg {
+ return ""
+ }
+ return pkg.Path()
+}
+
+func (p *iexporter) doDecl(obj types.Object) {
+ if trace {
+ p.trace("exporting decl %v (%T)", obj, obj)
+ p.indent++
+ defer func() {
+ p.indent--
+ p.trace("=> %s", obj)
+ }()
+ }
+ w := p.newWriter()
+
+ switch obj := obj.(type) {
+ case *types.Var:
+ w.tag(varTag)
+ w.pos(obj.Pos())
+ w.typ(obj.Type(), obj.Pkg())
+
+ case *types.Func:
+ sig, _ := obj.Type().(*types.Signature)
+ if sig.Recv() != nil {
+ // We shouldn't see methods in the package scope,
+ // but the type checker may repair "func () F() {}"
+ // to "func (Invalid) F()" and then treat it like "func F()",
+ // so allow that. See golang/go#57729.
+ if sig.Recv().Type() != types.Typ[types.Invalid] {
+ panic(internalErrorf("unexpected method: %v", sig))
+ }
+ }
+
+ // Function.
+ if sig.TypeParams().Len() == 0 {
+ w.tag(funcTag)
+ } else {
+ w.tag(genericFuncTag)
+ }
+ w.pos(obj.Pos())
+ // The tparam list of the function type is the declaration of the type
+ // params. So, write out the type params right now. Then those type params
+ // will be referenced via their type offset (via typOff) in all other
+ // places in the signature and function where they are used.
+ //
+ // While importing the type parameters, tparamList computes and records
+ // their export name, so that it can be later used when writing the index.
+ if tparams := sig.TypeParams(); tparams.Len() > 0 {
+ w.tparamList(obj.Name(), tparams, obj.Pkg())
+ }
+ w.signature(sig)
+
+ case *types.Const:
+ w.tag(constTag)
+ w.pos(obj.Pos())
+ w.value(obj.Type(), obj.Val())
+
+ case *types.TypeName:
+ t := obj.Type()
+
+ if tparam, ok := aliases.Unalias(t).(*types.TypeParam); ok {
+ w.tag(typeParamTag)
+ w.pos(obj.Pos())
+ constraint := tparam.Constraint()
+ if p.version >= iexportVersionGo1_18 {
+ implicit := false
+ if iface, _ := aliases.Unalias(constraint).(*types.Interface); iface != nil {
+ implicit = iface.IsImplicit()
+ }
+ w.bool(implicit)
+ }
+ w.typ(constraint, obj.Pkg())
+ break
+ }
+
+ if obj.IsAlias() {
+ w.tag(aliasTag)
+ w.pos(obj.Pos())
+ if alias, ok := t.(*aliases.Alias); ok {
+ // Preserve materialized aliases,
+ // even of non-exported types.
+ t = aliases.Rhs(alias)
+ }
+ w.typ(t, obj.Pkg())
+ break
+ }
+
+ // Defined type.
+ named, ok := t.(*types.Named)
+ if !ok {
+ panic(internalErrorf("%s is not a defined type", t))
+ }
+
+ if named.TypeParams().Len() == 0 {
+ w.tag(typeTag)
+ } else {
+ w.tag(genericTypeTag)
+ }
+ w.pos(obj.Pos())
+
+ if named.TypeParams().Len() > 0 {
+ // While importing the type parameters, tparamList computes and records
+ // their export name, so that it can be later used when writing the index.
+ w.tparamList(obj.Name(), named.TypeParams(), obj.Pkg())
+ }
+
+ underlying := named.Underlying()
+ w.typ(underlying, obj.Pkg())
+
+ if types.IsInterface(t) {
+ break
+ }
+
+ n := named.NumMethods()
+ w.uint64(uint64(n))
+ for i := 0; i < n; i++ {
+ m := named.Method(i)
+ w.pos(m.Pos())
+ w.string(m.Name())
+ sig, _ := m.Type().(*types.Signature)
+
+ // Receiver type parameters are type arguments of the receiver type, so
+ // their name must be qualified before exporting recv.
+ if rparams := sig.RecvTypeParams(); rparams.Len() > 0 {
+ prefix := obj.Name() + "." + m.Name()
+ for i := 0; i < rparams.Len(); i++ {
+ rparam := rparams.At(i)
+ name := tparamExportName(prefix, rparam)
+ w.p.tparamNames[rparam.Obj()] = name
+ }
+ }
+ w.param(sig.Recv())
+ w.signature(sig)
+ }
+
+ default:
+ panic(internalErrorf("unexpected object: %v", obj))
+ }
+
+ p.declIndex[obj] = w.flush()
+}
+
+func (w *exportWriter) tag(tag byte) {
+ w.data.WriteByte(tag)
+}
+
+func (w *exportWriter) pos(pos token.Pos) {
+ if w.p.shallow {
+ w.posV2(pos)
+ } else if w.p.version >= iexportVersionPosCol {
+ w.posV1(pos)
+ } else {
+ w.posV0(pos)
+ }
+}
+
+// posV2 encoding (used only in shallow mode) records positions as
+// (file, offset), where file is the index in the token.File table
+// (which records the file name and newline offsets) and offset is a
+// byte offset. It effectively ignores //line directives.
+func (w *exportWriter) posV2(pos token.Pos) {
+ if pos == token.NoPos {
+ w.uint64(0)
+ return
+ }
+ file := w.p.fset.File(pos) // fset must be non-nil
+ index, offset := w.p.fileIndexAndOffset(file, pos)
+ w.uint64(1 + index)
+ w.uint64(offset)
+}
+
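+// posV1 delta-encodes a position against the previously written one: the low
+// bit of the column delta signals that a line delta follows, and the low bit
+// of the line delta signals that a file name follows, so re-emitting the
+// previous position costs only a single encoded zero.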
+func (w *exportWriter) posV1(pos token.Pos) {
+ if w.p.fset == nil {
+ w.int64(0)
+ return
+ }
+
+ p := w.p.fset.Position(pos)
+ file := p.Filename
+ line := int64(p.Line)
+ column := int64(p.Column)
+
+ deltaColumn := (column - w.prevColumn) << 1
+ deltaLine := (line - w.prevLine) << 1
+
+ if file != w.prevFile {
+ deltaLine |= 1
+ }
+ if deltaLine != 0 {
+ deltaColumn |= 1
+ }
+
+ w.int64(deltaColumn)
+ if deltaColumn&1 != 0 {
+ w.int64(deltaLine)
+ if deltaLine&1 != 0 {
+ w.string(file)
+ }
+ }
+
+ w.prevFile = file
+ w.prevLine = line
+ w.prevColumn = column
+}
+
+func (w *exportWriter) posV0(pos token.Pos) {
+ if w.p.fset == nil {
+ w.int64(0)
+ return
+ }
+
+ p := w.p.fset.Position(pos)
+ file := p.Filename
+ line := int64(p.Line)
+
+ // When file is the same as the last position (common case),
+ // we can save a few bytes by delta encoding just the line
+ // number.
+ //
+ // Note: Because data objects may be read out of order (or not
+ // at all), we can only apply delta encoding within a single
+ // object. This is handled implicitly by tracking prevFile and
+ // prevLine as fields of exportWriter.
+
+ if file == w.prevFile {
+ delta := line - w.prevLine
+ w.int64(delta)
+ if delta == deltaNewFile {
+ w.int64(-1)
+ }
+ } else {
+ w.int64(deltaNewFile)
+ w.int64(line) // line >= 0
+ w.string(file)
+ w.prevFile = file
+ }
+ w.prevLine = line
+}
+
+func (w *exportWriter) pkg(pkg *types.Package) {
+ // Ensure any referenced packages are declared in the main index.
+ w.p.allPkgs[pkg] = true
+
+ w.string(w.exportPath(pkg))
+}
+
+func (w *exportWriter) qualifiedType(obj *types.TypeName) {
+ name := w.p.exportName(obj)
+
+ // Ensure any referenced declarations are written out too.
+ w.p.pushDecl(obj)
+ w.string(name)
+ w.pkg(obj.Pkg())
+}
+
+// TODO(rfindley): what does 'pkg' even mean here? It would be better to pass
+// it in explicitly into signatures and structs that may use it for
+// constructing fields.
+func (w *exportWriter) typ(t types.Type, pkg *types.Package) {
+ w.data.uint64(w.p.typOff(t, pkg))
+}
+
+func (p *iexporter) newWriter() *exportWriter {
+ return &exportWriter{p: p}
+}
+
+func (w *exportWriter) flush() uint64 {
+ off := uint64(w.p.data0.Len())
+ io.Copy(&w.p.data0, &w.data)
+ return off
+}
+
+func (p *iexporter) typOff(t types.Type, pkg *types.Package) uint64 {
+ off, ok := p.typIndex[t]
+ if !ok {
+ w := p.newWriter()
+ w.doTyp(t, pkg)
+ off = predeclReserved + w.flush()
+ p.typIndex[t] = off
+ }
+ return off
+}
+
+func (w *exportWriter) startType(k itag) {
+ w.data.uint64(uint64(k))
+}
+
+func (w *exportWriter) doTyp(t types.Type, pkg *types.Package) {
+ if trace {
+ w.p.trace("exporting type %s (%T)", t, t)
+ w.p.indent++
+ defer func() {
+ w.p.indent--
+ w.p.trace("=> %s", t)
+ }()
+ }
+ switch t := t.(type) {
+ case *aliases.Alias:
+ // TODO(adonovan): support parameterized aliases, following *types.Named.
+ w.startType(aliasType)
+ w.qualifiedType(t.Obj())
+
+ case *types.Named:
+ if targs := t.TypeArgs(); targs.Len() > 0 {
+ w.startType(instanceType)
+ // TODO(rfindley): investigate if this position is correct, and if it
+ // matters.
+ w.pos(t.Obj().Pos())
+ w.typeList(targs, pkg)
+ w.typ(t.Origin(), pkg)
+ return
+ }
+ w.startType(definedType)
+ w.qualifiedType(t.Obj())
+
+ case *types.TypeParam:
+ w.startType(typeParamType)
+ w.qualifiedType(t.Obj())
+
+ case *types.Pointer:
+ w.startType(pointerType)
+ w.typ(t.Elem(), pkg)
+
+ case *types.Slice:
+ w.startType(sliceType)
+ w.typ(t.Elem(), pkg)
+
+ case *types.Array:
+ w.startType(arrayType)
+ w.uint64(uint64(t.Len()))
+ w.typ(t.Elem(), pkg)
+
+ case *types.Chan:
+ w.startType(chanType)
+ // 1 RecvOnly; 2 SendOnly; 3 SendRecv
+ var dir uint64
+ switch t.Dir() {
+ case types.RecvOnly:
+ dir = 1
+ case types.SendOnly:
+ dir = 2
+ case types.SendRecv:
+ dir = 3
+ }
+ w.uint64(dir)
+ w.typ(t.Elem(), pkg)
+
+ case *types.Map:
+ w.startType(mapType)
+ w.typ(t.Key(), pkg)
+ w.typ(t.Elem(), pkg)
+
+ case *types.Signature:
+ w.startType(signatureType)
+ w.pkg(pkg)
+ w.signature(t)
+
+ case *types.Struct:
+ w.startType(structType)
+ n := t.NumFields()
+ // Even for struct{} we must emit some qualifying package, because that's
+ // what the compiler does, and thus that's what the importer expects.
+ fieldPkg := pkg
+ if n > 0 {
+ fieldPkg = t.Field(0).Pkg()
+ }
+ if fieldPkg == nil {
+ // TODO(rfindley): improve this very hacky logic.
+ //
+ // The importer expects a package to be set for all struct types, even
+ // those with no fields. A better encoding might be to set NumFields
+ // before pkg. setPkg panics with a nil package, which may be possible
+ // to reach with invalid packages (and perhaps valid packages, too?), so
+ // (arbitrarily) set the localpkg if available.
+ //
+ // Alternatively, we may be able to simply guarantee that pkg != nil, by
+ // reconsidering the encoding of constant values.
+ if w.p.shallow {
+ fieldPkg = w.p.localpkg
+ } else {
+ panic(internalErrorf("no package to set for empty struct"))
+ }
+ }
+ w.pkg(fieldPkg)
+ w.uint64(uint64(n))
+
+ for i := 0; i < n; i++ {
+ f := t.Field(i)
+ if w.p.shallow {
+ w.objectPath(f)
+ }
+ w.pos(f.Pos())
+ w.string(f.Name()) // unexported fields implicitly qualified by prior setPkg
+ w.typ(f.Type(), fieldPkg)
+ w.bool(f.Anonymous())
+ w.string(t.Tag(i)) // note (or tag)
+ }
+
+ case *types.Interface:
+ w.startType(interfaceType)
+ w.pkg(pkg)
+
+ n := t.NumEmbeddeds()
+ w.uint64(uint64(n))
+ for i := 0; i < n; i++ {
+ ft := t.EmbeddedType(i)
+ tPkg := pkg
+ if named, _ := aliases.Unalias(ft).(*types.Named); named != nil {
+ w.pos(named.Obj().Pos())
+ } else {
+ w.pos(token.NoPos)
+ }
+ w.typ(ft, tPkg)
+ }
+
+ // See comment for struct fields. In shallow mode we change the encoding
+ // for interface methods that are promoted from other packages.
+
+ n = t.NumExplicitMethods()
+ w.uint64(uint64(n))
+ for i := 0; i < n; i++ {
+ m := t.ExplicitMethod(i)
+ if w.p.shallow {
+ w.objectPath(m)
+ }
+ w.pos(m.Pos())
+ w.string(m.Name())
+ sig, _ := m.Type().(*types.Signature)
+ w.signature(sig)
+ }
+
+ case *types.Union:
+ w.startType(unionType)
+ nt := t.Len()
+ w.uint64(uint64(nt))
+ for i := 0; i < nt; i++ {
+ term := t.Term(i)
+ w.bool(term.Tilde())
+ w.typ(term.Type(), pkg)
+ }
+
+ default:
+ panic(internalErrorf("unexpected type: %v, %v", t, reflect.TypeOf(t)))
+ }
+}
+
+// objectPath writes the package and objectPath to use to look up obj in a
+// different package, when encoding in "shallow" mode.
+//
+// When doing a shallow import, the importer creates only the local package,
+// and requests package symbols for dependencies from the client.
+// However, certain types defined in the local package may hold objects defined
+// (perhaps deeply) within another package.
+//
+// For example, consider the following:
+//
+// package a
+// func F() chan * map[string] struct { X int }
+//
+// package b
+// import "a"
+// var B = a.F()
+//
+// In this example, the type of b.B holds fields defined in package a.
+// In order to have the correct canonical objects for the field defined in the
+// type of B, they are encoded as objectPaths and later looked up in the
+// importer. The same problem applies to interface methods.
+func (w *exportWriter) objectPath(obj types.Object) {
+ if obj.Pkg() == nil || obj.Pkg() == w.p.localpkg {
+ // obj.Pkg() may be nil for the builtin error.Error.
+ // In this case, or if obj is declared in the local package, no need to
+ // encode.
+ w.string("")
+ return
+ }
+ objectPath, err := w.p.objectpathEncoder().For(obj)
+ if err != nil {
+ // Fall back to the empty string, which will cause the importer to create a
+ // new object, which matches earlier behavior. Creating a new object is
+ // sufficient for many purposes (such as type checking), but causes certain
+ // references algorithms to fail (golang/go#60819). However, we didn't
+ // notice this problem during months of gopls@v0.12.0 testing.
+ //
+	// TODO(golang/go#61674): this workaround is insufficient, as in the case
+	// where the field is forwarded from an instantiated type and may not appear
+	// in the export data of the original package:
+ //
+ // // package a
+ // type A[P any] struct{ F P }
+ //
+ // // package b
+ // type B a.A[int]
+ //
+ // We need to update references algorithms not to depend on this
+ // de-duplication, at which point we may want to simply remove the
+ // workaround here.
+ w.string("")
+ return
+ }
+ w.string(string(objectPath))
+ w.pkg(obj.Pkg())
+}
+
+func (w *exportWriter) signature(sig *types.Signature) {
+ w.paramList(sig.Params())
+ w.paramList(sig.Results())
+ if sig.Params().Len() > 0 {
+ w.bool(sig.Variadic())
+ }
+}
+
+func (w *exportWriter) typeList(ts *types.TypeList, pkg *types.Package) {
+ w.uint64(uint64(ts.Len()))
+ for i := 0; i < ts.Len(); i++ {
+ w.typ(ts.At(i), pkg)
+ }
+}
+
+func (w *exportWriter) tparamList(prefix string, list *types.TypeParamList, pkg *types.Package) {
+ ll := uint64(list.Len())
+ w.uint64(ll)
+ for i := 0; i < list.Len(); i++ {
+ tparam := list.At(i)
+ // Set the type parameter exportName before exporting its type.
+ exportName := tparamExportName(prefix, tparam)
+ w.p.tparamNames[tparam.Obj()] = exportName
+ w.typ(list.At(i), pkg)
+ }
+}
+
+const blankMarker = "$"
+
+// tparamExportName returns the 'exported' name of a type parameter, which
+// differs from its actual object name: it is prefixed with a qualifier, and
+// blank type parameter names are disambiguated by their index in the type
+// parameter list.
+func tparamExportName(prefix string, tparam *types.TypeParam) string {
+ assert(prefix != "")
+ name := tparam.Obj().Name()
+ if name == "_" {
+ name = blankMarker + strconv.Itoa(tparam.Index())
+ }
+ return prefix + "." + name
+}
+
+// tparamName returns the real name of a type parameter, after stripping its
+// qualifying prefix and reverting blank-name encoding. See tparamExportName
+// for details.
+func tparamName(exportName string) string {
+ // Remove the "path" from the type param name that makes it unique.
+ ix := strings.LastIndex(exportName, ".")
+ if ix < 0 {
+ errorf("malformed type parameter export name %s: missing prefix", exportName)
+ }
+ name := exportName[ix+1:]
+ if strings.HasPrefix(name, blankMarker) {
+ return "_"
+ }
+ return name
+}
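
A rough sketch of the round trip described above (illustration only, not part of the vendored file; it assumes the snippet is compiled inside this package, and "pkg.T" is an arbitrary prefix):

    blank := types.NewTypeParam(types.NewTypeName(token.NoPos, nil, "_", nil), nil)
    owner := types.NewNamed(types.NewTypeName(token.NoPos, nil, "T", nil), types.Typ[types.Int], nil)
    owner.SetTypeParams([]*types.TypeParam{blank}) // binding gives the blank parameter its index (0)
    fmt.Println(tparamExportName("pkg.T", blank))  // "pkg.T.$0" (prefix, then blankMarker plus index)
    fmt.Println(tparamName("pkg.T.$0"))            // "_" (prefix stripped, blank name restored)
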
+
+func (w *exportWriter) paramList(tup *types.Tuple) {
+ n := tup.Len()
+ w.uint64(uint64(n))
+ for i := 0; i < n; i++ {
+ w.param(tup.At(i))
+ }
+}
+
+func (w *exportWriter) param(obj types.Object) {
+ w.pos(obj.Pos())
+ w.localIdent(obj)
+ w.typ(obj.Type(), obj.Pkg())
+}
+
+func (w *exportWriter) value(typ types.Type, v constant.Value) {
+ w.typ(typ, nil)
+ if w.p.version >= iexportVersionGo1_18 {
+ w.int64(int64(v.Kind()))
+ }
+
+ if v.Kind() == constant.Unknown {
+ // golang/go#60605: treat unknown constant values as if they have invalid type
+ //
+ // This loses some fidelity over the package type-checked from source, but that
+ // is acceptable.
+ //
+ // TODO(rfindley): we should switch on the recorded constant kind rather
+ // than the constant type
+ return
+ }
+
+ switch b := typ.Underlying().(*types.Basic); b.Info() & types.IsConstType {
+ case types.IsBoolean:
+ w.bool(constant.BoolVal(v))
+ case types.IsInteger:
+ var i big.Int
+ if i64, exact := constant.Int64Val(v); exact {
+ i.SetInt64(i64)
+ } else if ui64, exact := constant.Uint64Val(v); exact {
+ i.SetUint64(ui64)
+ } else {
+ i.SetString(v.ExactString(), 10)
+ }
+ w.mpint(&i, typ)
+ case types.IsFloat:
+ f := constantToFloat(v)
+ w.mpfloat(f, typ)
+ case types.IsComplex:
+ w.mpfloat(constantToFloat(constant.Real(v)), typ)
+ w.mpfloat(constantToFloat(constant.Imag(v)), typ)
+ case types.IsString:
+ w.string(constant.StringVal(v))
+ default:
+ if b.Kind() == types.Invalid {
+ // package contains type errors
+ break
+ }
+ panic(internalErrorf("unexpected type %v (%v)", typ, typ.Underlying()))
+ }
+}
+
+// constantToFloat converts a constant.Value with kind constant.Float to a
+// big.Float.
+func constantToFloat(x constant.Value) *big.Float {
+ x = constant.ToFloat(x)
+ // Use the same floating-point precision (512) as cmd/compile
+ // (see Mpprec in cmd/compile/internal/gc/mpfloat.go).
+ const mpprec = 512
+ var f big.Float
+ f.SetPrec(mpprec)
+ if v, exact := constant.Float64Val(x); exact {
+ // float64
+ f.SetFloat64(v)
+ } else if num, denom := constant.Num(x), constant.Denom(x); num.Kind() == constant.Int {
+ // TODO(gri): add big.Rat accessor to constant.Value.
+ n := valueToRat(num)
+ d := valueToRat(denom)
+ f.SetRat(n.Quo(n, d))
+ } else {
+ // Value too large to represent as a fraction => inaccessible.
+ // TODO(gri): add big.Float accessor to constant.Value.
+ _, ok := f.SetString(x.ExactString())
+ assert(ok)
+ }
+ return &f
+}
+
+func valueToRat(x constant.Value) *big.Rat {
+ // Convert little-endian to big-endian.
+ // I can't believe this is necessary.
+ bytes := constant.Bytes(x)
+ for i := 0; i < len(bytes)/2; i++ {
+ bytes[i], bytes[len(bytes)-1-i] = bytes[len(bytes)-1-i], bytes[i]
+ }
+ return new(big.Rat).SetInt(new(big.Int).SetBytes(bytes))
+}
+
+// mpint exports a multi-precision integer.
+//
+// For unsigned types, small values are written out as a single
+// byte. Larger values are written out as a length-prefixed big-endian
+// byte string, where the length prefix is encoded as its complement.
+// For example, bytes 0, 1, and 2 directly represent the integer
+// values 0, 1, and 2; while bytes 255, 254, and 253 indicate that a 1-,
+// 2-, or 3-byte big-endian string follows.
+//
+// Encoding for signed types uses the same general approach as for
+// unsigned types, except small values use zig-zag encoding and the
+// bottom bit of the length prefix byte for large values is reserved as a
+// sign bit.
+//
+// The exact boundary between small and large encodings varies
+// according to the maximum number of bytes needed to encode a value
+// of type typ. As a special case, 8-bit types are always encoded as a
+// single byte.
+//
+// TODO(mdempsky): Is this level of complexity really worthwhile?
+func (w *exportWriter) mpint(x *big.Int, typ types.Type) {
+ basic, ok := typ.Underlying().(*types.Basic)
+ if !ok {
+ panic(internalErrorf("unexpected type %v (%T)", typ.Underlying(), typ.Underlying()))
+ }
+
+ signed, maxBytes := intSize(basic)
+
+ negative := x.Sign() < 0
+ if !signed && negative {
+ panic(internalErrorf("negative unsigned integer; type %v, value %v", typ, x))
+ }
+
+ b := x.Bytes()
+ if len(b) > 0 && b[0] == 0 {
+ panic(internalErrorf("leading zeros"))
+ }
+ if uint(len(b)) > maxBytes {
+ panic(internalErrorf("bad mpint length: %d > %d (type %v, value %v)", len(b), maxBytes, typ, x))
+ }
+
+ maxSmall := 256 - maxBytes
+ if signed {
+ maxSmall = 256 - 2*maxBytes
+ }
+ if maxBytes == 1 {
+ maxSmall = 256
+ }
+
+ // Check if x can use small value encoding.
+ if len(b) <= 1 {
+ var ux uint
+ if len(b) == 1 {
+ ux = uint(b[0])
+ }
+ if signed {
+ ux <<= 1
+ if negative {
+ ux--
+ }
+ }
+ if ux < maxSmall {
+ w.data.WriteByte(byte(ux))
+ return
+ }
+ }
+
+ n := 256 - uint(len(b))
+ if signed {
+ n = 256 - 2*uint(len(b))
+ if negative {
+ n |= 1
+ }
+ }
+ if n < maxSmall || n >= 256 {
+ panic(internalErrorf("encoding mistake: %d, %v, %v => %d", len(b), signed, negative, n))
+ }
+
+ w.data.WriteByte(byte(n))
+ w.data.Write(b)
+}
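
To make the small/large boundary concrete, this sketch (illustration only; it assumes access to this package's intSize helper) recomputes maxSmall for a few basic kinds:

    for _, k := range []types.BasicKind{types.Uint8, types.Int32, types.Uint64} {
        signed, maxBytes := intSize(types.Typ[k])
        maxSmall := 256 - maxBytes
        if signed {
            maxSmall = 256 - 2*maxBytes
        }
        if maxBytes == 1 {
            maxSmall = 256
        }
        fmt.Println(signed, maxBytes, maxSmall)
        // uint8:  false 1 256 (every value fits in a single byte)
        // int32:  true  4 248 (bytes 0..247 are zig-zag small values; 248..255 are length prefixes)
        // uint64: false 8 248 (prefix bytes 255..248 announce 1 to 8 following big-endian bytes)
    }
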
+
+// mpfloat exports a multi-precision floating point number.
+//
+// The number's value is decomposed into mantissa × 2**exponent, where
+// mantissa is an integer. The value is written out as mantissa (as a
+// multi-precision integer) and then the exponent, except exponent is
+// omitted if mantissa is zero.
+func (w *exportWriter) mpfloat(f *big.Float, typ types.Type) {
+ if f.IsInf() {
+ panic("infinite constant")
+ }
+
+ // Break into f = mant × 2**exp, with 0.5 <= mant < 1.
+ var mant big.Float
+ exp := int64(f.MantExp(&mant))
+
+ // Scale so that mant is an integer.
+ prec := mant.MinPrec()
+ mant.SetMantExp(&mant, int(prec))
+ exp -= int64(prec)
+
+ manti, acc := mant.Int(nil)
+ if acc != big.Exact {
+ panic(internalErrorf("mantissa scaling failed for %f (%s)", f, acc))
+ }
+ w.mpint(manti, typ)
+ if manti.Sign() != 0 {
+ w.int64(exp)
+ }
+}
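
As a worked instance of the decomposition above (illustration only; the value 6.25 is an arbitrary example):

    var f big.Float
    f.SetPrec(64).SetFloat64(6.25)
    var mant big.Float
    exp := int64(f.MantExp(&mant))    // 6.25 = 0.78125 * 2**3, so exp = 3
    prec := mant.MinPrec()            // 0.78125 needs 5 mantissa bits (binary 0.11001)
    mant.SetMantExp(&mant, int(prec)) // scale the mantissa up to the integer 25
    exp -= int64(prec)                // now exp = -2, i.e. 6.25 = 25 * 2**-2
    manti, _ := mant.Int(nil)
    fmt.Println(manti, exp)           // prints: 25 -2
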
+
+func (w *exportWriter) bool(b bool) bool {
+ var x uint64
+ if b {
+ x = 1
+ }
+ w.uint64(x)
+ return b
+}
+
+func (w *exportWriter) int64(x int64) { w.data.int64(x) }
+func (w *exportWriter) uint64(x uint64) { w.data.uint64(x) }
+func (w *exportWriter) string(s string) { w.uint64(w.p.stringOff(s)) }
+
+func (w *exportWriter) localIdent(obj types.Object) {
+ // Anonymous parameters.
+ if obj == nil {
+ w.string("")
+ return
+ }
+
+ name := obj.Name()
+ if name == "_" {
+ w.string("_")
+ return
+ }
+
+ w.string(name)
+}
+
+type intWriter struct {
+ bytes.Buffer
+}
+
+func (w *intWriter) int64(x int64) {
+ var buf [binary.MaxVarintLen64]byte
+ n := binary.PutVarint(buf[:], x)
+ w.Write(buf[:n])
+}
+
+func (w *intWriter) uint64(x uint64) {
+ var buf [binary.MaxVarintLen64]byte
+ n := binary.PutUvarint(buf[:], x)
+ w.Write(buf[:n])
+}
+
+func assert(cond bool) {
+ if !cond {
+ panic("internal error: assertion failed")
+ }
+}
+
+// The below is copied from go/src/cmd/compile/internal/gc/syntax.go.
+
+// objQueue is a FIFO queue of types.Object. The zero value of objQueue is
+// a ready-to-use empty queue.
+type objQueue struct {
+ ring []types.Object
+ head, tail int
+}
+
+// empty reports whether q contains no objects.
+func (q *objQueue) empty() bool {
+ return q.head == q.tail
+}
+
+// pushTail appends n to the tail of the queue.
+func (q *objQueue) pushTail(obj types.Object) {
+ if len(q.ring) == 0 {
+ q.ring = make([]types.Object, 16)
+ } else if q.head+len(q.ring) == q.tail {
+ // Grow the ring.
+ nring := make([]types.Object, len(q.ring)*2)
+ // Copy the old elements.
+ part := q.ring[q.head%len(q.ring):]
+ if q.tail-q.head <= len(part) {
+ part = part[:q.tail-q.head]
+ copy(nring, part)
+ } else {
+ pos := copy(nring, part)
+ copy(nring[pos:], q.ring[:q.tail%len(q.ring)])
+ }
+ q.ring, q.head, q.tail = nring, 0, q.tail-q.head
+ }
+
+ q.ring[q.tail%len(q.ring)] = obj
+ q.tail++
+}
+
+// popHead pops a node from the head of the queue. It panics if q is empty.
+func (q *objQueue) popHead() types.Object {
+ if q.empty() {
+ panic("dequeue empty")
+ }
+ obj := q.ring[q.head%len(q.ring)]
+ q.head++
+ return obj
+}
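
A minimal usage sketch (illustration only), relying on the documented fact that the zero value is a ready-to-use empty queue:

    var q objQueue
    a := types.NewTypeName(token.NoPos, nil, "A", nil)
    b := types.NewTypeName(token.NoPos, nil, "B", nil)
    q.pushTail(a)
    q.pushTail(b)
    for !q.empty() {
        fmt.Println(q.popHead().Name()) // prints "A", then "B" (FIFO order)
    }
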
+
+// internalError represents an error generated inside this package.
+type internalError string
+
+func (e internalError) Error() string { return "gcimporter: " + string(e) }
+
+// TODO(adonovan): make this call panic, so that it's symmetric with errorf.
+// Otherwise it's easy to forget to do anything with the error.
+//
+// TODO(adonovan): also, consider switching the names "errorf" and
+// "internalErrorf" as the former is used for bugs, whose cause is
+// internal inconsistency, whereas the latter is used for ordinary
+// situations like bad input, whose cause is external.
+func internalErrorf(format string, args ...interface{}) error {
+ return internalError(fmt.Sprintf(format, args...))
+}
diff --git a/vendor/golang.org/x/tools/internal/gcimporter/iimport.go b/vendor/golang.org/x/tools/internal/gcimporter/iimport.go
new file mode 100644
index 0000000..136aa03
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/gcimporter/iimport.go
@@ -0,0 +1,1100 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Indexed package import.
+// See cmd/compile/internal/gc/iexport.go for the export data format.
+
+// This file is a copy of $GOROOT/src/go/internal/gcimporter/iimport.go.
+
+package gcimporter
+
+import (
+ "bytes"
+ "encoding/binary"
+ "fmt"
+ "go/constant"
+ "go/token"
+ "go/types"
+ "io"
+ "math/big"
+ "sort"
+ "strings"
+
+ "golang.org/x/tools/go/types/objectpath"
+ "golang.org/x/tools/internal/aliases"
+ "golang.org/x/tools/internal/typesinternal"
+)
+
+type intReader struct {
+ *bytes.Reader
+ path string
+}
+
+func (r *intReader) int64() int64 {
+ i, err := binary.ReadVarint(r.Reader)
+ if err != nil {
+ errorf("import %q: read varint error: %v", r.path, err)
+ }
+ return i
+}
+
+func (r *intReader) uint64() uint64 {
+ i, err := binary.ReadUvarint(r.Reader)
+ if err != nil {
+ errorf("import %q: read varint error: %v", r.path, err)
+ }
+ return i
+}
+
+// Keep this in sync with constants in iexport.go.
+const (
+ iexportVersionGo1_11 = 0
+ iexportVersionPosCol = 1
+ iexportVersionGo1_18 = 2
+ iexportVersionGenerics = 2
+
+ iexportVersionCurrent = 2
+)
+
+type ident struct {
+ pkg *types.Package
+ name string
+}
+
+const predeclReserved = 32
+
+type itag uint64
+
+const (
+ // Types
+ definedType itag = iota
+ pointerType
+ sliceType
+ arrayType
+ chanType
+ mapType
+ signatureType
+ structType
+ interfaceType
+ typeParamType
+ instanceType
+ unionType
+ aliasType
+)
+
+// Object tags
+const (
+ varTag = 'V'
+ funcTag = 'F'
+ genericFuncTag = 'G'
+ constTag = 'C'
+ aliasTag = 'A'
+ genericAliasTag = 'B'
+ typeParamTag = 'P'
+ typeTag = 'T'
+ genericTypeTag = 'U'
+)
+
+// IImportData imports a package from the serialized package data
+// and returns 0 and a reference to the package.
+// If the export data version is not recognized or the format is otherwise
+// compromised, an error is returned.
+func IImportData(fset *token.FileSet, imports map[string]*types.Package, data []byte, path string) (int, *types.Package, error) {
+ pkgs, err := iimportCommon(fset, GetPackagesFromMap(imports), data, false, path, false, nil)
+ if err != nil {
+ return 0, nil, err
+ }
+ return 0, pkgs[0], nil
+}
+
+// IImportBundle imports a set of packages from the serialized package bundle.
+func IImportBundle(fset *token.FileSet, imports map[string]*types.Package, data []byte) ([]*types.Package, error) {
+ return iimportCommon(fset, GetPackagesFromMap(imports), data, true, "", false, nil)
+}
+
+// A GetPackagesFunc obtains the non-nil symbols for a set of
+// packages, creating and recursively importing them as needed. An
+// implementation should store each package symbol in the Pkg
+// field of the items array.
+//
+// Any error causes importing to fail; a client can exploit this to read just
+// the import manifest of an export data file without fully decoding it.
+type GetPackagesFunc = func(items []GetPackagesItem) error
+
+// A GetPackagesItem is a request from the importer for the package
+// symbol of the specified name and path.
+type GetPackagesItem struct {
+ Name, Path string
+ Pkg *types.Package // to be filled in by GetPackagesFunc call
+
+ // private importer state
+ pathOffset uint64
+ nameIndex map[string]uint64
+}
+
+// GetPackagesFromMap returns a GetPackagesFunc that retrieves
+// packages from the given map of package path to package.
+//
+// The returned function may mutate m: each requested package that is not
+// found is created with types.NewPackage and inserted into m.
+func GetPackagesFromMap(m map[string]*types.Package) GetPackagesFunc {
+ return func(items []GetPackagesItem) error {
+ for i, item := range items {
+ pkg, ok := m[item.Path]
+ if !ok {
+ pkg = types.NewPackage(item.Path, item.Name)
+ m[item.Path] = pkg
+ }
+ items[i].Pkg = pkg
+ }
+ return nil
+ }
+}
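
A small sketch of how the map-backed implementation fills in requested items (illustration only; the "fmt" path is an arbitrary example):

    m := map[string]*types.Package{}
    get := GetPackagesFromMap(m)
    items := []GetPackagesItem{{Name: "fmt", Path: "fmt"}}
    if err := get(items); err != nil {
        panic(err) // the map-backed implementation never returns an error
    }
    // items[0].Pkg now holds a freshly created *types.Package for "fmt",
    // and the map stores the same pointer for reuse on later requests:
    fmt.Println(items[0].Pkg.Path(), m["fmt"] == items[0].Pkg) // prints: fmt true
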
+
+func iimportCommon(fset *token.FileSet, getPackages GetPackagesFunc, data []byte, bundle bool, path string, shallow bool, reportf ReportFunc) (pkgs []*types.Package, err error) {
+ const currentVersion = iexportVersionCurrent
+ version := int64(-1)
+ if !debug {
+ defer func() {
+ if e := recover(); e != nil {
+ if bundle {
+ err = fmt.Errorf("%v", e)
+ } else if version > currentVersion {
+ err = fmt.Errorf("cannot import %q (%v), export data is newer version - update tool", path, e)
+ } else {
+ err = fmt.Errorf("internal error while importing %q (%v); please report an issue", path, e)
+ }
+ }
+ }()
+ }
+
+ r := &intReader{bytes.NewReader(data), path}
+
+ if bundle {
+ if v := r.uint64(); v != bundleVersion {
+ errorf("unknown bundle format version %d", v)
+ }
+ }
+
+ version = int64(r.uint64())
+ switch version {
+ case iexportVersionGo1_18, iexportVersionPosCol, iexportVersionGo1_11:
+ default:
+ if version > iexportVersionGo1_18 {
+ errorf("unstable iexport format version %d, just rebuild compiler and std library", version)
+ } else {
+ errorf("unknown iexport format version %d", version)
+ }
+ }
+
+ sLen := int64(r.uint64())
+ var fLen int64
+ var fileOffset []uint64
+ if shallow {
+ // Shallow mode uses a different position encoding.
+ fLen = int64(r.uint64())
+ fileOffset = make([]uint64, r.uint64())
+ for i := range fileOffset {
+ fileOffset[i] = r.uint64()
+ }
+ }
+ dLen := int64(r.uint64())
+
+ whence, _ := r.Seek(0, io.SeekCurrent)
+ stringData := data[whence : whence+sLen]
+ fileData := data[whence+sLen : whence+sLen+fLen]
+ declData := data[whence+sLen+fLen : whence+sLen+fLen+dLen]
+ r.Seek(sLen+fLen+dLen, io.SeekCurrent)
+
+ p := iimporter{
+ version: int(version),
+ ipath: path,
+ aliases: aliases.Enabled(),
+ shallow: shallow,
+ reportf: reportf,
+
+ stringData: stringData,
+ stringCache: make(map[uint64]string),
+ fileOffset: fileOffset,
+ fileData: fileData,
+ fileCache: make([]*token.File, len(fileOffset)),
+ pkgCache: make(map[uint64]*types.Package),
+
+ declData: declData,
+ pkgIndex: make(map[*types.Package]map[string]uint64),
+ typCache: make(map[uint64]types.Type),
+ // Separate map for typeparams, keyed by their package and unique
+ // name.
+ tparamIndex: make(map[ident]types.Type),
+
+ fake: fakeFileSet{
+ fset: fset,
+ files: make(map[string]*fileInfo),
+ },
+ }
+ defer p.fake.setLines() // set lines for files in fset
+
+ for i, pt := range predeclared() {
+ p.typCache[uint64(i)] = pt
+ }
+
+ // Gather the relevant packages from the manifest.
+ items := make([]GetPackagesItem, r.uint64())
+ uniquePkgPaths := make(map[string]bool)
+ for i := range items {
+ pkgPathOff := r.uint64()
+ pkgPath := p.stringAt(pkgPathOff)
+ pkgName := p.stringAt(r.uint64())
+ _ = r.uint64() // package height; unused by go/types
+
+ if pkgPath == "" {
+ pkgPath = path
+ }
+ items[i].Name = pkgName
+ items[i].Path = pkgPath
+ items[i].pathOffset = pkgPathOff
+
+ // Read index for package.
+ nameIndex := make(map[string]uint64)
+ nSyms := r.uint64()
+ // In shallow mode, only the current package (i=0) has an index.
+ assert(!(shallow && i > 0 && nSyms != 0))
+ for ; nSyms > 0; nSyms-- {
+ name := p.stringAt(r.uint64())
+ nameIndex[name] = r.uint64()
+ }
+
+ items[i].nameIndex = nameIndex
+
+ uniquePkgPaths[pkgPath] = true
+ }
+ // Debugging #63822; hypothesis: there are duplicate PkgPaths.
+ if len(uniquePkgPaths) != len(items) {
+ reportf("found duplicate PkgPaths while reading export data manifest: %v", items)
+ }
+
+ // Request packages all at once from the client,
+ // enabling a parallel implementation.
+ if err := getPackages(items); err != nil {
+ return nil, err // don't wrap this error
+ }
+
+ // Check the results and complete the index.
+ pkgList := make([]*types.Package, len(items))
+ for i, item := range items {
+ pkg := item.Pkg
+ if pkg == nil {
+ errorf("internal error: getPackages returned nil package for %q", item.Path)
+ } else if pkg.Path() != item.Path {
+ errorf("internal error: getPackages returned wrong path %q, want %q", pkg.Path(), item.Path)
+ } else if pkg.Name() != item.Name {
+ errorf("internal error: getPackages returned wrong name %s for package %q, want %s", pkg.Name(), item.Path, item.Name)
+ }
+ p.pkgCache[item.pathOffset] = pkg
+ p.pkgIndex[pkg] = item.nameIndex
+ pkgList[i] = pkg
+ }
+
+ if bundle {
+ pkgs = make([]*types.Package, r.uint64())
+ for i := range pkgs {
+ pkg := p.pkgAt(r.uint64())
+ imps := make([]*types.Package, r.uint64())
+ for j := range imps {
+ imps[j] = p.pkgAt(r.uint64())
+ }
+ pkg.SetImports(imps)
+ pkgs[i] = pkg
+ }
+ } else {
+ if len(pkgList) == 0 {
+ errorf("no packages found for %s", path)
+ panic("unreachable")
+ }
+ pkgs = pkgList[:1]
+
+ // record all referenced packages as imports
+ list := append(([]*types.Package)(nil), pkgList[1:]...)
+ sort.Sort(byPath(list))
+ pkgs[0].SetImports(list)
+ }
+
+ for _, pkg := range pkgs {
+ if pkg.Complete() {
+ continue
+ }
+
+ names := make([]string, 0, len(p.pkgIndex[pkg]))
+ for name := range p.pkgIndex[pkg] {
+ names = append(names, name)
+ }
+ sort.Strings(names)
+ for _, name := range names {
+ p.doDecl(pkg, name)
+ }
+
+ // package was imported completely and without errors
+ pkg.MarkComplete()
+ }
+
+ // SetConstraint can't be called if the constraint type is not yet complete.
+ // When type params are created in the typeParamTag case of (*importReader).obj(),
+ // the associated constraint type may not be complete due to recursion.
+ // Therefore, we defer calling SetConstraint there, and call it here instead
+ // after all types are complete.
+ for _, d := range p.later {
+ d.t.SetConstraint(d.constraint)
+ }
+
+ for _, typ := range p.interfaceList {
+ typ.Complete()
+ }
+
+ // Workaround for golang/go#61561. See the doc for instanceList for details.
+ for _, typ := range p.instanceList {
+ if iface, _ := typ.Underlying().(*types.Interface); iface != nil {
+ iface.Complete()
+ }
+ }
+
+ return pkgs, nil
+}
+
+type setConstraintArgs struct {
+ t *types.TypeParam
+ constraint types.Type
+}
+
+type iimporter struct {
+ version int
+ ipath string
+
+ aliases bool
+ shallow bool
+ reportf ReportFunc // if non-nil, used to report bugs
+
+ stringData []byte
+ stringCache map[uint64]string
+ fileOffset []uint64 // fileOffset[i] is offset in fileData for info about file encoded as i
+ fileData []byte
+ fileCache []*token.File // memoized decoding of file encoded as i
+ pkgCache map[uint64]*types.Package
+
+ declData []byte
+ pkgIndex map[*types.Package]map[string]uint64
+ typCache map[uint64]types.Type
+ tparamIndex map[ident]types.Type
+
+ fake fakeFileSet
+ interfaceList []*types.Interface
+
+ // Workaround for the go/types bug golang/go#61561: instances produced during
+ // instantiation may contain incomplete interfaces. Here we only complete the
+ // underlying type of the instance, which is the most common case but doesn't
+ // handle parameterized interface literals defined deeper in the type.
+ instanceList []types.Type // instances for later completion (see golang/go#61561)
+
+ // Arguments for calls to SetConstraint that are deferred due to recursive types
+ later []setConstraintArgs
+
+ indent int // for tracing support
+}
+
+func (p *iimporter) trace(format string, args ...interface{}) {
+ if !trace {
+ // Call sites should also be guarded, but having this check here allows
+ // easily enabling/disabling debug trace statements.
+ return
+ }
+ fmt.Printf(strings.Repeat("..", p.indent)+format+"\n", args...)
+}
+
+func (p *iimporter) doDecl(pkg *types.Package, name string) {
+ if debug {
+ p.trace("import decl %s", name)
+ p.indent++
+ defer func() {
+ p.indent--
+ p.trace("=> %s", name)
+ }()
+ }
+ // See if we've already imported this declaration.
+ if obj := pkg.Scope().Lookup(name); obj != nil {
+ return
+ }
+
+ off, ok := p.pkgIndex[pkg][name]
+ if !ok {
+ // In deep mode, the index should be complete. In shallow
+ // mode, we should have already recursively loaded necessary
+ // dependencies so the above Lookup succeeds.
+ errorf("%v.%v not in index", pkg, name)
+ }
+
+ r := &importReader{p: p, currPkg: pkg}
+ r.declReader.Reset(p.declData[off:])
+
+ r.obj(name)
+}
+
+func (p *iimporter) stringAt(off uint64) string {
+ if s, ok := p.stringCache[off]; ok {
+ return s
+ }
+
+ slen, n := binary.Uvarint(p.stringData[off:])
+ if n <= 0 {
+ errorf("varint failed")
+ }
+ spos := off + uint64(n)
+ s := string(p.stringData[spos : spos+slen])
+ p.stringCache[off] = s
+ return s
+}
+
+func (p *iimporter) fileAt(index uint64) *token.File {
+ file := p.fileCache[index]
+ if file == nil {
+ off := p.fileOffset[index]
+ file = p.decodeFile(intReader{bytes.NewReader(p.fileData[off:]), p.ipath})
+ p.fileCache[index] = file
+ }
+ return file
+}
+
+func (p *iimporter) decodeFile(rd intReader) *token.File {
+ filename := p.stringAt(rd.uint64())
+ size := int(rd.uint64())
+ file := p.fake.fset.AddFile(filename, -1, size)
+
+ // SetLines requires a nondecreasing sequence.
+ // Because it is common for clients to derive the interval
+ // [start, start+len(name)] from a start position, and we
+ // want to ensure that the end offset is on the same line,
+ // we fill in the gaps of the sparse encoding with values
+ // that strictly increase by the largest possible amount.
+ // This allows us to avoid having to record the actual end
+ // offset of each needed line.
+
+ lines := make([]int, int(rd.uint64()))
+ var index, offset int
+ for i, n := 0, int(rd.uint64()); i < n; i++ {
+ index += int(rd.uint64())
+ offset += int(rd.uint64())
+ lines[index] = offset
+
+ // Ensure monotonicity between points.
+ for j := index - 1; j > 0 && lines[j] == 0; j-- {
+ lines[j] = lines[j+1] - 1
+ }
+ }
+
+ // Ensure monotonicity after last point.
+ for j := len(lines) - 1; j > 0 && lines[j] == 0; j-- {
+ size--
+ lines[j] = size
+ }
+
+ if !file.SetLines(lines) {
+ errorf("SetLines failed: %d", lines) // can't happen
+ }
+ return file
+}
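
To make the gap filling concrete, here is a worked example (illustration only; the sizes and offsets are made up): a file of size 100 with 5 line entries and sparse points (index 0, offset 0) and (index 3, offset 20) decodes as follows.

    lines, size := make([]int, 5), 100
    lines[0], lines[3] = 0, 20
    for j := 3 - 1; j > 0 && lines[j] == 0; j-- { // fill between points, strictly increasing
        lines[j] = lines[j+1] - 1
    }
    for j := len(lines) - 1; j > 0 && lines[j] == 0; j-- { // fill after the last point
        size--
        lines[j] = size
    }
    fmt.Println(lines) // prints: [0 18 19 20 99]
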
+
+func (p *iimporter) pkgAt(off uint64) *types.Package {
+ if pkg, ok := p.pkgCache[off]; ok {
+ return pkg
+ }
+ path := p.stringAt(off)
+ errorf("missing package %q in %q", path, p.ipath)
+ return nil
+}
+
+func (p *iimporter) typAt(off uint64, base *types.Named) types.Type {
+ if t, ok := p.typCache[off]; ok && canReuse(base, t) {
+ return t
+ }
+
+ if off < predeclReserved {
+ errorf("predeclared type missing from cache: %v", off)
+ }
+
+ r := &importReader{p: p}
+ r.declReader.Reset(p.declData[off-predeclReserved:])
+ t := r.doType(base)
+
+ if canReuse(base, t) {
+ p.typCache[off] = t
+ }
+ return t
+}
+
+// canReuse reports whether the type rhs on the RHS of the declaration for def
+// may be re-used.
+//
+// Specifically, if def is non-nil and rhs is an interface type with methods, it
+// may not be re-used because we have a convention of setting the receiver type
+// for interface methods to def.
+func canReuse(def *types.Named, rhs types.Type) bool {
+ if def == nil {
+ return true
+ }
+ iface, _ := aliases.Unalias(rhs).(*types.Interface)
+ if iface == nil {
+ return true
+ }
+ // Don't use iface.Empty() here as iface may not be complete.
+ return iface.NumEmbeddeds() == 0 && iface.NumExplicitMethods() == 0
+}
+
+type importReader struct {
+ p *iimporter
+ declReader bytes.Reader
+ currPkg *types.Package
+ prevFile string
+ prevLine int64
+ prevColumn int64
+}
+
+func (r *importReader) obj(name string) {
+ tag := r.byte()
+ pos := r.pos()
+
+ switch tag {
+ case aliasTag:
+ typ := r.typ()
+ // TODO(adonovan): support generic aliases:
+ // if tag == genericAliasTag {
+ // tparams := r.tparamList()
+ // alias.SetTypeParams(tparams)
+ // }
+ r.declare(aliases.NewAlias(r.p.aliases, pos, r.currPkg, name, typ))
+
+ case constTag:
+ typ, val := r.value()
+
+ r.declare(types.NewConst(pos, r.currPkg, name, typ, val))
+
+ case funcTag, genericFuncTag:
+ var tparams []*types.TypeParam
+ if tag == genericFuncTag {
+ tparams = r.tparamList()
+ }
+ sig := r.signature(nil, nil, tparams)
+ r.declare(types.NewFunc(pos, r.currPkg, name, sig))
+
+ case typeTag, genericTypeTag:
+		// Types can be recursive. We need to set up a stub
+ // declaration before recursing.
+ obj := types.NewTypeName(pos, r.currPkg, name, nil)
+ named := types.NewNamed(obj, nil, nil)
+ // Declare obj before calling r.tparamList, so the new type name is recognized
+ // if used in the constraint of one of its own typeparams (see #48280).
+ r.declare(obj)
+ if tag == genericTypeTag {
+ tparams := r.tparamList()
+ named.SetTypeParams(tparams)
+ }
+
+ underlying := r.p.typAt(r.uint64(), named).Underlying()
+ named.SetUnderlying(underlying)
+
+ if !isInterface(underlying) {
+ for n := r.uint64(); n > 0; n-- {
+ mpos := r.pos()
+ mname := r.ident()
+ recv := r.param()
+
+ // If the receiver has any targs, set those as the
+ // rparams of the method (since those are the
+ // typeparams being used in the method sig/body).
+ _, recvNamed := typesinternal.ReceiverNamed(recv)
+ targs := recvNamed.TypeArgs()
+ var rparams []*types.TypeParam
+ if targs.Len() > 0 {
+ rparams = make([]*types.TypeParam, targs.Len())
+ for i := range rparams {
+ rparams[i] = aliases.Unalias(targs.At(i)).(*types.TypeParam)
+ }
+ }
+ msig := r.signature(recv, rparams, nil)
+
+ named.AddMethod(types.NewFunc(mpos, r.currPkg, mname, msig))
+ }
+ }
+
+ case typeParamTag:
+ // We need to "declare" a typeparam in order to have a name that
+ // can be referenced recursively (if needed) in the type param's
+ // bound.
+ if r.p.version < iexportVersionGenerics {
+ errorf("unexpected type param type")
+ }
+ name0 := tparamName(name)
+ tn := types.NewTypeName(pos, r.currPkg, name0, nil)
+ t := types.NewTypeParam(tn, nil)
+
+ // To handle recursive references to the typeparam within its
+ // bound, save the partial type in tparamIndex before reading the bounds.
+ id := ident{r.currPkg, name}
+ r.p.tparamIndex[id] = t
+ var implicit bool
+ if r.p.version >= iexportVersionGo1_18 {
+ implicit = r.bool()
+ }
+ constraint := r.typ()
+ if implicit {
+ iface, _ := aliases.Unalias(constraint).(*types.Interface)
+ if iface == nil {
+ errorf("non-interface constraint marked implicit")
+ }
+ iface.MarkImplicit()
+ }
+ // The constraint type may not be complete, if we
+ // are in the middle of a type recursion involving type
+ // constraints. So, we defer SetConstraint until we have
+ // completely set up all types in ImportData.
+ r.p.later = append(r.p.later, setConstraintArgs{t: t, constraint: constraint})
+
+ case varTag:
+ typ := r.typ()
+
+ r.declare(types.NewVar(pos, r.currPkg, name, typ))
+
+ default:
+ errorf("unexpected tag: %v", tag)
+ }
+}
+
+func (r *importReader) declare(obj types.Object) {
+ obj.Pkg().Scope().Insert(obj)
+}
+
+func (r *importReader) value() (typ types.Type, val constant.Value) {
+ typ = r.typ()
+ if r.p.version >= iexportVersionGo1_18 {
+ // TODO: add support for using the kind.
+ _ = constant.Kind(r.int64())
+ }
+
+ switch b := typ.Underlying().(*types.Basic); b.Info() & types.IsConstType {
+ case types.IsBoolean:
+ val = constant.MakeBool(r.bool())
+
+ case types.IsString:
+ val = constant.MakeString(r.string())
+
+ case types.IsInteger:
+ var x big.Int
+ r.mpint(&x, b)
+ val = constant.Make(&x)
+
+ case types.IsFloat:
+ val = r.mpfloat(b)
+
+ case types.IsComplex:
+ re := r.mpfloat(b)
+ im := r.mpfloat(b)
+ val = constant.BinaryOp(re, token.ADD, constant.MakeImag(im))
+
+ default:
+ if b.Kind() == types.Invalid {
+ val = constant.MakeUnknown()
+ return
+ }
+ errorf("unexpected type %v", typ) // panics
+ panic("unreachable")
+ }
+
+ return
+}
+
+func intSize(b *types.Basic) (signed bool, maxBytes uint) {
+ if (b.Info() & types.IsUntyped) != 0 {
+ return true, 64
+ }
+
+ switch b.Kind() {
+ case types.Float32, types.Complex64:
+ return true, 3
+ case types.Float64, types.Complex128:
+ return true, 7
+ }
+
+ signed = (b.Info() & types.IsUnsigned) == 0
+ switch b.Kind() {
+ case types.Int8, types.Uint8:
+ maxBytes = 1
+ case types.Int16, types.Uint16:
+ maxBytes = 2
+ case types.Int32, types.Uint32:
+ maxBytes = 4
+ default:
+ maxBytes = 8
+ }
+
+ return
+}
+
+func (r *importReader) mpint(x *big.Int, typ *types.Basic) {
+ signed, maxBytes := intSize(typ)
+
+ maxSmall := 256 - maxBytes
+ if signed {
+ maxSmall = 256 - 2*maxBytes
+ }
+ if maxBytes == 1 {
+ maxSmall = 256
+ }
+
+ n, _ := r.declReader.ReadByte()
+ if uint(n) < maxSmall {
+ v := int64(n)
+ if signed {
+ v >>= 1
+ if n&1 != 0 {
+ v = ^v
+ }
+ }
+ x.SetInt64(v)
+ return
+ }
+
+ v := -n
+ if signed {
+ v = -(n &^ 1) >> 1
+ }
+ if v < 1 || uint(v) > maxBytes {
+ errorf("weird decoding: %v, %v => %v", n, signed, v)
+ }
+ b := make([]byte, v)
+ io.ReadFull(&r.declReader, b)
+ x.SetBytes(b)
+ if signed && n&1 != 0 {
+ x.Neg(x)
+ }
+}
+
+func (r *importReader) mpfloat(typ *types.Basic) constant.Value {
+ var mant big.Int
+ r.mpint(&mant, typ)
+ var f big.Float
+ f.SetInt(&mant)
+ if f.Sign() != 0 {
+ f.SetMantExp(&f, int(r.int64()))
+ }
+ return constant.Make(&f)
+}
+
+func (r *importReader) ident() string {
+ return r.string()
+}
+
+func (r *importReader) qualifiedIdent() (*types.Package, string) {
+ name := r.string()
+ pkg := r.pkg()
+ return pkg, name
+}
+
+func (r *importReader) pos() token.Pos {
+ if r.p.shallow {
+ // precise offsets are encoded only in shallow mode
+ return r.posv2()
+ }
+ if r.p.version >= iexportVersionPosCol {
+ r.posv1()
+ } else {
+ r.posv0()
+ }
+
+ if r.prevFile == "" && r.prevLine == 0 && r.prevColumn == 0 {
+ return token.NoPos
+ }
+ return r.p.fake.pos(r.prevFile, int(r.prevLine), int(r.prevColumn))
+}
+
+func (r *importReader) posv0() {
+ delta := r.int64()
+ if delta != deltaNewFile {
+ r.prevLine += delta
+ } else if l := r.int64(); l == -1 {
+ r.prevLine += deltaNewFile
+ } else {
+ r.prevFile = r.string()
+ r.prevLine = l
+ }
+}
+
+func (r *importReader) posv1() {
+ delta := r.int64()
+ r.prevColumn += delta >> 1
+ if delta&1 != 0 {
+ delta = r.int64()
+ r.prevLine += delta >> 1
+ if delta&1 != 0 {
+ r.prevFile = r.string()
+ }
+ }
+}
+
+func (r *importReader) posv2() token.Pos {
+ file := r.uint64()
+ if file == 0 {
+ return token.NoPos
+ }
+ tf := r.p.fileAt(file - 1)
+ return tf.Pos(int(r.uint64()))
+}
+
+func (r *importReader) typ() types.Type {
+ return r.p.typAt(r.uint64(), nil)
+}
+
+func isInterface(t types.Type) bool {
+ _, ok := aliases.Unalias(t).(*types.Interface)
+ return ok
+}
+
+func (r *importReader) pkg() *types.Package { return r.p.pkgAt(r.uint64()) }
+func (r *importReader) string() string { return r.p.stringAt(r.uint64()) }
+
+func (r *importReader) doType(base *types.Named) (res types.Type) {
+ k := r.kind()
+ if debug {
+ r.p.trace("importing type %d (base: %s)", k, base)
+ r.p.indent++
+ defer func() {
+ r.p.indent--
+ r.p.trace("=> %s", res)
+ }()
+ }
+ switch k {
+ default:
+ errorf("unexpected kind tag in %q: %v", r.p.ipath, k)
+ return nil
+
+ case aliasType, definedType:
+ pkg, name := r.qualifiedIdent()
+ r.p.doDecl(pkg, name)
+ return pkg.Scope().Lookup(name).(*types.TypeName).Type()
+ case pointerType:
+ return types.NewPointer(r.typ())
+ case sliceType:
+ return types.NewSlice(r.typ())
+ case arrayType:
+ n := r.uint64()
+ return types.NewArray(r.typ(), int64(n))
+ case chanType:
+ dir := chanDir(int(r.uint64()))
+ return types.NewChan(dir, r.typ())
+ case mapType:
+ return types.NewMap(r.typ(), r.typ())
+ case signatureType:
+ r.currPkg = r.pkg()
+ return r.signature(nil, nil, nil)
+
+ case structType:
+ r.currPkg = r.pkg()
+
+ fields := make([]*types.Var, r.uint64())
+ tags := make([]string, len(fields))
+ for i := range fields {
+ var field *types.Var
+ if r.p.shallow {
+ field, _ = r.objectPathObject().(*types.Var)
+ }
+
+ fpos := r.pos()
+ fname := r.ident()
+ ftyp := r.typ()
+ emb := r.bool()
+ tag := r.string()
+
+ // Either this is not a shallow import, the field is local, or the
+ // encoded objectPath failed to produce an object (a bug).
+ //
+ // Even in this last, buggy case, fall back on creating a new field. As
+ // discussed in iexport.go, this is not correct, but mostly works and is
+ // preferable to failing (for now at least).
+ if field == nil {
+ field = types.NewField(fpos, r.currPkg, fname, ftyp, emb)
+ }
+
+ fields[i] = field
+ tags[i] = tag
+ }
+ return types.NewStruct(fields, tags)
+
+ case interfaceType:
+ r.currPkg = r.pkg()
+
+ embeddeds := make([]types.Type, r.uint64())
+ for i := range embeddeds {
+ _ = r.pos()
+ embeddeds[i] = r.typ()
+ }
+
+ methods := make([]*types.Func, r.uint64())
+ for i := range methods {
+ var method *types.Func
+ if r.p.shallow {
+ method, _ = r.objectPathObject().(*types.Func)
+ }
+
+ mpos := r.pos()
+ mname := r.ident()
+
+ // TODO(mdempsky): Matches bimport.go, but I
+ // don't agree with this.
+ var recv *types.Var
+ if base != nil {
+ recv = types.NewVar(token.NoPos, r.currPkg, "", base)
+ }
+ msig := r.signature(recv, nil, nil)
+
+ if method == nil {
+ method = types.NewFunc(mpos, r.currPkg, mname, msig)
+ }
+ methods[i] = method
+ }
+
+ typ := newInterface(methods, embeddeds)
+ r.p.interfaceList = append(r.p.interfaceList, typ)
+ return typ
+
+ case typeParamType:
+ if r.p.version < iexportVersionGenerics {
+ errorf("unexpected type param type")
+ }
+ pkg, name := r.qualifiedIdent()
+ id := ident{pkg, name}
+ if t, ok := r.p.tparamIndex[id]; ok {
+ // We're already in the process of importing this typeparam.
+ return t
+ }
+ // Otherwise, import the definition of the typeparam now.
+ r.p.doDecl(pkg, name)
+ return r.p.tparamIndex[id]
+
+ case instanceType:
+ if r.p.version < iexportVersionGenerics {
+ errorf("unexpected instantiation type")
+ }
+ // pos does not matter for instances: they are positioned on the original
+ // type.
+ _ = r.pos()
+ len := r.uint64()
+ targs := make([]types.Type, len)
+ for i := range targs {
+ targs[i] = r.typ()
+ }
+ baseType := r.typ()
+ // The imported instantiated type doesn't include any methods, so
+ // we must always use the methods of the base (orig) type.
+ // TODO provide a non-nil *Environment
+ t, _ := types.Instantiate(nil, baseType, targs, false)
+
+ // Workaround for golang/go#61561. See the doc for instanceList for details.
+ r.p.instanceList = append(r.p.instanceList, t)
+ return t
+
+ case unionType:
+ if r.p.version < iexportVersionGenerics {
+ errorf("unexpected instantiation type")
+ }
+ terms := make([]*types.Term, r.uint64())
+ for i := range terms {
+ terms[i] = types.NewTerm(r.bool(), r.typ())
+ }
+ return types.NewUnion(terms)
+ }
+}
+
+func (r *importReader) kind() itag {
+ return itag(r.uint64())
+}
+
+// objectPathObject is the inverse of exportWriter.objectPath.
+//
+// In shallow mode, certain fields and methods may need to be looked up in an
+// imported package. See the doc for exportWriter.objectPath for a full
+// explanation.
+func (r *importReader) objectPathObject() types.Object {
+ objPath := objectpath.Path(r.string())
+ if objPath == "" {
+ return nil
+ }
+ pkg := r.pkg()
+ obj, err := objectpath.Object(pkg, objPath)
+ if err != nil {
+ if r.p.reportf != nil {
+ r.p.reportf("failed to find object for objectPath %q: %v", objPath, err)
+ }
+ }
+ return obj
+}
+
+func (r *importReader) signature(recv *types.Var, rparams []*types.TypeParam, tparams []*types.TypeParam) *types.Signature {
+ params := r.paramList()
+ results := r.paramList()
+ variadic := params.Len() > 0 && r.bool()
+ return types.NewSignatureType(recv, rparams, tparams, params, results, variadic)
+}
+
+func (r *importReader) tparamList() []*types.TypeParam {
+ n := r.uint64()
+ if n == 0 {
+ return nil
+ }
+ xs := make([]*types.TypeParam, n)
+ for i := range xs {
+ // Note: the standard library importer is tolerant of nil types here,
+		// though it would panic in SetTypeParams.
+ xs[i] = aliases.Unalias(r.typ()).(*types.TypeParam)
+ }
+ return xs
+}
+
+func (r *importReader) paramList() *types.Tuple {
+ xs := make([]*types.Var, r.uint64())
+ for i := range xs {
+ xs[i] = r.param()
+ }
+ return types.NewTuple(xs...)
+}
+
+func (r *importReader) param() *types.Var {
+ pos := r.pos()
+ name := r.ident()
+ typ := r.typ()
+ return types.NewParam(pos, r.currPkg, name, typ)
+}
+
+func (r *importReader) bool() bool {
+ return r.uint64() != 0
+}
+
+func (r *importReader) int64() int64 {
+ n, err := binary.ReadVarint(&r.declReader)
+ if err != nil {
+ errorf("readVarint: %v", err)
+ }
+ return n
+}
+
+func (r *importReader) uint64() uint64 {
+ n, err := binary.ReadUvarint(&r.declReader)
+ if err != nil {
+ errorf("readUvarint: %v", err)
+ }
+ return n
+}
+
+func (r *importReader) byte() byte {
+ x, err := r.declReader.ReadByte()
+ if err != nil {
+ errorf("declReader.ReadByte: %v", err)
+ }
+ return x
+}
diff --git a/vendor/golang.org/x/tools/internal/gcimporter/newInterface10.go b/vendor/golang.org/x/tools/internal/gcimporter/newInterface10.go
new file mode 100644
index 0000000..8b163e3
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/gcimporter/newInterface10.go
@@ -0,0 +1,22 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build !go1.11
+// +build !go1.11
+
+package gcimporter
+
+import "go/types"
+
+func newInterface(methods []*types.Func, embeddeds []types.Type) *types.Interface {
+ named := make([]*types.Named, len(embeddeds))
+ for i, e := range embeddeds {
+ var ok bool
+ named[i], ok = e.(*types.Named)
+ if !ok {
+ panic("embedding of non-defined interfaces in interfaces is not supported before Go 1.11")
+ }
+ }
+ return types.NewInterface(methods, named)
+}
diff --git a/vendor/golang.org/x/tools/internal/gcimporter/newInterface11.go b/vendor/golang.org/x/tools/internal/gcimporter/newInterface11.go
new file mode 100644
index 0000000..49984f4
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/gcimporter/newInterface11.go
@@ -0,0 +1,14 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build go1.11
+// +build go1.11
+
+package gcimporter
+
+import "go/types"
+
+func newInterface(methods []*types.Func, embeddeds []types.Type) *types.Interface {
+ return types.NewInterfaceType(methods, embeddeds)
+}
diff --git a/vendor/golang.org/x/tools/internal/gcimporter/support_go118.go b/vendor/golang.org/x/tools/internal/gcimporter/support_go118.go
new file mode 100644
index 0000000..0cd3b91
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/gcimporter/support_go118.go
@@ -0,0 +1,34 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package gcimporter
+
+import "go/types"
+
+const iexportVersion = iexportVersionGenerics
+
+// additionalPredeclared returns additional predeclared types in go.1.18.
+func additionalPredeclared() []types.Type {
+ return []types.Type{
+ // comparable
+ types.Universe.Lookup("comparable").Type(),
+
+ // any
+ types.Universe.Lookup("any").Type(),
+ }
+}
+
+// See cmd/compile/internal/types.SplitVargenSuffix.
+func splitVargenSuffix(name string) (base, suffix string) {
+ i := len(name)
+ for i > 0 && name[i-1] >= '0' && name[i-1] <= '9' {
+ i--
+ }
+ const dot = "·"
+ if i >= len(dot) && name[i-len(dot):i] == dot {
+ i -= len(dot)
+ return name[:i], name[i:]
+ }
+ return name, ""
+}
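
A small sketch of the split (illustration only; "T·3" stands in for a compiler-generated name carrying a vargen suffix):

    base, suffix := splitVargenSuffix("T·3")
    fmt.Println(base, suffix)       // prints: T ·3
    base, suffix = splitVargenSuffix("T3")
    fmt.Println(base, suffix == "") // prints: T3 true (no "·" before the digits, so no suffix)
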
diff --git a/vendor/golang.org/x/tools/internal/gcimporter/unified_no.go b/vendor/golang.org/x/tools/internal/gcimporter/unified_no.go
new file mode 100644
index 0000000..38b624c
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/gcimporter/unified_no.go
@@ -0,0 +1,10 @@
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build !goexperiment.unified
+// +build !goexperiment.unified
+
+package gcimporter
+
+const unifiedIR = false
diff --git a/vendor/golang.org/x/tools/internal/gcimporter/unified_yes.go b/vendor/golang.org/x/tools/internal/gcimporter/unified_yes.go
new file mode 100644
index 0000000..b5118d0
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/gcimporter/unified_yes.go
@@ -0,0 +1,10 @@
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build goexperiment.unified
+// +build goexperiment.unified
+
+package gcimporter
+
+const unifiedIR = true
diff --git a/vendor/golang.org/x/tools/internal/gcimporter/ureader_yes.go b/vendor/golang.org/x/tools/internal/gcimporter/ureader_yes.go
new file mode 100644
index 0000000..2c07706
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/gcimporter/ureader_yes.go
@@ -0,0 +1,728 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Derived from go/internal/gcimporter/ureader.go
+
+package gcimporter
+
+import (
+ "fmt"
+ "go/token"
+ "go/types"
+ "sort"
+ "strings"
+
+ "golang.org/x/tools/internal/aliases"
+ "golang.org/x/tools/internal/pkgbits"
+)
+
+// A pkgReader holds the shared state for reading a unified IR package
+// description.
+type pkgReader struct {
+ pkgbits.PkgDecoder
+
+ fake fakeFileSet
+
+ ctxt *types.Context
+ imports map[string]*types.Package // previously imported packages, indexed by path
+ aliases bool // create types.Alias nodes
+
+ // lazily initialized arrays corresponding to the unified IR
+ // PosBase, Pkg, and Type sections, respectively.
+ posBases []string // position bases (i.e., file names)
+ pkgs []*types.Package
+ typs []types.Type
+
+ // laterFns holds functions that need to be invoked at the end of
+ // import reading.
+ laterFns []func()
+ // laterFors is used in case of 'type A B' to ensure that B is processed before A.
+ laterFors map[types.Type]int
+
+ // ifaces holds a list of constructed Interfaces, which need to have
+ // Complete called after importing is done.
+ ifaces []*types.Interface
+}
+
+// later adds a function to be invoked at the end of import reading.
+func (pr *pkgReader) later(fn func()) {
+ pr.laterFns = append(pr.laterFns, fn)
+}
+
+// See cmd/compile/internal/noder.derivedInfo.
+type derivedInfo struct {
+ idx pkgbits.Index
+ needed bool
+}
+
+// See cmd/compile/internal/noder.typeInfo.
+type typeInfo struct {
+ idx pkgbits.Index
+ derived bool
+}
+
+func UImportData(fset *token.FileSet, imports map[string]*types.Package, data []byte, path string) (_ int, pkg *types.Package, err error) {
+ if !debug {
+ defer func() {
+ if x := recover(); x != nil {
+ err = fmt.Errorf("internal error in importing %q (%v); please report an issue", path, x)
+ }
+ }()
+ }
+
+ s := string(data)
+ s = s[:strings.LastIndex(s, "\n$$\n")]
+ input := pkgbits.NewPkgDecoder(path, s)
+ pkg = readUnifiedPackage(fset, nil, imports, input)
+ return
+}
+
+// laterFor adds a function to be invoked at the end of import reading, and records the type that function is finishing.
+func (pr *pkgReader) laterFor(t types.Type, fn func()) {
+ if pr.laterFors == nil {
+ pr.laterFors = make(map[types.Type]int)
+ }
+ pr.laterFors[t] = len(pr.laterFns)
+ pr.laterFns = append(pr.laterFns, fn)
+}
+
+// readUnifiedPackage reads a package description from the given
+// unified IR export data decoder.
+func readUnifiedPackage(fset *token.FileSet, ctxt *types.Context, imports map[string]*types.Package, input pkgbits.PkgDecoder) *types.Package {
+ pr := pkgReader{
+ PkgDecoder: input,
+
+ fake: fakeFileSet{
+ fset: fset,
+ files: make(map[string]*fileInfo),
+ },
+
+ ctxt: ctxt,
+ imports: imports,
+ aliases: aliases.Enabled(),
+
+ posBases: make([]string, input.NumElems(pkgbits.RelocPosBase)),
+ pkgs: make([]*types.Package, input.NumElems(pkgbits.RelocPkg)),
+ typs: make([]types.Type, input.NumElems(pkgbits.RelocType)),
+ }
+ defer pr.fake.setLines()
+
+ r := pr.newReader(pkgbits.RelocMeta, pkgbits.PublicRootIdx, pkgbits.SyncPublic)
+ pkg := r.pkg()
+ r.Bool() // has init
+
+ for i, n := 0, r.Len(); i < n; i++ {
+ // As if r.obj(), but avoiding the Scope.Lookup call,
+ // to avoid eager loading of imports.
+ r.Sync(pkgbits.SyncObject)
+ assert(!r.Bool())
+ r.p.objIdx(r.Reloc(pkgbits.RelocObj))
+ assert(r.Len() == 0)
+ }
+
+ r.Sync(pkgbits.SyncEOF)
+
+ for _, fn := range pr.laterFns {
+ fn()
+ }
+
+ for _, iface := range pr.ifaces {
+ iface.Complete()
+ }
+
+ // Imports() of pkg are all of the transitive packages that were loaded.
+ var imps []*types.Package
+ for _, imp := range pr.pkgs {
+ if imp != nil && imp != pkg {
+ imps = append(imps, imp)
+ }
+ }
+ sort.Sort(byPath(imps))
+ pkg.SetImports(imps)
+
+ pkg.MarkComplete()
+ return pkg
+}
+
+// A reader holds the state for reading a single unified IR element
+// within a package.
+type reader struct {
+ pkgbits.Decoder
+
+ p *pkgReader
+
+ dict *readerDict
+}
+
+// A readerDict holds the state for type parameters that parameterize
+// the current unified IR element.
+type readerDict struct {
+ // bounds is a slice of typeInfos corresponding to the underlying
+ // bounds of the element's type parameters.
+ bounds []typeInfo
+
+ // tparams is a slice of the constructed TypeParams for the element.
+ tparams []*types.TypeParam
+
+	// derived is a slice of types derived from tparams, which may be
+ // instantiated while reading the current element.
+ derived []derivedInfo
+ derivedTypes []types.Type // lazily instantiated from derived
+}
+
+func (pr *pkgReader) newReader(k pkgbits.RelocKind, idx pkgbits.Index, marker pkgbits.SyncMarker) *reader {
+ return &reader{
+ Decoder: pr.NewDecoder(k, idx, marker),
+ p: pr,
+ }
+}
+
+func (pr *pkgReader) tempReader(k pkgbits.RelocKind, idx pkgbits.Index, marker pkgbits.SyncMarker) *reader {
+ return &reader{
+ Decoder: pr.TempDecoder(k, idx, marker),
+ p: pr,
+ }
+}
+
+func (pr *pkgReader) retireReader(r *reader) {
+ pr.RetireDecoder(&r.Decoder)
+}
+
+// @@@ Positions
+
+func (r *reader) pos() token.Pos {
+ r.Sync(pkgbits.SyncPos)
+ if !r.Bool() {
+ return token.NoPos
+ }
+
+ // TODO(mdempsky): Delta encoding.
+ posBase := r.posBase()
+ line := r.Uint()
+ col := r.Uint()
+ return r.p.fake.pos(posBase, int(line), int(col))
+}
+
+func (r *reader) posBase() string {
+ return r.p.posBaseIdx(r.Reloc(pkgbits.RelocPosBase))
+}
+
+func (pr *pkgReader) posBaseIdx(idx pkgbits.Index) string {
+ if b := pr.posBases[idx]; b != "" {
+ return b
+ }
+
+ var filename string
+ {
+ r := pr.tempReader(pkgbits.RelocPosBase, idx, pkgbits.SyncPosBase)
+
+ // Within types2, position bases have a lot more details (e.g.,
+ // keeping track of where //line directives appeared exactly).
+ //
+ // For go/types, we just track the file name.
+
+ filename = r.String()
+
+ if r.Bool() { // file base
+ // Was: "b = token.NewTrimmedFileBase(filename, true)"
+ } else { // line base
+ pos := r.pos()
+ line := r.Uint()
+ col := r.Uint()
+
+ // Was: "b = token.NewLineBase(pos, filename, true, line, col)"
+ _, _, _ = pos, line, col
+ }
+ pr.retireReader(r)
+ }
+ b := filename
+ pr.posBases[idx] = b
+ return b
+}
+
+// @@@ Packages
+
+func (r *reader) pkg() *types.Package {
+ r.Sync(pkgbits.SyncPkg)
+ return r.p.pkgIdx(r.Reloc(pkgbits.RelocPkg))
+}
+
+func (pr *pkgReader) pkgIdx(idx pkgbits.Index) *types.Package {
+ // TODO(mdempsky): Consider using some non-nil pointer to indicate
+ // the universe scope, so we don't need to keep re-reading it.
+ if pkg := pr.pkgs[idx]; pkg != nil {
+ return pkg
+ }
+
+ pkg := pr.newReader(pkgbits.RelocPkg, idx, pkgbits.SyncPkgDef).doPkg()
+ pr.pkgs[idx] = pkg
+ return pkg
+}
+
+func (r *reader) doPkg() *types.Package {
+ path := r.String()
+ switch path {
+ case "":
+ path = r.p.PkgPath()
+ case "builtin":
+ return nil // universe
+ case "unsafe":
+ return types.Unsafe
+ }
+
+ if pkg := r.p.imports[path]; pkg != nil {
+ return pkg
+ }
+
+ name := r.String()
+
+ pkg := types.NewPackage(path, name)
+ r.p.imports[path] = pkg
+
+ return pkg
+}
+
+// @@@ Types
+
+func (r *reader) typ() types.Type {
+ return r.p.typIdx(r.typInfo(), r.dict)
+}
+
+func (r *reader) typInfo() typeInfo {
+ r.Sync(pkgbits.SyncType)
+ if r.Bool() {
+ return typeInfo{idx: pkgbits.Index(r.Len()), derived: true}
+ }
+ return typeInfo{idx: r.Reloc(pkgbits.RelocType), derived: false}
+}
+
+func (pr *pkgReader) typIdx(info typeInfo, dict *readerDict) types.Type {
+ idx := info.idx
+ var where *types.Type
+ if info.derived {
+ where = &dict.derivedTypes[idx]
+ idx = dict.derived[idx].idx
+ } else {
+ where = &pr.typs[idx]
+ }
+
+ if typ := *where; typ != nil {
+ return typ
+ }
+
+ var typ types.Type
+ {
+ r := pr.tempReader(pkgbits.RelocType, idx, pkgbits.SyncTypeIdx)
+ r.dict = dict
+
+ typ = r.doTyp()
+ assert(typ != nil)
+ pr.retireReader(r)
+ }
+ // See comment in pkgReader.typIdx explaining how this happens.
+ if prev := *where; prev != nil {
+ return prev
+ }
+
+ *where = typ
+ return typ
+}
+
+func (r *reader) doTyp() (res types.Type) {
+ switch tag := pkgbits.CodeType(r.Code(pkgbits.SyncType)); tag {
+ default:
+ errorf("unhandled type tag: %v", tag)
+ panic("unreachable")
+
+ case pkgbits.TypeBasic:
+ return types.Typ[r.Len()]
+
+ case pkgbits.TypeNamed:
+ obj, targs := r.obj()
+ name := obj.(*types.TypeName)
+ if len(targs) != 0 {
+ t, _ := types.Instantiate(r.p.ctxt, name.Type(), targs, false)
+ return t
+ }
+ return name.Type()
+
+ case pkgbits.TypeTypeParam:
+ return r.dict.tparams[r.Len()]
+
+ case pkgbits.TypeArray:
+ len := int64(r.Uint64())
+ return types.NewArray(r.typ(), len)
+ case pkgbits.TypeChan:
+ dir := types.ChanDir(r.Len())
+ return types.NewChan(dir, r.typ())
+ case pkgbits.TypeMap:
+ return types.NewMap(r.typ(), r.typ())
+ case pkgbits.TypePointer:
+ return types.NewPointer(r.typ())
+ case pkgbits.TypeSignature:
+ return r.signature(nil, nil, nil)
+ case pkgbits.TypeSlice:
+ return types.NewSlice(r.typ())
+ case pkgbits.TypeStruct:
+ return r.structType()
+ case pkgbits.TypeInterface:
+ return r.interfaceType()
+ case pkgbits.TypeUnion:
+ return r.unionType()
+ }
+}
+
+func (r *reader) structType() *types.Struct {
+ fields := make([]*types.Var, r.Len())
+ var tags []string
+ for i := range fields {
+ pos := r.pos()
+ pkg, name := r.selector()
+ ftyp := r.typ()
+ tag := r.String()
+ embedded := r.Bool()
+
+ fields[i] = types.NewField(pos, pkg, name, ftyp, embedded)
+ if tag != "" {
+ for len(tags) < i {
+ tags = append(tags, "")
+ }
+ tags = append(tags, tag)
+ }
+ }
+ return types.NewStruct(fields, tags)
+}
+
+func (r *reader) unionType() *types.Union {
+ terms := make([]*types.Term, r.Len())
+ for i := range terms {
+ terms[i] = types.NewTerm(r.Bool(), r.typ())
+ }
+ return types.NewUnion(terms)
+}
+
+func (r *reader) interfaceType() *types.Interface {
+ methods := make([]*types.Func, r.Len())
+ embeddeds := make([]types.Type, r.Len())
+ implicit := len(methods) == 0 && len(embeddeds) == 1 && r.Bool()
+
+ for i := range methods {
+ pos := r.pos()
+ pkg, name := r.selector()
+ mtyp := r.signature(nil, nil, nil)
+ methods[i] = types.NewFunc(pos, pkg, name, mtyp)
+ }
+
+ for i := range embeddeds {
+ embeddeds[i] = r.typ()
+ }
+
+ iface := types.NewInterfaceType(methods, embeddeds)
+ if implicit {
+ iface.MarkImplicit()
+ }
+
+ // We need to call iface.Complete(), but if there are any embedded
+ // defined types, then we may not have set their underlying
+ // interface type yet. So we need to defer calling Complete until
+ // after we've called SetUnderlying everywhere.
+ //
+ // TODO(mdempsky): After CL 424876 lands, it should be safe to call
+ // iface.Complete() immediately.
+ r.p.ifaces = append(r.p.ifaces, iface)
+
+ return iface
+}
+
+func (r *reader) signature(recv *types.Var, rtparams, tparams []*types.TypeParam) *types.Signature {
+ r.Sync(pkgbits.SyncSignature)
+
+ params := r.params()
+ results := r.params()
+ variadic := r.Bool()
+
+ return types.NewSignatureType(recv, rtparams, tparams, params, results, variadic)
+}
+
+func (r *reader) params() *types.Tuple {
+ r.Sync(pkgbits.SyncParams)
+
+ params := make([]*types.Var, r.Len())
+ for i := range params {
+ params[i] = r.param()
+ }
+
+ return types.NewTuple(params...)
+}
+
+func (r *reader) param() *types.Var {
+ r.Sync(pkgbits.SyncParam)
+
+ pos := r.pos()
+ pkg, name := r.localIdent()
+ typ := r.typ()
+
+ return types.NewParam(pos, pkg, name, typ)
+}
+
+// @@@ Objects
+
+func (r *reader) obj() (types.Object, []types.Type) {
+ r.Sync(pkgbits.SyncObject)
+
+ assert(!r.Bool())
+
+ pkg, name := r.p.objIdx(r.Reloc(pkgbits.RelocObj))
+ obj := pkgScope(pkg).Lookup(name)
+
+ targs := make([]types.Type, r.Len())
+ for i := range targs {
+ targs[i] = r.typ()
+ }
+
+ return obj, targs
+}
+
+func (pr *pkgReader) objIdx(idx pkgbits.Index) (*types.Package, string) {
+
+ var objPkg *types.Package
+ var objName string
+ var tag pkgbits.CodeObj
+ {
+ rname := pr.tempReader(pkgbits.RelocName, idx, pkgbits.SyncObject1)
+
+ objPkg, objName = rname.qualifiedIdent()
+ assert(objName != "")
+
+ tag = pkgbits.CodeObj(rname.Code(pkgbits.SyncCodeObj))
+ pr.retireReader(rname)
+ }
+
+ if tag == pkgbits.ObjStub {
+ assert(objPkg == nil || objPkg == types.Unsafe)
+ return objPkg, objName
+ }
+
+ // Ignore local types promoted to global scope (#55110).
+ if _, suffix := splitVargenSuffix(objName); suffix != "" {
+ return objPkg, objName
+ }
+
+ if objPkg.Scope().Lookup(objName) == nil {
+ dict := pr.objDictIdx(idx)
+
+ r := pr.newReader(pkgbits.RelocObj, idx, pkgbits.SyncObject1)
+ r.dict = dict
+
+ declare := func(obj types.Object) {
+ objPkg.Scope().Insert(obj)
+ }
+
+ switch tag {
+ default:
+ panic("weird")
+
+ case pkgbits.ObjAlias:
+ pos := r.pos()
+ typ := r.typ()
+ declare(aliases.NewAlias(r.p.aliases, pos, objPkg, objName, typ))
+
+ case pkgbits.ObjConst:
+ pos := r.pos()
+ typ := r.typ()
+ val := r.Value()
+ declare(types.NewConst(pos, objPkg, objName, typ, val))
+
+ case pkgbits.ObjFunc:
+ pos := r.pos()
+ tparams := r.typeParamNames()
+ sig := r.signature(nil, nil, tparams)
+ declare(types.NewFunc(pos, objPkg, objName, sig))
+
+ case pkgbits.ObjType:
+ pos := r.pos()
+
+ obj := types.NewTypeName(pos, objPkg, objName, nil)
+ named := types.NewNamed(obj, nil, nil)
+ declare(obj)
+
+ named.SetTypeParams(r.typeParamNames())
+
+ setUnderlying := func(underlying types.Type) {
+ // If the underlying type is an interface, we need to
+ // duplicate its methods so we can replace the receiver
+ // parameter's type (#49906).
+ if iface, ok := aliases.Unalias(underlying).(*types.Interface); ok && iface.NumExplicitMethods() != 0 {
+ methods := make([]*types.Func, iface.NumExplicitMethods())
+ for i := range methods {
+ fn := iface.ExplicitMethod(i)
+ sig := fn.Type().(*types.Signature)
+
+ recv := types.NewVar(fn.Pos(), fn.Pkg(), "", named)
+ methods[i] = types.NewFunc(fn.Pos(), fn.Pkg(), fn.Name(), types.NewSignature(recv, sig.Params(), sig.Results(), sig.Variadic()))
+ }
+
+ embeds := make([]types.Type, iface.NumEmbeddeds())
+ for i := range embeds {
+ embeds[i] = iface.EmbeddedType(i)
+ }
+
+ newIface := types.NewInterfaceType(methods, embeds)
+ r.p.ifaces = append(r.p.ifaces, newIface)
+ underlying = newIface
+ }
+
+ named.SetUnderlying(underlying)
+ }
+
+ // Since go.dev/cl/455279, we can assume rhs.Underlying() will
+ // always be non-nil. However, to temporarily support users of
+			// older snapshot releases, we continue to fall back to the old
+ // behavior for now.
+ //
+ // TODO(mdempsky): Remove fallback code and simplify after
+ // allowing time for snapshot users to upgrade.
+ rhs := r.typ()
+ if underlying := rhs.Underlying(); underlying != nil {
+ setUnderlying(underlying)
+ } else {
+ pk := r.p
+ pk.laterFor(named, func() {
+ // First be sure that the rhs is initialized, if it needs to be initialized.
+ delete(pk.laterFors, named) // prevent cycles
+ if i, ok := pk.laterFors[rhs]; ok {
+ f := pk.laterFns[i]
+ pk.laterFns[i] = func() {} // function is running now, so replace it with a no-op
+ f() // initialize RHS
+ }
+ setUnderlying(rhs.Underlying())
+ })
+ }
+
+ for i, n := 0, r.Len(); i < n; i++ {
+ named.AddMethod(r.method())
+ }
+
+ case pkgbits.ObjVar:
+ pos := r.pos()
+ typ := r.typ()
+ declare(types.NewVar(pos, objPkg, objName, typ))
+ }
+ }
+
+ return objPkg, objName
+}
+
+func (pr *pkgReader) objDictIdx(idx pkgbits.Index) *readerDict {
+
+ var dict readerDict
+
+ {
+ r := pr.tempReader(pkgbits.RelocObjDict, idx, pkgbits.SyncObject1)
+ if implicits := r.Len(); implicits != 0 {
+ errorf("unexpected object with %v implicit type parameter(s)", implicits)
+ }
+
+ dict.bounds = make([]typeInfo, r.Len())
+ for i := range dict.bounds {
+ dict.bounds[i] = r.typInfo()
+ }
+
+ dict.derived = make([]derivedInfo, r.Len())
+ dict.derivedTypes = make([]types.Type, len(dict.derived))
+ for i := range dict.derived {
+ dict.derived[i] = derivedInfo{r.Reloc(pkgbits.RelocType), r.Bool()}
+ }
+
+ pr.retireReader(r)
+ }
+ // function references follow, but reader doesn't need those
+
+ return &dict
+}
+
+func (r *reader) typeParamNames() []*types.TypeParam {
+ r.Sync(pkgbits.SyncTypeParamNames)
+
+ // Note: This code assumes it only processes objects without
+	// implicit type parameters. This is currently fine, because
+ // reader is only used to read in exported declarations, which are
+ // always package scoped.
+
+ if len(r.dict.bounds) == 0 {
+ return nil
+ }
+
+ // Careful: Type parameter lists may have cycles. To allow for this,
+ // we construct the type parameter list in two passes: first we
+ // create all the TypeNames and TypeParams, then we construct and
+ // set the bound type.
+
+ r.dict.tparams = make([]*types.TypeParam, len(r.dict.bounds))
+ for i := range r.dict.bounds {
+ pos := r.pos()
+ pkg, name := r.localIdent()
+
+ tname := types.NewTypeName(pos, pkg, name, nil)
+ r.dict.tparams[i] = types.NewTypeParam(tname, nil)
+ }
+
+ typs := make([]types.Type, len(r.dict.bounds))
+ for i, bound := range r.dict.bounds {
+ typs[i] = r.p.typIdx(bound, r.dict)
+ }
+
+ // TODO(mdempsky): This is subtle, elaborate further.
+ //
+ // We have to save tparams outside of the closure, because
+ // typeParamNames() can be called multiple times with the same
+ // dictionary instance.
+ //
+ // Also, this needs to happen later to make sure SetUnderlying has
+ // been called.
+ //
+ // TODO(mdempsky): Is it safe to have a single "later" slice or do
+ // we need to have multiple passes? See comments on CL 386002 and
+ // go.dev/issue/52104.
+ tparams := r.dict.tparams
+ r.p.later(func() {
+ for i, typ := range typs {
+ tparams[i].SetConstraint(typ)
+ }
+ })
+
+ return r.dict.tparams
+}
+
+func (r *reader) method() *types.Func {
+ r.Sync(pkgbits.SyncMethod)
+ pos := r.pos()
+ pkg, name := r.selector()
+
+ rparams := r.typeParamNames()
+ sig := r.signature(r.param(), rparams, nil)
+
+	_ = r.pos() // TODO(mdempsky): Remove; this is a hack for linker.go.
+ return types.NewFunc(pos, pkg, name, sig)
+}
+
+func (r *reader) qualifiedIdent() (*types.Package, string) { return r.ident(pkgbits.SyncSym) }
+func (r *reader) localIdent() (*types.Package, string) { return r.ident(pkgbits.SyncLocalIdent) }
+func (r *reader) selector() (*types.Package, string) { return r.ident(pkgbits.SyncSelector) }
+
+func (r *reader) ident(marker pkgbits.SyncMarker) (*types.Package, string) {
+ r.Sync(marker)
+ return r.pkg(), r.String()
+}
+
+// pkgScope returns pkg.Scope().
+// If pkg is nil, it returns types.Universe instead.
+//
+// TODO(mdempsky): Remove after x/tools can depend on Go 1.19.
+func pkgScope(pkg *types.Package) *types.Scope {
+ if pkg != nil {
+ return pkg.Scope()
+ }
+ return types.Universe
+}
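
The reader above drives the public go/types constructors directly. As a minimal, hypothetical sketch (not part of the vendored file), this is roughly how the pieces read by reader.param, reader.params, and reader.signature are assembled into a function type:

package main

import (
	"fmt"
	"go/token"
	"go/types"
)

func main() {
	// reader.param builds a *types.Var from a position, package, name, and type
	// (positions and packages are omitted here for brevity).
	x := types.NewParam(token.NoPos, nil, "x", types.Typ[types.Int])
	ys := types.NewParam(token.NoPos, nil, "ys", types.NewSlice(types.Typ[types.String]))

	// reader.params groups the parameters into a *types.Tuple.
	params := types.NewTuple(x, ys)
	results := types.NewTuple(types.NewParam(token.NoPos, nil, "", types.Typ[types.Bool]))

	// reader.signature then combines receiver, type parameters, params, results,
	// and the variadic flag; here there is no receiver and no type parameters.
	sig := types.NewSignatureType(nil, nil, nil, params, results, true)
	fmt.Println(sig) // func(x int, ys ...string) bool
}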
diff --git a/vendor/golang.org/x/tools/internal/gocommand/invoke.go b/vendor/golang.org/x/tools/internal/gocommand/invoke.go
new file mode 100644
index 0000000..2e59ff8
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/gocommand/invoke.go
@@ -0,0 +1,555 @@
+// Copyright 2020 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package gocommand is a helper for calling the go command.
+package gocommand
+
+import (
+ "bytes"
+ "context"
+ "encoding/json"
+ "errors"
+ "fmt"
+ "io"
+ "log"
+ "os"
+ "os/exec"
+ "path/filepath"
+ "reflect"
+ "regexp"
+ "runtime"
+ "strconv"
+ "strings"
+ "sync"
+ "time"
+
+ "golang.org/x/tools/internal/event"
+ "golang.org/x/tools/internal/event/keys"
+ "golang.org/x/tools/internal/event/label"
+)
+
+// A Runner will run go command invocations and serialize
+// them if it sees a concurrency error.
+type Runner struct {
+ // once guards the runner initialization.
+ once sync.Once
+
+ // inFlight tracks available workers.
+ inFlight chan struct{}
+
+ // serialized guards the ability to run a go command serially,
+ // to avoid deadlocks when claiming workers.
+ serialized chan struct{}
+}
+
+const maxInFlight = 10
+
+func (runner *Runner) initialize() {
+ runner.once.Do(func() {
+ runner.inFlight = make(chan struct{}, maxInFlight)
+ runner.serialized = make(chan struct{}, 1)
+ })
+}
+
+// 1.13: go: updates to go.mod needed, but contents have changed
+// 1.14: go: updating go.mod: existing contents have changed since last read
+var modConcurrencyError = regexp.MustCompile(`go:.*go.mod.*contents have changed`)
+
+// event keys for go command invocations
+var (
+ verb = keys.NewString("verb", "go command verb")
+ directory = keys.NewString("directory", "")
+)
+
+func invLabels(inv Invocation) []label.Label {
+ return []label.Label{verb.Of(inv.Verb), directory.Of(inv.WorkingDir)}
+}
+
+// Run is a convenience wrapper around RunRaw.
+// It returns only stdout and a "friendly" error.
+func (runner *Runner) Run(ctx context.Context, inv Invocation) (*bytes.Buffer, error) {
+ ctx, done := event.Start(ctx, "gocommand.Runner.Run", invLabels(inv)...)
+ defer done()
+
+ stdout, _, friendly, _ := runner.RunRaw(ctx, inv)
+ return stdout, friendly
+}
+
+// RunPiped runs the invocation serially, always waiting for any concurrent
+// invocations to complete first.
+func (runner *Runner) RunPiped(ctx context.Context, inv Invocation, stdout, stderr io.Writer) error {
+ ctx, done := event.Start(ctx, "gocommand.Runner.RunPiped", invLabels(inv)...)
+ defer done()
+
+ _, err := runner.runPiped(ctx, inv, stdout, stderr)
+ return err
+}
+
+// RunRaw runs the invocation, serializing requests only if they fight over
+// go.mod changes.
+// Postcondition: both error results have same nilness.
+func (runner *Runner) RunRaw(ctx context.Context, inv Invocation) (*bytes.Buffer, *bytes.Buffer, error, error) {
+ ctx, done := event.Start(ctx, "gocommand.Runner.RunRaw", invLabels(inv)...)
+ defer done()
+ // Make sure the runner is always initialized.
+ runner.initialize()
+
+ // First, try to run the go command concurrently.
+ stdout, stderr, friendlyErr, err := runner.runConcurrent(ctx, inv)
+
+ // If we encounter a load concurrency error, we need to retry serially.
+ if friendlyErr != nil && modConcurrencyError.MatchString(friendlyErr.Error()) {
+ event.Error(ctx, "Load concurrency error, will retry serially", err)
+
+ // Run serially by calling runPiped.
+ stdout.Reset()
+ stderr.Reset()
+ friendlyErr, err = runner.runPiped(ctx, inv, stdout, stderr)
+ }
+
+ return stdout, stderr, friendlyErr, err
+}
+
+// Postcondition: both error results have same nilness.
+func (runner *Runner) runConcurrent(ctx context.Context, inv Invocation) (*bytes.Buffer, *bytes.Buffer, error, error) {
+ // Wait for 1 worker to become available.
+ select {
+ case <-ctx.Done():
+ return nil, nil, ctx.Err(), ctx.Err()
+ case runner.inFlight <- struct{}{}:
+ defer func() { <-runner.inFlight }()
+ }
+
+ stdout, stderr := &bytes.Buffer{}, &bytes.Buffer{}
+ friendlyErr, err := inv.runWithFriendlyError(ctx, stdout, stderr)
+ return stdout, stderr, friendlyErr, err
+}
+
+// Postcondition: both error results have same nilness.
+func (runner *Runner) runPiped(ctx context.Context, inv Invocation, stdout, stderr io.Writer) (error, error) {
+ // Make sure the runner is always initialized.
+ runner.initialize()
+
+ // Acquire the serialization lock. This avoids deadlocks between two
+ // runPiped commands.
+ select {
+ case <-ctx.Done():
+ return ctx.Err(), ctx.Err()
+ case runner.serialized <- struct{}{}:
+ defer func() { <-runner.serialized }()
+ }
+
+ // Wait for all in-progress go commands to return before proceeding,
+ // to avoid load concurrency errors.
+ for i := 0; i < maxInFlight; i++ {
+ select {
+ case <-ctx.Done():
+ return ctx.Err(), ctx.Err()
+ case runner.inFlight <- struct{}{}:
+ // Make sure we always "return" any workers we took.
+ defer func() { <-runner.inFlight }()
+ }
+ }
+
+ return inv.runWithFriendlyError(ctx, stdout, stderr)
+}
+
+// An Invocation represents a call to the go command.
+type Invocation struct {
+ Verb string
+ Args []string
+ BuildFlags []string
+
+ // If ModFlag is set, the go command is invoked with -mod=ModFlag.
+ // TODO(rfindley): remove, in favor of Args.
+ ModFlag string
+
+ // If ModFile is set, the go command is invoked with -modfile=ModFile.
+ // TODO(rfindley): remove, in favor of Args.
+ ModFile string
+
+ // Overlay is the name of the JSON overlay file that describes
+ // unsaved editor buffers; see [WriteOverlays].
+ // If set, the go command is invoked with -overlay=Overlay.
+ // TODO(rfindley): remove, in favor of Args.
+ Overlay string
+
+ // If CleanEnv is set, the invocation will run only with the environment
+ // in Env, not starting with os.Environ.
+ CleanEnv bool
+ Env []string
+ WorkingDir string
+ Logf func(format string, args ...interface{})
+}
+
+// Postcondition: both error results have same nilness.
+func (i *Invocation) runWithFriendlyError(ctx context.Context, stdout, stderr io.Writer) (friendlyError error, rawError error) {
+ rawError = i.run(ctx, stdout, stderr)
+ if rawError != nil {
+ friendlyError = rawError
+ // Check for 'go' executable not being found.
+ if ee, ok := rawError.(*exec.Error); ok && ee.Err == exec.ErrNotFound {
+ friendlyError = fmt.Errorf("go command required, not found: %v", ee)
+ }
+ if ctx.Err() != nil {
+ friendlyError = ctx.Err()
+ }
+ friendlyError = fmt.Errorf("err: %v: stderr: %s", friendlyError, stderr)
+ }
+ return
+}
+
+// logf logs if i.Logf is non-nil.
+func (i *Invocation) logf(format string, args ...any) {
+ if i.Logf != nil {
+ i.Logf(format, args...)
+ }
+}
+
+func (i *Invocation) run(ctx context.Context, stdout, stderr io.Writer) error {
+ goArgs := []string{i.Verb}
+
+ appendModFile := func() {
+ if i.ModFile != "" {
+ goArgs = append(goArgs, "-modfile="+i.ModFile)
+ }
+ }
+ appendModFlag := func() {
+ if i.ModFlag != "" {
+ goArgs = append(goArgs, "-mod="+i.ModFlag)
+ }
+ }
+ appendOverlayFlag := func() {
+ if i.Overlay != "" {
+ goArgs = append(goArgs, "-overlay="+i.Overlay)
+ }
+ }
+
+ switch i.Verb {
+ case "env", "version":
+ goArgs = append(goArgs, i.Args...)
+ case "mod":
+ // mod needs the sub-verb before flags.
+ goArgs = append(goArgs, i.Args[0])
+ appendModFile()
+ goArgs = append(goArgs, i.Args[1:]...)
+ case "get":
+ goArgs = append(goArgs, i.BuildFlags...)
+ appendModFile()
+ goArgs = append(goArgs, i.Args...)
+
+ default: // notably list and build.
+ goArgs = append(goArgs, i.BuildFlags...)
+ appendModFile()
+ appendModFlag()
+ appendOverlayFlag()
+ goArgs = append(goArgs, i.Args...)
+ }
+ cmd := exec.Command("go", goArgs...)
+ cmd.Stdout = stdout
+ cmd.Stderr = stderr
+
+ // cmd.WaitDelay was added only in go1.20 (see #50436).
+ if waitDelay := reflect.ValueOf(cmd).Elem().FieldByName("WaitDelay"); waitDelay.IsValid() {
+ // https://go.dev/issue/59541: don't wait forever copying stderr
+ // after the command has exited.
+		// After CL 484741 we copy stdout manually, so we'll stop reading that as
+ // soon as ctx is done. However, we also don't want to wait around forever
+ // for stderr. Give a much-longer-than-reasonable delay and then assume that
+ // something has wedged in the kernel or runtime.
+ waitDelay.Set(reflect.ValueOf(30 * time.Second))
+ }
+
+ // The cwd gets resolved to the real path. On Darwin, where
+ // /tmp is a symlink, this breaks anything that expects the
+ // working directory to keep the original path, including the
+ // go command when dealing with modules.
+ //
+ // os.Getwd has a special feature where if the cwd and the PWD
+ // are the same node then it trusts the PWD, so by setting it
+ // in the env for the child process we fix up all the paths
+ // returned by the go command.
+ if !i.CleanEnv {
+ cmd.Env = os.Environ()
+ }
+ cmd.Env = append(cmd.Env, i.Env...)
+ if i.WorkingDir != "" {
+ cmd.Env = append(cmd.Env, "PWD="+i.WorkingDir)
+ cmd.Dir = i.WorkingDir
+ }
+
+ debugStr := cmdDebugStr(cmd)
+ i.logf("starting %v", debugStr)
+ start := time.Now()
+ defer func() {
+ i.logf("%s for %v", time.Since(start), debugStr)
+ }()
+
+ return runCmdContext(ctx, cmd)
+}
+
+// DebugHangingGoCommands may be set by tests to enable additional
+// instrumentation (including panics) for debugging hanging Go commands.
+//
+// See golang/go#54461 for details.
+var DebugHangingGoCommands = false
+
+// runCmdContext is like exec.CommandContext except it sends os.Interrupt
+// before os.Kill.
+func runCmdContext(ctx context.Context, cmd *exec.Cmd) (err error) {
+ // If cmd.Stdout is not an *os.File, the exec package will create a pipe and
+ // copy it to the Writer in a goroutine until the process has finished and
+ // either the pipe reaches EOF or command's WaitDelay expires.
+ //
+ // However, the output from 'go list' can be quite large, and we don't want to
+ // keep reading (and allocating buffers) if we've already decided we don't
+ // care about the output. We don't want to wait for the process to finish, and
+	// we don't want to wait for the WaitDelay to expire either.
+ //
+ // Instead, if cmd.Stdout requires a copying goroutine we explicitly replace
+ // it with a pipe (which is an *os.File), which we can close in order to stop
+ // copying output as soon as we realize we don't care about it.
+ var stdoutW *os.File
+ if cmd.Stdout != nil {
+ if _, ok := cmd.Stdout.(*os.File); !ok {
+ var stdoutR *os.File
+ stdoutR, stdoutW, err = os.Pipe()
+ if err != nil {
+ return err
+ }
+ prevStdout := cmd.Stdout
+ cmd.Stdout = stdoutW
+
+ stdoutErr := make(chan error, 1)
+ go func() {
+ _, err := io.Copy(prevStdout, stdoutR)
+ if err != nil {
+ err = fmt.Errorf("copying stdout: %w", err)
+ }
+ stdoutErr <- err
+ }()
+ defer func() {
+ // We started a goroutine to copy a stdout pipe.
+ // Wait for it to finish, or terminate it if need be.
+ var err2 error
+ select {
+ case err2 = <-stdoutErr:
+ stdoutR.Close()
+ case <-ctx.Done():
+ stdoutR.Close()
+ // Per https://pkg.go.dev/os#File.Close, the call to stdoutR.Close
+ // should cause the Read call in io.Copy to unblock and return
+ // immediately, but we still need to receive from stdoutErr to confirm
+ // that it has happened.
+ <-stdoutErr
+ err2 = ctx.Err()
+ }
+ if err == nil {
+ err = err2
+ }
+ }()
+
+ // Per https://pkg.go.dev/os/exec#Cmd, “If Stdout and Stderr are the
+ // same writer, and have a type that can be compared with ==, at most
+ // one goroutine at a time will call Write.”
+ //
+ // Since we're starting a goroutine that writes to cmd.Stdout, we must
+ // also update cmd.Stderr so that it still holds.
+ func() {
+ defer func() { recover() }()
+ if cmd.Stderr == prevStdout {
+ cmd.Stderr = cmd.Stdout
+ }
+ }()
+ }
+ }
+
+ startTime := time.Now()
+ err = cmd.Start()
+ if stdoutW != nil {
+ // The child process has inherited the pipe file,
+ // so close the copy held in this process.
+ stdoutW.Close()
+ stdoutW = nil
+ }
+ if err != nil {
+ return err
+ }
+
+ resChan := make(chan error, 1)
+ go func() {
+ resChan <- cmd.Wait()
+ }()
+
+ // If we're interested in debugging hanging Go commands, stop waiting after a
+ // minute and panic with interesting information.
+ debug := DebugHangingGoCommands
+ if debug {
+ timer := time.NewTimer(1 * time.Minute)
+ defer timer.Stop()
+ select {
+ case err := <-resChan:
+ return err
+ case <-timer.C:
+ HandleHangingGoCommand(startTime, cmd)
+ case <-ctx.Done():
+ }
+ } else {
+ select {
+ case err := <-resChan:
+ return err
+ case <-ctx.Done():
+ }
+ }
+
+ // Cancelled. Interrupt and see if it ends voluntarily.
+ if err := cmd.Process.Signal(os.Interrupt); err == nil {
+ // (We used to wait only 1s but this proved
+ // fragile on loaded builder machines.)
+ timer := time.NewTimer(5 * time.Second)
+ defer timer.Stop()
+ select {
+ case err := <-resChan:
+ return err
+ case <-timer.C:
+ }
+ }
+
+ // Didn't shut down in response to interrupt. Kill it hard.
+ // TODO(rfindley): per advice from bcmills@, it may be better to send SIGQUIT
+ // on certain platforms, such as unix.
+ if err := cmd.Process.Kill(); err != nil && !errors.Is(err, os.ErrProcessDone) && debug {
+ log.Printf("error killing the Go command: %v", err)
+ }
+
+ return <-resChan
+}
+
+func HandleHangingGoCommand(start time.Time, cmd *exec.Cmd) {
+ switch runtime.GOOS {
+ case "linux", "darwin", "freebsd", "netbsd":
+ fmt.Fprintln(os.Stderr, `DETECTED A HANGING GO COMMAND
+
+The gopls test runner has detected a hanging go command. In order to debug
+this, the output of ps and lsof/fstat is printed below.
+
+See golang/go#54461 for more details.`)
+
+ fmt.Fprintln(os.Stderr, "\nps axo ppid,pid,command:")
+ fmt.Fprintln(os.Stderr, "-------------------------")
+ psCmd := exec.Command("ps", "axo", "ppid,pid,command")
+ psCmd.Stdout = os.Stderr
+ psCmd.Stderr = os.Stderr
+ if err := psCmd.Run(); err != nil {
+ panic(fmt.Sprintf("running ps: %v", err))
+ }
+
+ listFiles := "lsof"
+ if runtime.GOOS == "freebsd" || runtime.GOOS == "netbsd" {
+ listFiles = "fstat"
+ }
+
+ fmt.Fprintln(os.Stderr, "\n"+listFiles+":")
+ fmt.Fprintln(os.Stderr, "-----")
+ listFilesCmd := exec.Command(listFiles)
+ listFilesCmd.Stdout = os.Stderr
+ listFilesCmd.Stderr = os.Stderr
+ if err := listFilesCmd.Run(); err != nil {
+ panic(fmt.Sprintf("running %s: %v", listFiles, err))
+ }
+ }
+ panic(fmt.Sprintf("detected hanging go command (golang/go#54461); waited %s\n\tcommand:%s\n\tpid:%d", time.Since(start), cmd, cmd.Process.Pid))
+}
+
+func cmdDebugStr(cmd *exec.Cmd) string {
+ env := make(map[string]string)
+ for _, kv := range cmd.Env {
+ split := strings.SplitN(kv, "=", 2)
+ if len(split) == 2 {
+ k, v := split[0], split[1]
+ env[k] = v
+ }
+ }
+
+ var args []string
+ for _, arg := range cmd.Args {
+ quoted := strconv.Quote(arg)
+ if quoted[1:len(quoted)-1] != arg || strings.Contains(arg, " ") {
+ args = append(args, quoted)
+ } else {
+ args = append(args, arg)
+ }
+ }
+ return fmt.Sprintf("GOROOT=%v GOPATH=%v GO111MODULE=%v GOPROXY=%v PWD=%v %v", env["GOROOT"], env["GOPATH"], env["GO111MODULE"], env["GOPROXY"], env["PWD"], strings.Join(args, " "))
+}
+
+// WriteOverlays writes each value in the overlay (see the Overlay
+// field of go/packages.Config) to a temporary file and returns the name
+// of a JSON file describing the mapping that is suitable for the "go
+// list -overlay" flag.
+//
+// On success, the caller must call the cleanup function exactly once
+// when the files are no longer needed.
+func WriteOverlays(overlay map[string][]byte) (filename string, cleanup func(), err error) {
+ // Do nothing if there are no overlays in the config.
+ if len(overlay) == 0 {
+ return "", func() {}, nil
+ }
+
+ dir, err := os.MkdirTemp("", "gocommand-*")
+ if err != nil {
+ return "", nil, err
+ }
+
+ // The caller must clean up this directory,
+ // unless this function returns an error.
+ // (The cleanup operand of each return
+ // statement below is ignored.)
+ defer func() {
+ cleanup = func() {
+ os.RemoveAll(dir)
+ }
+ if err != nil {
+ cleanup()
+ cleanup = nil
+ }
+ }()
+
+ // Write each map entry to a temporary file.
+ overlays := make(map[string]string)
+ for k, v := range overlay {
+ // Use a unique basename for each file (001-foo.go),
+ // to avoid creating nested directories.
+ base := fmt.Sprintf("%d-%s", 1+len(overlays), filepath.Base(k))
+ filename := filepath.Join(dir, base)
+ err := os.WriteFile(filename, v, 0666)
+ if err != nil {
+ return "", nil, err
+ }
+ overlays[k] = filename
+ }
+
+ // Write the JSON overlay file that maps logical file names to temp files.
+ //
+ // OverlayJSON is the format overlay files are expected to be in.
+ // The Replace map maps from overlaid paths to replacement paths:
+ // the Go command will forward all reads trying to open
+ // each overlaid path to its replacement path, or consider the overlaid
+ // path not to exist if the replacement path is empty.
+ //
+ // From golang/go#39958.
+ type OverlayJSON struct {
+ Replace map[string]string `json:"replace,omitempty"`
+ }
+ b, err := json.Marshal(OverlayJSON{Replace: overlays})
+ if err != nil {
+ return "", nil, err
+ }
+ filename = filepath.Join(dir, "overlay.json")
+ if err := os.WriteFile(filename, b, 0666); err != nil {
+ return "", nil, err
+ }
+
+ return filename, nil, nil
+}
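
A hypothetical usage sketch of the API above. Package gocommand is internal to golang.org/x/tools, so code outside that module cannot import it; the snippet only illustrates how Runner and Invocation fit together (the working directory is a placeholder):

package main

import (
	"context"
	"fmt"
	"log"

	"golang.org/x/tools/internal/gocommand"
)

func main() {
	runner := &gocommand.Runner{} // zero value is usable; initialization is lazy
	inv := gocommand.Invocation{
		Verb:       "list",
		Args:       []string{"-m", "-json"},
		WorkingDir: "/path/to/module",
		Logf:       log.Printf,
	}
	stdout, err := runner.Run(context.Background(), inv) // stdout plus a "friendly" error
	if err != nil {
		log.Fatal(err)
	}
	fmt.Print(stdout.String())
}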
diff --git a/vendor/golang.org/x/tools/internal/gocommand/vendor.go b/vendor/golang.org/x/tools/internal/gocommand/vendor.go
new file mode 100644
index 0000000..e38d1fb
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/gocommand/vendor.go
@@ -0,0 +1,163 @@
+// Copyright 2020 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package gocommand
+
+import (
+ "bytes"
+ "context"
+ "fmt"
+ "os"
+ "path/filepath"
+ "regexp"
+ "strings"
+ "time"
+
+ "golang.org/x/mod/semver"
+)
+
+// ModuleJSON holds information about a module.
+type ModuleJSON struct {
+ Path string // module path
+ Version string // module version
+ Versions []string // available module versions (with -versions)
+ Replace *ModuleJSON // replaced by this module
+ Time *time.Time // time version was created
+ Update *ModuleJSON // available update, if any (with -u)
+ Main bool // is this the main module?
+ Indirect bool // is this module only an indirect dependency of main module?
+ Dir string // directory holding files for this module, if any
+ GoMod string // path to go.mod file used when loading this module, if any
+ GoVersion string // go version used in module
+}
+
+var modFlagRegexp = regexp.MustCompile(`-mod[ =](\w+)`)
+
+// VendorEnabled reports whether vendoring is enabled. It takes a *Runner to execute Go commands
+// with the supplied context.Context and Invocation. The Invocation can contain pre-defined fields,
+// of which only Verb and Args are modified to run the appropriate Go command.
+// Inspired by setDefaultBuildMod in modload/init.go
+func VendorEnabled(ctx context.Context, inv Invocation, r *Runner) (bool, *ModuleJSON, error) {
+ mainMod, go114, err := getMainModuleAnd114(ctx, inv, r)
+ if err != nil {
+ return false, nil, err
+ }
+
+ // We check the GOFLAGS to see if there is anything overridden or not.
+ inv.Verb = "env"
+ inv.Args = []string{"GOFLAGS"}
+ stdout, err := r.Run(ctx, inv)
+ if err != nil {
+ return false, nil, err
+ }
+ goflags := string(bytes.TrimSpace(stdout.Bytes()))
+ matches := modFlagRegexp.FindStringSubmatch(goflags)
+ var modFlag string
+ if len(matches) != 0 {
+ modFlag = matches[1]
+ }
+ // Don't override an explicit '-mod=' argument.
+ if modFlag == "vendor" {
+ return true, mainMod, nil
+ } else if modFlag != "" {
+ return false, nil, nil
+ }
+ if mainMod == nil || !go114 {
+ return false, nil, nil
+ }
+ // Check 1.14's automatic vendor mode.
+ if fi, err := os.Stat(filepath.Join(mainMod.Dir, "vendor")); err == nil && fi.IsDir() {
+ if mainMod.GoVersion != "" && semver.Compare("v"+mainMod.GoVersion, "v1.14") >= 0 {
+ // The Go version is at least 1.14, and a vendor directory exists.
+ // Set -mod=vendor by default.
+ return true, mainMod, nil
+ }
+ }
+ return false, nil, nil
+}
+
+// getMainModuleAnd114 gets information about one of the main modules and whether the
+// go command in use is 1.14+. This is the information needed to figure out
+// if vendoring should be enabled.
+func getMainModuleAnd114(ctx context.Context, inv Invocation, r *Runner) (*ModuleJSON, bool, error) {
+ const format = `{{.Path}}
+{{.Dir}}
+{{.GoMod}}
+{{.GoVersion}}
+{{range context.ReleaseTags}}{{if eq . "go1.14"}}{{.}}{{end}}{{end}}
+`
+ inv.Verb = "list"
+ inv.Args = []string{"-m", "-f", format}
+ stdout, err := r.Run(ctx, inv)
+ if err != nil {
+ return nil, false, err
+ }
+
+ lines := strings.Split(stdout.String(), "\n")
+ if len(lines) < 5 {
+ return nil, false, fmt.Errorf("unexpected stdout: %q", stdout.String())
+ }
+ mod := &ModuleJSON{
+ Path: lines[0],
+ Dir: lines[1],
+ GoMod: lines[2],
+ GoVersion: lines[3],
+ Main: true,
+ }
+ return mod, lines[4] == "go1.14", nil
+}
+
+// WorkspaceVendorEnabled reports whether workspace vendoring is enabled. It takes a *Runner to execute Go commands
+// with the supplied context.Context and Invocation. The Invocation can contain pre-defined fields,
+// of which only Verb and Args are modified to run the appropriate Go command.
+// Inspired by setDefaultBuildMod in modload/init.go
+func WorkspaceVendorEnabled(ctx context.Context, inv Invocation, r *Runner) (bool, []*ModuleJSON, error) {
+ inv.Verb = "env"
+ inv.Args = []string{"GOWORK"}
+ stdout, err := r.Run(ctx, inv)
+ if err != nil {
+ return false, nil, err
+ }
+ goWork := string(bytes.TrimSpace(stdout.Bytes()))
+ if fi, err := os.Stat(filepath.Join(filepath.Dir(goWork), "vendor")); err == nil && fi.IsDir() {
+ mainMods, err := getWorkspaceMainModules(ctx, inv, r)
+ if err != nil {
+ return false, nil, err
+ }
+ return true, mainMods, nil
+ }
+ return false, nil, nil
+}
+
+// getWorkspaceMainModules gets the main modules' information.
+// This is the information needed to figure out if vendoring should be enabled.
+func getWorkspaceMainModules(ctx context.Context, inv Invocation, r *Runner) ([]*ModuleJSON, error) {
+ const format = `{{.Path}}
+{{.Dir}}
+{{.GoMod}}
+{{.GoVersion}}
+`
+ inv.Verb = "list"
+ inv.Args = []string{"-m", "-f", format}
+ stdout, err := r.Run(ctx, inv)
+ if err != nil {
+ return nil, err
+ }
+
+ lines := strings.Split(strings.TrimSuffix(stdout.String(), "\n"), "\n")
+ if len(lines) < 4 {
+ return nil, fmt.Errorf("unexpected stdout: %q", stdout.String())
+ }
+ mods := make([]*ModuleJSON, 0, len(lines)/4)
+ for i := 0; i < len(lines); i += 4 {
+ mods = append(mods, &ModuleJSON{
+ Path: lines[i],
+ Dir: lines[i+1],
+ GoMod: lines[i+2],
+ GoVersion: lines[i+3],
+ Main: true,
+ })
+ }
+ return mods, nil
+}
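
VendorEnabled first looks for an explicit -mod flag in GOFLAGS before falling back to Go 1.14's automatic vendor mode. A self-contained sketch of that GOFLAGS parsing step (the regexp is the one from the file above; the surrounding code is illustrative only):

package main

import (
	"fmt"
	"regexp"
)

var modFlagRegexp = regexp.MustCompile(`-mod[ =](\w+)`)

func main() {
	for _, goflags := range []string{"-mod=vendor -trimpath", "-count=1", ""} {
		if m := modFlagRegexp.FindStringSubmatch(goflags); len(m) != 0 {
			fmt.Printf("GOFLAGS=%q -> explicit -mod=%s\n", goflags, m[1])
		} else {
			fmt.Printf("GOFLAGS=%q -> no explicit -mod flag\n", goflags)
		}
	}
}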
diff --git a/vendor/golang.org/x/tools/internal/gocommand/version.go b/vendor/golang.org/x/tools/internal/gocommand/version.go
new file mode 100644
index 0000000..446c584
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/gocommand/version.go
@@ -0,0 +1,71 @@
+// Copyright 2020 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package gocommand
+
+import (
+ "context"
+ "fmt"
+ "regexp"
+ "strings"
+)
+
+// GoVersion reports the minor version number of the highest release
+// tag built into the go command on the PATH.
+//
+// Note that this may be higher than the version of the go tool used
+// to build this application, and thus the versions of the standard
+// go/{scanner,parser,ast,types} packages that are linked into it.
+// In that case, callers should either downgrade to the version of
+// go used to build the application, or report an error that the
+// application is too old to use the go command on the PATH.
+func GoVersion(ctx context.Context, inv Invocation, r *Runner) (int, error) {
+ inv.Verb = "list"
+ inv.Args = []string{"-e", "-f", `{{context.ReleaseTags}}`, `--`, `unsafe`}
+ inv.BuildFlags = nil // This is not a build command.
+ inv.ModFlag = ""
+ inv.ModFile = ""
+ inv.Env = append(inv.Env[:len(inv.Env):len(inv.Env)], "GO111MODULE=off")
+
+ stdoutBytes, err := r.Run(ctx, inv)
+ if err != nil {
+ return 0, err
+ }
+ stdout := stdoutBytes.String()
+ if len(stdout) < 3 {
+ return 0, fmt.Errorf("bad ReleaseTags output: %q", stdout)
+ }
+ // Split up "[go1.1 go1.15]" and return highest go1.X value.
+ tags := strings.Fields(stdout[1 : len(stdout)-2])
+ for i := len(tags) - 1; i >= 0; i-- {
+ var version int
+ if _, err := fmt.Sscanf(tags[i], "go1.%d", &version); err != nil {
+ continue
+ }
+ return version, nil
+ }
+ return 0, fmt.Errorf("no parseable ReleaseTags in %v", tags)
+}
+
+// GoVersionOutput returns the complete output of the go version command.
+func GoVersionOutput(ctx context.Context, inv Invocation, r *Runner) (string, error) {
+ inv.Verb = "version"
+ goVersion, err := r.Run(ctx, inv)
+ if err != nil {
+ return "", err
+ }
+ return goVersion.String(), nil
+}
+
+// ParseGoVersionOutput extracts the Go version string
+// from the output of the "go version" command.
+// Given an unrecognized form, it returns an empty string.
+func ParseGoVersionOutput(data string) string {
+ re := regexp.MustCompile(`^go version (go\S+|devel \S+)`)
+ m := re.FindStringSubmatch(data)
+ if len(m) != 2 {
+ return "" // unrecognized version
+ }
+ return m[1]
+}
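
ParseGoVersionOutput is a pure string transform, so its behavior is easy to reproduce. A stand-alone sketch (the function body mirrors the one above; the package wrapper and example inputs are illustrative):

package main

import (
	"fmt"
	"regexp"
)

// parseGoVersionOutput mirrors ParseGoVersionOutput above.
func parseGoVersionOutput(data string) string {
	re := regexp.MustCompile(`^go version (go\S+|devel \S+)`)
	m := re.FindStringSubmatch(data)
	if len(m) != 2 {
		return "" // unrecognized version
	}
	return m[1]
}

func main() {
	fmt.Println(parseGoVersionOutput("go version go1.22.3 linux/amd64")) // go1.22.3
	fmt.Println(parseGoVersionOutput("gibberish"))                       // (empty string)
}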
diff --git a/vendor/golang.org/x/tools/internal/packagesinternal/packages.go b/vendor/golang.org/x/tools/internal/packagesinternal/packages.go
new file mode 100644
index 0000000..44719de
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/packagesinternal/packages.go
@@ -0,0 +1,22 @@
+// Copyright 2020 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package packagesinternal exposes internal-only fields from go/packages.
+package packagesinternal
+
+var GetForTest = func(p interface{}) string { return "" }
+var GetDepsErrors = func(p interface{}) []*PackageError { return nil }
+
+type PackageError struct {
+ ImportStack []string // shortest path from package named on command line to this one
+ Pos string // position of error (if present, file:line:col)
+ Err string // the error itself
+}
+
+var TypecheckCgo int
+var DepsErrors int // must be set as a LoadMode to call GetDepsErrors
+var ForTest int // must be set as a LoadMode to call GetForTest
+
+var SetModFlag = func(config interface{}, value string) {}
+var SetModFile = func(config interface{}, value string) {}
diff --git a/vendor/golang.org/x/tools/internal/pkgbits/codes.go b/vendor/golang.org/x/tools/internal/pkgbits/codes.go
new file mode 100644
index 0000000..f0cabde
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/pkgbits/codes.go
@@ -0,0 +1,77 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package pkgbits
+
+// A Code is an enum value that can be encoded into bitstreams.
+//
+// Code types are preferable for enum types, because they allow
+// Decoder to detect desyncs.
+type Code interface {
+ // Marker returns the SyncMarker for the Code's dynamic type.
+ Marker() SyncMarker
+
+ // Value returns the Code's ordinal value.
+ Value() int
+}
+
+// A CodeVal distinguishes among go/constant.Value encodings.
+type CodeVal int
+
+func (c CodeVal) Marker() SyncMarker { return SyncVal }
+func (c CodeVal) Value() int { return int(c) }
+
+// Note: These values are public and cannot be changed without
+// updating the go/types importers.
+
+const (
+ ValBool CodeVal = iota
+ ValString
+ ValInt64
+ ValBigInt
+ ValBigRat
+ ValBigFloat
+)
+
+// A CodeType distinguishes among go/types.Type encodings.
+type CodeType int
+
+func (c CodeType) Marker() SyncMarker { return SyncType }
+func (c CodeType) Value() int { return int(c) }
+
+// Note: These values are public and cannot be changed without
+// updating the go/types importers.
+
+const (
+ TypeBasic CodeType = iota
+ TypeNamed
+ TypePointer
+ TypeSlice
+ TypeArray
+ TypeChan
+ TypeMap
+ TypeSignature
+ TypeStruct
+ TypeInterface
+ TypeUnion
+ TypeTypeParam
+)
+
+// A CodeObj distinguishes among go/types.Object encodings.
+type CodeObj int
+
+func (c CodeObj) Marker() SyncMarker { return SyncCodeObj }
+func (c CodeObj) Value() int { return int(c) }
+
+// Note: These values are public and cannot be changed without
+// updating the go/types importers.
+
+const (
+ ObjAlias CodeObj = iota
+ ObjConst
+ ObjType
+ ObjFunc
+ ObjVar
+ ObjStub
+)
diff --git a/vendor/golang.org/x/tools/internal/pkgbits/decoder.go b/vendor/golang.org/x/tools/internal/pkgbits/decoder.go
new file mode 100644
index 0000000..b92e8e6
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/pkgbits/decoder.go
@@ -0,0 +1,517 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package pkgbits
+
+import (
+ "encoding/binary"
+ "errors"
+ "fmt"
+ "go/constant"
+ "go/token"
+ "io"
+ "math/big"
+ "os"
+ "runtime"
+ "strings"
+)
+
+// A PkgDecoder provides methods for decoding a package's Unified IR
+// export data.
+type PkgDecoder struct {
+ // version is the file format version.
+ version uint32
+
+ // sync indicates whether the file uses sync markers.
+ sync bool
+
+ // pkgPath is the package path for the package to be decoded.
+ //
+ // TODO(mdempsky): Remove; unneeded since CL 391014.
+ pkgPath string
+
+ // elemData is the full data payload of the encoded package.
+ // Elements are densely and contiguously packed together.
+ //
+ // The last 8 bytes of elemData are the package fingerprint.
+ elemData string
+
+ // elemEnds stores the byte-offset end positions of element
+ // bitstreams within elemData.
+ //
+ // For example, element I's bitstream data starts at elemEnds[I-1]
+ // (or 0, if I==0) and ends at elemEnds[I].
+ //
+ // Note: elemEnds is indexed by absolute indices, not
+ // section-relative indices.
+ elemEnds []uint32
+
+ // elemEndsEnds stores the index-offset end positions of relocation
+ // sections within elemEnds.
+ //
+ // For example, section K's end positions start at elemEndsEnds[K-1]
+ // (or 0, if K==0) and end at elemEndsEnds[K].
+ elemEndsEnds [numRelocs]uint32
+
+ scratchRelocEnt []RelocEnt
+}
+
+// PkgPath returns the package path for the package to be decoded.
+//
+// TODO(mdempsky): Remove; unneeded since CL 391014.
+func (pr *PkgDecoder) PkgPath() string { return pr.pkgPath }
+
+// SyncMarkers reports whether pr uses sync markers.
+func (pr *PkgDecoder) SyncMarkers() bool { return pr.sync }
+
+// NewPkgDecoder returns a PkgDecoder initialized to read the Unified
+// IR export data from input. pkgPath is the package path for the
+// compilation unit that produced the export data.
+//
+// TODO(mdempsky): Remove pkgPath parameter; unneeded since CL 391014.
+func NewPkgDecoder(pkgPath, input string) PkgDecoder {
+ pr := PkgDecoder{
+ pkgPath: pkgPath,
+ }
+
+ // TODO(mdempsky): Implement direct indexing of input string to
+ // avoid copying the position information.
+
+ r := strings.NewReader(input)
+
+ assert(binary.Read(r, binary.LittleEndian, &pr.version) == nil)
+
+ switch pr.version {
+ default:
+ panic(fmt.Errorf("unsupported version: %v", pr.version))
+ case 0:
+ // no flags
+ case 1:
+ var flags uint32
+ assert(binary.Read(r, binary.LittleEndian, &flags) == nil)
+ pr.sync = flags&flagSyncMarkers != 0
+ }
+
+ assert(binary.Read(r, binary.LittleEndian, pr.elemEndsEnds[:]) == nil)
+
+ pr.elemEnds = make([]uint32, pr.elemEndsEnds[len(pr.elemEndsEnds)-1])
+ assert(binary.Read(r, binary.LittleEndian, pr.elemEnds[:]) == nil)
+
+ pos, err := r.Seek(0, io.SeekCurrent)
+ assert(err == nil)
+
+ pr.elemData = input[pos:]
+ assert(len(pr.elemData)-8 == int(pr.elemEnds[len(pr.elemEnds)-1]))
+
+ return pr
+}
+
+// NumElems returns the number of elements in section k.
+func (pr *PkgDecoder) NumElems(k RelocKind) int {
+ count := int(pr.elemEndsEnds[k])
+ if k > 0 {
+ count -= int(pr.elemEndsEnds[k-1])
+ }
+ return count
+}
+
+// TotalElems returns the total number of elements across all sections.
+func (pr *PkgDecoder) TotalElems() int {
+ return len(pr.elemEnds)
+}
+
+// Fingerprint returns the package fingerprint.
+func (pr *PkgDecoder) Fingerprint() [8]byte {
+ var fp [8]byte
+ copy(fp[:], pr.elemData[len(pr.elemData)-8:])
+ return fp
+}
+
+// AbsIdx returns the absolute index for the given (section, index)
+// pair.
+func (pr *PkgDecoder) AbsIdx(k RelocKind, idx Index) int {
+ absIdx := int(idx)
+ if k > 0 {
+ absIdx += int(pr.elemEndsEnds[k-1])
+ }
+ if absIdx >= int(pr.elemEndsEnds[k]) {
+ errorf("%v:%v is out of bounds; %v", k, idx, pr.elemEndsEnds)
+ }
+ return absIdx
+}
+
+// DataIdx returns the raw element bitstream for the given (section,
+// index) pair.
+func (pr *PkgDecoder) DataIdx(k RelocKind, idx Index) string {
+ absIdx := pr.AbsIdx(k, idx)
+
+ var start uint32
+ if absIdx > 0 {
+ start = pr.elemEnds[absIdx-1]
+ }
+ end := pr.elemEnds[absIdx]
+
+ return pr.elemData[start:end]
+}
+
+// StringIdx returns the string value for the given string index.
+func (pr *PkgDecoder) StringIdx(idx Index) string {
+ return pr.DataIdx(RelocString, idx)
+}
+
+// NewDecoder returns a Decoder for the given (section, index) pair,
+// and decodes the given SyncMarker from the element bitstream.
+func (pr *PkgDecoder) NewDecoder(k RelocKind, idx Index, marker SyncMarker) Decoder {
+ r := pr.NewDecoderRaw(k, idx)
+ r.Sync(marker)
+ return r
+}
+
+// TempDecoder returns a Decoder for the given (section, index) pair,
+// and decodes the given SyncMarker from the element bitstream.
+// If possible the Decoder should be RetireDecoder'd when it is no longer
+// needed; this will avoid heap allocations.
+func (pr *PkgDecoder) TempDecoder(k RelocKind, idx Index, marker SyncMarker) Decoder {
+ r := pr.TempDecoderRaw(k, idx)
+ r.Sync(marker)
+ return r
+}
+
+func (pr *PkgDecoder) RetireDecoder(d *Decoder) {
+ pr.scratchRelocEnt = d.Relocs
+ d.Relocs = nil
+}
+
+// NewDecoderRaw returns a Decoder for the given (section, index) pair.
+//
+// Most callers should use NewDecoder instead.
+func (pr *PkgDecoder) NewDecoderRaw(k RelocKind, idx Index) Decoder {
+ r := Decoder{
+ common: pr,
+ k: k,
+ Idx: idx,
+ }
+
+ // TODO(mdempsky) r.data.Reset(...) after #44505 is resolved.
+ r.Data = *strings.NewReader(pr.DataIdx(k, idx))
+
+ r.Sync(SyncRelocs)
+ r.Relocs = make([]RelocEnt, r.Len())
+ for i := range r.Relocs {
+ r.Sync(SyncReloc)
+ r.Relocs[i] = RelocEnt{RelocKind(r.Len()), Index(r.Len())}
+ }
+
+ return r
+}
+
+func (pr *PkgDecoder) TempDecoderRaw(k RelocKind, idx Index) Decoder {
+ r := Decoder{
+ common: pr,
+ k: k,
+ Idx: idx,
+ }
+
+ r.Data.Reset(pr.DataIdx(k, idx))
+ r.Sync(SyncRelocs)
+ l := r.Len()
+ if cap(pr.scratchRelocEnt) >= l {
+ r.Relocs = pr.scratchRelocEnt[:l]
+ pr.scratchRelocEnt = nil
+ } else {
+ r.Relocs = make([]RelocEnt, l)
+ }
+ for i := range r.Relocs {
+ r.Sync(SyncReloc)
+ r.Relocs[i] = RelocEnt{RelocKind(r.Len()), Index(r.Len())}
+ }
+
+ return r
+}
+
+// A Decoder provides methods for decoding an individual element's
+// bitstream data.
+type Decoder struct {
+ common *PkgDecoder
+
+ Relocs []RelocEnt
+ Data strings.Reader
+
+ k RelocKind
+ Idx Index
+}
+
+func (r *Decoder) checkErr(err error) {
+ if err != nil {
+ errorf("unexpected decoding error: %w", err)
+ }
+}
+
+func (r *Decoder) rawUvarint() uint64 {
+ x, err := readUvarint(&r.Data)
+ r.checkErr(err)
+ return x
+}
+
+// readUvarint is a type-specialized copy of encoding/binary.ReadUvarint.
+// This avoids the interface conversion and thus has better escape properties,
+// which flows up the stack.
+func readUvarint(r *strings.Reader) (uint64, error) {
+ var x uint64
+ var s uint
+ for i := 0; i < binary.MaxVarintLen64; i++ {
+ b, err := r.ReadByte()
+ if err != nil {
+ if i > 0 && err == io.EOF {
+ err = io.ErrUnexpectedEOF
+ }
+ return x, err
+ }
+ if b < 0x80 {
+ if i == binary.MaxVarintLen64-1 && b > 1 {
+ return x, overflow
+ }
+ return x | uint64(b)<<s, nil
+ }
+ x |= uint64(b&0x7f) << s
+ s += 7
+ }
+ return x, overflow
+}
+
+var overflow = errors.New("pkgbits: readUvarint overflows a 64-bit integer")
+
+func (r *Decoder) rawVarint() int64 {
+ ux := r.rawUvarint()
+
+ // Zig-zag decode.
+ x := int64(ux >> 1)
+ if ux&1 != 0 {
+ x = ^x
+ }
+ return x
+}
+
+func (r *Decoder) rawReloc(k RelocKind, idx int) Index {
+ e := r.Relocs[idx]
+ assert(e.Kind == k)
+ return e.Idx
+}
+
+// Sync decodes a sync marker from the element bitstream and asserts
+// that it matches the expected marker.
+//
+// If r.common.sync is false, then Sync is a no-op.
+func (r *Decoder) Sync(mWant SyncMarker) {
+ if !r.common.sync {
+ return
+ }
+
+ pos, _ := r.Data.Seek(0, io.SeekCurrent)
+ mHave := SyncMarker(r.rawUvarint())
+ writerPCs := make([]int, r.rawUvarint())
+ for i := range writerPCs {
+ writerPCs[i] = int(r.rawUvarint())
+ }
+
+ if mHave == mWant {
+ return
+ }
+
+ // There's some tension here between printing:
+ //
+ // (1) full file paths that tools can recognize (e.g., so emacs
+ // hyperlinks the "file:line" text for easy navigation), or
+ //
+ // (2) short file paths that are easier for humans to read (e.g., by
+ // omitting redundant or irrelevant details, so it's easier to
+ // focus on the useful bits that remain).
+ //
+ // The current formatting favors the former, as it seems more
+ // helpful in practice. But perhaps the formatting could be improved
+ // to better address both concerns. For example, use relative file
+ // paths if they would be shorter, or rewrite file paths to contain
+ // "$GOROOT" (like objabi.AbsFile does) if tools can be taught how
+ // to reliably expand that again.
+
+ fmt.Printf("export data desync: package %q, section %v, index %v, offset %v\n", r.common.pkgPath, r.k, r.Idx, pos)
+
+ fmt.Printf("\nfound %v, written at:\n", mHave)
+ if len(writerPCs) == 0 {
+ fmt.Printf("\t[stack trace unavailable; recompile package %q with -d=syncframes]\n", r.common.pkgPath)
+ }
+ for _, pc := range writerPCs {
+ fmt.Printf("\t%s\n", r.common.StringIdx(r.rawReloc(RelocString, pc)))
+ }
+
+ fmt.Printf("\nexpected %v, reading at:\n", mWant)
+ var readerPCs [32]uintptr // TODO(mdempsky): Dynamically size?
+ n := runtime.Callers(2, readerPCs[:])
+ for _, pc := range fmtFrames(readerPCs[:n]...) {
+ fmt.Printf("\t%s\n", pc)
+ }
+
+ // We already printed a stack trace for the reader, so now we can
+ // simply exit. Printing a second one with panic or base.Fatalf
+ // would just be noise.
+ os.Exit(1)
+}
+
+// Bool decodes and returns a bool value from the element bitstream.
+func (r *Decoder) Bool() bool {
+ r.Sync(SyncBool)
+ x, err := r.Data.ReadByte()
+ r.checkErr(err)
+ assert(x < 2)
+ return x != 0
+}
+
+// Int64 decodes and returns an int64 value from the element bitstream.
+func (r *Decoder) Int64() int64 {
+ r.Sync(SyncInt64)
+ return r.rawVarint()
+}
+
+// Uint64 decodes and returns a uint64 value from the element bitstream.
+func (r *Decoder) Uint64() uint64 {
+ r.Sync(SyncUint64)
+ return r.rawUvarint()
+}
+
+// Len decodes and returns a non-negative int value from the element bitstream.
+func (r *Decoder) Len() int { x := r.Uint64(); v := int(x); assert(uint64(v) == x); return v }
+
+// Int decodes and returns an int value from the element bitstream.
+func (r *Decoder) Int() int { x := r.Int64(); v := int(x); assert(int64(v) == x); return v }
+
+// Uint decodes and returns a uint value from the element bitstream.
+func (r *Decoder) Uint() uint { x := r.Uint64(); v := uint(x); assert(uint64(v) == x); return v }
+
+// Code decodes a Code value from the element bitstream and returns
+// its ordinal value. It's the caller's responsibility to convert the
+// result to an appropriate Code type.
+//
+// TODO(mdempsky): Ideally this method would have signature "Code[T
+// Code] T" instead, but we don't allow generic methods and the
+// compiler can't depend on generics yet anyway.
+func (r *Decoder) Code(mark SyncMarker) int {
+ r.Sync(mark)
+ return r.Len()
+}
+
+// Reloc decodes a relocation of expected section k from the element
+// bitstream and returns an index to the referenced element.
+func (r *Decoder) Reloc(k RelocKind) Index {
+ r.Sync(SyncUseReloc)
+ return r.rawReloc(k, r.Len())
+}
+
+// String decodes and returns a string value from the element
+// bitstream.
+func (r *Decoder) String() string {
+ r.Sync(SyncString)
+ return r.common.StringIdx(r.Reloc(RelocString))
+}
+
+// Strings decodes and returns a variable-length slice of strings from
+// the element bitstream.
+func (r *Decoder) Strings() []string {
+ res := make([]string, r.Len())
+ for i := range res {
+ res[i] = r.String()
+ }
+ return res
+}
+
+// Value decodes and returns a constant.Value from the element
+// bitstream.
+func (r *Decoder) Value() constant.Value {
+ r.Sync(SyncValue)
+ isComplex := r.Bool()
+ val := r.scalar()
+ if isComplex {
+ val = constant.BinaryOp(val, token.ADD, constant.MakeImag(r.scalar()))
+ }
+ return val
+}
+
+func (r *Decoder) scalar() constant.Value {
+ switch tag := CodeVal(r.Code(SyncVal)); tag {
+ default:
+ panic(fmt.Errorf("unexpected scalar tag: %v", tag))
+
+ case ValBool:
+ return constant.MakeBool(r.Bool())
+ case ValString:
+ return constant.MakeString(r.String())
+ case ValInt64:
+ return constant.MakeInt64(r.Int64())
+ case ValBigInt:
+ return constant.Make(r.bigInt())
+ case ValBigRat:
+ num := r.bigInt()
+ denom := r.bigInt()
+ return constant.Make(new(big.Rat).SetFrac(num, denom))
+ case ValBigFloat:
+ return constant.Make(r.bigFloat())
+ }
+}
+
+func (r *Decoder) bigInt() *big.Int {
+ v := new(big.Int).SetBytes([]byte(r.String()))
+ if r.Bool() {
+ v.Neg(v)
+ }
+ return v
+}
+
+func (r *Decoder) bigFloat() *big.Float {
+ v := new(big.Float).SetPrec(512)
+ assert(v.UnmarshalText([]byte(r.String())) == nil)
+ return v
+}
+
+// @@@ Helpers
+
+// TODO(mdempsky): These should probably be removed. I think they're a
+// smell that the export data format is not yet quite right.
+
+// PeekPkgPath returns the package path for the specified package
+// index.
+func (pr *PkgDecoder) PeekPkgPath(idx Index) string {
+ var path string
+ {
+ r := pr.TempDecoder(RelocPkg, idx, SyncPkgDef)
+ path = r.String()
+ pr.RetireDecoder(&r)
+ }
+ if path == "" {
+ path = pr.pkgPath
+ }
+ return path
+}
+
+// PeekObj returns the package path, object name, and CodeObj for the
+// specified object index.
+func (pr *PkgDecoder) PeekObj(idx Index) (string, string, CodeObj) {
+ var ridx Index
+ var name string
+ var rcode int
+ {
+ r := pr.TempDecoder(RelocName, idx, SyncObject1)
+ r.Sync(SyncSym)
+ r.Sync(SyncPkg)
+ ridx = r.Reloc(RelocPkg)
+ name = r.String()
+ rcode = r.Code(SyncCodeObj)
+ pr.RetireDecoder(&r)
+ }
+
+ path := pr.PeekPkgPath(ridx)
+ assert(name != "")
+
+ tag := CodeObj(rcode)
+
+ return path, name, tag
+}
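
Decoder.rawVarint undoes the zig-zag mapping that the encoder applies before writing a uvarint, so values of small magnitude (positive or negative) stay short on the wire. A self-contained sketch of the round trip (the helper names are illustrative; the bit twiddling mirrors rawVarint above and Encoder.rawVarint later in this patch):

package main

import (
	"encoding/binary"
	"fmt"
)

// zigzagEncode maps an int64 to a uint64 exactly as Encoder.rawVarint does.
func zigzagEncode(x int64) uint64 {
	ux := uint64(x) << 1
	if x < 0 {
		ux = ^ux
	}
	return ux
}

// zigzagDecode inverts the mapping, as in Decoder.rawVarint.
func zigzagDecode(ux uint64) int64 {
	x := int64(ux >> 1)
	if ux&1 != 0 {
		x = ^x
	}
	return x
}

func main() {
	for _, x := range []int64{0, -1, 1, -64, 63, 1 << 40} {
		ux := zigzagEncode(x)
		var buf [binary.MaxVarintLen64]byte
		n := binary.PutUvarint(buf[:], ux)
		fmt.Printf("%12d -> %d (%d-byte uvarint) -> %d\n", x, ux, n, zigzagDecode(ux))
	}
}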
diff --git a/vendor/golang.org/x/tools/internal/pkgbits/doc.go b/vendor/golang.org/x/tools/internal/pkgbits/doc.go
new file mode 100644
index 0000000..c8a2796
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/pkgbits/doc.go
@@ -0,0 +1,32 @@
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package pkgbits implements low-level coding abstractions for
+// Unified IR's export data format.
+//
+// At a low-level, a package is a collection of bitstream elements.
+// Each element has a "kind" and a dense, non-negative index.
+// Elements can be randomly accessed given their kind and index.
+//
+// Individual elements are sequences of variable-length values (e.g.,
+// integers, booleans, strings, go/constant values, cross-references
+// to other elements). Package pkgbits provides APIs for encoding and
+// decoding these low-level values, but the details of mapping
+// higher-level Go constructs into elements is left to higher-level
+// abstractions.
+//
+// Elements may cross-reference each other with "relocations." For
+// example, an element representing a pointer type has a relocation
+// referring to the element type.
+//
+// Go constructs may be composed as a constellation of multiple
+// elements. For example, a declared function may have one element to
+// describe the object (e.g., its name, type, position), and a
+// separate element to describe its function body. This allows readers
+// some flexibility in efficiently seeking or re-reading data (e.g.,
+// inlining requires re-reading the function body for each inlined
+// call, without needing to re-read the object-level details).
+//
+// This is a copy of internal/pkgbits in the Go implementation.
+package pkgbits
diff --git a/vendor/golang.org/x/tools/internal/pkgbits/encoder.go b/vendor/golang.org/x/tools/internal/pkgbits/encoder.go
new file mode 100644
index 0000000..6482617
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/pkgbits/encoder.go
@@ -0,0 +1,383 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package pkgbits
+
+import (
+ "bytes"
+ "crypto/md5"
+ "encoding/binary"
+ "go/constant"
+ "io"
+ "math/big"
+ "runtime"
+)
+
+// currentVersion is the current version number.
+//
+// - v0: initial prototype
+//
+// - v1: adds the flags uint32 word
+const currentVersion uint32 = 1
+
+// A PkgEncoder provides methods for encoding a package's Unified IR
+// export data.
+type PkgEncoder struct {
+ // elems holds the bitstream for previously encoded elements.
+ elems [numRelocs][]string
+
+ // stringsIdx maps previously encoded strings to their index within
+ // the RelocString section, to allow deduplication. That is,
+ // elems[RelocString][stringsIdx[s]] == s (if present).
+ stringsIdx map[string]Index
+
+ // syncFrames is the number of frames to write at each sync
+ // marker. A negative value means sync markers are omitted.
+ syncFrames int
+}
+
+// SyncMarkers reports whether pw uses sync markers.
+func (pw *PkgEncoder) SyncMarkers() bool { return pw.syncFrames >= 0 }
+
+// NewPkgEncoder returns an initialized PkgEncoder.
+//
+// syncFrames is the number of caller frames that should be serialized
+// at Sync points. Serializing additional frames results in larger
+// export data files, but can help diagnose desync errors in
+// higher-level Unified IR reader/writer code. If syncFrames is
+// negative, then sync markers are omitted entirely.
+func NewPkgEncoder(syncFrames int) PkgEncoder {
+ return PkgEncoder{
+ stringsIdx: make(map[string]Index),
+ syncFrames: syncFrames,
+ }
+}
+
+// DumpTo writes the package's encoded data to out0 and returns the
+// package fingerprint.
+func (pw *PkgEncoder) DumpTo(out0 io.Writer) (fingerprint [8]byte) {
+ h := md5.New()
+ out := io.MultiWriter(out0, h)
+
+ writeUint32 := func(x uint32) {
+ assert(binary.Write(out, binary.LittleEndian, x) == nil)
+ }
+
+ writeUint32(currentVersion)
+
+ var flags uint32
+ if pw.SyncMarkers() {
+ flags |= flagSyncMarkers
+ }
+ writeUint32(flags)
+
+ // Write elemEndsEnds.
+ var sum uint32
+ for _, elems := range &pw.elems {
+ sum += uint32(len(elems))
+ writeUint32(sum)
+ }
+
+ // Write elemEnds.
+ sum = 0
+ for _, elems := range &pw.elems {
+ for _, elem := range elems {
+ sum += uint32(len(elem))
+ writeUint32(sum)
+ }
+ }
+
+ // Write elemData.
+ for _, elems := range &pw.elems {
+ for _, elem := range elems {
+ _, err := io.WriteString(out, elem)
+ assert(err == nil)
+ }
+ }
+
+ // Write fingerprint.
+ copy(fingerprint[:], h.Sum(nil))
+ _, err := out0.Write(fingerprint[:])
+ assert(err == nil)
+
+ return
+}
+
+// StringIdx adds a string value to the strings section, if not
+// already present, and returns its index.
+func (pw *PkgEncoder) StringIdx(s string) Index {
+ if idx, ok := pw.stringsIdx[s]; ok {
+ assert(pw.elems[RelocString][idx] == s)
+ return idx
+ }
+
+ idx := Index(len(pw.elems[RelocString]))
+ pw.elems[RelocString] = append(pw.elems[RelocString], s)
+ pw.stringsIdx[s] = idx
+ return idx
+}
+
+// NewEncoder returns an Encoder for a new element within the given
+// section, and encodes the given SyncMarker as the start of the
+// element bitstream.
+func (pw *PkgEncoder) NewEncoder(k RelocKind, marker SyncMarker) Encoder {
+ e := pw.NewEncoderRaw(k)
+ e.Sync(marker)
+ return e
+}
+
+// NewEncoderRaw returns an Encoder for a new element within the given
+// section.
+//
+// Most callers should use NewEncoder instead.
+func (pw *PkgEncoder) NewEncoderRaw(k RelocKind) Encoder {
+ idx := Index(len(pw.elems[k]))
+ pw.elems[k] = append(pw.elems[k], "") // placeholder
+
+ return Encoder{
+ p: pw,
+ k: k,
+ Idx: idx,
+ }
+}
+
+// An Encoder provides methods for encoding an individual element's
+// bitstream data.
+type Encoder struct {
+ p *PkgEncoder
+
+ Relocs []RelocEnt
+ RelocMap map[RelocEnt]uint32
+ Data bytes.Buffer // accumulated element bitstream data
+
+ encodingRelocHeader bool
+
+ k RelocKind
+ Idx Index // index within relocation section
+}
+
+// Flush finalizes the element's bitstream and returns its Index.
+func (w *Encoder) Flush() Index {
+ var sb bytes.Buffer // TODO(mdempsky): strings.Builder after #44505 is resolved
+
+	// Back up the data so we write the relocations at the front.
+ var tmp bytes.Buffer
+ io.Copy(&tmp, &w.Data)
+
+ // TODO(mdempsky): Consider writing these out separately so they're
+ // easier to strip, along with function bodies, so that we can prune
+ // down to just the data that's relevant to go/types.
+ if w.encodingRelocHeader {
+ panic("encodingRelocHeader already true; recursive flush?")
+ }
+ w.encodingRelocHeader = true
+ w.Sync(SyncRelocs)
+ w.Len(len(w.Relocs))
+ for _, rEnt := range w.Relocs {
+ w.Sync(SyncReloc)
+ w.Len(int(rEnt.Kind))
+ w.Len(int(rEnt.Idx))
+ }
+
+ io.Copy(&sb, &w.Data)
+ io.Copy(&sb, &tmp)
+ w.p.elems[w.k][w.Idx] = sb.String()
+
+ return w.Idx
+}
+
+func (w *Encoder) checkErr(err error) {
+ if err != nil {
+ errorf("unexpected encoding error: %v", err)
+ }
+}
+
+func (w *Encoder) rawUvarint(x uint64) {
+ var buf [binary.MaxVarintLen64]byte
+ n := binary.PutUvarint(buf[:], x)
+ _, err := w.Data.Write(buf[:n])
+ w.checkErr(err)
+}
+
+func (w *Encoder) rawVarint(x int64) {
+ // Zig-zag encode.
+ ux := uint64(x) << 1
+ if x < 0 {
+ ux = ^ux
+ }
+
+ w.rawUvarint(ux)
+}
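
rawVarint zig-zag encodes signed values so that small negative numbers also become short uvarints, matching encoding/binary's varint convention. A standalone sketch of the transform and its inverse (illustrative names only):

package main

import "fmt"

func zigzag(x int64) uint64 {
	ux := uint64(x) << 1
	if x < 0 {
		ux = ^ux
	}
	return ux
}

func unzigzag(ux uint64) int64 {
	x := int64(ux >> 1)
	if ux&1 != 0 {
		x = ^x
	}
	return x
}

func main() {
	for _, v := range []int64{0, -1, 1, -2, 63, -64} {
		fmt.Printf("%d -> %d -> %d\n", v, zigzag(v), unzigzag(zigzag(v)))
	}
}
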
+
+func (w *Encoder) rawReloc(r RelocKind, idx Index) int {
+ e := RelocEnt{r, idx}
+ if w.RelocMap != nil {
+ if i, ok := w.RelocMap[e]; ok {
+ return int(i)
+ }
+ } else {
+ w.RelocMap = make(map[RelocEnt]uint32)
+ }
+
+ i := len(w.Relocs)
+ w.RelocMap[e] = uint32(i)
+ w.Relocs = append(w.Relocs, e)
+ return i
+}
+
+func (w *Encoder) Sync(m SyncMarker) {
+ if !w.p.SyncMarkers() {
+ return
+ }
+
+ // Writing out stack frame string references requires working
+ // relocations, but writing out the relocations themselves involves
+ // sync markers. To prevent infinite recursion, we simply trim the
+ // stack frame for sync markers within the relocation header.
+ var frames []string
+ if !w.encodingRelocHeader && w.p.syncFrames > 0 {
+ pcs := make([]uintptr, w.p.syncFrames)
+ n := runtime.Callers(2, pcs)
+ frames = fmtFrames(pcs[:n]...)
+ }
+
+ // TODO(mdempsky): Save space by writing out stack frames as a
+ // linked list so we can share common stack frames.
+ w.rawUvarint(uint64(m))
+ w.rawUvarint(uint64(len(frames)))
+ for _, frame := range frames {
+ w.rawUvarint(uint64(w.rawReloc(RelocString, w.p.StringIdx(frame))))
+ }
+}
+
+// Bool encodes and writes a bool value into the element bitstream,
+// and then returns the bool value.
+//
+// For simple, 2-alternative encodings, the idiomatic way to call Bool
+// is something like:
+//
+// if w.Bool(x != 0) {
+// // alternative #1
+// } else {
+// // alternative #2
+// }
+//
+// For multi-alternative encodings, use Code instead.
+func (w *Encoder) Bool(b bool) bool {
+ w.Sync(SyncBool)
+ var x byte
+ if b {
+ x = 1
+ }
+ err := w.Data.WriteByte(x)
+ w.checkErr(err)
+ return b
+}
+
+// Int64 encodes and writes an int64 value into the element bitstream.
+func (w *Encoder) Int64(x int64) {
+ w.Sync(SyncInt64)
+ w.rawVarint(x)
+}
+
+// Uint64 encodes and writes a uint64 value into the element bitstream.
+func (w *Encoder) Uint64(x uint64) {
+ w.Sync(SyncUint64)
+ w.rawUvarint(x)
+}
+
+// Len encodes and writes a non-negative int value into the element bitstream.
+func (w *Encoder) Len(x int) { assert(x >= 0); w.Uint64(uint64(x)) }
+
+// Int encodes and writes an int value into the element bitstream.
+func (w *Encoder) Int(x int) { w.Int64(int64(x)) }
+
+// Uint encodes and writes a uint value into the element bitstream.
+func (w *Encoder) Uint(x uint) { w.Uint64(uint64(x)) }
+
+// Reloc encodes and writes a relocation for the given (section,
+// index) pair into the element bitstream.
+//
+// Note: Only the index is formally written into the element
+// bitstream, so bitstream decoders must know from context which
+// section an encoded relocation refers to.
+func (w *Encoder) Reloc(r RelocKind, idx Index) {
+ w.Sync(SyncUseReloc)
+ w.Len(w.rawReloc(r, idx))
+}
+
+// Code encodes and writes a Code value into the element bitstream.
+func (w *Encoder) Code(c Code) {
+ w.Sync(c.Marker())
+ w.Len(c.Value())
+}
+
+// String encodes and writes a string value into the element
+// bitstream.
+//
+// Internally, strings are deduplicated by adding them to the strings
+// section (if not already present), and then writing a relocation
+// into the element bitstream.
+func (w *Encoder) String(s string) {
+ w.Sync(SyncString)
+ w.Reloc(RelocString, w.p.StringIdx(s))
+}
+
+// Strings encodes and writes a variable-length slice of strings into
+// the element bitstream.
+func (w *Encoder) Strings(ss []string) {
+ w.Len(len(ss))
+ for _, s := range ss {
+ w.String(s)
+ }
+}
+
+// Value encodes and writes a constant.Value into the element
+// bitstream.
+func (w *Encoder) Value(val constant.Value) {
+ w.Sync(SyncValue)
+ if w.Bool(val.Kind() == constant.Complex) {
+ w.scalar(constant.Real(val))
+ w.scalar(constant.Imag(val))
+ } else {
+ w.scalar(val)
+ }
+}
+
+func (w *Encoder) scalar(val constant.Value) {
+ switch v := constant.Val(val).(type) {
+ default:
+ errorf("unhandled %v (%v)", val, val.Kind())
+ case bool:
+ w.Code(ValBool)
+ w.Bool(v)
+ case string:
+ w.Code(ValString)
+ w.String(v)
+ case int64:
+ w.Code(ValInt64)
+ w.Int64(v)
+ case *big.Int:
+ w.Code(ValBigInt)
+ w.bigInt(v)
+ case *big.Rat:
+ w.Code(ValBigRat)
+ w.bigInt(v.Num())
+ w.bigInt(v.Denom())
+ case *big.Float:
+ w.Code(ValBigFloat)
+ w.bigFloat(v)
+ }
+}
+
+func (w *Encoder) bigInt(v *big.Int) {
+ b := v.Bytes()
+ w.String(string(b)) // TODO: More efficient encoding.
+ w.Bool(v.Sign() < 0)
+}
+
+func (w *Encoder) bigFloat(v *big.Float) {
+ b := v.Append(nil, 'p', -1)
+ w.String(string(b)) // TODO: More efficient encoding.
+}
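
The scalar switch above keys off go/constant.Val, whose dynamic result type is one of bool, string, int64, *big.Int, *big.Rat, or *big.Float. A small standalone check of those dynamic types (illustrative only, not part of the vendored code):

package main

import (
	"fmt"
	"go/constant"
)

func main() {
	vals := []constant.Value{
		constant.MakeBool(true),
		constant.MakeString("hi"),
		constant.MakeInt64(42),
		constant.MakeFloat64(2.5),
	}
	for _, v := range vals {
		// constant.Val returns the underlying representation that the
		// encoder's scalar method switches on.
		fmt.Printf("%-8v kind=%v val type=%T\n", v, v.Kind(), constant.Val(v))
	}
}
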
diff --git a/vendor/golang.org/x/tools/internal/pkgbits/flags.go b/vendor/golang.org/x/tools/internal/pkgbits/flags.go
new file mode 100644
index 0000000..6542227
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/pkgbits/flags.go
@@ -0,0 +1,9 @@
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package pkgbits
+
+const (
+ flagSyncMarkers = 1 << iota // file format contains sync markers
+)
diff --git a/vendor/golang.org/x/tools/internal/pkgbits/frames_go1.go b/vendor/golang.org/x/tools/internal/pkgbits/frames_go1.go
new file mode 100644
index 0000000..5294f6a
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/pkgbits/frames_go1.go
@@ -0,0 +1,21 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build !go1.7
+// +build !go1.7
+
+// TODO(mdempsky): Remove after #44505 is resolved
+
+package pkgbits
+
+import "runtime"
+
+func walkFrames(pcs []uintptr, visit frameVisitor) {
+ for _, pc := range pcs {
+ fn := runtime.FuncForPC(pc)
+ file, line := fn.FileLine(pc)
+
+ visit(file, line, fn.Name(), pc-fn.Entry())
+ }
+}
diff --git a/vendor/golang.org/x/tools/internal/pkgbits/frames_go17.go b/vendor/golang.org/x/tools/internal/pkgbits/frames_go17.go
new file mode 100644
index 0000000..2324ae7
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/pkgbits/frames_go17.go
@@ -0,0 +1,28 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build go1.7
+// +build go1.7
+
+package pkgbits
+
+import "runtime"
+
+// walkFrames calls visit for each call frame represented by pcs.
+//
+// pcs should be a slice of PCs, as returned by runtime.Callers.
+func walkFrames(pcs []uintptr, visit frameVisitor) {
+ if len(pcs) == 0 {
+ return
+ }
+
+ frames := runtime.CallersFrames(pcs)
+ for {
+ frame, more := frames.Next()
+ visit(frame.File, frame.Line, frame.Function, frame.PC-frame.Entry)
+ if !more {
+ return
+ }
+ }
+}
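
walkFrames expects the PC slice that runtime.Callers produces and expands it with runtime.CallersFrames. A minimal usage sketch of that pairing, written outside the package with the visitor inlined, purely for illustration:

package main

import (
	"fmt"
	"runtime"
)

func main() {
	pcs := make([]uintptr, 8)
	n := runtime.Callers(1, pcs) // skip the runtime.Callers frame itself

	frames := runtime.CallersFrames(pcs[:n])
	for {
		frame, more := frames.Next()
		fmt.Printf("%s:%d %s +0x%x\n", frame.File, frame.Line, frame.Function, frame.PC-frame.Entry)
		if !more {
			break
		}
	}
}
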
diff --git a/vendor/golang.org/x/tools/internal/pkgbits/reloc.go b/vendor/golang.org/x/tools/internal/pkgbits/reloc.go
new file mode 100644
index 0000000..fcdfb97
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/pkgbits/reloc.go
@@ -0,0 +1,42 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package pkgbits
+
+// A RelocKind indicates a particular section within a unified IR export.
+type RelocKind int32
+
+// An Index represents a bitstream element index within a particular
+// section.
+type Index int32
+
+// A relocEnt (relocation entry) is an entry in an element's local
+// reference table.
+//
+// TODO(mdempsky): Rename this too.
+type RelocEnt struct {
+ Kind RelocKind
+ Idx Index
+}
+
+// Reserved indices within the meta relocation section.
+const (
+ PublicRootIdx Index = 0
+ PrivateRootIdx Index = 1
+)
+
+const (
+ RelocString RelocKind = iota
+ RelocMeta
+ RelocPosBase
+ RelocPkg
+ RelocName
+ RelocType
+ RelocObj
+ RelocObjExt
+ RelocObjDict
+ RelocBody
+
+ numRelocs = iota
+)
diff --git a/vendor/golang.org/x/tools/internal/pkgbits/support.go b/vendor/golang.org/x/tools/internal/pkgbits/support.go
new file mode 100644
index 0000000..ad26d3b
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/pkgbits/support.go
@@ -0,0 +1,17 @@
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package pkgbits
+
+import "fmt"
+
+func assert(b bool) {
+ if !b {
+ panic("assertion failed")
+ }
+}
+
+func errorf(format string, args ...interface{}) {
+ panic(fmt.Errorf(format, args...))
+}
diff --git a/vendor/golang.org/x/tools/internal/pkgbits/sync.go b/vendor/golang.org/x/tools/internal/pkgbits/sync.go
new file mode 100644
index 0000000..5bd51ef
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/pkgbits/sync.go
@@ -0,0 +1,113 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package pkgbits
+
+import (
+ "fmt"
+ "strings"
+)
+
+// fmtFrames formats a backtrace for reporting reader/writer desyncs.
+func fmtFrames(pcs ...uintptr) []string {
+ res := make([]string, 0, len(pcs))
+ walkFrames(pcs, func(file string, line int, name string, offset uintptr) {
+ // Trim package from function name. It's just redundant noise.
+ name = strings.TrimPrefix(name, "cmd/compile/internal/noder.")
+
+ res = append(res, fmt.Sprintf("%s:%v: %s +0x%v", file, line, name, offset))
+ })
+ return res
+}
+
+type frameVisitor func(file string, line int, name string, offset uintptr)
+
+// SyncMarker is an enum type that represents markers that may be
+// written to export data to ensure the reader and writer stay
+// synchronized.
+type SyncMarker int
+
+//go:generate stringer -type=SyncMarker -trimprefix=Sync
+
+const (
+ _ SyncMarker = iota
+
+ // Public markers (known to go/types importers).
+
+ // Low-level coding markers.
+ SyncEOF
+ SyncBool
+ SyncInt64
+ SyncUint64
+ SyncString
+ SyncValue
+ SyncVal
+ SyncRelocs
+ SyncReloc
+ SyncUseReloc
+
+ // Higher-level object and type markers.
+ SyncPublic
+ SyncPos
+ SyncPosBase
+ SyncObject
+ SyncObject1
+ SyncPkg
+ SyncPkgDef
+ SyncMethod
+ SyncType
+ SyncTypeIdx
+ SyncTypeParamNames
+ SyncSignature
+ SyncParams
+ SyncParam
+ SyncCodeObj
+ SyncSym
+ SyncLocalIdent
+ SyncSelector
+
+ // Private markers (only known to cmd/compile).
+ SyncPrivate
+
+ SyncFuncExt
+ SyncVarExt
+ SyncTypeExt
+ SyncPragma
+
+ SyncExprList
+ SyncExprs
+ SyncExpr
+ SyncExprType
+ SyncAssign
+ SyncOp
+ SyncFuncLit
+ SyncCompLit
+
+ SyncDecl
+ SyncFuncBody
+ SyncOpenScope
+ SyncCloseScope
+ SyncCloseAnotherScope
+ SyncDeclNames
+ SyncDeclName
+
+ SyncStmts
+ SyncBlockStmt
+ SyncIfStmt
+ SyncForStmt
+ SyncSwitchStmt
+ SyncRangeStmt
+ SyncCaseClause
+ SyncCommClause
+ SyncSelectStmt
+ SyncDecls
+ SyncLabeledStmt
+ SyncUseObjLocal
+ SyncAddLocal
+ SyncLinkname
+ SyncStmt1
+ SyncStmtsEnd
+ SyncLabel
+ SyncOptLabel
+)
diff --git a/vendor/golang.org/x/tools/internal/pkgbits/syncmarker_string.go b/vendor/golang.org/x/tools/internal/pkgbits/syncmarker_string.go
new file mode 100644
index 0000000..4a5b0ca
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/pkgbits/syncmarker_string.go
@@ -0,0 +1,89 @@
+// Code generated by "stringer -type=SyncMarker -trimprefix=Sync"; DO NOT EDIT.
+
+package pkgbits
+
+import "strconv"
+
+func _() {
+ // An "invalid array index" compiler error signifies that the constant values have changed.
+ // Re-run the stringer command to generate them again.
+ var x [1]struct{}
+ _ = x[SyncEOF-1]
+ _ = x[SyncBool-2]
+ _ = x[SyncInt64-3]
+ _ = x[SyncUint64-4]
+ _ = x[SyncString-5]
+ _ = x[SyncValue-6]
+ _ = x[SyncVal-7]
+ _ = x[SyncRelocs-8]
+ _ = x[SyncReloc-9]
+ _ = x[SyncUseReloc-10]
+ _ = x[SyncPublic-11]
+ _ = x[SyncPos-12]
+ _ = x[SyncPosBase-13]
+ _ = x[SyncObject-14]
+ _ = x[SyncObject1-15]
+ _ = x[SyncPkg-16]
+ _ = x[SyncPkgDef-17]
+ _ = x[SyncMethod-18]
+ _ = x[SyncType-19]
+ _ = x[SyncTypeIdx-20]
+ _ = x[SyncTypeParamNames-21]
+ _ = x[SyncSignature-22]
+ _ = x[SyncParams-23]
+ _ = x[SyncParam-24]
+ _ = x[SyncCodeObj-25]
+ _ = x[SyncSym-26]
+ _ = x[SyncLocalIdent-27]
+ _ = x[SyncSelector-28]
+ _ = x[SyncPrivate-29]
+ _ = x[SyncFuncExt-30]
+ _ = x[SyncVarExt-31]
+ _ = x[SyncTypeExt-32]
+ _ = x[SyncPragma-33]
+ _ = x[SyncExprList-34]
+ _ = x[SyncExprs-35]
+ _ = x[SyncExpr-36]
+ _ = x[SyncExprType-37]
+ _ = x[SyncAssign-38]
+ _ = x[SyncOp-39]
+ _ = x[SyncFuncLit-40]
+ _ = x[SyncCompLit-41]
+ _ = x[SyncDecl-42]
+ _ = x[SyncFuncBody-43]
+ _ = x[SyncOpenScope-44]
+ _ = x[SyncCloseScope-45]
+ _ = x[SyncCloseAnotherScope-46]
+ _ = x[SyncDeclNames-47]
+ _ = x[SyncDeclName-48]
+ _ = x[SyncStmts-49]
+ _ = x[SyncBlockStmt-50]
+ _ = x[SyncIfStmt-51]
+ _ = x[SyncForStmt-52]
+ _ = x[SyncSwitchStmt-53]
+ _ = x[SyncRangeStmt-54]
+ _ = x[SyncCaseClause-55]
+ _ = x[SyncCommClause-56]
+ _ = x[SyncSelectStmt-57]
+ _ = x[SyncDecls-58]
+ _ = x[SyncLabeledStmt-59]
+ _ = x[SyncUseObjLocal-60]
+ _ = x[SyncAddLocal-61]
+ _ = x[SyncLinkname-62]
+ _ = x[SyncStmt1-63]
+ _ = x[SyncStmtsEnd-64]
+ _ = x[SyncLabel-65]
+ _ = x[SyncOptLabel-66]
+}
+
+const _SyncMarker_name = "EOFBoolInt64Uint64StringValueValRelocsRelocUseRelocPublicPosPosBaseObjectObject1PkgPkgDefMethodTypeTypeIdxTypeParamNamesSignatureParamsParamCodeObjSymLocalIdentSelectorPrivateFuncExtVarExtTypeExtPragmaExprListExprsExprExprTypeAssignOpFuncLitCompLitDeclFuncBodyOpenScopeCloseScopeCloseAnotherScopeDeclNamesDeclNameStmtsBlockStmtIfStmtForStmtSwitchStmtRangeStmtCaseClauseCommClauseSelectStmtDeclsLabeledStmtUseObjLocalAddLocalLinknameStmt1StmtsEndLabelOptLabel"
+
+var _SyncMarker_index = [...]uint16{0, 3, 7, 12, 18, 24, 29, 32, 38, 43, 51, 57, 60, 67, 73, 80, 83, 89, 95, 99, 106, 120, 129, 135, 140, 147, 150, 160, 168, 175, 182, 188, 195, 201, 209, 214, 218, 226, 232, 234, 241, 248, 252, 260, 269, 279, 296, 305, 313, 318, 327, 333, 340, 350, 359, 369, 379, 389, 394, 405, 416, 424, 432, 437, 445, 450, 458}
+
+func (i SyncMarker) String() string {
+ i -= 1
+ if i < 0 || i >= SyncMarker(len(_SyncMarker_index)-1) {
+ return "SyncMarker(" + strconv.FormatInt(int64(i+1), 10) + ")"
+ }
+ return _SyncMarker_name[_SyncMarker_index[i]:_SyncMarker_index[i+1]]
+}
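
The generated String method slices one joined name string using consecutive offsets, e.g. SyncBool (value 2) maps to _SyncMarker_name[3:7] == "Bool". A tiny standalone illustration of that slicing scheme with made-up data:

package main

import "fmt"

func main() {
	// Same layout as the generated stringer: one joined string plus offsets.
	const names = "EOFBoolInt64"
	index := []uint16{0, 3, 7, 12}

	for i := 0; i < len(index)-1; i++ {
		fmt.Println(names[index[i]:index[i+1]]) // EOF, Bool, Int64
	}
}
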
diff --git a/vendor/golang.org/x/tools/internal/stdlib/manifest.go b/vendor/golang.org/x/tools/internal/stdlib/manifest.go
new file mode 100644
index 0000000..a928acf
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/stdlib/manifest.go
@@ -0,0 +1,17431 @@
+// Copyright 2024 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Code generated by generate.go. DO NOT EDIT.
+
+package stdlib
+
+var PackageSymbols = map[string][]Symbol{
+ "archive/tar": {
+ {"(*Header).FileInfo", Method, 1},
+ {"(*Reader).Next", Method, 0},
+ {"(*Reader).Read", Method, 0},
+ {"(*Writer).AddFS", Method, 22},
+ {"(*Writer).Close", Method, 0},
+ {"(*Writer).Flush", Method, 0},
+ {"(*Writer).Write", Method, 0},
+ {"(*Writer).WriteHeader", Method, 0},
+ {"(Format).String", Method, 10},
+ {"ErrFieldTooLong", Var, 0},
+ {"ErrHeader", Var, 0},
+ {"ErrInsecurePath", Var, 20},
+ {"ErrWriteAfterClose", Var, 0},
+ {"ErrWriteTooLong", Var, 0},
+ {"FileInfoHeader", Func, 1},
+ {"FileInfoNames", Type, 23},
+ {"Format", Type, 10},
+ {"FormatGNU", Const, 10},
+ {"FormatPAX", Const, 10},
+ {"FormatUSTAR", Const, 10},
+ {"FormatUnknown", Const, 10},
+ {"Header", Type, 0},
+ {"Header.AccessTime", Field, 0},
+ {"Header.ChangeTime", Field, 0},
+ {"Header.Devmajor", Field, 0},
+ {"Header.Devminor", Field, 0},
+ {"Header.Format", Field, 10},
+ {"Header.Gid", Field, 0},
+ {"Header.Gname", Field, 0},
+ {"Header.Linkname", Field, 0},
+ {"Header.ModTime", Field, 0},
+ {"Header.Mode", Field, 0},
+ {"Header.Name", Field, 0},
+ {"Header.PAXRecords", Field, 10},
+ {"Header.Size", Field, 0},
+ {"Header.Typeflag", Field, 0},
+ {"Header.Uid", Field, 0},
+ {"Header.Uname", Field, 0},
+ {"Header.Xattrs", Field, 3},
+ {"NewReader", Func, 0},
+ {"NewWriter", Func, 0},
+ {"Reader", Type, 0},
+ {"TypeBlock", Const, 0},
+ {"TypeChar", Const, 0},
+ {"TypeCont", Const, 0},
+ {"TypeDir", Const, 0},
+ {"TypeFifo", Const, 0},
+ {"TypeGNULongLink", Const, 1},
+ {"TypeGNULongName", Const, 1},
+ {"TypeGNUSparse", Const, 3},
+ {"TypeLink", Const, 0},
+ {"TypeReg", Const, 0},
+ {"TypeRegA", Const, 0},
+ {"TypeSymlink", Const, 0},
+ {"TypeXGlobalHeader", Const, 0},
+ {"TypeXHeader", Const, 0},
+ {"Writer", Type, 0},
+ },
+ "archive/zip": {
+ {"(*File).DataOffset", Method, 2},
+ {"(*File).FileInfo", Method, 0},
+ {"(*File).ModTime", Method, 0},
+ {"(*File).Mode", Method, 0},
+ {"(*File).Open", Method, 0},
+ {"(*File).OpenRaw", Method, 17},
+ {"(*File).SetModTime", Method, 0},
+ {"(*File).SetMode", Method, 0},
+ {"(*FileHeader).FileInfo", Method, 0},
+ {"(*FileHeader).ModTime", Method, 0},
+ {"(*FileHeader).Mode", Method, 0},
+ {"(*FileHeader).SetModTime", Method, 0},
+ {"(*FileHeader).SetMode", Method, 0},
+ {"(*ReadCloser).Close", Method, 0},
+ {"(*ReadCloser).Open", Method, 16},
+ {"(*ReadCloser).RegisterDecompressor", Method, 6},
+ {"(*Reader).Open", Method, 16},
+ {"(*Reader).RegisterDecompressor", Method, 6},
+ {"(*Writer).AddFS", Method, 22},
+ {"(*Writer).Close", Method, 0},
+ {"(*Writer).Copy", Method, 17},
+ {"(*Writer).Create", Method, 0},
+ {"(*Writer).CreateHeader", Method, 0},
+ {"(*Writer).CreateRaw", Method, 17},
+ {"(*Writer).Flush", Method, 4},
+ {"(*Writer).RegisterCompressor", Method, 6},
+ {"(*Writer).SetComment", Method, 10},
+ {"(*Writer).SetOffset", Method, 5},
+ {"Compressor", Type, 2},
+ {"Decompressor", Type, 2},
+ {"Deflate", Const, 0},
+ {"ErrAlgorithm", Var, 0},
+ {"ErrChecksum", Var, 0},
+ {"ErrFormat", Var, 0},
+ {"ErrInsecurePath", Var, 20},
+ {"File", Type, 0},
+ {"File.FileHeader", Field, 0},
+ {"FileHeader", Type, 0},
+ {"FileHeader.CRC32", Field, 0},
+ {"FileHeader.Comment", Field, 0},
+ {"FileHeader.CompressedSize", Field, 0},
+ {"FileHeader.CompressedSize64", Field, 1},
+ {"FileHeader.CreatorVersion", Field, 0},
+ {"FileHeader.ExternalAttrs", Field, 0},
+ {"FileHeader.Extra", Field, 0},
+ {"FileHeader.Flags", Field, 0},
+ {"FileHeader.Method", Field, 0},
+ {"FileHeader.Modified", Field, 10},
+ {"FileHeader.ModifiedDate", Field, 0},
+ {"FileHeader.ModifiedTime", Field, 0},
+ {"FileHeader.Name", Field, 0},
+ {"FileHeader.NonUTF8", Field, 10},
+ {"FileHeader.ReaderVersion", Field, 0},
+ {"FileHeader.UncompressedSize", Field, 0},
+ {"FileHeader.UncompressedSize64", Field, 1},
+ {"FileInfoHeader", Func, 0},
+ {"NewReader", Func, 0},
+ {"NewWriter", Func, 0},
+ {"OpenReader", Func, 0},
+ {"ReadCloser", Type, 0},
+ {"ReadCloser.Reader", Field, 0},
+ {"Reader", Type, 0},
+ {"Reader.Comment", Field, 0},
+ {"Reader.File", Field, 0},
+ {"RegisterCompressor", Func, 2},
+ {"RegisterDecompressor", Func, 2},
+ {"Store", Const, 0},
+ {"Writer", Type, 0},
+ },
+ "bufio": {
+ {"(*Reader).Buffered", Method, 0},
+ {"(*Reader).Discard", Method, 5},
+ {"(*Reader).Peek", Method, 0},
+ {"(*Reader).Read", Method, 0},
+ {"(*Reader).ReadByte", Method, 0},
+ {"(*Reader).ReadBytes", Method, 0},
+ {"(*Reader).ReadLine", Method, 0},
+ {"(*Reader).ReadRune", Method, 0},
+ {"(*Reader).ReadSlice", Method, 0},
+ {"(*Reader).ReadString", Method, 0},
+ {"(*Reader).Reset", Method, 2},
+ {"(*Reader).Size", Method, 10},
+ {"(*Reader).UnreadByte", Method, 0},
+ {"(*Reader).UnreadRune", Method, 0},
+ {"(*Reader).WriteTo", Method, 1},
+ {"(*Scanner).Buffer", Method, 6},
+ {"(*Scanner).Bytes", Method, 1},
+ {"(*Scanner).Err", Method, 1},
+ {"(*Scanner).Scan", Method, 1},
+ {"(*Scanner).Split", Method, 1},
+ {"(*Scanner).Text", Method, 1},
+ {"(*Writer).Available", Method, 0},
+ {"(*Writer).AvailableBuffer", Method, 18},
+ {"(*Writer).Buffered", Method, 0},
+ {"(*Writer).Flush", Method, 0},
+ {"(*Writer).ReadFrom", Method, 1},
+ {"(*Writer).Reset", Method, 2},
+ {"(*Writer).Size", Method, 10},
+ {"(*Writer).Write", Method, 0},
+ {"(*Writer).WriteByte", Method, 0},
+ {"(*Writer).WriteRune", Method, 0},
+ {"(*Writer).WriteString", Method, 0},
+ {"(ReadWriter).Available", Method, 0},
+ {"(ReadWriter).AvailableBuffer", Method, 18},
+ {"(ReadWriter).Discard", Method, 5},
+ {"(ReadWriter).Flush", Method, 0},
+ {"(ReadWriter).Peek", Method, 0},
+ {"(ReadWriter).Read", Method, 0},
+ {"(ReadWriter).ReadByte", Method, 0},
+ {"(ReadWriter).ReadBytes", Method, 0},
+ {"(ReadWriter).ReadFrom", Method, 1},
+ {"(ReadWriter).ReadLine", Method, 0},
+ {"(ReadWriter).ReadRune", Method, 0},
+ {"(ReadWriter).ReadSlice", Method, 0},
+ {"(ReadWriter).ReadString", Method, 0},
+ {"(ReadWriter).UnreadByte", Method, 0},
+ {"(ReadWriter).UnreadRune", Method, 0},
+ {"(ReadWriter).Write", Method, 0},
+ {"(ReadWriter).WriteByte", Method, 0},
+ {"(ReadWriter).WriteRune", Method, 0},
+ {"(ReadWriter).WriteString", Method, 0},
+ {"(ReadWriter).WriteTo", Method, 1},
+ {"ErrAdvanceTooFar", Var, 1},
+ {"ErrBadReadCount", Var, 15},
+ {"ErrBufferFull", Var, 0},
+ {"ErrFinalToken", Var, 6},
+ {"ErrInvalidUnreadByte", Var, 0},
+ {"ErrInvalidUnreadRune", Var, 0},
+ {"ErrNegativeAdvance", Var, 1},
+ {"ErrNegativeCount", Var, 0},
+ {"ErrTooLong", Var, 1},
+ {"MaxScanTokenSize", Const, 1},
+ {"NewReadWriter", Func, 0},
+ {"NewReader", Func, 0},
+ {"NewReaderSize", Func, 0},
+ {"NewScanner", Func, 1},
+ {"NewWriter", Func, 0},
+ {"NewWriterSize", Func, 0},
+ {"ReadWriter", Type, 0},
+ {"ReadWriter.Reader", Field, 0},
+ {"ReadWriter.Writer", Field, 0},
+ {"Reader", Type, 0},
+ {"ScanBytes", Func, 1},
+ {"ScanLines", Func, 1},
+ {"ScanRunes", Func, 1},
+ {"ScanWords", Func, 1},
+ {"Scanner", Type, 1},
+ {"SplitFunc", Type, 1},
+ {"Writer", Type, 0},
+ },
+ "bytes": {
+ {"(*Buffer).Available", Method, 21},
+ {"(*Buffer).AvailableBuffer", Method, 21},
+ {"(*Buffer).Bytes", Method, 0},
+ {"(*Buffer).Cap", Method, 5},
+ {"(*Buffer).Grow", Method, 1},
+ {"(*Buffer).Len", Method, 0},
+ {"(*Buffer).Next", Method, 0},
+ {"(*Buffer).Read", Method, 0},
+ {"(*Buffer).ReadByte", Method, 0},
+ {"(*Buffer).ReadBytes", Method, 0},
+ {"(*Buffer).ReadFrom", Method, 0},
+ {"(*Buffer).ReadRune", Method, 0},
+ {"(*Buffer).ReadString", Method, 0},
+ {"(*Buffer).Reset", Method, 0},
+ {"(*Buffer).String", Method, 0},
+ {"(*Buffer).Truncate", Method, 0},
+ {"(*Buffer).UnreadByte", Method, 0},
+ {"(*Buffer).UnreadRune", Method, 0},
+ {"(*Buffer).Write", Method, 0},
+ {"(*Buffer).WriteByte", Method, 0},
+ {"(*Buffer).WriteRune", Method, 0},
+ {"(*Buffer).WriteString", Method, 0},
+ {"(*Buffer).WriteTo", Method, 0},
+ {"(*Reader).Len", Method, 0},
+ {"(*Reader).Read", Method, 0},
+ {"(*Reader).ReadAt", Method, 0},
+ {"(*Reader).ReadByte", Method, 0},
+ {"(*Reader).ReadRune", Method, 0},
+ {"(*Reader).Reset", Method, 7},
+ {"(*Reader).Seek", Method, 0},
+ {"(*Reader).Size", Method, 5},
+ {"(*Reader).UnreadByte", Method, 0},
+ {"(*Reader).UnreadRune", Method, 0},
+ {"(*Reader).WriteTo", Method, 1},
+ {"Buffer", Type, 0},
+ {"Clone", Func, 20},
+ {"Compare", Func, 0},
+ {"Contains", Func, 0},
+ {"ContainsAny", Func, 7},
+ {"ContainsFunc", Func, 21},
+ {"ContainsRune", Func, 7},
+ {"Count", Func, 0},
+ {"Cut", Func, 18},
+ {"CutPrefix", Func, 20},
+ {"CutSuffix", Func, 20},
+ {"Equal", Func, 0},
+ {"EqualFold", Func, 0},
+ {"ErrTooLarge", Var, 0},
+ {"Fields", Func, 0},
+ {"FieldsFunc", Func, 0},
+ {"HasPrefix", Func, 0},
+ {"HasSuffix", Func, 0},
+ {"Index", Func, 0},
+ {"IndexAny", Func, 0},
+ {"IndexByte", Func, 0},
+ {"IndexFunc", Func, 0},
+ {"IndexRune", Func, 0},
+ {"Join", Func, 0},
+ {"LastIndex", Func, 0},
+ {"LastIndexAny", Func, 0},
+ {"LastIndexByte", Func, 5},
+ {"LastIndexFunc", Func, 0},
+ {"Map", Func, 0},
+ {"MinRead", Const, 0},
+ {"NewBuffer", Func, 0},
+ {"NewBufferString", Func, 0},
+ {"NewReader", Func, 0},
+ {"Reader", Type, 0},
+ {"Repeat", Func, 0},
+ {"Replace", Func, 0},
+ {"ReplaceAll", Func, 12},
+ {"Runes", Func, 0},
+ {"Split", Func, 0},
+ {"SplitAfter", Func, 0},
+ {"SplitAfterN", Func, 0},
+ {"SplitN", Func, 0},
+ {"Title", Func, 0},
+ {"ToLower", Func, 0},
+ {"ToLowerSpecial", Func, 0},
+ {"ToTitle", Func, 0},
+ {"ToTitleSpecial", Func, 0},
+ {"ToUpper", Func, 0},
+ {"ToUpperSpecial", Func, 0},
+ {"ToValidUTF8", Func, 13},
+ {"Trim", Func, 0},
+ {"TrimFunc", Func, 0},
+ {"TrimLeft", Func, 0},
+ {"TrimLeftFunc", Func, 0},
+ {"TrimPrefix", Func, 1},
+ {"TrimRight", Func, 0},
+ {"TrimRightFunc", Func, 0},
+ {"TrimSpace", Func, 0},
+ {"TrimSuffix", Func, 1},
+ },
+ "cmp": {
+ {"Compare", Func, 21},
+ {"Less", Func, 21},
+ {"Or", Func, 22},
+ {"Ordered", Type, 21},
+ },
+ "compress/bzip2": {
+ {"(StructuralError).Error", Method, 0},
+ {"NewReader", Func, 0},
+ {"StructuralError", Type, 0},
+ },
+ "compress/flate": {
+ {"(*ReadError).Error", Method, 0},
+ {"(*WriteError).Error", Method, 0},
+ {"(*Writer).Close", Method, 0},
+ {"(*Writer).Flush", Method, 0},
+ {"(*Writer).Reset", Method, 2},
+ {"(*Writer).Write", Method, 0},
+ {"(CorruptInputError).Error", Method, 0},
+ {"(InternalError).Error", Method, 0},
+ {"BestCompression", Const, 0},
+ {"BestSpeed", Const, 0},
+ {"CorruptInputError", Type, 0},
+ {"DefaultCompression", Const, 0},
+ {"HuffmanOnly", Const, 7},
+ {"InternalError", Type, 0},
+ {"NewReader", Func, 0},
+ {"NewReaderDict", Func, 0},
+ {"NewWriter", Func, 0},
+ {"NewWriterDict", Func, 0},
+ {"NoCompression", Const, 0},
+ {"ReadError", Type, 0},
+ {"ReadError.Err", Field, 0},
+ {"ReadError.Offset", Field, 0},
+ {"Reader", Type, 0},
+ {"Resetter", Type, 4},
+ {"WriteError", Type, 0},
+ {"WriteError.Err", Field, 0},
+ {"WriteError.Offset", Field, 0},
+ {"Writer", Type, 0},
+ },
+ "compress/gzip": {
+ {"(*Reader).Close", Method, 0},
+ {"(*Reader).Multistream", Method, 4},
+ {"(*Reader).Read", Method, 0},
+ {"(*Reader).Reset", Method, 3},
+ {"(*Writer).Close", Method, 0},
+ {"(*Writer).Flush", Method, 1},
+ {"(*Writer).Reset", Method, 2},
+ {"(*Writer).Write", Method, 0},
+ {"BestCompression", Const, 0},
+ {"BestSpeed", Const, 0},
+ {"DefaultCompression", Const, 0},
+ {"ErrChecksum", Var, 0},
+ {"ErrHeader", Var, 0},
+ {"Header", Type, 0},
+ {"Header.Comment", Field, 0},
+ {"Header.Extra", Field, 0},
+ {"Header.ModTime", Field, 0},
+ {"Header.Name", Field, 0},
+ {"Header.OS", Field, 0},
+ {"HuffmanOnly", Const, 8},
+ {"NewReader", Func, 0},
+ {"NewWriter", Func, 0},
+ {"NewWriterLevel", Func, 0},
+ {"NoCompression", Const, 0},
+ {"Reader", Type, 0},
+ {"Reader.Header", Field, 0},
+ {"Writer", Type, 0},
+ {"Writer.Header", Field, 0},
+ },
+ "compress/lzw": {
+ {"(*Reader).Close", Method, 17},
+ {"(*Reader).Read", Method, 17},
+ {"(*Reader).Reset", Method, 17},
+ {"(*Writer).Close", Method, 17},
+ {"(*Writer).Reset", Method, 17},
+ {"(*Writer).Write", Method, 17},
+ {"LSB", Const, 0},
+ {"MSB", Const, 0},
+ {"NewReader", Func, 0},
+ {"NewWriter", Func, 0},
+ {"Order", Type, 0},
+ {"Reader", Type, 17},
+ {"Writer", Type, 17},
+ },
+ "compress/zlib": {
+ {"(*Writer).Close", Method, 0},
+ {"(*Writer).Flush", Method, 0},
+ {"(*Writer).Reset", Method, 2},
+ {"(*Writer).Write", Method, 0},
+ {"BestCompression", Const, 0},
+ {"BestSpeed", Const, 0},
+ {"DefaultCompression", Const, 0},
+ {"ErrChecksum", Var, 0},
+ {"ErrDictionary", Var, 0},
+ {"ErrHeader", Var, 0},
+ {"HuffmanOnly", Const, 8},
+ {"NewReader", Func, 0},
+ {"NewReaderDict", Func, 0},
+ {"NewWriter", Func, 0},
+ {"NewWriterLevel", Func, 0},
+ {"NewWriterLevelDict", Func, 0},
+ {"NoCompression", Const, 0},
+ {"Resetter", Type, 4},
+ {"Writer", Type, 0},
+ },
+ "container/heap": {
+ {"Fix", Func, 2},
+ {"Init", Func, 0},
+ {"Interface", Type, 0},
+ {"Pop", Func, 0},
+ {"Push", Func, 0},
+ {"Remove", Func, 0},
+ },
+ "container/list": {
+ {"(*Element).Next", Method, 0},
+ {"(*Element).Prev", Method, 0},
+ {"(*List).Back", Method, 0},
+ {"(*List).Front", Method, 0},
+ {"(*List).Init", Method, 0},
+ {"(*List).InsertAfter", Method, 0},
+ {"(*List).InsertBefore", Method, 0},
+ {"(*List).Len", Method, 0},
+ {"(*List).MoveAfter", Method, 2},
+ {"(*List).MoveBefore", Method, 2},
+ {"(*List).MoveToBack", Method, 0},
+ {"(*List).MoveToFront", Method, 0},
+ {"(*List).PushBack", Method, 0},
+ {"(*List).PushBackList", Method, 0},
+ {"(*List).PushFront", Method, 0},
+ {"(*List).PushFrontList", Method, 0},
+ {"(*List).Remove", Method, 0},
+ {"Element", Type, 0},
+ {"Element.Value", Field, 0},
+ {"List", Type, 0},
+ {"New", Func, 0},
+ },
+ "container/ring": {
+ {"(*Ring).Do", Method, 0},
+ {"(*Ring).Len", Method, 0},
+ {"(*Ring).Link", Method, 0},
+ {"(*Ring).Move", Method, 0},
+ {"(*Ring).Next", Method, 0},
+ {"(*Ring).Prev", Method, 0},
+ {"(*Ring).Unlink", Method, 0},
+ {"New", Func, 0},
+ {"Ring", Type, 0},
+ {"Ring.Value", Field, 0},
+ },
+ "context": {
+ {"AfterFunc", Func, 21},
+ {"Background", Func, 7},
+ {"CancelCauseFunc", Type, 20},
+ {"CancelFunc", Type, 7},
+ {"Canceled", Var, 7},
+ {"Cause", Func, 20},
+ {"Context", Type, 7},
+ {"DeadlineExceeded", Var, 7},
+ {"TODO", Func, 7},
+ {"WithCancel", Func, 7},
+ {"WithCancelCause", Func, 20},
+ {"WithDeadline", Func, 7},
+ {"WithDeadlineCause", Func, 21},
+ {"WithTimeout", Func, 7},
+ {"WithTimeoutCause", Func, 21},
+ {"WithValue", Func, 7},
+ {"WithoutCancel", Func, 21},
+ },
+ "crypto": {
+ {"(Hash).Available", Method, 0},
+ {"(Hash).HashFunc", Method, 4},
+ {"(Hash).New", Method, 0},
+ {"(Hash).Size", Method, 0},
+ {"(Hash).String", Method, 15},
+ {"BLAKE2b_256", Const, 9},
+ {"BLAKE2b_384", Const, 9},
+ {"BLAKE2b_512", Const, 9},
+ {"BLAKE2s_256", Const, 9},
+ {"Decrypter", Type, 5},
+ {"DecrypterOpts", Type, 5},
+ {"Hash", Type, 0},
+ {"MD4", Const, 0},
+ {"MD5", Const, 0},
+ {"MD5SHA1", Const, 0},
+ {"PrivateKey", Type, 0},
+ {"PublicKey", Type, 2},
+ {"RIPEMD160", Const, 0},
+ {"RegisterHash", Func, 0},
+ {"SHA1", Const, 0},
+ {"SHA224", Const, 0},
+ {"SHA256", Const, 0},
+ {"SHA384", Const, 0},
+ {"SHA3_224", Const, 4},
+ {"SHA3_256", Const, 4},
+ {"SHA3_384", Const, 4},
+ {"SHA3_512", Const, 4},
+ {"SHA512", Const, 0},
+ {"SHA512_224", Const, 5},
+ {"SHA512_256", Const, 5},
+ {"Signer", Type, 4},
+ {"SignerOpts", Type, 4},
+ },
+ "crypto/aes": {
+ {"(KeySizeError).Error", Method, 0},
+ {"BlockSize", Const, 0},
+ {"KeySizeError", Type, 0},
+ {"NewCipher", Func, 0},
+ },
+ "crypto/cipher": {
+ {"(StreamReader).Read", Method, 0},
+ {"(StreamWriter).Close", Method, 0},
+ {"(StreamWriter).Write", Method, 0},
+ {"AEAD", Type, 2},
+ {"Block", Type, 0},
+ {"BlockMode", Type, 0},
+ {"NewCBCDecrypter", Func, 0},
+ {"NewCBCEncrypter", Func, 0},
+ {"NewCFBDecrypter", Func, 0},
+ {"NewCFBEncrypter", Func, 0},
+ {"NewCTR", Func, 0},
+ {"NewGCM", Func, 2},
+ {"NewGCMWithNonceSize", Func, 5},
+ {"NewGCMWithTagSize", Func, 11},
+ {"NewOFB", Func, 0},
+ {"Stream", Type, 0},
+ {"StreamReader", Type, 0},
+ {"StreamReader.R", Field, 0},
+ {"StreamReader.S", Field, 0},
+ {"StreamWriter", Type, 0},
+ {"StreamWriter.Err", Field, 0},
+ {"StreamWriter.S", Field, 0},
+ {"StreamWriter.W", Field, 0},
+ },
+ "crypto/des": {
+ {"(KeySizeError).Error", Method, 0},
+ {"BlockSize", Const, 0},
+ {"KeySizeError", Type, 0},
+ {"NewCipher", Func, 0},
+ {"NewTripleDESCipher", Func, 0},
+ },
+ "crypto/dsa": {
+ {"ErrInvalidPublicKey", Var, 0},
+ {"GenerateKey", Func, 0},
+ {"GenerateParameters", Func, 0},
+ {"L1024N160", Const, 0},
+ {"L2048N224", Const, 0},
+ {"L2048N256", Const, 0},
+ {"L3072N256", Const, 0},
+ {"ParameterSizes", Type, 0},
+ {"Parameters", Type, 0},
+ {"Parameters.G", Field, 0},
+ {"Parameters.P", Field, 0},
+ {"Parameters.Q", Field, 0},
+ {"PrivateKey", Type, 0},
+ {"PrivateKey.PublicKey", Field, 0},
+ {"PrivateKey.X", Field, 0},
+ {"PublicKey", Type, 0},
+ {"PublicKey.Parameters", Field, 0},
+ {"PublicKey.Y", Field, 0},
+ {"Sign", Func, 0},
+ {"Verify", Func, 0},
+ },
+ "crypto/ecdh": {
+ {"(*PrivateKey).Bytes", Method, 20},
+ {"(*PrivateKey).Curve", Method, 20},
+ {"(*PrivateKey).ECDH", Method, 20},
+ {"(*PrivateKey).Equal", Method, 20},
+ {"(*PrivateKey).Public", Method, 20},
+ {"(*PrivateKey).PublicKey", Method, 20},
+ {"(*PublicKey).Bytes", Method, 20},
+ {"(*PublicKey).Curve", Method, 20},
+ {"(*PublicKey).Equal", Method, 20},
+ {"Curve", Type, 20},
+ {"P256", Func, 20},
+ {"P384", Func, 20},
+ {"P521", Func, 20},
+ {"PrivateKey", Type, 20},
+ {"PublicKey", Type, 20},
+ {"X25519", Func, 20},
+ },
+ "crypto/ecdsa": {
+ {"(*PrivateKey).ECDH", Method, 20},
+ {"(*PrivateKey).Equal", Method, 15},
+ {"(*PrivateKey).Public", Method, 4},
+ {"(*PrivateKey).Sign", Method, 4},
+ {"(*PublicKey).ECDH", Method, 20},
+ {"(*PublicKey).Equal", Method, 15},
+ {"(PrivateKey).Add", Method, 0},
+ {"(PrivateKey).Double", Method, 0},
+ {"(PrivateKey).IsOnCurve", Method, 0},
+ {"(PrivateKey).Params", Method, 0},
+ {"(PrivateKey).ScalarBaseMult", Method, 0},
+ {"(PrivateKey).ScalarMult", Method, 0},
+ {"(PublicKey).Add", Method, 0},
+ {"(PublicKey).Double", Method, 0},
+ {"(PublicKey).IsOnCurve", Method, 0},
+ {"(PublicKey).Params", Method, 0},
+ {"(PublicKey).ScalarBaseMult", Method, 0},
+ {"(PublicKey).ScalarMult", Method, 0},
+ {"GenerateKey", Func, 0},
+ {"PrivateKey", Type, 0},
+ {"PrivateKey.D", Field, 0},
+ {"PrivateKey.PublicKey", Field, 0},
+ {"PublicKey", Type, 0},
+ {"PublicKey.Curve", Field, 0},
+ {"PublicKey.X", Field, 0},
+ {"PublicKey.Y", Field, 0},
+ {"Sign", Func, 0},
+ {"SignASN1", Func, 15},
+ {"Verify", Func, 0},
+ {"VerifyASN1", Func, 15},
+ },
+ "crypto/ed25519": {
+ {"(*Options).HashFunc", Method, 20},
+ {"(PrivateKey).Equal", Method, 15},
+ {"(PrivateKey).Public", Method, 13},
+ {"(PrivateKey).Seed", Method, 13},
+ {"(PrivateKey).Sign", Method, 13},
+ {"(PublicKey).Equal", Method, 15},
+ {"GenerateKey", Func, 13},
+ {"NewKeyFromSeed", Func, 13},
+ {"Options", Type, 20},
+ {"Options.Context", Field, 20},
+ {"Options.Hash", Field, 20},
+ {"PrivateKey", Type, 13},
+ {"PrivateKeySize", Const, 13},
+ {"PublicKey", Type, 13},
+ {"PublicKeySize", Const, 13},
+ {"SeedSize", Const, 13},
+ {"Sign", Func, 13},
+ {"SignatureSize", Const, 13},
+ {"Verify", Func, 13},
+ {"VerifyWithOptions", Func, 20},
+ },
+ "crypto/elliptic": {
+ {"(*CurveParams).Add", Method, 0},
+ {"(*CurveParams).Double", Method, 0},
+ {"(*CurveParams).IsOnCurve", Method, 0},
+ {"(*CurveParams).Params", Method, 0},
+ {"(*CurveParams).ScalarBaseMult", Method, 0},
+ {"(*CurveParams).ScalarMult", Method, 0},
+ {"Curve", Type, 0},
+ {"CurveParams", Type, 0},
+ {"CurveParams.B", Field, 0},
+ {"CurveParams.BitSize", Field, 0},
+ {"CurveParams.Gx", Field, 0},
+ {"CurveParams.Gy", Field, 0},
+ {"CurveParams.N", Field, 0},
+ {"CurveParams.Name", Field, 5},
+ {"CurveParams.P", Field, 0},
+ {"GenerateKey", Func, 0},
+ {"Marshal", Func, 0},
+ {"MarshalCompressed", Func, 15},
+ {"P224", Func, 0},
+ {"P256", Func, 0},
+ {"P384", Func, 0},
+ {"P521", Func, 0},
+ {"Unmarshal", Func, 0},
+ {"UnmarshalCompressed", Func, 15},
+ },
+ "crypto/hmac": {
+ {"Equal", Func, 1},
+ {"New", Func, 0},
+ },
+ "crypto/md5": {
+ {"BlockSize", Const, 0},
+ {"New", Func, 0},
+ {"Size", Const, 0},
+ {"Sum", Func, 2},
+ },
+ "crypto/rand": {
+ {"Int", Func, 0},
+ {"Prime", Func, 0},
+ {"Read", Func, 0},
+ {"Reader", Var, 0},
+ },
+ "crypto/rc4": {
+ {"(*Cipher).Reset", Method, 0},
+ {"(*Cipher).XORKeyStream", Method, 0},
+ {"(KeySizeError).Error", Method, 0},
+ {"Cipher", Type, 0},
+ {"KeySizeError", Type, 0},
+ {"NewCipher", Func, 0},
+ },
+ "crypto/rsa": {
+ {"(*PSSOptions).HashFunc", Method, 4},
+ {"(*PrivateKey).Decrypt", Method, 5},
+ {"(*PrivateKey).Equal", Method, 15},
+ {"(*PrivateKey).Precompute", Method, 0},
+ {"(*PrivateKey).Public", Method, 4},
+ {"(*PrivateKey).Sign", Method, 4},
+ {"(*PrivateKey).Size", Method, 11},
+ {"(*PrivateKey).Validate", Method, 0},
+ {"(*PublicKey).Equal", Method, 15},
+ {"(*PublicKey).Size", Method, 11},
+ {"CRTValue", Type, 0},
+ {"CRTValue.Coeff", Field, 0},
+ {"CRTValue.Exp", Field, 0},
+ {"CRTValue.R", Field, 0},
+ {"DecryptOAEP", Func, 0},
+ {"DecryptPKCS1v15", Func, 0},
+ {"DecryptPKCS1v15SessionKey", Func, 0},
+ {"EncryptOAEP", Func, 0},
+ {"EncryptPKCS1v15", Func, 0},
+ {"ErrDecryption", Var, 0},
+ {"ErrMessageTooLong", Var, 0},
+ {"ErrVerification", Var, 0},
+ {"GenerateKey", Func, 0},
+ {"GenerateMultiPrimeKey", Func, 0},
+ {"OAEPOptions", Type, 5},
+ {"OAEPOptions.Hash", Field, 5},
+ {"OAEPOptions.Label", Field, 5},
+ {"OAEPOptions.MGFHash", Field, 20},
+ {"PKCS1v15DecryptOptions", Type, 5},
+ {"PKCS1v15DecryptOptions.SessionKeyLen", Field, 5},
+ {"PSSOptions", Type, 2},
+ {"PSSOptions.Hash", Field, 4},
+ {"PSSOptions.SaltLength", Field, 2},
+ {"PSSSaltLengthAuto", Const, 2},
+ {"PSSSaltLengthEqualsHash", Const, 2},
+ {"PrecomputedValues", Type, 0},
+ {"PrecomputedValues.CRTValues", Field, 0},
+ {"PrecomputedValues.Dp", Field, 0},
+ {"PrecomputedValues.Dq", Field, 0},
+ {"PrecomputedValues.Qinv", Field, 0},
+ {"PrivateKey", Type, 0},
+ {"PrivateKey.D", Field, 0},
+ {"PrivateKey.Precomputed", Field, 0},
+ {"PrivateKey.Primes", Field, 0},
+ {"PrivateKey.PublicKey", Field, 0},
+ {"PublicKey", Type, 0},
+ {"PublicKey.E", Field, 0},
+ {"PublicKey.N", Field, 0},
+ {"SignPKCS1v15", Func, 0},
+ {"SignPSS", Func, 2},
+ {"VerifyPKCS1v15", Func, 0},
+ {"VerifyPSS", Func, 2},
+ },
+ "crypto/sha1": {
+ {"BlockSize", Const, 0},
+ {"New", Func, 0},
+ {"Size", Const, 0},
+ {"Sum", Func, 2},
+ },
+ "crypto/sha256": {
+ {"BlockSize", Const, 0},
+ {"New", Func, 0},
+ {"New224", Func, 0},
+ {"Size", Const, 0},
+ {"Size224", Const, 0},
+ {"Sum224", Func, 2},
+ {"Sum256", Func, 2},
+ },
+ "crypto/sha512": {
+ {"BlockSize", Const, 0},
+ {"New", Func, 0},
+ {"New384", Func, 0},
+ {"New512_224", Func, 5},
+ {"New512_256", Func, 5},
+ {"Size", Const, 0},
+ {"Size224", Const, 5},
+ {"Size256", Const, 5},
+ {"Size384", Const, 0},
+ {"Sum384", Func, 2},
+ {"Sum512", Func, 2},
+ {"Sum512_224", Func, 5},
+ {"Sum512_256", Func, 5},
+ },
+ "crypto/subtle": {
+ {"ConstantTimeByteEq", Func, 0},
+ {"ConstantTimeCompare", Func, 0},
+ {"ConstantTimeCopy", Func, 0},
+ {"ConstantTimeEq", Func, 0},
+ {"ConstantTimeLessOrEq", Func, 2},
+ {"ConstantTimeSelect", Func, 0},
+ {"XORBytes", Func, 20},
+ },
+ "crypto/tls": {
+ {"(*CertificateRequestInfo).Context", Method, 17},
+ {"(*CertificateRequestInfo).SupportsCertificate", Method, 14},
+ {"(*CertificateVerificationError).Error", Method, 20},
+ {"(*CertificateVerificationError).Unwrap", Method, 20},
+ {"(*ClientHelloInfo).Context", Method, 17},
+ {"(*ClientHelloInfo).SupportsCertificate", Method, 14},
+ {"(*ClientSessionState).ResumptionState", Method, 21},
+ {"(*Config).BuildNameToCertificate", Method, 0},
+ {"(*Config).Clone", Method, 8},
+ {"(*Config).DecryptTicket", Method, 21},
+ {"(*Config).EncryptTicket", Method, 21},
+ {"(*Config).SetSessionTicketKeys", Method, 5},
+ {"(*Conn).Close", Method, 0},
+ {"(*Conn).CloseWrite", Method, 8},
+ {"(*Conn).ConnectionState", Method, 0},
+ {"(*Conn).Handshake", Method, 0},
+ {"(*Conn).HandshakeContext", Method, 17},
+ {"(*Conn).LocalAddr", Method, 0},
+ {"(*Conn).NetConn", Method, 18},
+ {"(*Conn).OCSPResponse", Method, 0},
+ {"(*Conn).Read", Method, 0},
+ {"(*Conn).RemoteAddr", Method, 0},
+ {"(*Conn).SetDeadline", Method, 0},
+ {"(*Conn).SetReadDeadline", Method, 0},
+ {"(*Conn).SetWriteDeadline", Method, 0},
+ {"(*Conn).VerifyHostname", Method, 0},
+ {"(*Conn).Write", Method, 0},
+ {"(*ConnectionState).ExportKeyingMaterial", Method, 11},
+ {"(*Dialer).Dial", Method, 15},
+ {"(*Dialer).DialContext", Method, 15},
+ {"(*ECHRejectionError).Error", Method, 23},
+ {"(*QUICConn).Close", Method, 21},
+ {"(*QUICConn).ConnectionState", Method, 21},
+ {"(*QUICConn).HandleData", Method, 21},
+ {"(*QUICConn).NextEvent", Method, 21},
+ {"(*QUICConn).SendSessionTicket", Method, 21},
+ {"(*QUICConn).SetTransportParameters", Method, 21},
+ {"(*QUICConn).Start", Method, 21},
+ {"(*QUICConn).StoreSession", Method, 23},
+ {"(*SessionState).Bytes", Method, 21},
+ {"(AlertError).Error", Method, 21},
+ {"(ClientAuthType).String", Method, 15},
+ {"(CurveID).String", Method, 15},
+ {"(QUICEncryptionLevel).String", Method, 21},
+ {"(RecordHeaderError).Error", Method, 6},
+ {"(SignatureScheme).String", Method, 15},
+ {"AlertError", Type, 21},
+ {"Certificate", Type, 0},
+ {"Certificate.Certificate", Field, 0},
+ {"Certificate.Leaf", Field, 0},
+ {"Certificate.OCSPStaple", Field, 0},
+ {"Certificate.PrivateKey", Field, 0},
+ {"Certificate.SignedCertificateTimestamps", Field, 5},
+ {"Certificate.SupportedSignatureAlgorithms", Field, 14},
+ {"CertificateRequestInfo", Type, 8},
+ {"CertificateRequestInfo.AcceptableCAs", Field, 8},
+ {"CertificateRequestInfo.SignatureSchemes", Field, 8},
+ {"CertificateRequestInfo.Version", Field, 14},
+ {"CertificateVerificationError", Type, 20},
+ {"CertificateVerificationError.Err", Field, 20},
+ {"CertificateVerificationError.UnverifiedCertificates", Field, 20},
+ {"CipherSuite", Type, 14},
+ {"CipherSuite.ID", Field, 14},
+ {"CipherSuite.Insecure", Field, 14},
+ {"CipherSuite.Name", Field, 14},
+ {"CipherSuite.SupportedVersions", Field, 14},
+ {"CipherSuiteName", Func, 14},
+ {"CipherSuites", Func, 14},
+ {"Client", Func, 0},
+ {"ClientAuthType", Type, 0},
+ {"ClientHelloInfo", Type, 4},
+ {"ClientHelloInfo.CipherSuites", Field, 4},
+ {"ClientHelloInfo.Conn", Field, 8},
+ {"ClientHelloInfo.ServerName", Field, 4},
+ {"ClientHelloInfo.SignatureSchemes", Field, 8},
+ {"ClientHelloInfo.SupportedCurves", Field, 4},
+ {"ClientHelloInfo.SupportedPoints", Field, 4},
+ {"ClientHelloInfo.SupportedProtos", Field, 8},
+ {"ClientHelloInfo.SupportedVersions", Field, 8},
+ {"ClientSessionCache", Type, 3},
+ {"ClientSessionState", Type, 3},
+ {"Config", Type, 0},
+ {"Config.Certificates", Field, 0},
+ {"Config.CipherSuites", Field, 0},
+ {"Config.ClientAuth", Field, 0},
+ {"Config.ClientCAs", Field, 0},
+ {"Config.ClientSessionCache", Field, 3},
+ {"Config.CurvePreferences", Field, 3},
+ {"Config.DynamicRecordSizingDisabled", Field, 7},
+ {"Config.EncryptedClientHelloConfigList", Field, 23},
+ {"Config.EncryptedClientHelloRejectionVerify", Field, 23},
+ {"Config.GetCertificate", Field, 4},
+ {"Config.GetClientCertificate", Field, 8},
+ {"Config.GetConfigForClient", Field, 8},
+ {"Config.InsecureSkipVerify", Field, 0},
+ {"Config.KeyLogWriter", Field, 8},
+ {"Config.MaxVersion", Field, 2},
+ {"Config.MinVersion", Field, 2},
+ {"Config.NameToCertificate", Field, 0},
+ {"Config.NextProtos", Field, 0},
+ {"Config.PreferServerCipherSuites", Field, 1},
+ {"Config.Rand", Field, 0},
+ {"Config.Renegotiation", Field, 7},
+ {"Config.RootCAs", Field, 0},
+ {"Config.ServerName", Field, 0},
+ {"Config.SessionTicketKey", Field, 1},
+ {"Config.SessionTicketsDisabled", Field, 1},
+ {"Config.Time", Field, 0},
+ {"Config.UnwrapSession", Field, 21},
+ {"Config.VerifyConnection", Field, 15},
+ {"Config.VerifyPeerCertificate", Field, 8},
+ {"Config.WrapSession", Field, 21},
+ {"Conn", Type, 0},
+ {"ConnectionState", Type, 0},
+ {"ConnectionState.CipherSuite", Field, 0},
+ {"ConnectionState.DidResume", Field, 1},
+ {"ConnectionState.ECHAccepted", Field, 23},
+ {"ConnectionState.HandshakeComplete", Field, 0},
+ {"ConnectionState.NegotiatedProtocol", Field, 0},
+ {"ConnectionState.NegotiatedProtocolIsMutual", Field, 0},
+ {"ConnectionState.OCSPResponse", Field, 5},
+ {"ConnectionState.PeerCertificates", Field, 0},
+ {"ConnectionState.ServerName", Field, 0},
+ {"ConnectionState.SignedCertificateTimestamps", Field, 5},
+ {"ConnectionState.TLSUnique", Field, 4},
+ {"ConnectionState.VerifiedChains", Field, 0},
+ {"ConnectionState.Version", Field, 3},
+ {"CurveID", Type, 3},
+ {"CurveP256", Const, 3},
+ {"CurveP384", Const, 3},
+ {"CurveP521", Const, 3},
+ {"Dial", Func, 0},
+ {"DialWithDialer", Func, 3},
+ {"Dialer", Type, 15},
+ {"Dialer.Config", Field, 15},
+ {"Dialer.NetDialer", Field, 15},
+ {"ECDSAWithP256AndSHA256", Const, 8},
+ {"ECDSAWithP384AndSHA384", Const, 8},
+ {"ECDSAWithP521AndSHA512", Const, 8},
+ {"ECDSAWithSHA1", Const, 10},
+ {"ECHRejectionError", Type, 23},
+ {"ECHRejectionError.RetryConfigList", Field, 23},
+ {"Ed25519", Const, 13},
+ {"InsecureCipherSuites", Func, 14},
+ {"Listen", Func, 0},
+ {"LoadX509KeyPair", Func, 0},
+ {"NewLRUClientSessionCache", Func, 3},
+ {"NewListener", Func, 0},
+ {"NewResumptionState", Func, 21},
+ {"NoClientCert", Const, 0},
+ {"PKCS1WithSHA1", Const, 8},
+ {"PKCS1WithSHA256", Const, 8},
+ {"PKCS1WithSHA384", Const, 8},
+ {"PKCS1WithSHA512", Const, 8},
+ {"PSSWithSHA256", Const, 8},
+ {"PSSWithSHA384", Const, 8},
+ {"PSSWithSHA512", Const, 8},
+ {"ParseSessionState", Func, 21},
+ {"QUICClient", Func, 21},
+ {"QUICConfig", Type, 21},
+ {"QUICConfig.EnableStoreSessionEvent", Field, 23},
+ {"QUICConfig.TLSConfig", Field, 21},
+ {"QUICConn", Type, 21},
+ {"QUICEncryptionLevel", Type, 21},
+ {"QUICEncryptionLevelApplication", Const, 21},
+ {"QUICEncryptionLevelEarly", Const, 21},
+ {"QUICEncryptionLevelHandshake", Const, 21},
+ {"QUICEncryptionLevelInitial", Const, 21},
+ {"QUICEvent", Type, 21},
+ {"QUICEvent.Data", Field, 21},
+ {"QUICEvent.Kind", Field, 21},
+ {"QUICEvent.Level", Field, 21},
+ {"QUICEvent.SessionState", Field, 23},
+ {"QUICEvent.Suite", Field, 21},
+ {"QUICEventKind", Type, 21},
+ {"QUICHandshakeDone", Const, 21},
+ {"QUICNoEvent", Const, 21},
+ {"QUICRejectedEarlyData", Const, 21},
+ {"QUICResumeSession", Const, 23},
+ {"QUICServer", Func, 21},
+ {"QUICSessionTicketOptions", Type, 21},
+ {"QUICSessionTicketOptions.EarlyData", Field, 21},
+ {"QUICSessionTicketOptions.Extra", Field, 23},
+ {"QUICSetReadSecret", Const, 21},
+ {"QUICSetWriteSecret", Const, 21},
+ {"QUICStoreSession", Const, 23},
+ {"QUICTransportParameters", Const, 21},
+ {"QUICTransportParametersRequired", Const, 21},
+ {"QUICWriteData", Const, 21},
+ {"RecordHeaderError", Type, 6},
+ {"RecordHeaderError.Conn", Field, 12},
+ {"RecordHeaderError.Msg", Field, 6},
+ {"RecordHeaderError.RecordHeader", Field, 6},
+ {"RenegotiateFreelyAsClient", Const, 7},
+ {"RenegotiateNever", Const, 7},
+ {"RenegotiateOnceAsClient", Const, 7},
+ {"RenegotiationSupport", Type, 7},
+ {"RequestClientCert", Const, 0},
+ {"RequireAndVerifyClientCert", Const, 0},
+ {"RequireAnyClientCert", Const, 0},
+ {"Server", Func, 0},
+ {"SessionState", Type, 21},
+ {"SessionState.EarlyData", Field, 21},
+ {"SessionState.Extra", Field, 21},
+ {"SignatureScheme", Type, 8},
+ {"TLS_AES_128_GCM_SHA256", Const, 12},
+ {"TLS_AES_256_GCM_SHA384", Const, 12},
+ {"TLS_CHACHA20_POLY1305_SHA256", Const, 12},
+ {"TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA", Const, 2},
+ {"TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256", Const, 8},
+ {"TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256", Const, 2},
+ {"TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA", Const, 2},
+ {"TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384", Const, 5},
+ {"TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305", Const, 8},
+ {"TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256", Const, 14},
+ {"TLS_ECDHE_ECDSA_WITH_RC4_128_SHA", Const, 2},
+ {"TLS_ECDHE_RSA_WITH_3DES_EDE_CBC_SHA", Const, 0},
+ {"TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA", Const, 0},
+ {"TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256", Const, 8},
+ {"TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256", Const, 2},
+ {"TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA", Const, 1},
+ {"TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384", Const, 5},
+ {"TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305", Const, 8},
+ {"TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256", Const, 14},
+ {"TLS_ECDHE_RSA_WITH_RC4_128_SHA", Const, 0},
+ {"TLS_FALLBACK_SCSV", Const, 4},
+ {"TLS_RSA_WITH_3DES_EDE_CBC_SHA", Const, 0},
+ {"TLS_RSA_WITH_AES_128_CBC_SHA", Const, 0},
+ {"TLS_RSA_WITH_AES_128_CBC_SHA256", Const, 8},
+ {"TLS_RSA_WITH_AES_128_GCM_SHA256", Const, 6},
+ {"TLS_RSA_WITH_AES_256_CBC_SHA", Const, 1},
+ {"TLS_RSA_WITH_AES_256_GCM_SHA384", Const, 6},
+ {"TLS_RSA_WITH_RC4_128_SHA", Const, 0},
+ {"VerifyClientCertIfGiven", Const, 0},
+ {"VersionName", Func, 21},
+ {"VersionSSL30", Const, 2},
+ {"VersionTLS10", Const, 2},
+ {"VersionTLS11", Const, 2},
+ {"VersionTLS12", Const, 2},
+ {"VersionTLS13", Const, 12},
+ {"X25519", Const, 8},
+ {"X509KeyPair", Func, 0},
+ },
+ "crypto/x509": {
+ {"(*CertPool).AddCert", Method, 0},
+ {"(*CertPool).AddCertWithConstraint", Method, 22},
+ {"(*CertPool).AppendCertsFromPEM", Method, 0},
+ {"(*CertPool).Clone", Method, 19},
+ {"(*CertPool).Equal", Method, 19},
+ {"(*CertPool).Subjects", Method, 0},
+ {"(*Certificate).CheckCRLSignature", Method, 0},
+ {"(*Certificate).CheckSignature", Method, 0},
+ {"(*Certificate).CheckSignatureFrom", Method, 0},
+ {"(*Certificate).CreateCRL", Method, 0},
+ {"(*Certificate).Equal", Method, 0},
+ {"(*Certificate).Verify", Method, 0},
+ {"(*Certificate).VerifyHostname", Method, 0},
+ {"(*CertificateRequest).CheckSignature", Method, 5},
+ {"(*OID).UnmarshalBinary", Method, 23},
+ {"(*OID).UnmarshalText", Method, 23},
+ {"(*RevocationList).CheckSignatureFrom", Method, 19},
+ {"(CertificateInvalidError).Error", Method, 0},
+ {"(ConstraintViolationError).Error", Method, 0},
+ {"(HostnameError).Error", Method, 0},
+ {"(InsecureAlgorithmError).Error", Method, 6},
+ {"(OID).Equal", Method, 22},
+ {"(OID).EqualASN1OID", Method, 22},
+ {"(OID).MarshalBinary", Method, 23},
+ {"(OID).MarshalText", Method, 23},
+ {"(OID).String", Method, 22},
+ {"(PublicKeyAlgorithm).String", Method, 10},
+ {"(SignatureAlgorithm).String", Method, 6},
+ {"(SystemRootsError).Error", Method, 1},
+ {"(SystemRootsError).Unwrap", Method, 16},
+ {"(UnhandledCriticalExtension).Error", Method, 0},
+ {"(UnknownAuthorityError).Error", Method, 0},
+ {"CANotAuthorizedForExtKeyUsage", Const, 10},
+ {"CANotAuthorizedForThisName", Const, 0},
+ {"CertPool", Type, 0},
+ {"Certificate", Type, 0},
+ {"Certificate.AuthorityKeyId", Field, 0},
+ {"Certificate.BasicConstraintsValid", Field, 0},
+ {"Certificate.CRLDistributionPoints", Field, 2},
+ {"Certificate.DNSNames", Field, 0},
+ {"Certificate.EmailAddresses", Field, 0},
+ {"Certificate.ExcludedDNSDomains", Field, 9},
+ {"Certificate.ExcludedEmailAddresses", Field, 10},
+ {"Certificate.ExcludedIPRanges", Field, 10},
+ {"Certificate.ExcludedURIDomains", Field, 10},
+ {"Certificate.ExtKeyUsage", Field, 0},
+ {"Certificate.Extensions", Field, 2},
+ {"Certificate.ExtraExtensions", Field, 2},
+ {"Certificate.IPAddresses", Field, 1},
+ {"Certificate.IsCA", Field, 0},
+ {"Certificate.Issuer", Field, 0},
+ {"Certificate.IssuingCertificateURL", Field, 2},
+ {"Certificate.KeyUsage", Field, 0},
+ {"Certificate.MaxPathLen", Field, 0},
+ {"Certificate.MaxPathLenZero", Field, 4},
+ {"Certificate.NotAfter", Field, 0},
+ {"Certificate.NotBefore", Field, 0},
+ {"Certificate.OCSPServer", Field, 2},
+ {"Certificate.PermittedDNSDomains", Field, 0},
+ {"Certificate.PermittedDNSDomainsCritical", Field, 0},
+ {"Certificate.PermittedEmailAddresses", Field, 10},
+ {"Certificate.PermittedIPRanges", Field, 10},
+ {"Certificate.PermittedURIDomains", Field, 10},
+ {"Certificate.Policies", Field, 22},
+ {"Certificate.PolicyIdentifiers", Field, 0},
+ {"Certificate.PublicKey", Field, 0},
+ {"Certificate.PublicKeyAlgorithm", Field, 0},
+ {"Certificate.Raw", Field, 0},
+ {"Certificate.RawIssuer", Field, 0},
+ {"Certificate.RawSubject", Field, 0},
+ {"Certificate.RawSubjectPublicKeyInfo", Field, 0},
+ {"Certificate.RawTBSCertificate", Field, 0},
+ {"Certificate.SerialNumber", Field, 0},
+ {"Certificate.Signature", Field, 0},
+ {"Certificate.SignatureAlgorithm", Field, 0},
+ {"Certificate.Subject", Field, 0},
+ {"Certificate.SubjectKeyId", Field, 0},
+ {"Certificate.URIs", Field, 10},
+ {"Certificate.UnhandledCriticalExtensions", Field, 5},
+ {"Certificate.UnknownExtKeyUsage", Field, 0},
+ {"Certificate.Version", Field, 0},
+ {"CertificateInvalidError", Type, 0},
+ {"CertificateInvalidError.Cert", Field, 0},
+ {"CertificateInvalidError.Detail", Field, 10},
+ {"CertificateInvalidError.Reason", Field, 0},
+ {"CertificateRequest", Type, 3},
+ {"CertificateRequest.Attributes", Field, 3},
+ {"CertificateRequest.DNSNames", Field, 3},
+ {"CertificateRequest.EmailAddresses", Field, 3},
+ {"CertificateRequest.Extensions", Field, 3},
+ {"CertificateRequest.ExtraExtensions", Field, 3},
+ {"CertificateRequest.IPAddresses", Field, 3},
+ {"CertificateRequest.PublicKey", Field, 3},
+ {"CertificateRequest.PublicKeyAlgorithm", Field, 3},
+ {"CertificateRequest.Raw", Field, 3},
+ {"CertificateRequest.RawSubject", Field, 3},
+ {"CertificateRequest.RawSubjectPublicKeyInfo", Field, 3},
+ {"CertificateRequest.RawTBSCertificateRequest", Field, 3},
+ {"CertificateRequest.Signature", Field, 3},
+ {"CertificateRequest.SignatureAlgorithm", Field, 3},
+ {"CertificateRequest.Subject", Field, 3},
+ {"CertificateRequest.URIs", Field, 10},
+ {"CertificateRequest.Version", Field, 3},
+ {"ConstraintViolationError", Type, 0},
+ {"CreateCertificate", Func, 0},
+ {"CreateCertificateRequest", Func, 3},
+ {"CreateRevocationList", Func, 15},
+ {"DSA", Const, 0},
+ {"DSAWithSHA1", Const, 0},
+ {"DSAWithSHA256", Const, 0},
+ {"DecryptPEMBlock", Func, 1},
+ {"ECDSA", Const, 1},
+ {"ECDSAWithSHA1", Const, 1},
+ {"ECDSAWithSHA256", Const, 1},
+ {"ECDSAWithSHA384", Const, 1},
+ {"ECDSAWithSHA512", Const, 1},
+ {"Ed25519", Const, 13},
+ {"EncryptPEMBlock", Func, 1},
+ {"ErrUnsupportedAlgorithm", Var, 0},
+ {"Expired", Const, 0},
+ {"ExtKeyUsage", Type, 0},
+ {"ExtKeyUsageAny", Const, 0},
+ {"ExtKeyUsageClientAuth", Const, 0},
+ {"ExtKeyUsageCodeSigning", Const, 0},
+ {"ExtKeyUsageEmailProtection", Const, 0},
+ {"ExtKeyUsageIPSECEndSystem", Const, 1},
+ {"ExtKeyUsageIPSECTunnel", Const, 1},
+ {"ExtKeyUsageIPSECUser", Const, 1},
+ {"ExtKeyUsageMicrosoftCommercialCodeSigning", Const, 10},
+ {"ExtKeyUsageMicrosoftKernelCodeSigning", Const, 10},
+ {"ExtKeyUsageMicrosoftServerGatedCrypto", Const, 1},
+ {"ExtKeyUsageNetscapeServerGatedCrypto", Const, 1},
+ {"ExtKeyUsageOCSPSigning", Const, 0},
+ {"ExtKeyUsageServerAuth", Const, 0},
+ {"ExtKeyUsageTimeStamping", Const, 0},
+ {"HostnameError", Type, 0},
+ {"HostnameError.Certificate", Field, 0},
+ {"HostnameError.Host", Field, 0},
+ {"IncompatibleUsage", Const, 1},
+ {"IncorrectPasswordError", Var, 1},
+ {"InsecureAlgorithmError", Type, 6},
+ {"InvalidReason", Type, 0},
+ {"IsEncryptedPEMBlock", Func, 1},
+ {"KeyUsage", Type, 0},
+ {"KeyUsageCRLSign", Const, 0},
+ {"KeyUsageCertSign", Const, 0},
+ {"KeyUsageContentCommitment", Const, 0},
+ {"KeyUsageDataEncipherment", Const, 0},
+ {"KeyUsageDecipherOnly", Const, 0},
+ {"KeyUsageDigitalSignature", Const, 0},
+ {"KeyUsageEncipherOnly", Const, 0},
+ {"KeyUsageKeyAgreement", Const, 0},
+ {"KeyUsageKeyEncipherment", Const, 0},
+ {"MD2WithRSA", Const, 0},
+ {"MD5WithRSA", Const, 0},
+ {"MarshalECPrivateKey", Func, 2},
+ {"MarshalPKCS1PrivateKey", Func, 0},
+ {"MarshalPKCS1PublicKey", Func, 10},
+ {"MarshalPKCS8PrivateKey", Func, 10},
+ {"MarshalPKIXPublicKey", Func, 0},
+ {"NameConstraintsWithoutSANs", Const, 10},
+ {"NameMismatch", Const, 8},
+ {"NewCertPool", Func, 0},
+ {"NotAuthorizedToSign", Const, 0},
+ {"OID", Type, 22},
+ {"OIDFromInts", Func, 22},
+ {"PEMCipher", Type, 1},
+ {"PEMCipher3DES", Const, 1},
+ {"PEMCipherAES128", Const, 1},
+ {"PEMCipherAES192", Const, 1},
+ {"PEMCipherAES256", Const, 1},
+ {"PEMCipherDES", Const, 1},
+ {"ParseCRL", Func, 0},
+ {"ParseCertificate", Func, 0},
+ {"ParseCertificateRequest", Func, 3},
+ {"ParseCertificates", Func, 0},
+ {"ParseDERCRL", Func, 0},
+ {"ParseECPrivateKey", Func, 1},
+ {"ParseOID", Func, 23},
+ {"ParsePKCS1PrivateKey", Func, 0},
+ {"ParsePKCS1PublicKey", Func, 10},
+ {"ParsePKCS8PrivateKey", Func, 0},
+ {"ParsePKIXPublicKey", Func, 0},
+ {"ParseRevocationList", Func, 19},
+ {"PublicKeyAlgorithm", Type, 0},
+ {"PureEd25519", Const, 13},
+ {"RSA", Const, 0},
+ {"RevocationList", Type, 15},
+ {"RevocationList.AuthorityKeyId", Field, 19},
+ {"RevocationList.Extensions", Field, 19},
+ {"RevocationList.ExtraExtensions", Field, 15},
+ {"RevocationList.Issuer", Field, 19},
+ {"RevocationList.NextUpdate", Field, 15},
+ {"RevocationList.Number", Field, 15},
+ {"RevocationList.Raw", Field, 19},
+ {"RevocationList.RawIssuer", Field, 19},
+ {"RevocationList.RawTBSRevocationList", Field, 19},
+ {"RevocationList.RevokedCertificateEntries", Field, 21},
+ {"RevocationList.RevokedCertificates", Field, 15},
+ {"RevocationList.Signature", Field, 19},
+ {"RevocationList.SignatureAlgorithm", Field, 15},
+ {"RevocationList.ThisUpdate", Field, 15},
+ {"RevocationListEntry", Type, 21},
+ {"RevocationListEntry.Extensions", Field, 21},
+ {"RevocationListEntry.ExtraExtensions", Field, 21},
+ {"RevocationListEntry.Raw", Field, 21},
+ {"RevocationListEntry.ReasonCode", Field, 21},
+ {"RevocationListEntry.RevocationTime", Field, 21},
+ {"RevocationListEntry.SerialNumber", Field, 21},
+ {"SHA1WithRSA", Const, 0},
+ {"SHA256WithRSA", Const, 0},
+ {"SHA256WithRSAPSS", Const, 8},
+ {"SHA384WithRSA", Const, 0},
+ {"SHA384WithRSAPSS", Const, 8},
+ {"SHA512WithRSA", Const, 0},
+ {"SHA512WithRSAPSS", Const, 8},
+ {"SetFallbackRoots", Func, 20},
+ {"SignatureAlgorithm", Type, 0},
+ {"SystemCertPool", Func, 7},
+ {"SystemRootsError", Type, 1},
+ {"SystemRootsError.Err", Field, 7},
+ {"TooManyConstraints", Const, 10},
+ {"TooManyIntermediates", Const, 0},
+ {"UnconstrainedName", Const, 10},
+ {"UnhandledCriticalExtension", Type, 0},
+ {"UnknownAuthorityError", Type, 0},
+ {"UnknownAuthorityError.Cert", Field, 8},
+ {"UnknownPublicKeyAlgorithm", Const, 0},
+ {"UnknownSignatureAlgorithm", Const, 0},
+ {"VerifyOptions", Type, 0},
+ {"VerifyOptions.CurrentTime", Field, 0},
+ {"VerifyOptions.DNSName", Field, 0},
+ {"VerifyOptions.Intermediates", Field, 0},
+ {"VerifyOptions.KeyUsages", Field, 1},
+ {"VerifyOptions.MaxConstraintComparisions", Field, 10},
+ {"VerifyOptions.Roots", Field, 0},
+ },
+ "crypto/x509/pkix": {
+ {"(*CertificateList).HasExpired", Method, 0},
+ {"(*Name).FillFromRDNSequence", Method, 0},
+ {"(Name).String", Method, 10},
+ {"(Name).ToRDNSequence", Method, 0},
+ {"(RDNSequence).String", Method, 10},
+ {"AlgorithmIdentifier", Type, 0},
+ {"AlgorithmIdentifier.Algorithm", Field, 0},
+ {"AlgorithmIdentifier.Parameters", Field, 0},
+ {"AttributeTypeAndValue", Type, 0},
+ {"AttributeTypeAndValue.Type", Field, 0},
+ {"AttributeTypeAndValue.Value", Field, 0},
+ {"AttributeTypeAndValueSET", Type, 3},
+ {"AttributeTypeAndValueSET.Type", Field, 3},
+ {"AttributeTypeAndValueSET.Value", Field, 3},
+ {"CertificateList", Type, 0},
+ {"CertificateList.SignatureAlgorithm", Field, 0},
+ {"CertificateList.SignatureValue", Field, 0},
+ {"CertificateList.TBSCertList", Field, 0},
+ {"Extension", Type, 0},
+ {"Extension.Critical", Field, 0},
+ {"Extension.Id", Field, 0},
+ {"Extension.Value", Field, 0},
+ {"Name", Type, 0},
+ {"Name.CommonName", Field, 0},
+ {"Name.Country", Field, 0},
+ {"Name.ExtraNames", Field, 5},
+ {"Name.Locality", Field, 0},
+ {"Name.Names", Field, 0},
+ {"Name.Organization", Field, 0},
+ {"Name.OrganizationalUnit", Field, 0},
+ {"Name.PostalCode", Field, 0},
+ {"Name.Province", Field, 0},
+ {"Name.SerialNumber", Field, 0},
+ {"Name.StreetAddress", Field, 0},
+ {"RDNSequence", Type, 0},
+ {"RelativeDistinguishedNameSET", Type, 0},
+ {"RevokedCertificate", Type, 0},
+ {"RevokedCertificate.Extensions", Field, 0},
+ {"RevokedCertificate.RevocationTime", Field, 0},
+ {"RevokedCertificate.SerialNumber", Field, 0},
+ {"TBSCertificateList", Type, 0},
+ {"TBSCertificateList.Extensions", Field, 0},
+ {"TBSCertificateList.Issuer", Field, 0},
+ {"TBSCertificateList.NextUpdate", Field, 0},
+ {"TBSCertificateList.Raw", Field, 0},
+ {"TBSCertificateList.RevokedCertificates", Field, 0},
+ {"TBSCertificateList.Signature", Field, 0},
+ {"TBSCertificateList.ThisUpdate", Field, 0},
+ {"TBSCertificateList.Version", Field, 0},
+ },
+ "database/sql": {
+ {"(*ColumnType).DatabaseTypeName", Method, 8},
+ {"(*ColumnType).DecimalSize", Method, 8},
+ {"(*ColumnType).Length", Method, 8},
+ {"(*ColumnType).Name", Method, 8},
+ {"(*ColumnType).Nullable", Method, 8},
+ {"(*ColumnType).ScanType", Method, 8},
+ {"(*Conn).BeginTx", Method, 9},
+ {"(*Conn).Close", Method, 9},
+ {"(*Conn).ExecContext", Method, 9},
+ {"(*Conn).PingContext", Method, 9},
+ {"(*Conn).PrepareContext", Method, 9},
+ {"(*Conn).QueryContext", Method, 9},
+ {"(*Conn).QueryRowContext", Method, 9},
+ {"(*Conn).Raw", Method, 13},
+ {"(*DB).Begin", Method, 0},
+ {"(*DB).BeginTx", Method, 8},
+ {"(*DB).Close", Method, 0},
+ {"(*DB).Conn", Method, 9},
+ {"(*DB).Driver", Method, 0},
+ {"(*DB).Exec", Method, 0},
+ {"(*DB).ExecContext", Method, 8},
+ {"(*DB).Ping", Method, 1},
+ {"(*DB).PingContext", Method, 8},
+ {"(*DB).Prepare", Method, 0},
+ {"(*DB).PrepareContext", Method, 8},
+ {"(*DB).Query", Method, 0},
+ {"(*DB).QueryContext", Method, 8},
+ {"(*DB).QueryRow", Method, 0},
+ {"(*DB).QueryRowContext", Method, 8},
+ {"(*DB).SetConnMaxIdleTime", Method, 15},
+ {"(*DB).SetConnMaxLifetime", Method, 6},
+ {"(*DB).SetMaxIdleConns", Method, 1},
+ {"(*DB).SetMaxOpenConns", Method, 2},
+ {"(*DB).Stats", Method, 5},
+ {"(*Null).Scan", Method, 22},
+ {"(*NullBool).Scan", Method, 0},
+ {"(*NullByte).Scan", Method, 17},
+ {"(*NullFloat64).Scan", Method, 0},
+ {"(*NullInt16).Scan", Method, 17},
+ {"(*NullInt32).Scan", Method, 13},
+ {"(*NullInt64).Scan", Method, 0},
+ {"(*NullString).Scan", Method, 0},
+ {"(*NullTime).Scan", Method, 13},
+ {"(*Row).Err", Method, 15},
+ {"(*Row).Scan", Method, 0},
+ {"(*Rows).Close", Method, 0},
+ {"(*Rows).ColumnTypes", Method, 8},
+ {"(*Rows).Columns", Method, 0},
+ {"(*Rows).Err", Method, 0},
+ {"(*Rows).Next", Method, 0},
+ {"(*Rows).NextResultSet", Method, 8},
+ {"(*Rows).Scan", Method, 0},
+ {"(*Stmt).Close", Method, 0},
+ {"(*Stmt).Exec", Method, 0},
+ {"(*Stmt).ExecContext", Method, 8},
+ {"(*Stmt).Query", Method, 0},
+ {"(*Stmt).QueryContext", Method, 8},
+ {"(*Stmt).QueryRow", Method, 0},
+ {"(*Stmt).QueryRowContext", Method, 8},
+ {"(*Tx).Commit", Method, 0},
+ {"(*Tx).Exec", Method, 0},
+ {"(*Tx).ExecContext", Method, 8},
+ {"(*Tx).Prepare", Method, 0},
+ {"(*Tx).PrepareContext", Method, 8},
+ {"(*Tx).Query", Method, 0},
+ {"(*Tx).QueryContext", Method, 8},
+ {"(*Tx).QueryRow", Method, 0},
+ {"(*Tx).QueryRowContext", Method, 8},
+ {"(*Tx).Rollback", Method, 0},
+ {"(*Tx).Stmt", Method, 0},
+ {"(*Tx).StmtContext", Method, 8},
+ {"(IsolationLevel).String", Method, 11},
+ {"(Null).Value", Method, 22},
+ {"(NullBool).Value", Method, 0},
+ {"(NullByte).Value", Method, 17},
+ {"(NullFloat64).Value", Method, 0},
+ {"(NullInt16).Value", Method, 17},
+ {"(NullInt32).Value", Method, 13},
+ {"(NullInt64).Value", Method, 0},
+ {"(NullString).Value", Method, 0},
+ {"(NullTime).Value", Method, 13},
+ {"ColumnType", Type, 8},
+ {"Conn", Type, 9},
+ {"DB", Type, 0},
+ {"DBStats", Type, 5},
+ {"DBStats.Idle", Field, 11},
+ {"DBStats.InUse", Field, 11},
+ {"DBStats.MaxIdleClosed", Field, 11},
+ {"DBStats.MaxIdleTimeClosed", Field, 15},
+ {"DBStats.MaxLifetimeClosed", Field, 11},
+ {"DBStats.MaxOpenConnections", Field, 11},
+ {"DBStats.OpenConnections", Field, 5},
+ {"DBStats.WaitCount", Field, 11},
+ {"DBStats.WaitDuration", Field, 11},
+ {"Drivers", Func, 4},
+ {"ErrConnDone", Var, 9},
+ {"ErrNoRows", Var, 0},
+ {"ErrTxDone", Var, 0},
+ {"IsolationLevel", Type, 8},
+ {"LevelDefault", Const, 8},
+ {"LevelLinearizable", Const, 8},
+ {"LevelReadCommitted", Const, 8},
+ {"LevelReadUncommitted", Const, 8},
+ {"LevelRepeatableRead", Const, 8},
+ {"LevelSerializable", Const, 8},
+ {"LevelSnapshot", Const, 8},
+ {"LevelWriteCommitted", Const, 8},
+ {"Named", Func, 8},
+ {"NamedArg", Type, 8},
+ {"NamedArg.Name", Field, 8},
+ {"NamedArg.Value", Field, 8},
+ {"Null", Type, 22},
+ {"Null.V", Field, 22},
+ {"Null.Valid", Field, 22},
+ {"NullBool", Type, 0},
+ {"NullBool.Bool", Field, 0},
+ {"NullBool.Valid", Field, 0},
+ {"NullByte", Type, 17},
+ {"NullByte.Byte", Field, 17},
+ {"NullByte.Valid", Field, 17},
+ {"NullFloat64", Type, 0},
+ {"NullFloat64.Float64", Field, 0},
+ {"NullFloat64.Valid", Field, 0},
+ {"NullInt16", Type, 17},
+ {"NullInt16.Int16", Field, 17},
+ {"NullInt16.Valid", Field, 17},
+ {"NullInt32", Type, 13},
+ {"NullInt32.Int32", Field, 13},
+ {"NullInt32.Valid", Field, 13},
+ {"NullInt64", Type, 0},
+ {"NullInt64.Int64", Field, 0},
+ {"NullInt64.Valid", Field, 0},
+ {"NullString", Type, 0},
+ {"NullString.String", Field, 0},
+ {"NullString.Valid", Field, 0},
+ {"NullTime", Type, 13},
+ {"NullTime.Time", Field, 13},
+ {"NullTime.Valid", Field, 13},
+ {"Open", Func, 0},
+ {"OpenDB", Func, 10},
+ {"Out", Type, 9},
+ {"Out.Dest", Field, 9},
+ {"Out.In", Field, 9},
+ {"RawBytes", Type, 0},
+ {"Register", Func, 0},
+ {"Result", Type, 0},
+ {"Row", Type, 0},
+ {"Rows", Type, 0},
+ {"Scanner", Type, 0},
+ {"Stmt", Type, 0},
+ {"Tx", Type, 0},
+ {"TxOptions", Type, 8},
+ {"TxOptions.Isolation", Field, 8},
+ {"TxOptions.ReadOnly", Field, 8},
+ },
+ "database/sql/driver": {
+ {"(NotNull).ConvertValue", Method, 0},
+ {"(Null).ConvertValue", Method, 0},
+ {"(RowsAffected).LastInsertId", Method, 0},
+ {"(RowsAffected).RowsAffected", Method, 0},
+ {"Bool", Var, 0},
+ {"ColumnConverter", Type, 0},
+ {"Conn", Type, 0},
+ {"ConnBeginTx", Type, 8},
+ {"ConnPrepareContext", Type, 8},
+ {"Connector", Type, 10},
+ {"DefaultParameterConverter", Var, 0},
+ {"Driver", Type, 0},
+ {"DriverContext", Type, 10},
+ {"ErrBadConn", Var, 0},
+ {"ErrRemoveArgument", Var, 9},
+ {"ErrSkip", Var, 0},
+ {"Execer", Type, 0},
+ {"ExecerContext", Type, 8},
+ {"Int32", Var, 0},
+ {"IsScanValue", Func, 0},
+ {"IsValue", Func, 0},
+ {"IsolationLevel", Type, 8},
+ {"NamedValue", Type, 8},
+ {"NamedValue.Name", Field, 8},
+ {"NamedValue.Ordinal", Field, 8},
+ {"NamedValue.Value", Field, 8},
+ {"NamedValueChecker", Type, 9},
+ {"NotNull", Type, 0},
+ {"NotNull.Converter", Field, 0},
+ {"Null", Type, 0},
+ {"Null.Converter", Field, 0},
+ {"Pinger", Type, 8},
+ {"Queryer", Type, 1},
+ {"QueryerContext", Type, 8},
+ {"Result", Type, 0},
+ {"ResultNoRows", Var, 0},
+ {"Rows", Type, 0},
+ {"RowsAffected", Type, 0},
+ {"RowsColumnTypeDatabaseTypeName", Type, 8},
+ {"RowsColumnTypeLength", Type, 8},
+ {"RowsColumnTypeNullable", Type, 8},
+ {"RowsColumnTypePrecisionScale", Type, 8},
+ {"RowsColumnTypeScanType", Type, 8},
+ {"RowsNextResultSet", Type, 8},
+ {"SessionResetter", Type, 10},
+ {"Stmt", Type, 0},
+ {"StmtExecContext", Type, 8},
+ {"StmtQueryContext", Type, 8},
+ {"String", Var, 0},
+ {"Tx", Type, 0},
+ {"TxOptions", Type, 8},
+ {"TxOptions.Isolation", Field, 8},
+ {"TxOptions.ReadOnly", Field, 8},
+ {"Validator", Type, 15},
+ {"Value", Type, 0},
+ {"ValueConverter", Type, 0},
+ {"Valuer", Type, 0},
+ },
+ "debug/buildinfo": {
+ {"BuildInfo", Type, 18},
+ {"Read", Func, 18},
+ {"ReadFile", Func, 18},
+ },
+ "debug/dwarf": {
+ {"(*AddrType).Basic", Method, 0},
+ {"(*AddrType).Common", Method, 0},
+ {"(*AddrType).Size", Method, 0},
+ {"(*AddrType).String", Method, 0},
+ {"(*ArrayType).Common", Method, 0},
+ {"(*ArrayType).Size", Method, 0},
+ {"(*ArrayType).String", Method, 0},
+ {"(*BasicType).Basic", Method, 0},
+ {"(*BasicType).Common", Method, 0},
+ {"(*BasicType).Size", Method, 0},
+ {"(*BasicType).String", Method, 0},
+ {"(*BoolType).Basic", Method, 0},
+ {"(*BoolType).Common", Method, 0},
+ {"(*BoolType).Size", Method, 0},
+ {"(*BoolType).String", Method, 0},
+ {"(*CharType).Basic", Method, 0},
+ {"(*CharType).Common", Method, 0},
+ {"(*CharType).Size", Method, 0},
+ {"(*CharType).String", Method, 0},
+ {"(*CommonType).Common", Method, 0},
+ {"(*CommonType).Size", Method, 0},
+ {"(*ComplexType).Basic", Method, 0},
+ {"(*ComplexType).Common", Method, 0},
+ {"(*ComplexType).Size", Method, 0},
+ {"(*ComplexType).String", Method, 0},
+ {"(*Data).AddSection", Method, 14},
+ {"(*Data).AddTypes", Method, 3},
+ {"(*Data).LineReader", Method, 5},
+ {"(*Data).Ranges", Method, 7},
+ {"(*Data).Reader", Method, 0},
+ {"(*Data).Type", Method, 0},
+ {"(*DotDotDotType).Common", Method, 0},
+ {"(*DotDotDotType).Size", Method, 0},
+ {"(*DotDotDotType).String", Method, 0},
+ {"(*Entry).AttrField", Method, 5},
+ {"(*Entry).Val", Method, 0},
+ {"(*EnumType).Common", Method, 0},
+ {"(*EnumType).Size", Method, 0},
+ {"(*EnumType).String", Method, 0},
+ {"(*FloatType).Basic", Method, 0},
+ {"(*FloatType).Common", Method, 0},
+ {"(*FloatType).Size", Method, 0},
+ {"(*FloatType).String", Method, 0},
+ {"(*FuncType).Common", Method, 0},
+ {"(*FuncType).Size", Method, 0},
+ {"(*FuncType).String", Method, 0},
+ {"(*IntType).Basic", Method, 0},
+ {"(*IntType).Common", Method, 0},
+ {"(*IntType).Size", Method, 0},
+ {"(*IntType).String", Method, 0},
+ {"(*LineReader).Files", Method, 14},
+ {"(*LineReader).Next", Method, 5},
+ {"(*LineReader).Reset", Method, 5},
+ {"(*LineReader).Seek", Method, 5},
+ {"(*LineReader).SeekPC", Method, 5},
+ {"(*LineReader).Tell", Method, 5},
+ {"(*PtrType).Common", Method, 0},
+ {"(*PtrType).Size", Method, 0},
+ {"(*PtrType).String", Method, 0},
+ {"(*QualType).Common", Method, 0},
+ {"(*QualType).Size", Method, 0},
+ {"(*QualType).String", Method, 0},
+ {"(*Reader).AddressSize", Method, 5},
+ {"(*Reader).ByteOrder", Method, 14},
+ {"(*Reader).Next", Method, 0},
+ {"(*Reader).Seek", Method, 0},
+ {"(*Reader).SeekPC", Method, 7},
+ {"(*Reader).SkipChildren", Method, 0},
+ {"(*StructType).Common", Method, 0},
+ {"(*StructType).Defn", Method, 0},
+ {"(*StructType).Size", Method, 0},
+ {"(*StructType).String", Method, 0},
+ {"(*TypedefType).Common", Method, 0},
+ {"(*TypedefType).Size", Method, 0},
+ {"(*TypedefType).String", Method, 0},
+ {"(*UcharType).Basic", Method, 0},
+ {"(*UcharType).Common", Method, 0},
+ {"(*UcharType).Size", Method, 0},
+ {"(*UcharType).String", Method, 0},
+ {"(*UintType).Basic", Method, 0},
+ {"(*UintType).Common", Method, 0},
+ {"(*UintType).Size", Method, 0},
+ {"(*UintType).String", Method, 0},
+ {"(*UnspecifiedType).Basic", Method, 4},
+ {"(*UnspecifiedType).Common", Method, 4},
+ {"(*UnspecifiedType).Size", Method, 4},
+ {"(*UnspecifiedType).String", Method, 4},
+ {"(*UnsupportedType).Common", Method, 13},
+ {"(*UnsupportedType).Size", Method, 13},
+ {"(*UnsupportedType).String", Method, 13},
+ {"(*VoidType).Common", Method, 0},
+ {"(*VoidType).Size", Method, 0},
+ {"(*VoidType).String", Method, 0},
+ {"(Attr).GoString", Method, 0},
+ {"(Attr).String", Method, 0},
+ {"(Class).GoString", Method, 5},
+ {"(Class).String", Method, 5},
+ {"(DecodeError).Error", Method, 0},
+ {"(Tag).GoString", Method, 0},
+ {"(Tag).String", Method, 0},
+ {"AddrType", Type, 0},
+ {"AddrType.BasicType", Field, 0},
+ {"ArrayType", Type, 0},
+ {"ArrayType.CommonType", Field, 0},
+ {"ArrayType.Count", Field, 0},
+ {"ArrayType.StrideBitSize", Field, 0},
+ {"ArrayType.Type", Field, 0},
+ {"Attr", Type, 0},
+ {"AttrAbstractOrigin", Const, 0},
+ {"AttrAccessibility", Const, 0},
+ {"AttrAddrBase", Const, 14},
+ {"AttrAddrClass", Const, 0},
+ {"AttrAlignment", Const, 14},
+ {"AttrAllocated", Const, 0},
+ {"AttrArtificial", Const, 0},
+ {"AttrAssociated", Const, 0},
+ {"AttrBaseTypes", Const, 0},
+ {"AttrBinaryScale", Const, 14},
+ {"AttrBitOffset", Const, 0},
+ {"AttrBitSize", Const, 0},
+ {"AttrByteSize", Const, 0},
+ {"AttrCallAllCalls", Const, 14},
+ {"AttrCallAllSourceCalls", Const, 14},
+ {"AttrCallAllTailCalls", Const, 14},
+ {"AttrCallColumn", Const, 0},
+ {"AttrCallDataLocation", Const, 14},
+ {"AttrCallDataValue", Const, 14},
+ {"AttrCallFile", Const, 0},
+ {"AttrCallLine", Const, 0},
+ {"AttrCallOrigin", Const, 14},
+ {"AttrCallPC", Const, 14},
+ {"AttrCallParameter", Const, 14},
+ {"AttrCallReturnPC", Const, 14},
+ {"AttrCallTailCall", Const, 14},
+ {"AttrCallTarget", Const, 14},
+ {"AttrCallTargetClobbered", Const, 14},
+ {"AttrCallValue", Const, 14},
+ {"AttrCalling", Const, 0},
+ {"AttrCommonRef", Const, 0},
+ {"AttrCompDir", Const, 0},
+ {"AttrConstExpr", Const, 14},
+ {"AttrConstValue", Const, 0},
+ {"AttrContainingType", Const, 0},
+ {"AttrCount", Const, 0},
+ {"AttrDataBitOffset", Const, 14},
+ {"AttrDataLocation", Const, 0},
+ {"AttrDataMemberLoc", Const, 0},
+ {"AttrDecimalScale", Const, 14},
+ {"AttrDecimalSign", Const, 14},
+ {"AttrDeclColumn", Const, 0},
+ {"AttrDeclFile", Const, 0},
+ {"AttrDeclLine", Const, 0},
+ {"AttrDeclaration", Const, 0},
+ {"AttrDefaultValue", Const, 0},
+ {"AttrDefaulted", Const, 14},
+ {"AttrDeleted", Const, 14},
+ {"AttrDescription", Const, 0},
+ {"AttrDigitCount", Const, 14},
+ {"AttrDiscr", Const, 0},
+ {"AttrDiscrList", Const, 0},
+ {"AttrDiscrValue", Const, 0},
+ {"AttrDwoName", Const, 14},
+ {"AttrElemental", Const, 14},
+ {"AttrEncoding", Const, 0},
+ {"AttrEndianity", Const, 14},
+ {"AttrEntrypc", Const, 0},
+ {"AttrEnumClass", Const, 14},
+ {"AttrExplicit", Const, 14},
+ {"AttrExportSymbols", Const, 14},
+ {"AttrExtension", Const, 0},
+ {"AttrExternal", Const, 0},
+ {"AttrFrameBase", Const, 0},
+ {"AttrFriend", Const, 0},
+ {"AttrHighpc", Const, 0},
+ {"AttrIdentifierCase", Const, 0},
+ {"AttrImport", Const, 0},
+ {"AttrInline", Const, 0},
+ {"AttrIsOptional", Const, 0},
+ {"AttrLanguage", Const, 0},
+ {"AttrLinkageName", Const, 14},
+ {"AttrLocation", Const, 0},
+ {"AttrLoclistsBase", Const, 14},
+ {"AttrLowerBound", Const, 0},
+ {"AttrLowpc", Const, 0},
+ {"AttrMacroInfo", Const, 0},
+ {"AttrMacros", Const, 14},
+ {"AttrMainSubprogram", Const, 14},
+ {"AttrMutable", Const, 14},
+ {"AttrName", Const, 0},
+ {"AttrNamelistItem", Const, 0},
+ {"AttrNoreturn", Const, 14},
+ {"AttrObjectPointer", Const, 14},
+ {"AttrOrdering", Const, 0},
+ {"AttrPictureString", Const, 14},
+ {"AttrPriority", Const, 0},
+ {"AttrProducer", Const, 0},
+ {"AttrPrototyped", Const, 0},
+ {"AttrPure", Const, 14},
+ {"AttrRanges", Const, 0},
+ {"AttrRank", Const, 14},
+ {"AttrRecursive", Const, 14},
+ {"AttrReference", Const, 14},
+ {"AttrReturnAddr", Const, 0},
+ {"AttrRnglistsBase", Const, 14},
+ {"AttrRvalueReference", Const, 14},
+ {"AttrSegment", Const, 0},
+ {"AttrSibling", Const, 0},
+ {"AttrSignature", Const, 14},
+ {"AttrSmall", Const, 14},
+ {"AttrSpecification", Const, 0},
+ {"AttrStartScope", Const, 0},
+ {"AttrStaticLink", Const, 0},
+ {"AttrStmtList", Const, 0},
+ {"AttrStrOffsetsBase", Const, 14},
+ {"AttrStride", Const, 0},
+ {"AttrStrideSize", Const, 0},
+ {"AttrStringLength", Const, 0},
+ {"AttrStringLengthBitSize", Const, 14},
+ {"AttrStringLengthByteSize", Const, 14},
+ {"AttrThreadsScaled", Const, 14},
+ {"AttrTrampoline", Const, 0},
+ {"AttrType", Const, 0},
+ {"AttrUpperBound", Const, 0},
+ {"AttrUseLocation", Const, 0},
+ {"AttrUseUTF8", Const, 0},
+ {"AttrVarParam", Const, 0},
+ {"AttrVirtuality", Const, 0},
+ {"AttrVisibility", Const, 0},
+ {"AttrVtableElemLoc", Const, 0},
+ {"BasicType", Type, 0},
+ {"BasicType.BitOffset", Field, 0},
+ {"BasicType.BitSize", Field, 0},
+ {"BasicType.CommonType", Field, 0},
+ {"BasicType.DataBitOffset", Field, 18},
+ {"BoolType", Type, 0},
+ {"BoolType.BasicType", Field, 0},
+ {"CharType", Type, 0},
+ {"CharType.BasicType", Field, 0},
+ {"Class", Type, 5},
+ {"ClassAddrPtr", Const, 14},
+ {"ClassAddress", Const, 5},
+ {"ClassBlock", Const, 5},
+ {"ClassConstant", Const, 5},
+ {"ClassExprLoc", Const, 5},
+ {"ClassFlag", Const, 5},
+ {"ClassLinePtr", Const, 5},
+ {"ClassLocList", Const, 14},
+ {"ClassLocListPtr", Const, 5},
+ {"ClassMacPtr", Const, 5},
+ {"ClassRangeListPtr", Const, 5},
+ {"ClassReference", Const, 5},
+ {"ClassReferenceAlt", Const, 5},
+ {"ClassReferenceSig", Const, 5},
+ {"ClassRngList", Const, 14},
+ {"ClassRngListsPtr", Const, 14},
+ {"ClassStrOffsetsPtr", Const, 14},
+ {"ClassString", Const, 5},
+ {"ClassStringAlt", Const, 5},
+ {"ClassUnknown", Const, 6},
+ {"CommonType", Type, 0},
+ {"CommonType.ByteSize", Field, 0},
+ {"CommonType.Name", Field, 0},
+ {"ComplexType", Type, 0},
+ {"ComplexType.BasicType", Field, 0},
+ {"Data", Type, 0},
+ {"DecodeError", Type, 0},
+ {"DecodeError.Err", Field, 0},
+ {"DecodeError.Name", Field, 0},
+ {"DecodeError.Offset", Field, 0},
+ {"DotDotDotType", Type, 0},
+ {"DotDotDotType.CommonType", Field, 0},
+ {"Entry", Type, 0},
+ {"Entry.Children", Field, 0},
+ {"Entry.Field", Field, 0},
+ {"Entry.Offset", Field, 0},
+ {"Entry.Tag", Field, 0},
+ {"EnumType", Type, 0},
+ {"EnumType.CommonType", Field, 0},
+ {"EnumType.EnumName", Field, 0},
+ {"EnumType.Val", Field, 0},
+ {"EnumValue", Type, 0},
+ {"EnumValue.Name", Field, 0},
+ {"EnumValue.Val", Field, 0},
+ {"ErrUnknownPC", Var, 5},
+ {"Field", Type, 0},
+ {"Field.Attr", Field, 0},
+ {"Field.Class", Field, 5},
+ {"Field.Val", Field, 0},
+ {"FloatType", Type, 0},
+ {"FloatType.BasicType", Field, 0},
+ {"FuncType", Type, 0},
+ {"FuncType.CommonType", Field, 0},
+ {"FuncType.ParamType", Field, 0},
+ {"FuncType.ReturnType", Field, 0},
+ {"IntType", Type, 0},
+ {"IntType.BasicType", Field, 0},
+ {"LineEntry", Type, 5},
+ {"LineEntry.Address", Field, 5},
+ {"LineEntry.BasicBlock", Field, 5},
+ {"LineEntry.Column", Field, 5},
+ {"LineEntry.Discriminator", Field, 5},
+ {"LineEntry.EndSequence", Field, 5},
+ {"LineEntry.EpilogueBegin", Field, 5},
+ {"LineEntry.File", Field, 5},
+ {"LineEntry.ISA", Field, 5},
+ {"LineEntry.IsStmt", Field, 5},
+ {"LineEntry.Line", Field, 5},
+ {"LineEntry.OpIndex", Field, 5},
+ {"LineEntry.PrologueEnd", Field, 5},
+ {"LineFile", Type, 5},
+ {"LineFile.Length", Field, 5},
+ {"LineFile.Mtime", Field, 5},
+ {"LineFile.Name", Field, 5},
+ {"LineReader", Type, 5},
+ {"LineReaderPos", Type, 5},
+ {"New", Func, 0},
+ {"Offset", Type, 0},
+ {"PtrType", Type, 0},
+ {"PtrType.CommonType", Field, 0},
+ {"PtrType.Type", Field, 0},
+ {"QualType", Type, 0},
+ {"QualType.CommonType", Field, 0},
+ {"QualType.Qual", Field, 0},
+ {"QualType.Type", Field, 0},
+ {"Reader", Type, 0},
+ {"StructField", Type, 0},
+ {"StructField.BitOffset", Field, 0},
+ {"StructField.BitSize", Field, 0},
+ {"StructField.ByteOffset", Field, 0},
+ {"StructField.ByteSize", Field, 0},
+ {"StructField.DataBitOffset", Field, 18},
+ {"StructField.Name", Field, 0},
+ {"StructField.Type", Field, 0},
+ {"StructType", Type, 0},
+ {"StructType.CommonType", Field, 0},
+ {"StructType.Field", Field, 0},
+ {"StructType.Incomplete", Field, 0},
+ {"StructType.Kind", Field, 0},
+ {"StructType.StructName", Field, 0},
+ {"Tag", Type, 0},
+ {"TagAccessDeclaration", Const, 0},
+ {"TagArrayType", Const, 0},
+ {"TagAtomicType", Const, 14},
+ {"TagBaseType", Const, 0},
+ {"TagCallSite", Const, 14},
+ {"TagCallSiteParameter", Const, 14},
+ {"TagCatchDwarfBlock", Const, 0},
+ {"TagClassType", Const, 0},
+ {"TagCoarrayType", Const, 14},
+ {"TagCommonDwarfBlock", Const, 0},
+ {"TagCommonInclusion", Const, 0},
+ {"TagCompileUnit", Const, 0},
+ {"TagCondition", Const, 3},
+ {"TagConstType", Const, 0},
+ {"TagConstant", Const, 0},
+ {"TagDwarfProcedure", Const, 0},
+ {"TagDynamicType", Const, 14},
+ {"TagEntryPoint", Const, 0},
+ {"TagEnumerationType", Const, 0},
+ {"TagEnumerator", Const, 0},
+ {"TagFileType", Const, 0},
+ {"TagFormalParameter", Const, 0},
+ {"TagFriend", Const, 0},
+ {"TagGenericSubrange", Const, 14},
+ {"TagImmutableType", Const, 14},
+ {"TagImportedDeclaration", Const, 0},
+ {"TagImportedModule", Const, 0},
+ {"TagImportedUnit", Const, 0},
+ {"TagInheritance", Const, 0},
+ {"TagInlinedSubroutine", Const, 0},
+ {"TagInterfaceType", Const, 0},
+ {"TagLabel", Const, 0},
+ {"TagLexDwarfBlock", Const, 0},
+ {"TagMember", Const, 0},
+ {"TagModule", Const, 0},
+ {"TagMutableType", Const, 0},
+ {"TagNamelist", Const, 0},
+ {"TagNamelistItem", Const, 0},
+ {"TagNamespace", Const, 0},
+ {"TagPackedType", Const, 0},
+ {"TagPartialUnit", Const, 0},
+ {"TagPointerType", Const, 0},
+ {"TagPtrToMemberType", Const, 0},
+ {"TagReferenceType", Const, 0},
+ {"TagRestrictType", Const, 0},
+ {"TagRvalueReferenceType", Const, 3},
+ {"TagSetType", Const, 0},
+ {"TagSharedType", Const, 3},
+ {"TagSkeletonUnit", Const, 14},
+ {"TagStringType", Const, 0},
+ {"TagStructType", Const, 0},
+ {"TagSubprogram", Const, 0},
+ {"TagSubrangeType", Const, 0},
+ {"TagSubroutineType", Const, 0},
+ {"TagTemplateAlias", Const, 3},
+ {"TagTemplateTypeParameter", Const, 0},
+ {"TagTemplateValueParameter", Const, 0},
+ {"TagThrownType", Const, 0},
+ {"TagTryDwarfBlock", Const, 0},
+ {"TagTypeUnit", Const, 3},
+ {"TagTypedef", Const, 0},
+ {"TagUnionType", Const, 0},
+ {"TagUnspecifiedParameters", Const, 0},
+ {"TagUnspecifiedType", Const, 0},
+ {"TagVariable", Const, 0},
+ {"TagVariant", Const, 0},
+ {"TagVariantPart", Const, 0},
+ {"TagVolatileType", Const, 0},
+ {"TagWithStmt", Const, 0},
+ {"Type", Type, 0},
+ {"TypedefType", Type, 0},
+ {"TypedefType.CommonType", Field, 0},
+ {"TypedefType.Type", Field, 0},
+ {"UcharType", Type, 0},
+ {"UcharType.BasicType", Field, 0},
+ {"UintType", Type, 0},
+ {"UintType.BasicType", Field, 0},
+ {"UnspecifiedType", Type, 4},
+ {"UnspecifiedType.BasicType", Field, 4},
+ {"UnsupportedType", Type, 13},
+ {"UnsupportedType.CommonType", Field, 13},
+ {"UnsupportedType.Tag", Field, 13},
+ {"VoidType", Type, 0},
+ {"VoidType.CommonType", Field, 0},
+ },
+ "debug/elf": {
+ {"(*File).Close", Method, 0},
+ {"(*File).DWARF", Method, 0},
+ {"(*File).DynString", Method, 1},
+ {"(*File).DynValue", Method, 21},
+ {"(*File).DynamicSymbols", Method, 4},
+ {"(*File).ImportedLibraries", Method, 0},
+ {"(*File).ImportedSymbols", Method, 0},
+ {"(*File).Section", Method, 0},
+ {"(*File).SectionByType", Method, 0},
+ {"(*File).Symbols", Method, 0},
+ {"(*FormatError).Error", Method, 0},
+ {"(*Prog).Open", Method, 0},
+ {"(*Section).Data", Method, 0},
+ {"(*Section).Open", Method, 0},
+ {"(Class).GoString", Method, 0},
+ {"(Class).String", Method, 0},
+ {"(CompressionType).GoString", Method, 6},
+ {"(CompressionType).String", Method, 6},
+ {"(Data).GoString", Method, 0},
+ {"(Data).String", Method, 0},
+ {"(DynFlag).GoString", Method, 0},
+ {"(DynFlag).String", Method, 0},
+ {"(DynFlag1).GoString", Method, 21},
+ {"(DynFlag1).String", Method, 21},
+ {"(DynTag).GoString", Method, 0},
+ {"(DynTag).String", Method, 0},
+ {"(Machine).GoString", Method, 0},
+ {"(Machine).String", Method, 0},
+ {"(NType).GoString", Method, 0},
+ {"(NType).String", Method, 0},
+ {"(OSABI).GoString", Method, 0},
+ {"(OSABI).String", Method, 0},
+ {"(Prog).ReadAt", Method, 0},
+ {"(ProgFlag).GoString", Method, 0},
+ {"(ProgFlag).String", Method, 0},
+ {"(ProgType).GoString", Method, 0},
+ {"(ProgType).String", Method, 0},
+ {"(R_386).GoString", Method, 0},
+ {"(R_386).String", Method, 0},
+ {"(R_390).GoString", Method, 7},
+ {"(R_390).String", Method, 7},
+ {"(R_AARCH64).GoString", Method, 4},
+ {"(R_AARCH64).String", Method, 4},
+ {"(R_ALPHA).GoString", Method, 0},
+ {"(R_ALPHA).String", Method, 0},
+ {"(R_ARM).GoString", Method, 0},
+ {"(R_ARM).String", Method, 0},
+ {"(R_LARCH).GoString", Method, 19},
+ {"(R_LARCH).String", Method, 19},
+ {"(R_MIPS).GoString", Method, 6},
+ {"(R_MIPS).String", Method, 6},
+ {"(R_PPC).GoString", Method, 0},
+ {"(R_PPC).String", Method, 0},
+ {"(R_PPC64).GoString", Method, 5},
+ {"(R_PPC64).String", Method, 5},
+ {"(R_RISCV).GoString", Method, 11},
+ {"(R_RISCV).String", Method, 11},
+ {"(R_SPARC).GoString", Method, 0},
+ {"(R_SPARC).String", Method, 0},
+ {"(R_X86_64).GoString", Method, 0},
+ {"(R_X86_64).String", Method, 0},
+ {"(Section).ReadAt", Method, 0},
+ {"(SectionFlag).GoString", Method, 0},
+ {"(SectionFlag).String", Method, 0},
+ {"(SectionIndex).GoString", Method, 0},
+ {"(SectionIndex).String", Method, 0},
+ {"(SectionType).GoString", Method, 0},
+ {"(SectionType).String", Method, 0},
+ {"(SymBind).GoString", Method, 0},
+ {"(SymBind).String", Method, 0},
+ {"(SymType).GoString", Method, 0},
+ {"(SymType).String", Method, 0},
+ {"(SymVis).GoString", Method, 0},
+ {"(SymVis).String", Method, 0},
+ {"(Type).GoString", Method, 0},
+ {"(Type).String", Method, 0},
+ {"(Version).GoString", Method, 0},
+ {"(Version).String", Method, 0},
+ {"ARM_MAGIC_TRAMP_NUMBER", Const, 0},
+ {"COMPRESS_HIOS", Const, 6},
+ {"COMPRESS_HIPROC", Const, 6},
+ {"COMPRESS_LOOS", Const, 6},
+ {"COMPRESS_LOPROC", Const, 6},
+ {"COMPRESS_ZLIB", Const, 6},
+ {"COMPRESS_ZSTD", Const, 21},
+ {"Chdr32", Type, 6},
+ {"Chdr32.Addralign", Field, 6},
+ {"Chdr32.Size", Field, 6},
+ {"Chdr32.Type", Field, 6},
+ {"Chdr64", Type, 6},
+ {"Chdr64.Addralign", Field, 6},
+ {"Chdr64.Size", Field, 6},
+ {"Chdr64.Type", Field, 6},
+ {"Class", Type, 0},
+ {"CompressionType", Type, 6},
+ {"DF_1_CONFALT", Const, 21},
+ {"DF_1_DIRECT", Const, 21},
+ {"DF_1_DISPRELDNE", Const, 21},
+ {"DF_1_DISPRELPND", Const, 21},
+ {"DF_1_EDITED", Const, 21},
+ {"DF_1_ENDFILTEE", Const, 21},
+ {"DF_1_GLOBAL", Const, 21},
+ {"DF_1_GLOBAUDIT", Const, 21},
+ {"DF_1_GROUP", Const, 21},
+ {"DF_1_IGNMULDEF", Const, 21},
+ {"DF_1_INITFIRST", Const, 21},
+ {"DF_1_INTERPOSE", Const, 21},
+ {"DF_1_KMOD", Const, 21},
+ {"DF_1_LOADFLTR", Const, 21},
+ {"DF_1_NOCOMMON", Const, 21},
+ {"DF_1_NODEFLIB", Const, 21},
+ {"DF_1_NODELETE", Const, 21},
+ {"DF_1_NODIRECT", Const, 21},
+ {"DF_1_NODUMP", Const, 21},
+ {"DF_1_NOHDR", Const, 21},
+ {"DF_1_NOKSYMS", Const, 21},
+ {"DF_1_NOOPEN", Const, 21},
+ {"DF_1_NORELOC", Const, 21},
+ {"DF_1_NOW", Const, 21},
+ {"DF_1_ORIGIN", Const, 21},
+ {"DF_1_PIE", Const, 21},
+ {"DF_1_SINGLETON", Const, 21},
+ {"DF_1_STUB", Const, 21},
+ {"DF_1_SYMINTPOSE", Const, 21},
+ {"DF_1_TRANS", Const, 21},
+ {"DF_1_WEAKFILTER", Const, 21},
+ {"DF_BIND_NOW", Const, 0},
+ {"DF_ORIGIN", Const, 0},
+ {"DF_STATIC_TLS", Const, 0},
+ {"DF_SYMBOLIC", Const, 0},
+ {"DF_TEXTREL", Const, 0},
+ {"DT_ADDRRNGHI", Const, 16},
+ {"DT_ADDRRNGLO", Const, 16},
+ {"DT_AUDIT", Const, 16},
+ {"DT_AUXILIARY", Const, 16},
+ {"DT_BIND_NOW", Const, 0},
+ {"DT_CHECKSUM", Const, 16},
+ {"DT_CONFIG", Const, 16},
+ {"DT_DEBUG", Const, 0},
+ {"DT_DEPAUDIT", Const, 16},
+ {"DT_ENCODING", Const, 0},
+ {"DT_FEATURE", Const, 16},
+ {"DT_FILTER", Const, 16},
+ {"DT_FINI", Const, 0},
+ {"DT_FINI_ARRAY", Const, 0},
+ {"DT_FINI_ARRAYSZ", Const, 0},
+ {"DT_FLAGS", Const, 0},
+ {"DT_FLAGS_1", Const, 16},
+ {"DT_GNU_CONFLICT", Const, 16},
+ {"DT_GNU_CONFLICTSZ", Const, 16},
+ {"DT_GNU_HASH", Const, 16},
+ {"DT_GNU_LIBLIST", Const, 16},
+ {"DT_GNU_LIBLISTSZ", Const, 16},
+ {"DT_GNU_PRELINKED", Const, 16},
+ {"DT_HASH", Const, 0},
+ {"DT_HIOS", Const, 0},
+ {"DT_HIPROC", Const, 0},
+ {"DT_INIT", Const, 0},
+ {"DT_INIT_ARRAY", Const, 0},
+ {"DT_INIT_ARRAYSZ", Const, 0},
+ {"DT_JMPREL", Const, 0},
+ {"DT_LOOS", Const, 0},
+ {"DT_LOPROC", Const, 0},
+ {"DT_MIPS_AUX_DYNAMIC", Const, 16},
+ {"DT_MIPS_BASE_ADDRESS", Const, 16},
+ {"DT_MIPS_COMPACT_SIZE", Const, 16},
+ {"DT_MIPS_CONFLICT", Const, 16},
+ {"DT_MIPS_CONFLICTNO", Const, 16},
+ {"DT_MIPS_CXX_FLAGS", Const, 16},
+ {"DT_MIPS_DELTA_CLASS", Const, 16},
+ {"DT_MIPS_DELTA_CLASSSYM", Const, 16},
+ {"DT_MIPS_DELTA_CLASSSYM_NO", Const, 16},
+ {"DT_MIPS_DELTA_CLASS_NO", Const, 16},
+ {"DT_MIPS_DELTA_INSTANCE", Const, 16},
+ {"DT_MIPS_DELTA_INSTANCE_NO", Const, 16},
+ {"DT_MIPS_DELTA_RELOC", Const, 16},
+ {"DT_MIPS_DELTA_RELOC_NO", Const, 16},
+ {"DT_MIPS_DELTA_SYM", Const, 16},
+ {"DT_MIPS_DELTA_SYM_NO", Const, 16},
+ {"DT_MIPS_DYNSTR_ALIGN", Const, 16},
+ {"DT_MIPS_FLAGS", Const, 16},
+ {"DT_MIPS_GOTSYM", Const, 16},
+ {"DT_MIPS_GP_VALUE", Const, 16},
+ {"DT_MIPS_HIDDEN_GOTIDX", Const, 16},
+ {"DT_MIPS_HIPAGENO", Const, 16},
+ {"DT_MIPS_ICHECKSUM", Const, 16},
+ {"DT_MIPS_INTERFACE", Const, 16},
+ {"DT_MIPS_INTERFACE_SIZE", Const, 16},
+ {"DT_MIPS_IVERSION", Const, 16},
+ {"DT_MIPS_LIBLIST", Const, 16},
+ {"DT_MIPS_LIBLISTNO", Const, 16},
+ {"DT_MIPS_LOCALPAGE_GOTIDX", Const, 16},
+ {"DT_MIPS_LOCAL_GOTIDX", Const, 16},
+ {"DT_MIPS_LOCAL_GOTNO", Const, 16},
+ {"DT_MIPS_MSYM", Const, 16},
+ {"DT_MIPS_OPTIONS", Const, 16},
+ {"DT_MIPS_PERF_SUFFIX", Const, 16},
+ {"DT_MIPS_PIXIE_INIT", Const, 16},
+ {"DT_MIPS_PLTGOT", Const, 16},
+ {"DT_MIPS_PROTECTED_GOTIDX", Const, 16},
+ {"DT_MIPS_RLD_MAP", Const, 16},
+ {"DT_MIPS_RLD_MAP_REL", Const, 16},
+ {"DT_MIPS_RLD_TEXT_RESOLVE_ADDR", Const, 16},
+ {"DT_MIPS_RLD_VERSION", Const, 16},
+ {"DT_MIPS_RWPLT", Const, 16},
+ {"DT_MIPS_SYMBOL_LIB", Const, 16},
+ {"DT_MIPS_SYMTABNO", Const, 16},
+ {"DT_MIPS_TIME_STAMP", Const, 16},
+ {"DT_MIPS_UNREFEXTNO", Const, 16},
+ {"DT_MOVEENT", Const, 16},
+ {"DT_MOVESZ", Const, 16},
+ {"DT_MOVETAB", Const, 16},
+ {"DT_NEEDED", Const, 0},
+ {"DT_NULL", Const, 0},
+ {"DT_PLTGOT", Const, 0},
+ {"DT_PLTPAD", Const, 16},
+ {"DT_PLTPADSZ", Const, 16},
+ {"DT_PLTREL", Const, 0},
+ {"DT_PLTRELSZ", Const, 0},
+ {"DT_POSFLAG_1", Const, 16},
+ {"DT_PPC64_GLINK", Const, 16},
+ {"DT_PPC64_OPD", Const, 16},
+ {"DT_PPC64_OPDSZ", Const, 16},
+ {"DT_PPC64_OPT", Const, 16},
+ {"DT_PPC_GOT", Const, 16},
+ {"DT_PPC_OPT", Const, 16},
+ {"DT_PREINIT_ARRAY", Const, 0},
+ {"DT_PREINIT_ARRAYSZ", Const, 0},
+ {"DT_REL", Const, 0},
+ {"DT_RELA", Const, 0},
+ {"DT_RELACOUNT", Const, 16},
+ {"DT_RELAENT", Const, 0},
+ {"DT_RELASZ", Const, 0},
+ {"DT_RELCOUNT", Const, 16},
+ {"DT_RELENT", Const, 0},
+ {"DT_RELSZ", Const, 0},
+ {"DT_RPATH", Const, 0},
+ {"DT_RUNPATH", Const, 0},
+ {"DT_SONAME", Const, 0},
+ {"DT_SPARC_REGISTER", Const, 16},
+ {"DT_STRSZ", Const, 0},
+ {"DT_STRTAB", Const, 0},
+ {"DT_SYMBOLIC", Const, 0},
+ {"DT_SYMENT", Const, 0},
+ {"DT_SYMINENT", Const, 16},
+ {"DT_SYMINFO", Const, 16},
+ {"DT_SYMINSZ", Const, 16},
+ {"DT_SYMTAB", Const, 0},
+ {"DT_SYMTAB_SHNDX", Const, 16},
+ {"DT_TEXTREL", Const, 0},
+ {"DT_TLSDESC_GOT", Const, 16},
+ {"DT_TLSDESC_PLT", Const, 16},
+ {"DT_USED", Const, 16},
+ {"DT_VALRNGHI", Const, 16},
+ {"DT_VALRNGLO", Const, 16},
+ {"DT_VERDEF", Const, 16},
+ {"DT_VERDEFNUM", Const, 16},
+ {"DT_VERNEED", Const, 0},
+ {"DT_VERNEEDNUM", Const, 0},
+ {"DT_VERSYM", Const, 0},
+ {"Data", Type, 0},
+ {"Dyn32", Type, 0},
+ {"Dyn32.Tag", Field, 0},
+ {"Dyn32.Val", Field, 0},
+ {"Dyn64", Type, 0},
+ {"Dyn64.Tag", Field, 0},
+ {"Dyn64.Val", Field, 0},
+ {"DynFlag", Type, 0},
+ {"DynFlag1", Type, 21},
+ {"DynTag", Type, 0},
+ {"EI_ABIVERSION", Const, 0},
+ {"EI_CLASS", Const, 0},
+ {"EI_DATA", Const, 0},
+ {"EI_NIDENT", Const, 0},
+ {"EI_OSABI", Const, 0},
+ {"EI_PAD", Const, 0},
+ {"EI_VERSION", Const, 0},
+ {"ELFCLASS32", Const, 0},
+ {"ELFCLASS64", Const, 0},
+ {"ELFCLASSNONE", Const, 0},
+ {"ELFDATA2LSB", Const, 0},
+ {"ELFDATA2MSB", Const, 0},
+ {"ELFDATANONE", Const, 0},
+ {"ELFMAG", Const, 0},
+ {"ELFOSABI_86OPEN", Const, 0},
+ {"ELFOSABI_AIX", Const, 0},
+ {"ELFOSABI_ARM", Const, 0},
+ {"ELFOSABI_AROS", Const, 11},
+ {"ELFOSABI_CLOUDABI", Const, 11},
+ {"ELFOSABI_FENIXOS", Const, 11},
+ {"ELFOSABI_FREEBSD", Const, 0},
+ {"ELFOSABI_HPUX", Const, 0},
+ {"ELFOSABI_HURD", Const, 0},
+ {"ELFOSABI_IRIX", Const, 0},
+ {"ELFOSABI_LINUX", Const, 0},
+ {"ELFOSABI_MODESTO", Const, 0},
+ {"ELFOSABI_NETBSD", Const, 0},
+ {"ELFOSABI_NONE", Const, 0},
+ {"ELFOSABI_NSK", Const, 0},
+ {"ELFOSABI_OPENBSD", Const, 0},
+ {"ELFOSABI_OPENVMS", Const, 0},
+ {"ELFOSABI_SOLARIS", Const, 0},
+ {"ELFOSABI_STANDALONE", Const, 0},
+ {"ELFOSABI_TRU64", Const, 0},
+ {"EM_386", Const, 0},
+ {"EM_486", Const, 0},
+ {"EM_56800EX", Const, 11},
+ {"EM_68HC05", Const, 11},
+ {"EM_68HC08", Const, 11},
+ {"EM_68HC11", Const, 11},
+ {"EM_68HC12", Const, 0},
+ {"EM_68HC16", Const, 11},
+ {"EM_68K", Const, 0},
+ {"EM_78KOR", Const, 11},
+ {"EM_8051", Const, 11},
+ {"EM_860", Const, 0},
+ {"EM_88K", Const, 0},
+ {"EM_960", Const, 0},
+ {"EM_AARCH64", Const, 4},
+ {"EM_ALPHA", Const, 0},
+ {"EM_ALPHA_STD", Const, 0},
+ {"EM_ALTERA_NIOS2", Const, 11},
+ {"EM_AMDGPU", Const, 11},
+ {"EM_ARC", Const, 0},
+ {"EM_ARCA", Const, 11},
+ {"EM_ARC_COMPACT", Const, 11},
+ {"EM_ARC_COMPACT2", Const, 11},
+ {"EM_ARM", Const, 0},
+ {"EM_AVR", Const, 11},
+ {"EM_AVR32", Const, 11},
+ {"EM_BA1", Const, 11},
+ {"EM_BA2", Const, 11},
+ {"EM_BLACKFIN", Const, 11},
+ {"EM_BPF", Const, 11},
+ {"EM_C166", Const, 11},
+ {"EM_CDP", Const, 11},
+ {"EM_CE", Const, 11},
+ {"EM_CLOUDSHIELD", Const, 11},
+ {"EM_COGE", Const, 11},
+ {"EM_COLDFIRE", Const, 0},
+ {"EM_COOL", Const, 11},
+ {"EM_COREA_1ST", Const, 11},
+ {"EM_COREA_2ND", Const, 11},
+ {"EM_CR", Const, 11},
+ {"EM_CR16", Const, 11},
+ {"EM_CRAYNV2", Const, 11},
+ {"EM_CRIS", Const, 11},
+ {"EM_CRX", Const, 11},
+ {"EM_CSR_KALIMBA", Const, 11},
+ {"EM_CUDA", Const, 11},
+ {"EM_CYPRESS_M8C", Const, 11},
+ {"EM_D10V", Const, 11},
+ {"EM_D30V", Const, 11},
+ {"EM_DSP24", Const, 11},
+ {"EM_DSPIC30F", Const, 11},
+ {"EM_DXP", Const, 11},
+ {"EM_ECOG1", Const, 11},
+ {"EM_ECOG16", Const, 11},
+ {"EM_ECOG1X", Const, 11},
+ {"EM_ECOG2", Const, 11},
+ {"EM_ETPU", Const, 11},
+ {"EM_EXCESS", Const, 11},
+ {"EM_F2MC16", Const, 11},
+ {"EM_FIREPATH", Const, 11},
+ {"EM_FR20", Const, 0},
+ {"EM_FR30", Const, 11},
+ {"EM_FT32", Const, 11},
+ {"EM_FX66", Const, 11},
+ {"EM_H8S", Const, 0},
+ {"EM_H8_300", Const, 0},
+ {"EM_H8_300H", Const, 0},
+ {"EM_H8_500", Const, 0},
+ {"EM_HUANY", Const, 11},
+ {"EM_IA_64", Const, 0},
+ {"EM_INTEL205", Const, 11},
+ {"EM_INTEL206", Const, 11},
+ {"EM_INTEL207", Const, 11},
+ {"EM_INTEL208", Const, 11},
+ {"EM_INTEL209", Const, 11},
+ {"EM_IP2K", Const, 11},
+ {"EM_JAVELIN", Const, 11},
+ {"EM_K10M", Const, 11},
+ {"EM_KM32", Const, 11},
+ {"EM_KMX16", Const, 11},
+ {"EM_KMX32", Const, 11},
+ {"EM_KMX8", Const, 11},
+ {"EM_KVARC", Const, 11},
+ {"EM_L10M", Const, 11},
+ {"EM_LANAI", Const, 11},
+ {"EM_LATTICEMICO32", Const, 11},
+ {"EM_LOONGARCH", Const, 19},
+ {"EM_M16C", Const, 11},
+ {"EM_M32", Const, 0},
+ {"EM_M32C", Const, 11},
+ {"EM_M32R", Const, 11},
+ {"EM_MANIK", Const, 11},
+ {"EM_MAX", Const, 11},
+ {"EM_MAXQ30", Const, 11},
+ {"EM_MCHP_PIC", Const, 11},
+ {"EM_MCST_ELBRUS", Const, 11},
+ {"EM_ME16", Const, 0},
+ {"EM_METAG", Const, 11},
+ {"EM_MICROBLAZE", Const, 11},
+ {"EM_MIPS", Const, 0},
+ {"EM_MIPS_RS3_LE", Const, 0},
+ {"EM_MIPS_RS4_BE", Const, 0},
+ {"EM_MIPS_X", Const, 0},
+ {"EM_MMA", Const, 0},
+ {"EM_MMDSP_PLUS", Const, 11},
+ {"EM_MMIX", Const, 11},
+ {"EM_MN10200", Const, 11},
+ {"EM_MN10300", Const, 11},
+ {"EM_MOXIE", Const, 11},
+ {"EM_MSP430", Const, 11},
+ {"EM_NCPU", Const, 0},
+ {"EM_NDR1", Const, 0},
+ {"EM_NDS32", Const, 11},
+ {"EM_NONE", Const, 0},
+ {"EM_NORC", Const, 11},
+ {"EM_NS32K", Const, 11},
+ {"EM_OPEN8", Const, 11},
+ {"EM_OPENRISC", Const, 11},
+ {"EM_PARISC", Const, 0},
+ {"EM_PCP", Const, 0},
+ {"EM_PDP10", Const, 11},
+ {"EM_PDP11", Const, 11},
+ {"EM_PDSP", Const, 11},
+ {"EM_PJ", Const, 11},
+ {"EM_PPC", Const, 0},
+ {"EM_PPC64", Const, 0},
+ {"EM_PRISM", Const, 11},
+ {"EM_QDSP6", Const, 11},
+ {"EM_R32C", Const, 11},
+ {"EM_RCE", Const, 0},
+ {"EM_RH32", Const, 0},
+ {"EM_RISCV", Const, 11},
+ {"EM_RL78", Const, 11},
+ {"EM_RS08", Const, 11},
+ {"EM_RX", Const, 11},
+ {"EM_S370", Const, 0},
+ {"EM_S390", Const, 0},
+ {"EM_SCORE7", Const, 11},
+ {"EM_SEP", Const, 11},
+ {"EM_SE_C17", Const, 11},
+ {"EM_SE_C33", Const, 11},
+ {"EM_SH", Const, 0},
+ {"EM_SHARC", Const, 11},
+ {"EM_SLE9X", Const, 11},
+ {"EM_SNP1K", Const, 11},
+ {"EM_SPARC", Const, 0},
+ {"EM_SPARC32PLUS", Const, 0},
+ {"EM_SPARCV9", Const, 0},
+ {"EM_ST100", Const, 0},
+ {"EM_ST19", Const, 11},
+ {"EM_ST200", Const, 11},
+ {"EM_ST7", Const, 11},
+ {"EM_ST9PLUS", Const, 11},
+ {"EM_STARCORE", Const, 0},
+ {"EM_STM8", Const, 11},
+ {"EM_STXP7X", Const, 11},
+ {"EM_SVX", Const, 11},
+ {"EM_TILE64", Const, 11},
+ {"EM_TILEGX", Const, 11},
+ {"EM_TILEPRO", Const, 11},
+ {"EM_TINYJ", Const, 0},
+ {"EM_TI_ARP32", Const, 11},
+ {"EM_TI_C2000", Const, 11},
+ {"EM_TI_C5500", Const, 11},
+ {"EM_TI_C6000", Const, 11},
+ {"EM_TI_PRU", Const, 11},
+ {"EM_TMM_GPP", Const, 11},
+ {"EM_TPC", Const, 11},
+ {"EM_TRICORE", Const, 0},
+ {"EM_TRIMEDIA", Const, 11},
+ {"EM_TSK3000", Const, 11},
+ {"EM_UNICORE", Const, 11},
+ {"EM_V800", Const, 0},
+ {"EM_V850", Const, 11},
+ {"EM_VAX", Const, 11},
+ {"EM_VIDEOCORE", Const, 11},
+ {"EM_VIDEOCORE3", Const, 11},
+ {"EM_VIDEOCORE5", Const, 11},
+ {"EM_VISIUM", Const, 11},
+ {"EM_VPP500", Const, 0},
+ {"EM_X86_64", Const, 0},
+ {"EM_XCORE", Const, 11},
+ {"EM_XGATE", Const, 11},
+ {"EM_XIMO16", Const, 11},
+ {"EM_XTENSA", Const, 11},
+ {"EM_Z80", Const, 11},
+ {"EM_ZSP", Const, 11},
+ {"ET_CORE", Const, 0},
+ {"ET_DYN", Const, 0},
+ {"ET_EXEC", Const, 0},
+ {"ET_HIOS", Const, 0},
+ {"ET_HIPROC", Const, 0},
+ {"ET_LOOS", Const, 0},
+ {"ET_LOPROC", Const, 0},
+ {"ET_NONE", Const, 0},
+ {"ET_REL", Const, 0},
+ {"EV_CURRENT", Const, 0},
+ {"EV_NONE", Const, 0},
+ {"ErrNoSymbols", Var, 4},
+ {"File", Type, 0},
+ {"File.FileHeader", Field, 0},
+ {"File.Progs", Field, 0},
+ {"File.Sections", Field, 0},
+ {"FileHeader", Type, 0},
+ {"FileHeader.ABIVersion", Field, 0},
+ {"FileHeader.ByteOrder", Field, 0},
+ {"FileHeader.Class", Field, 0},
+ {"FileHeader.Data", Field, 0},
+ {"FileHeader.Entry", Field, 1},
+ {"FileHeader.Machine", Field, 0},
+ {"FileHeader.OSABI", Field, 0},
+ {"FileHeader.Type", Field, 0},
+ {"FileHeader.Version", Field, 0},
+ {"FormatError", Type, 0},
+ {"Header32", Type, 0},
+ {"Header32.Ehsize", Field, 0},
+ {"Header32.Entry", Field, 0},
+ {"Header32.Flags", Field, 0},
+ {"Header32.Ident", Field, 0},
+ {"Header32.Machine", Field, 0},
+ {"Header32.Phentsize", Field, 0},
+ {"Header32.Phnum", Field, 0},
+ {"Header32.Phoff", Field, 0},
+ {"Header32.Shentsize", Field, 0},
+ {"Header32.Shnum", Field, 0},
+ {"Header32.Shoff", Field, 0},
+ {"Header32.Shstrndx", Field, 0},
+ {"Header32.Type", Field, 0},
+ {"Header32.Version", Field, 0},
+ {"Header64", Type, 0},
+ {"Header64.Ehsize", Field, 0},
+ {"Header64.Entry", Field, 0},
+ {"Header64.Flags", Field, 0},
+ {"Header64.Ident", Field, 0},
+ {"Header64.Machine", Field, 0},
+ {"Header64.Phentsize", Field, 0},
+ {"Header64.Phnum", Field, 0},
+ {"Header64.Phoff", Field, 0},
+ {"Header64.Shentsize", Field, 0},
+ {"Header64.Shnum", Field, 0},
+ {"Header64.Shoff", Field, 0},
+ {"Header64.Shstrndx", Field, 0},
+ {"Header64.Type", Field, 0},
+ {"Header64.Version", Field, 0},
+ {"ImportedSymbol", Type, 0},
+ {"ImportedSymbol.Library", Field, 0},
+ {"ImportedSymbol.Name", Field, 0},
+ {"ImportedSymbol.Version", Field, 0},
+ {"Machine", Type, 0},
+ {"NT_FPREGSET", Const, 0},
+ {"NT_PRPSINFO", Const, 0},
+ {"NT_PRSTATUS", Const, 0},
+ {"NType", Type, 0},
+ {"NewFile", Func, 0},
+ {"OSABI", Type, 0},
+ {"Open", Func, 0},
+ {"PF_MASKOS", Const, 0},
+ {"PF_MASKPROC", Const, 0},
+ {"PF_R", Const, 0},
+ {"PF_W", Const, 0},
+ {"PF_X", Const, 0},
+ {"PT_AARCH64_ARCHEXT", Const, 16},
+ {"PT_AARCH64_UNWIND", Const, 16},
+ {"PT_ARM_ARCHEXT", Const, 16},
+ {"PT_ARM_EXIDX", Const, 16},
+ {"PT_DYNAMIC", Const, 0},
+ {"PT_GNU_EH_FRAME", Const, 16},
+ {"PT_GNU_MBIND_HI", Const, 16},
+ {"PT_GNU_MBIND_LO", Const, 16},
+ {"PT_GNU_PROPERTY", Const, 16},
+ {"PT_GNU_RELRO", Const, 16},
+ {"PT_GNU_STACK", Const, 16},
+ {"PT_HIOS", Const, 0},
+ {"PT_HIPROC", Const, 0},
+ {"PT_INTERP", Const, 0},
+ {"PT_LOAD", Const, 0},
+ {"PT_LOOS", Const, 0},
+ {"PT_LOPROC", Const, 0},
+ {"PT_MIPS_ABIFLAGS", Const, 16},
+ {"PT_MIPS_OPTIONS", Const, 16},
+ {"PT_MIPS_REGINFO", Const, 16},
+ {"PT_MIPS_RTPROC", Const, 16},
+ {"PT_NOTE", Const, 0},
+ {"PT_NULL", Const, 0},
+ {"PT_OPENBSD_BOOTDATA", Const, 16},
+ {"PT_OPENBSD_NOBTCFI", Const, 23},
+ {"PT_OPENBSD_RANDOMIZE", Const, 16},
+ {"PT_OPENBSD_WXNEEDED", Const, 16},
+ {"PT_PAX_FLAGS", Const, 16},
+ {"PT_PHDR", Const, 0},
+ {"PT_S390_PGSTE", Const, 16},
+ {"PT_SHLIB", Const, 0},
+ {"PT_SUNWSTACK", Const, 16},
+ {"PT_SUNW_EH_FRAME", Const, 16},
+ {"PT_TLS", Const, 0},
+ {"Prog", Type, 0},
+ {"Prog.ProgHeader", Field, 0},
+ {"Prog.ReaderAt", Field, 0},
+ {"Prog32", Type, 0},
+ {"Prog32.Align", Field, 0},
+ {"Prog32.Filesz", Field, 0},
+ {"Prog32.Flags", Field, 0},
+ {"Prog32.Memsz", Field, 0},
+ {"Prog32.Off", Field, 0},
+ {"Prog32.Paddr", Field, 0},
+ {"Prog32.Type", Field, 0},
+ {"Prog32.Vaddr", Field, 0},
+ {"Prog64", Type, 0},
+ {"Prog64.Align", Field, 0},
+ {"Prog64.Filesz", Field, 0},
+ {"Prog64.Flags", Field, 0},
+ {"Prog64.Memsz", Field, 0},
+ {"Prog64.Off", Field, 0},
+ {"Prog64.Paddr", Field, 0},
+ {"Prog64.Type", Field, 0},
+ {"Prog64.Vaddr", Field, 0},
+ {"ProgFlag", Type, 0},
+ {"ProgHeader", Type, 0},
+ {"ProgHeader.Align", Field, 0},
+ {"ProgHeader.Filesz", Field, 0},
+ {"ProgHeader.Flags", Field, 0},
+ {"ProgHeader.Memsz", Field, 0},
+ {"ProgHeader.Off", Field, 0},
+ {"ProgHeader.Paddr", Field, 0},
+ {"ProgHeader.Type", Field, 0},
+ {"ProgHeader.Vaddr", Field, 0},
+ {"ProgType", Type, 0},
+ {"R_386", Type, 0},
+ {"R_386_16", Const, 10},
+ {"R_386_32", Const, 0},
+ {"R_386_32PLT", Const, 10},
+ {"R_386_8", Const, 10},
+ {"R_386_COPY", Const, 0},
+ {"R_386_GLOB_DAT", Const, 0},
+ {"R_386_GOT32", Const, 0},
+ {"R_386_GOT32X", Const, 10},
+ {"R_386_GOTOFF", Const, 0},
+ {"R_386_GOTPC", Const, 0},
+ {"R_386_IRELATIVE", Const, 10},
+ {"R_386_JMP_SLOT", Const, 0},
+ {"R_386_NONE", Const, 0},
+ {"R_386_PC16", Const, 10},
+ {"R_386_PC32", Const, 0},
+ {"R_386_PC8", Const, 10},
+ {"R_386_PLT32", Const, 0},
+ {"R_386_RELATIVE", Const, 0},
+ {"R_386_SIZE32", Const, 10},
+ {"R_386_TLS_DESC", Const, 10},
+ {"R_386_TLS_DESC_CALL", Const, 10},
+ {"R_386_TLS_DTPMOD32", Const, 0},
+ {"R_386_TLS_DTPOFF32", Const, 0},
+ {"R_386_TLS_GD", Const, 0},
+ {"R_386_TLS_GD_32", Const, 0},
+ {"R_386_TLS_GD_CALL", Const, 0},
+ {"R_386_TLS_GD_POP", Const, 0},
+ {"R_386_TLS_GD_PUSH", Const, 0},
+ {"R_386_TLS_GOTDESC", Const, 10},
+ {"R_386_TLS_GOTIE", Const, 0},
+ {"R_386_TLS_IE", Const, 0},
+ {"R_386_TLS_IE_32", Const, 0},
+ {"R_386_TLS_LDM", Const, 0},
+ {"R_386_TLS_LDM_32", Const, 0},
+ {"R_386_TLS_LDM_CALL", Const, 0},
+ {"R_386_TLS_LDM_POP", Const, 0},
+ {"R_386_TLS_LDM_PUSH", Const, 0},
+ {"R_386_TLS_LDO_32", Const, 0},
+ {"R_386_TLS_LE", Const, 0},
+ {"R_386_TLS_LE_32", Const, 0},
+ {"R_386_TLS_TPOFF", Const, 0},
+ {"R_386_TLS_TPOFF32", Const, 0},
+ {"R_390", Type, 7},
+ {"R_390_12", Const, 7},
+ {"R_390_16", Const, 7},
+ {"R_390_20", Const, 7},
+ {"R_390_32", Const, 7},
+ {"R_390_64", Const, 7},
+ {"R_390_8", Const, 7},
+ {"R_390_COPY", Const, 7},
+ {"R_390_GLOB_DAT", Const, 7},
+ {"R_390_GOT12", Const, 7},
+ {"R_390_GOT16", Const, 7},
+ {"R_390_GOT20", Const, 7},
+ {"R_390_GOT32", Const, 7},
+ {"R_390_GOT64", Const, 7},
+ {"R_390_GOTENT", Const, 7},
+ {"R_390_GOTOFF", Const, 7},
+ {"R_390_GOTOFF16", Const, 7},
+ {"R_390_GOTOFF64", Const, 7},
+ {"R_390_GOTPC", Const, 7},
+ {"R_390_GOTPCDBL", Const, 7},
+ {"R_390_GOTPLT12", Const, 7},
+ {"R_390_GOTPLT16", Const, 7},
+ {"R_390_GOTPLT20", Const, 7},
+ {"R_390_GOTPLT32", Const, 7},
+ {"R_390_GOTPLT64", Const, 7},
+ {"R_390_GOTPLTENT", Const, 7},
+ {"R_390_GOTPLTOFF16", Const, 7},
+ {"R_390_GOTPLTOFF32", Const, 7},
+ {"R_390_GOTPLTOFF64", Const, 7},
+ {"R_390_JMP_SLOT", Const, 7},
+ {"R_390_NONE", Const, 7},
+ {"R_390_PC16", Const, 7},
+ {"R_390_PC16DBL", Const, 7},
+ {"R_390_PC32", Const, 7},
+ {"R_390_PC32DBL", Const, 7},
+ {"R_390_PC64", Const, 7},
+ {"R_390_PLT16DBL", Const, 7},
+ {"R_390_PLT32", Const, 7},
+ {"R_390_PLT32DBL", Const, 7},
+ {"R_390_PLT64", Const, 7},
+ {"R_390_RELATIVE", Const, 7},
+ {"R_390_TLS_DTPMOD", Const, 7},
+ {"R_390_TLS_DTPOFF", Const, 7},
+ {"R_390_TLS_GD32", Const, 7},
+ {"R_390_TLS_GD64", Const, 7},
+ {"R_390_TLS_GDCALL", Const, 7},
+ {"R_390_TLS_GOTIE12", Const, 7},
+ {"R_390_TLS_GOTIE20", Const, 7},
+ {"R_390_TLS_GOTIE32", Const, 7},
+ {"R_390_TLS_GOTIE64", Const, 7},
+ {"R_390_TLS_IE32", Const, 7},
+ {"R_390_TLS_IE64", Const, 7},
+ {"R_390_TLS_IEENT", Const, 7},
+ {"R_390_TLS_LDCALL", Const, 7},
+ {"R_390_TLS_LDM32", Const, 7},
+ {"R_390_TLS_LDM64", Const, 7},
+ {"R_390_TLS_LDO32", Const, 7},
+ {"R_390_TLS_LDO64", Const, 7},
+ {"R_390_TLS_LE32", Const, 7},
+ {"R_390_TLS_LE64", Const, 7},
+ {"R_390_TLS_LOAD", Const, 7},
+ {"R_390_TLS_TPOFF", Const, 7},
+ {"R_AARCH64", Type, 4},
+ {"R_AARCH64_ABS16", Const, 4},
+ {"R_AARCH64_ABS32", Const, 4},
+ {"R_AARCH64_ABS64", Const, 4},
+ {"R_AARCH64_ADD_ABS_LO12_NC", Const, 4},
+ {"R_AARCH64_ADR_GOT_PAGE", Const, 4},
+ {"R_AARCH64_ADR_PREL_LO21", Const, 4},
+ {"R_AARCH64_ADR_PREL_PG_HI21", Const, 4},
+ {"R_AARCH64_ADR_PREL_PG_HI21_NC", Const, 4},
+ {"R_AARCH64_CALL26", Const, 4},
+ {"R_AARCH64_CONDBR19", Const, 4},
+ {"R_AARCH64_COPY", Const, 4},
+ {"R_AARCH64_GLOB_DAT", Const, 4},
+ {"R_AARCH64_GOT_LD_PREL19", Const, 4},
+ {"R_AARCH64_IRELATIVE", Const, 4},
+ {"R_AARCH64_JUMP26", Const, 4},
+ {"R_AARCH64_JUMP_SLOT", Const, 4},
+ {"R_AARCH64_LD64_GOTOFF_LO15", Const, 10},
+ {"R_AARCH64_LD64_GOTPAGE_LO15", Const, 10},
+ {"R_AARCH64_LD64_GOT_LO12_NC", Const, 4},
+ {"R_AARCH64_LDST128_ABS_LO12_NC", Const, 4},
+ {"R_AARCH64_LDST16_ABS_LO12_NC", Const, 4},
+ {"R_AARCH64_LDST32_ABS_LO12_NC", Const, 4},
+ {"R_AARCH64_LDST64_ABS_LO12_NC", Const, 4},
+ {"R_AARCH64_LDST8_ABS_LO12_NC", Const, 4},
+ {"R_AARCH64_LD_PREL_LO19", Const, 4},
+ {"R_AARCH64_MOVW_SABS_G0", Const, 4},
+ {"R_AARCH64_MOVW_SABS_G1", Const, 4},
+ {"R_AARCH64_MOVW_SABS_G2", Const, 4},
+ {"R_AARCH64_MOVW_UABS_G0", Const, 4},
+ {"R_AARCH64_MOVW_UABS_G0_NC", Const, 4},
+ {"R_AARCH64_MOVW_UABS_G1", Const, 4},
+ {"R_AARCH64_MOVW_UABS_G1_NC", Const, 4},
+ {"R_AARCH64_MOVW_UABS_G2", Const, 4},
+ {"R_AARCH64_MOVW_UABS_G2_NC", Const, 4},
+ {"R_AARCH64_MOVW_UABS_G3", Const, 4},
+ {"R_AARCH64_NONE", Const, 4},
+ {"R_AARCH64_NULL", Const, 4},
+ {"R_AARCH64_P32_ABS16", Const, 4},
+ {"R_AARCH64_P32_ABS32", Const, 4},
+ {"R_AARCH64_P32_ADD_ABS_LO12_NC", Const, 4},
+ {"R_AARCH64_P32_ADR_GOT_PAGE", Const, 4},
+ {"R_AARCH64_P32_ADR_PREL_LO21", Const, 4},
+ {"R_AARCH64_P32_ADR_PREL_PG_HI21", Const, 4},
+ {"R_AARCH64_P32_CALL26", Const, 4},
+ {"R_AARCH64_P32_CONDBR19", Const, 4},
+ {"R_AARCH64_P32_COPY", Const, 4},
+ {"R_AARCH64_P32_GLOB_DAT", Const, 4},
+ {"R_AARCH64_P32_GOT_LD_PREL19", Const, 4},
+ {"R_AARCH64_P32_IRELATIVE", Const, 4},
+ {"R_AARCH64_P32_JUMP26", Const, 4},
+ {"R_AARCH64_P32_JUMP_SLOT", Const, 4},
+ {"R_AARCH64_P32_LD32_GOT_LO12_NC", Const, 4},
+ {"R_AARCH64_P32_LDST128_ABS_LO12_NC", Const, 4},
+ {"R_AARCH64_P32_LDST16_ABS_LO12_NC", Const, 4},
+ {"R_AARCH64_P32_LDST32_ABS_LO12_NC", Const, 4},
+ {"R_AARCH64_P32_LDST64_ABS_LO12_NC", Const, 4},
+ {"R_AARCH64_P32_LDST8_ABS_LO12_NC", Const, 4},
+ {"R_AARCH64_P32_LD_PREL_LO19", Const, 4},
+ {"R_AARCH64_P32_MOVW_SABS_G0", Const, 4},
+ {"R_AARCH64_P32_MOVW_UABS_G0", Const, 4},
+ {"R_AARCH64_P32_MOVW_UABS_G0_NC", Const, 4},
+ {"R_AARCH64_P32_MOVW_UABS_G1", Const, 4},
+ {"R_AARCH64_P32_PREL16", Const, 4},
+ {"R_AARCH64_P32_PREL32", Const, 4},
+ {"R_AARCH64_P32_RELATIVE", Const, 4},
+ {"R_AARCH64_P32_TLSDESC", Const, 4},
+ {"R_AARCH64_P32_TLSDESC_ADD_LO12_NC", Const, 4},
+ {"R_AARCH64_P32_TLSDESC_ADR_PAGE21", Const, 4},
+ {"R_AARCH64_P32_TLSDESC_ADR_PREL21", Const, 4},
+ {"R_AARCH64_P32_TLSDESC_CALL", Const, 4},
+ {"R_AARCH64_P32_TLSDESC_LD32_LO12_NC", Const, 4},
+ {"R_AARCH64_P32_TLSDESC_LD_PREL19", Const, 4},
+ {"R_AARCH64_P32_TLSGD_ADD_LO12_NC", Const, 4},
+ {"R_AARCH64_P32_TLSGD_ADR_PAGE21", Const, 4},
+ {"R_AARCH64_P32_TLSIE_ADR_GOTTPREL_PAGE21", Const, 4},
+ {"R_AARCH64_P32_TLSIE_LD32_GOTTPREL_LO12_NC", Const, 4},
+ {"R_AARCH64_P32_TLSIE_LD_GOTTPREL_PREL19", Const, 4},
+ {"R_AARCH64_P32_TLSLE_ADD_TPREL_HI12", Const, 4},
+ {"R_AARCH64_P32_TLSLE_ADD_TPREL_LO12", Const, 4},
+ {"R_AARCH64_P32_TLSLE_ADD_TPREL_LO12_NC", Const, 4},
+ {"R_AARCH64_P32_TLSLE_MOVW_TPREL_G0", Const, 4},
+ {"R_AARCH64_P32_TLSLE_MOVW_TPREL_G0_NC", Const, 4},
+ {"R_AARCH64_P32_TLSLE_MOVW_TPREL_G1", Const, 4},
+ {"R_AARCH64_P32_TLS_DTPMOD", Const, 4},
+ {"R_AARCH64_P32_TLS_DTPREL", Const, 4},
+ {"R_AARCH64_P32_TLS_TPREL", Const, 4},
+ {"R_AARCH64_P32_TSTBR14", Const, 4},
+ {"R_AARCH64_PREL16", Const, 4},
+ {"R_AARCH64_PREL32", Const, 4},
+ {"R_AARCH64_PREL64", Const, 4},
+ {"R_AARCH64_RELATIVE", Const, 4},
+ {"R_AARCH64_TLSDESC", Const, 4},
+ {"R_AARCH64_TLSDESC_ADD", Const, 4},
+ {"R_AARCH64_TLSDESC_ADD_LO12_NC", Const, 4},
+ {"R_AARCH64_TLSDESC_ADR_PAGE21", Const, 4},
+ {"R_AARCH64_TLSDESC_ADR_PREL21", Const, 4},
+ {"R_AARCH64_TLSDESC_CALL", Const, 4},
+ {"R_AARCH64_TLSDESC_LD64_LO12_NC", Const, 4},
+ {"R_AARCH64_TLSDESC_LDR", Const, 4},
+ {"R_AARCH64_TLSDESC_LD_PREL19", Const, 4},
+ {"R_AARCH64_TLSDESC_OFF_G0_NC", Const, 4},
+ {"R_AARCH64_TLSDESC_OFF_G1", Const, 4},
+ {"R_AARCH64_TLSGD_ADD_LO12_NC", Const, 4},
+ {"R_AARCH64_TLSGD_ADR_PAGE21", Const, 4},
+ {"R_AARCH64_TLSGD_ADR_PREL21", Const, 10},
+ {"R_AARCH64_TLSGD_MOVW_G0_NC", Const, 10},
+ {"R_AARCH64_TLSGD_MOVW_G1", Const, 10},
+ {"R_AARCH64_TLSIE_ADR_GOTTPREL_PAGE21", Const, 4},
+ {"R_AARCH64_TLSIE_LD64_GOTTPREL_LO12_NC", Const, 4},
+ {"R_AARCH64_TLSIE_LD_GOTTPREL_PREL19", Const, 4},
+ {"R_AARCH64_TLSIE_MOVW_GOTTPREL_G0_NC", Const, 4},
+ {"R_AARCH64_TLSIE_MOVW_GOTTPREL_G1", Const, 4},
+ {"R_AARCH64_TLSLD_ADR_PAGE21", Const, 10},
+ {"R_AARCH64_TLSLD_ADR_PREL21", Const, 10},
+ {"R_AARCH64_TLSLD_LDST128_DTPREL_LO12", Const, 10},
+ {"R_AARCH64_TLSLD_LDST128_DTPREL_LO12_NC", Const, 10},
+ {"R_AARCH64_TLSLE_ADD_TPREL_HI12", Const, 4},
+ {"R_AARCH64_TLSLE_ADD_TPREL_LO12", Const, 4},
+ {"R_AARCH64_TLSLE_ADD_TPREL_LO12_NC", Const, 4},
+ {"R_AARCH64_TLSLE_LDST128_TPREL_LO12", Const, 10},
+ {"R_AARCH64_TLSLE_LDST128_TPREL_LO12_NC", Const, 10},
+ {"R_AARCH64_TLSLE_MOVW_TPREL_G0", Const, 4},
+ {"R_AARCH64_TLSLE_MOVW_TPREL_G0_NC", Const, 4},
+ {"R_AARCH64_TLSLE_MOVW_TPREL_G1", Const, 4},
+ {"R_AARCH64_TLSLE_MOVW_TPREL_G1_NC", Const, 4},
+ {"R_AARCH64_TLSLE_MOVW_TPREL_G2", Const, 4},
+ {"R_AARCH64_TLS_DTPMOD64", Const, 4},
+ {"R_AARCH64_TLS_DTPREL64", Const, 4},
+ {"R_AARCH64_TLS_TPREL64", Const, 4},
+ {"R_AARCH64_TSTBR14", Const, 4},
+ {"R_ALPHA", Type, 0},
+ {"R_ALPHA_BRADDR", Const, 0},
+ {"R_ALPHA_COPY", Const, 0},
+ {"R_ALPHA_GLOB_DAT", Const, 0},
+ {"R_ALPHA_GPDISP", Const, 0},
+ {"R_ALPHA_GPREL32", Const, 0},
+ {"R_ALPHA_GPRELHIGH", Const, 0},
+ {"R_ALPHA_GPRELLOW", Const, 0},
+ {"R_ALPHA_GPVALUE", Const, 0},
+ {"R_ALPHA_HINT", Const, 0},
+ {"R_ALPHA_IMMED_BR_HI32", Const, 0},
+ {"R_ALPHA_IMMED_GP_16", Const, 0},
+ {"R_ALPHA_IMMED_GP_HI32", Const, 0},
+ {"R_ALPHA_IMMED_LO32", Const, 0},
+ {"R_ALPHA_IMMED_SCN_HI32", Const, 0},
+ {"R_ALPHA_JMP_SLOT", Const, 0},
+ {"R_ALPHA_LITERAL", Const, 0},
+ {"R_ALPHA_LITUSE", Const, 0},
+ {"R_ALPHA_NONE", Const, 0},
+ {"R_ALPHA_OP_PRSHIFT", Const, 0},
+ {"R_ALPHA_OP_PSUB", Const, 0},
+ {"R_ALPHA_OP_PUSH", Const, 0},
+ {"R_ALPHA_OP_STORE", Const, 0},
+ {"R_ALPHA_REFLONG", Const, 0},
+ {"R_ALPHA_REFQUAD", Const, 0},
+ {"R_ALPHA_RELATIVE", Const, 0},
+ {"R_ALPHA_SREL16", Const, 0},
+ {"R_ALPHA_SREL32", Const, 0},
+ {"R_ALPHA_SREL64", Const, 0},
+ {"R_ARM", Type, 0},
+ {"R_ARM_ABS12", Const, 0},
+ {"R_ARM_ABS16", Const, 0},
+ {"R_ARM_ABS32", Const, 0},
+ {"R_ARM_ABS32_NOI", Const, 10},
+ {"R_ARM_ABS8", Const, 0},
+ {"R_ARM_ALU_PCREL_15_8", Const, 10},
+ {"R_ARM_ALU_PCREL_23_15", Const, 10},
+ {"R_ARM_ALU_PCREL_7_0", Const, 10},
+ {"R_ARM_ALU_PC_G0", Const, 10},
+ {"R_ARM_ALU_PC_G0_NC", Const, 10},
+ {"R_ARM_ALU_PC_G1", Const, 10},
+ {"R_ARM_ALU_PC_G1_NC", Const, 10},
+ {"R_ARM_ALU_PC_G2", Const, 10},
+ {"R_ARM_ALU_SBREL_19_12_NC", Const, 10},
+ {"R_ARM_ALU_SBREL_27_20_CK", Const, 10},
+ {"R_ARM_ALU_SB_G0", Const, 10},
+ {"R_ARM_ALU_SB_G0_NC", Const, 10},
+ {"R_ARM_ALU_SB_G1", Const, 10},
+ {"R_ARM_ALU_SB_G1_NC", Const, 10},
+ {"R_ARM_ALU_SB_G2", Const, 10},
+ {"R_ARM_AMP_VCALL9", Const, 0},
+ {"R_ARM_BASE_ABS", Const, 10},
+ {"R_ARM_CALL", Const, 10},
+ {"R_ARM_COPY", Const, 0},
+ {"R_ARM_GLOB_DAT", Const, 0},
+ {"R_ARM_GNU_VTENTRY", Const, 0},
+ {"R_ARM_GNU_VTINHERIT", Const, 0},
+ {"R_ARM_GOT32", Const, 0},
+ {"R_ARM_GOTOFF", Const, 0},
+ {"R_ARM_GOTOFF12", Const, 10},
+ {"R_ARM_GOTPC", Const, 0},
+ {"R_ARM_GOTRELAX", Const, 10},
+ {"R_ARM_GOT_ABS", Const, 10},
+ {"R_ARM_GOT_BREL12", Const, 10},
+ {"R_ARM_GOT_PREL", Const, 10},
+ {"R_ARM_IRELATIVE", Const, 10},
+ {"R_ARM_JUMP24", Const, 10},
+ {"R_ARM_JUMP_SLOT", Const, 0},
+ {"R_ARM_LDC_PC_G0", Const, 10},
+ {"R_ARM_LDC_PC_G1", Const, 10},
+ {"R_ARM_LDC_PC_G2", Const, 10},
+ {"R_ARM_LDC_SB_G0", Const, 10},
+ {"R_ARM_LDC_SB_G1", Const, 10},
+ {"R_ARM_LDC_SB_G2", Const, 10},
+ {"R_ARM_LDRS_PC_G0", Const, 10},
+ {"R_ARM_LDRS_PC_G1", Const, 10},
+ {"R_ARM_LDRS_PC_G2", Const, 10},
+ {"R_ARM_LDRS_SB_G0", Const, 10},
+ {"R_ARM_LDRS_SB_G1", Const, 10},
+ {"R_ARM_LDRS_SB_G2", Const, 10},
+ {"R_ARM_LDR_PC_G1", Const, 10},
+ {"R_ARM_LDR_PC_G2", Const, 10},
+ {"R_ARM_LDR_SBREL_11_10_NC", Const, 10},
+ {"R_ARM_LDR_SB_G0", Const, 10},
+ {"R_ARM_LDR_SB_G1", Const, 10},
+ {"R_ARM_LDR_SB_G2", Const, 10},
+ {"R_ARM_ME_TOO", Const, 10},
+ {"R_ARM_MOVT_ABS", Const, 10},
+ {"R_ARM_MOVT_BREL", Const, 10},
+ {"R_ARM_MOVT_PREL", Const, 10},
+ {"R_ARM_MOVW_ABS_NC", Const, 10},
+ {"R_ARM_MOVW_BREL", Const, 10},
+ {"R_ARM_MOVW_BREL_NC", Const, 10},
+ {"R_ARM_MOVW_PREL_NC", Const, 10},
+ {"R_ARM_NONE", Const, 0},
+ {"R_ARM_PC13", Const, 0},
+ {"R_ARM_PC24", Const, 0},
+ {"R_ARM_PLT32", Const, 0},
+ {"R_ARM_PLT32_ABS", Const, 10},
+ {"R_ARM_PREL31", Const, 10},
+ {"R_ARM_PRIVATE_0", Const, 10},
+ {"R_ARM_PRIVATE_1", Const, 10},
+ {"R_ARM_PRIVATE_10", Const, 10},
+ {"R_ARM_PRIVATE_11", Const, 10},
+ {"R_ARM_PRIVATE_12", Const, 10},
+ {"R_ARM_PRIVATE_13", Const, 10},
+ {"R_ARM_PRIVATE_14", Const, 10},
+ {"R_ARM_PRIVATE_15", Const, 10},
+ {"R_ARM_PRIVATE_2", Const, 10},
+ {"R_ARM_PRIVATE_3", Const, 10},
+ {"R_ARM_PRIVATE_4", Const, 10},
+ {"R_ARM_PRIVATE_5", Const, 10},
+ {"R_ARM_PRIVATE_6", Const, 10},
+ {"R_ARM_PRIVATE_7", Const, 10},
+ {"R_ARM_PRIVATE_8", Const, 10},
+ {"R_ARM_PRIVATE_9", Const, 10},
+ {"R_ARM_RABS32", Const, 0},
+ {"R_ARM_RBASE", Const, 0},
+ {"R_ARM_REL32", Const, 0},
+ {"R_ARM_REL32_NOI", Const, 10},
+ {"R_ARM_RELATIVE", Const, 0},
+ {"R_ARM_RPC24", Const, 0},
+ {"R_ARM_RREL32", Const, 0},
+ {"R_ARM_RSBREL32", Const, 0},
+ {"R_ARM_RXPC25", Const, 10},
+ {"R_ARM_SBREL31", Const, 10},
+ {"R_ARM_SBREL32", Const, 0},
+ {"R_ARM_SWI24", Const, 0},
+ {"R_ARM_TARGET1", Const, 10},
+ {"R_ARM_TARGET2", Const, 10},
+ {"R_ARM_THM_ABS5", Const, 0},
+ {"R_ARM_THM_ALU_ABS_G0_NC", Const, 10},
+ {"R_ARM_THM_ALU_ABS_G1_NC", Const, 10},
+ {"R_ARM_THM_ALU_ABS_G2_NC", Const, 10},
+ {"R_ARM_THM_ALU_ABS_G3", Const, 10},
+ {"R_ARM_THM_ALU_PREL_11_0", Const, 10},
+ {"R_ARM_THM_GOT_BREL12", Const, 10},
+ {"R_ARM_THM_JUMP11", Const, 10},
+ {"R_ARM_THM_JUMP19", Const, 10},
+ {"R_ARM_THM_JUMP24", Const, 10},
+ {"R_ARM_THM_JUMP6", Const, 10},
+ {"R_ARM_THM_JUMP8", Const, 10},
+ {"R_ARM_THM_MOVT_ABS", Const, 10},
+ {"R_ARM_THM_MOVT_BREL", Const, 10},
+ {"R_ARM_THM_MOVT_PREL", Const, 10},
+ {"R_ARM_THM_MOVW_ABS_NC", Const, 10},
+ {"R_ARM_THM_MOVW_BREL", Const, 10},
+ {"R_ARM_THM_MOVW_BREL_NC", Const, 10},
+ {"R_ARM_THM_MOVW_PREL_NC", Const, 10},
+ {"R_ARM_THM_PC12", Const, 10},
+ {"R_ARM_THM_PC22", Const, 0},
+ {"R_ARM_THM_PC8", Const, 0},
+ {"R_ARM_THM_RPC22", Const, 0},
+ {"R_ARM_THM_SWI8", Const, 0},
+ {"R_ARM_THM_TLS_CALL", Const, 10},
+ {"R_ARM_THM_TLS_DESCSEQ16", Const, 10},
+ {"R_ARM_THM_TLS_DESCSEQ32", Const, 10},
+ {"R_ARM_THM_XPC22", Const, 0},
+ {"R_ARM_TLS_CALL", Const, 10},
+ {"R_ARM_TLS_DESCSEQ", Const, 10},
+ {"R_ARM_TLS_DTPMOD32", Const, 10},
+ {"R_ARM_TLS_DTPOFF32", Const, 10},
+ {"R_ARM_TLS_GD32", Const, 10},
+ {"R_ARM_TLS_GOTDESC", Const, 10},
+ {"R_ARM_TLS_IE12GP", Const, 10},
+ {"R_ARM_TLS_IE32", Const, 10},
+ {"R_ARM_TLS_LDM32", Const, 10},
+ {"R_ARM_TLS_LDO12", Const, 10},
+ {"R_ARM_TLS_LDO32", Const, 10},
+ {"R_ARM_TLS_LE12", Const, 10},
+ {"R_ARM_TLS_LE32", Const, 10},
+ {"R_ARM_TLS_TPOFF32", Const, 10},
+ {"R_ARM_V4BX", Const, 10},
+ {"R_ARM_XPC25", Const, 0},
+ {"R_INFO", Func, 0},
+ {"R_INFO32", Func, 0},
+ {"R_LARCH", Type, 19},
+ {"R_LARCH_32", Const, 19},
+ {"R_LARCH_32_PCREL", Const, 20},
+ {"R_LARCH_64", Const, 19},
+ {"R_LARCH_64_PCREL", Const, 22},
+ {"R_LARCH_ABS64_HI12", Const, 20},
+ {"R_LARCH_ABS64_LO20", Const, 20},
+ {"R_LARCH_ABS_HI20", Const, 20},
+ {"R_LARCH_ABS_LO12", Const, 20},
+ {"R_LARCH_ADD16", Const, 19},
+ {"R_LARCH_ADD24", Const, 19},
+ {"R_LARCH_ADD32", Const, 19},
+ {"R_LARCH_ADD6", Const, 22},
+ {"R_LARCH_ADD64", Const, 19},
+ {"R_LARCH_ADD8", Const, 19},
+ {"R_LARCH_ADD_ULEB128", Const, 22},
+ {"R_LARCH_ALIGN", Const, 22},
+ {"R_LARCH_B16", Const, 20},
+ {"R_LARCH_B21", Const, 20},
+ {"R_LARCH_B26", Const, 20},
+ {"R_LARCH_CFA", Const, 22},
+ {"R_LARCH_COPY", Const, 19},
+ {"R_LARCH_DELETE", Const, 22},
+ {"R_LARCH_GNU_VTENTRY", Const, 20},
+ {"R_LARCH_GNU_VTINHERIT", Const, 20},
+ {"R_LARCH_GOT64_HI12", Const, 20},
+ {"R_LARCH_GOT64_LO20", Const, 20},
+ {"R_LARCH_GOT64_PC_HI12", Const, 20},
+ {"R_LARCH_GOT64_PC_LO20", Const, 20},
+ {"R_LARCH_GOT_HI20", Const, 20},
+ {"R_LARCH_GOT_LO12", Const, 20},
+ {"R_LARCH_GOT_PC_HI20", Const, 20},
+ {"R_LARCH_GOT_PC_LO12", Const, 20},
+ {"R_LARCH_IRELATIVE", Const, 19},
+ {"R_LARCH_JUMP_SLOT", Const, 19},
+ {"R_LARCH_MARK_LA", Const, 19},
+ {"R_LARCH_MARK_PCREL", Const, 19},
+ {"R_LARCH_NONE", Const, 19},
+ {"R_LARCH_PCALA64_HI12", Const, 20},
+ {"R_LARCH_PCALA64_LO20", Const, 20},
+ {"R_LARCH_PCALA_HI20", Const, 20},
+ {"R_LARCH_PCALA_LO12", Const, 20},
+ {"R_LARCH_PCREL20_S2", Const, 22},
+ {"R_LARCH_RELATIVE", Const, 19},
+ {"R_LARCH_RELAX", Const, 20},
+ {"R_LARCH_SOP_ADD", Const, 19},
+ {"R_LARCH_SOP_AND", Const, 19},
+ {"R_LARCH_SOP_ASSERT", Const, 19},
+ {"R_LARCH_SOP_IF_ELSE", Const, 19},
+ {"R_LARCH_SOP_NOT", Const, 19},
+ {"R_LARCH_SOP_POP_32_S_0_10_10_16_S2", Const, 19},
+ {"R_LARCH_SOP_POP_32_S_0_5_10_16_S2", Const, 19},
+ {"R_LARCH_SOP_POP_32_S_10_12", Const, 19},
+ {"R_LARCH_SOP_POP_32_S_10_16", Const, 19},
+ {"R_LARCH_SOP_POP_32_S_10_16_S2", Const, 19},
+ {"R_LARCH_SOP_POP_32_S_10_5", Const, 19},
+ {"R_LARCH_SOP_POP_32_S_5_20", Const, 19},
+ {"R_LARCH_SOP_POP_32_U", Const, 19},
+ {"R_LARCH_SOP_POP_32_U_10_12", Const, 19},
+ {"R_LARCH_SOP_PUSH_ABSOLUTE", Const, 19},
+ {"R_LARCH_SOP_PUSH_DUP", Const, 19},
+ {"R_LARCH_SOP_PUSH_GPREL", Const, 19},
+ {"R_LARCH_SOP_PUSH_PCREL", Const, 19},
+ {"R_LARCH_SOP_PUSH_PLT_PCREL", Const, 19},
+ {"R_LARCH_SOP_PUSH_TLS_GD", Const, 19},
+ {"R_LARCH_SOP_PUSH_TLS_GOT", Const, 19},
+ {"R_LARCH_SOP_PUSH_TLS_TPREL", Const, 19},
+ {"R_LARCH_SOP_SL", Const, 19},
+ {"R_LARCH_SOP_SR", Const, 19},
+ {"R_LARCH_SOP_SUB", Const, 19},
+ {"R_LARCH_SUB16", Const, 19},
+ {"R_LARCH_SUB24", Const, 19},
+ {"R_LARCH_SUB32", Const, 19},
+ {"R_LARCH_SUB6", Const, 22},
+ {"R_LARCH_SUB64", Const, 19},
+ {"R_LARCH_SUB8", Const, 19},
+ {"R_LARCH_SUB_ULEB128", Const, 22},
+ {"R_LARCH_TLS_DTPMOD32", Const, 19},
+ {"R_LARCH_TLS_DTPMOD64", Const, 19},
+ {"R_LARCH_TLS_DTPREL32", Const, 19},
+ {"R_LARCH_TLS_DTPREL64", Const, 19},
+ {"R_LARCH_TLS_GD_HI20", Const, 20},
+ {"R_LARCH_TLS_GD_PC_HI20", Const, 20},
+ {"R_LARCH_TLS_IE64_HI12", Const, 20},
+ {"R_LARCH_TLS_IE64_LO20", Const, 20},
+ {"R_LARCH_TLS_IE64_PC_HI12", Const, 20},
+ {"R_LARCH_TLS_IE64_PC_LO20", Const, 20},
+ {"R_LARCH_TLS_IE_HI20", Const, 20},
+ {"R_LARCH_TLS_IE_LO12", Const, 20},
+ {"R_LARCH_TLS_IE_PC_HI20", Const, 20},
+ {"R_LARCH_TLS_IE_PC_LO12", Const, 20},
+ {"R_LARCH_TLS_LD_HI20", Const, 20},
+ {"R_LARCH_TLS_LD_PC_HI20", Const, 20},
+ {"R_LARCH_TLS_LE64_HI12", Const, 20},
+ {"R_LARCH_TLS_LE64_LO20", Const, 20},
+ {"R_LARCH_TLS_LE_HI20", Const, 20},
+ {"R_LARCH_TLS_LE_LO12", Const, 20},
+ {"R_LARCH_TLS_TPREL32", Const, 19},
+ {"R_LARCH_TLS_TPREL64", Const, 19},
+ {"R_MIPS", Type, 6},
+ {"R_MIPS_16", Const, 6},
+ {"R_MIPS_26", Const, 6},
+ {"R_MIPS_32", Const, 6},
+ {"R_MIPS_64", Const, 6},
+ {"R_MIPS_ADD_IMMEDIATE", Const, 6},
+ {"R_MIPS_CALL16", Const, 6},
+ {"R_MIPS_CALL_HI16", Const, 6},
+ {"R_MIPS_CALL_LO16", Const, 6},
+ {"R_MIPS_DELETE", Const, 6},
+ {"R_MIPS_GOT16", Const, 6},
+ {"R_MIPS_GOT_DISP", Const, 6},
+ {"R_MIPS_GOT_HI16", Const, 6},
+ {"R_MIPS_GOT_LO16", Const, 6},
+ {"R_MIPS_GOT_OFST", Const, 6},
+ {"R_MIPS_GOT_PAGE", Const, 6},
+ {"R_MIPS_GPREL16", Const, 6},
+ {"R_MIPS_GPREL32", Const, 6},
+ {"R_MIPS_HI16", Const, 6},
+ {"R_MIPS_HIGHER", Const, 6},
+ {"R_MIPS_HIGHEST", Const, 6},
+ {"R_MIPS_INSERT_A", Const, 6},
+ {"R_MIPS_INSERT_B", Const, 6},
+ {"R_MIPS_JALR", Const, 6},
+ {"R_MIPS_LITERAL", Const, 6},
+ {"R_MIPS_LO16", Const, 6},
+ {"R_MIPS_NONE", Const, 6},
+ {"R_MIPS_PC16", Const, 6},
+ {"R_MIPS_PC32", Const, 22},
+ {"R_MIPS_PJUMP", Const, 6},
+ {"R_MIPS_REL16", Const, 6},
+ {"R_MIPS_REL32", Const, 6},
+ {"R_MIPS_RELGOT", Const, 6},
+ {"R_MIPS_SCN_DISP", Const, 6},
+ {"R_MIPS_SHIFT5", Const, 6},
+ {"R_MIPS_SHIFT6", Const, 6},
+ {"R_MIPS_SUB", Const, 6},
+ {"R_MIPS_TLS_DTPMOD32", Const, 6},
+ {"R_MIPS_TLS_DTPMOD64", Const, 6},
+ {"R_MIPS_TLS_DTPREL32", Const, 6},
+ {"R_MIPS_TLS_DTPREL64", Const, 6},
+ {"R_MIPS_TLS_DTPREL_HI16", Const, 6},
+ {"R_MIPS_TLS_DTPREL_LO16", Const, 6},
+ {"R_MIPS_TLS_GD", Const, 6},
+ {"R_MIPS_TLS_GOTTPREL", Const, 6},
+ {"R_MIPS_TLS_LDM", Const, 6},
+ {"R_MIPS_TLS_TPREL32", Const, 6},
+ {"R_MIPS_TLS_TPREL64", Const, 6},
+ {"R_MIPS_TLS_TPREL_HI16", Const, 6},
+ {"R_MIPS_TLS_TPREL_LO16", Const, 6},
+ {"R_PPC", Type, 0},
+ {"R_PPC64", Type, 5},
+ {"R_PPC64_ADDR14", Const, 5},
+ {"R_PPC64_ADDR14_BRNTAKEN", Const, 5},
+ {"R_PPC64_ADDR14_BRTAKEN", Const, 5},
+ {"R_PPC64_ADDR16", Const, 5},
+ {"R_PPC64_ADDR16_DS", Const, 5},
+ {"R_PPC64_ADDR16_HA", Const, 5},
+ {"R_PPC64_ADDR16_HI", Const, 5},
+ {"R_PPC64_ADDR16_HIGH", Const, 10},
+ {"R_PPC64_ADDR16_HIGHA", Const, 10},
+ {"R_PPC64_ADDR16_HIGHER", Const, 5},
+ {"R_PPC64_ADDR16_HIGHER34", Const, 20},
+ {"R_PPC64_ADDR16_HIGHERA", Const, 5},
+ {"R_PPC64_ADDR16_HIGHERA34", Const, 20},
+ {"R_PPC64_ADDR16_HIGHEST", Const, 5},
+ {"R_PPC64_ADDR16_HIGHEST34", Const, 20},
+ {"R_PPC64_ADDR16_HIGHESTA", Const, 5},
+ {"R_PPC64_ADDR16_HIGHESTA34", Const, 20},
+ {"R_PPC64_ADDR16_LO", Const, 5},
+ {"R_PPC64_ADDR16_LO_DS", Const, 5},
+ {"R_PPC64_ADDR24", Const, 5},
+ {"R_PPC64_ADDR32", Const, 5},
+ {"R_PPC64_ADDR64", Const, 5},
+ {"R_PPC64_ADDR64_LOCAL", Const, 10},
+ {"R_PPC64_COPY", Const, 20},
+ {"R_PPC64_D28", Const, 20},
+ {"R_PPC64_D34", Const, 20},
+ {"R_PPC64_D34_HA30", Const, 20},
+ {"R_PPC64_D34_HI30", Const, 20},
+ {"R_PPC64_D34_LO", Const, 20},
+ {"R_PPC64_DTPMOD64", Const, 5},
+ {"R_PPC64_DTPREL16", Const, 5},
+ {"R_PPC64_DTPREL16_DS", Const, 5},
+ {"R_PPC64_DTPREL16_HA", Const, 5},
+ {"R_PPC64_DTPREL16_HI", Const, 5},
+ {"R_PPC64_DTPREL16_HIGH", Const, 10},
+ {"R_PPC64_DTPREL16_HIGHA", Const, 10},
+ {"R_PPC64_DTPREL16_HIGHER", Const, 5},
+ {"R_PPC64_DTPREL16_HIGHERA", Const, 5},
+ {"R_PPC64_DTPREL16_HIGHEST", Const, 5},
+ {"R_PPC64_DTPREL16_HIGHESTA", Const, 5},
+ {"R_PPC64_DTPREL16_LO", Const, 5},
+ {"R_PPC64_DTPREL16_LO_DS", Const, 5},
+ {"R_PPC64_DTPREL34", Const, 20},
+ {"R_PPC64_DTPREL64", Const, 5},
+ {"R_PPC64_ENTRY", Const, 10},
+ {"R_PPC64_GLOB_DAT", Const, 20},
+ {"R_PPC64_GNU_VTENTRY", Const, 20},
+ {"R_PPC64_GNU_VTINHERIT", Const, 20},
+ {"R_PPC64_GOT16", Const, 5},
+ {"R_PPC64_GOT16_DS", Const, 5},
+ {"R_PPC64_GOT16_HA", Const, 5},
+ {"R_PPC64_GOT16_HI", Const, 5},
+ {"R_PPC64_GOT16_LO", Const, 5},
+ {"R_PPC64_GOT16_LO_DS", Const, 5},
+ {"R_PPC64_GOT_DTPREL16_DS", Const, 5},
+ {"R_PPC64_GOT_DTPREL16_HA", Const, 5},
+ {"R_PPC64_GOT_DTPREL16_HI", Const, 5},
+ {"R_PPC64_GOT_DTPREL16_LO_DS", Const, 5},
+ {"R_PPC64_GOT_DTPREL_PCREL34", Const, 20},
+ {"R_PPC64_GOT_PCREL34", Const, 20},
+ {"R_PPC64_GOT_TLSGD16", Const, 5},
+ {"R_PPC64_GOT_TLSGD16_HA", Const, 5},
+ {"R_PPC64_GOT_TLSGD16_HI", Const, 5},
+ {"R_PPC64_GOT_TLSGD16_LO", Const, 5},
+ {"R_PPC64_GOT_TLSGD_PCREL34", Const, 20},
+ {"R_PPC64_GOT_TLSLD16", Const, 5},
+ {"R_PPC64_GOT_TLSLD16_HA", Const, 5},
+ {"R_PPC64_GOT_TLSLD16_HI", Const, 5},
+ {"R_PPC64_GOT_TLSLD16_LO", Const, 5},
+ {"R_PPC64_GOT_TLSLD_PCREL34", Const, 20},
+ {"R_PPC64_GOT_TPREL16_DS", Const, 5},
+ {"R_PPC64_GOT_TPREL16_HA", Const, 5},
+ {"R_PPC64_GOT_TPREL16_HI", Const, 5},
+ {"R_PPC64_GOT_TPREL16_LO_DS", Const, 5},
+ {"R_PPC64_GOT_TPREL_PCREL34", Const, 20},
+ {"R_PPC64_IRELATIVE", Const, 10},
+ {"R_PPC64_JMP_IREL", Const, 10},
+ {"R_PPC64_JMP_SLOT", Const, 5},
+ {"R_PPC64_NONE", Const, 5},
+ {"R_PPC64_PCREL28", Const, 20},
+ {"R_PPC64_PCREL34", Const, 20},
+ {"R_PPC64_PCREL_OPT", Const, 20},
+ {"R_PPC64_PLT16_HA", Const, 20},
+ {"R_PPC64_PLT16_HI", Const, 20},
+ {"R_PPC64_PLT16_LO", Const, 20},
+ {"R_PPC64_PLT16_LO_DS", Const, 10},
+ {"R_PPC64_PLT32", Const, 20},
+ {"R_PPC64_PLT64", Const, 20},
+ {"R_PPC64_PLTCALL", Const, 20},
+ {"R_PPC64_PLTCALL_NOTOC", Const, 20},
+ {"R_PPC64_PLTGOT16", Const, 10},
+ {"R_PPC64_PLTGOT16_DS", Const, 10},
+ {"R_PPC64_PLTGOT16_HA", Const, 10},
+ {"R_PPC64_PLTGOT16_HI", Const, 10},
+ {"R_PPC64_PLTGOT16_LO", Const, 10},
+ {"R_PPC64_PLTGOT_LO_DS", Const, 10},
+ {"R_PPC64_PLTREL32", Const, 20},
+ {"R_PPC64_PLTREL64", Const, 20},
+ {"R_PPC64_PLTSEQ", Const, 20},
+ {"R_PPC64_PLTSEQ_NOTOC", Const, 20},
+ {"R_PPC64_PLT_PCREL34", Const, 20},
+ {"R_PPC64_PLT_PCREL34_NOTOC", Const, 20},
+ {"R_PPC64_REL14", Const, 5},
+ {"R_PPC64_REL14_BRNTAKEN", Const, 5},
+ {"R_PPC64_REL14_BRTAKEN", Const, 5},
+ {"R_PPC64_REL16", Const, 5},
+ {"R_PPC64_REL16DX_HA", Const, 10},
+ {"R_PPC64_REL16_HA", Const, 5},
+ {"R_PPC64_REL16_HI", Const, 5},
+ {"R_PPC64_REL16_HIGH", Const, 20},
+ {"R_PPC64_REL16_HIGHA", Const, 20},
+ {"R_PPC64_REL16_HIGHER", Const, 20},
+ {"R_PPC64_REL16_HIGHER34", Const, 20},
+ {"R_PPC64_REL16_HIGHERA", Const, 20},
+ {"R_PPC64_REL16_HIGHERA34", Const, 20},
+ {"R_PPC64_REL16_HIGHEST", Const, 20},
+ {"R_PPC64_REL16_HIGHEST34", Const, 20},
+ {"R_PPC64_REL16_HIGHESTA", Const, 20},
+ {"R_PPC64_REL16_HIGHESTA34", Const, 20},
+ {"R_PPC64_REL16_LO", Const, 5},
+ {"R_PPC64_REL24", Const, 5},
+ {"R_PPC64_REL24_NOTOC", Const, 10},
+ {"R_PPC64_REL24_P9NOTOC", Const, 21},
+ {"R_PPC64_REL30", Const, 20},
+ {"R_PPC64_REL32", Const, 5},
+ {"R_PPC64_REL64", Const, 5},
+ {"R_PPC64_RELATIVE", Const, 18},
+ {"R_PPC64_SECTOFF", Const, 20},
+ {"R_PPC64_SECTOFF_DS", Const, 10},
+ {"R_PPC64_SECTOFF_HA", Const, 20},
+ {"R_PPC64_SECTOFF_HI", Const, 20},
+ {"R_PPC64_SECTOFF_LO", Const, 20},
+ {"R_PPC64_SECTOFF_LO_DS", Const, 10},
+ {"R_PPC64_TLS", Const, 5},
+ {"R_PPC64_TLSGD", Const, 5},
+ {"R_PPC64_TLSLD", Const, 5},
+ {"R_PPC64_TOC", Const, 5},
+ {"R_PPC64_TOC16", Const, 5},
+ {"R_PPC64_TOC16_DS", Const, 5},
+ {"R_PPC64_TOC16_HA", Const, 5},
+ {"R_PPC64_TOC16_HI", Const, 5},
+ {"R_PPC64_TOC16_LO", Const, 5},
+ {"R_PPC64_TOC16_LO_DS", Const, 5},
+ {"R_PPC64_TOCSAVE", Const, 10},
+ {"R_PPC64_TPREL16", Const, 5},
+ {"R_PPC64_TPREL16_DS", Const, 5},
+ {"R_PPC64_TPREL16_HA", Const, 5},
+ {"R_PPC64_TPREL16_HI", Const, 5},
+ {"R_PPC64_TPREL16_HIGH", Const, 10},
+ {"R_PPC64_TPREL16_HIGHA", Const, 10},
+ {"R_PPC64_TPREL16_HIGHER", Const, 5},
+ {"R_PPC64_TPREL16_HIGHERA", Const, 5},
+ {"R_PPC64_TPREL16_HIGHEST", Const, 5},
+ {"R_PPC64_TPREL16_HIGHESTA", Const, 5},
+ {"R_PPC64_TPREL16_LO", Const, 5},
+ {"R_PPC64_TPREL16_LO_DS", Const, 5},
+ {"R_PPC64_TPREL34", Const, 20},
+ {"R_PPC64_TPREL64", Const, 5},
+ {"R_PPC64_UADDR16", Const, 20},
+ {"R_PPC64_UADDR32", Const, 20},
+ {"R_PPC64_UADDR64", Const, 20},
+ {"R_PPC_ADDR14", Const, 0},
+ {"R_PPC_ADDR14_BRNTAKEN", Const, 0},
+ {"R_PPC_ADDR14_BRTAKEN", Const, 0},
+ {"R_PPC_ADDR16", Const, 0},
+ {"R_PPC_ADDR16_HA", Const, 0},
+ {"R_PPC_ADDR16_HI", Const, 0},
+ {"R_PPC_ADDR16_LO", Const, 0},
+ {"R_PPC_ADDR24", Const, 0},
+ {"R_PPC_ADDR32", Const, 0},
+ {"R_PPC_COPY", Const, 0},
+ {"R_PPC_DTPMOD32", Const, 0},
+ {"R_PPC_DTPREL16", Const, 0},
+ {"R_PPC_DTPREL16_HA", Const, 0},
+ {"R_PPC_DTPREL16_HI", Const, 0},
+ {"R_PPC_DTPREL16_LO", Const, 0},
+ {"R_PPC_DTPREL32", Const, 0},
+ {"R_PPC_EMB_BIT_FLD", Const, 0},
+ {"R_PPC_EMB_MRKREF", Const, 0},
+ {"R_PPC_EMB_NADDR16", Const, 0},
+ {"R_PPC_EMB_NADDR16_HA", Const, 0},
+ {"R_PPC_EMB_NADDR16_HI", Const, 0},
+ {"R_PPC_EMB_NADDR16_LO", Const, 0},
+ {"R_PPC_EMB_NADDR32", Const, 0},
+ {"R_PPC_EMB_RELSDA", Const, 0},
+ {"R_PPC_EMB_RELSEC16", Const, 0},
+ {"R_PPC_EMB_RELST_HA", Const, 0},
+ {"R_PPC_EMB_RELST_HI", Const, 0},
+ {"R_PPC_EMB_RELST_LO", Const, 0},
+ {"R_PPC_EMB_SDA21", Const, 0},
+ {"R_PPC_EMB_SDA2I16", Const, 0},
+ {"R_PPC_EMB_SDA2REL", Const, 0},
+ {"R_PPC_EMB_SDAI16", Const, 0},
+ {"R_PPC_GLOB_DAT", Const, 0},
+ {"R_PPC_GOT16", Const, 0},
+ {"R_PPC_GOT16_HA", Const, 0},
+ {"R_PPC_GOT16_HI", Const, 0},
+ {"R_PPC_GOT16_LO", Const, 0},
+ {"R_PPC_GOT_TLSGD16", Const, 0},
+ {"R_PPC_GOT_TLSGD16_HA", Const, 0},
+ {"R_PPC_GOT_TLSGD16_HI", Const, 0},
+ {"R_PPC_GOT_TLSGD16_LO", Const, 0},
+ {"R_PPC_GOT_TLSLD16", Const, 0},
+ {"R_PPC_GOT_TLSLD16_HA", Const, 0},
+ {"R_PPC_GOT_TLSLD16_HI", Const, 0},
+ {"R_PPC_GOT_TLSLD16_LO", Const, 0},
+ {"R_PPC_GOT_TPREL16", Const, 0},
+ {"R_PPC_GOT_TPREL16_HA", Const, 0},
+ {"R_PPC_GOT_TPREL16_HI", Const, 0},
+ {"R_PPC_GOT_TPREL16_LO", Const, 0},
+ {"R_PPC_JMP_SLOT", Const, 0},
+ {"R_PPC_LOCAL24PC", Const, 0},
+ {"R_PPC_NONE", Const, 0},
+ {"R_PPC_PLT16_HA", Const, 0},
+ {"R_PPC_PLT16_HI", Const, 0},
+ {"R_PPC_PLT16_LO", Const, 0},
+ {"R_PPC_PLT32", Const, 0},
+ {"R_PPC_PLTREL24", Const, 0},
+ {"R_PPC_PLTREL32", Const, 0},
+ {"R_PPC_REL14", Const, 0},
+ {"R_PPC_REL14_BRNTAKEN", Const, 0},
+ {"R_PPC_REL14_BRTAKEN", Const, 0},
+ {"R_PPC_REL24", Const, 0},
+ {"R_PPC_REL32", Const, 0},
+ {"R_PPC_RELATIVE", Const, 0},
+ {"R_PPC_SDAREL16", Const, 0},
+ {"R_PPC_SECTOFF", Const, 0},
+ {"R_PPC_SECTOFF_HA", Const, 0},
+ {"R_PPC_SECTOFF_HI", Const, 0},
+ {"R_PPC_SECTOFF_LO", Const, 0},
+ {"R_PPC_TLS", Const, 0},
+ {"R_PPC_TPREL16", Const, 0},
+ {"R_PPC_TPREL16_HA", Const, 0},
+ {"R_PPC_TPREL16_HI", Const, 0},
+ {"R_PPC_TPREL16_LO", Const, 0},
+ {"R_PPC_TPREL32", Const, 0},
+ {"R_PPC_UADDR16", Const, 0},
+ {"R_PPC_UADDR32", Const, 0},
+ {"R_RISCV", Type, 11},
+ {"R_RISCV_32", Const, 11},
+ {"R_RISCV_32_PCREL", Const, 12},
+ {"R_RISCV_64", Const, 11},
+ {"R_RISCV_ADD16", Const, 11},
+ {"R_RISCV_ADD32", Const, 11},
+ {"R_RISCV_ADD64", Const, 11},
+ {"R_RISCV_ADD8", Const, 11},
+ {"R_RISCV_ALIGN", Const, 11},
+ {"R_RISCV_BRANCH", Const, 11},
+ {"R_RISCV_CALL", Const, 11},
+ {"R_RISCV_CALL_PLT", Const, 11},
+ {"R_RISCV_COPY", Const, 11},
+ {"R_RISCV_GNU_VTENTRY", Const, 11},
+ {"R_RISCV_GNU_VTINHERIT", Const, 11},
+ {"R_RISCV_GOT_HI20", Const, 11},
+ {"R_RISCV_GPREL_I", Const, 11},
+ {"R_RISCV_GPREL_S", Const, 11},
+ {"R_RISCV_HI20", Const, 11},
+ {"R_RISCV_JAL", Const, 11},
+ {"R_RISCV_JUMP_SLOT", Const, 11},
+ {"R_RISCV_LO12_I", Const, 11},
+ {"R_RISCV_LO12_S", Const, 11},
+ {"R_RISCV_NONE", Const, 11},
+ {"R_RISCV_PCREL_HI20", Const, 11},
+ {"R_RISCV_PCREL_LO12_I", Const, 11},
+ {"R_RISCV_PCREL_LO12_S", Const, 11},
+ {"R_RISCV_RELATIVE", Const, 11},
+ {"R_RISCV_RELAX", Const, 11},
+ {"R_RISCV_RVC_BRANCH", Const, 11},
+ {"R_RISCV_RVC_JUMP", Const, 11},
+ {"R_RISCV_RVC_LUI", Const, 11},
+ {"R_RISCV_SET16", Const, 11},
+ {"R_RISCV_SET32", Const, 11},
+ {"R_RISCV_SET6", Const, 11},
+ {"R_RISCV_SET8", Const, 11},
+ {"R_RISCV_SUB16", Const, 11},
+ {"R_RISCV_SUB32", Const, 11},
+ {"R_RISCV_SUB6", Const, 11},
+ {"R_RISCV_SUB64", Const, 11},
+ {"R_RISCV_SUB8", Const, 11},
+ {"R_RISCV_TLS_DTPMOD32", Const, 11},
+ {"R_RISCV_TLS_DTPMOD64", Const, 11},
+ {"R_RISCV_TLS_DTPREL32", Const, 11},
+ {"R_RISCV_TLS_DTPREL64", Const, 11},
+ {"R_RISCV_TLS_GD_HI20", Const, 11},
+ {"R_RISCV_TLS_GOT_HI20", Const, 11},
+ {"R_RISCV_TLS_TPREL32", Const, 11},
+ {"R_RISCV_TLS_TPREL64", Const, 11},
+ {"R_RISCV_TPREL_ADD", Const, 11},
+ {"R_RISCV_TPREL_HI20", Const, 11},
+ {"R_RISCV_TPREL_I", Const, 11},
+ {"R_RISCV_TPREL_LO12_I", Const, 11},
+ {"R_RISCV_TPREL_LO12_S", Const, 11},
+ {"R_RISCV_TPREL_S", Const, 11},
+ {"R_SPARC", Type, 0},
+ {"R_SPARC_10", Const, 0},
+ {"R_SPARC_11", Const, 0},
+ {"R_SPARC_13", Const, 0},
+ {"R_SPARC_16", Const, 0},
+ {"R_SPARC_22", Const, 0},
+ {"R_SPARC_32", Const, 0},
+ {"R_SPARC_5", Const, 0},
+ {"R_SPARC_6", Const, 0},
+ {"R_SPARC_64", Const, 0},
+ {"R_SPARC_7", Const, 0},
+ {"R_SPARC_8", Const, 0},
+ {"R_SPARC_COPY", Const, 0},
+ {"R_SPARC_DISP16", Const, 0},
+ {"R_SPARC_DISP32", Const, 0},
+ {"R_SPARC_DISP64", Const, 0},
+ {"R_SPARC_DISP8", Const, 0},
+ {"R_SPARC_GLOB_DAT", Const, 0},
+ {"R_SPARC_GLOB_JMP", Const, 0},
+ {"R_SPARC_GOT10", Const, 0},
+ {"R_SPARC_GOT13", Const, 0},
+ {"R_SPARC_GOT22", Const, 0},
+ {"R_SPARC_H44", Const, 0},
+ {"R_SPARC_HH22", Const, 0},
+ {"R_SPARC_HI22", Const, 0},
+ {"R_SPARC_HIPLT22", Const, 0},
+ {"R_SPARC_HIX22", Const, 0},
+ {"R_SPARC_HM10", Const, 0},
+ {"R_SPARC_JMP_SLOT", Const, 0},
+ {"R_SPARC_L44", Const, 0},
+ {"R_SPARC_LM22", Const, 0},
+ {"R_SPARC_LO10", Const, 0},
+ {"R_SPARC_LOPLT10", Const, 0},
+ {"R_SPARC_LOX10", Const, 0},
+ {"R_SPARC_M44", Const, 0},
+ {"R_SPARC_NONE", Const, 0},
+ {"R_SPARC_OLO10", Const, 0},
+ {"R_SPARC_PC10", Const, 0},
+ {"R_SPARC_PC22", Const, 0},
+ {"R_SPARC_PCPLT10", Const, 0},
+ {"R_SPARC_PCPLT22", Const, 0},
+ {"R_SPARC_PCPLT32", Const, 0},
+ {"R_SPARC_PC_HH22", Const, 0},
+ {"R_SPARC_PC_HM10", Const, 0},
+ {"R_SPARC_PC_LM22", Const, 0},
+ {"R_SPARC_PLT32", Const, 0},
+ {"R_SPARC_PLT64", Const, 0},
+ {"R_SPARC_REGISTER", Const, 0},
+ {"R_SPARC_RELATIVE", Const, 0},
+ {"R_SPARC_UA16", Const, 0},
+ {"R_SPARC_UA32", Const, 0},
+ {"R_SPARC_UA64", Const, 0},
+ {"R_SPARC_WDISP16", Const, 0},
+ {"R_SPARC_WDISP19", Const, 0},
+ {"R_SPARC_WDISP22", Const, 0},
+ {"R_SPARC_WDISP30", Const, 0},
+ {"R_SPARC_WPLT30", Const, 0},
+ {"R_SYM32", Func, 0},
+ {"R_SYM64", Func, 0},
+ {"R_TYPE32", Func, 0},
+ {"R_TYPE64", Func, 0},
+ {"R_X86_64", Type, 0},
+ {"R_X86_64_16", Const, 0},
+ {"R_X86_64_32", Const, 0},
+ {"R_X86_64_32S", Const, 0},
+ {"R_X86_64_64", Const, 0},
+ {"R_X86_64_8", Const, 0},
+ {"R_X86_64_COPY", Const, 0},
+ {"R_X86_64_DTPMOD64", Const, 0},
+ {"R_X86_64_DTPOFF32", Const, 0},
+ {"R_X86_64_DTPOFF64", Const, 0},
+ {"R_X86_64_GLOB_DAT", Const, 0},
+ {"R_X86_64_GOT32", Const, 0},
+ {"R_X86_64_GOT64", Const, 10},
+ {"R_X86_64_GOTOFF64", Const, 10},
+ {"R_X86_64_GOTPC32", Const, 10},
+ {"R_X86_64_GOTPC32_TLSDESC", Const, 10},
+ {"R_X86_64_GOTPC64", Const, 10},
+ {"R_X86_64_GOTPCREL", Const, 0},
+ {"R_X86_64_GOTPCREL64", Const, 10},
+ {"R_X86_64_GOTPCRELX", Const, 10},
+ {"R_X86_64_GOTPLT64", Const, 10},
+ {"R_X86_64_GOTTPOFF", Const, 0},
+ {"R_X86_64_IRELATIVE", Const, 10},
+ {"R_X86_64_JMP_SLOT", Const, 0},
+ {"R_X86_64_NONE", Const, 0},
+ {"R_X86_64_PC16", Const, 0},
+ {"R_X86_64_PC32", Const, 0},
+ {"R_X86_64_PC32_BND", Const, 10},
+ {"R_X86_64_PC64", Const, 10},
+ {"R_X86_64_PC8", Const, 0},
+ {"R_X86_64_PLT32", Const, 0},
+ {"R_X86_64_PLT32_BND", Const, 10},
+ {"R_X86_64_PLTOFF64", Const, 10},
+ {"R_X86_64_RELATIVE", Const, 0},
+ {"R_X86_64_RELATIVE64", Const, 10},
+ {"R_X86_64_REX_GOTPCRELX", Const, 10},
+ {"R_X86_64_SIZE32", Const, 10},
+ {"R_X86_64_SIZE64", Const, 10},
+ {"R_X86_64_TLSDESC", Const, 10},
+ {"R_X86_64_TLSDESC_CALL", Const, 10},
+ {"R_X86_64_TLSGD", Const, 0},
+ {"R_X86_64_TLSLD", Const, 0},
+ {"R_X86_64_TPOFF32", Const, 0},
+ {"R_X86_64_TPOFF64", Const, 0},
+ {"Rel32", Type, 0},
+ {"Rel32.Info", Field, 0},
+ {"Rel32.Off", Field, 0},
+ {"Rel64", Type, 0},
+ {"Rel64.Info", Field, 0},
+ {"Rel64.Off", Field, 0},
+ {"Rela32", Type, 0},
+ {"Rela32.Addend", Field, 0},
+ {"Rela32.Info", Field, 0},
+ {"Rela32.Off", Field, 0},
+ {"Rela64", Type, 0},
+ {"Rela64.Addend", Field, 0},
+ {"Rela64.Info", Field, 0},
+ {"Rela64.Off", Field, 0},
+ {"SHF_ALLOC", Const, 0},
+ {"SHF_COMPRESSED", Const, 6},
+ {"SHF_EXECINSTR", Const, 0},
+ {"SHF_GROUP", Const, 0},
+ {"SHF_INFO_LINK", Const, 0},
+ {"SHF_LINK_ORDER", Const, 0},
+ {"SHF_MASKOS", Const, 0},
+ {"SHF_MASKPROC", Const, 0},
+ {"SHF_MERGE", Const, 0},
+ {"SHF_OS_NONCONFORMING", Const, 0},
+ {"SHF_STRINGS", Const, 0},
+ {"SHF_TLS", Const, 0},
+ {"SHF_WRITE", Const, 0},
+ {"SHN_ABS", Const, 0},
+ {"SHN_COMMON", Const, 0},
+ {"SHN_HIOS", Const, 0},
+ {"SHN_HIPROC", Const, 0},
+ {"SHN_HIRESERVE", Const, 0},
+ {"SHN_LOOS", Const, 0},
+ {"SHN_LOPROC", Const, 0},
+ {"SHN_LORESERVE", Const, 0},
+ {"SHN_UNDEF", Const, 0},
+ {"SHN_XINDEX", Const, 0},
+ {"SHT_DYNAMIC", Const, 0},
+ {"SHT_DYNSYM", Const, 0},
+ {"SHT_FINI_ARRAY", Const, 0},
+ {"SHT_GNU_ATTRIBUTES", Const, 0},
+ {"SHT_GNU_HASH", Const, 0},
+ {"SHT_GNU_LIBLIST", Const, 0},
+ {"SHT_GNU_VERDEF", Const, 0},
+ {"SHT_GNU_VERNEED", Const, 0},
+ {"SHT_GNU_VERSYM", Const, 0},
+ {"SHT_GROUP", Const, 0},
+ {"SHT_HASH", Const, 0},
+ {"SHT_HIOS", Const, 0},
+ {"SHT_HIPROC", Const, 0},
+ {"SHT_HIUSER", Const, 0},
+ {"SHT_INIT_ARRAY", Const, 0},
+ {"SHT_LOOS", Const, 0},
+ {"SHT_LOPROC", Const, 0},
+ {"SHT_LOUSER", Const, 0},
+ {"SHT_MIPS_ABIFLAGS", Const, 17},
+ {"SHT_NOBITS", Const, 0},
+ {"SHT_NOTE", Const, 0},
+ {"SHT_NULL", Const, 0},
+ {"SHT_PREINIT_ARRAY", Const, 0},
+ {"SHT_PROGBITS", Const, 0},
+ {"SHT_REL", Const, 0},
+ {"SHT_RELA", Const, 0},
+ {"SHT_SHLIB", Const, 0},
+ {"SHT_STRTAB", Const, 0},
+ {"SHT_SYMTAB", Const, 0},
+ {"SHT_SYMTAB_SHNDX", Const, 0},
+ {"STB_GLOBAL", Const, 0},
+ {"STB_HIOS", Const, 0},
+ {"STB_HIPROC", Const, 0},
+ {"STB_LOCAL", Const, 0},
+ {"STB_LOOS", Const, 0},
+ {"STB_LOPROC", Const, 0},
+ {"STB_WEAK", Const, 0},
+ {"STT_COMMON", Const, 0},
+ {"STT_FILE", Const, 0},
+ {"STT_FUNC", Const, 0},
+ {"STT_GNU_IFUNC", Const, 23},
+ {"STT_HIOS", Const, 0},
+ {"STT_HIPROC", Const, 0},
+ {"STT_LOOS", Const, 0},
+ {"STT_LOPROC", Const, 0},
+ {"STT_NOTYPE", Const, 0},
+ {"STT_OBJECT", Const, 0},
+ {"STT_RELC", Const, 23},
+ {"STT_SECTION", Const, 0},
+ {"STT_SRELC", Const, 23},
+ {"STT_TLS", Const, 0},
+ {"STV_DEFAULT", Const, 0},
+ {"STV_HIDDEN", Const, 0},
+ {"STV_INTERNAL", Const, 0},
+ {"STV_PROTECTED", Const, 0},
+ {"ST_BIND", Func, 0},
+ {"ST_INFO", Func, 0},
+ {"ST_TYPE", Func, 0},
+ {"ST_VISIBILITY", Func, 0},
+ {"Section", Type, 0},
+ {"Section.ReaderAt", Field, 0},
+ {"Section.SectionHeader", Field, 0},
+ {"Section32", Type, 0},
+ {"Section32.Addr", Field, 0},
+ {"Section32.Addralign", Field, 0},
+ {"Section32.Entsize", Field, 0},
+ {"Section32.Flags", Field, 0},
+ {"Section32.Info", Field, 0},
+ {"Section32.Link", Field, 0},
+ {"Section32.Name", Field, 0},
+ {"Section32.Off", Field, 0},
+ {"Section32.Size", Field, 0},
+ {"Section32.Type", Field, 0},
+ {"Section64", Type, 0},
+ {"Section64.Addr", Field, 0},
+ {"Section64.Addralign", Field, 0},
+ {"Section64.Entsize", Field, 0},
+ {"Section64.Flags", Field, 0},
+ {"Section64.Info", Field, 0},
+ {"Section64.Link", Field, 0},
+ {"Section64.Name", Field, 0},
+ {"Section64.Off", Field, 0},
+ {"Section64.Size", Field, 0},
+ {"Section64.Type", Field, 0},
+ {"SectionFlag", Type, 0},
+ {"SectionHeader", Type, 0},
+ {"SectionHeader.Addr", Field, 0},
+ {"SectionHeader.Addralign", Field, 0},
+ {"SectionHeader.Entsize", Field, 0},
+ {"SectionHeader.FileSize", Field, 6},
+ {"SectionHeader.Flags", Field, 0},
+ {"SectionHeader.Info", Field, 0},
+ {"SectionHeader.Link", Field, 0},
+ {"SectionHeader.Name", Field, 0},
+ {"SectionHeader.Offset", Field, 0},
+ {"SectionHeader.Size", Field, 0},
+ {"SectionHeader.Type", Field, 0},
+ {"SectionIndex", Type, 0},
+ {"SectionType", Type, 0},
+ {"Sym32", Type, 0},
+ {"Sym32.Info", Field, 0},
+ {"Sym32.Name", Field, 0},
+ {"Sym32.Other", Field, 0},
+ {"Sym32.Shndx", Field, 0},
+ {"Sym32.Size", Field, 0},
+ {"Sym32.Value", Field, 0},
+ {"Sym32Size", Const, 0},
+ {"Sym64", Type, 0},
+ {"Sym64.Info", Field, 0},
+ {"Sym64.Name", Field, 0},
+ {"Sym64.Other", Field, 0},
+ {"Sym64.Shndx", Field, 0},
+ {"Sym64.Size", Field, 0},
+ {"Sym64.Value", Field, 0},
+ {"Sym64Size", Const, 0},
+ {"SymBind", Type, 0},
+ {"SymType", Type, 0},
+ {"SymVis", Type, 0},
+ {"Symbol", Type, 0},
+ {"Symbol.Info", Field, 0},
+ {"Symbol.Library", Field, 13},
+ {"Symbol.Name", Field, 0},
+ {"Symbol.Other", Field, 0},
+ {"Symbol.Section", Field, 0},
+ {"Symbol.Size", Field, 0},
+ {"Symbol.Value", Field, 0},
+ {"Symbol.Version", Field, 13},
+ {"Type", Type, 0},
+ {"Version", Type, 0},
+ },
+ "debug/gosym": {
+ {"(*DecodingError).Error", Method, 0},
+ {"(*LineTable).LineToPC", Method, 0},
+ {"(*LineTable).PCToLine", Method, 0},
+ {"(*Sym).BaseName", Method, 0},
+ {"(*Sym).PackageName", Method, 0},
+ {"(*Sym).ReceiverName", Method, 0},
+ {"(*Sym).Static", Method, 0},
+ {"(*Table).LineToPC", Method, 0},
+ {"(*Table).LookupFunc", Method, 0},
+ {"(*Table).LookupSym", Method, 0},
+ {"(*Table).PCToFunc", Method, 0},
+ {"(*Table).PCToLine", Method, 0},
+ {"(*Table).SymByAddr", Method, 0},
+ {"(*UnknownLineError).Error", Method, 0},
+ {"(Func).BaseName", Method, 0},
+ {"(Func).PackageName", Method, 0},
+ {"(Func).ReceiverName", Method, 0},
+ {"(Func).Static", Method, 0},
+ {"(UnknownFileError).Error", Method, 0},
+ {"DecodingError", Type, 0},
+ {"Func", Type, 0},
+ {"Func.End", Field, 0},
+ {"Func.Entry", Field, 0},
+ {"Func.FrameSize", Field, 0},
+ {"Func.LineTable", Field, 0},
+ {"Func.Locals", Field, 0},
+ {"Func.Obj", Field, 0},
+ {"Func.Params", Field, 0},
+ {"Func.Sym", Field, 0},
+ {"LineTable", Type, 0},
+ {"LineTable.Data", Field, 0},
+ {"LineTable.Line", Field, 0},
+ {"LineTable.PC", Field, 0},
+ {"NewLineTable", Func, 0},
+ {"NewTable", Func, 0},
+ {"Obj", Type, 0},
+ {"Obj.Funcs", Field, 0},
+ {"Obj.Paths", Field, 0},
+ {"Sym", Type, 0},
+ {"Sym.Func", Field, 0},
+ {"Sym.GoType", Field, 0},
+ {"Sym.Name", Field, 0},
+ {"Sym.Type", Field, 0},
+ {"Sym.Value", Field, 0},
+ {"Table", Type, 0},
+ {"Table.Files", Field, 0},
+ {"Table.Funcs", Field, 0},
+ {"Table.Objs", Field, 0},
+ {"Table.Syms", Field, 0},
+ {"UnknownFileError", Type, 0},
+ {"UnknownLineError", Type, 0},
+ {"UnknownLineError.File", Field, 0},
+ {"UnknownLineError.Line", Field, 0},
+ },
+ "debug/macho": {
+ {"(*FatFile).Close", Method, 3},
+ {"(*File).Close", Method, 0},
+ {"(*File).DWARF", Method, 0},
+ {"(*File).ImportedLibraries", Method, 0},
+ {"(*File).ImportedSymbols", Method, 0},
+ {"(*File).Section", Method, 0},
+ {"(*File).Segment", Method, 0},
+ {"(*FormatError).Error", Method, 0},
+ {"(*Section).Data", Method, 0},
+ {"(*Section).Open", Method, 0},
+ {"(*Segment).Data", Method, 0},
+ {"(*Segment).Open", Method, 0},
+ {"(Cpu).GoString", Method, 0},
+ {"(Cpu).String", Method, 0},
+ {"(Dylib).Raw", Method, 0},
+ {"(Dysymtab).Raw", Method, 0},
+ {"(FatArch).Close", Method, 3},
+ {"(FatArch).DWARF", Method, 3},
+ {"(FatArch).ImportedLibraries", Method, 3},
+ {"(FatArch).ImportedSymbols", Method, 3},
+ {"(FatArch).Section", Method, 3},
+ {"(FatArch).Segment", Method, 3},
+ {"(LoadBytes).Raw", Method, 0},
+ {"(LoadCmd).GoString", Method, 0},
+ {"(LoadCmd).String", Method, 0},
+ {"(RelocTypeARM).GoString", Method, 10},
+ {"(RelocTypeARM).String", Method, 10},
+ {"(RelocTypeARM64).GoString", Method, 10},
+ {"(RelocTypeARM64).String", Method, 10},
+ {"(RelocTypeGeneric).GoString", Method, 10},
+ {"(RelocTypeGeneric).String", Method, 10},
+ {"(RelocTypeX86_64).GoString", Method, 10},
+ {"(RelocTypeX86_64).String", Method, 10},
+ {"(Rpath).Raw", Method, 10},
+ {"(Section).ReadAt", Method, 0},
+ {"(Segment).Raw", Method, 0},
+ {"(Segment).ReadAt", Method, 0},
+ {"(Symtab).Raw", Method, 0},
+ {"(Type).GoString", Method, 10},
+ {"(Type).String", Method, 10},
+ {"ARM64_RELOC_ADDEND", Const, 10},
+ {"ARM64_RELOC_BRANCH26", Const, 10},
+ {"ARM64_RELOC_GOT_LOAD_PAGE21", Const, 10},
+ {"ARM64_RELOC_GOT_LOAD_PAGEOFF12", Const, 10},
+ {"ARM64_RELOC_PAGE21", Const, 10},
+ {"ARM64_RELOC_PAGEOFF12", Const, 10},
+ {"ARM64_RELOC_POINTER_TO_GOT", Const, 10},
+ {"ARM64_RELOC_SUBTRACTOR", Const, 10},
+ {"ARM64_RELOC_TLVP_LOAD_PAGE21", Const, 10},
+ {"ARM64_RELOC_TLVP_LOAD_PAGEOFF12", Const, 10},
+ {"ARM64_RELOC_UNSIGNED", Const, 10},
+ {"ARM_RELOC_BR24", Const, 10},
+ {"ARM_RELOC_HALF", Const, 10},
+ {"ARM_RELOC_HALF_SECTDIFF", Const, 10},
+ {"ARM_RELOC_LOCAL_SECTDIFF", Const, 10},
+ {"ARM_RELOC_PAIR", Const, 10},
+ {"ARM_RELOC_PB_LA_PTR", Const, 10},
+ {"ARM_RELOC_SECTDIFF", Const, 10},
+ {"ARM_RELOC_VANILLA", Const, 10},
+ {"ARM_THUMB_32BIT_BRANCH", Const, 10},
+ {"ARM_THUMB_RELOC_BR22", Const, 10},
+ {"Cpu", Type, 0},
+ {"Cpu386", Const, 0},
+ {"CpuAmd64", Const, 0},
+ {"CpuArm", Const, 3},
+ {"CpuArm64", Const, 11},
+ {"CpuPpc", Const, 3},
+ {"CpuPpc64", Const, 3},
+ {"Dylib", Type, 0},
+ {"Dylib.CompatVersion", Field, 0},
+ {"Dylib.CurrentVersion", Field, 0},
+ {"Dylib.LoadBytes", Field, 0},
+ {"Dylib.Name", Field, 0},
+ {"Dylib.Time", Field, 0},
+ {"DylibCmd", Type, 0},
+ {"DylibCmd.Cmd", Field, 0},
+ {"DylibCmd.CompatVersion", Field, 0},
+ {"DylibCmd.CurrentVersion", Field, 0},
+ {"DylibCmd.Len", Field, 0},
+ {"DylibCmd.Name", Field, 0},
+ {"DylibCmd.Time", Field, 0},
+ {"Dysymtab", Type, 0},
+ {"Dysymtab.DysymtabCmd", Field, 0},
+ {"Dysymtab.IndirectSyms", Field, 0},
+ {"Dysymtab.LoadBytes", Field, 0},
+ {"DysymtabCmd", Type, 0},
+ {"DysymtabCmd.Cmd", Field, 0},
+ {"DysymtabCmd.Extrefsymoff", Field, 0},
+ {"DysymtabCmd.Extreloff", Field, 0},
+ {"DysymtabCmd.Iextdefsym", Field, 0},
+ {"DysymtabCmd.Ilocalsym", Field, 0},
+ {"DysymtabCmd.Indirectsymoff", Field, 0},
+ {"DysymtabCmd.Iundefsym", Field, 0},
+ {"DysymtabCmd.Len", Field, 0},
+ {"DysymtabCmd.Locreloff", Field, 0},
+ {"DysymtabCmd.Modtaboff", Field, 0},
+ {"DysymtabCmd.Nextdefsym", Field, 0},
+ {"DysymtabCmd.Nextrefsyms", Field, 0},
+ {"DysymtabCmd.Nextrel", Field, 0},
+ {"DysymtabCmd.Nindirectsyms", Field, 0},
+ {"DysymtabCmd.Nlocalsym", Field, 0},
+ {"DysymtabCmd.Nlocrel", Field, 0},
+ {"DysymtabCmd.Nmodtab", Field, 0},
+ {"DysymtabCmd.Ntoc", Field, 0},
+ {"DysymtabCmd.Nundefsym", Field, 0},
+ {"DysymtabCmd.Tocoffset", Field, 0},
+ {"ErrNotFat", Var, 3},
+ {"FatArch", Type, 3},
+ {"FatArch.FatArchHeader", Field, 3},
+ {"FatArch.File", Field, 3},
+ {"FatArchHeader", Type, 3},
+ {"FatArchHeader.Align", Field, 3},
+ {"FatArchHeader.Cpu", Field, 3},
+ {"FatArchHeader.Offset", Field, 3},
+ {"FatArchHeader.Size", Field, 3},
+ {"FatArchHeader.SubCpu", Field, 3},
+ {"FatFile", Type, 3},
+ {"FatFile.Arches", Field, 3},
+ {"FatFile.Magic", Field, 3},
+ {"File", Type, 0},
+ {"File.ByteOrder", Field, 0},
+ {"File.Dysymtab", Field, 0},
+ {"File.FileHeader", Field, 0},
+ {"File.Loads", Field, 0},
+ {"File.Sections", Field, 0},
+ {"File.Symtab", Field, 0},
+ {"FileHeader", Type, 0},
+ {"FileHeader.Cmdsz", Field, 0},
+ {"FileHeader.Cpu", Field, 0},
+ {"FileHeader.Flags", Field, 0},
+ {"FileHeader.Magic", Field, 0},
+ {"FileHeader.Ncmd", Field, 0},
+ {"FileHeader.SubCpu", Field, 0},
+ {"FileHeader.Type", Field, 0},
+ {"FlagAllModsBound", Const, 10},
+ {"FlagAllowStackExecution", Const, 10},
+ {"FlagAppExtensionSafe", Const, 10},
+ {"FlagBindAtLoad", Const, 10},
+ {"FlagBindsToWeak", Const, 10},
+ {"FlagCanonical", Const, 10},
+ {"FlagDeadStrippableDylib", Const, 10},
+ {"FlagDyldLink", Const, 10},
+ {"FlagForceFlat", Const, 10},
+ {"FlagHasTLVDescriptors", Const, 10},
+ {"FlagIncrLink", Const, 10},
+ {"FlagLazyInit", Const, 10},
+ {"FlagNoFixPrebinding", Const, 10},
+ {"FlagNoHeapExecution", Const, 10},
+ {"FlagNoMultiDefs", Const, 10},
+ {"FlagNoReexportedDylibs", Const, 10},
+ {"FlagNoUndefs", Const, 10},
+ {"FlagPIE", Const, 10},
+ {"FlagPrebindable", Const, 10},
+ {"FlagPrebound", Const, 10},
+ {"FlagRootSafe", Const, 10},
+ {"FlagSetuidSafe", Const, 10},
+ {"FlagSplitSegs", Const, 10},
+ {"FlagSubsectionsViaSymbols", Const, 10},
+ {"FlagTwoLevel", Const, 10},
+ {"FlagWeakDefines", Const, 10},
+ {"FormatError", Type, 0},
+ {"GENERIC_RELOC_LOCAL_SECTDIFF", Const, 10},
+ {"GENERIC_RELOC_PAIR", Const, 10},
+ {"GENERIC_RELOC_PB_LA_PTR", Const, 10},
+ {"GENERIC_RELOC_SECTDIFF", Const, 10},
+ {"GENERIC_RELOC_TLV", Const, 10},
+ {"GENERIC_RELOC_VANILLA", Const, 10},
+ {"Load", Type, 0},
+ {"LoadBytes", Type, 0},
+ {"LoadCmd", Type, 0},
+ {"LoadCmdDylib", Const, 0},
+ {"LoadCmdDylinker", Const, 0},
+ {"LoadCmdDysymtab", Const, 0},
+ {"LoadCmdRpath", Const, 10},
+ {"LoadCmdSegment", Const, 0},
+ {"LoadCmdSegment64", Const, 0},
+ {"LoadCmdSymtab", Const, 0},
+ {"LoadCmdThread", Const, 0},
+ {"LoadCmdUnixThread", Const, 0},
+ {"Magic32", Const, 0},
+ {"Magic64", Const, 0},
+ {"MagicFat", Const, 3},
+ {"NewFatFile", Func, 3},
+ {"NewFile", Func, 0},
+ {"Nlist32", Type, 0},
+ {"Nlist32.Desc", Field, 0},
+ {"Nlist32.Name", Field, 0},
+ {"Nlist32.Sect", Field, 0},
+ {"Nlist32.Type", Field, 0},
+ {"Nlist32.Value", Field, 0},
+ {"Nlist64", Type, 0},
+ {"Nlist64.Desc", Field, 0},
+ {"Nlist64.Name", Field, 0},
+ {"Nlist64.Sect", Field, 0},
+ {"Nlist64.Type", Field, 0},
+ {"Nlist64.Value", Field, 0},
+ {"Open", Func, 0},
+ {"OpenFat", Func, 3},
+ {"Regs386", Type, 0},
+ {"Regs386.AX", Field, 0},
+ {"Regs386.BP", Field, 0},
+ {"Regs386.BX", Field, 0},
+ {"Regs386.CS", Field, 0},
+ {"Regs386.CX", Field, 0},
+ {"Regs386.DI", Field, 0},
+ {"Regs386.DS", Field, 0},
+ {"Regs386.DX", Field, 0},
+ {"Regs386.ES", Field, 0},
+ {"Regs386.FLAGS", Field, 0},
+ {"Regs386.FS", Field, 0},
+ {"Regs386.GS", Field, 0},
+ {"Regs386.IP", Field, 0},
+ {"Regs386.SI", Field, 0},
+ {"Regs386.SP", Field, 0},
+ {"Regs386.SS", Field, 0},
+ {"RegsAMD64", Type, 0},
+ {"RegsAMD64.AX", Field, 0},
+ {"RegsAMD64.BP", Field, 0},
+ {"RegsAMD64.BX", Field, 0},
+ {"RegsAMD64.CS", Field, 0},
+ {"RegsAMD64.CX", Field, 0},
+ {"RegsAMD64.DI", Field, 0},
+ {"RegsAMD64.DX", Field, 0},
+ {"RegsAMD64.FLAGS", Field, 0},
+ {"RegsAMD64.FS", Field, 0},
+ {"RegsAMD64.GS", Field, 0},
+ {"RegsAMD64.IP", Field, 0},
+ {"RegsAMD64.R10", Field, 0},
+ {"RegsAMD64.R11", Field, 0},
+ {"RegsAMD64.R12", Field, 0},
+ {"RegsAMD64.R13", Field, 0},
+ {"RegsAMD64.R14", Field, 0},
+ {"RegsAMD64.R15", Field, 0},
+ {"RegsAMD64.R8", Field, 0},
+ {"RegsAMD64.R9", Field, 0},
+ {"RegsAMD64.SI", Field, 0},
+ {"RegsAMD64.SP", Field, 0},
+ {"Reloc", Type, 10},
+ {"Reloc.Addr", Field, 10},
+ {"Reloc.Extern", Field, 10},
+ {"Reloc.Len", Field, 10},
+ {"Reloc.Pcrel", Field, 10},
+ {"Reloc.Scattered", Field, 10},
+ {"Reloc.Type", Field, 10},
+ {"Reloc.Value", Field, 10},
+ {"RelocTypeARM", Type, 10},
+ {"RelocTypeARM64", Type, 10},
+ {"RelocTypeGeneric", Type, 10},
+ {"RelocTypeX86_64", Type, 10},
+ {"Rpath", Type, 10},
+ {"Rpath.LoadBytes", Field, 10},
+ {"Rpath.Path", Field, 10},
+ {"RpathCmd", Type, 10},
+ {"RpathCmd.Cmd", Field, 10},
+ {"RpathCmd.Len", Field, 10},
+ {"RpathCmd.Path", Field, 10},
+ {"Section", Type, 0},
+ {"Section.ReaderAt", Field, 0},
+ {"Section.Relocs", Field, 10},
+ {"Section.SectionHeader", Field, 0},
+ {"Section32", Type, 0},
+ {"Section32.Addr", Field, 0},
+ {"Section32.Align", Field, 0},
+ {"Section32.Flags", Field, 0},
+ {"Section32.Name", Field, 0},
+ {"Section32.Nreloc", Field, 0},
+ {"Section32.Offset", Field, 0},
+ {"Section32.Reloff", Field, 0},
+ {"Section32.Reserve1", Field, 0},
+ {"Section32.Reserve2", Field, 0},
+ {"Section32.Seg", Field, 0},
+ {"Section32.Size", Field, 0},
+ {"Section64", Type, 0},
+ {"Section64.Addr", Field, 0},
+ {"Section64.Align", Field, 0},
+ {"Section64.Flags", Field, 0},
+ {"Section64.Name", Field, 0},
+ {"Section64.Nreloc", Field, 0},
+ {"Section64.Offset", Field, 0},
+ {"Section64.Reloff", Field, 0},
+ {"Section64.Reserve1", Field, 0},
+ {"Section64.Reserve2", Field, 0},
+ {"Section64.Reserve3", Field, 0},
+ {"Section64.Seg", Field, 0},
+ {"Section64.Size", Field, 0},
+ {"SectionHeader", Type, 0},
+ {"SectionHeader.Addr", Field, 0},
+ {"SectionHeader.Align", Field, 0},
+ {"SectionHeader.Flags", Field, 0},
+ {"SectionHeader.Name", Field, 0},
+ {"SectionHeader.Nreloc", Field, 0},
+ {"SectionHeader.Offset", Field, 0},
+ {"SectionHeader.Reloff", Field, 0},
+ {"SectionHeader.Seg", Field, 0},
+ {"SectionHeader.Size", Field, 0},
+ {"Segment", Type, 0},
+ {"Segment.LoadBytes", Field, 0},
+ {"Segment.ReaderAt", Field, 0},
+ {"Segment.SegmentHeader", Field, 0},
+ {"Segment32", Type, 0},
+ {"Segment32.Addr", Field, 0},
+ {"Segment32.Cmd", Field, 0},
+ {"Segment32.Filesz", Field, 0},
+ {"Segment32.Flag", Field, 0},
+ {"Segment32.Len", Field, 0},
+ {"Segment32.Maxprot", Field, 0},
+ {"Segment32.Memsz", Field, 0},
+ {"Segment32.Name", Field, 0},
+ {"Segment32.Nsect", Field, 0},
+ {"Segment32.Offset", Field, 0},
+ {"Segment32.Prot", Field, 0},
+ {"Segment64", Type, 0},
+ {"Segment64.Addr", Field, 0},
+ {"Segment64.Cmd", Field, 0},
+ {"Segment64.Filesz", Field, 0},
+ {"Segment64.Flag", Field, 0},
+ {"Segment64.Len", Field, 0},
+ {"Segment64.Maxprot", Field, 0},
+ {"Segment64.Memsz", Field, 0},
+ {"Segment64.Name", Field, 0},
+ {"Segment64.Nsect", Field, 0},
+ {"Segment64.Offset", Field, 0},
+ {"Segment64.Prot", Field, 0},
+ {"SegmentHeader", Type, 0},
+ {"SegmentHeader.Addr", Field, 0},
+ {"SegmentHeader.Cmd", Field, 0},
+ {"SegmentHeader.Filesz", Field, 0},
+ {"SegmentHeader.Flag", Field, 0},
+ {"SegmentHeader.Len", Field, 0},
+ {"SegmentHeader.Maxprot", Field, 0},
+ {"SegmentHeader.Memsz", Field, 0},
+ {"SegmentHeader.Name", Field, 0},
+ {"SegmentHeader.Nsect", Field, 0},
+ {"SegmentHeader.Offset", Field, 0},
+ {"SegmentHeader.Prot", Field, 0},
+ {"Symbol", Type, 0},
+ {"Symbol.Desc", Field, 0},
+ {"Symbol.Name", Field, 0},
+ {"Symbol.Sect", Field, 0},
+ {"Symbol.Type", Field, 0},
+ {"Symbol.Value", Field, 0},
+ {"Symtab", Type, 0},
+ {"Symtab.LoadBytes", Field, 0},
+ {"Symtab.Syms", Field, 0},
+ {"Symtab.SymtabCmd", Field, 0},
+ {"SymtabCmd", Type, 0},
+ {"SymtabCmd.Cmd", Field, 0},
+ {"SymtabCmd.Len", Field, 0},
+ {"SymtabCmd.Nsyms", Field, 0},
+ {"SymtabCmd.Stroff", Field, 0},
+ {"SymtabCmd.Strsize", Field, 0},
+ {"SymtabCmd.Symoff", Field, 0},
+ {"Thread", Type, 0},
+ {"Thread.Cmd", Field, 0},
+ {"Thread.Data", Field, 0},
+ {"Thread.Len", Field, 0},
+ {"Thread.Type", Field, 0},
+ {"Type", Type, 0},
+ {"TypeBundle", Const, 3},
+ {"TypeDylib", Const, 3},
+ {"TypeExec", Const, 0},
+ {"TypeObj", Const, 0},
+ {"X86_64_RELOC_BRANCH", Const, 10},
+ {"X86_64_RELOC_GOT", Const, 10},
+ {"X86_64_RELOC_GOT_LOAD", Const, 10},
+ {"X86_64_RELOC_SIGNED", Const, 10},
+ {"X86_64_RELOC_SIGNED_1", Const, 10},
+ {"X86_64_RELOC_SIGNED_2", Const, 10},
+ {"X86_64_RELOC_SIGNED_4", Const, 10},
+ {"X86_64_RELOC_SUBTRACTOR", Const, 10},
+ {"X86_64_RELOC_TLV", Const, 10},
+ {"X86_64_RELOC_UNSIGNED", Const, 10},
+ },
+ "debug/pe": {
+ {"(*COFFSymbol).FullName", Method, 8},
+ {"(*File).COFFSymbolReadSectionDefAux", Method, 19},
+ {"(*File).Close", Method, 0},
+ {"(*File).DWARF", Method, 0},
+ {"(*File).ImportedLibraries", Method, 0},
+ {"(*File).ImportedSymbols", Method, 0},
+ {"(*File).Section", Method, 0},
+ {"(*FormatError).Error", Method, 0},
+ {"(*Section).Data", Method, 0},
+ {"(*Section).Open", Method, 0},
+ {"(Section).ReadAt", Method, 0},
+ {"(StringTable).String", Method, 8},
+ {"COFFSymbol", Type, 1},
+ {"COFFSymbol.Name", Field, 1},
+ {"COFFSymbol.NumberOfAuxSymbols", Field, 1},
+ {"COFFSymbol.SectionNumber", Field, 1},
+ {"COFFSymbol.StorageClass", Field, 1},
+ {"COFFSymbol.Type", Field, 1},
+ {"COFFSymbol.Value", Field, 1},
+ {"COFFSymbolAuxFormat5", Type, 19},
+ {"COFFSymbolAuxFormat5.Checksum", Field, 19},
+ {"COFFSymbolAuxFormat5.NumLineNumbers", Field, 19},
+ {"COFFSymbolAuxFormat5.NumRelocs", Field, 19},
+ {"COFFSymbolAuxFormat5.SecNum", Field, 19},
+ {"COFFSymbolAuxFormat5.Selection", Field, 19},
+ {"COFFSymbolAuxFormat5.Size", Field, 19},
+ {"COFFSymbolSize", Const, 1},
+ {"DataDirectory", Type, 3},
+ {"DataDirectory.Size", Field, 3},
+ {"DataDirectory.VirtualAddress", Field, 3},
+ {"File", Type, 0},
+ {"File.COFFSymbols", Field, 8},
+ {"File.FileHeader", Field, 0},
+ {"File.OptionalHeader", Field, 3},
+ {"File.Sections", Field, 0},
+ {"File.StringTable", Field, 8},
+ {"File.Symbols", Field, 1},
+ {"FileHeader", Type, 0},
+ {"FileHeader.Characteristics", Field, 0},
+ {"FileHeader.Machine", Field, 0},
+ {"FileHeader.NumberOfSections", Field, 0},
+ {"FileHeader.NumberOfSymbols", Field, 0},
+ {"FileHeader.PointerToSymbolTable", Field, 0},
+ {"FileHeader.SizeOfOptionalHeader", Field, 0},
+ {"FileHeader.TimeDateStamp", Field, 0},
+ {"FormatError", Type, 0},
+ {"IMAGE_COMDAT_SELECT_ANY", Const, 19},
+ {"IMAGE_COMDAT_SELECT_ASSOCIATIVE", Const, 19},
+ {"IMAGE_COMDAT_SELECT_EXACT_MATCH", Const, 19},
+ {"IMAGE_COMDAT_SELECT_LARGEST", Const, 19},
+ {"IMAGE_COMDAT_SELECT_NODUPLICATES", Const, 19},
+ {"IMAGE_COMDAT_SELECT_SAME_SIZE", Const, 19},
+ {"IMAGE_DIRECTORY_ENTRY_ARCHITECTURE", Const, 11},
+ {"IMAGE_DIRECTORY_ENTRY_BASERELOC", Const, 11},
+ {"IMAGE_DIRECTORY_ENTRY_BOUND_IMPORT", Const, 11},
+ {"IMAGE_DIRECTORY_ENTRY_COM_DESCRIPTOR", Const, 11},
+ {"IMAGE_DIRECTORY_ENTRY_DEBUG", Const, 11},
+ {"IMAGE_DIRECTORY_ENTRY_DELAY_IMPORT", Const, 11},
+ {"IMAGE_DIRECTORY_ENTRY_EXCEPTION", Const, 11},
+ {"IMAGE_DIRECTORY_ENTRY_EXPORT", Const, 11},
+ {"IMAGE_DIRECTORY_ENTRY_GLOBALPTR", Const, 11},
+ {"IMAGE_DIRECTORY_ENTRY_IAT", Const, 11},
+ {"IMAGE_DIRECTORY_ENTRY_IMPORT", Const, 11},
+ {"IMAGE_DIRECTORY_ENTRY_LOAD_CONFIG", Const, 11},
+ {"IMAGE_DIRECTORY_ENTRY_RESOURCE", Const, 11},
+ {"IMAGE_DIRECTORY_ENTRY_SECURITY", Const, 11},
+ {"IMAGE_DIRECTORY_ENTRY_TLS", Const, 11},
+ {"IMAGE_DLLCHARACTERISTICS_APPCONTAINER", Const, 15},
+ {"IMAGE_DLLCHARACTERISTICS_DYNAMIC_BASE", Const, 15},
+ {"IMAGE_DLLCHARACTERISTICS_FORCE_INTEGRITY", Const, 15},
+ {"IMAGE_DLLCHARACTERISTICS_GUARD_CF", Const, 15},
+ {"IMAGE_DLLCHARACTERISTICS_HIGH_ENTROPY_VA", Const, 15},
+ {"IMAGE_DLLCHARACTERISTICS_NO_BIND", Const, 15},
+ {"IMAGE_DLLCHARACTERISTICS_NO_ISOLATION", Const, 15},
+ {"IMAGE_DLLCHARACTERISTICS_NO_SEH", Const, 15},
+ {"IMAGE_DLLCHARACTERISTICS_NX_COMPAT", Const, 15},
+ {"IMAGE_DLLCHARACTERISTICS_TERMINAL_SERVER_AWARE", Const, 15},
+ {"IMAGE_DLLCHARACTERISTICS_WDM_DRIVER", Const, 15},
+ {"IMAGE_FILE_32BIT_MACHINE", Const, 15},
+ {"IMAGE_FILE_AGGRESIVE_WS_TRIM", Const, 15},
+ {"IMAGE_FILE_BYTES_REVERSED_HI", Const, 15},
+ {"IMAGE_FILE_BYTES_REVERSED_LO", Const, 15},
+ {"IMAGE_FILE_DEBUG_STRIPPED", Const, 15},
+ {"IMAGE_FILE_DLL", Const, 15},
+ {"IMAGE_FILE_EXECUTABLE_IMAGE", Const, 15},
+ {"IMAGE_FILE_LARGE_ADDRESS_AWARE", Const, 15},
+ {"IMAGE_FILE_LINE_NUMS_STRIPPED", Const, 15},
+ {"IMAGE_FILE_LOCAL_SYMS_STRIPPED", Const, 15},
+ {"IMAGE_FILE_MACHINE_AM33", Const, 0},
+ {"IMAGE_FILE_MACHINE_AMD64", Const, 0},
+ {"IMAGE_FILE_MACHINE_ARM", Const, 0},
+ {"IMAGE_FILE_MACHINE_ARM64", Const, 11},
+ {"IMAGE_FILE_MACHINE_ARMNT", Const, 12},
+ {"IMAGE_FILE_MACHINE_EBC", Const, 0},
+ {"IMAGE_FILE_MACHINE_I386", Const, 0},
+ {"IMAGE_FILE_MACHINE_IA64", Const, 0},
+ {"IMAGE_FILE_MACHINE_LOONGARCH32", Const, 19},
+ {"IMAGE_FILE_MACHINE_LOONGARCH64", Const, 19},
+ {"IMAGE_FILE_MACHINE_M32R", Const, 0},
+ {"IMAGE_FILE_MACHINE_MIPS16", Const, 0},
+ {"IMAGE_FILE_MACHINE_MIPSFPU", Const, 0},
+ {"IMAGE_FILE_MACHINE_MIPSFPU16", Const, 0},
+ {"IMAGE_FILE_MACHINE_POWERPC", Const, 0},
+ {"IMAGE_FILE_MACHINE_POWERPCFP", Const, 0},
+ {"IMAGE_FILE_MACHINE_R4000", Const, 0},
+ {"IMAGE_FILE_MACHINE_RISCV128", Const, 20},
+ {"IMAGE_FILE_MACHINE_RISCV32", Const, 20},
+ {"IMAGE_FILE_MACHINE_RISCV64", Const, 20},
+ {"IMAGE_FILE_MACHINE_SH3", Const, 0},
+ {"IMAGE_FILE_MACHINE_SH3DSP", Const, 0},
+ {"IMAGE_FILE_MACHINE_SH4", Const, 0},
+ {"IMAGE_FILE_MACHINE_SH5", Const, 0},
+ {"IMAGE_FILE_MACHINE_THUMB", Const, 0},
+ {"IMAGE_FILE_MACHINE_UNKNOWN", Const, 0},
+ {"IMAGE_FILE_MACHINE_WCEMIPSV2", Const, 0},
+ {"IMAGE_FILE_NET_RUN_FROM_SWAP", Const, 15},
+ {"IMAGE_FILE_RELOCS_STRIPPED", Const, 15},
+ {"IMAGE_FILE_REMOVABLE_RUN_FROM_SWAP", Const, 15},
+ {"IMAGE_FILE_SYSTEM", Const, 15},
+ {"IMAGE_FILE_UP_SYSTEM_ONLY", Const, 15},
+ {"IMAGE_SCN_CNT_CODE", Const, 19},
+ {"IMAGE_SCN_CNT_INITIALIZED_DATA", Const, 19},
+ {"IMAGE_SCN_CNT_UNINITIALIZED_DATA", Const, 19},
+ {"IMAGE_SCN_LNK_COMDAT", Const, 19},
+ {"IMAGE_SCN_MEM_DISCARDABLE", Const, 19},
+ {"IMAGE_SCN_MEM_EXECUTE", Const, 19},
+ {"IMAGE_SCN_MEM_READ", Const, 19},
+ {"IMAGE_SCN_MEM_WRITE", Const, 19},
+ {"IMAGE_SUBSYSTEM_EFI_APPLICATION", Const, 15},
+ {"IMAGE_SUBSYSTEM_EFI_BOOT_SERVICE_DRIVER", Const, 15},
+ {"IMAGE_SUBSYSTEM_EFI_ROM", Const, 15},
+ {"IMAGE_SUBSYSTEM_EFI_RUNTIME_DRIVER", Const, 15},
+ {"IMAGE_SUBSYSTEM_NATIVE", Const, 15},
+ {"IMAGE_SUBSYSTEM_NATIVE_WINDOWS", Const, 15},
+ {"IMAGE_SUBSYSTEM_OS2_CUI", Const, 15},
+ {"IMAGE_SUBSYSTEM_POSIX_CUI", Const, 15},
+ {"IMAGE_SUBSYSTEM_UNKNOWN", Const, 15},
+ {"IMAGE_SUBSYSTEM_WINDOWS_BOOT_APPLICATION", Const, 15},
+ {"IMAGE_SUBSYSTEM_WINDOWS_CE_GUI", Const, 15},
+ {"IMAGE_SUBSYSTEM_WINDOWS_CUI", Const, 15},
+ {"IMAGE_SUBSYSTEM_WINDOWS_GUI", Const, 15},
+ {"IMAGE_SUBSYSTEM_XBOX", Const, 15},
+ {"ImportDirectory", Type, 0},
+ {"ImportDirectory.FirstThunk", Field, 0},
+ {"ImportDirectory.ForwarderChain", Field, 0},
+ {"ImportDirectory.Name", Field, 0},
+ {"ImportDirectory.OriginalFirstThunk", Field, 0},
+ {"ImportDirectory.TimeDateStamp", Field, 0},
+ {"NewFile", Func, 0},
+ {"Open", Func, 0},
+ {"OptionalHeader32", Type, 3},
+ {"OptionalHeader32.AddressOfEntryPoint", Field, 3},
+ {"OptionalHeader32.BaseOfCode", Field, 3},
+ {"OptionalHeader32.BaseOfData", Field, 3},
+ {"OptionalHeader32.CheckSum", Field, 3},
+ {"OptionalHeader32.DataDirectory", Field, 3},
+ {"OptionalHeader32.DllCharacteristics", Field, 3},
+ {"OptionalHeader32.FileAlignment", Field, 3},
+ {"OptionalHeader32.ImageBase", Field, 3},
+ {"OptionalHeader32.LoaderFlags", Field, 3},
+ {"OptionalHeader32.Magic", Field, 3},
+ {"OptionalHeader32.MajorImageVersion", Field, 3},
+ {"OptionalHeader32.MajorLinkerVersion", Field, 3},
+ {"OptionalHeader32.MajorOperatingSystemVersion", Field, 3},
+ {"OptionalHeader32.MajorSubsystemVersion", Field, 3},
+ {"OptionalHeader32.MinorImageVersion", Field, 3},
+ {"OptionalHeader32.MinorLinkerVersion", Field, 3},
+ {"OptionalHeader32.MinorOperatingSystemVersion", Field, 3},
+ {"OptionalHeader32.MinorSubsystemVersion", Field, 3},
+ {"OptionalHeader32.NumberOfRvaAndSizes", Field, 3},
+ {"OptionalHeader32.SectionAlignment", Field, 3},
+ {"OptionalHeader32.SizeOfCode", Field, 3},
+ {"OptionalHeader32.SizeOfHeaders", Field, 3},
+ {"OptionalHeader32.SizeOfHeapCommit", Field, 3},
+ {"OptionalHeader32.SizeOfHeapReserve", Field, 3},
+ {"OptionalHeader32.SizeOfImage", Field, 3},
+ {"OptionalHeader32.SizeOfInitializedData", Field, 3},
+ {"OptionalHeader32.SizeOfStackCommit", Field, 3},
+ {"OptionalHeader32.SizeOfStackReserve", Field, 3},
+ {"OptionalHeader32.SizeOfUninitializedData", Field, 3},
+ {"OptionalHeader32.Subsystem", Field, 3},
+ {"OptionalHeader32.Win32VersionValue", Field, 3},
+ {"OptionalHeader64", Type, 3},
+ {"OptionalHeader64.AddressOfEntryPoint", Field, 3},
+ {"OptionalHeader64.BaseOfCode", Field, 3},
+ {"OptionalHeader64.CheckSum", Field, 3},
+ {"OptionalHeader64.DataDirectory", Field, 3},
+ {"OptionalHeader64.DllCharacteristics", Field, 3},
+ {"OptionalHeader64.FileAlignment", Field, 3},
+ {"OptionalHeader64.ImageBase", Field, 3},
+ {"OptionalHeader64.LoaderFlags", Field, 3},
+ {"OptionalHeader64.Magic", Field, 3},
+ {"OptionalHeader64.MajorImageVersion", Field, 3},
+ {"OptionalHeader64.MajorLinkerVersion", Field, 3},
+ {"OptionalHeader64.MajorOperatingSystemVersion", Field, 3},
+ {"OptionalHeader64.MajorSubsystemVersion", Field, 3},
+ {"OptionalHeader64.MinorImageVersion", Field, 3},
+ {"OptionalHeader64.MinorLinkerVersion", Field, 3},
+ {"OptionalHeader64.MinorOperatingSystemVersion", Field, 3},
+ {"OptionalHeader64.MinorSubsystemVersion", Field, 3},
+ {"OptionalHeader64.NumberOfRvaAndSizes", Field, 3},
+ {"OptionalHeader64.SectionAlignment", Field, 3},
+ {"OptionalHeader64.SizeOfCode", Field, 3},
+ {"OptionalHeader64.SizeOfHeaders", Field, 3},
+ {"OptionalHeader64.SizeOfHeapCommit", Field, 3},
+ {"OptionalHeader64.SizeOfHeapReserve", Field, 3},
+ {"OptionalHeader64.SizeOfImage", Field, 3},
+ {"OptionalHeader64.SizeOfInitializedData", Field, 3},
+ {"OptionalHeader64.SizeOfStackCommit", Field, 3},
+ {"OptionalHeader64.SizeOfStackReserve", Field, 3},
+ {"OptionalHeader64.SizeOfUninitializedData", Field, 3},
+ {"OptionalHeader64.Subsystem", Field, 3},
+ {"OptionalHeader64.Win32VersionValue", Field, 3},
+ {"Reloc", Type, 8},
+ {"Reloc.SymbolTableIndex", Field, 8},
+ {"Reloc.Type", Field, 8},
+ {"Reloc.VirtualAddress", Field, 8},
+ {"Section", Type, 0},
+ {"Section.ReaderAt", Field, 0},
+ {"Section.Relocs", Field, 8},
+ {"Section.SectionHeader", Field, 0},
+ {"SectionHeader", Type, 0},
+ {"SectionHeader.Characteristics", Field, 0},
+ {"SectionHeader.Name", Field, 0},
+ {"SectionHeader.NumberOfLineNumbers", Field, 0},
+ {"SectionHeader.NumberOfRelocations", Field, 0},
+ {"SectionHeader.Offset", Field, 0},
+ {"SectionHeader.PointerToLineNumbers", Field, 0},
+ {"SectionHeader.PointerToRelocations", Field, 0},
+ {"SectionHeader.Size", Field, 0},
+ {"SectionHeader.VirtualAddress", Field, 0},
+ {"SectionHeader.VirtualSize", Field, 0},
+ {"SectionHeader32", Type, 0},
+ {"SectionHeader32.Characteristics", Field, 0},
+ {"SectionHeader32.Name", Field, 0},
+ {"SectionHeader32.NumberOfLineNumbers", Field, 0},
+ {"SectionHeader32.NumberOfRelocations", Field, 0},
+ {"SectionHeader32.PointerToLineNumbers", Field, 0},
+ {"SectionHeader32.PointerToRawData", Field, 0},
+ {"SectionHeader32.PointerToRelocations", Field, 0},
+ {"SectionHeader32.SizeOfRawData", Field, 0},
+ {"SectionHeader32.VirtualAddress", Field, 0},
+ {"SectionHeader32.VirtualSize", Field, 0},
+ {"StringTable", Type, 8},
+ {"Symbol", Type, 1},
+ {"Symbol.Name", Field, 1},
+ {"Symbol.SectionNumber", Field, 1},
+ {"Symbol.StorageClass", Field, 1},
+ {"Symbol.Type", Field, 1},
+ {"Symbol.Value", Field, 1},
+ },
+ "debug/plan9obj": {
+ {"(*File).Close", Method, 3},
+ {"(*File).Section", Method, 3},
+ {"(*File).Symbols", Method, 3},
+ {"(*Section).Data", Method, 3},
+ {"(*Section).Open", Method, 3},
+ {"(Section).ReadAt", Method, 3},
+ {"ErrNoSymbols", Var, 18},
+ {"File", Type, 3},
+ {"File.FileHeader", Field, 3},
+ {"File.Sections", Field, 3},
+ {"FileHeader", Type, 3},
+ {"FileHeader.Bss", Field, 3},
+ {"FileHeader.Entry", Field, 3},
+ {"FileHeader.HdrSize", Field, 4},
+ {"FileHeader.LoadAddress", Field, 4},
+ {"FileHeader.Magic", Field, 3},
+ {"FileHeader.PtrSize", Field, 3},
+ {"Magic386", Const, 3},
+ {"Magic64", Const, 3},
+ {"MagicAMD64", Const, 3},
+ {"MagicARM", Const, 3},
+ {"NewFile", Func, 3},
+ {"Open", Func, 3},
+ {"Section", Type, 3},
+ {"Section.ReaderAt", Field, 3},
+ {"Section.SectionHeader", Field, 3},
+ {"SectionHeader", Type, 3},
+ {"SectionHeader.Name", Field, 3},
+ {"SectionHeader.Offset", Field, 3},
+ {"SectionHeader.Size", Field, 3},
+ {"Sym", Type, 3},
+ {"Sym.Name", Field, 3},
+ {"Sym.Type", Field, 3},
+ {"Sym.Value", Field, 3},
+ },
+ "embed": {
+ {"(FS).Open", Method, 16},
+ {"(FS).ReadDir", Method, 16},
+ {"(FS).ReadFile", Method, 16},
+ {"FS", Type, 16},
+ },
+ "encoding": {
+ {"BinaryMarshaler", Type, 2},
+ {"BinaryUnmarshaler", Type, 2},
+ {"TextMarshaler", Type, 2},
+ {"TextUnmarshaler", Type, 2},
+ },
+ "encoding/ascii85": {
+ {"(CorruptInputError).Error", Method, 0},
+ {"CorruptInputError", Type, 0},
+ {"Decode", Func, 0},
+ {"Encode", Func, 0},
+ {"MaxEncodedLen", Func, 0},
+ {"NewDecoder", Func, 0},
+ {"NewEncoder", Func, 0},
+ },
+ "encoding/asn1": {
+ {"(BitString).At", Method, 0},
+ {"(BitString).RightAlign", Method, 0},
+ {"(ObjectIdentifier).Equal", Method, 0},
+ {"(ObjectIdentifier).String", Method, 3},
+ {"(StructuralError).Error", Method, 0},
+ {"(SyntaxError).Error", Method, 0},
+ {"BitString", Type, 0},
+ {"BitString.BitLength", Field, 0},
+ {"BitString.Bytes", Field, 0},
+ {"ClassApplication", Const, 6},
+ {"ClassContextSpecific", Const, 6},
+ {"ClassPrivate", Const, 6},
+ {"ClassUniversal", Const, 6},
+ {"Enumerated", Type, 0},
+ {"Flag", Type, 0},
+ {"Marshal", Func, 0},
+ {"MarshalWithParams", Func, 10},
+ {"NullBytes", Var, 9},
+ {"NullRawValue", Var, 9},
+ {"ObjectIdentifier", Type, 0},
+ {"RawContent", Type, 0},
+ {"RawValue", Type, 0},
+ {"RawValue.Bytes", Field, 0},
+ {"RawValue.Class", Field, 0},
+ {"RawValue.FullBytes", Field, 0},
+ {"RawValue.IsCompound", Field, 0},
+ {"RawValue.Tag", Field, 0},
+ {"StructuralError", Type, 0},
+ {"StructuralError.Msg", Field, 0},
+ {"SyntaxError", Type, 0},
+ {"SyntaxError.Msg", Field, 0},
+ {"TagBMPString", Const, 14},
+ {"TagBitString", Const, 6},
+ {"TagBoolean", Const, 6},
+ {"TagEnum", Const, 6},
+ {"TagGeneralString", Const, 6},
+ {"TagGeneralizedTime", Const, 6},
+ {"TagIA5String", Const, 6},
+ {"TagInteger", Const, 6},
+ {"TagNull", Const, 9},
+ {"TagNumericString", Const, 10},
+ {"TagOID", Const, 6},
+ {"TagOctetString", Const, 6},
+ {"TagPrintableString", Const, 6},
+ {"TagSequence", Const, 6},
+ {"TagSet", Const, 6},
+ {"TagT61String", Const, 6},
+ {"TagUTCTime", Const, 6},
+ {"TagUTF8String", Const, 6},
+ {"Unmarshal", Func, 0},
+ {"UnmarshalWithParams", Func, 0},
+ },
+ "encoding/base32": {
+ {"(*Encoding).AppendDecode", Method, 22},
+ {"(*Encoding).AppendEncode", Method, 22},
+ {"(*Encoding).Decode", Method, 0},
+ {"(*Encoding).DecodeString", Method, 0},
+ {"(*Encoding).DecodedLen", Method, 0},
+ {"(*Encoding).Encode", Method, 0},
+ {"(*Encoding).EncodeToString", Method, 0},
+ {"(*Encoding).EncodedLen", Method, 0},
+ {"(CorruptInputError).Error", Method, 0},
+ {"(Encoding).WithPadding", Method, 9},
+ {"CorruptInputError", Type, 0},
+ {"Encoding", Type, 0},
+ {"HexEncoding", Var, 0},
+ {"NewDecoder", Func, 0},
+ {"NewEncoder", Func, 0},
+ {"NewEncoding", Func, 0},
+ {"NoPadding", Const, 9},
+ {"StdEncoding", Var, 0},
+ {"StdPadding", Const, 9},
+ },
+ "encoding/base64": {
+ {"(*Encoding).AppendDecode", Method, 22},
+ {"(*Encoding).AppendEncode", Method, 22},
+ {"(*Encoding).Decode", Method, 0},
+ {"(*Encoding).DecodeString", Method, 0},
+ {"(*Encoding).DecodedLen", Method, 0},
+ {"(*Encoding).Encode", Method, 0},
+ {"(*Encoding).EncodeToString", Method, 0},
+ {"(*Encoding).EncodedLen", Method, 0},
+ {"(CorruptInputError).Error", Method, 0},
+ {"(Encoding).Strict", Method, 8},
+ {"(Encoding).WithPadding", Method, 5},
+ {"CorruptInputError", Type, 0},
+ {"Encoding", Type, 0},
+ {"NewDecoder", Func, 0},
+ {"NewEncoder", Func, 0},
+ {"NewEncoding", Func, 0},
+ {"NoPadding", Const, 5},
+ {"RawStdEncoding", Var, 5},
+ {"RawURLEncoding", Var, 5},
+ {"StdEncoding", Var, 0},
+ {"StdPadding", Const, 5},
+ {"URLEncoding", Var, 0},
+ },
+ "encoding/binary": {
+ {"Append", Func, 23},
+ {"AppendByteOrder", Type, 19},
+ {"AppendUvarint", Func, 19},
+ {"AppendVarint", Func, 19},
+ {"BigEndian", Var, 0},
+ {"ByteOrder", Type, 0},
+ {"Decode", Func, 23},
+ {"Encode", Func, 23},
+ {"LittleEndian", Var, 0},
+ {"MaxVarintLen16", Const, 0},
+ {"MaxVarintLen32", Const, 0},
+ {"MaxVarintLen64", Const, 0},
+ {"NativeEndian", Var, 21},
+ {"PutUvarint", Func, 0},
+ {"PutVarint", Func, 0},
+ {"Read", Func, 0},
+ {"ReadUvarint", Func, 0},
+ {"ReadVarint", Func, 0},
+ {"Size", Func, 0},
+ {"Uvarint", Func, 0},
+ {"Varint", Func, 0},
+ {"Write", Func, 0},
+ },
+ "encoding/csv": {
+ {"(*ParseError).Error", Method, 0},
+ {"(*ParseError).Unwrap", Method, 13},
+ {"(*Reader).FieldPos", Method, 17},
+ {"(*Reader).InputOffset", Method, 19},
+ {"(*Reader).Read", Method, 0},
+ {"(*Reader).ReadAll", Method, 0},
+ {"(*Writer).Error", Method, 1},
+ {"(*Writer).Flush", Method, 0},
+ {"(*Writer).Write", Method, 0},
+ {"(*Writer).WriteAll", Method, 0},
+ {"ErrBareQuote", Var, 0},
+ {"ErrFieldCount", Var, 0},
+ {"ErrQuote", Var, 0},
+ {"ErrTrailingComma", Var, 0},
+ {"NewReader", Func, 0},
+ {"NewWriter", Func, 0},
+ {"ParseError", Type, 0},
+ {"ParseError.Column", Field, 0},
+ {"ParseError.Err", Field, 0},
+ {"ParseError.Line", Field, 0},
+ {"ParseError.StartLine", Field, 10},
+ {"Reader", Type, 0},
+ {"Reader.Comma", Field, 0},
+ {"Reader.Comment", Field, 0},
+ {"Reader.FieldsPerRecord", Field, 0},
+ {"Reader.LazyQuotes", Field, 0},
+ {"Reader.ReuseRecord", Field, 9},
+ {"Reader.TrailingComma", Field, 0},
+ {"Reader.TrimLeadingSpace", Field, 0},
+ {"Writer", Type, 0},
+ {"Writer.Comma", Field, 0},
+ {"Writer.UseCRLF", Field, 0},
+ },
+ "encoding/gob": {
+ {"(*Decoder).Decode", Method, 0},
+ {"(*Decoder).DecodeValue", Method, 0},
+ {"(*Encoder).Encode", Method, 0},
+ {"(*Encoder).EncodeValue", Method, 0},
+ {"CommonType", Type, 0},
+ {"CommonType.Id", Field, 0},
+ {"CommonType.Name", Field, 0},
+ {"Decoder", Type, 0},
+ {"Encoder", Type, 0},
+ {"GobDecoder", Type, 0},
+ {"GobEncoder", Type, 0},
+ {"NewDecoder", Func, 0},
+ {"NewEncoder", Func, 0},
+ {"Register", Func, 0},
+ {"RegisterName", Func, 0},
+ },
+ "encoding/hex": {
+ {"(InvalidByteError).Error", Method, 0},
+ {"AppendDecode", Func, 22},
+ {"AppendEncode", Func, 22},
+ {"Decode", Func, 0},
+ {"DecodeString", Func, 0},
+ {"DecodedLen", Func, 0},
+ {"Dump", Func, 0},
+ {"Dumper", Func, 0},
+ {"Encode", Func, 0},
+ {"EncodeToString", Func, 0},
+ {"EncodedLen", Func, 0},
+ {"ErrLength", Var, 0},
+ {"InvalidByteError", Type, 0},
+ {"NewDecoder", Func, 10},
+ {"NewEncoder", Func, 10},
+ },
+ "encoding/json": {
+ {"(*Decoder).Buffered", Method, 1},
+ {"(*Decoder).Decode", Method, 0},
+ {"(*Decoder).DisallowUnknownFields", Method, 10},
+ {"(*Decoder).InputOffset", Method, 14},
+ {"(*Decoder).More", Method, 5},
+ {"(*Decoder).Token", Method, 5},
+ {"(*Decoder).UseNumber", Method, 1},
+ {"(*Encoder).Encode", Method, 0},
+ {"(*Encoder).SetEscapeHTML", Method, 7},
+ {"(*Encoder).SetIndent", Method, 7},
+ {"(*InvalidUTF8Error).Error", Method, 0},
+ {"(*InvalidUnmarshalError).Error", Method, 0},
+ {"(*MarshalerError).Error", Method, 0},
+ {"(*MarshalerError).Unwrap", Method, 13},
+ {"(*RawMessage).MarshalJSON", Method, 0},
+ {"(*RawMessage).UnmarshalJSON", Method, 0},
+ {"(*SyntaxError).Error", Method, 0},
+ {"(*UnmarshalFieldError).Error", Method, 0},
+ {"(*UnmarshalTypeError).Error", Method, 0},
+ {"(*UnsupportedTypeError).Error", Method, 0},
+ {"(*UnsupportedValueError).Error", Method, 0},
+ {"(Delim).String", Method, 5},
+ {"(Number).Float64", Method, 1},
+ {"(Number).Int64", Method, 1},
+ {"(Number).String", Method, 1},
+ {"(RawMessage).MarshalJSON", Method, 8},
+ {"Compact", Func, 0},
+ {"Decoder", Type, 0},
+ {"Delim", Type, 5},
+ {"Encoder", Type, 0},
+ {"HTMLEscape", Func, 0},
+ {"Indent", Func, 0},
+ {"InvalidUTF8Error", Type, 0},
+ {"InvalidUTF8Error.S", Field, 0},
+ {"InvalidUnmarshalError", Type, 0},
+ {"InvalidUnmarshalError.Type", Field, 0},
+ {"Marshal", Func, 0},
+ {"MarshalIndent", Func, 0},
+ {"Marshaler", Type, 0},
+ {"MarshalerError", Type, 0},
+ {"MarshalerError.Err", Field, 0},
+ {"MarshalerError.Type", Field, 0},
+ {"NewDecoder", Func, 0},
+ {"NewEncoder", Func, 0},
+ {"Number", Type, 1},
+ {"RawMessage", Type, 0},
+ {"SyntaxError", Type, 0},
+ {"SyntaxError.Offset", Field, 0},
+ {"Token", Type, 5},
+ {"Unmarshal", Func, 0},
+ {"UnmarshalFieldError", Type, 0},
+ {"UnmarshalFieldError.Field", Field, 0},
+ {"UnmarshalFieldError.Key", Field, 0},
+ {"UnmarshalFieldError.Type", Field, 0},
+ {"UnmarshalTypeError", Type, 0},
+ {"UnmarshalTypeError.Field", Field, 8},
+ {"UnmarshalTypeError.Offset", Field, 5},
+ {"UnmarshalTypeError.Struct", Field, 8},
+ {"UnmarshalTypeError.Type", Field, 0},
+ {"UnmarshalTypeError.Value", Field, 0},
+ {"Unmarshaler", Type, 0},
+ {"UnsupportedTypeError", Type, 0},
+ {"UnsupportedTypeError.Type", Field, 0},
+ {"UnsupportedValueError", Type, 0},
+ {"UnsupportedValueError.Str", Field, 0},
+ {"UnsupportedValueError.Value", Field, 0},
+ {"Valid", Func, 9},
+ },
+ "encoding/pem": {
+ {"Block", Type, 0},
+ {"Block.Bytes", Field, 0},
+ {"Block.Headers", Field, 0},
+ {"Block.Type", Field, 0},
+ {"Decode", Func, 0},
+ {"Encode", Func, 0},
+ {"EncodeToMemory", Func, 0},
+ },
+ "encoding/xml": {
+ {"(*Decoder).Decode", Method, 0},
+ {"(*Decoder).DecodeElement", Method, 0},
+ {"(*Decoder).InputOffset", Method, 4},
+ {"(*Decoder).InputPos", Method, 19},
+ {"(*Decoder).RawToken", Method, 0},
+ {"(*Decoder).Skip", Method, 0},
+ {"(*Decoder).Token", Method, 0},
+ {"(*Encoder).Close", Method, 20},
+ {"(*Encoder).Encode", Method, 0},
+ {"(*Encoder).EncodeElement", Method, 2},
+ {"(*Encoder).EncodeToken", Method, 2},
+ {"(*Encoder).Flush", Method, 2},
+ {"(*Encoder).Indent", Method, 1},
+ {"(*SyntaxError).Error", Method, 0},
+ {"(*TagPathError).Error", Method, 0},
+ {"(*UnsupportedTypeError).Error", Method, 0},
+ {"(CharData).Copy", Method, 0},
+ {"(Comment).Copy", Method, 0},
+ {"(Directive).Copy", Method, 0},
+ {"(ProcInst).Copy", Method, 0},
+ {"(StartElement).Copy", Method, 0},
+ {"(StartElement).End", Method, 2},
+ {"(UnmarshalError).Error", Method, 0},
+ {"Attr", Type, 0},
+ {"Attr.Name", Field, 0},
+ {"Attr.Value", Field, 0},
+ {"CharData", Type, 0},
+ {"Comment", Type, 0},
+ {"CopyToken", Func, 0},
+ {"Decoder", Type, 0},
+ {"Decoder.AutoClose", Field, 0},
+ {"Decoder.CharsetReader", Field, 0},
+ {"Decoder.DefaultSpace", Field, 1},
+ {"Decoder.Entity", Field, 0},
+ {"Decoder.Strict", Field, 0},
+ {"Directive", Type, 0},
+ {"Encoder", Type, 0},
+ {"EndElement", Type, 0},
+ {"EndElement.Name", Field, 0},
+ {"Escape", Func, 0},
+ {"EscapeText", Func, 1},
+ {"HTMLAutoClose", Var, 0},
+ {"HTMLEntity", Var, 0},
+ {"Header", Const, 0},
+ {"Marshal", Func, 0},
+ {"MarshalIndent", Func, 0},
+ {"Marshaler", Type, 2},
+ {"MarshalerAttr", Type, 2},
+ {"Name", Type, 0},
+ {"Name.Local", Field, 0},
+ {"Name.Space", Field, 0},
+ {"NewDecoder", Func, 0},
+ {"NewEncoder", Func, 0},
+ {"NewTokenDecoder", Func, 10},
+ {"ProcInst", Type, 0},
+ {"ProcInst.Inst", Field, 0},
+ {"ProcInst.Target", Field, 0},
+ {"StartElement", Type, 0},
+ {"StartElement.Attr", Field, 0},
+ {"StartElement.Name", Field, 0},
+ {"SyntaxError", Type, 0},
+ {"SyntaxError.Line", Field, 0},
+ {"SyntaxError.Msg", Field, 0},
+ {"TagPathError", Type, 0},
+ {"TagPathError.Field1", Field, 0},
+ {"TagPathError.Field2", Field, 0},
+ {"TagPathError.Struct", Field, 0},
+ {"TagPathError.Tag1", Field, 0},
+ {"TagPathError.Tag2", Field, 0},
+ {"Token", Type, 0},
+ {"TokenReader", Type, 10},
+ {"Unmarshal", Func, 0},
+ {"UnmarshalError", Type, 0},
+ {"Unmarshaler", Type, 2},
+ {"UnmarshalerAttr", Type, 2},
+ {"UnsupportedTypeError", Type, 0},
+ {"UnsupportedTypeError.Type", Field, 0},
+ },
+ "errors": {
+ {"As", Func, 13},
+ {"ErrUnsupported", Var, 21},
+ {"Is", Func, 13},
+ {"Join", Func, 20},
+ {"New", Func, 0},
+ {"Unwrap", Func, 13},
+ },
+ "expvar": {
+ {"(*Float).Add", Method, 0},
+ {"(*Float).Set", Method, 0},
+ {"(*Float).String", Method, 0},
+ {"(*Float).Value", Method, 8},
+ {"(*Int).Add", Method, 0},
+ {"(*Int).Set", Method, 0},
+ {"(*Int).String", Method, 0},
+ {"(*Int).Value", Method, 8},
+ {"(*Map).Add", Method, 0},
+ {"(*Map).AddFloat", Method, 0},
+ {"(*Map).Delete", Method, 12},
+ {"(*Map).Do", Method, 0},
+ {"(*Map).Get", Method, 0},
+ {"(*Map).Init", Method, 0},
+ {"(*Map).Set", Method, 0},
+ {"(*Map).String", Method, 0},
+ {"(*String).Set", Method, 0},
+ {"(*String).String", Method, 0},
+ {"(*String).Value", Method, 8},
+ {"(Func).String", Method, 0},
+ {"(Func).Value", Method, 8},
+ {"Do", Func, 0},
+ {"Float", Type, 0},
+ {"Func", Type, 0},
+ {"Get", Func, 0},
+ {"Handler", Func, 8},
+ {"Int", Type, 0},
+ {"KeyValue", Type, 0},
+ {"KeyValue.Key", Field, 0},
+ {"KeyValue.Value", Field, 0},
+ {"Map", Type, 0},
+ {"NewFloat", Func, 0},
+ {"NewInt", Func, 0},
+ {"NewMap", Func, 0},
+ {"NewString", Func, 0},
+ {"Publish", Func, 0},
+ {"String", Type, 0},
+ {"Var", Type, 0},
+ },
+ "flag": {
+ {"(*FlagSet).Arg", Method, 0},
+ {"(*FlagSet).Args", Method, 0},
+ {"(*FlagSet).Bool", Method, 0},
+ {"(*FlagSet).BoolFunc", Method, 21},
+ {"(*FlagSet).BoolVar", Method, 0},
+ {"(*FlagSet).Duration", Method, 0},
+ {"(*FlagSet).DurationVar", Method, 0},
+ {"(*FlagSet).ErrorHandling", Method, 10},
+ {"(*FlagSet).Float64", Method, 0},
+ {"(*FlagSet).Float64Var", Method, 0},
+ {"(*FlagSet).Func", Method, 16},
+ {"(*FlagSet).Init", Method, 0},
+ {"(*FlagSet).Int", Method, 0},
+ {"(*FlagSet).Int64", Method, 0},
+ {"(*FlagSet).Int64Var", Method, 0},
+ {"(*FlagSet).IntVar", Method, 0},
+ {"(*FlagSet).Lookup", Method, 0},
+ {"(*FlagSet).NArg", Method, 0},
+ {"(*FlagSet).NFlag", Method, 0},
+ {"(*FlagSet).Name", Method, 10},
+ {"(*FlagSet).Output", Method, 10},
+ {"(*FlagSet).Parse", Method, 0},
+ {"(*FlagSet).Parsed", Method, 0},
+ {"(*FlagSet).PrintDefaults", Method, 0},
+ {"(*FlagSet).Set", Method, 0},
+ {"(*FlagSet).SetOutput", Method, 0},
+ {"(*FlagSet).String", Method, 0},
+ {"(*FlagSet).StringVar", Method, 0},
+ {"(*FlagSet).TextVar", Method, 19},
+ {"(*FlagSet).Uint", Method, 0},
+ {"(*FlagSet).Uint64", Method, 0},
+ {"(*FlagSet).Uint64Var", Method, 0},
+ {"(*FlagSet).UintVar", Method, 0},
+ {"(*FlagSet).Var", Method, 0},
+ {"(*FlagSet).Visit", Method, 0},
+ {"(*FlagSet).VisitAll", Method, 0},
+ {"Arg", Func, 0},
+ {"Args", Func, 0},
+ {"Bool", Func, 0},
+ {"BoolFunc", Func, 21},
+ {"BoolVar", Func, 0},
+ {"CommandLine", Var, 2},
+ {"ContinueOnError", Const, 0},
+ {"Duration", Func, 0},
+ {"DurationVar", Func, 0},
+ {"ErrHelp", Var, 0},
+ {"ErrorHandling", Type, 0},
+ {"ExitOnError", Const, 0},
+ {"Flag", Type, 0},
+ {"Flag.DefValue", Field, 0},
+ {"Flag.Name", Field, 0},
+ {"Flag.Usage", Field, 0},
+ {"Flag.Value", Field, 0},
+ {"FlagSet", Type, 0},
+ {"FlagSet.Usage", Field, 0},
+ {"Float64", Func, 0},
+ {"Float64Var", Func, 0},
+ {"Func", Func, 16},
+ {"Getter", Type, 2},
+ {"Int", Func, 0},
+ {"Int64", Func, 0},
+ {"Int64Var", Func, 0},
+ {"IntVar", Func, 0},
+ {"Lookup", Func, 0},
+ {"NArg", Func, 0},
+ {"NFlag", Func, 0},
+ {"NewFlagSet", Func, 0},
+ {"PanicOnError", Const, 0},
+ {"Parse", Func, 0},
+ {"Parsed", Func, 0},
+ {"PrintDefaults", Func, 0},
+ {"Set", Func, 0},
+ {"String", Func, 0},
+ {"StringVar", Func, 0},
+ {"TextVar", Func, 19},
+ {"Uint", Func, 0},
+ {"Uint64", Func, 0},
+ {"Uint64Var", Func, 0},
+ {"UintVar", Func, 0},
+ {"UnquoteUsage", Func, 5},
+ {"Usage", Var, 0},
+ {"Value", Type, 0},
+ {"Var", Func, 0},
+ {"Visit", Func, 0},
+ {"VisitAll", Func, 0},
+ },
+ "fmt": {
+ {"Append", Func, 19},
+ {"Appendf", Func, 19},
+ {"Appendln", Func, 19},
+ {"Errorf", Func, 0},
+ {"FormatString", Func, 20},
+ {"Formatter", Type, 0},
+ {"Fprint", Func, 0},
+ {"Fprintf", Func, 0},
+ {"Fprintln", Func, 0},
+ {"Fscan", Func, 0},
+ {"Fscanf", Func, 0},
+ {"Fscanln", Func, 0},
+ {"GoStringer", Type, 0},
+ {"Print", Func, 0},
+ {"Printf", Func, 0},
+ {"Println", Func, 0},
+ {"Scan", Func, 0},
+ {"ScanState", Type, 0},
+ {"Scanf", Func, 0},
+ {"Scanln", Func, 0},
+ {"Scanner", Type, 0},
+ {"Sprint", Func, 0},
+ {"Sprintf", Func, 0},
+ {"Sprintln", Func, 0},
+ {"Sscan", Func, 0},
+ {"Sscanf", Func, 0},
+ {"Sscanln", Func, 0},
+ {"State", Type, 0},
+ {"Stringer", Type, 0},
+ },
+ "go/ast": {
+ {"(*ArrayType).End", Method, 0},
+ {"(*ArrayType).Pos", Method, 0},
+ {"(*AssignStmt).End", Method, 0},
+ {"(*AssignStmt).Pos", Method, 0},
+ {"(*BadDecl).End", Method, 0},
+ {"(*BadDecl).Pos", Method, 0},
+ {"(*BadExpr).End", Method, 0},
+ {"(*BadExpr).Pos", Method, 0},
+ {"(*BadStmt).End", Method, 0},
+ {"(*BadStmt).Pos", Method, 0},
+ {"(*BasicLit).End", Method, 0},
+ {"(*BasicLit).Pos", Method, 0},
+ {"(*BinaryExpr).End", Method, 0},
+ {"(*BinaryExpr).Pos", Method, 0},
+ {"(*BlockStmt).End", Method, 0},
+ {"(*BlockStmt).Pos", Method, 0},
+ {"(*BranchStmt).End", Method, 0},
+ {"(*BranchStmt).Pos", Method, 0},
+ {"(*CallExpr).End", Method, 0},
+ {"(*CallExpr).Pos", Method, 0},
+ {"(*CaseClause).End", Method, 0},
+ {"(*CaseClause).Pos", Method, 0},
+ {"(*ChanType).End", Method, 0},
+ {"(*ChanType).Pos", Method, 0},
+ {"(*CommClause).End", Method, 0},
+ {"(*CommClause).Pos", Method, 0},
+ {"(*Comment).End", Method, 0},
+ {"(*Comment).Pos", Method, 0},
+ {"(*CommentGroup).End", Method, 0},
+ {"(*CommentGroup).Pos", Method, 0},
+ {"(*CommentGroup).Text", Method, 0},
+ {"(*CompositeLit).End", Method, 0},
+ {"(*CompositeLit).Pos", Method, 0},
+ {"(*DeclStmt).End", Method, 0},
+ {"(*DeclStmt).Pos", Method, 0},
+ {"(*DeferStmt).End", Method, 0},
+ {"(*DeferStmt).Pos", Method, 0},
+ {"(*Ellipsis).End", Method, 0},
+ {"(*Ellipsis).Pos", Method, 0},
+ {"(*EmptyStmt).End", Method, 0},
+ {"(*EmptyStmt).Pos", Method, 0},
+ {"(*ExprStmt).End", Method, 0},
+ {"(*ExprStmt).Pos", Method, 0},
+ {"(*Field).End", Method, 0},
+ {"(*Field).Pos", Method, 0},
+ {"(*FieldList).End", Method, 0},
+ {"(*FieldList).NumFields", Method, 0},
+ {"(*FieldList).Pos", Method, 0},
+ {"(*File).End", Method, 0},
+ {"(*File).Pos", Method, 0},
+ {"(*ForStmt).End", Method, 0},
+ {"(*ForStmt).Pos", Method, 0},
+ {"(*FuncDecl).End", Method, 0},
+ {"(*FuncDecl).Pos", Method, 0},
+ {"(*FuncLit).End", Method, 0},
+ {"(*FuncLit).Pos", Method, 0},
+ {"(*FuncType).End", Method, 0},
+ {"(*FuncType).Pos", Method, 0},
+ {"(*GenDecl).End", Method, 0},
+ {"(*GenDecl).Pos", Method, 0},
+ {"(*GoStmt).End", Method, 0},
+ {"(*GoStmt).Pos", Method, 0},
+ {"(*Ident).End", Method, 0},
+ {"(*Ident).IsExported", Method, 0},
+ {"(*Ident).Pos", Method, 0},
+ {"(*Ident).String", Method, 0},
+ {"(*IfStmt).End", Method, 0},
+ {"(*IfStmt).Pos", Method, 0},
+ {"(*ImportSpec).End", Method, 0},
+ {"(*ImportSpec).Pos", Method, 0},
+ {"(*IncDecStmt).End", Method, 0},
+ {"(*IncDecStmt).Pos", Method, 0},
+ {"(*IndexExpr).End", Method, 0},
+ {"(*IndexExpr).Pos", Method, 0},
+ {"(*IndexListExpr).End", Method, 18},
+ {"(*IndexListExpr).Pos", Method, 18},
+ {"(*InterfaceType).End", Method, 0},
+ {"(*InterfaceType).Pos", Method, 0},
+ {"(*KeyValueExpr).End", Method, 0},
+ {"(*KeyValueExpr).Pos", Method, 0},
+ {"(*LabeledStmt).End", Method, 0},
+ {"(*LabeledStmt).Pos", Method, 0},
+ {"(*MapType).End", Method, 0},
+ {"(*MapType).Pos", Method, 0},
+ {"(*Object).Pos", Method, 0},
+ {"(*Package).End", Method, 0},
+ {"(*Package).Pos", Method, 0},
+ {"(*ParenExpr).End", Method, 0},
+ {"(*ParenExpr).Pos", Method, 0},
+ {"(*RangeStmt).End", Method, 0},
+ {"(*RangeStmt).Pos", Method, 0},
+ {"(*ReturnStmt).End", Method, 0},
+ {"(*ReturnStmt).Pos", Method, 0},
+ {"(*Scope).Insert", Method, 0},
+ {"(*Scope).Lookup", Method, 0},
+ {"(*Scope).String", Method, 0},
+ {"(*SelectStmt).End", Method, 0},
+ {"(*SelectStmt).Pos", Method, 0},
+ {"(*SelectorExpr).End", Method, 0},
+ {"(*SelectorExpr).Pos", Method, 0},
+ {"(*SendStmt).End", Method, 0},
+ {"(*SendStmt).Pos", Method, 0},
+ {"(*SliceExpr).End", Method, 0},
+ {"(*SliceExpr).Pos", Method, 0},
+ {"(*StarExpr).End", Method, 0},
+ {"(*StarExpr).Pos", Method, 0},
+ {"(*StructType).End", Method, 0},
+ {"(*StructType).Pos", Method, 0},
+ {"(*SwitchStmt).End", Method, 0},
+ {"(*SwitchStmt).Pos", Method, 0},
+ {"(*TypeAssertExpr).End", Method, 0},
+ {"(*TypeAssertExpr).Pos", Method, 0},
+ {"(*TypeSpec).End", Method, 0},
+ {"(*TypeSpec).Pos", Method, 0},
+ {"(*TypeSwitchStmt).End", Method, 0},
+ {"(*TypeSwitchStmt).Pos", Method, 0},
+ {"(*UnaryExpr).End", Method, 0},
+ {"(*UnaryExpr).Pos", Method, 0},
+ {"(*ValueSpec).End", Method, 0},
+ {"(*ValueSpec).Pos", Method, 0},
+ {"(CommentMap).Comments", Method, 1},
+ {"(CommentMap).Filter", Method, 1},
+ {"(CommentMap).String", Method, 1},
+ {"(CommentMap).Update", Method, 1},
+ {"(ObjKind).String", Method, 0},
+ {"ArrayType", Type, 0},
+ {"ArrayType.Elt", Field, 0},
+ {"ArrayType.Lbrack", Field, 0},
+ {"ArrayType.Len", Field, 0},
+ {"AssignStmt", Type, 0},
+ {"AssignStmt.Lhs", Field, 0},
+ {"AssignStmt.Rhs", Field, 0},
+ {"AssignStmt.Tok", Field, 0},
+ {"AssignStmt.TokPos", Field, 0},
+ {"Bad", Const, 0},
+ {"BadDecl", Type, 0},
+ {"BadDecl.From", Field, 0},
+ {"BadDecl.To", Field, 0},
+ {"BadExpr", Type, 0},
+ {"BadExpr.From", Field, 0},
+ {"BadExpr.To", Field, 0},
+ {"BadStmt", Type, 0},
+ {"BadStmt.From", Field, 0},
+ {"BadStmt.To", Field, 0},
+ {"BasicLit", Type, 0},
+ {"BasicLit.Kind", Field, 0},
+ {"BasicLit.Value", Field, 0},
+ {"BasicLit.ValuePos", Field, 0},
+ {"BinaryExpr", Type, 0},
+ {"BinaryExpr.Op", Field, 0},
+ {"BinaryExpr.OpPos", Field, 0},
+ {"BinaryExpr.X", Field, 0},
+ {"BinaryExpr.Y", Field, 0},
+ {"BlockStmt", Type, 0},
+ {"BlockStmt.Lbrace", Field, 0},
+ {"BlockStmt.List", Field, 0},
+ {"BlockStmt.Rbrace", Field, 0},
+ {"BranchStmt", Type, 0},
+ {"BranchStmt.Label", Field, 0},
+ {"BranchStmt.Tok", Field, 0},
+ {"BranchStmt.TokPos", Field, 0},
+ {"CallExpr", Type, 0},
+ {"CallExpr.Args", Field, 0},
+ {"CallExpr.Ellipsis", Field, 0},
+ {"CallExpr.Fun", Field, 0},
+ {"CallExpr.Lparen", Field, 0},
+ {"CallExpr.Rparen", Field, 0},
+ {"CaseClause", Type, 0},
+ {"CaseClause.Body", Field, 0},
+ {"CaseClause.Case", Field, 0},
+ {"CaseClause.Colon", Field, 0},
+ {"CaseClause.List", Field, 0},
+ {"ChanDir", Type, 0},
+ {"ChanType", Type, 0},
+ {"ChanType.Arrow", Field, 1},
+ {"ChanType.Begin", Field, 0},
+ {"ChanType.Dir", Field, 0},
+ {"ChanType.Value", Field, 0},
+ {"CommClause", Type, 0},
+ {"CommClause.Body", Field, 0},
+ {"CommClause.Case", Field, 0},
+ {"CommClause.Colon", Field, 0},
+ {"CommClause.Comm", Field, 0},
+ {"Comment", Type, 0},
+ {"Comment.Slash", Field, 0},
+ {"Comment.Text", Field, 0},
+ {"CommentGroup", Type, 0},
+ {"CommentGroup.List", Field, 0},
+ {"CommentMap", Type, 1},
+ {"CompositeLit", Type, 0},
+ {"CompositeLit.Elts", Field, 0},
+ {"CompositeLit.Incomplete", Field, 11},
+ {"CompositeLit.Lbrace", Field, 0},
+ {"CompositeLit.Rbrace", Field, 0},
+ {"CompositeLit.Type", Field, 0},
+ {"Con", Const, 0},
+ {"Decl", Type, 0},
+ {"DeclStmt", Type, 0},
+ {"DeclStmt.Decl", Field, 0},
+ {"DeferStmt", Type, 0},
+ {"DeferStmt.Call", Field, 0},
+ {"DeferStmt.Defer", Field, 0},
+ {"Ellipsis", Type, 0},
+ {"Ellipsis.Ellipsis", Field, 0},
+ {"Ellipsis.Elt", Field, 0},
+ {"EmptyStmt", Type, 0},
+ {"EmptyStmt.Implicit", Field, 5},
+ {"EmptyStmt.Semicolon", Field, 0},
+ {"Expr", Type, 0},
+ {"ExprStmt", Type, 0},
+ {"ExprStmt.X", Field, 0},
+ {"Field", Type, 0},
+ {"Field.Comment", Field, 0},
+ {"Field.Doc", Field, 0},
+ {"Field.Names", Field, 0},
+ {"Field.Tag", Field, 0},
+ {"Field.Type", Field, 0},
+ {"FieldFilter", Type, 0},
+ {"FieldList", Type, 0},
+ {"FieldList.Closing", Field, 0},
+ {"FieldList.List", Field, 0},
+ {"FieldList.Opening", Field, 0},
+ {"File", Type, 0},
+ {"File.Comments", Field, 0},
+ {"File.Decls", Field, 0},
+ {"File.Doc", Field, 0},
+ {"File.FileEnd", Field, 20},
+ {"File.FileStart", Field, 20},
+ {"File.GoVersion", Field, 21},
+ {"File.Imports", Field, 0},
+ {"File.Name", Field, 0},
+ {"File.Package", Field, 0},
+ {"File.Scope", Field, 0},
+ {"File.Unresolved", Field, 0},
+ {"FileExports", Func, 0},
+ {"Filter", Type, 0},
+ {"FilterDecl", Func, 0},
+ {"FilterFile", Func, 0},
+ {"FilterFuncDuplicates", Const, 0},
+ {"FilterImportDuplicates", Const, 0},
+ {"FilterPackage", Func, 0},
+ {"FilterUnassociatedComments", Const, 0},
+ {"ForStmt", Type, 0},
+ {"ForStmt.Body", Field, 0},
+ {"ForStmt.Cond", Field, 0},
+ {"ForStmt.For", Field, 0},
+ {"ForStmt.Init", Field, 0},
+ {"ForStmt.Post", Field, 0},
+ {"Fprint", Func, 0},
+ {"Fun", Const, 0},
+ {"FuncDecl", Type, 0},
+ {"FuncDecl.Body", Field, 0},
+ {"FuncDecl.Doc", Field, 0},
+ {"FuncDecl.Name", Field, 0},
+ {"FuncDecl.Recv", Field, 0},
+ {"FuncDecl.Type", Field, 0},
+ {"FuncLit", Type, 0},
+ {"FuncLit.Body", Field, 0},
+ {"FuncLit.Type", Field, 0},
+ {"FuncType", Type, 0},
+ {"FuncType.Func", Field, 0},
+ {"FuncType.Params", Field, 0},
+ {"FuncType.Results", Field, 0},
+ {"FuncType.TypeParams", Field, 18},
+ {"GenDecl", Type, 0},
+ {"GenDecl.Doc", Field, 0},
+ {"GenDecl.Lparen", Field, 0},
+ {"GenDecl.Rparen", Field, 0},
+ {"GenDecl.Specs", Field, 0},
+ {"GenDecl.Tok", Field, 0},
+ {"GenDecl.TokPos", Field, 0},
+ {"GoStmt", Type, 0},
+ {"GoStmt.Call", Field, 0},
+ {"GoStmt.Go", Field, 0},
+ {"Ident", Type, 0},
+ {"Ident.Name", Field, 0},
+ {"Ident.NamePos", Field, 0},
+ {"Ident.Obj", Field, 0},
+ {"IfStmt", Type, 0},
+ {"IfStmt.Body", Field, 0},
+ {"IfStmt.Cond", Field, 0},
+ {"IfStmt.Else", Field, 0},
+ {"IfStmt.If", Field, 0},
+ {"IfStmt.Init", Field, 0},
+ {"ImportSpec", Type, 0},
+ {"ImportSpec.Comment", Field, 0},
+ {"ImportSpec.Doc", Field, 0},
+ {"ImportSpec.EndPos", Field, 0},
+ {"ImportSpec.Name", Field, 0},
+ {"ImportSpec.Path", Field, 0},
+ {"Importer", Type, 0},
+ {"IncDecStmt", Type, 0},
+ {"IncDecStmt.Tok", Field, 0},
+ {"IncDecStmt.TokPos", Field, 0},
+ {"IncDecStmt.X", Field, 0},
+ {"IndexExpr", Type, 0},
+ {"IndexExpr.Index", Field, 0},
+ {"IndexExpr.Lbrack", Field, 0},
+ {"IndexExpr.Rbrack", Field, 0},
+ {"IndexExpr.X", Field, 0},
+ {"IndexListExpr", Type, 18},
+ {"IndexListExpr.Indices", Field, 18},
+ {"IndexListExpr.Lbrack", Field, 18},
+ {"IndexListExpr.Rbrack", Field, 18},
+ {"IndexListExpr.X", Field, 18},
+ {"Inspect", Func, 0},
+ {"InterfaceType", Type, 0},
+ {"InterfaceType.Incomplete", Field, 0},
+ {"InterfaceType.Interface", Field, 0},
+ {"InterfaceType.Methods", Field, 0},
+ {"IsExported", Func, 0},
+ {"IsGenerated", Func, 21},
+ {"KeyValueExpr", Type, 0},
+ {"KeyValueExpr.Colon", Field, 0},
+ {"KeyValueExpr.Key", Field, 0},
+ {"KeyValueExpr.Value", Field, 0},
+ {"LabeledStmt", Type, 0},
+ {"LabeledStmt.Colon", Field, 0},
+ {"LabeledStmt.Label", Field, 0},
+ {"LabeledStmt.Stmt", Field, 0},
+ {"Lbl", Const, 0},
+ {"MapType", Type, 0},
+ {"MapType.Key", Field, 0},
+ {"MapType.Map", Field, 0},
+ {"MapType.Value", Field, 0},
+ {"MergeMode", Type, 0},
+ {"MergePackageFiles", Func, 0},
+ {"NewCommentMap", Func, 1},
+ {"NewIdent", Func, 0},
+ {"NewObj", Func, 0},
+ {"NewPackage", Func, 0},
+ {"NewScope", Func, 0},
+ {"Node", Type, 0},
+ {"NotNilFilter", Func, 0},
+ {"ObjKind", Type, 0},
+ {"Object", Type, 0},
+ {"Object.Data", Field, 0},
+ {"Object.Decl", Field, 0},
+ {"Object.Kind", Field, 0},
+ {"Object.Name", Field, 0},
+ {"Object.Type", Field, 0},
+ {"Package", Type, 0},
+ {"Package.Files", Field, 0},
+ {"Package.Imports", Field, 0},
+ {"Package.Name", Field, 0},
+ {"Package.Scope", Field, 0},
+ {"PackageExports", Func, 0},
+ {"ParenExpr", Type, 0},
+ {"ParenExpr.Lparen", Field, 0},
+ {"ParenExpr.Rparen", Field, 0},
+ {"ParenExpr.X", Field, 0},
+ {"Pkg", Const, 0},
+ {"Preorder", Func, 23},
+ {"Print", Func, 0},
+ {"RECV", Const, 0},
+ {"RangeStmt", Type, 0},
+ {"RangeStmt.Body", Field, 0},
+ {"RangeStmt.For", Field, 0},
+ {"RangeStmt.Key", Field, 0},
+ {"RangeStmt.Range", Field, 20},
+ {"RangeStmt.Tok", Field, 0},
+ {"RangeStmt.TokPos", Field, 0},
+ {"RangeStmt.Value", Field, 0},
+ {"RangeStmt.X", Field, 0},
+ {"ReturnStmt", Type, 0},
+ {"ReturnStmt.Results", Field, 0},
+ {"ReturnStmt.Return", Field, 0},
+ {"SEND", Const, 0},
+ {"Scope", Type, 0},
+ {"Scope.Objects", Field, 0},
+ {"Scope.Outer", Field, 0},
+ {"SelectStmt", Type, 0},
+ {"SelectStmt.Body", Field, 0},
+ {"SelectStmt.Select", Field, 0},
+ {"SelectorExpr", Type, 0},
+ {"SelectorExpr.Sel", Field, 0},
+ {"SelectorExpr.X", Field, 0},
+ {"SendStmt", Type, 0},
+ {"SendStmt.Arrow", Field, 0},
+ {"SendStmt.Chan", Field, 0},
+ {"SendStmt.Value", Field, 0},
+ {"SliceExpr", Type, 0},
+ {"SliceExpr.High", Field, 0},
+ {"SliceExpr.Lbrack", Field, 0},
+ {"SliceExpr.Low", Field, 0},
+ {"SliceExpr.Max", Field, 2},
+ {"SliceExpr.Rbrack", Field, 0},
+ {"SliceExpr.Slice3", Field, 2},
+ {"SliceExpr.X", Field, 0},
+ {"SortImports", Func, 0},
+ {"Spec", Type, 0},
+ {"StarExpr", Type, 0},
+ {"StarExpr.Star", Field, 0},
+ {"StarExpr.X", Field, 0},
+ {"Stmt", Type, 0},
+ {"StructType", Type, 0},
+ {"StructType.Fields", Field, 0},
+ {"StructType.Incomplete", Field, 0},
+ {"StructType.Struct", Field, 0},
+ {"SwitchStmt", Type, 0},
+ {"SwitchStmt.Body", Field, 0},
+ {"SwitchStmt.Init", Field, 0},
+ {"SwitchStmt.Switch", Field, 0},
+ {"SwitchStmt.Tag", Field, 0},
+ {"Typ", Const, 0},
+ {"TypeAssertExpr", Type, 0},
+ {"TypeAssertExpr.Lparen", Field, 2},
+ {"TypeAssertExpr.Rparen", Field, 2},
+ {"TypeAssertExpr.Type", Field, 0},
+ {"TypeAssertExpr.X", Field, 0},
+ {"TypeSpec", Type, 0},
+ {"TypeSpec.Assign", Field, 9},
+ {"TypeSpec.Comment", Field, 0},
+ {"TypeSpec.Doc", Field, 0},
+ {"TypeSpec.Name", Field, 0},
+ {"TypeSpec.Type", Field, 0},
+ {"TypeSpec.TypeParams", Field, 18},
+ {"TypeSwitchStmt", Type, 0},
+ {"TypeSwitchStmt.Assign", Field, 0},
+ {"TypeSwitchStmt.Body", Field, 0},
+ {"TypeSwitchStmt.Init", Field, 0},
+ {"TypeSwitchStmt.Switch", Field, 0},
+ {"UnaryExpr", Type, 0},
+ {"UnaryExpr.Op", Field, 0},
+ {"UnaryExpr.OpPos", Field, 0},
+ {"UnaryExpr.X", Field, 0},
+ {"Unparen", Func, 22},
+ {"ValueSpec", Type, 0},
+ {"ValueSpec.Comment", Field, 0},
+ {"ValueSpec.Doc", Field, 0},
+ {"ValueSpec.Names", Field, 0},
+ {"ValueSpec.Type", Field, 0},
+ {"ValueSpec.Values", Field, 0},
+ {"Var", Const, 0},
+ {"Visitor", Type, 0},
+ {"Walk", Func, 0},
+ },
+ "go/build": {
+ {"(*Context).Import", Method, 0},
+ {"(*Context).ImportDir", Method, 0},
+ {"(*Context).MatchFile", Method, 2},
+ {"(*Context).SrcDirs", Method, 0},
+ {"(*MultiplePackageError).Error", Method, 4},
+ {"(*NoGoError).Error", Method, 0},
+ {"(*Package).IsCommand", Method, 0},
+ {"AllowBinary", Const, 0},
+ {"ArchChar", Func, 0},
+ {"Context", Type, 0},
+ {"Context.BuildTags", Field, 0},
+ {"Context.CgoEnabled", Field, 0},
+ {"Context.Compiler", Field, 0},
+ {"Context.Dir", Field, 14},
+ {"Context.GOARCH", Field, 0},
+ {"Context.GOOS", Field, 0},
+ {"Context.GOPATH", Field, 0},
+ {"Context.GOROOT", Field, 0},
+ {"Context.HasSubdir", Field, 0},
+ {"Context.InstallSuffix", Field, 1},
+ {"Context.IsAbsPath", Field, 0},
+ {"Context.IsDir", Field, 0},
+ {"Context.JoinPath", Field, 0},
+ {"Context.OpenFile", Field, 0},
+ {"Context.ReadDir", Field, 0},
+ {"Context.ReleaseTags", Field, 1},
+ {"Context.SplitPathList", Field, 0},
+ {"Context.ToolTags", Field, 17},
+ {"Context.UseAllFiles", Field, 0},
+ {"Default", Var, 0},
+ {"Directive", Type, 21},
+ {"Directive.Pos", Field, 21},
+ {"Directive.Text", Field, 21},
+ {"FindOnly", Const, 0},
+ {"IgnoreVendor", Const, 6},
+ {"Import", Func, 0},
+ {"ImportComment", Const, 4},
+ {"ImportDir", Func, 0},
+ {"ImportMode", Type, 0},
+ {"IsLocalImport", Func, 0},
+ {"MultiplePackageError", Type, 4},
+ {"MultiplePackageError.Dir", Field, 4},
+ {"MultiplePackageError.Files", Field, 4},
+ {"MultiplePackageError.Packages", Field, 4},
+ {"NoGoError", Type, 0},
+ {"NoGoError.Dir", Field, 0},
+ {"Package", Type, 0},
+ {"Package.AllTags", Field, 2},
+ {"Package.BinDir", Field, 0},
+ {"Package.BinaryOnly", Field, 7},
+ {"Package.CFiles", Field, 0},
+ {"Package.CXXFiles", Field, 2},
+ {"Package.CgoCFLAGS", Field, 0},
+ {"Package.CgoCPPFLAGS", Field, 2},
+ {"Package.CgoCXXFLAGS", Field, 2},
+ {"Package.CgoFFLAGS", Field, 7},
+ {"Package.CgoFiles", Field, 0},
+ {"Package.CgoLDFLAGS", Field, 0},
+ {"Package.CgoPkgConfig", Field, 0},
+ {"Package.ConflictDir", Field, 2},
+ {"Package.Dir", Field, 0},
+ {"Package.Directives", Field, 21},
+ {"Package.Doc", Field, 0},
+ {"Package.EmbedPatternPos", Field, 16},
+ {"Package.EmbedPatterns", Field, 16},
+ {"Package.FFiles", Field, 7},
+ {"Package.GoFiles", Field, 0},
+ {"Package.Goroot", Field, 0},
+ {"Package.HFiles", Field, 0},
+ {"Package.IgnoredGoFiles", Field, 1},
+ {"Package.IgnoredOtherFiles", Field, 16},
+ {"Package.ImportComment", Field, 4},
+ {"Package.ImportPath", Field, 0},
+ {"Package.ImportPos", Field, 0},
+ {"Package.Imports", Field, 0},
+ {"Package.InvalidGoFiles", Field, 6},
+ {"Package.MFiles", Field, 3},
+ {"Package.Name", Field, 0},
+ {"Package.PkgObj", Field, 0},
+ {"Package.PkgRoot", Field, 0},
+ {"Package.PkgTargetRoot", Field, 5},
+ {"Package.Root", Field, 0},
+ {"Package.SFiles", Field, 0},
+ {"Package.SrcRoot", Field, 0},
+ {"Package.SwigCXXFiles", Field, 1},
+ {"Package.SwigFiles", Field, 1},
+ {"Package.SysoFiles", Field, 0},
+ {"Package.TestDirectives", Field, 21},
+ {"Package.TestEmbedPatternPos", Field, 16},
+ {"Package.TestEmbedPatterns", Field, 16},
+ {"Package.TestGoFiles", Field, 0},
+ {"Package.TestImportPos", Field, 0},
+ {"Package.TestImports", Field, 0},
+ {"Package.XTestDirectives", Field, 21},
+ {"Package.XTestEmbedPatternPos", Field, 16},
+ {"Package.XTestEmbedPatterns", Field, 16},
+ {"Package.XTestGoFiles", Field, 0},
+ {"Package.XTestImportPos", Field, 0},
+ {"Package.XTestImports", Field, 0},
+ {"ToolDir", Var, 0},
+ },
+ "go/build/constraint": {
+ {"(*AndExpr).Eval", Method, 16},
+ {"(*AndExpr).String", Method, 16},
+ {"(*NotExpr).Eval", Method, 16},
+ {"(*NotExpr).String", Method, 16},
+ {"(*OrExpr).Eval", Method, 16},
+ {"(*OrExpr).String", Method, 16},
+ {"(*SyntaxError).Error", Method, 16},
+ {"(*TagExpr).Eval", Method, 16},
+ {"(*TagExpr).String", Method, 16},
+ {"AndExpr", Type, 16},
+ {"AndExpr.X", Field, 16},
+ {"AndExpr.Y", Field, 16},
+ {"Expr", Type, 16},
+ {"GoVersion", Func, 21},
+ {"IsGoBuild", Func, 16},
+ {"IsPlusBuild", Func, 16},
+ {"NotExpr", Type, 16},
+ {"NotExpr.X", Field, 16},
+ {"OrExpr", Type, 16},
+ {"OrExpr.X", Field, 16},
+ {"OrExpr.Y", Field, 16},
+ {"Parse", Func, 16},
+ {"PlusBuildLines", Func, 16},
+ {"SyntaxError", Type, 16},
+ {"SyntaxError.Err", Field, 16},
+ {"SyntaxError.Offset", Field, 16},
+ {"TagExpr", Type, 16},
+ {"TagExpr.Tag", Field, 16},
+ },
+ "go/constant": {
+ {"(Kind).String", Method, 18},
+ {"BinaryOp", Func, 5},
+ {"BitLen", Func, 5},
+ {"Bool", Const, 5},
+ {"BoolVal", Func, 5},
+ {"Bytes", Func, 5},
+ {"Compare", Func, 5},
+ {"Complex", Const, 5},
+ {"Denom", Func, 5},
+ {"Float", Const, 5},
+ {"Float32Val", Func, 5},
+ {"Float64Val", Func, 5},
+ {"Imag", Func, 5},
+ {"Int", Const, 5},
+ {"Int64Val", Func, 5},
+ {"Kind", Type, 5},
+ {"Make", Func, 13},
+ {"MakeBool", Func, 5},
+ {"MakeFloat64", Func, 5},
+ {"MakeFromBytes", Func, 5},
+ {"MakeFromLiteral", Func, 5},
+ {"MakeImag", Func, 5},
+ {"MakeInt64", Func, 5},
+ {"MakeString", Func, 5},
+ {"MakeUint64", Func, 5},
+ {"MakeUnknown", Func, 5},
+ {"Num", Func, 5},
+ {"Real", Func, 5},
+ {"Shift", Func, 5},
+ {"Sign", Func, 5},
+ {"String", Const, 5},
+ {"StringVal", Func, 5},
+ {"ToComplex", Func, 6},
+ {"ToFloat", Func, 6},
+ {"ToInt", Func, 6},
+ {"Uint64Val", Func, 5},
+ {"UnaryOp", Func, 5},
+ {"Unknown", Const, 5},
+ {"Val", Func, 13},
+ {"Value", Type, 5},
+ },
+ "go/doc": {
+ {"(*Package).Filter", Method, 0},
+ {"(*Package).HTML", Method, 19},
+ {"(*Package).Markdown", Method, 19},
+ {"(*Package).Parser", Method, 19},
+ {"(*Package).Printer", Method, 19},
+ {"(*Package).Synopsis", Method, 19},
+ {"(*Package).Text", Method, 19},
+ {"AllDecls", Const, 0},
+ {"AllMethods", Const, 0},
+ {"Example", Type, 0},
+ {"Example.Code", Field, 0},
+ {"Example.Comments", Field, 0},
+ {"Example.Doc", Field, 0},
+ {"Example.EmptyOutput", Field, 1},
+ {"Example.Name", Field, 0},
+ {"Example.Order", Field, 1},
+ {"Example.Output", Field, 0},
+ {"Example.Play", Field, 1},
+ {"Example.Suffix", Field, 14},
+ {"Example.Unordered", Field, 7},
+ {"Examples", Func, 0},
+ {"Filter", Type, 0},
+ {"Func", Type, 0},
+ {"Func.Decl", Field, 0},
+ {"Func.Doc", Field, 0},
+ {"Func.Examples", Field, 14},
+ {"Func.Level", Field, 0},
+ {"Func.Name", Field, 0},
+ {"Func.Orig", Field, 0},
+ {"Func.Recv", Field, 0},
+ {"IllegalPrefixes", Var, 1},
+ {"IsPredeclared", Func, 8},
+ {"Mode", Type, 0},
+ {"New", Func, 0},
+ {"NewFromFiles", Func, 14},
+ {"Note", Type, 1},
+ {"Note.Body", Field, 1},
+ {"Note.End", Field, 1},
+ {"Note.Pos", Field, 1},
+ {"Note.UID", Field, 1},
+ {"Package", Type, 0},
+ {"Package.Bugs", Field, 0},
+ {"Package.Consts", Field, 0},
+ {"Package.Doc", Field, 0},
+ {"Package.Examples", Field, 14},
+ {"Package.Filenames", Field, 0},
+ {"Package.Funcs", Field, 0},
+ {"Package.ImportPath", Field, 0},
+ {"Package.Imports", Field, 0},
+ {"Package.Name", Field, 0},
+ {"Package.Notes", Field, 1},
+ {"Package.Types", Field, 0},
+ {"Package.Vars", Field, 0},
+ {"PreserveAST", Const, 12},
+ {"Synopsis", Func, 0},
+ {"ToHTML", Func, 0},
+ {"ToText", Func, 0},
+ {"Type", Type, 0},
+ {"Type.Consts", Field, 0},
+ {"Type.Decl", Field, 0},
+ {"Type.Doc", Field, 0},
+ {"Type.Examples", Field, 14},
+ {"Type.Funcs", Field, 0},
+ {"Type.Methods", Field, 0},
+ {"Type.Name", Field, 0},
+ {"Type.Vars", Field, 0},
+ {"Value", Type, 0},
+ {"Value.Decl", Field, 0},
+ {"Value.Doc", Field, 0},
+ {"Value.Names", Field, 0},
+ },
+ "go/doc/comment": {
+ {"(*DocLink).DefaultURL", Method, 19},
+ {"(*Heading).DefaultID", Method, 19},
+ {"(*List).BlankBefore", Method, 19},
+ {"(*List).BlankBetween", Method, 19},
+ {"(*Parser).Parse", Method, 19},
+ {"(*Printer).Comment", Method, 19},
+ {"(*Printer).HTML", Method, 19},
+ {"(*Printer).Markdown", Method, 19},
+ {"(*Printer).Text", Method, 19},
+ {"Block", Type, 19},
+ {"Code", Type, 19},
+ {"Code.Text", Field, 19},
+ {"DefaultLookupPackage", Func, 19},
+ {"Doc", Type, 19},
+ {"Doc.Content", Field, 19},
+ {"Doc.Links", Field, 19},
+ {"DocLink", Type, 19},
+ {"DocLink.ImportPath", Field, 19},
+ {"DocLink.Name", Field, 19},
+ {"DocLink.Recv", Field, 19},
+ {"DocLink.Text", Field, 19},
+ {"Heading", Type, 19},
+ {"Heading.Text", Field, 19},
+ {"Italic", Type, 19},
+ {"Link", Type, 19},
+ {"Link.Auto", Field, 19},
+ {"Link.Text", Field, 19},
+ {"Link.URL", Field, 19},
+ {"LinkDef", Type, 19},
+ {"LinkDef.Text", Field, 19},
+ {"LinkDef.URL", Field, 19},
+ {"LinkDef.Used", Field, 19},
+ {"List", Type, 19},
+ {"List.ForceBlankBefore", Field, 19},
+ {"List.ForceBlankBetween", Field, 19},
+ {"List.Items", Field, 19},
+ {"ListItem", Type, 19},
+ {"ListItem.Content", Field, 19},
+ {"ListItem.Number", Field, 19},
+ {"Paragraph", Type, 19},
+ {"Paragraph.Text", Field, 19},
+ {"Parser", Type, 19},
+ {"Parser.LookupPackage", Field, 19},
+ {"Parser.LookupSym", Field, 19},
+ {"Parser.Words", Field, 19},
+ {"Plain", Type, 19},
+ {"Printer", Type, 19},
+ {"Printer.DocLinkBaseURL", Field, 19},
+ {"Printer.DocLinkURL", Field, 19},
+ {"Printer.HeadingID", Field, 19},
+ {"Printer.HeadingLevel", Field, 19},
+ {"Printer.TextCodePrefix", Field, 19},
+ {"Printer.TextPrefix", Field, 19},
+ {"Printer.TextWidth", Field, 19},
+ {"Text", Type, 19},
+ },
+ "go/format": {
+ {"Node", Func, 1},
+ {"Source", Func, 1},
+ },
+ "go/importer": {
+ {"Default", Func, 5},
+ {"For", Func, 5},
+ {"ForCompiler", Func, 12},
+ {"Lookup", Type, 5},
+ },
+ "go/parser": {
+ {"AllErrors", Const, 1},
+ {"DeclarationErrors", Const, 0},
+ {"ImportsOnly", Const, 0},
+ {"Mode", Type, 0},
+ {"PackageClauseOnly", Const, 0},
+ {"ParseComments", Const, 0},
+ {"ParseDir", Func, 0},
+ {"ParseExpr", Func, 0},
+ {"ParseExprFrom", Func, 5},
+ {"ParseFile", Func, 0},
+ {"SkipObjectResolution", Const, 17},
+ {"SpuriousErrors", Const, 0},
+ {"Trace", Const, 0},
+ },
+ "go/printer": {
+ {"(*Config).Fprint", Method, 0},
+ {"CommentedNode", Type, 0},
+ {"CommentedNode.Comments", Field, 0},
+ {"CommentedNode.Node", Field, 0},
+ {"Config", Type, 0},
+ {"Config.Indent", Field, 1},
+ {"Config.Mode", Field, 0},
+ {"Config.Tabwidth", Field, 0},
+ {"Fprint", Func, 0},
+ {"Mode", Type, 0},
+ {"RawFormat", Const, 0},
+ {"SourcePos", Const, 0},
+ {"TabIndent", Const, 0},
+ {"UseSpaces", Const, 0},
+ },
+ "go/scanner": {
+ {"(*ErrorList).Add", Method, 0},
+ {"(*ErrorList).RemoveMultiples", Method, 0},
+ {"(*ErrorList).Reset", Method, 0},
+ {"(*Scanner).Init", Method, 0},
+ {"(*Scanner).Scan", Method, 0},
+ {"(Error).Error", Method, 0},
+ {"(ErrorList).Err", Method, 0},
+ {"(ErrorList).Error", Method, 0},
+ {"(ErrorList).Len", Method, 0},
+ {"(ErrorList).Less", Method, 0},
+ {"(ErrorList).Sort", Method, 0},
+ {"(ErrorList).Swap", Method, 0},
+ {"Error", Type, 0},
+ {"Error.Msg", Field, 0},
+ {"Error.Pos", Field, 0},
+ {"ErrorHandler", Type, 0},
+ {"ErrorList", Type, 0},
+ {"Mode", Type, 0},
+ {"PrintError", Func, 0},
+ {"ScanComments", Const, 0},
+ {"Scanner", Type, 0},
+ {"Scanner.ErrorCount", Field, 0},
+ },
+ "go/token": {
+ {"(*File).AddLine", Method, 0},
+ {"(*File).AddLineColumnInfo", Method, 11},
+ {"(*File).AddLineInfo", Method, 0},
+ {"(*File).Base", Method, 0},
+ {"(*File).Line", Method, 0},
+ {"(*File).LineCount", Method, 0},
+ {"(*File).LineStart", Method, 12},
+ {"(*File).Lines", Method, 21},
+ {"(*File).MergeLine", Method, 2},
+ {"(*File).Name", Method, 0},
+ {"(*File).Offset", Method, 0},
+ {"(*File).Pos", Method, 0},
+ {"(*File).Position", Method, 0},
+ {"(*File).PositionFor", Method, 4},
+ {"(*File).SetLines", Method, 0},
+ {"(*File).SetLinesForContent", Method, 0},
+ {"(*File).Size", Method, 0},
+ {"(*FileSet).AddFile", Method, 0},
+ {"(*FileSet).Base", Method, 0},
+ {"(*FileSet).File", Method, 0},
+ {"(*FileSet).Iterate", Method, 0},
+ {"(*FileSet).Position", Method, 0},
+ {"(*FileSet).PositionFor", Method, 4},
+ {"(*FileSet).Read", Method, 0},
+ {"(*FileSet).RemoveFile", Method, 20},
+ {"(*FileSet).Write", Method, 0},
+ {"(*Position).IsValid", Method, 0},
+ {"(Pos).IsValid", Method, 0},
+ {"(Position).String", Method, 0},
+ {"(Token).IsKeyword", Method, 0},
+ {"(Token).IsLiteral", Method, 0},
+ {"(Token).IsOperator", Method, 0},
+ {"(Token).Precedence", Method, 0},
+ {"(Token).String", Method, 0},
+ {"ADD", Const, 0},
+ {"ADD_ASSIGN", Const, 0},
+ {"AND", Const, 0},
+ {"AND_ASSIGN", Const, 0},
+ {"AND_NOT", Const, 0},
+ {"AND_NOT_ASSIGN", Const, 0},
+ {"ARROW", Const, 0},
+ {"ASSIGN", Const, 0},
+ {"BREAK", Const, 0},
+ {"CASE", Const, 0},
+ {"CHAN", Const, 0},
+ {"CHAR", Const, 0},
+ {"COLON", Const, 0},
+ {"COMMA", Const, 0},
+ {"COMMENT", Const, 0},
+ {"CONST", Const, 0},
+ {"CONTINUE", Const, 0},
+ {"DEC", Const, 0},
+ {"DEFAULT", Const, 0},
+ {"DEFER", Const, 0},
+ {"DEFINE", Const, 0},
+ {"ELLIPSIS", Const, 0},
+ {"ELSE", Const, 0},
+ {"EOF", Const, 0},
+ {"EQL", Const, 0},
+ {"FALLTHROUGH", Const, 0},
+ {"FLOAT", Const, 0},
+ {"FOR", Const, 0},
+ {"FUNC", Const, 0},
+ {"File", Type, 0},
+ {"FileSet", Type, 0},
+ {"GEQ", Const, 0},
+ {"GO", Const, 0},
+ {"GOTO", Const, 0},
+ {"GTR", Const, 0},
+ {"HighestPrec", Const, 0},
+ {"IDENT", Const, 0},
+ {"IF", Const, 0},
+ {"ILLEGAL", Const, 0},
+ {"IMAG", Const, 0},
+ {"IMPORT", Const, 0},
+ {"INC", Const, 0},
+ {"INT", Const, 0},
+ {"INTERFACE", Const, 0},
+ {"IsExported", Func, 13},
+ {"IsIdentifier", Func, 13},
+ {"IsKeyword", Func, 13},
+ {"LAND", Const, 0},
+ {"LBRACE", Const, 0},
+ {"LBRACK", Const, 0},
+ {"LEQ", Const, 0},
+ {"LOR", Const, 0},
+ {"LPAREN", Const, 0},
+ {"LSS", Const, 0},
+ {"Lookup", Func, 0},
+ {"LowestPrec", Const, 0},
+ {"MAP", Const, 0},
+ {"MUL", Const, 0},
+ {"MUL_ASSIGN", Const, 0},
+ {"NEQ", Const, 0},
+ {"NOT", Const, 0},
+ {"NewFileSet", Func, 0},
+ {"NoPos", Const, 0},
+ {"OR", Const, 0},
+ {"OR_ASSIGN", Const, 0},
+ {"PACKAGE", Const, 0},
+ {"PERIOD", Const, 0},
+ {"Pos", Type, 0},
+ {"Position", Type, 0},
+ {"Position.Column", Field, 0},
+ {"Position.Filename", Field, 0},
+ {"Position.Line", Field, 0},
+ {"Position.Offset", Field, 0},
+ {"QUO", Const, 0},
+ {"QUO_ASSIGN", Const, 0},
+ {"RANGE", Const, 0},
+ {"RBRACE", Const, 0},
+ {"RBRACK", Const, 0},
+ {"REM", Const, 0},
+ {"REM_ASSIGN", Const, 0},
+ {"RETURN", Const, 0},
+ {"RPAREN", Const, 0},
+ {"SELECT", Const, 0},
+ {"SEMICOLON", Const, 0},
+ {"SHL", Const, 0},
+ {"SHL_ASSIGN", Const, 0},
+ {"SHR", Const, 0},
+ {"SHR_ASSIGN", Const, 0},
+ {"STRING", Const, 0},
+ {"STRUCT", Const, 0},
+ {"SUB", Const, 0},
+ {"SUB_ASSIGN", Const, 0},
+ {"SWITCH", Const, 0},
+ {"TILDE", Const, 18},
+ {"TYPE", Const, 0},
+ {"Token", Type, 0},
+ {"UnaryPrec", Const, 0},
+ {"VAR", Const, 0},
+ {"XOR", Const, 0},
+ {"XOR_ASSIGN", Const, 0},
+ },
+ "go/types": {
+ {"(*Alias).Obj", Method, 22},
+ {"(*Alias).Origin", Method, 23},
+ {"(*Alias).Rhs", Method, 23},
+ {"(*Alias).SetTypeParams", Method, 23},
+ {"(*Alias).String", Method, 22},
+ {"(*Alias).TypeArgs", Method, 23},
+ {"(*Alias).TypeParams", Method, 23},
+ {"(*Alias).Underlying", Method, 22},
+ {"(*ArgumentError).Error", Method, 18},
+ {"(*ArgumentError).Unwrap", Method, 18},
+ {"(*Array).Elem", Method, 5},
+ {"(*Array).Len", Method, 5},
+ {"(*Array).String", Method, 5},
+ {"(*Array).Underlying", Method, 5},
+ {"(*Basic).Info", Method, 5},
+ {"(*Basic).Kind", Method, 5},
+ {"(*Basic).Name", Method, 5},
+ {"(*Basic).String", Method, 5},
+ {"(*Basic).Underlying", Method, 5},
+ {"(*Builtin).Exported", Method, 5},
+ {"(*Builtin).Id", Method, 5},
+ {"(*Builtin).Name", Method, 5},
+ {"(*Builtin).Parent", Method, 5},
+ {"(*Builtin).Pkg", Method, 5},
+ {"(*Builtin).Pos", Method, 5},
+ {"(*Builtin).String", Method, 5},
+ {"(*Builtin).Type", Method, 5},
+ {"(*Chan).Dir", Method, 5},
+ {"(*Chan).Elem", Method, 5},
+ {"(*Chan).String", Method, 5},
+ {"(*Chan).Underlying", Method, 5},
+ {"(*Checker).Files", Method, 5},
+ {"(*Config).Check", Method, 5},
+ {"(*Const).Exported", Method, 5},
+ {"(*Const).Id", Method, 5},
+ {"(*Const).Name", Method, 5},
+ {"(*Const).Parent", Method, 5},
+ {"(*Const).Pkg", Method, 5},
+ {"(*Const).Pos", Method, 5},
+ {"(*Const).String", Method, 5},
+ {"(*Const).Type", Method, 5},
+ {"(*Const).Val", Method, 5},
+ {"(*Func).Exported", Method, 5},
+ {"(*Func).FullName", Method, 5},
+ {"(*Func).Id", Method, 5},
+ {"(*Func).Name", Method, 5},
+ {"(*Func).Origin", Method, 19},
+ {"(*Func).Parent", Method, 5},
+ {"(*Func).Pkg", Method, 5},
+ {"(*Func).Pos", Method, 5},
+ {"(*Func).Scope", Method, 5},
+ {"(*Func).Signature", Method, 23},
+ {"(*Func).String", Method, 5},
+ {"(*Func).Type", Method, 5},
+ {"(*Info).ObjectOf", Method, 5},
+ {"(*Info).PkgNameOf", Method, 22},
+ {"(*Info).TypeOf", Method, 5},
+ {"(*Initializer).String", Method, 5},
+ {"(*Interface).Complete", Method, 5},
+ {"(*Interface).Embedded", Method, 5},
+ {"(*Interface).EmbeddedType", Method, 11},
+ {"(*Interface).Empty", Method, 5},
+ {"(*Interface).ExplicitMethod", Method, 5},
+ {"(*Interface).IsComparable", Method, 18},
+ {"(*Interface).IsImplicit", Method, 18},
+ {"(*Interface).IsMethodSet", Method, 18},
+ {"(*Interface).MarkImplicit", Method, 18},
+ {"(*Interface).Method", Method, 5},
+ {"(*Interface).NumEmbeddeds", Method, 5},
+ {"(*Interface).NumExplicitMethods", Method, 5},
+ {"(*Interface).NumMethods", Method, 5},
+ {"(*Interface).String", Method, 5},
+ {"(*Interface).Underlying", Method, 5},
+ {"(*Label).Exported", Method, 5},
+ {"(*Label).Id", Method, 5},
+ {"(*Label).Name", Method, 5},
+ {"(*Label).Parent", Method, 5},
+ {"(*Label).Pkg", Method, 5},
+ {"(*Label).Pos", Method, 5},
+ {"(*Label).String", Method, 5},
+ {"(*Label).Type", Method, 5},
+ {"(*Map).Elem", Method, 5},
+ {"(*Map).Key", Method, 5},
+ {"(*Map).String", Method, 5},
+ {"(*Map).Underlying", Method, 5},
+ {"(*MethodSet).At", Method, 5},
+ {"(*MethodSet).Len", Method, 5},
+ {"(*MethodSet).Lookup", Method, 5},
+ {"(*MethodSet).String", Method, 5},
+ {"(*Named).AddMethod", Method, 5},
+ {"(*Named).Method", Method, 5},
+ {"(*Named).NumMethods", Method, 5},
+ {"(*Named).Obj", Method, 5},
+ {"(*Named).Origin", Method, 18},
+ {"(*Named).SetTypeParams", Method, 18},
+ {"(*Named).SetUnderlying", Method, 5},
+ {"(*Named).String", Method, 5},
+ {"(*Named).TypeArgs", Method, 18},
+ {"(*Named).TypeParams", Method, 18},
+ {"(*Named).Underlying", Method, 5},
+ {"(*Nil).Exported", Method, 5},
+ {"(*Nil).Id", Method, 5},
+ {"(*Nil).Name", Method, 5},
+ {"(*Nil).Parent", Method, 5},
+ {"(*Nil).Pkg", Method, 5},
+ {"(*Nil).Pos", Method, 5},
+ {"(*Nil).String", Method, 5},
+ {"(*Nil).Type", Method, 5},
+ {"(*Package).Complete", Method, 5},
+ {"(*Package).GoVersion", Method, 21},
+ {"(*Package).Imports", Method, 5},
+ {"(*Package).MarkComplete", Method, 5},
+ {"(*Package).Name", Method, 5},
+ {"(*Package).Path", Method, 5},
+ {"(*Package).Scope", Method, 5},
+ {"(*Package).SetImports", Method, 5},
+ {"(*Package).SetName", Method, 6},
+ {"(*Package).String", Method, 5},
+ {"(*PkgName).Exported", Method, 5},
+ {"(*PkgName).Id", Method, 5},
+ {"(*PkgName).Imported", Method, 5},
+ {"(*PkgName).Name", Method, 5},
+ {"(*PkgName).Parent", Method, 5},
+ {"(*PkgName).Pkg", Method, 5},
+ {"(*PkgName).Pos", Method, 5},
+ {"(*PkgName).String", Method, 5},
+ {"(*PkgName).Type", Method, 5},
+ {"(*Pointer).Elem", Method, 5},
+ {"(*Pointer).String", Method, 5},
+ {"(*Pointer).Underlying", Method, 5},
+ {"(*Scope).Child", Method, 5},
+ {"(*Scope).Contains", Method, 5},
+ {"(*Scope).End", Method, 5},
+ {"(*Scope).Innermost", Method, 5},
+ {"(*Scope).Insert", Method, 5},
+ {"(*Scope).Len", Method, 5},
+ {"(*Scope).Lookup", Method, 5},
+ {"(*Scope).LookupParent", Method, 5},
+ {"(*Scope).Names", Method, 5},
+ {"(*Scope).NumChildren", Method, 5},
+ {"(*Scope).Parent", Method, 5},
+ {"(*Scope).Pos", Method, 5},
+ {"(*Scope).String", Method, 5},
+ {"(*Scope).WriteTo", Method, 5},
+ {"(*Selection).Index", Method, 5},
+ {"(*Selection).Indirect", Method, 5},
+ {"(*Selection).Kind", Method, 5},
+ {"(*Selection).Obj", Method, 5},
+ {"(*Selection).Recv", Method, 5},
+ {"(*Selection).String", Method, 5},
+ {"(*Selection).Type", Method, 5},
+ {"(*Signature).Params", Method, 5},
+ {"(*Signature).Recv", Method, 5},
+ {"(*Signature).RecvTypeParams", Method, 18},
+ {"(*Signature).Results", Method, 5},
+ {"(*Signature).String", Method, 5},
+ {"(*Signature).TypeParams", Method, 18},
+ {"(*Signature).Underlying", Method, 5},
+ {"(*Signature).Variadic", Method, 5},
+ {"(*Slice).Elem", Method, 5},
+ {"(*Slice).String", Method, 5},
+ {"(*Slice).Underlying", Method, 5},
+ {"(*StdSizes).Alignof", Method, 5},
+ {"(*StdSizes).Offsetsof", Method, 5},
+ {"(*StdSizes).Sizeof", Method, 5},
+ {"(*Struct).Field", Method, 5},
+ {"(*Struct).NumFields", Method, 5},
+ {"(*Struct).String", Method, 5},
+ {"(*Struct).Tag", Method, 5},
+ {"(*Struct).Underlying", Method, 5},
+ {"(*Term).String", Method, 18},
+ {"(*Term).Tilde", Method, 18},
+ {"(*Term).Type", Method, 18},
+ {"(*Tuple).At", Method, 5},
+ {"(*Tuple).Len", Method, 5},
+ {"(*Tuple).String", Method, 5},
+ {"(*Tuple).Underlying", Method, 5},
+ {"(*TypeList).At", Method, 18},
+ {"(*TypeList).Len", Method, 18},
+ {"(*TypeName).Exported", Method, 5},
+ {"(*TypeName).Id", Method, 5},
+ {"(*TypeName).IsAlias", Method, 9},
+ {"(*TypeName).Name", Method, 5},
+ {"(*TypeName).Parent", Method, 5},
+ {"(*TypeName).Pkg", Method, 5},
+ {"(*TypeName).Pos", Method, 5},
+ {"(*TypeName).String", Method, 5},
+ {"(*TypeName).Type", Method, 5},
+ {"(*TypeParam).Constraint", Method, 18},
+ {"(*TypeParam).Index", Method, 18},
+ {"(*TypeParam).Obj", Method, 18},
+ {"(*TypeParam).SetConstraint", Method, 18},
+ {"(*TypeParam).String", Method, 18},
+ {"(*TypeParam).Underlying", Method, 18},
+ {"(*TypeParamList).At", Method, 18},
+ {"(*TypeParamList).Len", Method, 18},
+ {"(*Union).Len", Method, 18},
+ {"(*Union).String", Method, 18},
+ {"(*Union).Term", Method, 18},
+ {"(*Union).Underlying", Method, 18},
+ {"(*Var).Anonymous", Method, 5},
+ {"(*Var).Embedded", Method, 11},
+ {"(*Var).Exported", Method, 5},
+ {"(*Var).Id", Method, 5},
+ {"(*Var).IsField", Method, 5},
+ {"(*Var).Name", Method, 5},
+ {"(*Var).Origin", Method, 19},
+ {"(*Var).Parent", Method, 5},
+ {"(*Var).Pkg", Method, 5},
+ {"(*Var).Pos", Method, 5},
+ {"(*Var).String", Method, 5},
+ {"(*Var).Type", Method, 5},
+ {"(Checker).ObjectOf", Method, 5},
+ {"(Checker).PkgNameOf", Method, 22},
+ {"(Checker).TypeOf", Method, 5},
+ {"(Error).Error", Method, 5},
+ {"(TypeAndValue).Addressable", Method, 5},
+ {"(TypeAndValue).Assignable", Method, 5},
+ {"(TypeAndValue).HasOk", Method, 5},
+ {"(TypeAndValue).IsBuiltin", Method, 5},
+ {"(TypeAndValue).IsNil", Method, 5},
+ {"(TypeAndValue).IsType", Method, 5},
+ {"(TypeAndValue).IsValue", Method, 5},
+ {"(TypeAndValue).IsVoid", Method, 5},
+ {"Alias", Type, 22},
+ {"ArgumentError", Type, 18},
+ {"ArgumentError.Err", Field, 18},
+ {"ArgumentError.Index", Field, 18},
+ {"Array", Type, 5},
+ {"AssertableTo", Func, 5},
+ {"AssignableTo", Func, 5},
+ {"Basic", Type, 5},
+ {"BasicInfo", Type, 5},
+ {"BasicKind", Type, 5},
+ {"Bool", Const, 5},
+ {"Builtin", Type, 5},
+ {"Byte", Const, 5},
+ {"Chan", Type, 5},
+ {"ChanDir", Type, 5},
+ {"CheckExpr", Func, 13},
+ {"Checker", Type, 5},
+ {"Checker.Info", Field, 5},
+ {"Comparable", Func, 5},
+ {"Complex128", Const, 5},
+ {"Complex64", Const, 5},
+ {"Config", Type, 5},
+ {"Config.Context", Field, 18},
+ {"Config.DisableUnusedImportCheck", Field, 5},
+ {"Config.Error", Field, 5},
+ {"Config.FakeImportC", Field, 5},
+ {"Config.GoVersion", Field, 18},
+ {"Config.IgnoreFuncBodies", Field, 5},
+ {"Config.Importer", Field, 5},
+ {"Config.Sizes", Field, 5},
+ {"Const", Type, 5},
+ {"Context", Type, 18},
+ {"ConvertibleTo", Func, 5},
+ {"DefPredeclaredTestFuncs", Func, 5},
+ {"Default", Func, 8},
+ {"Error", Type, 5},
+ {"Error.Fset", Field, 5},
+ {"Error.Msg", Field, 5},
+ {"Error.Pos", Field, 5},
+ {"Error.Soft", Field, 5},
+ {"Eval", Func, 5},
+ {"ExprString", Func, 5},
+ {"FieldVal", Const, 5},
+ {"Float32", Const, 5},
+ {"Float64", Const, 5},
+ {"Func", Type, 5},
+ {"Id", Func, 5},
+ {"Identical", Func, 5},
+ {"IdenticalIgnoreTags", Func, 8},
+ {"Implements", Func, 5},
+ {"ImportMode", Type, 6},
+ {"Importer", Type, 5},
+ {"ImporterFrom", Type, 6},
+ {"Info", Type, 5},
+ {"Info.Defs", Field, 5},
+ {"Info.FileVersions", Field, 22},
+ {"Info.Implicits", Field, 5},
+ {"Info.InitOrder", Field, 5},
+ {"Info.Instances", Field, 18},
+ {"Info.Scopes", Field, 5},
+ {"Info.Selections", Field, 5},
+ {"Info.Types", Field, 5},
+ {"Info.Uses", Field, 5},
+ {"Initializer", Type, 5},
+ {"Initializer.Lhs", Field, 5},
+ {"Initializer.Rhs", Field, 5},
+ {"Instance", Type, 18},
+ {"Instance.Type", Field, 18},
+ {"Instance.TypeArgs", Field, 18},
+ {"Instantiate", Func, 18},
+ {"Int", Const, 5},
+ {"Int16", Const, 5},
+ {"Int32", Const, 5},
+ {"Int64", Const, 5},
+ {"Int8", Const, 5},
+ {"Interface", Type, 5},
+ {"Invalid", Const, 5},
+ {"IsBoolean", Const, 5},
+ {"IsComplex", Const, 5},
+ {"IsConstType", Const, 5},
+ {"IsFloat", Const, 5},
+ {"IsInteger", Const, 5},
+ {"IsInterface", Func, 5},
+ {"IsNumeric", Const, 5},
+ {"IsOrdered", Const, 5},
+ {"IsString", Const, 5},
+ {"IsUnsigned", Const, 5},
+ {"IsUntyped", Const, 5},
+ {"Label", Type, 5},
+ {"LookupFieldOrMethod", Func, 5},
+ {"Map", Type, 5},
+ {"MethodExpr", Const, 5},
+ {"MethodSet", Type, 5},
+ {"MethodVal", Const, 5},
+ {"MissingMethod", Func, 5},
+ {"Named", Type, 5},
+ {"NewAlias", Func, 22},
+ {"NewArray", Func, 5},
+ {"NewChan", Func, 5},
+ {"NewChecker", Func, 5},
+ {"NewConst", Func, 5},
+ {"NewContext", Func, 18},
+ {"NewField", Func, 5},
+ {"NewFunc", Func, 5},
+ {"NewInterface", Func, 5},
+ {"NewInterfaceType", Func, 11},
+ {"NewLabel", Func, 5},
+ {"NewMap", Func, 5},
+ {"NewMethodSet", Func, 5},
+ {"NewNamed", Func, 5},
+ {"NewPackage", Func, 5},
+ {"NewParam", Func, 5},
+ {"NewPkgName", Func, 5},
+ {"NewPointer", Func, 5},
+ {"NewScope", Func, 5},
+ {"NewSignature", Func, 5},
+ {"NewSignatureType", Func, 18},
+ {"NewSlice", Func, 5},
+ {"NewStruct", Func, 5},
+ {"NewTerm", Func, 18},
+ {"NewTuple", Func, 5},
+ {"NewTypeName", Func, 5},
+ {"NewTypeParam", Func, 18},
+ {"NewUnion", Func, 18},
+ {"NewVar", Func, 5},
+ {"Nil", Type, 5},
+ {"Object", Type, 5},
+ {"ObjectString", Func, 5},
+ {"Package", Type, 5},
+ {"PkgName", Type, 5},
+ {"Pointer", Type, 5},
+ {"Qualifier", Type, 5},
+ {"RecvOnly", Const, 5},
+ {"RelativeTo", Func, 5},
+ {"Rune", Const, 5},
+ {"Satisfies", Func, 20},
+ {"Scope", Type, 5},
+ {"Selection", Type, 5},
+ {"SelectionKind", Type, 5},
+ {"SelectionString", Func, 5},
+ {"SendOnly", Const, 5},
+ {"SendRecv", Const, 5},
+ {"Signature", Type, 5},
+ {"Sizes", Type, 5},
+ {"SizesFor", Func, 9},
+ {"Slice", Type, 5},
+ {"StdSizes", Type, 5},
+ {"StdSizes.MaxAlign", Field, 5},
+ {"StdSizes.WordSize", Field, 5},
+ {"String", Const, 5},
+ {"Struct", Type, 5},
+ {"Term", Type, 18},
+ {"Tuple", Type, 5},
+ {"Typ", Var, 5},
+ {"Type", Type, 5},
+ {"TypeAndValue", Type, 5},
+ {"TypeAndValue.Type", Field, 5},
+ {"TypeAndValue.Value", Field, 5},
+ {"TypeList", Type, 18},
+ {"TypeName", Type, 5},
+ {"TypeParam", Type, 18},
+ {"TypeParamList", Type, 18},
+ {"TypeString", Func, 5},
+ {"Uint", Const, 5},
+ {"Uint16", Const, 5},
+ {"Uint32", Const, 5},
+ {"Uint64", Const, 5},
+ {"Uint8", Const, 5},
+ {"Uintptr", Const, 5},
+ {"Unalias", Func, 22},
+ {"Union", Type, 18},
+ {"Universe", Var, 5},
+ {"Unsafe", Var, 5},
+ {"UnsafePointer", Const, 5},
+ {"UntypedBool", Const, 5},
+ {"UntypedComplex", Const, 5},
+ {"UntypedFloat", Const, 5},
+ {"UntypedInt", Const, 5},
+ {"UntypedNil", Const, 5},
+ {"UntypedRune", Const, 5},
+ {"UntypedString", Const, 5},
+ {"Var", Type, 5},
+ {"WriteExpr", Func, 5},
+ {"WriteSignature", Func, 5},
+ {"WriteType", Func, 5},
+ },
+ "go/version": {
+ {"Compare", Func, 22},
+ {"IsValid", Func, 22},
+ {"Lang", Func, 22},
+ },
+ "hash": {
+ {"Hash", Type, 0},
+ {"Hash32", Type, 0},
+ {"Hash64", Type, 0},
+ },
+ "hash/adler32": {
+ {"Checksum", Func, 0},
+ {"New", Func, 0},
+ {"Size", Const, 0},
+ },
+ "hash/crc32": {
+ {"Castagnoli", Const, 0},
+ {"Checksum", Func, 0},
+ {"ChecksumIEEE", Func, 0},
+ {"IEEE", Const, 0},
+ {"IEEETable", Var, 0},
+ {"Koopman", Const, 0},
+ {"MakeTable", Func, 0},
+ {"New", Func, 0},
+ {"NewIEEE", Func, 0},
+ {"Size", Const, 0},
+ {"Table", Type, 0},
+ {"Update", Func, 0},
+ },
+ "hash/crc64": {
+ {"Checksum", Func, 0},
+ {"ECMA", Const, 0},
+ {"ISO", Const, 0},
+ {"MakeTable", Func, 0},
+ {"New", Func, 0},
+ {"Size", Const, 0},
+ {"Table", Type, 0},
+ {"Update", Func, 0},
+ },
+ "hash/fnv": {
+ {"New128", Func, 9},
+ {"New128a", Func, 9},
+ {"New32", Func, 0},
+ {"New32a", Func, 0},
+ {"New64", Func, 0},
+ {"New64a", Func, 0},
+ },
+ "hash/maphash": {
+ {"(*Hash).BlockSize", Method, 14},
+ {"(*Hash).Reset", Method, 14},
+ {"(*Hash).Seed", Method, 14},
+ {"(*Hash).SetSeed", Method, 14},
+ {"(*Hash).Size", Method, 14},
+ {"(*Hash).Sum", Method, 14},
+ {"(*Hash).Sum64", Method, 14},
+ {"(*Hash).Write", Method, 14},
+ {"(*Hash).WriteByte", Method, 14},
+ {"(*Hash).WriteString", Method, 14},
+ {"Bytes", Func, 19},
+ {"Hash", Type, 14},
+ {"MakeSeed", Func, 14},
+ {"Seed", Type, 14},
+ {"String", Func, 19},
+ },
+ "html": {
+ {"EscapeString", Func, 0},
+ {"UnescapeString", Func, 0},
+ },
+ "html/template": {
+ {"(*Error).Error", Method, 0},
+ {"(*Template).AddParseTree", Method, 0},
+ {"(*Template).Clone", Method, 0},
+ {"(*Template).DefinedTemplates", Method, 6},
+ {"(*Template).Delims", Method, 0},
+ {"(*Template).Execute", Method, 0},
+ {"(*Template).ExecuteTemplate", Method, 0},
+ {"(*Template).Funcs", Method, 0},
+ {"(*Template).Lookup", Method, 0},
+ {"(*Template).Name", Method, 0},
+ {"(*Template).New", Method, 0},
+ {"(*Template).Option", Method, 5},
+ {"(*Template).Parse", Method, 0},
+ {"(*Template).ParseFS", Method, 16},
+ {"(*Template).ParseFiles", Method, 0},
+ {"(*Template).ParseGlob", Method, 0},
+ {"(*Template).Templates", Method, 0},
+ {"CSS", Type, 0},
+ {"ErrAmbigContext", Const, 0},
+ {"ErrBadHTML", Const, 0},
+ {"ErrBranchEnd", Const, 0},
+ {"ErrEndContext", Const, 0},
+ {"ErrJSTemplate", Const, 21},
+ {"ErrNoSuchTemplate", Const, 0},
+ {"ErrOutputContext", Const, 0},
+ {"ErrPartialCharset", Const, 0},
+ {"ErrPartialEscape", Const, 0},
+ {"ErrPredefinedEscaper", Const, 9},
+ {"ErrRangeLoopReentry", Const, 0},
+ {"ErrSlashAmbig", Const, 0},
+ {"Error", Type, 0},
+ {"Error.Description", Field, 0},
+ {"Error.ErrorCode", Field, 0},
+ {"Error.Line", Field, 0},
+ {"Error.Name", Field, 0},
+ {"Error.Node", Field, 4},
+ {"ErrorCode", Type, 0},
+ {"FuncMap", Type, 0},
+ {"HTML", Type, 0},
+ {"HTMLAttr", Type, 0},
+ {"HTMLEscape", Func, 0},
+ {"HTMLEscapeString", Func, 0},
+ {"HTMLEscaper", Func, 0},
+ {"IsTrue", Func, 6},
+ {"JS", Type, 0},
+ {"JSEscape", Func, 0},
+ {"JSEscapeString", Func, 0},
+ {"JSEscaper", Func, 0},
+ {"JSStr", Type, 0},
+ {"Must", Func, 0},
+ {"New", Func, 0},
+ {"OK", Const, 0},
+ {"ParseFS", Func, 16},
+ {"ParseFiles", Func, 0},
+ {"ParseGlob", Func, 0},
+ {"Srcset", Type, 10},
+ {"Template", Type, 0},
+ {"Template.Tree", Field, 2},
+ {"URL", Type, 0},
+ {"URLQueryEscaper", Func, 0},
+ },
+ "image": {
+ {"(*Alpha).AlphaAt", Method, 4},
+ {"(*Alpha).At", Method, 0},
+ {"(*Alpha).Bounds", Method, 0},
+ {"(*Alpha).ColorModel", Method, 0},
+ {"(*Alpha).Opaque", Method, 0},
+ {"(*Alpha).PixOffset", Method, 0},
+ {"(*Alpha).RGBA64At", Method, 17},
+ {"(*Alpha).Set", Method, 0},
+ {"(*Alpha).SetAlpha", Method, 0},
+ {"(*Alpha).SetRGBA64", Method, 17},
+ {"(*Alpha).SubImage", Method, 0},
+ {"(*Alpha16).Alpha16At", Method, 4},
+ {"(*Alpha16).At", Method, 0},
+ {"(*Alpha16).Bounds", Method, 0},
+ {"(*Alpha16).ColorModel", Method, 0},
+ {"(*Alpha16).Opaque", Method, 0},
+ {"(*Alpha16).PixOffset", Method, 0},
+ {"(*Alpha16).RGBA64At", Method, 17},
+ {"(*Alpha16).Set", Method, 0},
+ {"(*Alpha16).SetAlpha16", Method, 0},
+ {"(*Alpha16).SetRGBA64", Method, 17},
+ {"(*Alpha16).SubImage", Method, 0},
+ {"(*CMYK).At", Method, 5},
+ {"(*CMYK).Bounds", Method, 5},
+ {"(*CMYK).CMYKAt", Method, 5},
+ {"(*CMYK).ColorModel", Method, 5},
+ {"(*CMYK).Opaque", Method, 5},
+ {"(*CMYK).PixOffset", Method, 5},
+ {"(*CMYK).RGBA64At", Method, 17},
+ {"(*CMYK).Set", Method, 5},
+ {"(*CMYK).SetCMYK", Method, 5},
+ {"(*CMYK).SetRGBA64", Method, 17},
+ {"(*CMYK).SubImage", Method, 5},
+ {"(*Gray).At", Method, 0},
+ {"(*Gray).Bounds", Method, 0},
+ {"(*Gray).ColorModel", Method, 0},
+ {"(*Gray).GrayAt", Method, 4},
+ {"(*Gray).Opaque", Method, 0},
+ {"(*Gray).PixOffset", Method, 0},
+ {"(*Gray).RGBA64At", Method, 17},
+ {"(*Gray).Set", Method, 0},
+ {"(*Gray).SetGray", Method, 0},
+ {"(*Gray).SetRGBA64", Method, 17},
+ {"(*Gray).SubImage", Method, 0},
+ {"(*Gray16).At", Method, 0},
+ {"(*Gray16).Bounds", Method, 0},
+ {"(*Gray16).ColorModel", Method, 0},
+ {"(*Gray16).Gray16At", Method, 4},
+ {"(*Gray16).Opaque", Method, 0},
+ {"(*Gray16).PixOffset", Method, 0},
+ {"(*Gray16).RGBA64At", Method, 17},
+ {"(*Gray16).Set", Method, 0},
+ {"(*Gray16).SetGray16", Method, 0},
+ {"(*Gray16).SetRGBA64", Method, 17},
+ {"(*Gray16).SubImage", Method, 0},
+ {"(*NRGBA).At", Method, 0},
+ {"(*NRGBA).Bounds", Method, 0},
+ {"(*NRGBA).ColorModel", Method, 0},
+ {"(*NRGBA).NRGBAAt", Method, 4},
+ {"(*NRGBA).Opaque", Method, 0},
+ {"(*NRGBA).PixOffset", Method, 0},
+ {"(*NRGBA).RGBA64At", Method, 17},
+ {"(*NRGBA).Set", Method, 0},
+ {"(*NRGBA).SetNRGBA", Method, 0},
+ {"(*NRGBA).SetRGBA64", Method, 17},
+ {"(*NRGBA).SubImage", Method, 0},
+ {"(*NRGBA64).At", Method, 0},
+ {"(*NRGBA64).Bounds", Method, 0},
+ {"(*NRGBA64).ColorModel", Method, 0},
+ {"(*NRGBA64).NRGBA64At", Method, 4},
+ {"(*NRGBA64).Opaque", Method, 0},
+ {"(*NRGBA64).PixOffset", Method, 0},
+ {"(*NRGBA64).RGBA64At", Method, 17},
+ {"(*NRGBA64).Set", Method, 0},
+ {"(*NRGBA64).SetNRGBA64", Method, 0},
+ {"(*NRGBA64).SetRGBA64", Method, 17},
+ {"(*NRGBA64).SubImage", Method, 0},
+ {"(*NYCbCrA).AOffset", Method, 6},
+ {"(*NYCbCrA).At", Method, 6},
+ {"(*NYCbCrA).Bounds", Method, 6},
+ {"(*NYCbCrA).COffset", Method, 6},
+ {"(*NYCbCrA).ColorModel", Method, 6},
+ {"(*NYCbCrA).NYCbCrAAt", Method, 6},
+ {"(*NYCbCrA).Opaque", Method, 6},
+ {"(*NYCbCrA).RGBA64At", Method, 17},
+ {"(*NYCbCrA).SubImage", Method, 6},
+ {"(*NYCbCrA).YCbCrAt", Method, 6},
+ {"(*NYCbCrA).YOffset", Method, 6},
+ {"(*Paletted).At", Method, 0},
+ {"(*Paletted).Bounds", Method, 0},
+ {"(*Paletted).ColorIndexAt", Method, 0},
+ {"(*Paletted).ColorModel", Method, 0},
+ {"(*Paletted).Opaque", Method, 0},
+ {"(*Paletted).PixOffset", Method, 0},
+ {"(*Paletted).RGBA64At", Method, 17},
+ {"(*Paletted).Set", Method, 0},
+ {"(*Paletted).SetColorIndex", Method, 0},
+ {"(*Paletted).SetRGBA64", Method, 17},
+ {"(*Paletted).SubImage", Method, 0},
+ {"(*RGBA).At", Method, 0},
+ {"(*RGBA).Bounds", Method, 0},
+ {"(*RGBA).ColorModel", Method, 0},
+ {"(*RGBA).Opaque", Method, 0},
+ {"(*RGBA).PixOffset", Method, 0},
+ {"(*RGBA).RGBA64At", Method, 17},
+ {"(*RGBA).RGBAAt", Method, 4},
+ {"(*RGBA).Set", Method, 0},
+ {"(*RGBA).SetRGBA", Method, 0},
+ {"(*RGBA).SetRGBA64", Method, 17},
+ {"(*RGBA).SubImage", Method, 0},
+ {"(*RGBA64).At", Method, 0},
+ {"(*RGBA64).Bounds", Method, 0},
+ {"(*RGBA64).ColorModel", Method, 0},
+ {"(*RGBA64).Opaque", Method, 0},
+ {"(*RGBA64).PixOffset", Method, 0},
+ {"(*RGBA64).RGBA64At", Method, 4},
+ {"(*RGBA64).Set", Method, 0},
+ {"(*RGBA64).SetRGBA64", Method, 0},
+ {"(*RGBA64).SubImage", Method, 0},
+ {"(*Uniform).At", Method, 0},
+ {"(*Uniform).Bounds", Method, 0},
+ {"(*Uniform).ColorModel", Method, 0},
+ {"(*Uniform).Convert", Method, 0},
+ {"(*Uniform).Opaque", Method, 0},
+ {"(*Uniform).RGBA", Method, 0},
+ {"(*Uniform).RGBA64At", Method, 17},
+ {"(*YCbCr).At", Method, 0},
+ {"(*YCbCr).Bounds", Method, 0},
+ {"(*YCbCr).COffset", Method, 0},
+ {"(*YCbCr).ColorModel", Method, 0},
+ {"(*YCbCr).Opaque", Method, 0},
+ {"(*YCbCr).RGBA64At", Method, 17},
+ {"(*YCbCr).SubImage", Method, 0},
+ {"(*YCbCr).YCbCrAt", Method, 4},
+ {"(*YCbCr).YOffset", Method, 0},
+ {"(Point).Add", Method, 0},
+ {"(Point).Div", Method, 0},
+ {"(Point).Eq", Method, 0},
+ {"(Point).In", Method, 0},
+ {"(Point).Mod", Method, 0},
+ {"(Point).Mul", Method, 0},
+ {"(Point).String", Method, 0},
+ {"(Point).Sub", Method, 0},
+ {"(Rectangle).Add", Method, 0},
+ {"(Rectangle).At", Method, 5},
+ {"(Rectangle).Bounds", Method, 5},
+ {"(Rectangle).Canon", Method, 0},
+ {"(Rectangle).ColorModel", Method, 5},
+ {"(Rectangle).Dx", Method, 0},
+ {"(Rectangle).Dy", Method, 0},
+ {"(Rectangle).Empty", Method, 0},
+ {"(Rectangle).Eq", Method, 0},
+ {"(Rectangle).In", Method, 0},
+ {"(Rectangle).Inset", Method, 0},
+ {"(Rectangle).Intersect", Method, 0},
+ {"(Rectangle).Overlaps", Method, 0},
+ {"(Rectangle).RGBA64At", Method, 17},
+ {"(Rectangle).Size", Method, 0},
+ {"(Rectangle).String", Method, 0},
+ {"(Rectangle).Sub", Method, 0},
+ {"(Rectangle).Union", Method, 0},
+ {"(YCbCrSubsampleRatio).String", Method, 0},
+ {"Alpha", Type, 0},
+ {"Alpha.Pix", Field, 0},
+ {"Alpha.Rect", Field, 0},
+ {"Alpha.Stride", Field, 0},
+ {"Alpha16", Type, 0},
+ {"Alpha16.Pix", Field, 0},
+ {"Alpha16.Rect", Field, 0},
+ {"Alpha16.Stride", Field, 0},
+ {"Black", Var, 0},
+ {"CMYK", Type, 5},
+ {"CMYK.Pix", Field, 5},
+ {"CMYK.Rect", Field, 5},
+ {"CMYK.Stride", Field, 5},
+ {"Config", Type, 0},
+ {"Config.ColorModel", Field, 0},
+ {"Config.Height", Field, 0},
+ {"Config.Width", Field, 0},
+ {"Decode", Func, 0},
+ {"DecodeConfig", Func, 0},
+ {"ErrFormat", Var, 0},
+ {"Gray", Type, 0},
+ {"Gray.Pix", Field, 0},
+ {"Gray.Rect", Field, 0},
+ {"Gray.Stride", Field, 0},
+ {"Gray16", Type, 0},
+ {"Gray16.Pix", Field, 0},
+ {"Gray16.Rect", Field, 0},
+ {"Gray16.Stride", Field, 0},
+ {"Image", Type, 0},
+ {"NRGBA", Type, 0},
+ {"NRGBA.Pix", Field, 0},
+ {"NRGBA.Rect", Field, 0},
+ {"NRGBA.Stride", Field, 0},
+ {"NRGBA64", Type, 0},
+ {"NRGBA64.Pix", Field, 0},
+ {"NRGBA64.Rect", Field, 0},
+ {"NRGBA64.Stride", Field, 0},
+ {"NYCbCrA", Type, 6},
+ {"NYCbCrA.A", Field, 6},
+ {"NYCbCrA.AStride", Field, 6},
+ {"NYCbCrA.YCbCr", Field, 6},
+ {"NewAlpha", Func, 0},
+ {"NewAlpha16", Func, 0},
+ {"NewCMYK", Func, 5},
+ {"NewGray", Func, 0},
+ {"NewGray16", Func, 0},
+ {"NewNRGBA", Func, 0},
+ {"NewNRGBA64", Func, 0},
+ {"NewNYCbCrA", Func, 6},
+ {"NewPaletted", Func, 0},
+ {"NewRGBA", Func, 0},
+ {"NewRGBA64", Func, 0},
+ {"NewUniform", Func, 0},
+ {"NewYCbCr", Func, 0},
+ {"Opaque", Var, 0},
+ {"Paletted", Type, 0},
+ {"Paletted.Palette", Field, 0},
+ {"Paletted.Pix", Field, 0},
+ {"Paletted.Rect", Field, 0},
+ {"Paletted.Stride", Field, 0},
+ {"PalettedImage", Type, 0},
+ {"Point", Type, 0},
+ {"Point.X", Field, 0},
+ {"Point.Y", Field, 0},
+ {"Pt", Func, 0},
+ {"RGBA", Type, 0},
+ {"RGBA.Pix", Field, 0},
+ {"RGBA.Rect", Field, 0},
+ {"RGBA.Stride", Field, 0},
+ {"RGBA64", Type, 0},
+ {"RGBA64.Pix", Field, 0},
+ {"RGBA64.Rect", Field, 0},
+ {"RGBA64.Stride", Field, 0},
+ {"RGBA64Image", Type, 17},
+ {"Rect", Func, 0},
+ {"Rectangle", Type, 0},
+ {"Rectangle.Max", Field, 0},
+ {"Rectangle.Min", Field, 0},
+ {"RegisterFormat", Func, 0},
+ {"Transparent", Var, 0},
+ {"Uniform", Type, 0},
+ {"Uniform.C", Field, 0},
+ {"White", Var, 0},
+ {"YCbCr", Type, 0},
+ {"YCbCr.CStride", Field, 0},
+ {"YCbCr.Cb", Field, 0},
+ {"YCbCr.Cr", Field, 0},
+ {"YCbCr.Rect", Field, 0},
+ {"YCbCr.SubsampleRatio", Field, 0},
+ {"YCbCr.Y", Field, 0},
+ {"YCbCr.YStride", Field, 0},
+ {"YCbCrSubsampleRatio", Type, 0},
+ {"YCbCrSubsampleRatio410", Const, 5},
+ {"YCbCrSubsampleRatio411", Const, 5},
+ {"YCbCrSubsampleRatio420", Const, 0},
+ {"YCbCrSubsampleRatio422", Const, 0},
+ {"YCbCrSubsampleRatio440", Const, 1},
+ {"YCbCrSubsampleRatio444", Const, 0},
+ {"ZP", Var, 0},
+ {"ZR", Var, 0},
+ },
+ "image/color": {
+ {"(Alpha).RGBA", Method, 0},
+ {"(Alpha16).RGBA", Method, 0},
+ {"(CMYK).RGBA", Method, 5},
+ {"(Gray).RGBA", Method, 0},
+ {"(Gray16).RGBA", Method, 0},
+ {"(NRGBA).RGBA", Method, 0},
+ {"(NRGBA64).RGBA", Method, 0},
+ {"(NYCbCrA).RGBA", Method, 6},
+ {"(Palette).Convert", Method, 0},
+ {"(Palette).Index", Method, 0},
+ {"(RGBA).RGBA", Method, 0},
+ {"(RGBA64).RGBA", Method, 0},
+ {"(YCbCr).RGBA", Method, 0},
+ {"Alpha", Type, 0},
+ {"Alpha.A", Field, 0},
+ {"Alpha16", Type, 0},
+ {"Alpha16.A", Field, 0},
+ {"Alpha16Model", Var, 0},
+ {"AlphaModel", Var, 0},
+ {"Black", Var, 0},
+ {"CMYK", Type, 5},
+ {"CMYK.C", Field, 5},
+ {"CMYK.K", Field, 5},
+ {"CMYK.M", Field, 5},
+ {"CMYK.Y", Field, 5},
+ {"CMYKModel", Var, 5},
+ {"CMYKToRGB", Func, 5},
+ {"Color", Type, 0},
+ {"Gray", Type, 0},
+ {"Gray.Y", Field, 0},
+ {"Gray16", Type, 0},
+ {"Gray16.Y", Field, 0},
+ {"Gray16Model", Var, 0},
+ {"GrayModel", Var, 0},
+ {"Model", Type, 0},
+ {"ModelFunc", Func, 0},
+ {"NRGBA", Type, 0},
+ {"NRGBA.A", Field, 0},
+ {"NRGBA.B", Field, 0},
+ {"NRGBA.G", Field, 0},
+ {"NRGBA.R", Field, 0},
+ {"NRGBA64", Type, 0},
+ {"NRGBA64.A", Field, 0},
+ {"NRGBA64.B", Field, 0},
+ {"NRGBA64.G", Field, 0},
+ {"NRGBA64.R", Field, 0},
+ {"NRGBA64Model", Var, 0},
+ {"NRGBAModel", Var, 0},
+ {"NYCbCrA", Type, 6},
+ {"NYCbCrA.A", Field, 6},
+ {"NYCbCrA.YCbCr", Field, 6},
+ {"NYCbCrAModel", Var, 6},
+ {"Opaque", Var, 0},
+ {"Palette", Type, 0},
+ {"RGBA", Type, 0},
+ {"RGBA.A", Field, 0},
+ {"RGBA.B", Field, 0},
+ {"RGBA.G", Field, 0},
+ {"RGBA.R", Field, 0},
+ {"RGBA64", Type, 0},
+ {"RGBA64.A", Field, 0},
+ {"RGBA64.B", Field, 0},
+ {"RGBA64.G", Field, 0},
+ {"RGBA64.R", Field, 0},
+ {"RGBA64Model", Var, 0},
+ {"RGBAModel", Var, 0},
+ {"RGBToCMYK", Func, 5},
+ {"RGBToYCbCr", Func, 0},
+ {"Transparent", Var, 0},
+ {"White", Var, 0},
+ {"YCbCr", Type, 0},
+ {"YCbCr.Cb", Field, 0},
+ {"YCbCr.Cr", Field, 0},
+ {"YCbCr.Y", Field, 0},
+ {"YCbCrModel", Var, 0},
+ {"YCbCrToRGB", Func, 0},
+ },
+ "image/color/palette": {
+ {"Plan9", Var, 2},
+ {"WebSafe", Var, 2},
+ },
+ "image/draw": {
+ {"(Op).Draw", Method, 2},
+ {"Draw", Func, 0},
+ {"DrawMask", Func, 0},
+ {"Drawer", Type, 2},
+ {"FloydSteinberg", Var, 2},
+ {"Image", Type, 0},
+ {"Op", Type, 0},
+ {"Over", Const, 0},
+ {"Quantizer", Type, 2},
+ {"RGBA64Image", Type, 17},
+ {"Src", Const, 0},
+ },
+ "image/gif": {
+ {"Decode", Func, 0},
+ {"DecodeAll", Func, 0},
+ {"DecodeConfig", Func, 0},
+ {"DisposalBackground", Const, 5},
+ {"DisposalNone", Const, 5},
+ {"DisposalPrevious", Const, 5},
+ {"Encode", Func, 2},
+ {"EncodeAll", Func, 2},
+ {"GIF", Type, 0},
+ {"GIF.BackgroundIndex", Field, 5},
+ {"GIF.Config", Field, 5},
+ {"GIF.Delay", Field, 0},
+ {"GIF.Disposal", Field, 5},
+ {"GIF.Image", Field, 0},
+ {"GIF.LoopCount", Field, 0},
+ {"Options", Type, 2},
+ {"Options.Drawer", Field, 2},
+ {"Options.NumColors", Field, 2},
+ {"Options.Quantizer", Field, 2},
+ },
+ "image/jpeg": {
+ {"(FormatError).Error", Method, 0},
+ {"(UnsupportedError).Error", Method, 0},
+ {"Decode", Func, 0},
+ {"DecodeConfig", Func, 0},
+ {"DefaultQuality", Const, 0},
+ {"Encode", Func, 0},
+ {"FormatError", Type, 0},
+ {"Options", Type, 0},
+ {"Options.Quality", Field, 0},
+ {"Reader", Type, 0},
+ {"UnsupportedError", Type, 0},
+ },
+ "image/png": {
+ {"(*Encoder).Encode", Method, 4},
+ {"(FormatError).Error", Method, 0},
+ {"(UnsupportedError).Error", Method, 0},
+ {"BestCompression", Const, 4},
+ {"BestSpeed", Const, 4},
+ {"CompressionLevel", Type, 4},
+ {"Decode", Func, 0},
+ {"DecodeConfig", Func, 0},
+ {"DefaultCompression", Const, 4},
+ {"Encode", Func, 0},
+ {"Encoder", Type, 4},
+ {"Encoder.BufferPool", Field, 9},
+ {"Encoder.CompressionLevel", Field, 4},
+ {"EncoderBuffer", Type, 9},
+ {"EncoderBufferPool", Type, 9},
+ {"FormatError", Type, 0},
+ {"NoCompression", Const, 4},
+ {"UnsupportedError", Type, 0},
+ },
+ "index/suffixarray": {
+ {"(*Index).Bytes", Method, 0},
+ {"(*Index).FindAllIndex", Method, 0},
+ {"(*Index).Lookup", Method, 0},
+ {"(*Index).Read", Method, 0},
+ {"(*Index).Write", Method, 0},
+ {"Index", Type, 0},
+ {"New", Func, 0},
+ },
+ "io": {
+ {"(*LimitedReader).Read", Method, 0},
+ {"(*OffsetWriter).Seek", Method, 20},
+ {"(*OffsetWriter).Write", Method, 20},
+ {"(*OffsetWriter).WriteAt", Method, 20},
+ {"(*PipeReader).Close", Method, 0},
+ {"(*PipeReader).CloseWithError", Method, 0},
+ {"(*PipeReader).Read", Method, 0},
+ {"(*PipeWriter).Close", Method, 0},
+ {"(*PipeWriter).CloseWithError", Method, 0},
+ {"(*PipeWriter).Write", Method, 0},
+ {"(*SectionReader).Outer", Method, 22},
+ {"(*SectionReader).Read", Method, 0},
+ {"(*SectionReader).ReadAt", Method, 0},
+ {"(*SectionReader).Seek", Method, 0},
+ {"(*SectionReader).Size", Method, 0},
+ {"ByteReader", Type, 0},
+ {"ByteScanner", Type, 0},
+ {"ByteWriter", Type, 1},
+ {"Closer", Type, 0},
+ {"Copy", Func, 0},
+ {"CopyBuffer", Func, 5},
+ {"CopyN", Func, 0},
+ {"Discard", Var, 16},
+ {"EOF", Var, 0},
+ {"ErrClosedPipe", Var, 0},
+ {"ErrNoProgress", Var, 1},
+ {"ErrShortBuffer", Var, 0},
+ {"ErrShortWrite", Var, 0},
+ {"ErrUnexpectedEOF", Var, 0},
+ {"LimitReader", Func, 0},
+ {"LimitedReader", Type, 0},
+ {"LimitedReader.N", Field, 0},
+ {"LimitedReader.R", Field, 0},
+ {"MultiReader", Func, 0},
+ {"MultiWriter", Func, 0},
+ {"NewOffsetWriter", Func, 20},
+ {"NewSectionReader", Func, 0},
+ {"NopCloser", Func, 16},
+ {"OffsetWriter", Type, 20},
+ {"Pipe", Func, 0},
+ {"PipeReader", Type, 0},
+ {"PipeWriter", Type, 0},
+ {"ReadAll", Func, 16},
+ {"ReadAtLeast", Func, 0},
+ {"ReadCloser", Type, 0},
+ {"ReadFull", Func, 0},
+ {"ReadSeekCloser", Type, 16},
+ {"ReadSeeker", Type, 0},
+ {"ReadWriteCloser", Type, 0},
+ {"ReadWriteSeeker", Type, 0},
+ {"ReadWriter", Type, 0},
+ {"Reader", Type, 0},
+ {"ReaderAt", Type, 0},
+ {"ReaderFrom", Type, 0},
+ {"RuneReader", Type, 0},
+ {"RuneScanner", Type, 0},
+ {"SectionReader", Type, 0},
+ {"SeekCurrent", Const, 7},
+ {"SeekEnd", Const, 7},
+ {"SeekStart", Const, 7},
+ {"Seeker", Type, 0},
+ {"StringWriter", Type, 12},
+ {"TeeReader", Func, 0},
+ {"WriteCloser", Type, 0},
+ {"WriteSeeker", Type, 0},
+ {"WriteString", Func, 0},
+ {"Writer", Type, 0},
+ {"WriterAt", Type, 0},
+ {"WriterTo", Type, 0},
+ },
+ "io/fs": {
+ {"(*PathError).Error", Method, 16},
+ {"(*PathError).Timeout", Method, 16},
+ {"(*PathError).Unwrap", Method, 16},
+ {"(FileMode).IsDir", Method, 16},
+ {"(FileMode).IsRegular", Method, 16},
+ {"(FileMode).Perm", Method, 16},
+ {"(FileMode).String", Method, 16},
+ {"(FileMode).Type", Method, 16},
+ {"DirEntry", Type, 16},
+ {"ErrClosed", Var, 16},
+ {"ErrExist", Var, 16},
+ {"ErrInvalid", Var, 16},
+ {"ErrNotExist", Var, 16},
+ {"ErrPermission", Var, 16},
+ {"FS", Type, 16},
+ {"File", Type, 16},
+ {"FileInfo", Type, 16},
+ {"FileInfoToDirEntry", Func, 17},
+ {"FileMode", Type, 16},
+ {"FormatDirEntry", Func, 21},
+ {"FormatFileInfo", Func, 21},
+ {"Glob", Func, 16},
+ {"GlobFS", Type, 16},
+ {"ModeAppend", Const, 16},
+ {"ModeCharDevice", Const, 16},
+ {"ModeDevice", Const, 16},
+ {"ModeDir", Const, 16},
+ {"ModeExclusive", Const, 16},
+ {"ModeIrregular", Const, 16},
+ {"ModeNamedPipe", Const, 16},
+ {"ModePerm", Const, 16},
+ {"ModeSetgid", Const, 16},
+ {"ModeSetuid", Const, 16},
+ {"ModeSocket", Const, 16},
+ {"ModeSticky", Const, 16},
+ {"ModeSymlink", Const, 16},
+ {"ModeTemporary", Const, 16},
+ {"ModeType", Const, 16},
+ {"PathError", Type, 16},
+ {"PathError.Err", Field, 16},
+ {"PathError.Op", Field, 16},
+ {"PathError.Path", Field, 16},
+ {"ReadDir", Func, 16},
+ {"ReadDirFS", Type, 16},
+ {"ReadDirFile", Type, 16},
+ {"ReadFile", Func, 16},
+ {"ReadFileFS", Type, 16},
+ {"SkipAll", Var, 20},
+ {"SkipDir", Var, 16},
+ {"Stat", Func, 16},
+ {"StatFS", Type, 16},
+ {"Sub", Func, 16},
+ {"SubFS", Type, 16},
+ {"ValidPath", Func, 16},
+ {"WalkDir", Func, 16},
+ {"WalkDirFunc", Type, 16},
+ },
+ "io/ioutil": {
+ {"Discard", Var, 0},
+ {"NopCloser", Func, 0},
+ {"ReadAll", Func, 0},
+ {"ReadDir", Func, 0},
+ {"ReadFile", Func, 0},
+ {"TempDir", Func, 0},
+ {"TempFile", Func, 0},
+ {"WriteFile", Func, 0},
+ },
+ "iter": {
+ {"Pull", Func, 23},
+ {"Pull2", Func, 23},
+ {"Seq", Type, 23},
+ {"Seq2", Type, 23},
+ },
+ "log": {
+ {"(*Logger).Fatal", Method, 0},
+ {"(*Logger).Fatalf", Method, 0},
+ {"(*Logger).Fatalln", Method, 0},
+ {"(*Logger).Flags", Method, 0},
+ {"(*Logger).Output", Method, 0},
+ {"(*Logger).Panic", Method, 0},
+ {"(*Logger).Panicf", Method, 0},
+ {"(*Logger).Panicln", Method, 0},
+ {"(*Logger).Prefix", Method, 0},
+ {"(*Logger).Print", Method, 0},
+ {"(*Logger).Printf", Method, 0},
+ {"(*Logger).Println", Method, 0},
+ {"(*Logger).SetFlags", Method, 0},
+ {"(*Logger).SetOutput", Method, 5},
+ {"(*Logger).SetPrefix", Method, 0},
+ {"(*Logger).Writer", Method, 12},
+ {"Default", Func, 16},
+ {"Fatal", Func, 0},
+ {"Fatalf", Func, 0},
+ {"Fatalln", Func, 0},
+ {"Flags", Func, 0},
+ {"LUTC", Const, 5},
+ {"Ldate", Const, 0},
+ {"Llongfile", Const, 0},
+ {"Lmicroseconds", Const, 0},
+ {"Lmsgprefix", Const, 14},
+ {"Logger", Type, 0},
+ {"Lshortfile", Const, 0},
+ {"LstdFlags", Const, 0},
+ {"Ltime", Const, 0},
+ {"New", Func, 0},
+ {"Output", Func, 5},
+ {"Panic", Func, 0},
+ {"Panicf", Func, 0},
+ {"Panicln", Func, 0},
+ {"Prefix", Func, 0},
+ {"Print", Func, 0},
+ {"Printf", Func, 0},
+ {"Println", Func, 0},
+ {"SetFlags", Func, 0},
+ {"SetOutput", Func, 0},
+ {"SetPrefix", Func, 0},
+ {"Writer", Func, 13},
+ },
+ "log/slog": {
+ {"(*JSONHandler).Enabled", Method, 21},
+ {"(*JSONHandler).Handle", Method, 21},
+ {"(*JSONHandler).WithAttrs", Method, 21},
+ {"(*JSONHandler).WithGroup", Method, 21},
+ {"(*Level).UnmarshalJSON", Method, 21},
+ {"(*Level).UnmarshalText", Method, 21},
+ {"(*LevelVar).Level", Method, 21},
+ {"(*LevelVar).MarshalText", Method, 21},
+ {"(*LevelVar).Set", Method, 21},
+ {"(*LevelVar).String", Method, 21},
+ {"(*LevelVar).UnmarshalText", Method, 21},
+ {"(*Logger).Debug", Method, 21},
+ {"(*Logger).DebugContext", Method, 21},
+ {"(*Logger).Enabled", Method, 21},
+ {"(*Logger).Error", Method, 21},
+ {"(*Logger).ErrorContext", Method, 21},
+ {"(*Logger).Handler", Method, 21},
+ {"(*Logger).Info", Method, 21},
+ {"(*Logger).InfoContext", Method, 21},
+ {"(*Logger).Log", Method, 21},
+ {"(*Logger).LogAttrs", Method, 21},
+ {"(*Logger).Warn", Method, 21},
+ {"(*Logger).WarnContext", Method, 21},
+ {"(*Logger).With", Method, 21},
+ {"(*Logger).WithGroup", Method, 21},
+ {"(*Record).Add", Method, 21},
+ {"(*Record).AddAttrs", Method, 21},
+ {"(*TextHandler).Enabled", Method, 21},
+ {"(*TextHandler).Handle", Method, 21},
+ {"(*TextHandler).WithAttrs", Method, 21},
+ {"(*TextHandler).WithGroup", Method, 21},
+ {"(Attr).Equal", Method, 21},
+ {"(Attr).String", Method, 21},
+ {"(Kind).String", Method, 21},
+ {"(Level).Level", Method, 21},
+ {"(Level).MarshalJSON", Method, 21},
+ {"(Level).MarshalText", Method, 21},
+ {"(Level).String", Method, 21},
+ {"(Record).Attrs", Method, 21},
+ {"(Record).Clone", Method, 21},
+ {"(Record).NumAttrs", Method, 21},
+ {"(Value).Any", Method, 21},
+ {"(Value).Bool", Method, 21},
+ {"(Value).Duration", Method, 21},
+ {"(Value).Equal", Method, 21},
+ {"(Value).Float64", Method, 21},
+ {"(Value).Group", Method, 21},
+ {"(Value).Int64", Method, 21},
+ {"(Value).Kind", Method, 21},
+ {"(Value).LogValuer", Method, 21},
+ {"(Value).Resolve", Method, 21},
+ {"(Value).String", Method, 21},
+ {"(Value).Time", Method, 21},
+ {"(Value).Uint64", Method, 21},
+ {"Any", Func, 21},
+ {"AnyValue", Func, 21},
+ {"Attr", Type, 21},
+ {"Attr.Key", Field, 21},
+ {"Attr.Value", Field, 21},
+ {"Bool", Func, 21},
+ {"BoolValue", Func, 21},
+ {"Debug", Func, 21},
+ {"DebugContext", Func, 21},
+ {"Default", Func, 21},
+ {"Duration", Func, 21},
+ {"DurationValue", Func, 21},
+ {"Error", Func, 21},
+ {"ErrorContext", Func, 21},
+ {"Float64", Func, 21},
+ {"Float64Value", Func, 21},
+ {"Group", Func, 21},
+ {"GroupValue", Func, 21},
+ {"Handler", Type, 21},
+ {"HandlerOptions", Type, 21},
+ {"HandlerOptions.AddSource", Field, 21},
+ {"HandlerOptions.Level", Field, 21},
+ {"HandlerOptions.ReplaceAttr", Field, 21},
+ {"Info", Func, 21},
+ {"InfoContext", Func, 21},
+ {"Int", Func, 21},
+ {"Int64", Func, 21},
+ {"Int64Value", Func, 21},
+ {"IntValue", Func, 21},
+ {"JSONHandler", Type, 21},
+ {"Kind", Type, 21},
+ {"KindAny", Const, 21},
+ {"KindBool", Const, 21},
+ {"KindDuration", Const, 21},
+ {"KindFloat64", Const, 21},
+ {"KindGroup", Const, 21},
+ {"KindInt64", Const, 21},
+ {"KindLogValuer", Const, 21},
+ {"KindString", Const, 21},
+ {"KindTime", Const, 21},
+ {"KindUint64", Const, 21},
+ {"Level", Type, 21},
+ {"LevelDebug", Const, 21},
+ {"LevelError", Const, 21},
+ {"LevelInfo", Const, 21},
+ {"LevelKey", Const, 21},
+ {"LevelVar", Type, 21},
+ {"LevelWarn", Const, 21},
+ {"Leveler", Type, 21},
+ {"Log", Func, 21},
+ {"LogAttrs", Func, 21},
+ {"LogValuer", Type, 21},
+ {"Logger", Type, 21},
+ {"MessageKey", Const, 21},
+ {"New", Func, 21},
+ {"NewJSONHandler", Func, 21},
+ {"NewLogLogger", Func, 21},
+ {"NewRecord", Func, 21},
+ {"NewTextHandler", Func, 21},
+ {"Record", Type, 21},
+ {"Record.Level", Field, 21},
+ {"Record.Message", Field, 21},
+ {"Record.PC", Field, 21},
+ {"Record.Time", Field, 21},
+ {"SetDefault", Func, 21},
+ {"SetLogLoggerLevel", Func, 22},
+ {"Source", Type, 21},
+ {"Source.File", Field, 21},
+ {"Source.Function", Field, 21},
+ {"Source.Line", Field, 21},
+ {"SourceKey", Const, 21},
+ {"String", Func, 21},
+ {"StringValue", Func, 21},
+ {"TextHandler", Type, 21},
+ {"Time", Func, 21},
+ {"TimeKey", Const, 21},
+ {"TimeValue", Func, 21},
+ {"Uint64", Func, 21},
+ {"Uint64Value", Func, 21},
+ {"Value", Type, 21},
+ {"Warn", Func, 21},
+ {"WarnContext", Func, 21},
+ {"With", Func, 21},
+ },
+ "log/syslog": {
+ {"(*Writer).Alert", Method, 0},
+ {"(*Writer).Close", Method, 0},
+ {"(*Writer).Crit", Method, 0},
+ {"(*Writer).Debug", Method, 0},
+ {"(*Writer).Emerg", Method, 0},
+ {"(*Writer).Err", Method, 0},
+ {"(*Writer).Info", Method, 0},
+ {"(*Writer).Notice", Method, 0},
+ {"(*Writer).Warning", Method, 0},
+ {"(*Writer).Write", Method, 0},
+ {"Dial", Func, 0},
+ {"LOG_ALERT", Const, 0},
+ {"LOG_AUTH", Const, 1},
+ {"LOG_AUTHPRIV", Const, 1},
+ {"LOG_CRIT", Const, 0},
+ {"LOG_CRON", Const, 1},
+ {"LOG_DAEMON", Const, 1},
+ {"LOG_DEBUG", Const, 0},
+ {"LOG_EMERG", Const, 0},
+ {"LOG_ERR", Const, 0},
+ {"LOG_FTP", Const, 1},
+ {"LOG_INFO", Const, 0},
+ {"LOG_KERN", Const, 1},
+ {"LOG_LOCAL0", Const, 1},
+ {"LOG_LOCAL1", Const, 1},
+ {"LOG_LOCAL2", Const, 1},
+ {"LOG_LOCAL3", Const, 1},
+ {"LOG_LOCAL4", Const, 1},
+ {"LOG_LOCAL5", Const, 1},
+ {"LOG_LOCAL6", Const, 1},
+ {"LOG_LOCAL7", Const, 1},
+ {"LOG_LPR", Const, 1},
+ {"LOG_MAIL", Const, 1},
+ {"LOG_NEWS", Const, 1},
+ {"LOG_NOTICE", Const, 0},
+ {"LOG_SYSLOG", Const, 1},
+ {"LOG_USER", Const, 1},
+ {"LOG_UUCP", Const, 1},
+ {"LOG_WARNING", Const, 0},
+ {"New", Func, 0},
+ {"NewLogger", Func, 0},
+ {"Priority", Type, 0},
+ {"Writer", Type, 0},
+ },
+ "maps": {
+ {"All", Func, 23},
+ {"Clone", Func, 21},
+ {"Collect", Func, 23},
+ {"Copy", Func, 21},
+ {"DeleteFunc", Func, 21},
+ {"Equal", Func, 21},
+ {"EqualFunc", Func, 21},
+ {"Insert", Func, 23},
+ {"Keys", Func, 23},
+ {"Values", Func, 23},
+ },
+ "math": {
+ {"Abs", Func, 0},
+ {"Acos", Func, 0},
+ {"Acosh", Func, 0},
+ {"Asin", Func, 0},
+ {"Asinh", Func, 0},
+ {"Atan", Func, 0},
+ {"Atan2", Func, 0},
+ {"Atanh", Func, 0},
+ {"Cbrt", Func, 0},
+ {"Ceil", Func, 0},
+ {"Copysign", Func, 0},
+ {"Cos", Func, 0},
+ {"Cosh", Func, 0},
+ {"Dim", Func, 0},
+ {"E", Const, 0},
+ {"Erf", Func, 0},
+ {"Erfc", Func, 0},
+ {"Erfcinv", Func, 10},
+ {"Erfinv", Func, 10},
+ {"Exp", Func, 0},
+ {"Exp2", Func, 0},
+ {"Expm1", Func, 0},
+ {"FMA", Func, 14},
+ {"Float32bits", Func, 0},
+ {"Float32frombits", Func, 0},
+ {"Float64bits", Func, 0},
+ {"Float64frombits", Func, 0},
+ {"Floor", Func, 0},
+ {"Frexp", Func, 0},
+ {"Gamma", Func, 0},
+ {"Hypot", Func, 0},
+ {"Ilogb", Func, 0},
+ {"Inf", Func, 0},
+ {"IsInf", Func, 0},
+ {"IsNaN", Func, 0},
+ {"J0", Func, 0},
+ {"J1", Func, 0},
+ {"Jn", Func, 0},
+ {"Ldexp", Func, 0},
+ {"Lgamma", Func, 0},
+ {"Ln10", Const, 0},
+ {"Ln2", Const, 0},
+ {"Log", Func, 0},
+ {"Log10", Func, 0},
+ {"Log10E", Const, 0},
+ {"Log1p", Func, 0},
+ {"Log2", Func, 0},
+ {"Log2E", Const, 0},
+ {"Logb", Func, 0},
+ {"Max", Func, 0},
+ {"MaxFloat32", Const, 0},
+ {"MaxFloat64", Const, 0},
+ {"MaxInt", Const, 17},
+ {"MaxInt16", Const, 0},
+ {"MaxInt32", Const, 0},
+ {"MaxInt64", Const, 0},
+ {"MaxInt8", Const, 0},
+ {"MaxUint", Const, 17},
+ {"MaxUint16", Const, 0},
+ {"MaxUint32", Const, 0},
+ {"MaxUint64", Const, 0},
+ {"MaxUint8", Const, 0},
+ {"Min", Func, 0},
+ {"MinInt", Const, 17},
+ {"MinInt16", Const, 0},
+ {"MinInt32", Const, 0},
+ {"MinInt64", Const, 0},
+ {"MinInt8", Const, 0},
+ {"Mod", Func, 0},
+ {"Modf", Func, 0},
+ {"NaN", Func, 0},
+ {"Nextafter", Func, 0},
+ {"Nextafter32", Func, 4},
+ {"Phi", Const, 0},
+ {"Pi", Const, 0},
+ {"Pow", Func, 0},
+ {"Pow10", Func, 0},
+ {"Remainder", Func, 0},
+ {"Round", Func, 10},
+ {"RoundToEven", Func, 10},
+ {"Signbit", Func, 0},
+ {"Sin", Func, 0},
+ {"Sincos", Func, 0},
+ {"Sinh", Func, 0},
+ {"SmallestNonzeroFloat32", Const, 0},
+ {"SmallestNonzeroFloat64", Const, 0},
+ {"Sqrt", Func, 0},
+ {"Sqrt2", Const, 0},
+ {"SqrtE", Const, 0},
+ {"SqrtPhi", Const, 0},
+ {"SqrtPi", Const, 0},
+ {"Tan", Func, 0},
+ {"Tanh", Func, 0},
+ {"Trunc", Func, 0},
+ {"Y0", Func, 0},
+ {"Y1", Func, 0},
+ {"Yn", Func, 0},
+ },
+ "math/big": {
+ {"(*Float).Abs", Method, 5},
+ {"(*Float).Acc", Method, 5},
+ {"(*Float).Add", Method, 5},
+ {"(*Float).Append", Method, 5},
+ {"(*Float).Cmp", Method, 5},
+ {"(*Float).Copy", Method, 5},
+ {"(*Float).Float32", Method, 5},
+ {"(*Float).Float64", Method, 5},
+ {"(*Float).Format", Method, 5},
+ {"(*Float).GobDecode", Method, 7},
+ {"(*Float).GobEncode", Method, 7},
+ {"(*Float).Int", Method, 5},
+ {"(*Float).Int64", Method, 5},
+ {"(*Float).IsInf", Method, 5},
+ {"(*Float).IsInt", Method, 5},
+ {"(*Float).MantExp", Method, 5},
+ {"(*Float).MarshalText", Method, 6},
+ {"(*Float).MinPrec", Method, 5},
+ {"(*Float).Mode", Method, 5},
+ {"(*Float).Mul", Method, 5},
+ {"(*Float).Neg", Method, 5},
+ {"(*Float).Parse", Method, 5},
+ {"(*Float).Prec", Method, 5},
+ {"(*Float).Quo", Method, 5},
+ {"(*Float).Rat", Method, 5},
+ {"(*Float).Scan", Method, 8},
+ {"(*Float).Set", Method, 5},
+ {"(*Float).SetFloat64", Method, 5},
+ {"(*Float).SetInf", Method, 5},
+ {"(*Float).SetInt", Method, 5},
+ {"(*Float).SetInt64", Method, 5},
+ {"(*Float).SetMantExp", Method, 5},
+ {"(*Float).SetMode", Method, 5},
+ {"(*Float).SetPrec", Method, 5},
+ {"(*Float).SetRat", Method, 5},
+ {"(*Float).SetString", Method, 5},
+ {"(*Float).SetUint64", Method, 5},
+ {"(*Float).Sign", Method, 5},
+ {"(*Float).Signbit", Method, 5},
+ {"(*Float).Sqrt", Method, 10},
+ {"(*Float).String", Method, 5},
+ {"(*Float).Sub", Method, 5},
+ {"(*Float).Text", Method, 5},
+ {"(*Float).Uint64", Method, 5},
+ {"(*Float).UnmarshalText", Method, 6},
+ {"(*Int).Abs", Method, 0},
+ {"(*Int).Add", Method, 0},
+ {"(*Int).And", Method, 0},
+ {"(*Int).AndNot", Method, 0},
+ {"(*Int).Append", Method, 6},
+ {"(*Int).Binomial", Method, 0},
+ {"(*Int).Bit", Method, 0},
+ {"(*Int).BitLen", Method, 0},
+ {"(*Int).Bits", Method, 0},
+ {"(*Int).Bytes", Method, 0},
+ {"(*Int).Cmp", Method, 0},
+ {"(*Int).CmpAbs", Method, 10},
+ {"(*Int).Div", Method, 0},
+ {"(*Int).DivMod", Method, 0},
+ {"(*Int).Exp", Method, 0},
+ {"(*Int).FillBytes", Method, 15},
+ {"(*Int).Float64", Method, 21},
+ {"(*Int).Format", Method, 0},
+ {"(*Int).GCD", Method, 0},
+ {"(*Int).GobDecode", Method, 0},
+ {"(*Int).GobEncode", Method, 0},
+ {"(*Int).Int64", Method, 0},
+ {"(*Int).IsInt64", Method, 9},
+ {"(*Int).IsUint64", Method, 9},
+ {"(*Int).Lsh", Method, 0},
+ {"(*Int).MarshalJSON", Method, 1},
+ {"(*Int).MarshalText", Method, 3},
+ {"(*Int).Mod", Method, 0},
+ {"(*Int).ModInverse", Method, 0},
+ {"(*Int).ModSqrt", Method, 5},
+ {"(*Int).Mul", Method, 0},
+ {"(*Int).MulRange", Method, 0},
+ {"(*Int).Neg", Method, 0},
+ {"(*Int).Not", Method, 0},
+ {"(*Int).Or", Method, 0},
+ {"(*Int).ProbablyPrime", Method, 0},
+ {"(*Int).Quo", Method, 0},
+ {"(*Int).QuoRem", Method, 0},
+ {"(*Int).Rand", Method, 0},
+ {"(*Int).Rem", Method, 0},
+ {"(*Int).Rsh", Method, 0},
+ {"(*Int).Scan", Method, 0},
+ {"(*Int).Set", Method, 0},
+ {"(*Int).SetBit", Method, 0},
+ {"(*Int).SetBits", Method, 0},
+ {"(*Int).SetBytes", Method, 0},
+ {"(*Int).SetInt64", Method, 0},
+ {"(*Int).SetString", Method, 0},
+ {"(*Int).SetUint64", Method, 1},
+ {"(*Int).Sign", Method, 0},
+ {"(*Int).Sqrt", Method, 8},
+ {"(*Int).String", Method, 0},
+ {"(*Int).Sub", Method, 0},
+ {"(*Int).Text", Method, 6},
+ {"(*Int).TrailingZeroBits", Method, 13},
+ {"(*Int).Uint64", Method, 1},
+ {"(*Int).UnmarshalJSON", Method, 1},
+ {"(*Int).UnmarshalText", Method, 3},
+ {"(*Int).Xor", Method, 0},
+ {"(*Rat).Abs", Method, 0},
+ {"(*Rat).Add", Method, 0},
+ {"(*Rat).Cmp", Method, 0},
+ {"(*Rat).Denom", Method, 0},
+ {"(*Rat).Float32", Method, 4},
+ {"(*Rat).Float64", Method, 1},
+ {"(*Rat).FloatPrec", Method, 22},
+ {"(*Rat).FloatString", Method, 0},
+ {"(*Rat).GobDecode", Method, 0},
+ {"(*Rat).GobEncode", Method, 0},
+ {"(*Rat).Inv", Method, 0},
+ {"(*Rat).IsInt", Method, 0},
+ {"(*Rat).MarshalText", Method, 3},
+ {"(*Rat).Mul", Method, 0},
+ {"(*Rat).Neg", Method, 0},
+ {"(*Rat).Num", Method, 0},
+ {"(*Rat).Quo", Method, 0},
+ {"(*Rat).RatString", Method, 0},
+ {"(*Rat).Scan", Method, 0},
+ {"(*Rat).Set", Method, 0},
+ {"(*Rat).SetFloat64", Method, 1},
+ {"(*Rat).SetFrac", Method, 0},
+ {"(*Rat).SetFrac64", Method, 0},
+ {"(*Rat).SetInt", Method, 0},
+ {"(*Rat).SetInt64", Method, 0},
+ {"(*Rat).SetString", Method, 0},
+ {"(*Rat).SetUint64", Method, 13},
+ {"(*Rat).Sign", Method, 0},
+ {"(*Rat).String", Method, 0},
+ {"(*Rat).Sub", Method, 0},
+ {"(*Rat).UnmarshalText", Method, 3},
+ {"(Accuracy).String", Method, 5},
+ {"(ErrNaN).Error", Method, 5},
+ {"(RoundingMode).String", Method, 5},
+ {"Above", Const, 5},
+ {"Accuracy", Type, 5},
+ {"AwayFromZero", Const, 5},
+ {"Below", Const, 5},
+ {"ErrNaN", Type, 5},
+ {"Exact", Const, 5},
+ {"Float", Type, 5},
+ {"Int", Type, 0},
+ {"Jacobi", Func, 5},
+ {"MaxBase", Const, 0},
+ {"MaxExp", Const, 5},
+ {"MaxPrec", Const, 5},
+ {"MinExp", Const, 5},
+ {"NewFloat", Func, 5},
+ {"NewInt", Func, 0},
+ {"NewRat", Func, 0},
+ {"ParseFloat", Func, 5},
+ {"Rat", Type, 0},
+ {"RoundingMode", Type, 5},
+ {"ToNearestAway", Const, 5},
+ {"ToNearestEven", Const, 5},
+ {"ToNegativeInf", Const, 5},
+ {"ToPositiveInf", Const, 5},
+ {"ToZero", Const, 5},
+ {"Word", Type, 0},
+ },
+ "math/bits": {
+ {"Add", Func, 12},
+ {"Add32", Func, 12},
+ {"Add64", Func, 12},
+ {"Div", Func, 12},
+ {"Div32", Func, 12},
+ {"Div64", Func, 12},
+ {"LeadingZeros", Func, 9},
+ {"LeadingZeros16", Func, 9},
+ {"LeadingZeros32", Func, 9},
+ {"LeadingZeros64", Func, 9},
+ {"LeadingZeros8", Func, 9},
+ {"Len", Func, 9},
+ {"Len16", Func, 9},
+ {"Len32", Func, 9},
+ {"Len64", Func, 9},
+ {"Len8", Func, 9},
+ {"Mul", Func, 12},
+ {"Mul32", Func, 12},
+ {"Mul64", Func, 12},
+ {"OnesCount", Func, 9},
+ {"OnesCount16", Func, 9},
+ {"OnesCount32", Func, 9},
+ {"OnesCount64", Func, 9},
+ {"OnesCount8", Func, 9},
+ {"Rem", Func, 14},
+ {"Rem32", Func, 14},
+ {"Rem64", Func, 14},
+ {"Reverse", Func, 9},
+ {"Reverse16", Func, 9},
+ {"Reverse32", Func, 9},
+ {"Reverse64", Func, 9},
+ {"Reverse8", Func, 9},
+ {"ReverseBytes", Func, 9},
+ {"ReverseBytes16", Func, 9},
+ {"ReverseBytes32", Func, 9},
+ {"ReverseBytes64", Func, 9},
+ {"RotateLeft", Func, 9},
+ {"RotateLeft16", Func, 9},
+ {"RotateLeft32", Func, 9},
+ {"RotateLeft64", Func, 9},
+ {"RotateLeft8", Func, 9},
+ {"Sub", Func, 12},
+ {"Sub32", Func, 12},
+ {"Sub64", Func, 12},
+ {"TrailingZeros", Func, 9},
+ {"TrailingZeros16", Func, 9},
+ {"TrailingZeros32", Func, 9},
+ {"TrailingZeros64", Func, 9},
+ {"TrailingZeros8", Func, 9},
+ {"UintSize", Const, 9},
+ },
+ "math/cmplx": {
+ {"Abs", Func, 0},
+ {"Acos", Func, 0},
+ {"Acosh", Func, 0},
+ {"Asin", Func, 0},
+ {"Asinh", Func, 0},
+ {"Atan", Func, 0},
+ {"Atanh", Func, 0},
+ {"Conj", Func, 0},
+ {"Cos", Func, 0},
+ {"Cosh", Func, 0},
+ {"Cot", Func, 0},
+ {"Exp", Func, 0},
+ {"Inf", Func, 0},
+ {"IsInf", Func, 0},
+ {"IsNaN", Func, 0},
+ {"Log", Func, 0},
+ {"Log10", Func, 0},
+ {"NaN", Func, 0},
+ {"Phase", Func, 0},
+ {"Polar", Func, 0},
+ {"Pow", Func, 0},
+ {"Rect", Func, 0},
+ {"Sin", Func, 0},
+ {"Sinh", Func, 0},
+ {"Sqrt", Func, 0},
+ {"Tan", Func, 0},
+ {"Tanh", Func, 0},
+ },
+ "math/rand": {
+ {"(*Rand).ExpFloat64", Method, 0},
+ {"(*Rand).Float32", Method, 0},
+ {"(*Rand).Float64", Method, 0},
+ {"(*Rand).Int", Method, 0},
+ {"(*Rand).Int31", Method, 0},
+ {"(*Rand).Int31n", Method, 0},
+ {"(*Rand).Int63", Method, 0},
+ {"(*Rand).Int63n", Method, 0},
+ {"(*Rand).Intn", Method, 0},
+ {"(*Rand).NormFloat64", Method, 0},
+ {"(*Rand).Perm", Method, 0},
+ {"(*Rand).Read", Method, 6},
+ {"(*Rand).Seed", Method, 0},
+ {"(*Rand).Shuffle", Method, 10},
+ {"(*Rand).Uint32", Method, 0},
+ {"(*Rand).Uint64", Method, 8},
+ {"(*Zipf).Uint64", Method, 0},
+ {"ExpFloat64", Func, 0},
+ {"Float32", Func, 0},
+ {"Float64", Func, 0},
+ {"Int", Func, 0},
+ {"Int31", Func, 0},
+ {"Int31n", Func, 0},
+ {"Int63", Func, 0},
+ {"Int63n", Func, 0},
+ {"Intn", Func, 0},
+ {"New", Func, 0},
+ {"NewSource", Func, 0},
+ {"NewZipf", Func, 0},
+ {"NormFloat64", Func, 0},
+ {"Perm", Func, 0},
+ {"Rand", Type, 0},
+ {"Read", Func, 6},
+ {"Seed", Func, 0},
+ {"Shuffle", Func, 10},
+ {"Source", Type, 0},
+ {"Source64", Type, 8},
+ {"Uint32", Func, 0},
+ {"Uint64", Func, 8},
+ {"Zipf", Type, 0},
+ },
+ "math/rand/v2": {
+ {"(*ChaCha8).MarshalBinary", Method, 22},
+ {"(*ChaCha8).Read", Method, 23},
+ {"(*ChaCha8).Seed", Method, 22},
+ {"(*ChaCha8).Uint64", Method, 22},
+ {"(*ChaCha8).UnmarshalBinary", Method, 22},
+ {"(*PCG).MarshalBinary", Method, 22},
+ {"(*PCG).Seed", Method, 22},
+ {"(*PCG).Uint64", Method, 22},
+ {"(*PCG).UnmarshalBinary", Method, 22},
+ {"(*Rand).ExpFloat64", Method, 22},
+ {"(*Rand).Float32", Method, 22},
+ {"(*Rand).Float64", Method, 22},
+ {"(*Rand).Int", Method, 22},
+ {"(*Rand).Int32", Method, 22},
+ {"(*Rand).Int32N", Method, 22},
+ {"(*Rand).Int64", Method, 22},
+ {"(*Rand).Int64N", Method, 22},
+ {"(*Rand).IntN", Method, 22},
+ {"(*Rand).NormFloat64", Method, 22},
+ {"(*Rand).Perm", Method, 22},
+ {"(*Rand).Shuffle", Method, 22},
+ {"(*Rand).Uint", Method, 23},
+ {"(*Rand).Uint32", Method, 22},
+ {"(*Rand).Uint32N", Method, 22},
+ {"(*Rand).Uint64", Method, 22},
+ {"(*Rand).Uint64N", Method, 22},
+ {"(*Rand).UintN", Method, 22},
+ {"(*Zipf).Uint64", Method, 22},
+ {"ChaCha8", Type, 22},
+ {"ExpFloat64", Func, 22},
+ {"Float32", Func, 22},
+ {"Float64", Func, 22},
+ {"Int", Func, 22},
+ {"Int32", Func, 22},
+ {"Int32N", Func, 22},
+ {"Int64", Func, 22},
+ {"Int64N", Func, 22},
+ {"IntN", Func, 22},
+ {"N", Func, 22},
+ {"New", Func, 22},
+ {"NewChaCha8", Func, 22},
+ {"NewPCG", Func, 22},
+ {"NewZipf", Func, 22},
+ {"NormFloat64", Func, 22},
+ {"PCG", Type, 22},
+ {"Perm", Func, 22},
+ {"Rand", Type, 22},
+ {"Shuffle", Func, 22},
+ {"Source", Type, 22},
+ {"Uint", Func, 23},
+ {"Uint32", Func, 22},
+ {"Uint32N", Func, 22},
+ {"Uint64", Func, 22},
+ {"Uint64N", Func, 22},
+ {"UintN", Func, 22},
+ {"Zipf", Type, 22},
+ },
+ "mime": {
+ {"(*WordDecoder).Decode", Method, 5},
+ {"(*WordDecoder).DecodeHeader", Method, 5},
+ {"(WordEncoder).Encode", Method, 5},
+ {"AddExtensionType", Func, 0},
+ {"BEncoding", Const, 5},
+ {"ErrInvalidMediaParameter", Var, 9},
+ {"ExtensionsByType", Func, 5},
+ {"FormatMediaType", Func, 0},
+ {"ParseMediaType", Func, 0},
+ {"QEncoding", Const, 5},
+ {"TypeByExtension", Func, 0},
+ {"WordDecoder", Type, 5},
+ {"WordDecoder.CharsetReader", Field, 5},
+ {"WordEncoder", Type, 5},
+ },
+ "mime/multipart": {
+ {"(*FileHeader).Open", Method, 0},
+ {"(*Form).RemoveAll", Method, 0},
+ {"(*Part).Close", Method, 0},
+ {"(*Part).FileName", Method, 0},
+ {"(*Part).FormName", Method, 0},
+ {"(*Part).Read", Method, 0},
+ {"(*Reader).NextPart", Method, 0},
+ {"(*Reader).NextRawPart", Method, 14},
+ {"(*Reader).ReadForm", Method, 0},
+ {"(*Writer).Boundary", Method, 0},
+ {"(*Writer).Close", Method, 0},
+ {"(*Writer).CreateFormField", Method, 0},
+ {"(*Writer).CreateFormFile", Method, 0},
+ {"(*Writer).CreatePart", Method, 0},
+ {"(*Writer).FormDataContentType", Method, 0},
+ {"(*Writer).SetBoundary", Method, 1},
+ {"(*Writer).WriteField", Method, 0},
+ {"ErrMessageTooLarge", Var, 9},
+ {"File", Type, 0},
+ {"FileHeader", Type, 0},
+ {"FileHeader.Filename", Field, 0},
+ {"FileHeader.Header", Field, 0},
+ {"FileHeader.Size", Field, 9},
+ {"Form", Type, 0},
+ {"Form.File", Field, 0},
+ {"Form.Value", Field, 0},
+ {"NewReader", Func, 0},
+ {"NewWriter", Func, 0},
+ {"Part", Type, 0},
+ {"Part.Header", Field, 0},
+ {"Reader", Type, 0},
+ {"Writer", Type, 0},
+ },
+ "mime/quotedprintable": {
+ {"(*Reader).Read", Method, 5},
+ {"(*Writer).Close", Method, 5},
+ {"(*Writer).Write", Method, 5},
+ {"NewReader", Func, 5},
+ {"NewWriter", Func, 5},
+ {"Reader", Type, 5},
+ {"Writer", Type, 5},
+ {"Writer.Binary", Field, 5},
+ },
+ "net": {
+ {"(*AddrError).Error", Method, 0},
+ {"(*AddrError).Temporary", Method, 0},
+ {"(*AddrError).Timeout", Method, 0},
+ {"(*Buffers).Read", Method, 8},
+ {"(*Buffers).WriteTo", Method, 8},
+ {"(*DNSConfigError).Error", Method, 0},
+ {"(*DNSConfigError).Temporary", Method, 0},
+ {"(*DNSConfigError).Timeout", Method, 0},
+ {"(*DNSConfigError).Unwrap", Method, 13},
+ {"(*DNSError).Error", Method, 0},
+ {"(*DNSError).Temporary", Method, 0},
+ {"(*DNSError).Timeout", Method, 0},
+ {"(*DNSError).Unwrap", Method, 23},
+ {"(*Dialer).Dial", Method, 1},
+ {"(*Dialer).DialContext", Method, 7},
+ {"(*Dialer).MultipathTCP", Method, 21},
+ {"(*Dialer).SetMultipathTCP", Method, 21},
+ {"(*IP).UnmarshalText", Method, 2},
+ {"(*IPAddr).Network", Method, 0},
+ {"(*IPAddr).String", Method, 0},
+ {"(*IPConn).Close", Method, 0},
+ {"(*IPConn).File", Method, 0},
+ {"(*IPConn).LocalAddr", Method, 0},
+ {"(*IPConn).Read", Method, 0},
+ {"(*IPConn).ReadFrom", Method, 0},
+ {"(*IPConn).ReadFromIP", Method, 0},
+ {"(*IPConn).ReadMsgIP", Method, 1},
+ {"(*IPConn).RemoteAddr", Method, 0},
+ {"(*IPConn).SetDeadline", Method, 0},
+ {"(*IPConn).SetReadBuffer", Method, 0},
+ {"(*IPConn).SetReadDeadline", Method, 0},
+ {"(*IPConn).SetWriteBuffer", Method, 0},
+ {"(*IPConn).SetWriteDeadline", Method, 0},
+ {"(*IPConn).SyscallConn", Method, 9},
+ {"(*IPConn).Write", Method, 0},
+ {"(*IPConn).WriteMsgIP", Method, 1},
+ {"(*IPConn).WriteTo", Method, 0},
+ {"(*IPConn).WriteToIP", Method, 0},
+ {"(*IPNet).Contains", Method, 0},
+ {"(*IPNet).Network", Method, 0},
+ {"(*IPNet).String", Method, 0},
+ {"(*Interface).Addrs", Method, 0},
+ {"(*Interface).MulticastAddrs", Method, 0},
+ {"(*ListenConfig).Listen", Method, 11},
+ {"(*ListenConfig).ListenPacket", Method, 11},
+ {"(*ListenConfig).MultipathTCP", Method, 21},
+ {"(*ListenConfig).SetMultipathTCP", Method, 21},
+ {"(*OpError).Error", Method, 0},
+ {"(*OpError).Temporary", Method, 0},
+ {"(*OpError).Timeout", Method, 0},
+ {"(*OpError).Unwrap", Method, 13},
+ {"(*ParseError).Error", Method, 0},
+ {"(*ParseError).Temporary", Method, 17},
+ {"(*ParseError).Timeout", Method, 17},
+ {"(*Resolver).LookupAddr", Method, 8},
+ {"(*Resolver).LookupCNAME", Method, 8},
+ {"(*Resolver).LookupHost", Method, 8},
+ {"(*Resolver).LookupIP", Method, 15},
+ {"(*Resolver).LookupIPAddr", Method, 8},
+ {"(*Resolver).LookupMX", Method, 8},
+ {"(*Resolver).LookupNS", Method, 8},
+ {"(*Resolver).LookupNetIP", Method, 18},
+ {"(*Resolver).LookupPort", Method, 8},
+ {"(*Resolver).LookupSRV", Method, 8},
+ {"(*Resolver).LookupTXT", Method, 8},
+ {"(*TCPAddr).AddrPort", Method, 18},
+ {"(*TCPAddr).Network", Method, 0},
+ {"(*TCPAddr).String", Method, 0},
+ {"(*TCPConn).Close", Method, 0},
+ {"(*TCPConn).CloseRead", Method, 0},
+ {"(*TCPConn).CloseWrite", Method, 0},
+ {"(*TCPConn).File", Method, 0},
+ {"(*TCPConn).LocalAddr", Method, 0},
+ {"(*TCPConn).MultipathTCP", Method, 21},
+ {"(*TCPConn).Read", Method, 0},
+ {"(*TCPConn).ReadFrom", Method, 0},
+ {"(*TCPConn).RemoteAddr", Method, 0},
+ {"(*TCPConn).SetDeadline", Method, 0},
+ {"(*TCPConn).SetKeepAlive", Method, 0},
+ {"(*TCPConn).SetKeepAliveConfig", Method, 23},
+ {"(*TCPConn).SetKeepAlivePeriod", Method, 2},
+ {"(*TCPConn).SetLinger", Method, 0},
+ {"(*TCPConn).SetNoDelay", Method, 0},
+ {"(*TCPConn).SetReadBuffer", Method, 0},
+ {"(*TCPConn).SetReadDeadline", Method, 0},
+ {"(*TCPConn).SetWriteBuffer", Method, 0},
+ {"(*TCPConn).SetWriteDeadline", Method, 0},
+ {"(*TCPConn).SyscallConn", Method, 9},
+ {"(*TCPConn).Write", Method, 0},
+ {"(*TCPConn).WriteTo", Method, 22},
+ {"(*TCPListener).Accept", Method, 0},
+ {"(*TCPListener).AcceptTCP", Method, 0},
+ {"(*TCPListener).Addr", Method, 0},
+ {"(*TCPListener).Close", Method, 0},
+ {"(*TCPListener).File", Method, 0},
+ {"(*TCPListener).SetDeadline", Method, 0},
+ {"(*TCPListener).SyscallConn", Method, 10},
+ {"(*UDPAddr).AddrPort", Method, 18},
+ {"(*UDPAddr).Network", Method, 0},
+ {"(*UDPAddr).String", Method, 0},
+ {"(*UDPConn).Close", Method, 0},
+ {"(*UDPConn).File", Method, 0},
+ {"(*UDPConn).LocalAddr", Method, 0},
+ {"(*UDPConn).Read", Method, 0},
+ {"(*UDPConn).ReadFrom", Method, 0},
+ {"(*UDPConn).ReadFromUDP", Method, 0},
+ {"(*UDPConn).ReadFromUDPAddrPort", Method, 18},
+ {"(*UDPConn).ReadMsgUDP", Method, 1},
+ {"(*UDPConn).ReadMsgUDPAddrPort", Method, 18},
+ {"(*UDPConn).RemoteAddr", Method, 0},
+ {"(*UDPConn).SetDeadline", Method, 0},
+ {"(*UDPConn).SetReadBuffer", Method, 0},
+ {"(*UDPConn).SetReadDeadline", Method, 0},
+ {"(*UDPConn).SetWriteBuffer", Method, 0},
+ {"(*UDPConn).SetWriteDeadline", Method, 0},
+ {"(*UDPConn).SyscallConn", Method, 9},
+ {"(*UDPConn).Write", Method, 0},
+ {"(*UDPConn).WriteMsgUDP", Method, 1},
+ {"(*UDPConn).WriteMsgUDPAddrPort", Method, 18},
+ {"(*UDPConn).WriteTo", Method, 0},
+ {"(*UDPConn).WriteToUDP", Method, 0},
+ {"(*UDPConn).WriteToUDPAddrPort", Method, 18},
+ {"(*UnixAddr).Network", Method, 0},
+ {"(*UnixAddr).String", Method, 0},
+ {"(*UnixConn).Close", Method, 0},
+ {"(*UnixConn).CloseRead", Method, 1},
+ {"(*UnixConn).CloseWrite", Method, 1},
+ {"(*UnixConn).File", Method, 0},
+ {"(*UnixConn).LocalAddr", Method, 0},
+ {"(*UnixConn).Read", Method, 0},
+ {"(*UnixConn).ReadFrom", Method, 0},
+ {"(*UnixConn).ReadFromUnix", Method, 0},
+ {"(*UnixConn).ReadMsgUnix", Method, 0},
+ {"(*UnixConn).RemoteAddr", Method, 0},
+ {"(*UnixConn).SetDeadline", Method, 0},
+ {"(*UnixConn).SetReadBuffer", Method, 0},
+ {"(*UnixConn).SetReadDeadline", Method, 0},
+ {"(*UnixConn).SetWriteBuffer", Method, 0},
+ {"(*UnixConn).SetWriteDeadline", Method, 0},
+ {"(*UnixConn).SyscallConn", Method, 9},
+ {"(*UnixConn).Write", Method, 0},
+ {"(*UnixConn).WriteMsgUnix", Method, 0},
+ {"(*UnixConn).WriteTo", Method, 0},
+ {"(*UnixConn).WriteToUnix", Method, 0},
+ {"(*UnixListener).Accept", Method, 0},
+ {"(*UnixListener).AcceptUnix", Method, 0},
+ {"(*UnixListener).Addr", Method, 0},
+ {"(*UnixListener).Close", Method, 0},
+ {"(*UnixListener).File", Method, 0},
+ {"(*UnixListener).SetDeadline", Method, 0},
+ {"(*UnixListener).SetUnlinkOnClose", Method, 8},
+ {"(*UnixListener).SyscallConn", Method, 10},
+ {"(Flags).String", Method, 0},
+ {"(HardwareAddr).String", Method, 0},
+ {"(IP).DefaultMask", Method, 0},
+ {"(IP).Equal", Method, 0},
+ {"(IP).IsGlobalUnicast", Method, 0},
+ {"(IP).IsInterfaceLocalMulticast", Method, 0},
+ {"(IP).IsLinkLocalMulticast", Method, 0},
+ {"(IP).IsLinkLocalUnicast", Method, 0},
+ {"(IP).IsLoopback", Method, 0},
+ {"(IP).IsMulticast", Method, 0},
+ {"(IP).IsPrivate", Method, 17},
+ {"(IP).IsUnspecified", Method, 0},
+ {"(IP).MarshalText", Method, 2},
+ {"(IP).Mask", Method, 0},
+ {"(IP).String", Method, 0},
+ {"(IP).To16", Method, 0},
+ {"(IP).To4", Method, 0},
+ {"(IPMask).Size", Method, 0},
+ {"(IPMask).String", Method, 0},
+ {"(InvalidAddrError).Error", Method, 0},
+ {"(InvalidAddrError).Temporary", Method, 0},
+ {"(InvalidAddrError).Timeout", Method, 0},
+ {"(UnknownNetworkError).Error", Method, 0},
+ {"(UnknownNetworkError).Temporary", Method, 0},
+ {"(UnknownNetworkError).Timeout", Method, 0},
+ {"Addr", Type, 0},
+ {"AddrError", Type, 0},
+ {"AddrError.Addr", Field, 0},
+ {"AddrError.Err", Field, 0},
+ {"Buffers", Type, 8},
+ {"CIDRMask", Func, 0},
+ {"Conn", Type, 0},
+ {"DNSConfigError", Type, 0},
+ {"DNSConfigError.Err", Field, 0},
+ {"DNSError", Type, 0},
+ {"DNSError.Err", Field, 0},
+ {"DNSError.IsNotFound", Field, 13},
+ {"DNSError.IsTemporary", Field, 6},
+ {"DNSError.IsTimeout", Field, 0},
+ {"DNSError.Name", Field, 0},
+ {"DNSError.Server", Field, 0},
+ {"DNSError.UnwrapErr", Field, 23},
+ {"DefaultResolver", Var, 8},
+ {"Dial", Func, 0},
+ {"DialIP", Func, 0},
+ {"DialTCP", Func, 0},
+ {"DialTimeout", Func, 0},
+ {"DialUDP", Func, 0},
+ {"DialUnix", Func, 0},
+ {"Dialer", Type, 1},
+ {"Dialer.Cancel", Field, 6},
+ {"Dialer.Control", Field, 11},
+ {"Dialer.ControlContext", Field, 20},
+ {"Dialer.Deadline", Field, 1},
+ {"Dialer.DualStack", Field, 2},
+ {"Dialer.FallbackDelay", Field, 5},
+ {"Dialer.KeepAlive", Field, 3},
+ {"Dialer.KeepAliveConfig", Field, 23},
+ {"Dialer.LocalAddr", Field, 1},
+ {"Dialer.Resolver", Field, 8},
+ {"Dialer.Timeout", Field, 1},
+ {"ErrClosed", Var, 16},
+ {"ErrWriteToConnected", Var, 0},
+ {"Error", Type, 0},
+ {"FileConn", Func, 0},
+ {"FileListener", Func, 0},
+ {"FilePacketConn", Func, 0},
+ {"FlagBroadcast", Const, 0},
+ {"FlagLoopback", Const, 0},
+ {"FlagMulticast", Const, 0},
+ {"FlagPointToPoint", Const, 0},
+ {"FlagRunning", Const, 20},
+ {"FlagUp", Const, 0},
+ {"Flags", Type, 0},
+ {"HardwareAddr", Type, 0},
+ {"IP", Type, 0},
+ {"IPAddr", Type, 0},
+ {"IPAddr.IP", Field, 0},
+ {"IPAddr.Zone", Field, 1},
+ {"IPConn", Type, 0},
+ {"IPMask", Type, 0},
+ {"IPNet", Type, 0},
+ {"IPNet.IP", Field, 0},
+ {"IPNet.Mask", Field, 0},
+ {"IPv4", Func, 0},
+ {"IPv4Mask", Func, 0},
+ {"IPv4allrouter", Var, 0},
+ {"IPv4allsys", Var, 0},
+ {"IPv4bcast", Var, 0},
+ {"IPv4len", Const, 0},
+ {"IPv4zero", Var, 0},
+ {"IPv6interfacelocalallnodes", Var, 0},
+ {"IPv6len", Const, 0},
+ {"IPv6linklocalallnodes", Var, 0},
+ {"IPv6linklocalallrouters", Var, 0},
+ {"IPv6loopback", Var, 0},
+ {"IPv6unspecified", Var, 0},
+ {"IPv6zero", Var, 0},
+ {"Interface", Type, 0},
+ {"Interface.Flags", Field, 0},
+ {"Interface.HardwareAddr", Field, 0},
+ {"Interface.Index", Field, 0},
+ {"Interface.MTU", Field, 0},
+ {"Interface.Name", Field, 0},
+ {"InterfaceAddrs", Func, 0},
+ {"InterfaceByIndex", Func, 0},
+ {"InterfaceByName", Func, 0},
+ {"Interfaces", Func, 0},
+ {"InvalidAddrError", Type, 0},
+ {"JoinHostPort", Func, 0},
+ {"KeepAliveConfig", Type, 23},
+ {"KeepAliveConfig.Count", Field, 23},
+ {"KeepAliveConfig.Enable", Field, 23},
+ {"KeepAliveConfig.Idle", Field, 23},
+ {"KeepAliveConfig.Interval", Field, 23},
+ {"Listen", Func, 0},
+ {"ListenConfig", Type, 11},
+ {"ListenConfig.Control", Field, 11},
+ {"ListenConfig.KeepAlive", Field, 13},
+ {"ListenConfig.KeepAliveConfig", Field, 23},
+ {"ListenIP", Func, 0},
+ {"ListenMulticastUDP", Func, 0},
+ {"ListenPacket", Func, 0},
+ {"ListenTCP", Func, 0},
+ {"ListenUDP", Func, 0},
+ {"ListenUnix", Func, 0},
+ {"ListenUnixgram", Func, 0},
+ {"Listener", Type, 0},
+ {"LookupAddr", Func, 0},
+ {"LookupCNAME", Func, 0},
+ {"LookupHost", Func, 0},
+ {"LookupIP", Func, 0},
+ {"LookupMX", Func, 0},
+ {"LookupNS", Func, 1},
+ {"LookupPort", Func, 0},
+ {"LookupSRV", Func, 0},
+ {"LookupTXT", Func, 0},
+ {"MX", Type, 0},
+ {"MX.Host", Field, 0},
+ {"MX.Pref", Field, 0},
+ {"NS", Type, 1},
+ {"NS.Host", Field, 1},
+ {"OpError", Type, 0},
+ {"OpError.Addr", Field, 0},
+ {"OpError.Err", Field, 0},
+ {"OpError.Net", Field, 0},
+ {"OpError.Op", Field, 0},
+ {"OpError.Source", Field, 5},
+ {"PacketConn", Type, 0},
+ {"ParseCIDR", Func, 0},
+ {"ParseError", Type, 0},
+ {"ParseError.Text", Field, 0},
+ {"ParseError.Type", Field, 0},
+ {"ParseIP", Func, 0},
+ {"ParseMAC", Func, 0},
+ {"Pipe", Func, 0},
+ {"ResolveIPAddr", Func, 0},
+ {"ResolveTCPAddr", Func, 0},
+ {"ResolveUDPAddr", Func, 0},
+ {"ResolveUnixAddr", Func, 0},
+ {"Resolver", Type, 8},
+ {"Resolver.Dial", Field, 9},
+ {"Resolver.PreferGo", Field, 8},
+ {"Resolver.StrictErrors", Field, 9},
+ {"SRV", Type, 0},
+ {"SRV.Port", Field, 0},
+ {"SRV.Priority", Field, 0},
+ {"SRV.Target", Field, 0},
+ {"SRV.Weight", Field, 0},
+ {"SplitHostPort", Func, 0},
+ {"TCPAddr", Type, 0},
+ {"TCPAddr.IP", Field, 0},
+ {"TCPAddr.Port", Field, 0},
+ {"TCPAddr.Zone", Field, 1},
+ {"TCPAddrFromAddrPort", Func, 18},
+ {"TCPConn", Type, 0},
+ {"TCPListener", Type, 0},
+ {"UDPAddr", Type, 0},
+ {"UDPAddr.IP", Field, 0},
+ {"UDPAddr.Port", Field, 0},
+ {"UDPAddr.Zone", Field, 1},
+ {"UDPAddrFromAddrPort", Func, 18},
+ {"UDPConn", Type, 0},
+ {"UnixAddr", Type, 0},
+ {"UnixAddr.Name", Field, 0},
+ {"UnixAddr.Net", Field, 0},
+ {"UnixConn", Type, 0},
+ {"UnixListener", Type, 0},
+ {"UnknownNetworkError", Type, 0},
+ },
+ "net/http": {
+ {"(*Client).CloseIdleConnections", Method, 12},
+ {"(*Client).Do", Method, 0},
+ {"(*Client).Get", Method, 0},
+ {"(*Client).Head", Method, 0},
+ {"(*Client).Post", Method, 0},
+ {"(*Client).PostForm", Method, 0},
+ {"(*Cookie).String", Method, 0},
+ {"(*Cookie).Valid", Method, 18},
+ {"(*MaxBytesError).Error", Method, 19},
+ {"(*ProtocolError).Error", Method, 0},
+ {"(*ProtocolError).Is", Method, 21},
+ {"(*Request).AddCookie", Method, 0},
+ {"(*Request).BasicAuth", Method, 4},
+ {"(*Request).Clone", Method, 13},
+ {"(*Request).Context", Method, 7},
+ {"(*Request).Cookie", Method, 0},
+ {"(*Request).Cookies", Method, 0},
+ {"(*Request).CookiesNamed", Method, 23},
+ {"(*Request).FormFile", Method, 0},
+ {"(*Request).FormValue", Method, 0},
+ {"(*Request).MultipartReader", Method, 0},
+ {"(*Request).ParseForm", Method, 0},
+ {"(*Request).ParseMultipartForm", Method, 0},
+ {"(*Request).PathValue", Method, 22},
+ {"(*Request).PostFormValue", Method, 1},
+ {"(*Request).ProtoAtLeast", Method, 0},
+ {"(*Request).Referer", Method, 0},
+ {"(*Request).SetBasicAuth", Method, 0},
+ {"(*Request).SetPathValue", Method, 22},
+ {"(*Request).UserAgent", Method, 0},
+ {"(*Request).WithContext", Method, 7},
+ {"(*Request).Write", Method, 0},
+ {"(*Request).WriteProxy", Method, 0},
+ {"(*Response).Cookies", Method, 0},
+ {"(*Response).Location", Method, 0},
+ {"(*Response).ProtoAtLeast", Method, 0},
+ {"(*Response).Write", Method, 0},
+ {"(*ResponseController).EnableFullDuplex", Method, 21},
+ {"(*ResponseController).Flush", Method, 20},
+ {"(*ResponseController).Hijack", Method, 20},
+ {"(*ResponseController).SetReadDeadline", Method, 20},
+ {"(*ResponseController).SetWriteDeadline", Method, 20},
+ {"(*ServeMux).Handle", Method, 0},
+ {"(*ServeMux).HandleFunc", Method, 0},
+ {"(*ServeMux).Handler", Method, 1},
+ {"(*ServeMux).ServeHTTP", Method, 0},
+ {"(*Server).Close", Method, 8},
+ {"(*Server).ListenAndServe", Method, 0},
+ {"(*Server).ListenAndServeTLS", Method, 0},
+ {"(*Server).RegisterOnShutdown", Method, 9},
+ {"(*Server).Serve", Method, 0},
+ {"(*Server).ServeTLS", Method, 9},
+ {"(*Server).SetKeepAlivesEnabled", Method, 3},
+ {"(*Server).Shutdown", Method, 8},
+ {"(*Transport).CancelRequest", Method, 1},
+ {"(*Transport).Clone", Method, 13},
+ {"(*Transport).CloseIdleConnections", Method, 0},
+ {"(*Transport).RegisterProtocol", Method, 0},
+ {"(*Transport).RoundTrip", Method, 0},
+ {"(ConnState).String", Method, 3},
+ {"(Dir).Open", Method, 0},
+ {"(HandlerFunc).ServeHTTP", Method, 0},
+ {"(Header).Add", Method, 0},
+ {"(Header).Clone", Method, 13},
+ {"(Header).Del", Method, 0},
+ {"(Header).Get", Method, 0},
+ {"(Header).Set", Method, 0},
+ {"(Header).Values", Method, 14},
+ {"(Header).Write", Method, 0},
+ {"(Header).WriteSubset", Method, 0},
+ {"AllowQuerySemicolons", Func, 17},
+ {"CanonicalHeaderKey", Func, 0},
+ {"Client", Type, 0},
+ {"Client.CheckRedirect", Field, 0},
+ {"Client.Jar", Field, 0},
+ {"Client.Timeout", Field, 3},
+ {"Client.Transport", Field, 0},
+ {"CloseNotifier", Type, 1},
+ {"ConnState", Type, 3},
+ {"Cookie", Type, 0},
+ {"Cookie.Domain", Field, 0},
+ {"Cookie.Expires", Field, 0},
+ {"Cookie.HttpOnly", Field, 0},
+ {"Cookie.MaxAge", Field, 0},
+ {"Cookie.Name", Field, 0},
+ {"Cookie.Partitioned", Field, 23},
+ {"Cookie.Path", Field, 0},
+ {"Cookie.Quoted", Field, 23},
+ {"Cookie.Raw", Field, 0},
+ {"Cookie.RawExpires", Field, 0},
+ {"Cookie.SameSite", Field, 11},
+ {"Cookie.Secure", Field, 0},
+ {"Cookie.Unparsed", Field, 0},
+ {"Cookie.Value", Field, 0},
+ {"CookieJar", Type, 0},
+ {"DefaultClient", Var, 0},
+ {"DefaultMaxHeaderBytes", Const, 0},
+ {"DefaultMaxIdleConnsPerHost", Const, 0},
+ {"DefaultServeMux", Var, 0},
+ {"DefaultTransport", Var, 0},
+ {"DetectContentType", Func, 0},
+ {"Dir", Type, 0},
+ {"ErrAbortHandler", Var, 8},
+ {"ErrBodyNotAllowed", Var, 0},
+ {"ErrBodyReadAfterClose", Var, 0},
+ {"ErrContentLength", Var, 0},
+ {"ErrHandlerTimeout", Var, 0},
+ {"ErrHeaderTooLong", Var, 0},
+ {"ErrHijacked", Var, 0},
+ {"ErrLineTooLong", Var, 0},
+ {"ErrMissingBoundary", Var, 0},
+ {"ErrMissingContentLength", Var, 0},
+ {"ErrMissingFile", Var, 0},
+ {"ErrNoCookie", Var, 0},
+ {"ErrNoLocation", Var, 0},
+ {"ErrNotMultipart", Var, 0},
+ {"ErrNotSupported", Var, 0},
+ {"ErrSchemeMismatch", Var, 21},
+ {"ErrServerClosed", Var, 8},
+ {"ErrShortBody", Var, 0},
+ {"ErrSkipAltProtocol", Var, 6},
+ {"ErrUnexpectedTrailer", Var, 0},
+ {"ErrUseLastResponse", Var, 7},
+ {"ErrWriteAfterFlush", Var, 0},
+ {"Error", Func, 0},
+ {"FS", Func, 16},
+ {"File", Type, 0},
+ {"FileServer", Func, 0},
+ {"FileServerFS", Func, 22},
+ {"FileSystem", Type, 0},
+ {"Flusher", Type, 0},
+ {"Get", Func, 0},
+ {"Handle", Func, 0},
+ {"HandleFunc", Func, 0},
+ {"Handler", Type, 0},
+ {"HandlerFunc", Type, 0},
+ {"Head", Func, 0},
+ {"Header", Type, 0},
+ {"Hijacker", Type, 0},
+ {"ListenAndServe", Func, 0},
+ {"ListenAndServeTLS", Func, 0},
+ {"LocalAddrContextKey", Var, 7},
+ {"MaxBytesError", Type, 19},
+ {"MaxBytesError.Limit", Field, 19},
+ {"MaxBytesHandler", Func, 18},
+ {"MaxBytesReader", Func, 0},
+ {"MethodConnect", Const, 6},
+ {"MethodDelete", Const, 6},
+ {"MethodGet", Const, 6},
+ {"MethodHead", Const, 6},
+ {"MethodOptions", Const, 6},
+ {"MethodPatch", Const, 6},
+ {"MethodPost", Const, 6},
+ {"MethodPut", Const, 6},
+ {"MethodTrace", Const, 6},
+ {"NewFileTransport", Func, 0},
+ {"NewFileTransportFS", Func, 22},
+ {"NewRequest", Func, 0},
+ {"NewRequestWithContext", Func, 13},
+ {"NewResponseController", Func, 20},
+ {"NewServeMux", Func, 0},
+ {"NoBody", Var, 8},
+ {"NotFound", Func, 0},
+ {"NotFoundHandler", Func, 0},
+ {"ParseCookie", Func, 23},
+ {"ParseHTTPVersion", Func, 0},
+ {"ParseSetCookie", Func, 23},
+ {"ParseTime", Func, 1},
+ {"Post", Func, 0},
+ {"PostForm", Func, 0},
+ {"ProtocolError", Type, 0},
+ {"ProtocolError.ErrorString", Field, 0},
+ {"ProxyFromEnvironment", Func, 0},
+ {"ProxyURL", Func, 0},
+ {"PushOptions", Type, 8},
+ {"PushOptions.Header", Field, 8},
+ {"PushOptions.Method", Field, 8},
+ {"Pusher", Type, 8},
+ {"ReadRequest", Func, 0},
+ {"ReadResponse", Func, 0},
+ {"Redirect", Func, 0},
+ {"RedirectHandler", Func, 0},
+ {"Request", Type, 0},
+ {"Request.Body", Field, 0},
+ {"Request.Cancel", Field, 5},
+ {"Request.Close", Field, 0},
+ {"Request.ContentLength", Field, 0},
+ {"Request.Form", Field, 0},
+ {"Request.GetBody", Field, 8},
+ {"Request.Header", Field, 0},
+ {"Request.Host", Field, 0},
+ {"Request.Method", Field, 0},
+ {"Request.MultipartForm", Field, 0},
+ {"Request.Pattern", Field, 23},
+ {"Request.PostForm", Field, 1},
+ {"Request.Proto", Field, 0},
+ {"Request.ProtoMajor", Field, 0},
+ {"Request.ProtoMinor", Field, 0},
+ {"Request.RemoteAddr", Field, 0},
+ {"Request.RequestURI", Field, 0},
+ {"Request.Response", Field, 7},
+ {"Request.TLS", Field, 0},
+ {"Request.Trailer", Field, 0},
+ {"Request.TransferEncoding", Field, 0},
+ {"Request.URL", Field, 0},
+ {"Response", Type, 0},
+ {"Response.Body", Field, 0},
+ {"Response.Close", Field, 0},
+ {"Response.ContentLength", Field, 0},
+ {"Response.Header", Field, 0},
+ {"Response.Proto", Field, 0},
+ {"Response.ProtoMajor", Field, 0},
+ {"Response.ProtoMinor", Field, 0},
+ {"Response.Request", Field, 0},
+ {"Response.Status", Field, 0},
+ {"Response.StatusCode", Field, 0},
+ {"Response.TLS", Field, 3},
+ {"Response.Trailer", Field, 0},
+ {"Response.TransferEncoding", Field, 0},
+ {"Response.Uncompressed", Field, 7},
+ {"ResponseController", Type, 20},
+ {"ResponseWriter", Type, 0},
+ {"RoundTripper", Type, 0},
+ {"SameSite", Type, 11},
+ {"SameSiteDefaultMode", Const, 11},
+ {"SameSiteLaxMode", Const, 11},
+ {"SameSiteNoneMode", Const, 13},
+ {"SameSiteStrictMode", Const, 11},
+ {"Serve", Func, 0},
+ {"ServeContent", Func, 0},
+ {"ServeFile", Func, 0},
+ {"ServeFileFS", Func, 22},
+ {"ServeMux", Type, 0},
+ {"ServeTLS", Func, 9},
+ {"Server", Type, 0},
+ {"Server.Addr", Field, 0},
+ {"Server.BaseContext", Field, 13},
+ {"Server.ConnContext", Field, 13},
+ {"Server.ConnState", Field, 3},
+ {"Server.DisableGeneralOptionsHandler", Field, 20},
+ {"Server.ErrorLog", Field, 3},
+ {"Server.Handler", Field, 0},
+ {"Server.IdleTimeout", Field, 8},
+ {"Server.MaxHeaderBytes", Field, 0},
+ {"Server.ReadHeaderTimeout", Field, 8},
+ {"Server.ReadTimeout", Field, 0},
+ {"Server.TLSConfig", Field, 0},
+ {"Server.TLSNextProto", Field, 1},
+ {"Server.WriteTimeout", Field, 0},
+ {"ServerContextKey", Var, 7},
+ {"SetCookie", Func, 0},
+ {"StateActive", Const, 3},
+ {"StateClosed", Const, 3},
+ {"StateHijacked", Const, 3},
+ {"StateIdle", Const, 3},
+ {"StateNew", Const, 3},
+ {"StatusAccepted", Const, 0},
+ {"StatusAlreadyReported", Const, 7},
+ {"StatusBadGateway", Const, 0},
+ {"StatusBadRequest", Const, 0},
+ {"StatusConflict", Const, 0},
+ {"StatusContinue", Const, 0},
+ {"StatusCreated", Const, 0},
+ {"StatusEarlyHints", Const, 13},
+ {"StatusExpectationFailed", Const, 0},
+ {"StatusFailedDependency", Const, 7},
+ {"StatusForbidden", Const, 0},
+ {"StatusFound", Const, 0},
+ {"StatusGatewayTimeout", Const, 0},
+ {"StatusGone", Const, 0},
+ {"StatusHTTPVersionNotSupported", Const, 0},
+ {"StatusIMUsed", Const, 7},
+ {"StatusInsufficientStorage", Const, 7},
+ {"StatusInternalServerError", Const, 0},
+ {"StatusLengthRequired", Const, 0},
+ {"StatusLocked", Const, 7},
+ {"StatusLoopDetected", Const, 7},
+ {"StatusMethodNotAllowed", Const, 0},
+ {"StatusMisdirectedRequest", Const, 11},
+ {"StatusMovedPermanently", Const, 0},
+ {"StatusMultiStatus", Const, 7},
+ {"StatusMultipleChoices", Const, 0},
+ {"StatusNetworkAuthenticationRequired", Const, 6},
+ {"StatusNoContent", Const, 0},
+ {"StatusNonAuthoritativeInfo", Const, 0},
+ {"StatusNotAcceptable", Const, 0},
+ {"StatusNotExtended", Const, 7},
+ {"StatusNotFound", Const, 0},
+ {"StatusNotImplemented", Const, 0},
+ {"StatusNotModified", Const, 0},
+ {"StatusOK", Const, 0},
+ {"StatusPartialContent", Const, 0},
+ {"StatusPaymentRequired", Const, 0},
+ {"StatusPermanentRedirect", Const, 7},
+ {"StatusPreconditionFailed", Const, 0},
+ {"StatusPreconditionRequired", Const, 6},
+ {"StatusProcessing", Const, 7},
+ {"StatusProxyAuthRequired", Const, 0},
+ {"StatusRequestEntityTooLarge", Const, 0},
+ {"StatusRequestHeaderFieldsTooLarge", Const, 6},
+ {"StatusRequestTimeout", Const, 0},
+ {"StatusRequestURITooLong", Const, 0},
+ {"StatusRequestedRangeNotSatisfiable", Const, 0},
+ {"StatusResetContent", Const, 0},
+ {"StatusSeeOther", Const, 0},
+ {"StatusServiceUnavailable", Const, 0},
+ {"StatusSwitchingProtocols", Const, 0},
+ {"StatusTeapot", Const, 0},
+ {"StatusTemporaryRedirect", Const, 0},
+ {"StatusText", Func, 0},
+ {"StatusTooEarly", Const, 12},
+ {"StatusTooManyRequests", Const, 6},
+ {"StatusUnauthorized", Const, 0},
+ {"StatusUnavailableForLegalReasons", Const, 6},
+ {"StatusUnprocessableEntity", Const, 7},
+ {"StatusUnsupportedMediaType", Const, 0},
+ {"StatusUpgradeRequired", Const, 7},
+ {"StatusUseProxy", Const, 0},
+ {"StatusVariantAlsoNegotiates", Const, 7},
+ {"StripPrefix", Func, 0},
+ {"TimeFormat", Const, 0},
+ {"TimeoutHandler", Func, 0},
+ {"TrailerPrefix", Const, 8},
+ {"Transport", Type, 0},
+ {"Transport.Dial", Field, 0},
+ {"Transport.DialContext", Field, 7},
+ {"Transport.DialTLS", Field, 4},
+ {"Transport.DialTLSContext", Field, 14},
+ {"Transport.DisableCompression", Field, 0},
+ {"Transport.DisableKeepAlives", Field, 0},
+ {"Transport.ExpectContinueTimeout", Field, 6},
+ {"Transport.ForceAttemptHTTP2", Field, 13},
+ {"Transport.GetProxyConnectHeader", Field, 16},
+ {"Transport.IdleConnTimeout", Field, 7},
+ {"Transport.MaxConnsPerHost", Field, 11},
+ {"Transport.MaxIdleConns", Field, 7},
+ {"Transport.MaxIdleConnsPerHost", Field, 0},
+ {"Transport.MaxResponseHeaderBytes", Field, 7},
+ {"Transport.OnProxyConnectResponse", Field, 20},
+ {"Transport.Proxy", Field, 0},
+ {"Transport.ProxyConnectHeader", Field, 8},
+ {"Transport.ReadBufferSize", Field, 13},
+ {"Transport.ResponseHeaderTimeout", Field, 1},
+ {"Transport.TLSClientConfig", Field, 0},
+ {"Transport.TLSHandshakeTimeout", Field, 3},
+ {"Transport.TLSNextProto", Field, 6},
+ {"Transport.WriteBufferSize", Field, 13},
+ },
+ "net/http/cgi": {
+ {"(*Handler).ServeHTTP", Method, 0},
+ {"Handler", Type, 0},
+ {"Handler.Args", Field, 0},
+ {"Handler.Dir", Field, 0},
+ {"Handler.Env", Field, 0},
+ {"Handler.InheritEnv", Field, 0},
+ {"Handler.Logger", Field, 0},
+ {"Handler.Path", Field, 0},
+ {"Handler.PathLocationHandler", Field, 0},
+ {"Handler.Root", Field, 0},
+ {"Handler.Stderr", Field, 7},
+ {"Request", Func, 0},
+ {"RequestFromMap", Func, 0},
+ {"Serve", Func, 0},
+ },
+ "net/http/cookiejar": {
+ {"(*Jar).Cookies", Method, 1},
+ {"(*Jar).SetCookies", Method, 1},
+ {"Jar", Type, 1},
+ {"New", Func, 1},
+ {"Options", Type, 1},
+ {"Options.PublicSuffixList", Field, 1},
+ {"PublicSuffixList", Type, 1},
+ },
+ "net/http/fcgi": {
+ {"ErrConnClosed", Var, 5},
+ {"ErrRequestAborted", Var, 5},
+ {"ProcessEnv", Func, 9},
+ {"Serve", Func, 0},
+ },
+ "net/http/httptest": {
+ {"(*ResponseRecorder).Flush", Method, 0},
+ {"(*ResponseRecorder).Header", Method, 0},
+ {"(*ResponseRecorder).Result", Method, 7},
+ {"(*ResponseRecorder).Write", Method, 0},
+ {"(*ResponseRecorder).WriteHeader", Method, 0},
+ {"(*ResponseRecorder).WriteString", Method, 6},
+ {"(*Server).Certificate", Method, 9},
+ {"(*Server).Client", Method, 9},
+ {"(*Server).Close", Method, 0},
+ {"(*Server).CloseClientConnections", Method, 0},
+ {"(*Server).Start", Method, 0},
+ {"(*Server).StartTLS", Method, 0},
+ {"DefaultRemoteAddr", Const, 0},
+ {"NewRecorder", Func, 0},
+ {"NewRequest", Func, 7},
+ {"NewRequestWithContext", Func, 23},
+ {"NewServer", Func, 0},
+ {"NewTLSServer", Func, 0},
+ {"NewUnstartedServer", Func, 0},
+ {"ResponseRecorder", Type, 0},
+ {"ResponseRecorder.Body", Field, 0},
+ {"ResponseRecorder.Code", Field, 0},
+ {"ResponseRecorder.Flushed", Field, 0},
+ {"ResponseRecorder.HeaderMap", Field, 0},
+ {"Server", Type, 0},
+ {"Server.Config", Field, 0},
+ {"Server.EnableHTTP2", Field, 14},
+ {"Server.Listener", Field, 0},
+ {"Server.TLS", Field, 0},
+ {"Server.URL", Field, 0},
+ },
+ "net/http/httptrace": {
+ {"ClientTrace", Type, 7},
+ {"ClientTrace.ConnectDone", Field, 7},
+ {"ClientTrace.ConnectStart", Field, 7},
+ {"ClientTrace.DNSDone", Field, 7},
+ {"ClientTrace.DNSStart", Field, 7},
+ {"ClientTrace.GetConn", Field, 7},
+ {"ClientTrace.Got100Continue", Field, 7},
+ {"ClientTrace.Got1xxResponse", Field, 11},
+ {"ClientTrace.GotConn", Field, 7},
+ {"ClientTrace.GotFirstResponseByte", Field, 7},
+ {"ClientTrace.PutIdleConn", Field, 7},
+ {"ClientTrace.TLSHandshakeDone", Field, 8},
+ {"ClientTrace.TLSHandshakeStart", Field, 8},
+ {"ClientTrace.Wait100Continue", Field, 7},
+ {"ClientTrace.WroteHeaderField", Field, 11},
+ {"ClientTrace.WroteHeaders", Field, 7},
+ {"ClientTrace.WroteRequest", Field, 7},
+ {"ContextClientTrace", Func, 7},
+ {"DNSDoneInfo", Type, 7},
+ {"DNSDoneInfo.Addrs", Field, 7},
+ {"DNSDoneInfo.Coalesced", Field, 7},
+ {"DNSDoneInfo.Err", Field, 7},
+ {"DNSStartInfo", Type, 7},
+ {"DNSStartInfo.Host", Field, 7},
+ {"GotConnInfo", Type, 7},
+ {"GotConnInfo.Conn", Field, 7},
+ {"GotConnInfo.IdleTime", Field, 7},
+ {"GotConnInfo.Reused", Field, 7},
+ {"GotConnInfo.WasIdle", Field, 7},
+ {"WithClientTrace", Func, 7},
+ {"WroteRequestInfo", Type, 7},
+ {"WroteRequestInfo.Err", Field, 7},
+ },
+ "net/http/httputil": {
+ {"(*ClientConn).Close", Method, 0},
+ {"(*ClientConn).Do", Method, 0},
+ {"(*ClientConn).Hijack", Method, 0},
+ {"(*ClientConn).Pending", Method, 0},
+ {"(*ClientConn).Read", Method, 0},
+ {"(*ClientConn).Write", Method, 0},
+ {"(*ProxyRequest).SetURL", Method, 20},
+ {"(*ProxyRequest).SetXForwarded", Method, 20},
+ {"(*ReverseProxy).ServeHTTP", Method, 0},
+ {"(*ServerConn).Close", Method, 0},
+ {"(*ServerConn).Hijack", Method, 0},
+ {"(*ServerConn).Pending", Method, 0},
+ {"(*ServerConn).Read", Method, 0},
+ {"(*ServerConn).Write", Method, 0},
+ {"BufferPool", Type, 6},
+ {"ClientConn", Type, 0},
+ {"DumpRequest", Func, 0},
+ {"DumpRequestOut", Func, 0},
+ {"DumpResponse", Func, 0},
+ {"ErrClosed", Var, 0},
+ {"ErrLineTooLong", Var, 0},
+ {"ErrPersistEOF", Var, 0},
+ {"ErrPipeline", Var, 0},
+ {"NewChunkedReader", Func, 0},
+ {"NewChunkedWriter", Func, 0},
+ {"NewClientConn", Func, 0},
+ {"NewProxyClientConn", Func, 0},
+ {"NewServerConn", Func, 0},
+ {"NewSingleHostReverseProxy", Func, 0},
+ {"ProxyRequest", Type, 20},
+ {"ProxyRequest.In", Field, 20},
+ {"ProxyRequest.Out", Field, 20},
+ {"ReverseProxy", Type, 0},
+ {"ReverseProxy.BufferPool", Field, 6},
+ {"ReverseProxy.Director", Field, 0},
+ {"ReverseProxy.ErrorHandler", Field, 11},
+ {"ReverseProxy.ErrorLog", Field, 4},
+ {"ReverseProxy.FlushInterval", Field, 0},
+ {"ReverseProxy.ModifyResponse", Field, 8},
+ {"ReverseProxy.Rewrite", Field, 20},
+ {"ReverseProxy.Transport", Field, 0},
+ {"ServerConn", Type, 0},
+ },
+ "net/http/pprof": {
+ {"Cmdline", Func, 0},
+ {"Handler", Func, 0},
+ {"Index", Func, 0},
+ {"Profile", Func, 0},
+ {"Symbol", Func, 0},
+ {"Trace", Func, 5},
+ },
+ "net/mail": {
+ {"(*Address).String", Method, 0},
+ {"(*AddressParser).Parse", Method, 5},
+ {"(*AddressParser).ParseList", Method, 5},
+ {"(Header).AddressList", Method, 0},
+ {"(Header).Date", Method, 0},
+ {"(Header).Get", Method, 0},
+ {"Address", Type, 0},
+ {"Address.Address", Field, 0},
+ {"Address.Name", Field, 0},
+ {"AddressParser", Type, 5},
+ {"AddressParser.WordDecoder", Field, 5},
+ {"ErrHeaderNotPresent", Var, 0},
+ {"Header", Type, 0},
+ {"Message", Type, 0},
+ {"Message.Body", Field, 0},
+ {"Message.Header", Field, 0},
+ {"ParseAddress", Func, 1},
+ {"ParseAddressList", Func, 1},
+ {"ParseDate", Func, 8},
+ {"ReadMessage", Func, 0},
+ },
+ "net/netip": {
+ {"(*Addr).UnmarshalBinary", Method, 18},
+ {"(*Addr).UnmarshalText", Method, 18},
+ {"(*AddrPort).UnmarshalBinary", Method, 18},
+ {"(*AddrPort).UnmarshalText", Method, 18},
+ {"(*Prefix).UnmarshalBinary", Method, 18},
+ {"(*Prefix).UnmarshalText", Method, 18},
+ {"(Addr).AppendTo", Method, 18},
+ {"(Addr).As16", Method, 18},
+ {"(Addr).As4", Method, 18},
+ {"(Addr).AsSlice", Method, 18},
+ {"(Addr).BitLen", Method, 18},
+ {"(Addr).Compare", Method, 18},
+ {"(Addr).Is4", Method, 18},
+ {"(Addr).Is4In6", Method, 18},
+ {"(Addr).Is6", Method, 18},
+ {"(Addr).IsGlobalUnicast", Method, 18},
+ {"(Addr).IsInterfaceLocalMulticast", Method, 18},
+ {"(Addr).IsLinkLocalMulticast", Method, 18},
+ {"(Addr).IsLinkLocalUnicast", Method, 18},
+ {"(Addr).IsLoopback", Method, 18},
+ {"(Addr).IsMulticast", Method, 18},
+ {"(Addr).IsPrivate", Method, 18},
+ {"(Addr).IsUnspecified", Method, 18},
+ {"(Addr).IsValid", Method, 18},
+ {"(Addr).Less", Method, 18},
+ {"(Addr).MarshalBinary", Method, 18},
+ {"(Addr).MarshalText", Method, 18},
+ {"(Addr).Next", Method, 18},
+ {"(Addr).Prefix", Method, 18},
+ {"(Addr).Prev", Method, 18},
+ {"(Addr).String", Method, 18},
+ {"(Addr).StringExpanded", Method, 18},
+ {"(Addr).Unmap", Method, 18},
+ {"(Addr).WithZone", Method, 18},
+ {"(Addr).Zone", Method, 18},
+ {"(AddrPort).Addr", Method, 18},
+ {"(AddrPort).AppendTo", Method, 18},
+ {"(AddrPort).Compare", Method, 22},
+ {"(AddrPort).IsValid", Method, 18},
+ {"(AddrPort).MarshalBinary", Method, 18},
+ {"(AddrPort).MarshalText", Method, 18},
+ {"(AddrPort).Port", Method, 18},
+ {"(AddrPort).String", Method, 18},
+ {"(Prefix).Addr", Method, 18},
+ {"(Prefix).AppendTo", Method, 18},
+ {"(Prefix).Bits", Method, 18},
+ {"(Prefix).Contains", Method, 18},
+ {"(Prefix).IsSingleIP", Method, 18},
+ {"(Prefix).IsValid", Method, 18},
+ {"(Prefix).MarshalBinary", Method, 18},
+ {"(Prefix).MarshalText", Method, 18},
+ {"(Prefix).Masked", Method, 18},
+ {"(Prefix).Overlaps", Method, 18},
+ {"(Prefix).String", Method, 18},
+ {"Addr", Type, 18},
+ {"AddrFrom16", Func, 18},
+ {"AddrFrom4", Func, 18},
+ {"AddrFromSlice", Func, 18},
+ {"AddrPort", Type, 18},
+ {"AddrPortFrom", Func, 18},
+ {"IPv4Unspecified", Func, 18},
+ {"IPv6LinkLocalAllNodes", Func, 18},
+ {"IPv6LinkLocalAllRouters", Func, 20},
+ {"IPv6Loopback", Func, 20},
+ {"IPv6Unspecified", Func, 18},
+ {"MustParseAddr", Func, 18},
+ {"MustParseAddrPort", Func, 18},
+ {"MustParsePrefix", Func, 18},
+ {"ParseAddr", Func, 18},
+ {"ParseAddrPort", Func, 18},
+ {"ParsePrefix", Func, 18},
+ {"Prefix", Type, 18},
+ {"PrefixFrom", Func, 18},
+ },
+ "net/rpc": {
+ {"(*Client).Call", Method, 0},
+ {"(*Client).Close", Method, 0},
+ {"(*Client).Go", Method, 0},
+ {"(*Server).Accept", Method, 0},
+ {"(*Server).HandleHTTP", Method, 0},
+ {"(*Server).Register", Method, 0},
+ {"(*Server).RegisterName", Method, 0},
+ {"(*Server).ServeCodec", Method, 0},
+ {"(*Server).ServeConn", Method, 0},
+ {"(*Server).ServeHTTP", Method, 0},
+ {"(*Server).ServeRequest", Method, 0},
+ {"(ServerError).Error", Method, 0},
+ {"Accept", Func, 0},
+ {"Call", Type, 0},
+ {"Call.Args", Field, 0},
+ {"Call.Done", Field, 0},
+ {"Call.Error", Field, 0},
+ {"Call.Reply", Field, 0},
+ {"Call.ServiceMethod", Field, 0},
+ {"Client", Type, 0},
+ {"ClientCodec", Type, 0},
+ {"DefaultDebugPath", Const, 0},
+ {"DefaultRPCPath", Const, 0},
+ {"DefaultServer", Var, 0},
+ {"Dial", Func, 0},
+ {"DialHTTP", Func, 0},
+ {"DialHTTPPath", Func, 0},
+ {"ErrShutdown", Var, 0},
+ {"HandleHTTP", Func, 0},
+ {"NewClient", Func, 0},
+ {"NewClientWithCodec", Func, 0},
+ {"NewServer", Func, 0},
+ {"Register", Func, 0},
+ {"RegisterName", Func, 0},
+ {"Request", Type, 0},
+ {"Request.Seq", Field, 0},
+ {"Request.ServiceMethod", Field, 0},
+ {"Response", Type, 0},
+ {"Response.Error", Field, 0},
+ {"Response.Seq", Field, 0},
+ {"Response.ServiceMethod", Field, 0},
+ {"ServeCodec", Func, 0},
+ {"ServeConn", Func, 0},
+ {"ServeRequest", Func, 0},
+ {"Server", Type, 0},
+ {"ServerCodec", Type, 0},
+ {"ServerError", Type, 0},
+ },
+ "net/rpc/jsonrpc": {
+ {"Dial", Func, 0},
+ {"NewClient", Func, 0},
+ {"NewClientCodec", Func, 0},
+ {"NewServerCodec", Func, 0},
+ {"ServeConn", Func, 0},
+ },
+ "net/smtp": {
+ {"(*Client).Auth", Method, 0},
+ {"(*Client).Close", Method, 2},
+ {"(*Client).Data", Method, 0},
+ {"(*Client).Extension", Method, 0},
+ {"(*Client).Hello", Method, 1},
+ {"(*Client).Mail", Method, 0},
+ {"(*Client).Noop", Method, 10},
+ {"(*Client).Quit", Method, 0},
+ {"(*Client).Rcpt", Method, 0},
+ {"(*Client).Reset", Method, 0},
+ {"(*Client).StartTLS", Method, 0},
+ {"(*Client).TLSConnectionState", Method, 5},
+ {"(*Client).Verify", Method, 0},
+ {"Auth", Type, 0},
+ {"CRAMMD5Auth", Func, 0},
+ {"Client", Type, 0},
+ {"Client.Text", Field, 0},
+ {"Dial", Func, 0},
+ {"NewClient", Func, 0},
+ {"PlainAuth", Func, 0},
+ {"SendMail", Func, 0},
+ {"ServerInfo", Type, 0},
+ {"ServerInfo.Auth", Field, 0},
+ {"ServerInfo.Name", Field, 0},
+ {"ServerInfo.TLS", Field, 0},
+ },
+ "net/textproto": {
+ {"(*Conn).Close", Method, 0},
+ {"(*Conn).Cmd", Method, 0},
+ {"(*Conn).DotReader", Method, 0},
+ {"(*Conn).DotWriter", Method, 0},
+ {"(*Conn).EndRequest", Method, 0},
+ {"(*Conn).EndResponse", Method, 0},
+ {"(*Conn).Next", Method, 0},
+ {"(*Conn).PrintfLine", Method, 0},
+ {"(*Conn).ReadCodeLine", Method, 0},
+ {"(*Conn).ReadContinuedLine", Method, 0},
+ {"(*Conn).ReadContinuedLineBytes", Method, 0},
+ {"(*Conn).ReadDotBytes", Method, 0},
+ {"(*Conn).ReadDotLines", Method, 0},
+ {"(*Conn).ReadLine", Method, 0},
+ {"(*Conn).ReadLineBytes", Method, 0},
+ {"(*Conn).ReadMIMEHeader", Method, 0},
+ {"(*Conn).ReadResponse", Method, 0},
+ {"(*Conn).StartRequest", Method, 0},
+ {"(*Conn).StartResponse", Method, 0},
+ {"(*Error).Error", Method, 0},
+ {"(*Pipeline).EndRequest", Method, 0},
+ {"(*Pipeline).EndResponse", Method, 0},
+ {"(*Pipeline).Next", Method, 0},
+ {"(*Pipeline).StartRequest", Method, 0},
+ {"(*Pipeline).StartResponse", Method, 0},
+ {"(*Reader).DotReader", Method, 0},
+ {"(*Reader).ReadCodeLine", Method, 0},
+ {"(*Reader).ReadContinuedLine", Method, 0},
+ {"(*Reader).ReadContinuedLineBytes", Method, 0},
+ {"(*Reader).ReadDotBytes", Method, 0},
+ {"(*Reader).ReadDotLines", Method, 0},
+ {"(*Reader).ReadLine", Method, 0},
+ {"(*Reader).ReadLineBytes", Method, 0},
+ {"(*Reader).ReadMIMEHeader", Method, 0},
+ {"(*Reader).ReadResponse", Method, 0},
+ {"(*Writer).DotWriter", Method, 0},
+ {"(*Writer).PrintfLine", Method, 0},
+ {"(MIMEHeader).Add", Method, 0},
+ {"(MIMEHeader).Del", Method, 0},
+ {"(MIMEHeader).Get", Method, 0},
+ {"(MIMEHeader).Set", Method, 0},
+ {"(MIMEHeader).Values", Method, 14},
+ {"(ProtocolError).Error", Method, 0},
+ {"CanonicalMIMEHeaderKey", Func, 0},
+ {"Conn", Type, 0},
+ {"Conn.Pipeline", Field, 0},
+ {"Conn.Reader", Field, 0},
+ {"Conn.Writer", Field, 0},
+ {"Dial", Func, 0},
+ {"Error", Type, 0},
+ {"Error.Code", Field, 0},
+ {"Error.Msg", Field, 0},
+ {"MIMEHeader", Type, 0},
+ {"NewConn", Func, 0},
+ {"NewReader", Func, 0},
+ {"NewWriter", Func, 0},
+ {"Pipeline", Type, 0},
+ {"ProtocolError", Type, 0},
+ {"Reader", Type, 0},
+ {"Reader.R", Field, 0},
+ {"TrimBytes", Func, 1},
+ {"TrimString", Func, 1},
+ {"Writer", Type, 0},
+ {"Writer.W", Field, 0},
+ },
+ "net/url": {
+ {"(*Error).Error", Method, 0},
+ {"(*Error).Temporary", Method, 6},
+ {"(*Error).Timeout", Method, 6},
+ {"(*Error).Unwrap", Method, 13},
+ {"(*URL).EscapedFragment", Method, 15},
+ {"(*URL).EscapedPath", Method, 5},
+ {"(*URL).Hostname", Method, 8},
+ {"(*URL).IsAbs", Method, 0},
+ {"(*URL).JoinPath", Method, 19},
+ {"(*URL).MarshalBinary", Method, 8},
+ {"(*URL).Parse", Method, 0},
+ {"(*URL).Port", Method, 8},
+ {"(*URL).Query", Method, 0},
+ {"(*URL).Redacted", Method, 15},
+ {"(*URL).RequestURI", Method, 0},
+ {"(*URL).ResolveReference", Method, 0},
+ {"(*URL).String", Method, 0},
+ {"(*URL).UnmarshalBinary", Method, 8},
+ {"(*Userinfo).Password", Method, 0},
+ {"(*Userinfo).String", Method, 0},
+ {"(*Userinfo).Username", Method, 0},
+ {"(EscapeError).Error", Method, 0},
+ {"(InvalidHostError).Error", Method, 6},
+ {"(Values).Add", Method, 0},
+ {"(Values).Del", Method, 0},
+ {"(Values).Encode", Method, 0},
+ {"(Values).Get", Method, 0},
+ {"(Values).Has", Method, 17},
+ {"(Values).Set", Method, 0},
+ {"Error", Type, 0},
+ {"Error.Err", Field, 0},
+ {"Error.Op", Field, 0},
+ {"Error.URL", Field, 0},
+ {"EscapeError", Type, 0},
+ {"InvalidHostError", Type, 6},
+ {"JoinPath", Func, 19},
+ {"Parse", Func, 0},
+ {"ParseQuery", Func, 0},
+ {"ParseRequestURI", Func, 0},
+ {"PathEscape", Func, 8},
+ {"PathUnescape", Func, 8},
+ {"QueryEscape", Func, 0},
+ {"QueryUnescape", Func, 0},
+ {"URL", Type, 0},
+ {"URL.ForceQuery", Field, 7},
+ {"URL.Fragment", Field, 0},
+ {"URL.Host", Field, 0},
+ {"URL.OmitHost", Field, 19},
+ {"URL.Opaque", Field, 0},
+ {"URL.Path", Field, 0},
+ {"URL.RawFragment", Field, 15},
+ {"URL.RawPath", Field, 5},
+ {"URL.RawQuery", Field, 0},
+ {"URL.Scheme", Field, 0},
+ {"URL.User", Field, 0},
+ {"User", Func, 0},
+ {"UserPassword", Func, 0},
+ {"Userinfo", Type, 0},
+ {"Values", Type, 0},
+ },
+ "os": {
+ {"(*File).Chdir", Method, 0},
+ {"(*File).Chmod", Method, 0},
+ {"(*File).Chown", Method, 0},
+ {"(*File).Close", Method, 0},
+ {"(*File).Fd", Method, 0},
+ {"(*File).Name", Method, 0},
+ {"(*File).Read", Method, 0},
+ {"(*File).ReadAt", Method, 0},
+ {"(*File).ReadDir", Method, 16},
+ {"(*File).ReadFrom", Method, 15},
+ {"(*File).Readdir", Method, 0},
+ {"(*File).Readdirnames", Method, 0},
+ {"(*File).Seek", Method, 0},
+ {"(*File).SetDeadline", Method, 10},
+ {"(*File).SetReadDeadline", Method, 10},
+ {"(*File).SetWriteDeadline", Method, 10},
+ {"(*File).Stat", Method, 0},
+ {"(*File).Sync", Method, 0},
+ {"(*File).SyscallConn", Method, 12},
+ {"(*File).Truncate", Method, 0},
+ {"(*File).Write", Method, 0},
+ {"(*File).WriteAt", Method, 0},
+ {"(*File).WriteString", Method, 0},
+ {"(*File).WriteTo", Method, 22},
+ {"(*LinkError).Error", Method, 0},
+ {"(*LinkError).Unwrap", Method, 13},
+ {"(*PathError).Error", Method, 0},
+ {"(*PathError).Timeout", Method, 10},
+ {"(*PathError).Unwrap", Method, 13},
+ {"(*Process).Kill", Method, 0},
+ {"(*Process).Release", Method, 0},
+ {"(*Process).Signal", Method, 0},
+ {"(*Process).Wait", Method, 0},
+ {"(*ProcessState).ExitCode", Method, 12},
+ {"(*ProcessState).Exited", Method, 0},
+ {"(*ProcessState).Pid", Method, 0},
+ {"(*ProcessState).String", Method, 0},
+ {"(*ProcessState).Success", Method, 0},
+ {"(*ProcessState).Sys", Method, 0},
+ {"(*ProcessState).SysUsage", Method, 0},
+ {"(*ProcessState).SystemTime", Method, 0},
+ {"(*ProcessState).UserTime", Method, 0},
+ {"(*SyscallError).Error", Method, 0},
+ {"(*SyscallError).Timeout", Method, 10},
+ {"(*SyscallError).Unwrap", Method, 13},
+ {"(FileMode).IsDir", Method, 0},
+ {"(FileMode).IsRegular", Method, 1},
+ {"(FileMode).Perm", Method, 0},
+ {"(FileMode).String", Method, 0},
+ {"Args", Var, 0},
+ {"Chdir", Func, 0},
+ {"Chmod", Func, 0},
+ {"Chown", Func, 0},
+ {"Chtimes", Func, 0},
+ {"Clearenv", Func, 0},
+ {"CopyFS", Func, 23},
+ {"Create", Func, 0},
+ {"CreateTemp", Func, 16},
+ {"DevNull", Const, 0},
+ {"DirEntry", Type, 16},
+ {"DirFS", Func, 16},
+ {"Environ", Func, 0},
+ {"ErrClosed", Var, 8},
+ {"ErrDeadlineExceeded", Var, 15},
+ {"ErrExist", Var, 0},
+ {"ErrInvalid", Var, 0},
+ {"ErrNoDeadline", Var, 10},
+ {"ErrNotExist", Var, 0},
+ {"ErrPermission", Var, 0},
+ {"ErrProcessDone", Var, 16},
+ {"Executable", Func, 8},
+ {"Exit", Func, 0},
+ {"Expand", Func, 0},
+ {"ExpandEnv", Func, 0},
+ {"File", Type, 0},
+ {"FileInfo", Type, 0},
+ {"FileMode", Type, 0},
+ {"FindProcess", Func, 0},
+ {"Getegid", Func, 0},
+ {"Getenv", Func, 0},
+ {"Geteuid", Func, 0},
+ {"Getgid", Func, 0},
+ {"Getgroups", Func, 0},
+ {"Getpagesize", Func, 0},
+ {"Getpid", Func, 0},
+ {"Getppid", Func, 0},
+ {"Getuid", Func, 0},
+ {"Getwd", Func, 0},
+ {"Hostname", Func, 0},
+ {"Interrupt", Var, 0},
+ {"IsExist", Func, 0},
+ {"IsNotExist", Func, 0},
+ {"IsPathSeparator", Func, 0},
+ {"IsPermission", Func, 0},
+ {"IsTimeout", Func, 10},
+ {"Kill", Var, 0},
+ {"Lchown", Func, 0},
+ {"Link", Func, 0},
+ {"LinkError", Type, 0},
+ {"LinkError.Err", Field, 0},
+ {"LinkError.New", Field, 0},
+ {"LinkError.Old", Field, 0},
+ {"LinkError.Op", Field, 0},
+ {"LookupEnv", Func, 5},
+ {"Lstat", Func, 0},
+ {"Mkdir", Func, 0},
+ {"MkdirAll", Func, 0},
+ {"MkdirTemp", Func, 16},
+ {"ModeAppend", Const, 0},
+ {"ModeCharDevice", Const, 0},
+ {"ModeDevice", Const, 0},
+ {"ModeDir", Const, 0},
+ {"ModeExclusive", Const, 0},
+ {"ModeIrregular", Const, 11},
+ {"ModeNamedPipe", Const, 0},
+ {"ModePerm", Const, 0},
+ {"ModeSetgid", Const, 0},
+ {"ModeSetuid", Const, 0},
+ {"ModeSocket", Const, 0},
+ {"ModeSticky", Const, 0},
+ {"ModeSymlink", Const, 0},
+ {"ModeTemporary", Const, 0},
+ {"ModeType", Const, 0},
+ {"NewFile", Func, 0},
+ {"NewSyscallError", Func, 0},
+ {"O_APPEND", Const, 0},
+ {"O_CREATE", Const, 0},
+ {"O_EXCL", Const, 0},
+ {"O_RDONLY", Const, 0},
+ {"O_RDWR", Const, 0},
+ {"O_SYNC", Const, 0},
+ {"O_TRUNC", Const, 0},
+ {"O_WRONLY", Const, 0},
+ {"Open", Func, 0},
+ {"OpenFile", Func, 0},
+ {"PathError", Type, 0},
+ {"PathError.Err", Field, 0},
+ {"PathError.Op", Field, 0},
+ {"PathError.Path", Field, 0},
+ {"PathListSeparator", Const, 0},
+ {"PathSeparator", Const, 0},
+ {"Pipe", Func, 0},
+ {"ProcAttr", Type, 0},
+ {"ProcAttr.Dir", Field, 0},
+ {"ProcAttr.Env", Field, 0},
+ {"ProcAttr.Files", Field, 0},
+ {"ProcAttr.Sys", Field, 0},
+ {"Process", Type, 0},
+ {"Process.Pid", Field, 0},
+ {"ProcessState", Type, 0},
+ {"ReadDir", Func, 16},
+ {"ReadFile", Func, 16},
+ {"Readlink", Func, 0},
+ {"Remove", Func, 0},
+ {"RemoveAll", Func, 0},
+ {"Rename", Func, 0},
+ {"SEEK_CUR", Const, 0},
+ {"SEEK_END", Const, 0},
+ {"SEEK_SET", Const, 0},
+ {"SameFile", Func, 0},
+ {"Setenv", Func, 0},
+ {"Signal", Type, 0},
+ {"StartProcess", Func, 0},
+ {"Stat", Func, 0},
+ {"Stderr", Var, 0},
+ {"Stdin", Var, 0},
+ {"Stdout", Var, 0},
+ {"Symlink", Func, 0},
+ {"SyscallError", Type, 0},
+ {"SyscallError.Err", Field, 0},
+ {"SyscallError.Syscall", Field, 0},
+ {"TempDir", Func, 0},
+ {"Truncate", Func, 0},
+ {"Unsetenv", Func, 4},
+ {"UserCacheDir", Func, 11},
+ {"UserConfigDir", Func, 13},
+ {"UserHomeDir", Func, 12},
+ {"WriteFile", Func, 16},
+ },
+ "os/exec": {
+ {"(*Cmd).CombinedOutput", Method, 0},
+ {"(*Cmd).Environ", Method, 19},
+ {"(*Cmd).Output", Method, 0},
+ {"(*Cmd).Run", Method, 0},
+ {"(*Cmd).Start", Method, 0},
+ {"(*Cmd).StderrPipe", Method, 0},
+ {"(*Cmd).StdinPipe", Method, 0},
+ {"(*Cmd).StdoutPipe", Method, 0},
+ {"(*Cmd).String", Method, 13},
+ {"(*Cmd).Wait", Method, 0},
+ {"(*Error).Error", Method, 0},
+ {"(*Error).Unwrap", Method, 13},
+ {"(*ExitError).Error", Method, 0},
+ {"(ExitError).ExitCode", Method, 12},
+ {"(ExitError).Exited", Method, 0},
+ {"(ExitError).Pid", Method, 0},
+ {"(ExitError).String", Method, 0},
+ {"(ExitError).Success", Method, 0},
+ {"(ExitError).Sys", Method, 0},
+ {"(ExitError).SysUsage", Method, 0},
+ {"(ExitError).SystemTime", Method, 0},
+ {"(ExitError).UserTime", Method, 0},
+ {"Cmd", Type, 0},
+ {"Cmd.Args", Field, 0},
+ {"Cmd.Cancel", Field, 20},
+ {"Cmd.Dir", Field, 0},
+ {"Cmd.Env", Field, 0},
+ {"Cmd.Err", Field, 19},
+ {"Cmd.ExtraFiles", Field, 0},
+ {"Cmd.Path", Field, 0},
+ {"Cmd.Process", Field, 0},
+ {"Cmd.ProcessState", Field, 0},
+ {"Cmd.Stderr", Field, 0},
+ {"Cmd.Stdin", Field, 0},
+ {"Cmd.Stdout", Field, 0},
+ {"Cmd.SysProcAttr", Field, 0},
+ {"Cmd.WaitDelay", Field, 20},
+ {"Command", Func, 0},
+ {"CommandContext", Func, 7},
+ {"ErrDot", Var, 19},
+ {"ErrNotFound", Var, 0},
+ {"ErrWaitDelay", Var, 20},
+ {"Error", Type, 0},
+ {"Error.Err", Field, 0},
+ {"Error.Name", Field, 0},
+ {"ExitError", Type, 0},
+ {"ExitError.ProcessState", Field, 0},
+ {"ExitError.Stderr", Field, 6},
+ {"LookPath", Func, 0},
+ },
+ "os/signal": {
+ {"Ignore", Func, 5},
+ {"Ignored", Func, 11},
+ {"Notify", Func, 0},
+ {"NotifyContext", Func, 16},
+ {"Reset", Func, 5},
+ {"Stop", Func, 1},
+ },
+ "os/user": {
+ {"(*User).GroupIds", Method, 7},
+ {"(UnknownGroupError).Error", Method, 7},
+ {"(UnknownGroupIdError).Error", Method, 7},
+ {"(UnknownUserError).Error", Method, 0},
+ {"(UnknownUserIdError).Error", Method, 0},
+ {"Current", Func, 0},
+ {"Group", Type, 7},
+ {"Group.Gid", Field, 7},
+ {"Group.Name", Field, 7},
+ {"Lookup", Func, 0},
+ {"LookupGroup", Func, 7},
+ {"LookupGroupId", Func, 7},
+ {"LookupId", Func, 0},
+ {"UnknownGroupError", Type, 7},
+ {"UnknownGroupIdError", Type, 7},
+ {"UnknownUserError", Type, 0},
+ {"UnknownUserIdError", Type, 0},
+ {"User", Type, 0},
+ {"User.Gid", Field, 0},
+ {"User.HomeDir", Field, 0},
+ {"User.Name", Field, 0},
+ {"User.Uid", Field, 0},
+ {"User.Username", Field, 0},
+ },
+ "path": {
+ {"Base", Func, 0},
+ {"Clean", Func, 0},
+ {"Dir", Func, 0},
+ {"ErrBadPattern", Var, 0},
+ {"Ext", Func, 0},
+ {"IsAbs", Func, 0},
+ {"Join", Func, 0},
+ {"Match", Func, 0},
+ {"Split", Func, 0},
+ },
+ "path/filepath": {
+ {"Abs", Func, 0},
+ {"Base", Func, 0},
+ {"Clean", Func, 0},
+ {"Dir", Func, 0},
+ {"ErrBadPattern", Var, 0},
+ {"EvalSymlinks", Func, 0},
+ {"Ext", Func, 0},
+ {"FromSlash", Func, 0},
+ {"Glob", Func, 0},
+ {"HasPrefix", Func, 0},
+ {"IsAbs", Func, 0},
+ {"IsLocal", Func, 20},
+ {"Join", Func, 0},
+ {"ListSeparator", Const, 0},
+ {"Localize", Func, 23},
+ {"Match", Func, 0},
+ {"Rel", Func, 0},
+ {"Separator", Const, 0},
+ {"SkipAll", Var, 20},
+ {"SkipDir", Var, 0},
+ {"Split", Func, 0},
+ {"SplitList", Func, 0},
+ {"ToSlash", Func, 0},
+ {"VolumeName", Func, 0},
+ {"Walk", Func, 0},
+ {"WalkDir", Func, 16},
+ {"WalkFunc", Type, 0},
+ },
+ "plugin": {
+ {"(*Plugin).Lookup", Method, 8},
+ {"Open", Func, 8},
+ {"Plugin", Type, 8},
+ {"Symbol", Type, 8},
+ },
+ "reflect": {
+ {"(*MapIter).Key", Method, 12},
+ {"(*MapIter).Next", Method, 12},
+ {"(*MapIter).Reset", Method, 18},
+ {"(*MapIter).Value", Method, 12},
+ {"(*ValueError).Error", Method, 0},
+ {"(ChanDir).String", Method, 0},
+ {"(Kind).String", Method, 0},
+ {"(Method).IsExported", Method, 17},
+ {"(StructField).IsExported", Method, 17},
+ {"(StructTag).Get", Method, 0},
+ {"(StructTag).Lookup", Method, 7},
+ {"(Value).Addr", Method, 0},
+ {"(Value).Bool", Method, 0},
+ {"(Value).Bytes", Method, 0},
+ {"(Value).Call", Method, 0},
+ {"(Value).CallSlice", Method, 0},
+ {"(Value).CanAddr", Method, 0},
+ {"(Value).CanComplex", Method, 18},
+ {"(Value).CanConvert", Method, 17},
+ {"(Value).CanFloat", Method, 18},
+ {"(Value).CanInt", Method, 18},
+ {"(Value).CanInterface", Method, 0},
+ {"(Value).CanSet", Method, 0},
+ {"(Value).CanUint", Method, 18},
+ {"(Value).Cap", Method, 0},
+ {"(Value).Clear", Method, 21},
+ {"(Value).Close", Method, 0},
+ {"(Value).Comparable", Method, 20},
+ {"(Value).Complex", Method, 0},
+ {"(Value).Convert", Method, 1},
+ {"(Value).Elem", Method, 0},
+ {"(Value).Equal", Method, 20},
+ {"(Value).Field", Method, 0},
+ {"(Value).FieldByIndex", Method, 0},
+ {"(Value).FieldByIndexErr", Method, 18},
+ {"(Value).FieldByName", Method, 0},
+ {"(Value).FieldByNameFunc", Method, 0},
+ {"(Value).Float", Method, 0},
+ {"(Value).Grow", Method, 20},
+ {"(Value).Index", Method, 0},
+ {"(Value).Int", Method, 0},
+ {"(Value).Interface", Method, 0},
+ {"(Value).InterfaceData", Method, 0},
+ {"(Value).IsNil", Method, 0},
+ {"(Value).IsValid", Method, 0},
+ {"(Value).IsZero", Method, 13},
+ {"(Value).Kind", Method, 0},
+ {"(Value).Len", Method, 0},
+ {"(Value).MapIndex", Method, 0},
+ {"(Value).MapKeys", Method, 0},
+ {"(Value).MapRange", Method, 12},
+ {"(Value).Method", Method, 0},
+ {"(Value).MethodByName", Method, 0},
+ {"(Value).NumField", Method, 0},
+ {"(Value).NumMethod", Method, 0},
+ {"(Value).OverflowComplex", Method, 0},
+ {"(Value).OverflowFloat", Method, 0},
+ {"(Value).OverflowInt", Method, 0},
+ {"(Value).OverflowUint", Method, 0},
+ {"(Value).Pointer", Method, 0},
+ {"(Value).Recv", Method, 0},
+ {"(Value).Send", Method, 0},
+ {"(Value).Seq", Method, 23},
+ {"(Value).Seq2", Method, 23},
+ {"(Value).Set", Method, 0},
+ {"(Value).SetBool", Method, 0},
+ {"(Value).SetBytes", Method, 0},
+ {"(Value).SetCap", Method, 2},
+ {"(Value).SetComplex", Method, 0},
+ {"(Value).SetFloat", Method, 0},
+ {"(Value).SetInt", Method, 0},
+ {"(Value).SetIterKey", Method, 18},
+ {"(Value).SetIterValue", Method, 18},
+ {"(Value).SetLen", Method, 0},
+ {"(Value).SetMapIndex", Method, 0},
+ {"(Value).SetPointer", Method, 0},
+ {"(Value).SetString", Method, 0},
+ {"(Value).SetUint", Method, 0},
+ {"(Value).SetZero", Method, 20},
+ {"(Value).Slice", Method, 0},
+ {"(Value).Slice3", Method, 2},
+ {"(Value).String", Method, 0},
+ {"(Value).TryRecv", Method, 0},
+ {"(Value).TrySend", Method, 0},
+ {"(Value).Type", Method, 0},
+ {"(Value).Uint", Method, 0},
+ {"(Value).UnsafeAddr", Method, 0},
+ {"(Value).UnsafePointer", Method, 18},
+ {"Append", Func, 0},
+ {"AppendSlice", Func, 0},
+ {"Array", Const, 0},
+ {"ArrayOf", Func, 5},
+ {"Bool", Const, 0},
+ {"BothDir", Const, 0},
+ {"Chan", Const, 0},
+ {"ChanDir", Type, 0},
+ {"ChanOf", Func, 1},
+ {"Complex128", Const, 0},
+ {"Complex64", Const, 0},
+ {"Copy", Func, 0},
+ {"DeepEqual", Func, 0},
+ {"Float32", Const, 0},
+ {"Float64", Const, 0},
+ {"Func", Const, 0},
+ {"FuncOf", Func, 5},
+ {"Indirect", Func, 0},
+ {"Int", Const, 0},
+ {"Int16", Const, 0},
+ {"Int32", Const, 0},
+ {"Int64", Const, 0},
+ {"Int8", Const, 0},
+ {"Interface", Const, 0},
+ {"Invalid", Const, 0},
+ {"Kind", Type, 0},
+ {"MakeChan", Func, 0},
+ {"MakeFunc", Func, 1},
+ {"MakeMap", Func, 0},
+ {"MakeMapWithSize", Func, 9},
+ {"MakeSlice", Func, 0},
+ {"Map", Const, 0},
+ {"MapIter", Type, 12},
+ {"MapOf", Func, 1},
+ {"Method", Type, 0},
+ {"Method.Func", Field, 0},
+ {"Method.Index", Field, 0},
+ {"Method.Name", Field, 0},
+ {"Method.PkgPath", Field, 0},
+ {"Method.Type", Field, 0},
+ {"New", Func, 0},
+ {"NewAt", Func, 0},
+ {"Pointer", Const, 18},
+ {"PointerTo", Func, 18},
+ {"Ptr", Const, 0},
+ {"PtrTo", Func, 0},
+ {"RecvDir", Const, 0},
+ {"Select", Func, 1},
+ {"SelectCase", Type, 1},
+ {"SelectCase.Chan", Field, 1},
+ {"SelectCase.Dir", Field, 1},
+ {"SelectCase.Send", Field, 1},
+ {"SelectDefault", Const, 1},
+ {"SelectDir", Type, 1},
+ {"SelectRecv", Const, 1},
+ {"SelectSend", Const, 1},
+ {"SendDir", Const, 0},
+ {"Slice", Const, 0},
+ {"SliceAt", Func, 23},
+ {"SliceHeader", Type, 0},
+ {"SliceHeader.Cap", Field, 0},
+ {"SliceHeader.Data", Field, 0},
+ {"SliceHeader.Len", Field, 0},
+ {"SliceOf", Func, 1},
+ {"String", Const, 0},
+ {"StringHeader", Type, 0},
+ {"StringHeader.Data", Field, 0},
+ {"StringHeader.Len", Field, 0},
+ {"Struct", Const, 0},
+ {"StructField", Type, 0},
+ {"StructField.Anonymous", Field, 0},
+ {"StructField.Index", Field, 0},
+ {"StructField.Name", Field, 0},
+ {"StructField.Offset", Field, 0},
+ {"StructField.PkgPath", Field, 0},
+ {"StructField.Tag", Field, 0},
+ {"StructField.Type", Field, 0},
+ {"StructOf", Func, 7},
+ {"StructTag", Type, 0},
+ {"Swapper", Func, 8},
+ {"Type", Type, 0},
+ {"TypeFor", Func, 22},
+ {"TypeOf", Func, 0},
+ {"Uint", Const, 0},
+ {"Uint16", Const, 0},
+ {"Uint32", Const, 0},
+ {"Uint64", Const, 0},
+ {"Uint8", Const, 0},
+ {"Uintptr", Const, 0},
+ {"UnsafePointer", Const, 0},
+ {"Value", Type, 0},
+ {"ValueError", Type, 0},
+ {"ValueError.Kind", Field, 0},
+ {"ValueError.Method", Field, 0},
+ {"ValueOf", Func, 0},
+ {"VisibleFields", Func, 17},
+ {"Zero", Func, 0},
+ },
+ "regexp": {
+ {"(*Regexp).Copy", Method, 6},
+ {"(*Regexp).Expand", Method, 0},
+ {"(*Regexp).ExpandString", Method, 0},
+ {"(*Regexp).Find", Method, 0},
+ {"(*Regexp).FindAll", Method, 0},
+ {"(*Regexp).FindAllIndex", Method, 0},
+ {"(*Regexp).FindAllString", Method, 0},
+ {"(*Regexp).FindAllStringIndex", Method, 0},
+ {"(*Regexp).FindAllStringSubmatch", Method, 0},
+ {"(*Regexp).FindAllStringSubmatchIndex", Method, 0},
+ {"(*Regexp).FindAllSubmatch", Method, 0},
+ {"(*Regexp).FindAllSubmatchIndex", Method, 0},
+ {"(*Regexp).FindIndex", Method, 0},
+ {"(*Regexp).FindReaderIndex", Method, 0},
+ {"(*Regexp).FindReaderSubmatchIndex", Method, 0},
+ {"(*Regexp).FindString", Method, 0},
+ {"(*Regexp).FindStringIndex", Method, 0},
+ {"(*Regexp).FindStringSubmatch", Method, 0},
+ {"(*Regexp).FindStringSubmatchIndex", Method, 0},
+ {"(*Regexp).FindSubmatch", Method, 0},
+ {"(*Regexp).FindSubmatchIndex", Method, 0},
+ {"(*Regexp).LiteralPrefix", Method, 0},
+ {"(*Regexp).Longest", Method, 1},
+ {"(*Regexp).MarshalText", Method, 21},
+ {"(*Regexp).Match", Method, 0},
+ {"(*Regexp).MatchReader", Method, 0},
+ {"(*Regexp).MatchString", Method, 0},
+ {"(*Regexp).NumSubexp", Method, 0},
+ {"(*Regexp).ReplaceAll", Method, 0},
+ {"(*Regexp).ReplaceAllFunc", Method, 0},
+ {"(*Regexp).ReplaceAllLiteral", Method, 0},
+ {"(*Regexp).ReplaceAllLiteralString", Method, 0},
+ {"(*Regexp).ReplaceAllString", Method, 0},
+ {"(*Regexp).ReplaceAllStringFunc", Method, 0},
+ {"(*Regexp).Split", Method, 1},
+ {"(*Regexp).String", Method, 0},
+ {"(*Regexp).SubexpIndex", Method, 15},
+ {"(*Regexp).SubexpNames", Method, 0},
+ {"(*Regexp).UnmarshalText", Method, 21},
+ {"Compile", Func, 0},
+ {"CompilePOSIX", Func, 0},
+ {"Match", Func, 0},
+ {"MatchReader", Func, 0},
+ {"MatchString", Func, 0},
+ {"MustCompile", Func, 0},
+ {"MustCompilePOSIX", Func, 0},
+ {"QuoteMeta", Func, 0},
+ {"Regexp", Type, 0},
+ },
+ "regexp/syntax": {
+ {"(*Error).Error", Method, 0},
+ {"(*Inst).MatchEmptyWidth", Method, 0},
+ {"(*Inst).MatchRune", Method, 0},
+ {"(*Inst).MatchRunePos", Method, 3},
+ {"(*Inst).String", Method, 0},
+ {"(*Prog).Prefix", Method, 0},
+ {"(*Prog).StartCond", Method, 0},
+ {"(*Prog).String", Method, 0},
+ {"(*Regexp).CapNames", Method, 0},
+ {"(*Regexp).Equal", Method, 0},
+ {"(*Regexp).MaxCap", Method, 0},
+ {"(*Regexp).Simplify", Method, 0},
+ {"(*Regexp).String", Method, 0},
+ {"(ErrorCode).String", Method, 0},
+ {"(InstOp).String", Method, 3},
+ {"(Op).String", Method, 11},
+ {"ClassNL", Const, 0},
+ {"Compile", Func, 0},
+ {"DotNL", Const, 0},
+ {"EmptyBeginLine", Const, 0},
+ {"EmptyBeginText", Const, 0},
+ {"EmptyEndLine", Const, 0},
+ {"EmptyEndText", Const, 0},
+ {"EmptyNoWordBoundary", Const, 0},
+ {"EmptyOp", Type, 0},
+ {"EmptyOpContext", Func, 0},
+ {"EmptyWordBoundary", Const, 0},
+ {"ErrInternalError", Const, 0},
+ {"ErrInvalidCharClass", Const, 0},
+ {"ErrInvalidCharRange", Const, 0},
+ {"ErrInvalidEscape", Const, 0},
+ {"ErrInvalidNamedCapture", Const, 0},
+ {"ErrInvalidPerlOp", Const, 0},
+ {"ErrInvalidRepeatOp", Const, 0},
+ {"ErrInvalidRepeatSize", Const, 0},
+ {"ErrInvalidUTF8", Const, 0},
+ {"ErrLarge", Const, 20},
+ {"ErrMissingBracket", Const, 0},
+ {"ErrMissingParen", Const, 0},
+ {"ErrMissingRepeatArgument", Const, 0},
+ {"ErrNestingDepth", Const, 19},
+ {"ErrTrailingBackslash", Const, 0},
+ {"ErrUnexpectedParen", Const, 1},
+ {"Error", Type, 0},
+ {"Error.Code", Field, 0},
+ {"Error.Expr", Field, 0},
+ {"ErrorCode", Type, 0},
+ {"Flags", Type, 0},
+ {"FoldCase", Const, 0},
+ {"Inst", Type, 0},
+ {"Inst.Arg", Field, 0},
+ {"Inst.Op", Field, 0},
+ {"Inst.Out", Field, 0},
+ {"Inst.Rune", Field, 0},
+ {"InstAlt", Const, 0},
+ {"InstAltMatch", Const, 0},
+ {"InstCapture", Const, 0},
+ {"InstEmptyWidth", Const, 0},
+ {"InstFail", Const, 0},
+ {"InstMatch", Const, 0},
+ {"InstNop", Const, 0},
+ {"InstOp", Type, 0},
+ {"InstRune", Const, 0},
+ {"InstRune1", Const, 0},
+ {"InstRuneAny", Const, 0},
+ {"InstRuneAnyNotNL", Const, 0},
+ {"IsWordChar", Func, 0},
+ {"Literal", Const, 0},
+ {"MatchNL", Const, 0},
+ {"NonGreedy", Const, 0},
+ {"OneLine", Const, 0},
+ {"Op", Type, 0},
+ {"OpAlternate", Const, 0},
+ {"OpAnyChar", Const, 0},
+ {"OpAnyCharNotNL", Const, 0},
+ {"OpBeginLine", Const, 0},
+ {"OpBeginText", Const, 0},
+ {"OpCapture", Const, 0},
+ {"OpCharClass", Const, 0},
+ {"OpConcat", Const, 0},
+ {"OpEmptyMatch", Const, 0},
+ {"OpEndLine", Const, 0},
+ {"OpEndText", Const, 0},
+ {"OpLiteral", Const, 0},
+ {"OpNoMatch", Const, 0},
+ {"OpNoWordBoundary", Const, 0},
+ {"OpPlus", Const, 0},
+ {"OpQuest", Const, 0},
+ {"OpRepeat", Const, 0},
+ {"OpStar", Const, 0},
+ {"OpWordBoundary", Const, 0},
+ {"POSIX", Const, 0},
+ {"Parse", Func, 0},
+ {"Perl", Const, 0},
+ {"PerlX", Const, 0},
+ {"Prog", Type, 0},
+ {"Prog.Inst", Field, 0},
+ {"Prog.NumCap", Field, 0},
+ {"Prog.Start", Field, 0},
+ {"Regexp", Type, 0},
+ {"Regexp.Cap", Field, 0},
+ {"Regexp.Flags", Field, 0},
+ {"Regexp.Max", Field, 0},
+ {"Regexp.Min", Field, 0},
+ {"Regexp.Name", Field, 0},
+ {"Regexp.Op", Field, 0},
+ {"Regexp.Rune", Field, 0},
+ {"Regexp.Rune0", Field, 0},
+ {"Regexp.Sub", Field, 0},
+ {"Regexp.Sub0", Field, 0},
+ {"Simple", Const, 0},
+ {"UnicodeGroups", Const, 0},
+ {"WasDollar", Const, 0},
+ },
+ "runtime": {
+ {"(*BlockProfileRecord).Stack", Method, 1},
+ {"(*Frames).Next", Method, 7},
+ {"(*Func).Entry", Method, 0},
+ {"(*Func).FileLine", Method, 0},
+ {"(*Func).Name", Method, 0},
+ {"(*MemProfileRecord).InUseBytes", Method, 0},
+ {"(*MemProfileRecord).InUseObjects", Method, 0},
+ {"(*MemProfileRecord).Stack", Method, 0},
+ {"(*PanicNilError).Error", Method, 21},
+ {"(*PanicNilError).RuntimeError", Method, 21},
+ {"(*Pinner).Pin", Method, 21},
+ {"(*Pinner).Unpin", Method, 21},
+ {"(*StackRecord).Stack", Method, 0},
+ {"(*TypeAssertionError).Error", Method, 0},
+ {"(*TypeAssertionError).RuntimeError", Method, 0},
+ {"BlockProfile", Func, 1},
+ {"BlockProfileRecord", Type, 1},
+ {"BlockProfileRecord.Count", Field, 1},
+ {"BlockProfileRecord.Cycles", Field, 1},
+ {"BlockProfileRecord.StackRecord", Field, 1},
+ {"Breakpoint", Func, 0},
+ {"CPUProfile", Func, 0},
+ {"Caller", Func, 0},
+ {"Callers", Func, 0},
+ {"CallersFrames", Func, 7},
+ {"Compiler", Const, 0},
+ {"Error", Type, 0},
+ {"Frame", Type, 7},
+ {"Frame.Entry", Field, 7},
+ {"Frame.File", Field, 7},
+ {"Frame.Func", Field, 7},
+ {"Frame.Function", Field, 7},
+ {"Frame.Line", Field, 7},
+ {"Frame.PC", Field, 7},
+ {"Frames", Type, 7},
+ {"Func", Type, 0},
+ {"FuncForPC", Func, 0},
+ {"GC", Func, 0},
+ {"GOARCH", Const, 0},
+ {"GOMAXPROCS", Func, 0},
+ {"GOOS", Const, 0},
+ {"GOROOT", Func, 0},
+ {"Goexit", Func, 0},
+ {"GoroutineProfile", Func, 0},
+ {"Gosched", Func, 0},
+ {"KeepAlive", Func, 7},
+ {"LockOSThread", Func, 0},
+ {"MemProfile", Func, 0},
+ {"MemProfileRate", Var, 0},
+ {"MemProfileRecord", Type, 0},
+ {"MemProfileRecord.AllocBytes", Field, 0},
+ {"MemProfileRecord.AllocObjects", Field, 0},
+ {"MemProfileRecord.FreeBytes", Field, 0},
+ {"MemProfileRecord.FreeObjects", Field, 0},
+ {"MemProfileRecord.Stack0", Field, 0},
+ {"MemStats", Type, 0},
+ {"MemStats.Alloc", Field, 0},
+ {"MemStats.BuckHashSys", Field, 0},
+ {"MemStats.BySize", Field, 0},
+ {"MemStats.DebugGC", Field, 0},
+ {"MemStats.EnableGC", Field, 0},
+ {"MemStats.Frees", Field, 0},
+ {"MemStats.GCCPUFraction", Field, 5},
+ {"MemStats.GCSys", Field, 2},
+ {"MemStats.HeapAlloc", Field, 0},
+ {"MemStats.HeapIdle", Field, 0},
+ {"MemStats.HeapInuse", Field, 0},
+ {"MemStats.HeapObjects", Field, 0},
+ {"MemStats.HeapReleased", Field, 0},
+ {"MemStats.HeapSys", Field, 0},
+ {"MemStats.LastGC", Field, 0},
+ {"MemStats.Lookups", Field, 0},
+ {"MemStats.MCacheInuse", Field, 0},
+ {"MemStats.MCacheSys", Field, 0},
+ {"MemStats.MSpanInuse", Field, 0},
+ {"MemStats.MSpanSys", Field, 0},
+ {"MemStats.Mallocs", Field, 0},
+ {"MemStats.NextGC", Field, 0},
+ {"MemStats.NumForcedGC", Field, 8},
+ {"MemStats.NumGC", Field, 0},
+ {"MemStats.OtherSys", Field, 2},
+ {"MemStats.PauseEnd", Field, 4},
+ {"MemStats.PauseNs", Field, 0},
+ {"MemStats.PauseTotalNs", Field, 0},
+ {"MemStats.StackInuse", Field, 0},
+ {"MemStats.StackSys", Field, 0},
+ {"MemStats.Sys", Field, 0},
+ {"MemStats.TotalAlloc", Field, 0},
+ {"MutexProfile", Func, 8},
+ {"NumCPU", Func, 0},
+ {"NumCgoCall", Func, 0},
+ {"NumGoroutine", Func, 0},
+ {"PanicNilError", Type, 21},
+ {"Pinner", Type, 21},
+ {"ReadMemStats", Func, 0},
+ {"ReadTrace", Func, 5},
+ {"SetBlockProfileRate", Func, 1},
+ {"SetCPUProfileRate", Func, 0},
+ {"SetCgoTraceback", Func, 7},
+ {"SetFinalizer", Func, 0},
+ {"SetMutexProfileFraction", Func, 8},
+ {"Stack", Func, 0},
+ {"StackRecord", Type, 0},
+ {"StackRecord.Stack0", Field, 0},
+ {"StartTrace", Func, 5},
+ {"StopTrace", Func, 5},
+ {"ThreadCreateProfile", Func, 0},
+ {"TypeAssertionError", Type, 0},
+ {"UnlockOSThread", Func, 0},
+ {"Version", Func, 0},
+ },
+ "runtime/cgo": {
+ {"(Handle).Delete", Method, 17},
+ {"(Handle).Value", Method, 17},
+ {"Handle", Type, 17},
+ {"Incomplete", Type, 20},
+ {"NewHandle", Func, 17},
+ },
+ "runtime/coverage": {
+ {"ClearCounters", Func, 20},
+ {"WriteCounters", Func, 20},
+ {"WriteCountersDir", Func, 20},
+ {"WriteMeta", Func, 20},
+ {"WriteMetaDir", Func, 20},
+ },
+ "runtime/debug": {
+ {"(*BuildInfo).String", Method, 18},
+ {"BuildInfo", Type, 12},
+ {"BuildInfo.Deps", Field, 12},
+ {"BuildInfo.GoVersion", Field, 18},
+ {"BuildInfo.Main", Field, 12},
+ {"BuildInfo.Path", Field, 12},
+ {"BuildInfo.Settings", Field, 18},
+ {"BuildSetting", Type, 18},
+ {"BuildSetting.Key", Field, 18},
+ {"BuildSetting.Value", Field, 18},
+ {"CrashOptions", Type, 23},
+ {"FreeOSMemory", Func, 1},
+ {"GCStats", Type, 1},
+ {"GCStats.LastGC", Field, 1},
+ {"GCStats.NumGC", Field, 1},
+ {"GCStats.Pause", Field, 1},
+ {"GCStats.PauseEnd", Field, 4},
+ {"GCStats.PauseQuantiles", Field, 1},
+ {"GCStats.PauseTotal", Field, 1},
+ {"Module", Type, 12},
+ {"Module.Path", Field, 12},
+ {"Module.Replace", Field, 12},
+ {"Module.Sum", Field, 12},
+ {"Module.Version", Field, 12},
+ {"ParseBuildInfo", Func, 18},
+ {"PrintStack", Func, 0},
+ {"ReadBuildInfo", Func, 12},
+ {"ReadGCStats", Func, 1},
+ {"SetCrashOutput", Func, 23},
+ {"SetGCPercent", Func, 1},
+ {"SetMaxStack", Func, 2},
+ {"SetMaxThreads", Func, 2},
+ {"SetMemoryLimit", Func, 19},
+ {"SetPanicOnFault", Func, 3},
+ {"SetTraceback", Func, 6},
+ {"Stack", Func, 0},
+ {"WriteHeapDump", Func, 3},
+ },
+ "runtime/metrics": {
+ {"(Value).Float64", Method, 16},
+ {"(Value).Float64Histogram", Method, 16},
+ {"(Value).Kind", Method, 16},
+ {"(Value).Uint64", Method, 16},
+ {"All", Func, 16},
+ {"Description", Type, 16},
+ {"Description.Cumulative", Field, 16},
+ {"Description.Description", Field, 16},
+ {"Description.Kind", Field, 16},
+ {"Description.Name", Field, 16},
+ {"Float64Histogram", Type, 16},
+ {"Float64Histogram.Buckets", Field, 16},
+ {"Float64Histogram.Counts", Field, 16},
+ {"KindBad", Const, 16},
+ {"KindFloat64", Const, 16},
+ {"KindFloat64Histogram", Const, 16},
+ {"KindUint64", Const, 16},
+ {"Read", Func, 16},
+ {"Sample", Type, 16},
+ {"Sample.Name", Field, 16},
+ {"Sample.Value", Field, 16},
+ {"Value", Type, 16},
+ {"ValueKind", Type, 16},
+ },
+ "runtime/pprof": {
+ {"(*Profile).Add", Method, 0},
+ {"(*Profile).Count", Method, 0},
+ {"(*Profile).Name", Method, 0},
+ {"(*Profile).Remove", Method, 0},
+ {"(*Profile).WriteTo", Method, 0},
+ {"Do", Func, 9},
+ {"ForLabels", Func, 9},
+ {"Label", Func, 9},
+ {"LabelSet", Type, 9},
+ {"Labels", Func, 9},
+ {"Lookup", Func, 0},
+ {"NewProfile", Func, 0},
+ {"Profile", Type, 0},
+ {"Profiles", Func, 0},
+ {"SetGoroutineLabels", Func, 9},
+ {"StartCPUProfile", Func, 0},
+ {"StopCPUProfile", Func, 0},
+ {"WithLabels", Func, 9},
+ {"WriteHeapProfile", Func, 0},
+ },
+ "runtime/trace": {
+ {"(*Region).End", Method, 11},
+ {"(*Task).End", Method, 11},
+ {"IsEnabled", Func, 11},
+ {"Log", Func, 11},
+ {"Logf", Func, 11},
+ {"NewTask", Func, 11},
+ {"Region", Type, 11},
+ {"Start", Func, 5},
+ {"StartRegion", Func, 11},
+ {"Stop", Func, 5},
+ {"Task", Type, 11},
+ {"WithRegion", Func, 11},
+ },
+ "slices": {
+ {"All", Func, 23},
+ {"AppendSeq", Func, 23},
+ {"Backward", Func, 23},
+ {"BinarySearch", Func, 21},
+ {"BinarySearchFunc", Func, 21},
+ {"Chunk", Func, 23},
+ {"Clip", Func, 21},
+ {"Clone", Func, 21},
+ {"Collect", Func, 23},
+ {"Compact", Func, 21},
+ {"CompactFunc", Func, 21},
+ {"Compare", Func, 21},
+ {"CompareFunc", Func, 21},
+ {"Concat", Func, 22},
+ {"Contains", Func, 21},
+ {"ContainsFunc", Func, 21},
+ {"Delete", Func, 21},
+ {"DeleteFunc", Func, 21},
+ {"Equal", Func, 21},
+ {"EqualFunc", Func, 21},
+ {"Grow", Func, 21},
+ {"Index", Func, 21},
+ {"IndexFunc", Func, 21},
+ {"Insert", Func, 21},
+ {"IsSorted", Func, 21},
+ {"IsSortedFunc", Func, 21},
+ {"Max", Func, 21},
+ {"MaxFunc", Func, 21},
+ {"Min", Func, 21},
+ {"MinFunc", Func, 21},
+ {"Repeat", Func, 23},
+ {"Replace", Func, 21},
+ {"Reverse", Func, 21},
+ {"Sort", Func, 21},
+ {"SortFunc", Func, 21},
+ {"SortStableFunc", Func, 21},
+ {"Sorted", Func, 23},
+ {"SortedFunc", Func, 23},
+ {"SortedStableFunc", Func, 23},
+ {"Values", Func, 23},
+ },
+ "sort": {
+ {"(Float64Slice).Len", Method, 0},
+ {"(Float64Slice).Less", Method, 0},
+ {"(Float64Slice).Search", Method, 0},
+ {"(Float64Slice).Sort", Method, 0},
+ {"(Float64Slice).Swap", Method, 0},
+ {"(IntSlice).Len", Method, 0},
+ {"(IntSlice).Less", Method, 0},
+ {"(IntSlice).Search", Method, 0},
+ {"(IntSlice).Sort", Method, 0},
+ {"(IntSlice).Swap", Method, 0},
+ {"(StringSlice).Len", Method, 0},
+ {"(StringSlice).Less", Method, 0},
+ {"(StringSlice).Search", Method, 0},
+ {"(StringSlice).Sort", Method, 0},
+ {"(StringSlice).Swap", Method, 0},
+ {"Find", Func, 19},
+ {"Float64Slice", Type, 0},
+ {"Float64s", Func, 0},
+ {"Float64sAreSorted", Func, 0},
+ {"IntSlice", Type, 0},
+ {"Interface", Type, 0},
+ {"Ints", Func, 0},
+ {"IntsAreSorted", Func, 0},
+ {"IsSorted", Func, 0},
+ {"Reverse", Func, 1},
+ {"Search", Func, 0},
+ {"SearchFloat64s", Func, 0},
+ {"SearchInts", Func, 0},
+ {"SearchStrings", Func, 0},
+ {"Slice", Func, 8},
+ {"SliceIsSorted", Func, 8},
+ {"SliceStable", Func, 8},
+ {"Sort", Func, 0},
+ {"Stable", Func, 2},
+ {"StringSlice", Type, 0},
+ {"Strings", Func, 0},
+ {"StringsAreSorted", Func, 0},
+ },
+ "strconv": {
+ {"(*NumError).Error", Method, 0},
+ {"(*NumError).Unwrap", Method, 14},
+ {"AppendBool", Func, 0},
+ {"AppendFloat", Func, 0},
+ {"AppendInt", Func, 0},
+ {"AppendQuote", Func, 0},
+ {"AppendQuoteRune", Func, 0},
+ {"AppendQuoteRuneToASCII", Func, 0},
+ {"AppendQuoteRuneToGraphic", Func, 6},
+ {"AppendQuoteToASCII", Func, 0},
+ {"AppendQuoteToGraphic", Func, 6},
+ {"AppendUint", Func, 0},
+ {"Atoi", Func, 0},
+ {"CanBackquote", Func, 0},
+ {"ErrRange", Var, 0},
+ {"ErrSyntax", Var, 0},
+ {"FormatBool", Func, 0},
+ {"FormatComplex", Func, 15},
+ {"FormatFloat", Func, 0},
+ {"FormatInt", Func, 0},
+ {"FormatUint", Func, 0},
+ {"IntSize", Const, 0},
+ {"IsGraphic", Func, 6},
+ {"IsPrint", Func, 0},
+ {"Itoa", Func, 0},
+ {"NumError", Type, 0},
+ {"NumError.Err", Field, 0},
+ {"NumError.Func", Field, 0},
+ {"NumError.Num", Field, 0},
+ {"ParseBool", Func, 0},
+ {"ParseComplex", Func, 15},
+ {"ParseFloat", Func, 0},
+ {"ParseInt", Func, 0},
+ {"ParseUint", Func, 0},
+ {"Quote", Func, 0},
+ {"QuoteRune", Func, 0},
+ {"QuoteRuneToASCII", Func, 0},
+ {"QuoteRuneToGraphic", Func, 6},
+ {"QuoteToASCII", Func, 0},
+ {"QuoteToGraphic", Func, 6},
+ {"QuotedPrefix", Func, 17},
+ {"Unquote", Func, 0},
+ {"UnquoteChar", Func, 0},
+ },
+ "strings": {
+ {"(*Builder).Cap", Method, 12},
+ {"(*Builder).Grow", Method, 10},
+ {"(*Builder).Len", Method, 10},
+ {"(*Builder).Reset", Method, 10},
+ {"(*Builder).String", Method, 10},
+ {"(*Builder).Write", Method, 10},
+ {"(*Builder).WriteByte", Method, 10},
+ {"(*Builder).WriteRune", Method, 10},
+ {"(*Builder).WriteString", Method, 10},
+ {"(*Reader).Len", Method, 0},
+ {"(*Reader).Read", Method, 0},
+ {"(*Reader).ReadAt", Method, 0},
+ {"(*Reader).ReadByte", Method, 0},
+ {"(*Reader).ReadRune", Method, 0},
+ {"(*Reader).Reset", Method, 7},
+ {"(*Reader).Seek", Method, 0},
+ {"(*Reader).Size", Method, 5},
+ {"(*Reader).UnreadByte", Method, 0},
+ {"(*Reader).UnreadRune", Method, 0},
+ {"(*Reader).WriteTo", Method, 1},
+ {"(*Replacer).Replace", Method, 0},
+ {"(*Replacer).WriteString", Method, 0},
+ {"Builder", Type, 10},
+ {"Clone", Func, 18},
+ {"Compare", Func, 5},
+ {"Contains", Func, 0},
+ {"ContainsAny", Func, 0},
+ {"ContainsFunc", Func, 21},
+ {"ContainsRune", Func, 0},
+ {"Count", Func, 0},
+ {"Cut", Func, 18},
+ {"CutPrefix", Func, 20},
+ {"CutSuffix", Func, 20},
+ {"EqualFold", Func, 0},
+ {"Fields", Func, 0},
+ {"FieldsFunc", Func, 0},
+ {"HasPrefix", Func, 0},
+ {"HasSuffix", Func, 0},
+ {"Index", Func, 0},
+ {"IndexAny", Func, 0},
+ {"IndexByte", Func, 2},
+ {"IndexFunc", Func, 0},
+ {"IndexRune", Func, 0},
+ {"Join", Func, 0},
+ {"LastIndex", Func, 0},
+ {"LastIndexAny", Func, 0},
+ {"LastIndexByte", Func, 5},
+ {"LastIndexFunc", Func, 0},
+ {"Map", Func, 0},
+ {"NewReader", Func, 0},
+ {"NewReplacer", Func, 0},
+ {"Reader", Type, 0},
+ {"Repeat", Func, 0},
+ {"Replace", Func, 0},
+ {"ReplaceAll", Func, 12},
+ {"Replacer", Type, 0},
+ {"Split", Func, 0},
+ {"SplitAfter", Func, 0},
+ {"SplitAfterN", Func, 0},
+ {"SplitN", Func, 0},
+ {"Title", Func, 0},
+ {"ToLower", Func, 0},
+ {"ToLowerSpecial", Func, 0},
+ {"ToTitle", Func, 0},
+ {"ToTitleSpecial", Func, 0},
+ {"ToUpper", Func, 0},
+ {"ToUpperSpecial", Func, 0},
+ {"ToValidUTF8", Func, 13},
+ {"Trim", Func, 0},
+ {"TrimFunc", Func, 0},
+ {"TrimLeft", Func, 0},
+ {"TrimLeftFunc", Func, 0},
+ {"TrimPrefix", Func, 1},
+ {"TrimRight", Func, 0},
+ {"TrimRightFunc", Func, 0},
+ {"TrimSpace", Func, 0},
+ {"TrimSuffix", Func, 1},
+ },
+ "structs": {
+ {"HostLayout", Type, 23},
+ },
+ "sync": {
+ {"(*Cond).Broadcast", Method, 0},
+ {"(*Cond).Signal", Method, 0},
+ {"(*Cond).Wait", Method, 0},
+ {"(*Map).Clear", Method, 23},
+ {"(*Map).CompareAndDelete", Method, 20},
+ {"(*Map).CompareAndSwap", Method, 20},
+ {"(*Map).Delete", Method, 9},
+ {"(*Map).Load", Method, 9},
+ {"(*Map).LoadAndDelete", Method, 15},
+ {"(*Map).LoadOrStore", Method, 9},
+ {"(*Map).Range", Method, 9},
+ {"(*Map).Store", Method, 9},
+ {"(*Map).Swap", Method, 20},
+ {"(*Mutex).Lock", Method, 0},
+ {"(*Mutex).TryLock", Method, 18},
+ {"(*Mutex).Unlock", Method, 0},
+ {"(*Once).Do", Method, 0},
+ {"(*Pool).Get", Method, 3},
+ {"(*Pool).Put", Method, 3},
+ {"(*RWMutex).Lock", Method, 0},
+ {"(*RWMutex).RLock", Method, 0},
+ {"(*RWMutex).RLocker", Method, 0},
+ {"(*RWMutex).RUnlock", Method, 0},
+ {"(*RWMutex).TryLock", Method, 18},
+ {"(*RWMutex).TryRLock", Method, 18},
+ {"(*RWMutex).Unlock", Method, 0},
+ {"(*WaitGroup).Add", Method, 0},
+ {"(*WaitGroup).Done", Method, 0},
+ {"(*WaitGroup).Wait", Method, 0},
+ {"Cond", Type, 0},
+ {"Cond.L", Field, 0},
+ {"Locker", Type, 0},
+ {"Map", Type, 9},
+ {"Mutex", Type, 0},
+ {"NewCond", Func, 0},
+ {"Once", Type, 0},
+ {"OnceFunc", Func, 21},
+ {"OnceValue", Func, 21},
+ {"OnceValues", Func, 21},
+ {"Pool", Type, 3},
+ {"Pool.New", Field, 3},
+ {"RWMutex", Type, 0},
+ {"WaitGroup", Type, 0},
+ },
+ "sync/atomic": {
+ {"(*Bool).CompareAndSwap", Method, 19},
+ {"(*Bool).Load", Method, 19},
+ {"(*Bool).Store", Method, 19},
+ {"(*Bool).Swap", Method, 19},
+ {"(*Int32).Add", Method, 19},
+ {"(*Int32).And", Method, 23},
+ {"(*Int32).CompareAndSwap", Method, 19},
+ {"(*Int32).Load", Method, 19},
+ {"(*Int32).Or", Method, 23},
+ {"(*Int32).Store", Method, 19},
+ {"(*Int32).Swap", Method, 19},
+ {"(*Int64).Add", Method, 19},
+ {"(*Int64).And", Method, 23},
+ {"(*Int64).CompareAndSwap", Method, 19},
+ {"(*Int64).Load", Method, 19},
+ {"(*Int64).Or", Method, 23},
+ {"(*Int64).Store", Method, 19},
+ {"(*Int64).Swap", Method, 19},
+ {"(*Pointer).CompareAndSwap", Method, 19},
+ {"(*Pointer).Load", Method, 19},
+ {"(*Pointer).Store", Method, 19},
+ {"(*Pointer).Swap", Method, 19},
+ {"(*Uint32).Add", Method, 19},
+ {"(*Uint32).And", Method, 23},
+ {"(*Uint32).CompareAndSwap", Method, 19},
+ {"(*Uint32).Load", Method, 19},
+ {"(*Uint32).Or", Method, 23},
+ {"(*Uint32).Store", Method, 19},
+ {"(*Uint32).Swap", Method, 19},
+ {"(*Uint64).Add", Method, 19},
+ {"(*Uint64).And", Method, 23},
+ {"(*Uint64).CompareAndSwap", Method, 19},
+ {"(*Uint64).Load", Method, 19},
+ {"(*Uint64).Or", Method, 23},
+ {"(*Uint64).Store", Method, 19},
+ {"(*Uint64).Swap", Method, 19},
+ {"(*Uintptr).Add", Method, 19},
+ {"(*Uintptr).And", Method, 23},
+ {"(*Uintptr).CompareAndSwap", Method, 19},
+ {"(*Uintptr).Load", Method, 19},
+ {"(*Uintptr).Or", Method, 23},
+ {"(*Uintptr).Store", Method, 19},
+ {"(*Uintptr).Swap", Method, 19},
+ {"(*Value).CompareAndSwap", Method, 17},
+ {"(*Value).Load", Method, 4},
+ {"(*Value).Store", Method, 4},
+ {"(*Value).Swap", Method, 17},
+ {"AddInt32", Func, 0},
+ {"AddInt64", Func, 0},
+ {"AddUint32", Func, 0},
+ {"AddUint64", Func, 0},
+ {"AddUintptr", Func, 0},
+ {"AndInt32", Func, 23},
+ {"AndInt64", Func, 23},
+ {"AndUint32", Func, 23},
+ {"AndUint64", Func, 23},
+ {"AndUintptr", Func, 23},
+ {"Bool", Type, 19},
+ {"CompareAndSwapInt32", Func, 0},
+ {"CompareAndSwapInt64", Func, 0},
+ {"CompareAndSwapPointer", Func, 0},
+ {"CompareAndSwapUint32", Func, 0},
+ {"CompareAndSwapUint64", Func, 0},
+ {"CompareAndSwapUintptr", Func, 0},
+ {"Int32", Type, 19},
+ {"Int64", Type, 19},
+ {"LoadInt32", Func, 0},
+ {"LoadInt64", Func, 0},
+ {"LoadPointer", Func, 0},
+ {"LoadUint32", Func, 0},
+ {"LoadUint64", Func, 0},
+ {"LoadUintptr", Func, 0},
+ {"OrInt32", Func, 23},
+ {"OrInt64", Func, 23},
+ {"OrUint32", Func, 23},
+ {"OrUint64", Func, 23},
+ {"OrUintptr", Func, 23},
+ {"Pointer", Type, 19},
+ {"StoreInt32", Func, 0},
+ {"StoreInt64", Func, 0},
+ {"StorePointer", Func, 0},
+ {"StoreUint32", Func, 0},
+ {"StoreUint64", Func, 0},
+ {"StoreUintptr", Func, 0},
+ {"SwapInt32", Func, 2},
+ {"SwapInt64", Func, 2},
+ {"SwapPointer", Func, 2},
+ {"SwapUint32", Func, 2},
+ {"SwapUint64", Func, 2},
+ {"SwapUintptr", Func, 2},
+ {"Uint32", Type, 19},
+ {"Uint64", Type, 19},
+ {"Uintptr", Type, 19},
+ {"Value", Type, 4},
+ },
+ "syscall": {
+ {"(*Cmsghdr).SetLen", Method, 0},
+ {"(*DLL).FindProc", Method, 0},
+ {"(*DLL).MustFindProc", Method, 0},
+ {"(*DLL).Release", Method, 0},
+ {"(*DLLError).Error", Method, 0},
+ {"(*DLLError).Unwrap", Method, 16},
+ {"(*Filetime).Nanoseconds", Method, 0},
+ {"(*Iovec).SetLen", Method, 0},
+ {"(*LazyDLL).Handle", Method, 0},
+ {"(*LazyDLL).Load", Method, 0},
+ {"(*LazyDLL).NewProc", Method, 0},
+ {"(*LazyProc).Addr", Method, 0},
+ {"(*LazyProc).Call", Method, 0},
+ {"(*LazyProc).Find", Method, 0},
+ {"(*Msghdr).SetControllen", Method, 0},
+ {"(*Proc).Addr", Method, 0},
+ {"(*Proc).Call", Method, 0},
+ {"(*PtraceRegs).PC", Method, 0},
+ {"(*PtraceRegs).SetPC", Method, 0},
+ {"(*RawSockaddrAny).Sockaddr", Method, 0},
+ {"(*SID).Copy", Method, 0},
+ {"(*SID).Len", Method, 0},
+ {"(*SID).LookupAccount", Method, 0},
+ {"(*SID).String", Method, 0},
+ {"(*Timespec).Nano", Method, 0},
+ {"(*Timespec).Unix", Method, 0},
+ {"(*Timeval).Nano", Method, 0},
+ {"(*Timeval).Nanoseconds", Method, 0},
+ {"(*Timeval).Unix", Method, 0},
+ {"(Errno).Error", Method, 0},
+ {"(Errno).Is", Method, 13},
+ {"(Errno).Temporary", Method, 0},
+ {"(Errno).Timeout", Method, 0},
+ {"(Signal).Signal", Method, 0},
+ {"(Signal).String", Method, 0},
+ {"(Token).Close", Method, 0},
+ {"(Token).GetTokenPrimaryGroup", Method, 0},
+ {"(Token).GetTokenUser", Method, 0},
+ {"(Token).GetUserProfileDirectory", Method, 0},
+ {"(WaitStatus).Continued", Method, 0},
+ {"(WaitStatus).CoreDump", Method, 0},
+ {"(WaitStatus).ExitStatus", Method, 0},
+ {"(WaitStatus).Exited", Method, 0},
+ {"(WaitStatus).Signal", Method, 0},
+ {"(WaitStatus).Signaled", Method, 0},
+ {"(WaitStatus).StopSignal", Method, 0},
+ {"(WaitStatus).Stopped", Method, 0},
+ {"(WaitStatus).TrapCause", Method, 0},
+ {"AF_ALG", Const, 0},
+ {"AF_APPLETALK", Const, 0},
+ {"AF_ARP", Const, 0},
+ {"AF_ASH", Const, 0},
+ {"AF_ATM", Const, 0},
+ {"AF_ATMPVC", Const, 0},
+ {"AF_ATMSVC", Const, 0},
+ {"AF_AX25", Const, 0},
+ {"AF_BLUETOOTH", Const, 0},
+ {"AF_BRIDGE", Const, 0},
+ {"AF_CAIF", Const, 0},
+ {"AF_CAN", Const, 0},
+ {"AF_CCITT", Const, 0},
+ {"AF_CHAOS", Const, 0},
+ {"AF_CNT", Const, 0},
+ {"AF_COIP", Const, 0},
+ {"AF_DATAKIT", Const, 0},
+ {"AF_DECnet", Const, 0},
+ {"AF_DLI", Const, 0},
+ {"AF_E164", Const, 0},
+ {"AF_ECMA", Const, 0},
+ {"AF_ECONET", Const, 0},
+ {"AF_ENCAP", Const, 1},
+ {"AF_FILE", Const, 0},
+ {"AF_HYLINK", Const, 0},
+ {"AF_IEEE80211", Const, 0},
+ {"AF_IEEE802154", Const, 0},
+ {"AF_IMPLINK", Const, 0},
+ {"AF_INET", Const, 0},
+ {"AF_INET6", Const, 0},
+ {"AF_INET6_SDP", Const, 3},
+ {"AF_INET_SDP", Const, 3},
+ {"AF_IPX", Const, 0},
+ {"AF_IRDA", Const, 0},
+ {"AF_ISDN", Const, 0},
+ {"AF_ISO", Const, 0},
+ {"AF_IUCV", Const, 0},
+ {"AF_KEY", Const, 0},
+ {"AF_LAT", Const, 0},
+ {"AF_LINK", Const, 0},
+ {"AF_LLC", Const, 0},
+ {"AF_LOCAL", Const, 0},
+ {"AF_MAX", Const, 0},
+ {"AF_MPLS", Const, 1},
+ {"AF_NATM", Const, 0},
+ {"AF_NDRV", Const, 0},
+ {"AF_NETBEUI", Const, 0},
+ {"AF_NETBIOS", Const, 0},
+ {"AF_NETGRAPH", Const, 0},
+ {"AF_NETLINK", Const, 0},
+ {"AF_NETROM", Const, 0},
+ {"AF_NS", Const, 0},
+ {"AF_OROUTE", Const, 1},
+ {"AF_OSI", Const, 0},
+ {"AF_PACKET", Const, 0},
+ {"AF_PHONET", Const, 0},
+ {"AF_PPP", Const, 0},
+ {"AF_PPPOX", Const, 0},
+ {"AF_PUP", Const, 0},
+ {"AF_RDS", Const, 0},
+ {"AF_RESERVED_36", Const, 0},
+ {"AF_ROSE", Const, 0},
+ {"AF_ROUTE", Const, 0},
+ {"AF_RXRPC", Const, 0},
+ {"AF_SCLUSTER", Const, 0},
+ {"AF_SECURITY", Const, 0},
+ {"AF_SIP", Const, 0},
+ {"AF_SLOW", Const, 0},
+ {"AF_SNA", Const, 0},
+ {"AF_SYSTEM", Const, 0},
+ {"AF_TIPC", Const, 0},
+ {"AF_UNIX", Const, 0},
+ {"AF_UNSPEC", Const, 0},
+ {"AF_UTUN", Const, 16},
+ {"AF_VENDOR00", Const, 0},
+ {"AF_VENDOR01", Const, 0},
+ {"AF_VENDOR02", Const, 0},
+ {"AF_VENDOR03", Const, 0},
+ {"AF_VENDOR04", Const, 0},
+ {"AF_VENDOR05", Const, 0},
+ {"AF_VENDOR06", Const, 0},
+ {"AF_VENDOR07", Const, 0},
+ {"AF_VENDOR08", Const, 0},
+ {"AF_VENDOR09", Const, 0},
+ {"AF_VENDOR10", Const, 0},
+ {"AF_VENDOR11", Const, 0},
+ {"AF_VENDOR12", Const, 0},
+ {"AF_VENDOR13", Const, 0},
+ {"AF_VENDOR14", Const, 0},
+ {"AF_VENDOR15", Const, 0},
+ {"AF_VENDOR16", Const, 0},
+ {"AF_VENDOR17", Const, 0},
+ {"AF_VENDOR18", Const, 0},
+ {"AF_VENDOR19", Const, 0},
+ {"AF_VENDOR20", Const, 0},
+ {"AF_VENDOR21", Const, 0},
+ {"AF_VENDOR22", Const, 0},
+ {"AF_VENDOR23", Const, 0},
+ {"AF_VENDOR24", Const, 0},
+ {"AF_VENDOR25", Const, 0},
+ {"AF_VENDOR26", Const, 0},
+ {"AF_VENDOR27", Const, 0},
+ {"AF_VENDOR28", Const, 0},
+ {"AF_VENDOR29", Const, 0},
+ {"AF_VENDOR30", Const, 0},
+ {"AF_VENDOR31", Const, 0},
+ {"AF_VENDOR32", Const, 0},
+ {"AF_VENDOR33", Const, 0},
+ {"AF_VENDOR34", Const, 0},
+ {"AF_VENDOR35", Const, 0},
+ {"AF_VENDOR36", Const, 0},
+ {"AF_VENDOR37", Const, 0},
+ {"AF_VENDOR38", Const, 0},
+ {"AF_VENDOR39", Const, 0},
+ {"AF_VENDOR40", Const, 0},
+ {"AF_VENDOR41", Const, 0},
+ {"AF_VENDOR42", Const, 0},
+ {"AF_VENDOR43", Const, 0},
+ {"AF_VENDOR44", Const, 0},
+ {"AF_VENDOR45", Const, 0},
+ {"AF_VENDOR46", Const, 0},
+ {"AF_VENDOR47", Const, 0},
+ {"AF_WANPIPE", Const, 0},
+ {"AF_X25", Const, 0},
+ {"AI_CANONNAME", Const, 1},
+ {"AI_NUMERICHOST", Const, 1},
+ {"AI_PASSIVE", Const, 1},
+ {"APPLICATION_ERROR", Const, 0},
+ {"ARPHRD_ADAPT", Const, 0},
+ {"ARPHRD_APPLETLK", Const, 0},
+ {"ARPHRD_ARCNET", Const, 0},
+ {"ARPHRD_ASH", Const, 0},
+ {"ARPHRD_ATM", Const, 0},
+ {"ARPHRD_AX25", Const, 0},
+ {"ARPHRD_BIF", Const, 0},
+ {"ARPHRD_CHAOS", Const, 0},
+ {"ARPHRD_CISCO", Const, 0},
+ {"ARPHRD_CSLIP", Const, 0},
+ {"ARPHRD_CSLIP6", Const, 0},
+ {"ARPHRD_DDCMP", Const, 0},
+ {"ARPHRD_DLCI", Const, 0},
+ {"ARPHRD_ECONET", Const, 0},
+ {"ARPHRD_EETHER", Const, 0},
+ {"ARPHRD_ETHER", Const, 0},
+ {"ARPHRD_EUI64", Const, 0},
+ {"ARPHRD_FCAL", Const, 0},
+ {"ARPHRD_FCFABRIC", Const, 0},
+ {"ARPHRD_FCPL", Const, 0},
+ {"ARPHRD_FCPP", Const, 0},
+ {"ARPHRD_FDDI", Const, 0},
+ {"ARPHRD_FRAD", Const, 0},
+ {"ARPHRD_FRELAY", Const, 1},
+ {"ARPHRD_HDLC", Const, 0},
+ {"ARPHRD_HIPPI", Const, 0},
+ {"ARPHRD_HWX25", Const, 0},
+ {"ARPHRD_IEEE1394", Const, 0},
+ {"ARPHRD_IEEE802", Const, 0},
+ {"ARPHRD_IEEE80211", Const, 0},
+ {"ARPHRD_IEEE80211_PRISM", Const, 0},
+ {"ARPHRD_IEEE80211_RADIOTAP", Const, 0},
+ {"ARPHRD_IEEE802154", Const, 0},
+ {"ARPHRD_IEEE802154_PHY", Const, 0},
+ {"ARPHRD_IEEE802_TR", Const, 0},
+ {"ARPHRD_INFINIBAND", Const, 0},
+ {"ARPHRD_IPDDP", Const, 0},
+ {"ARPHRD_IPGRE", Const, 0},
+ {"ARPHRD_IRDA", Const, 0},
+ {"ARPHRD_LAPB", Const, 0},
+ {"ARPHRD_LOCALTLK", Const, 0},
+ {"ARPHRD_LOOPBACK", Const, 0},
+ {"ARPHRD_METRICOM", Const, 0},
+ {"ARPHRD_NETROM", Const, 0},
+ {"ARPHRD_NONE", Const, 0},
+ {"ARPHRD_PIMREG", Const, 0},
+ {"ARPHRD_PPP", Const, 0},
+ {"ARPHRD_PRONET", Const, 0},
+ {"ARPHRD_RAWHDLC", Const, 0},
+ {"ARPHRD_ROSE", Const, 0},
+ {"ARPHRD_RSRVD", Const, 0},
+ {"ARPHRD_SIT", Const, 0},
+ {"ARPHRD_SKIP", Const, 0},
+ {"ARPHRD_SLIP", Const, 0},
+ {"ARPHRD_SLIP6", Const, 0},
+ {"ARPHRD_STRIP", Const, 1},
+ {"ARPHRD_TUNNEL", Const, 0},
+ {"ARPHRD_TUNNEL6", Const, 0},
+ {"ARPHRD_VOID", Const, 0},
+ {"ARPHRD_X25", Const, 0},
+ {"AUTHTYPE_CLIENT", Const, 0},
+ {"AUTHTYPE_SERVER", Const, 0},
+ {"Accept", Func, 0},
+ {"Accept4", Func, 1},
+ {"AcceptEx", Func, 0},
+ {"Access", Func, 0},
+ {"Acct", Func, 0},
+ {"AddrinfoW", Type, 1},
+ {"AddrinfoW.Addr", Field, 1},
+ {"AddrinfoW.Addrlen", Field, 1},
+ {"AddrinfoW.Canonname", Field, 1},
+ {"AddrinfoW.Family", Field, 1},
+ {"AddrinfoW.Flags", Field, 1},
+ {"AddrinfoW.Next", Field, 1},
+ {"AddrinfoW.Protocol", Field, 1},
+ {"AddrinfoW.Socktype", Field, 1},
+ {"Adjtime", Func, 0},
+ {"Adjtimex", Func, 0},
+ {"AllThreadsSyscall", Func, 16},
+ {"AllThreadsSyscall6", Func, 16},
+ {"AttachLsf", Func, 0},
+ {"B0", Const, 0},
+ {"B1000000", Const, 0},
+ {"B110", Const, 0},
+ {"B115200", Const, 0},
+ {"B1152000", Const, 0},
+ {"B1200", Const, 0},
+ {"B134", Const, 0},
+ {"B14400", Const, 1},
+ {"B150", Const, 0},
+ {"B1500000", Const, 0},
+ {"B1800", Const, 0},
+ {"B19200", Const, 0},
+ {"B200", Const, 0},
+ {"B2000000", Const, 0},
+ {"B230400", Const, 0},
+ {"B2400", Const, 0},
+ {"B2500000", Const, 0},
+ {"B28800", Const, 1},
+ {"B300", Const, 0},
+ {"B3000000", Const, 0},
+ {"B3500000", Const, 0},
+ {"B38400", Const, 0},
+ {"B4000000", Const, 0},
+ {"B460800", Const, 0},
+ {"B4800", Const, 0},
+ {"B50", Const, 0},
+ {"B500000", Const, 0},
+ {"B57600", Const, 0},
+ {"B576000", Const, 0},
+ {"B600", Const, 0},
+ {"B7200", Const, 1},
+ {"B75", Const, 0},
+ {"B76800", Const, 1},
+ {"B921600", Const, 0},
+ {"B9600", Const, 0},
+ {"BASE_PROTOCOL", Const, 2},
+ {"BIOCFEEDBACK", Const, 0},
+ {"BIOCFLUSH", Const, 0},
+ {"BIOCGBLEN", Const, 0},
+ {"BIOCGDIRECTION", Const, 0},
+ {"BIOCGDIRFILT", Const, 1},
+ {"BIOCGDLT", Const, 0},
+ {"BIOCGDLTLIST", Const, 0},
+ {"BIOCGETBUFMODE", Const, 0},
+ {"BIOCGETIF", Const, 0},
+ {"BIOCGETZMAX", Const, 0},
+ {"BIOCGFEEDBACK", Const, 1},
+ {"BIOCGFILDROP", Const, 1},
+ {"BIOCGHDRCMPLT", Const, 0},
+ {"BIOCGRSIG", Const, 0},
+ {"BIOCGRTIMEOUT", Const, 0},
+ {"BIOCGSEESENT", Const, 0},
+ {"BIOCGSTATS", Const, 0},
+ {"BIOCGSTATSOLD", Const, 1},
+ {"BIOCGTSTAMP", Const, 1},
+ {"BIOCIMMEDIATE", Const, 0},
+ {"BIOCLOCK", Const, 0},
+ {"BIOCPROMISC", Const, 0},
+ {"BIOCROTZBUF", Const, 0},
+ {"BIOCSBLEN", Const, 0},
+ {"BIOCSDIRECTION", Const, 0},
+ {"BIOCSDIRFILT", Const, 1},
+ {"BIOCSDLT", Const, 0},
+ {"BIOCSETBUFMODE", Const, 0},
+ {"BIOCSETF", Const, 0},
+ {"BIOCSETFNR", Const, 0},
+ {"BIOCSETIF", Const, 0},
+ {"BIOCSETWF", Const, 0},
+ {"BIOCSETZBUF", Const, 0},
+ {"BIOCSFEEDBACK", Const, 1},
+ {"BIOCSFILDROP", Const, 1},
+ {"BIOCSHDRCMPLT", Const, 0},
+ {"BIOCSRSIG", Const, 0},
+ {"BIOCSRTIMEOUT", Const, 0},
+ {"BIOCSSEESENT", Const, 0},
+ {"BIOCSTCPF", Const, 1},
+ {"BIOCSTSTAMP", Const, 1},
+ {"BIOCSUDPF", Const, 1},
+ {"BIOCVERSION", Const, 0},
+ {"BPF_A", Const, 0},
+ {"BPF_ABS", Const, 0},
+ {"BPF_ADD", Const, 0},
+ {"BPF_ALIGNMENT", Const, 0},
+ {"BPF_ALIGNMENT32", Const, 1},
+ {"BPF_ALU", Const, 0},
+ {"BPF_AND", Const, 0},
+ {"BPF_B", Const, 0},
+ {"BPF_BUFMODE_BUFFER", Const, 0},
+ {"BPF_BUFMODE_ZBUF", Const, 0},
+ {"BPF_DFLTBUFSIZE", Const, 1},
+ {"BPF_DIRECTION_IN", Const, 1},
+ {"BPF_DIRECTION_OUT", Const, 1},
+ {"BPF_DIV", Const, 0},
+ {"BPF_H", Const, 0},
+ {"BPF_IMM", Const, 0},
+ {"BPF_IND", Const, 0},
+ {"BPF_JA", Const, 0},
+ {"BPF_JEQ", Const, 0},
+ {"BPF_JGE", Const, 0},
+ {"BPF_JGT", Const, 0},
+ {"BPF_JMP", Const, 0},
+ {"BPF_JSET", Const, 0},
+ {"BPF_K", Const, 0},
+ {"BPF_LD", Const, 0},
+ {"BPF_LDX", Const, 0},
+ {"BPF_LEN", Const, 0},
+ {"BPF_LSH", Const, 0},
+ {"BPF_MAJOR_VERSION", Const, 0},
+ {"BPF_MAXBUFSIZE", Const, 0},
+ {"BPF_MAXINSNS", Const, 0},
+ {"BPF_MEM", Const, 0},
+ {"BPF_MEMWORDS", Const, 0},
+ {"BPF_MINBUFSIZE", Const, 0},
+ {"BPF_MINOR_VERSION", Const, 0},
+ {"BPF_MISC", Const, 0},
+ {"BPF_MSH", Const, 0},
+ {"BPF_MUL", Const, 0},
+ {"BPF_NEG", Const, 0},
+ {"BPF_OR", Const, 0},
+ {"BPF_RELEASE", Const, 0},
+ {"BPF_RET", Const, 0},
+ {"BPF_RSH", Const, 0},
+ {"BPF_ST", Const, 0},
+ {"BPF_STX", Const, 0},
+ {"BPF_SUB", Const, 0},
+ {"BPF_TAX", Const, 0},
+ {"BPF_TXA", Const, 0},
+ {"BPF_T_BINTIME", Const, 1},
+ {"BPF_T_BINTIME_FAST", Const, 1},
+ {"BPF_T_BINTIME_MONOTONIC", Const, 1},
+ {"BPF_T_BINTIME_MONOTONIC_FAST", Const, 1},
+ {"BPF_T_FAST", Const, 1},
+ {"BPF_T_FLAG_MASK", Const, 1},
+ {"BPF_T_FORMAT_MASK", Const, 1},
+ {"BPF_T_MICROTIME", Const, 1},
+ {"BPF_T_MICROTIME_FAST", Const, 1},
+ {"BPF_T_MICROTIME_MONOTONIC", Const, 1},
+ {"BPF_T_MICROTIME_MONOTONIC_FAST", Const, 1},
+ {"BPF_T_MONOTONIC", Const, 1},
+ {"BPF_T_MONOTONIC_FAST", Const, 1},
+ {"BPF_T_NANOTIME", Const, 1},
+ {"BPF_T_NANOTIME_FAST", Const, 1},
+ {"BPF_T_NANOTIME_MONOTONIC", Const, 1},
+ {"BPF_T_NANOTIME_MONOTONIC_FAST", Const, 1},
+ {"BPF_T_NONE", Const, 1},
+ {"BPF_T_NORMAL", Const, 1},
+ {"BPF_W", Const, 0},
+ {"BPF_X", Const, 0},
+ {"BRKINT", Const, 0},
+ {"Bind", Func, 0},
+ {"BindToDevice", Func, 0},
+ {"BpfBuflen", Func, 0},
+ {"BpfDatalink", Func, 0},
+ {"BpfHdr", Type, 0},
+ {"BpfHdr.Caplen", Field, 0},
+ {"BpfHdr.Datalen", Field, 0},
+ {"BpfHdr.Hdrlen", Field, 0},
+ {"BpfHdr.Pad_cgo_0", Field, 0},
+ {"BpfHdr.Tstamp", Field, 0},
+ {"BpfHeadercmpl", Func, 0},
+ {"BpfInsn", Type, 0},
+ {"BpfInsn.Code", Field, 0},
+ {"BpfInsn.Jf", Field, 0},
+ {"BpfInsn.Jt", Field, 0},
+ {"BpfInsn.K", Field, 0},
+ {"BpfInterface", Func, 0},
+ {"BpfJump", Func, 0},
+ {"BpfProgram", Type, 0},
+ {"BpfProgram.Insns", Field, 0},
+ {"BpfProgram.Len", Field, 0},
+ {"BpfProgram.Pad_cgo_0", Field, 0},
+ {"BpfStat", Type, 0},
+ {"BpfStat.Capt", Field, 2},
+ {"BpfStat.Drop", Field, 0},
+ {"BpfStat.Padding", Field, 2},
+ {"BpfStat.Recv", Field, 0},
+ {"BpfStats", Func, 0},
+ {"BpfStmt", Func, 0},
+ {"BpfTimeout", Func, 0},
+ {"BpfTimeval", Type, 2},
+ {"BpfTimeval.Sec", Field, 2},
+ {"BpfTimeval.Usec", Field, 2},
+ {"BpfVersion", Type, 0},
+ {"BpfVersion.Major", Field, 0},
+ {"BpfVersion.Minor", Field, 0},
+ {"BpfZbuf", Type, 0},
+ {"BpfZbuf.Bufa", Field, 0},
+ {"BpfZbuf.Bufb", Field, 0},
+ {"BpfZbuf.Buflen", Field, 0},
+ {"BpfZbufHeader", Type, 0},
+ {"BpfZbufHeader.Kernel_gen", Field, 0},
+ {"BpfZbufHeader.Kernel_len", Field, 0},
+ {"BpfZbufHeader.User_gen", Field, 0},
+ {"BpfZbufHeader.X_bzh_pad", Field, 0},
+ {"ByHandleFileInformation", Type, 0},
+ {"ByHandleFileInformation.CreationTime", Field, 0},
+ {"ByHandleFileInformation.FileAttributes", Field, 0},
+ {"ByHandleFileInformation.FileIndexHigh", Field, 0},
+ {"ByHandleFileInformation.FileIndexLow", Field, 0},
+ {"ByHandleFileInformation.FileSizeHigh", Field, 0},
+ {"ByHandleFileInformation.FileSizeLow", Field, 0},
+ {"ByHandleFileInformation.LastAccessTime", Field, 0},
+ {"ByHandleFileInformation.LastWriteTime", Field, 0},
+ {"ByHandleFileInformation.NumberOfLinks", Field, 0},
+ {"ByHandleFileInformation.VolumeSerialNumber", Field, 0},
+ {"BytePtrFromString", Func, 1},
+ {"ByteSliceFromString", Func, 1},
+ {"CCR0_FLUSH", Const, 1},
+ {"CERT_CHAIN_POLICY_AUTHENTICODE", Const, 0},
+ {"CERT_CHAIN_POLICY_AUTHENTICODE_TS", Const, 0},
+ {"CERT_CHAIN_POLICY_BASE", Const, 0},
+ {"CERT_CHAIN_POLICY_BASIC_CONSTRAINTS", Const, 0},
+ {"CERT_CHAIN_POLICY_EV", Const, 0},
+ {"CERT_CHAIN_POLICY_MICROSOFT_ROOT", Const, 0},
+ {"CERT_CHAIN_POLICY_NT_AUTH", Const, 0},
+ {"CERT_CHAIN_POLICY_SSL", Const, 0},
+ {"CERT_E_CN_NO_MATCH", Const, 0},
+ {"CERT_E_EXPIRED", Const, 0},
+ {"CERT_E_PURPOSE", Const, 0},
+ {"CERT_E_ROLE", Const, 0},
+ {"CERT_E_UNTRUSTEDROOT", Const, 0},
+ {"CERT_STORE_ADD_ALWAYS", Const, 0},
+ {"CERT_STORE_DEFER_CLOSE_UNTIL_LAST_FREE_FLAG", Const, 0},
+ {"CERT_STORE_PROV_MEMORY", Const, 0},
+ {"CERT_TRUST_HAS_EXCLUDED_NAME_CONSTRAINT", Const, 0},
+ {"CERT_TRUST_HAS_NOT_DEFINED_NAME_CONSTRAINT", Const, 0},
+ {"CERT_TRUST_HAS_NOT_PERMITTED_NAME_CONSTRAINT", Const, 0},
+ {"CERT_TRUST_HAS_NOT_SUPPORTED_CRITICAL_EXT", Const, 0},
+ {"CERT_TRUST_HAS_NOT_SUPPORTED_NAME_CONSTRAINT", Const, 0},
+ {"CERT_TRUST_INVALID_BASIC_CONSTRAINTS", Const, 0},
+ {"CERT_TRUST_INVALID_EXTENSION", Const, 0},
+ {"CERT_TRUST_INVALID_NAME_CONSTRAINTS", Const, 0},
+ {"CERT_TRUST_INVALID_POLICY_CONSTRAINTS", Const, 0},
+ {"CERT_TRUST_IS_CYCLIC", Const, 0},
+ {"CERT_TRUST_IS_EXPLICIT_DISTRUST", Const, 0},
+ {"CERT_TRUST_IS_NOT_SIGNATURE_VALID", Const, 0},
+ {"CERT_TRUST_IS_NOT_TIME_VALID", Const, 0},
+ {"CERT_TRUST_IS_NOT_VALID_FOR_USAGE", Const, 0},
+ {"CERT_TRUST_IS_OFFLINE_REVOCATION", Const, 0},
+ {"CERT_TRUST_IS_REVOKED", Const, 0},
+ {"CERT_TRUST_IS_UNTRUSTED_ROOT", Const, 0},
+ {"CERT_TRUST_NO_ERROR", Const, 0},
+ {"CERT_TRUST_NO_ISSUANCE_CHAIN_POLICY", Const, 0},
+ {"CERT_TRUST_REVOCATION_STATUS_UNKNOWN", Const, 0},
+ {"CFLUSH", Const, 1},
+ {"CLOCAL", Const, 0},
+ {"CLONE_CHILD_CLEARTID", Const, 2},
+ {"CLONE_CHILD_SETTID", Const, 2},
+ {"CLONE_CLEAR_SIGHAND", Const, 20},
+ {"CLONE_CSIGNAL", Const, 3},
+ {"CLONE_DETACHED", Const, 2},
+ {"CLONE_FILES", Const, 2},
+ {"CLONE_FS", Const, 2},
+ {"CLONE_INTO_CGROUP", Const, 20},
+ {"CLONE_IO", Const, 2},
+ {"CLONE_NEWCGROUP", Const, 20},
+ {"CLONE_NEWIPC", Const, 2},
+ {"CLONE_NEWNET", Const, 2},
+ {"CLONE_NEWNS", Const, 2},
+ {"CLONE_NEWPID", Const, 2},
+ {"CLONE_NEWTIME", Const, 20},
+ {"CLONE_NEWUSER", Const, 2},
+ {"CLONE_NEWUTS", Const, 2},
+ {"CLONE_PARENT", Const, 2},
+ {"CLONE_PARENT_SETTID", Const, 2},
+ {"CLONE_PID", Const, 3},
+ {"CLONE_PIDFD", Const, 20},
+ {"CLONE_PTRACE", Const, 2},
+ {"CLONE_SETTLS", Const, 2},
+ {"CLONE_SIGHAND", Const, 2},
+ {"CLONE_SYSVSEM", Const, 2},
+ {"CLONE_THREAD", Const, 2},
+ {"CLONE_UNTRACED", Const, 2},
+ {"CLONE_VFORK", Const, 2},
+ {"CLONE_VM", Const, 2},
+ {"CPUID_CFLUSH", Const, 1},
+ {"CREAD", Const, 0},
+ {"CREATE_ALWAYS", Const, 0},
+ {"CREATE_NEW", Const, 0},
+ {"CREATE_NEW_PROCESS_GROUP", Const, 1},
+ {"CREATE_UNICODE_ENVIRONMENT", Const, 0},
+ {"CRYPT_DEFAULT_CONTAINER_OPTIONAL", Const, 0},
+ {"CRYPT_DELETEKEYSET", Const, 0},
+ {"CRYPT_MACHINE_KEYSET", Const, 0},
+ {"CRYPT_NEWKEYSET", Const, 0},
+ {"CRYPT_SILENT", Const, 0},
+ {"CRYPT_VERIFYCONTEXT", Const, 0},
+ {"CS5", Const, 0},
+ {"CS6", Const, 0},
+ {"CS7", Const, 0},
+ {"CS8", Const, 0},
+ {"CSIZE", Const, 0},
+ {"CSTART", Const, 1},
+ {"CSTATUS", Const, 1},
+ {"CSTOP", Const, 1},
+ {"CSTOPB", Const, 0},
+ {"CSUSP", Const, 1},
+ {"CTL_MAXNAME", Const, 0},
+ {"CTL_NET", Const, 0},
+ {"CTL_QUERY", Const, 1},
+ {"CTRL_BREAK_EVENT", Const, 1},
+ {"CTRL_CLOSE_EVENT", Const, 14},
+ {"CTRL_C_EVENT", Const, 1},
+ {"CTRL_LOGOFF_EVENT", Const, 14},
+ {"CTRL_SHUTDOWN_EVENT", Const, 14},
+ {"CancelIo", Func, 0},
+ {"CancelIoEx", Func, 1},
+ {"CertAddCertificateContextToStore", Func, 0},
+ {"CertChainContext", Type, 0},
+ {"CertChainContext.ChainCount", Field, 0},
+ {"CertChainContext.Chains", Field, 0},
+ {"CertChainContext.HasRevocationFreshnessTime", Field, 0},
+ {"CertChainContext.LowerQualityChainCount", Field, 0},
+ {"CertChainContext.LowerQualityChains", Field, 0},
+ {"CertChainContext.RevocationFreshnessTime", Field, 0},
+ {"CertChainContext.Size", Field, 0},
+ {"CertChainContext.TrustStatus", Field, 0},
+ {"CertChainElement", Type, 0},
+ {"CertChainElement.ApplicationUsage", Field, 0},
+ {"CertChainElement.CertContext", Field, 0},
+ {"CertChainElement.ExtendedErrorInfo", Field, 0},
+ {"CertChainElement.IssuanceUsage", Field, 0},
+ {"CertChainElement.RevocationInfo", Field, 0},
+ {"CertChainElement.Size", Field, 0},
+ {"CertChainElement.TrustStatus", Field, 0},
+ {"CertChainPara", Type, 0},
+ {"CertChainPara.CacheResync", Field, 0},
+ {"CertChainPara.CheckRevocationFreshnessTime", Field, 0},
+ {"CertChainPara.RequestedUsage", Field, 0},
+ {"CertChainPara.RequstedIssuancePolicy", Field, 0},
+ {"CertChainPara.RevocationFreshnessTime", Field, 0},
+ {"CertChainPara.Size", Field, 0},
+ {"CertChainPara.URLRetrievalTimeout", Field, 0},
+ {"CertChainPolicyPara", Type, 0},
+ {"CertChainPolicyPara.ExtraPolicyPara", Field, 0},
+ {"CertChainPolicyPara.Flags", Field, 0},
+ {"CertChainPolicyPara.Size", Field, 0},
+ {"CertChainPolicyStatus", Type, 0},
+ {"CertChainPolicyStatus.ChainIndex", Field, 0},
+ {"CertChainPolicyStatus.ElementIndex", Field, 0},
+ {"CertChainPolicyStatus.Error", Field, 0},
+ {"CertChainPolicyStatus.ExtraPolicyStatus", Field, 0},
+ {"CertChainPolicyStatus.Size", Field, 0},
+ {"CertCloseStore", Func, 0},
+ {"CertContext", Type, 0},
+ {"CertContext.CertInfo", Field, 0},
+ {"CertContext.EncodedCert", Field, 0},
+ {"CertContext.EncodingType", Field, 0},
+ {"CertContext.Length", Field, 0},
+ {"CertContext.Store", Field, 0},
+ {"CertCreateCertificateContext", Func, 0},
+ {"CertEnhKeyUsage", Type, 0},
+ {"CertEnhKeyUsage.Length", Field, 0},
+ {"CertEnhKeyUsage.UsageIdentifiers", Field, 0},
+ {"CertEnumCertificatesInStore", Func, 0},
+ {"CertFreeCertificateChain", Func, 0},
+ {"CertFreeCertificateContext", Func, 0},
+ {"CertGetCertificateChain", Func, 0},
+ {"CertInfo", Type, 11},
+ {"CertOpenStore", Func, 0},
+ {"CertOpenSystemStore", Func, 0},
+ {"CertRevocationCrlInfo", Type, 11},
+ {"CertRevocationInfo", Type, 0},
+ {"CertRevocationInfo.CrlInfo", Field, 0},
+ {"CertRevocationInfo.FreshnessTime", Field, 0},
+ {"CertRevocationInfo.HasFreshnessTime", Field, 0},
+ {"CertRevocationInfo.OidSpecificInfo", Field, 0},
+ {"CertRevocationInfo.RevocationOid", Field, 0},
+ {"CertRevocationInfo.RevocationResult", Field, 0},
+ {"CertRevocationInfo.Size", Field, 0},
+ {"CertSimpleChain", Type, 0},
+ {"CertSimpleChain.Elements", Field, 0},
+ {"CertSimpleChain.HasRevocationFreshnessTime", Field, 0},
+ {"CertSimpleChain.NumElements", Field, 0},
+ {"CertSimpleChain.RevocationFreshnessTime", Field, 0},
+ {"CertSimpleChain.Size", Field, 0},
+ {"CertSimpleChain.TrustListInfo", Field, 0},
+ {"CertSimpleChain.TrustStatus", Field, 0},
+ {"CertTrustListInfo", Type, 11},
+ {"CertTrustStatus", Type, 0},
+ {"CertTrustStatus.ErrorStatus", Field, 0},
+ {"CertTrustStatus.InfoStatus", Field, 0},
+ {"CertUsageMatch", Type, 0},
+ {"CertUsageMatch.Type", Field, 0},
+ {"CertUsageMatch.Usage", Field, 0},
+ {"CertVerifyCertificateChainPolicy", Func, 0},
+ {"Chdir", Func, 0},
+ {"CheckBpfVersion", Func, 0},
+ {"Chflags", Func, 0},
+ {"Chmod", Func, 0},
+ {"Chown", Func, 0},
+ {"Chroot", Func, 0},
+ {"Clearenv", Func, 0},
+ {"Close", Func, 0},
+ {"CloseHandle", Func, 0},
+ {"CloseOnExec", Func, 0},
+ {"Closesocket", Func, 0},
+ {"CmsgLen", Func, 0},
+ {"CmsgSpace", Func, 0},
+ {"Cmsghdr", Type, 0},
+ {"Cmsghdr.Len", Field, 0},
+ {"Cmsghdr.Level", Field, 0},
+ {"Cmsghdr.Type", Field, 0},
+ {"Cmsghdr.X__cmsg_data", Field, 0},
+ {"CommandLineToArgv", Func, 0},
+ {"ComputerName", Func, 0},
+ {"Conn", Type, 9},
+ {"Connect", Func, 0},
+ {"ConnectEx", Func, 1},
+ {"ConvertSidToStringSid", Func, 0},
+ {"ConvertStringSidToSid", Func, 0},
+ {"CopySid", Func, 0},
+ {"Creat", Func, 0},
+ {"CreateDirectory", Func, 0},
+ {"CreateFile", Func, 0},
+ {"CreateFileMapping", Func, 0},
+ {"CreateHardLink", Func, 4},
+ {"CreateIoCompletionPort", Func, 0},
+ {"CreatePipe", Func, 0},
+ {"CreateProcess", Func, 0},
+ {"CreateProcessAsUser", Func, 10},
+ {"CreateSymbolicLink", Func, 4},
+ {"CreateToolhelp32Snapshot", Func, 4},
+ {"Credential", Type, 0},
+ {"Credential.Gid", Field, 0},
+ {"Credential.Groups", Field, 0},
+ {"Credential.NoSetGroups", Field, 9},
+ {"Credential.Uid", Field, 0},
+ {"CryptAcquireContext", Func, 0},
+ {"CryptGenRandom", Func, 0},
+ {"CryptReleaseContext", Func, 0},
+ {"DIOCBSFLUSH", Const, 1},
+ {"DIOCOSFPFLUSH", Const, 1},
+ {"DLL", Type, 0},
+ {"DLL.Handle", Field, 0},
+ {"DLL.Name", Field, 0},
+ {"DLLError", Type, 0},
+ {"DLLError.Err", Field, 0},
+ {"DLLError.Msg", Field, 0},
+ {"DLLError.ObjName", Field, 0},
+ {"DLT_A429", Const, 0},
+ {"DLT_A653_ICM", Const, 0},
+ {"DLT_AIRONET_HEADER", Const, 0},
+ {"DLT_AOS", Const, 1},
+ {"DLT_APPLE_IP_OVER_IEEE1394", Const, 0},
+ {"DLT_ARCNET", Const, 0},
+ {"DLT_ARCNET_LINUX", Const, 0},
+ {"DLT_ATM_CLIP", Const, 0},
+ {"DLT_ATM_RFC1483", Const, 0},
+ {"DLT_AURORA", Const, 0},
+ {"DLT_AX25", Const, 0},
+ {"DLT_AX25_KISS", Const, 0},
+ {"DLT_BACNET_MS_TP", Const, 0},
+ {"DLT_BLUETOOTH_HCI_H4", Const, 0},
+ {"DLT_BLUETOOTH_HCI_H4_WITH_PHDR", Const, 0},
+ {"DLT_CAN20B", Const, 0},
+ {"DLT_CAN_SOCKETCAN", Const, 1},
+ {"DLT_CHAOS", Const, 0},
+ {"DLT_CHDLC", Const, 0},
+ {"DLT_CISCO_IOS", Const, 0},
+ {"DLT_C_HDLC", Const, 0},
+ {"DLT_C_HDLC_WITH_DIR", Const, 0},
+ {"DLT_DBUS", Const, 1},
+ {"DLT_DECT", Const, 1},
+ {"DLT_DOCSIS", Const, 0},
+ {"DLT_DVB_CI", Const, 1},
+ {"DLT_ECONET", Const, 0},
+ {"DLT_EN10MB", Const, 0},
+ {"DLT_EN3MB", Const, 0},
+ {"DLT_ENC", Const, 0},
+ {"DLT_ERF", Const, 0},
+ {"DLT_ERF_ETH", Const, 0},
+ {"DLT_ERF_POS", Const, 0},
+ {"DLT_FC_2", Const, 1},
+ {"DLT_FC_2_WITH_FRAME_DELIMS", Const, 1},
+ {"DLT_FDDI", Const, 0},
+ {"DLT_FLEXRAY", Const, 0},
+ {"DLT_FRELAY", Const, 0},
+ {"DLT_FRELAY_WITH_DIR", Const, 0},
+ {"DLT_GCOM_SERIAL", Const, 0},
+ {"DLT_GCOM_T1E1", Const, 0},
+ {"DLT_GPF_F", Const, 0},
+ {"DLT_GPF_T", Const, 0},
+ {"DLT_GPRS_LLC", Const, 0},
+ {"DLT_GSMTAP_ABIS", Const, 1},
+ {"DLT_GSMTAP_UM", Const, 1},
+ {"DLT_HDLC", Const, 1},
+ {"DLT_HHDLC", Const, 0},
+ {"DLT_HIPPI", Const, 1},
+ {"DLT_IBM_SN", Const, 0},
+ {"DLT_IBM_SP", Const, 0},
+ {"DLT_IEEE802", Const, 0},
+ {"DLT_IEEE802_11", Const, 0},
+ {"DLT_IEEE802_11_RADIO", Const, 0},
+ {"DLT_IEEE802_11_RADIO_AVS", Const, 0},
+ {"DLT_IEEE802_15_4", Const, 0},
+ {"DLT_IEEE802_15_4_LINUX", Const, 0},
+ {"DLT_IEEE802_15_4_NOFCS", Const, 1},
+ {"DLT_IEEE802_15_4_NONASK_PHY", Const, 0},
+ {"DLT_IEEE802_16_MAC_CPS", Const, 0},
+ {"DLT_IEEE802_16_MAC_CPS_RADIO", Const, 0},
+ {"DLT_IPFILTER", Const, 0},
+ {"DLT_IPMB", Const, 0},
+ {"DLT_IPMB_LINUX", Const, 0},
+ {"DLT_IPNET", Const, 1},
+ {"DLT_IPOIB", Const, 1},
+ {"DLT_IPV4", Const, 1},
+ {"DLT_IPV6", Const, 1},
+ {"DLT_IP_OVER_FC", Const, 0},
+ {"DLT_JUNIPER_ATM1", Const, 0},
+ {"DLT_JUNIPER_ATM2", Const, 0},
+ {"DLT_JUNIPER_ATM_CEMIC", Const, 1},
+ {"DLT_JUNIPER_CHDLC", Const, 0},
+ {"DLT_JUNIPER_ES", Const, 0},
+ {"DLT_JUNIPER_ETHER", Const, 0},
+ {"DLT_JUNIPER_FIBRECHANNEL", Const, 1},
+ {"DLT_JUNIPER_FRELAY", Const, 0},
+ {"DLT_JUNIPER_GGSN", Const, 0},
+ {"DLT_JUNIPER_ISM", Const, 0},
+ {"DLT_JUNIPER_MFR", Const, 0},
+ {"DLT_JUNIPER_MLFR", Const, 0},
+ {"DLT_JUNIPER_MLPPP", Const, 0},
+ {"DLT_JUNIPER_MONITOR", Const, 0},
+ {"DLT_JUNIPER_PIC_PEER", Const, 0},
+ {"DLT_JUNIPER_PPP", Const, 0},
+ {"DLT_JUNIPER_PPPOE", Const, 0},
+ {"DLT_JUNIPER_PPPOE_ATM", Const, 0},
+ {"DLT_JUNIPER_SERVICES", Const, 0},
+ {"DLT_JUNIPER_SRX_E2E", Const, 1},
+ {"DLT_JUNIPER_ST", Const, 0},
+ {"DLT_JUNIPER_VP", Const, 0},
+ {"DLT_JUNIPER_VS", Const, 1},
+ {"DLT_LAPB_WITH_DIR", Const, 0},
+ {"DLT_LAPD", Const, 0},
+ {"DLT_LIN", Const, 0},
+ {"DLT_LINUX_EVDEV", Const, 1},
+ {"DLT_LINUX_IRDA", Const, 0},
+ {"DLT_LINUX_LAPD", Const, 0},
+ {"DLT_LINUX_PPP_WITHDIRECTION", Const, 0},
+ {"DLT_LINUX_SLL", Const, 0},
+ {"DLT_LOOP", Const, 0},
+ {"DLT_LTALK", Const, 0},
+ {"DLT_MATCHING_MAX", Const, 1},
+ {"DLT_MATCHING_MIN", Const, 1},
+ {"DLT_MFR", Const, 0},
+ {"DLT_MOST", Const, 0},
+ {"DLT_MPEG_2_TS", Const, 1},
+ {"DLT_MPLS", Const, 1},
+ {"DLT_MTP2", Const, 0},
+ {"DLT_MTP2_WITH_PHDR", Const, 0},
+ {"DLT_MTP3", Const, 0},
+ {"DLT_MUX27010", Const, 1},
+ {"DLT_NETANALYZER", Const, 1},
+ {"DLT_NETANALYZER_TRANSPARENT", Const, 1},
+ {"DLT_NFC_LLCP", Const, 1},
+ {"DLT_NFLOG", Const, 1},
+ {"DLT_NG40", Const, 1},
+ {"DLT_NULL", Const, 0},
+ {"DLT_PCI_EXP", Const, 0},
+ {"DLT_PFLOG", Const, 0},
+ {"DLT_PFSYNC", Const, 0},
+ {"DLT_PPI", Const, 0},
+ {"DLT_PPP", Const, 0},
+ {"DLT_PPP_BSDOS", Const, 0},
+ {"DLT_PPP_ETHER", Const, 0},
+ {"DLT_PPP_PPPD", Const, 0},
+ {"DLT_PPP_SERIAL", Const, 0},
+ {"DLT_PPP_WITH_DIR", Const, 0},
+ {"DLT_PPP_WITH_DIRECTION", Const, 0},
+ {"DLT_PRISM_HEADER", Const, 0},
+ {"DLT_PRONET", Const, 0},
+ {"DLT_RAIF1", Const, 0},
+ {"DLT_RAW", Const, 0},
+ {"DLT_RAWAF_MASK", Const, 1},
+ {"DLT_RIO", Const, 0},
+ {"DLT_SCCP", Const, 0},
+ {"DLT_SITA", Const, 0},
+ {"DLT_SLIP", Const, 0},
+ {"DLT_SLIP_BSDOS", Const, 0},
+ {"DLT_STANAG_5066_D_PDU", Const, 1},
+ {"DLT_SUNATM", Const, 0},
+ {"DLT_SYMANTEC_FIREWALL", Const, 0},
+ {"DLT_TZSP", Const, 0},
+ {"DLT_USB", Const, 0},
+ {"DLT_USB_LINUX", Const, 0},
+ {"DLT_USB_LINUX_MMAPPED", Const, 1},
+ {"DLT_USER0", Const, 0},
+ {"DLT_USER1", Const, 0},
+ {"DLT_USER10", Const, 0},
+ {"DLT_USER11", Const, 0},
+ {"DLT_USER12", Const, 0},
+ {"DLT_USER13", Const, 0},
+ {"DLT_USER14", Const, 0},
+ {"DLT_USER15", Const, 0},
+ {"DLT_USER2", Const, 0},
+ {"DLT_USER3", Const, 0},
+ {"DLT_USER4", Const, 0},
+ {"DLT_USER5", Const, 0},
+ {"DLT_USER6", Const, 0},
+ {"DLT_USER7", Const, 0},
+ {"DLT_USER8", Const, 0},
+ {"DLT_USER9", Const, 0},
+ {"DLT_WIHART", Const, 1},
+ {"DLT_X2E_SERIAL", Const, 0},
+ {"DLT_X2E_XORAYA", Const, 0},
+ {"DNSMXData", Type, 0},
+ {"DNSMXData.NameExchange", Field, 0},
+ {"DNSMXData.Pad", Field, 0},
+ {"DNSMXData.Preference", Field, 0},
+ {"DNSPTRData", Type, 0},
+ {"DNSPTRData.Host", Field, 0},
+ {"DNSRecord", Type, 0},
+ {"DNSRecord.Data", Field, 0},
+ {"DNSRecord.Dw", Field, 0},
+ {"DNSRecord.Length", Field, 0},
+ {"DNSRecord.Name", Field, 0},
+ {"DNSRecord.Next", Field, 0},
+ {"DNSRecord.Reserved", Field, 0},
+ {"DNSRecord.Ttl", Field, 0},
+ {"DNSRecord.Type", Field, 0},
+ {"DNSSRVData", Type, 0},
+ {"DNSSRVData.Pad", Field, 0},
+ {"DNSSRVData.Port", Field, 0},
+ {"DNSSRVData.Priority", Field, 0},
+ {"DNSSRVData.Target", Field, 0},
+ {"DNSSRVData.Weight", Field, 0},
+ {"DNSTXTData", Type, 0},
+ {"DNSTXTData.StringArray", Field, 0},
+ {"DNSTXTData.StringCount", Field, 0},
+ {"DNS_INFO_NO_RECORDS", Const, 4},
+ {"DNS_TYPE_A", Const, 0},
+ {"DNS_TYPE_A6", Const, 0},
+ {"DNS_TYPE_AAAA", Const, 0},
+ {"DNS_TYPE_ADDRS", Const, 0},
+ {"DNS_TYPE_AFSDB", Const, 0},
+ {"DNS_TYPE_ALL", Const, 0},
+ {"DNS_TYPE_ANY", Const, 0},
+ {"DNS_TYPE_ATMA", Const, 0},
+ {"DNS_TYPE_AXFR", Const, 0},
+ {"DNS_TYPE_CERT", Const, 0},
+ {"DNS_TYPE_CNAME", Const, 0},
+ {"DNS_TYPE_DHCID", Const, 0},
+ {"DNS_TYPE_DNAME", Const, 0},
+ {"DNS_TYPE_DNSKEY", Const, 0},
+ {"DNS_TYPE_DS", Const, 0},
+ {"DNS_TYPE_EID", Const, 0},
+ {"DNS_TYPE_GID", Const, 0},
+ {"DNS_TYPE_GPOS", Const, 0},
+ {"DNS_TYPE_HINFO", Const, 0},
+ {"DNS_TYPE_ISDN", Const, 0},
+ {"DNS_TYPE_IXFR", Const, 0},
+ {"DNS_TYPE_KEY", Const, 0},
+ {"DNS_TYPE_KX", Const, 0},
+ {"DNS_TYPE_LOC", Const, 0},
+ {"DNS_TYPE_MAILA", Const, 0},
+ {"DNS_TYPE_MAILB", Const, 0},
+ {"DNS_TYPE_MB", Const, 0},
+ {"DNS_TYPE_MD", Const, 0},
+ {"DNS_TYPE_MF", Const, 0},
+ {"DNS_TYPE_MG", Const, 0},
+ {"DNS_TYPE_MINFO", Const, 0},
+ {"DNS_TYPE_MR", Const, 0},
+ {"DNS_TYPE_MX", Const, 0},
+ {"DNS_TYPE_NAPTR", Const, 0},
+ {"DNS_TYPE_NBSTAT", Const, 0},
+ {"DNS_TYPE_NIMLOC", Const, 0},
+ {"DNS_TYPE_NS", Const, 0},
+ {"DNS_TYPE_NSAP", Const, 0},
+ {"DNS_TYPE_NSAPPTR", Const, 0},
+ {"DNS_TYPE_NSEC", Const, 0},
+ {"DNS_TYPE_NULL", Const, 0},
+ {"DNS_TYPE_NXT", Const, 0},
+ {"DNS_TYPE_OPT", Const, 0},
+ {"DNS_TYPE_PTR", Const, 0},
+ {"DNS_TYPE_PX", Const, 0},
+ {"DNS_TYPE_RP", Const, 0},
+ {"DNS_TYPE_RRSIG", Const, 0},
+ {"DNS_TYPE_RT", Const, 0},
+ {"DNS_TYPE_SIG", Const, 0},
+ {"DNS_TYPE_SINK", Const, 0},
+ {"DNS_TYPE_SOA", Const, 0},
+ {"DNS_TYPE_SRV", Const, 0},
+ {"DNS_TYPE_TEXT", Const, 0},
+ {"DNS_TYPE_TKEY", Const, 0},
+ {"DNS_TYPE_TSIG", Const, 0},
+ {"DNS_TYPE_UID", Const, 0},
+ {"DNS_TYPE_UINFO", Const, 0},
+ {"DNS_TYPE_UNSPEC", Const, 0},
+ {"DNS_TYPE_WINS", Const, 0},
+ {"DNS_TYPE_WINSR", Const, 0},
+ {"DNS_TYPE_WKS", Const, 0},
+ {"DNS_TYPE_X25", Const, 0},
+ {"DT_BLK", Const, 0},
+ {"DT_CHR", Const, 0},
+ {"DT_DIR", Const, 0},
+ {"DT_FIFO", Const, 0},
+ {"DT_LNK", Const, 0},
+ {"DT_REG", Const, 0},
+ {"DT_SOCK", Const, 0},
+ {"DT_UNKNOWN", Const, 0},
+ {"DT_WHT", Const, 0},
+ {"DUPLICATE_CLOSE_SOURCE", Const, 0},
+ {"DUPLICATE_SAME_ACCESS", Const, 0},
+ {"DeleteFile", Func, 0},
+ {"DetachLsf", Func, 0},
+ {"DeviceIoControl", Func, 4},
+ {"Dirent", Type, 0},
+ {"Dirent.Fileno", Field, 0},
+ {"Dirent.Ino", Field, 0},
+ {"Dirent.Name", Field, 0},
+ {"Dirent.Namlen", Field, 0},
+ {"Dirent.Off", Field, 0},
+ {"Dirent.Pad0", Field, 12},
+ {"Dirent.Pad1", Field, 12},
+ {"Dirent.Pad_cgo_0", Field, 0},
+ {"Dirent.Reclen", Field, 0},
+ {"Dirent.Seekoff", Field, 0},
+ {"Dirent.Type", Field, 0},
+ {"Dirent.X__d_padding", Field, 3},
+ {"DnsNameCompare", Func, 4},
+ {"DnsQuery", Func, 0},
+ {"DnsRecordListFree", Func, 0},
+ {"DnsSectionAdditional", Const, 4},
+ {"DnsSectionAnswer", Const, 4},
+ {"DnsSectionAuthority", Const, 4},
+ {"DnsSectionQuestion", Const, 4},
+ {"Dup", Func, 0},
+ {"Dup2", Func, 0},
+ {"Dup3", Func, 2},
+ {"DuplicateHandle", Func, 0},
+ {"E2BIG", Const, 0},
+ {"EACCES", Const, 0},
+ {"EADDRINUSE", Const, 0},
+ {"EADDRNOTAVAIL", Const, 0},
+ {"EADV", Const, 0},
+ {"EAFNOSUPPORT", Const, 0},
+ {"EAGAIN", Const, 0},
+ {"EALREADY", Const, 0},
+ {"EAUTH", Const, 0},
+ {"EBADARCH", Const, 0},
+ {"EBADE", Const, 0},
+ {"EBADEXEC", Const, 0},
+ {"EBADF", Const, 0},
+ {"EBADFD", Const, 0},
+ {"EBADMACHO", Const, 0},
+ {"EBADMSG", Const, 0},
+ {"EBADR", Const, 0},
+ {"EBADRPC", Const, 0},
+ {"EBADRQC", Const, 0},
+ {"EBADSLT", Const, 0},
+ {"EBFONT", Const, 0},
+ {"EBUSY", Const, 0},
+ {"ECANCELED", Const, 0},
+ {"ECAPMODE", Const, 1},
+ {"ECHILD", Const, 0},
+ {"ECHO", Const, 0},
+ {"ECHOCTL", Const, 0},
+ {"ECHOE", Const, 0},
+ {"ECHOK", Const, 0},
+ {"ECHOKE", Const, 0},
+ {"ECHONL", Const, 0},
+ {"ECHOPRT", Const, 0},
+ {"ECHRNG", Const, 0},
+ {"ECOMM", Const, 0},
+ {"ECONNABORTED", Const, 0},
+ {"ECONNREFUSED", Const, 0},
+ {"ECONNRESET", Const, 0},
+ {"EDEADLK", Const, 0},
+ {"EDEADLOCK", Const, 0},
+ {"EDESTADDRREQ", Const, 0},
+ {"EDEVERR", Const, 0},
+ {"EDOM", Const, 0},
+ {"EDOOFUS", Const, 0},
+ {"EDOTDOT", Const, 0},
+ {"EDQUOT", Const, 0},
+ {"EEXIST", Const, 0},
+ {"EFAULT", Const, 0},
+ {"EFBIG", Const, 0},
+ {"EFER_LMA", Const, 1},
+ {"EFER_LME", Const, 1},
+ {"EFER_NXE", Const, 1},
+ {"EFER_SCE", Const, 1},
+ {"EFTYPE", Const, 0},
+ {"EHOSTDOWN", Const, 0},
+ {"EHOSTUNREACH", Const, 0},
+ {"EHWPOISON", Const, 0},
+ {"EIDRM", Const, 0},
+ {"EILSEQ", Const, 0},
+ {"EINPROGRESS", Const, 0},
+ {"EINTR", Const, 0},
+ {"EINVAL", Const, 0},
+ {"EIO", Const, 0},
+ {"EIPSEC", Const, 1},
+ {"EISCONN", Const, 0},
+ {"EISDIR", Const, 0},
+ {"EISNAM", Const, 0},
+ {"EKEYEXPIRED", Const, 0},
+ {"EKEYREJECTED", Const, 0},
+ {"EKEYREVOKED", Const, 0},
+ {"EL2HLT", Const, 0},
+ {"EL2NSYNC", Const, 0},
+ {"EL3HLT", Const, 0},
+ {"EL3RST", Const, 0},
+ {"ELAST", Const, 0},
+ {"ELF_NGREG", Const, 0},
+ {"ELF_PRARGSZ", Const, 0},
+ {"ELIBACC", Const, 0},
+ {"ELIBBAD", Const, 0},
+ {"ELIBEXEC", Const, 0},
+ {"ELIBMAX", Const, 0},
+ {"ELIBSCN", Const, 0},
+ {"ELNRNG", Const, 0},
+ {"ELOOP", Const, 0},
+ {"EMEDIUMTYPE", Const, 0},
+ {"EMFILE", Const, 0},
+ {"EMLINK", Const, 0},
+ {"EMSGSIZE", Const, 0},
+ {"EMT_TAGOVF", Const, 1},
+ {"EMULTIHOP", Const, 0},
+ {"EMUL_ENABLED", Const, 1},
+ {"EMUL_LINUX", Const, 1},
+ {"EMUL_LINUX32", Const, 1},
+ {"EMUL_MAXID", Const, 1},
+ {"EMUL_NATIVE", Const, 1},
+ {"ENAMETOOLONG", Const, 0},
+ {"ENAVAIL", Const, 0},
+ {"ENDRUNDISC", Const, 1},
+ {"ENEEDAUTH", Const, 0},
+ {"ENETDOWN", Const, 0},
+ {"ENETRESET", Const, 0},
+ {"ENETUNREACH", Const, 0},
+ {"ENFILE", Const, 0},
+ {"ENOANO", Const, 0},
+ {"ENOATTR", Const, 0},
+ {"ENOBUFS", Const, 0},
+ {"ENOCSI", Const, 0},
+ {"ENODATA", Const, 0},
+ {"ENODEV", Const, 0},
+ {"ENOENT", Const, 0},
+ {"ENOEXEC", Const, 0},
+ {"ENOKEY", Const, 0},
+ {"ENOLCK", Const, 0},
+ {"ENOLINK", Const, 0},
+ {"ENOMEDIUM", Const, 0},
+ {"ENOMEM", Const, 0},
+ {"ENOMSG", Const, 0},
+ {"ENONET", Const, 0},
+ {"ENOPKG", Const, 0},
+ {"ENOPOLICY", Const, 0},
+ {"ENOPROTOOPT", Const, 0},
+ {"ENOSPC", Const, 0},
+ {"ENOSR", Const, 0},
+ {"ENOSTR", Const, 0},
+ {"ENOSYS", Const, 0},
+ {"ENOTBLK", Const, 0},
+ {"ENOTCAPABLE", Const, 0},
+ {"ENOTCONN", Const, 0},
+ {"ENOTDIR", Const, 0},
+ {"ENOTEMPTY", Const, 0},
+ {"ENOTNAM", Const, 0},
+ {"ENOTRECOVERABLE", Const, 0},
+ {"ENOTSOCK", Const, 0},
+ {"ENOTSUP", Const, 0},
+ {"ENOTTY", Const, 0},
+ {"ENOTUNIQ", Const, 0},
+ {"ENXIO", Const, 0},
+ {"EN_SW_CTL_INF", Const, 1},
+ {"EN_SW_CTL_PREC", Const, 1},
+ {"EN_SW_CTL_ROUND", Const, 1},
+ {"EN_SW_DATACHAIN", Const, 1},
+ {"EN_SW_DENORM", Const, 1},
+ {"EN_SW_INVOP", Const, 1},
+ {"EN_SW_OVERFLOW", Const, 1},
+ {"EN_SW_PRECLOSS", Const, 1},
+ {"EN_SW_UNDERFLOW", Const, 1},
+ {"EN_SW_ZERODIV", Const, 1},
+ {"EOPNOTSUPP", Const, 0},
+ {"EOVERFLOW", Const, 0},
+ {"EOWNERDEAD", Const, 0},
+ {"EPERM", Const, 0},
+ {"EPFNOSUPPORT", Const, 0},
+ {"EPIPE", Const, 0},
+ {"EPOLLERR", Const, 0},
+ {"EPOLLET", Const, 0},
+ {"EPOLLHUP", Const, 0},
+ {"EPOLLIN", Const, 0},
+ {"EPOLLMSG", Const, 0},
+ {"EPOLLONESHOT", Const, 0},
+ {"EPOLLOUT", Const, 0},
+ {"EPOLLPRI", Const, 0},
+ {"EPOLLRDBAND", Const, 0},
+ {"EPOLLRDHUP", Const, 0},
+ {"EPOLLRDNORM", Const, 0},
+ {"EPOLLWRBAND", Const, 0},
+ {"EPOLLWRNORM", Const, 0},
+ {"EPOLL_CLOEXEC", Const, 0},
+ {"EPOLL_CTL_ADD", Const, 0},
+ {"EPOLL_CTL_DEL", Const, 0},
+ {"EPOLL_CTL_MOD", Const, 0},
+ {"EPOLL_NONBLOCK", Const, 0},
+ {"EPROCLIM", Const, 0},
+ {"EPROCUNAVAIL", Const, 0},
+ {"EPROGMISMATCH", Const, 0},
+ {"EPROGUNAVAIL", Const, 0},
+ {"EPROTO", Const, 0},
+ {"EPROTONOSUPPORT", Const, 0},
+ {"EPROTOTYPE", Const, 0},
+ {"EPWROFF", Const, 0},
+ {"EQFULL", Const, 16},
+ {"ERANGE", Const, 0},
+ {"EREMCHG", Const, 0},
+ {"EREMOTE", Const, 0},
+ {"EREMOTEIO", Const, 0},
+ {"ERESTART", Const, 0},
+ {"ERFKILL", Const, 0},
+ {"EROFS", Const, 0},
+ {"ERPCMISMATCH", Const, 0},
+ {"ERROR_ACCESS_DENIED", Const, 0},
+ {"ERROR_ALREADY_EXISTS", Const, 0},
+ {"ERROR_BROKEN_PIPE", Const, 0},
+ {"ERROR_BUFFER_OVERFLOW", Const, 0},
+ {"ERROR_DIR_NOT_EMPTY", Const, 8},
+ {"ERROR_ENVVAR_NOT_FOUND", Const, 0},
+ {"ERROR_FILE_EXISTS", Const, 0},
+ {"ERROR_FILE_NOT_FOUND", Const, 0},
+ {"ERROR_HANDLE_EOF", Const, 2},
+ {"ERROR_INSUFFICIENT_BUFFER", Const, 0},
+ {"ERROR_IO_PENDING", Const, 0},
+ {"ERROR_MOD_NOT_FOUND", Const, 0},
+ {"ERROR_MORE_DATA", Const, 3},
+ {"ERROR_NETNAME_DELETED", Const, 3},
+ {"ERROR_NOT_FOUND", Const, 1},
+ {"ERROR_NO_MORE_FILES", Const, 0},
+ {"ERROR_OPERATION_ABORTED", Const, 0},
+ {"ERROR_PATH_NOT_FOUND", Const, 0},
+ {"ERROR_PRIVILEGE_NOT_HELD", Const, 4},
+ {"ERROR_PROC_NOT_FOUND", Const, 0},
+ {"ESHLIBVERS", Const, 0},
+ {"ESHUTDOWN", Const, 0},
+ {"ESOCKTNOSUPPORT", Const, 0},
+ {"ESPIPE", Const, 0},
+ {"ESRCH", Const, 0},
+ {"ESRMNT", Const, 0},
+ {"ESTALE", Const, 0},
+ {"ESTRPIPE", Const, 0},
+ {"ETHERCAP_JUMBO_MTU", Const, 1},
+ {"ETHERCAP_VLAN_HWTAGGING", Const, 1},
+ {"ETHERCAP_VLAN_MTU", Const, 1},
+ {"ETHERMIN", Const, 1},
+ {"ETHERMTU", Const, 1},
+ {"ETHERMTU_JUMBO", Const, 1},
+ {"ETHERTYPE_8023", Const, 1},
+ {"ETHERTYPE_AARP", Const, 1},
+ {"ETHERTYPE_ACCTON", Const, 1},
+ {"ETHERTYPE_AEONIC", Const, 1},
+ {"ETHERTYPE_ALPHA", Const, 1},
+ {"ETHERTYPE_AMBER", Const, 1},
+ {"ETHERTYPE_AMOEBA", Const, 1},
+ {"ETHERTYPE_AOE", Const, 1},
+ {"ETHERTYPE_APOLLO", Const, 1},
+ {"ETHERTYPE_APOLLODOMAIN", Const, 1},
+ {"ETHERTYPE_APPLETALK", Const, 1},
+ {"ETHERTYPE_APPLITEK", Const, 1},
+ {"ETHERTYPE_ARGONAUT", Const, 1},
+ {"ETHERTYPE_ARP", Const, 1},
+ {"ETHERTYPE_AT", Const, 1},
+ {"ETHERTYPE_ATALK", Const, 1},
+ {"ETHERTYPE_ATOMIC", Const, 1},
+ {"ETHERTYPE_ATT", Const, 1},
+ {"ETHERTYPE_ATTSTANFORD", Const, 1},
+ {"ETHERTYPE_AUTOPHON", Const, 1},
+ {"ETHERTYPE_AXIS", Const, 1},
+ {"ETHERTYPE_BCLOOP", Const, 1},
+ {"ETHERTYPE_BOFL", Const, 1},
+ {"ETHERTYPE_CABLETRON", Const, 1},
+ {"ETHERTYPE_CHAOS", Const, 1},
+ {"ETHERTYPE_COMDESIGN", Const, 1},
+ {"ETHERTYPE_COMPUGRAPHIC", Const, 1},
+ {"ETHERTYPE_COUNTERPOINT", Const, 1},
+ {"ETHERTYPE_CRONUS", Const, 1},
+ {"ETHERTYPE_CRONUSVLN", Const, 1},
+ {"ETHERTYPE_DCA", Const, 1},
+ {"ETHERTYPE_DDE", Const, 1},
+ {"ETHERTYPE_DEBNI", Const, 1},
+ {"ETHERTYPE_DECAM", Const, 1},
+ {"ETHERTYPE_DECCUST", Const, 1},
+ {"ETHERTYPE_DECDIAG", Const, 1},
+ {"ETHERTYPE_DECDNS", Const, 1},
+ {"ETHERTYPE_DECDTS", Const, 1},
+ {"ETHERTYPE_DECEXPER", Const, 1},
+ {"ETHERTYPE_DECLAST", Const, 1},
+ {"ETHERTYPE_DECLTM", Const, 1},
+ {"ETHERTYPE_DECMUMPS", Const, 1},
+ {"ETHERTYPE_DECNETBIOS", Const, 1},
+ {"ETHERTYPE_DELTACON", Const, 1},
+ {"ETHERTYPE_DIDDLE", Const, 1},
+ {"ETHERTYPE_DLOG1", Const, 1},
+ {"ETHERTYPE_DLOG2", Const, 1},
+ {"ETHERTYPE_DN", Const, 1},
+ {"ETHERTYPE_DOGFIGHT", Const, 1},
+ {"ETHERTYPE_DSMD", Const, 1},
+ {"ETHERTYPE_ECMA", Const, 1},
+ {"ETHERTYPE_ENCRYPT", Const, 1},
+ {"ETHERTYPE_ES", Const, 1},
+ {"ETHERTYPE_EXCELAN", Const, 1},
+ {"ETHERTYPE_EXPERDATA", Const, 1},
+ {"ETHERTYPE_FLIP", Const, 1},
+ {"ETHERTYPE_FLOWCONTROL", Const, 1},
+ {"ETHERTYPE_FRARP", Const, 1},
+ {"ETHERTYPE_GENDYN", Const, 1},
+ {"ETHERTYPE_HAYES", Const, 1},
+ {"ETHERTYPE_HIPPI_FP", Const, 1},
+ {"ETHERTYPE_HITACHI", Const, 1},
+ {"ETHERTYPE_HP", Const, 1},
+ {"ETHERTYPE_IEEEPUP", Const, 1},
+ {"ETHERTYPE_IEEEPUPAT", Const, 1},
+ {"ETHERTYPE_IMLBL", Const, 1},
+ {"ETHERTYPE_IMLBLDIAG", Const, 1},
+ {"ETHERTYPE_IP", Const, 1},
+ {"ETHERTYPE_IPAS", Const, 1},
+ {"ETHERTYPE_IPV6", Const, 1},
+ {"ETHERTYPE_IPX", Const, 1},
+ {"ETHERTYPE_IPXNEW", Const, 1},
+ {"ETHERTYPE_KALPANA", Const, 1},
+ {"ETHERTYPE_LANBRIDGE", Const, 1},
+ {"ETHERTYPE_LANPROBE", Const, 1},
+ {"ETHERTYPE_LAT", Const, 1},
+ {"ETHERTYPE_LBACK", Const, 1},
+ {"ETHERTYPE_LITTLE", Const, 1},
+ {"ETHERTYPE_LLDP", Const, 1},
+ {"ETHERTYPE_LOGICRAFT", Const, 1},
+ {"ETHERTYPE_LOOPBACK", Const, 1},
+ {"ETHERTYPE_MATRA", Const, 1},
+ {"ETHERTYPE_MAX", Const, 1},
+ {"ETHERTYPE_MERIT", Const, 1},
+ {"ETHERTYPE_MICP", Const, 1},
+ {"ETHERTYPE_MOPDL", Const, 1},
+ {"ETHERTYPE_MOPRC", Const, 1},
+ {"ETHERTYPE_MOTOROLA", Const, 1},
+ {"ETHERTYPE_MPLS", Const, 1},
+ {"ETHERTYPE_MPLS_MCAST", Const, 1},
+ {"ETHERTYPE_MUMPS", Const, 1},
+ {"ETHERTYPE_NBPCC", Const, 1},
+ {"ETHERTYPE_NBPCLAIM", Const, 1},
+ {"ETHERTYPE_NBPCLREQ", Const, 1},
+ {"ETHERTYPE_NBPCLRSP", Const, 1},
+ {"ETHERTYPE_NBPCREQ", Const, 1},
+ {"ETHERTYPE_NBPCRSP", Const, 1},
+ {"ETHERTYPE_NBPDG", Const, 1},
+ {"ETHERTYPE_NBPDGB", Const, 1},
+ {"ETHERTYPE_NBPDLTE", Const, 1},
+ {"ETHERTYPE_NBPRAR", Const, 1},
+ {"ETHERTYPE_NBPRAS", Const, 1},
+ {"ETHERTYPE_NBPRST", Const, 1},
+ {"ETHERTYPE_NBPSCD", Const, 1},
+ {"ETHERTYPE_NBPVCD", Const, 1},
+ {"ETHERTYPE_NBS", Const, 1},
+ {"ETHERTYPE_NCD", Const, 1},
+ {"ETHERTYPE_NESTAR", Const, 1},
+ {"ETHERTYPE_NETBEUI", Const, 1},
+ {"ETHERTYPE_NOVELL", Const, 1},
+ {"ETHERTYPE_NS", Const, 1},
+ {"ETHERTYPE_NSAT", Const, 1},
+ {"ETHERTYPE_NSCOMPAT", Const, 1},
+ {"ETHERTYPE_NTRAILER", Const, 1},
+ {"ETHERTYPE_OS9", Const, 1},
+ {"ETHERTYPE_OS9NET", Const, 1},
+ {"ETHERTYPE_PACER", Const, 1},
+ {"ETHERTYPE_PAE", Const, 1},
+ {"ETHERTYPE_PCS", Const, 1},
+ {"ETHERTYPE_PLANNING", Const, 1},
+ {"ETHERTYPE_PPP", Const, 1},
+ {"ETHERTYPE_PPPOE", Const, 1},
+ {"ETHERTYPE_PPPOEDISC", Const, 1},
+ {"ETHERTYPE_PRIMENTS", Const, 1},
+ {"ETHERTYPE_PUP", Const, 1},
+ {"ETHERTYPE_PUPAT", Const, 1},
+ {"ETHERTYPE_QINQ", Const, 1},
+ {"ETHERTYPE_RACAL", Const, 1},
+ {"ETHERTYPE_RATIONAL", Const, 1},
+ {"ETHERTYPE_RAWFR", Const, 1},
+ {"ETHERTYPE_RCL", Const, 1},
+ {"ETHERTYPE_RDP", Const, 1},
+ {"ETHERTYPE_RETIX", Const, 1},
+ {"ETHERTYPE_REVARP", Const, 1},
+ {"ETHERTYPE_SCA", Const, 1},
+ {"ETHERTYPE_SECTRA", Const, 1},
+ {"ETHERTYPE_SECUREDATA", Const, 1},
+ {"ETHERTYPE_SGITW", Const, 1},
+ {"ETHERTYPE_SG_BOUNCE", Const, 1},
+ {"ETHERTYPE_SG_DIAG", Const, 1},
+ {"ETHERTYPE_SG_NETGAMES", Const, 1},
+ {"ETHERTYPE_SG_RESV", Const, 1},
+ {"ETHERTYPE_SIMNET", Const, 1},
+ {"ETHERTYPE_SLOW", Const, 1},
+ {"ETHERTYPE_SLOWPROTOCOLS", Const, 1},
+ {"ETHERTYPE_SNA", Const, 1},
+ {"ETHERTYPE_SNMP", Const, 1},
+ {"ETHERTYPE_SONIX", Const, 1},
+ {"ETHERTYPE_SPIDER", Const, 1},
+ {"ETHERTYPE_SPRITE", Const, 1},
+ {"ETHERTYPE_STP", Const, 1},
+ {"ETHERTYPE_TALARIS", Const, 1},
+ {"ETHERTYPE_TALARISMC", Const, 1},
+ {"ETHERTYPE_TCPCOMP", Const, 1},
+ {"ETHERTYPE_TCPSM", Const, 1},
+ {"ETHERTYPE_TEC", Const, 1},
+ {"ETHERTYPE_TIGAN", Const, 1},
+ {"ETHERTYPE_TRAIL", Const, 1},
+ {"ETHERTYPE_TRANSETHER", Const, 1},
+ {"ETHERTYPE_TYMSHARE", Const, 1},
+ {"ETHERTYPE_UBBST", Const, 1},
+ {"ETHERTYPE_UBDEBUG", Const, 1},
+ {"ETHERTYPE_UBDIAGLOOP", Const, 1},
+ {"ETHERTYPE_UBDL", Const, 1},
+ {"ETHERTYPE_UBNIU", Const, 1},
+ {"ETHERTYPE_UBNMC", Const, 1},
+ {"ETHERTYPE_VALID", Const, 1},
+ {"ETHERTYPE_VARIAN", Const, 1},
+ {"ETHERTYPE_VAXELN", Const, 1},
+ {"ETHERTYPE_VEECO", Const, 1},
+ {"ETHERTYPE_VEXP", Const, 1},
+ {"ETHERTYPE_VGLAB", Const, 1},
+ {"ETHERTYPE_VINES", Const, 1},
+ {"ETHERTYPE_VINESECHO", Const, 1},
+ {"ETHERTYPE_VINESLOOP", Const, 1},
+ {"ETHERTYPE_VITAL", Const, 1},
+ {"ETHERTYPE_VLAN", Const, 1},
+ {"ETHERTYPE_VLTLMAN", Const, 1},
+ {"ETHERTYPE_VPROD", Const, 1},
+ {"ETHERTYPE_VURESERVED", Const, 1},
+ {"ETHERTYPE_WATERLOO", Const, 1},
+ {"ETHERTYPE_WELLFLEET", Const, 1},
+ {"ETHERTYPE_X25", Const, 1},
+ {"ETHERTYPE_X75", Const, 1},
+ {"ETHERTYPE_XNSSM", Const, 1},
+ {"ETHERTYPE_XTP", Const, 1},
+ {"ETHER_ADDR_LEN", Const, 1},
+ {"ETHER_ALIGN", Const, 1},
+ {"ETHER_CRC_LEN", Const, 1},
+ {"ETHER_CRC_POLY_BE", Const, 1},
+ {"ETHER_CRC_POLY_LE", Const, 1},
+ {"ETHER_HDR_LEN", Const, 1},
+ {"ETHER_MAX_DIX_LEN", Const, 1},
+ {"ETHER_MAX_LEN", Const, 1},
+ {"ETHER_MAX_LEN_JUMBO", Const, 1},
+ {"ETHER_MIN_LEN", Const, 1},
+ {"ETHER_PPPOE_ENCAP_LEN", Const, 1},
+ {"ETHER_TYPE_LEN", Const, 1},
+ {"ETHER_VLAN_ENCAP_LEN", Const, 1},
+ {"ETH_P_1588", Const, 0},
+ {"ETH_P_8021Q", Const, 0},
+ {"ETH_P_802_2", Const, 0},
+ {"ETH_P_802_3", Const, 0},
+ {"ETH_P_AARP", Const, 0},
+ {"ETH_P_ALL", Const, 0},
+ {"ETH_P_AOE", Const, 0},
+ {"ETH_P_ARCNET", Const, 0},
+ {"ETH_P_ARP", Const, 0},
+ {"ETH_P_ATALK", Const, 0},
+ {"ETH_P_ATMFATE", Const, 0},
+ {"ETH_P_ATMMPOA", Const, 0},
+ {"ETH_P_AX25", Const, 0},
+ {"ETH_P_BPQ", Const, 0},
+ {"ETH_P_CAIF", Const, 0},
+ {"ETH_P_CAN", Const, 0},
+ {"ETH_P_CONTROL", Const, 0},
+ {"ETH_P_CUST", Const, 0},
+ {"ETH_P_DDCMP", Const, 0},
+ {"ETH_P_DEC", Const, 0},
+ {"ETH_P_DIAG", Const, 0},
+ {"ETH_P_DNA_DL", Const, 0},
+ {"ETH_P_DNA_RC", Const, 0},
+ {"ETH_P_DNA_RT", Const, 0},
+ {"ETH_P_DSA", Const, 0},
+ {"ETH_P_ECONET", Const, 0},
+ {"ETH_P_EDSA", Const, 0},
+ {"ETH_P_FCOE", Const, 0},
+ {"ETH_P_FIP", Const, 0},
+ {"ETH_P_HDLC", Const, 0},
+ {"ETH_P_IEEE802154", Const, 0},
+ {"ETH_P_IEEEPUP", Const, 0},
+ {"ETH_P_IEEEPUPAT", Const, 0},
+ {"ETH_P_IP", Const, 0},
+ {"ETH_P_IPV6", Const, 0},
+ {"ETH_P_IPX", Const, 0},
+ {"ETH_P_IRDA", Const, 0},
+ {"ETH_P_LAT", Const, 0},
+ {"ETH_P_LINK_CTL", Const, 0},
+ {"ETH_P_LOCALTALK", Const, 0},
+ {"ETH_P_LOOP", Const, 0},
+ {"ETH_P_MOBITEX", Const, 0},
+ {"ETH_P_MPLS_MC", Const, 0},
+ {"ETH_P_MPLS_UC", Const, 0},
+ {"ETH_P_PAE", Const, 0},
+ {"ETH_P_PAUSE", Const, 0},
+ {"ETH_P_PHONET", Const, 0},
+ {"ETH_P_PPPTALK", Const, 0},
+ {"ETH_P_PPP_DISC", Const, 0},
+ {"ETH_P_PPP_MP", Const, 0},
+ {"ETH_P_PPP_SES", Const, 0},
+ {"ETH_P_PUP", Const, 0},
+ {"ETH_P_PUPAT", Const, 0},
+ {"ETH_P_RARP", Const, 0},
+ {"ETH_P_SCA", Const, 0},
+ {"ETH_P_SLOW", Const, 0},
+ {"ETH_P_SNAP", Const, 0},
+ {"ETH_P_TEB", Const, 0},
+ {"ETH_P_TIPC", Const, 0},
+ {"ETH_P_TRAILER", Const, 0},
+ {"ETH_P_TR_802_2", Const, 0},
+ {"ETH_P_WAN_PPP", Const, 0},
+ {"ETH_P_WCCP", Const, 0},
+ {"ETH_P_X25", Const, 0},
+ {"ETIME", Const, 0},
+ {"ETIMEDOUT", Const, 0},
+ {"ETOOMANYREFS", Const, 0},
+ {"ETXTBSY", Const, 0},
+ {"EUCLEAN", Const, 0},
+ {"EUNATCH", Const, 0},
+ {"EUSERS", Const, 0},
+ {"EVFILT_AIO", Const, 0},
+ {"EVFILT_FS", Const, 0},
+ {"EVFILT_LIO", Const, 0},
+ {"EVFILT_MACHPORT", Const, 0},
+ {"EVFILT_PROC", Const, 0},
+ {"EVFILT_READ", Const, 0},
+ {"EVFILT_SIGNAL", Const, 0},
+ {"EVFILT_SYSCOUNT", Const, 0},
+ {"EVFILT_THREADMARKER", Const, 0},
+ {"EVFILT_TIMER", Const, 0},
+ {"EVFILT_USER", Const, 0},
+ {"EVFILT_VM", Const, 0},
+ {"EVFILT_VNODE", Const, 0},
+ {"EVFILT_WRITE", Const, 0},
+ {"EV_ADD", Const, 0},
+ {"EV_CLEAR", Const, 0},
+ {"EV_DELETE", Const, 0},
+ {"EV_DISABLE", Const, 0},
+ {"EV_DISPATCH", Const, 0},
+ {"EV_DROP", Const, 3},
+ {"EV_ENABLE", Const, 0},
+ {"EV_EOF", Const, 0},
+ {"EV_ERROR", Const, 0},
+ {"EV_FLAG0", Const, 0},
+ {"EV_FLAG1", Const, 0},
+ {"EV_ONESHOT", Const, 0},
+ {"EV_OOBAND", Const, 0},
+ {"EV_POLL", Const, 0},
+ {"EV_RECEIPT", Const, 0},
+ {"EV_SYSFLAGS", Const, 0},
+ {"EWINDOWS", Const, 0},
+ {"EWOULDBLOCK", Const, 0},
+ {"EXDEV", Const, 0},
+ {"EXFULL", Const, 0},
+ {"EXTA", Const, 0},
+ {"EXTB", Const, 0},
+ {"EXTPROC", Const, 0},
+ {"Environ", Func, 0},
+ {"EpollCreate", Func, 0},
+ {"EpollCreate1", Func, 0},
+ {"EpollCtl", Func, 0},
+ {"EpollEvent", Type, 0},
+ {"EpollEvent.Events", Field, 0},
+ {"EpollEvent.Fd", Field, 0},
+ {"EpollEvent.Pad", Field, 0},
+ {"EpollEvent.PadFd", Field, 0},
+ {"EpollWait", Func, 0},
+ {"Errno", Type, 0},
+ {"EscapeArg", Func, 0},
+ {"Exchangedata", Func, 0},
+ {"Exec", Func, 0},
+ {"Exit", Func, 0},
+ {"ExitProcess", Func, 0},
+ {"FD_CLOEXEC", Const, 0},
+ {"FD_SETSIZE", Const, 0},
+ {"FILE_ACTION_ADDED", Const, 0},
+ {"FILE_ACTION_MODIFIED", Const, 0},
+ {"FILE_ACTION_REMOVED", Const, 0},
+ {"FILE_ACTION_RENAMED_NEW_NAME", Const, 0},
+ {"FILE_ACTION_RENAMED_OLD_NAME", Const, 0},
+ {"FILE_APPEND_DATA", Const, 0},
+ {"FILE_ATTRIBUTE_ARCHIVE", Const, 0},
+ {"FILE_ATTRIBUTE_DIRECTORY", Const, 0},
+ {"FILE_ATTRIBUTE_HIDDEN", Const, 0},
+ {"FILE_ATTRIBUTE_NORMAL", Const, 0},
+ {"FILE_ATTRIBUTE_READONLY", Const, 0},
+ {"FILE_ATTRIBUTE_REPARSE_POINT", Const, 4},
+ {"FILE_ATTRIBUTE_SYSTEM", Const, 0},
+ {"FILE_BEGIN", Const, 0},
+ {"FILE_CURRENT", Const, 0},
+ {"FILE_END", Const, 0},
+ {"FILE_FLAG_BACKUP_SEMANTICS", Const, 0},
+ {"FILE_FLAG_OPEN_REPARSE_POINT", Const, 4},
+ {"FILE_FLAG_OVERLAPPED", Const, 0},
+ {"FILE_LIST_DIRECTORY", Const, 0},
+ {"FILE_MAP_COPY", Const, 0},
+ {"FILE_MAP_EXECUTE", Const, 0},
+ {"FILE_MAP_READ", Const, 0},
+ {"FILE_MAP_WRITE", Const, 0},
+ {"FILE_NOTIFY_CHANGE_ATTRIBUTES", Const, 0},
+ {"FILE_NOTIFY_CHANGE_CREATION", Const, 0},
+ {"FILE_NOTIFY_CHANGE_DIR_NAME", Const, 0},
+ {"FILE_NOTIFY_CHANGE_FILE_NAME", Const, 0},
+ {"FILE_NOTIFY_CHANGE_LAST_ACCESS", Const, 0},
+ {"FILE_NOTIFY_CHANGE_LAST_WRITE", Const, 0},
+ {"FILE_NOTIFY_CHANGE_SIZE", Const, 0},
+ {"FILE_SHARE_DELETE", Const, 0},
+ {"FILE_SHARE_READ", Const, 0},
+ {"FILE_SHARE_WRITE", Const, 0},
+ {"FILE_SKIP_COMPLETION_PORT_ON_SUCCESS", Const, 2},
+ {"FILE_SKIP_SET_EVENT_ON_HANDLE", Const, 2},
+ {"FILE_TYPE_CHAR", Const, 0},
+ {"FILE_TYPE_DISK", Const, 0},
+ {"FILE_TYPE_PIPE", Const, 0},
+ {"FILE_TYPE_REMOTE", Const, 0},
+ {"FILE_TYPE_UNKNOWN", Const, 0},
+ {"FILE_WRITE_ATTRIBUTES", Const, 0},
+ {"FLUSHO", Const, 0},
+ {"FORMAT_MESSAGE_ALLOCATE_BUFFER", Const, 0},
+ {"FORMAT_MESSAGE_ARGUMENT_ARRAY", Const, 0},
+ {"FORMAT_MESSAGE_FROM_HMODULE", Const, 0},
+ {"FORMAT_MESSAGE_FROM_STRING", Const, 0},
+ {"FORMAT_MESSAGE_FROM_SYSTEM", Const, 0},
+ {"FORMAT_MESSAGE_IGNORE_INSERTS", Const, 0},
+ {"FORMAT_MESSAGE_MAX_WIDTH_MASK", Const, 0},
+ {"FSCTL_GET_REPARSE_POINT", Const, 4},
+ {"F_ADDFILESIGS", Const, 0},
+ {"F_ADDSIGS", Const, 0},
+ {"F_ALLOCATEALL", Const, 0},
+ {"F_ALLOCATECONTIG", Const, 0},
+ {"F_CANCEL", Const, 0},
+ {"F_CHKCLEAN", Const, 0},
+ {"F_CLOSEM", Const, 1},
+ {"F_DUP2FD", Const, 0},
+ {"F_DUP2FD_CLOEXEC", Const, 1},
+ {"F_DUPFD", Const, 0},
+ {"F_DUPFD_CLOEXEC", Const, 0},
+ {"F_EXLCK", Const, 0},
+ {"F_FINDSIGS", Const, 16},
+ {"F_FLUSH_DATA", Const, 0},
+ {"F_FREEZE_FS", Const, 0},
+ {"F_FSCTL", Const, 1},
+ {"F_FSDIRMASK", Const, 1},
+ {"F_FSIN", Const, 1},
+ {"F_FSINOUT", Const, 1},
+ {"F_FSOUT", Const, 1},
+ {"F_FSPRIV", Const, 1},
+ {"F_FSVOID", Const, 1},
+ {"F_FULLFSYNC", Const, 0},
+ {"F_GETCODEDIR", Const, 16},
+ {"F_GETFD", Const, 0},
+ {"F_GETFL", Const, 0},
+ {"F_GETLEASE", Const, 0},
+ {"F_GETLK", Const, 0},
+ {"F_GETLK64", Const, 0},
+ {"F_GETLKPID", Const, 0},
+ {"F_GETNOSIGPIPE", Const, 0},
+ {"F_GETOWN", Const, 0},
+ {"F_GETOWN_EX", Const, 0},
+ {"F_GETPATH", Const, 0},
+ {"F_GETPATH_MTMINFO", Const, 0},
+ {"F_GETPIPE_SZ", Const, 0},
+ {"F_GETPROTECTIONCLASS", Const, 0},
+ {"F_GETPROTECTIONLEVEL", Const, 16},
+ {"F_GETSIG", Const, 0},
+ {"F_GLOBAL_NOCACHE", Const, 0},
+ {"F_LOCK", Const, 0},
+ {"F_LOG2PHYS", Const, 0},
+ {"F_LOG2PHYS_EXT", Const, 0},
+ {"F_MARKDEPENDENCY", Const, 0},
+ {"F_MAXFD", Const, 1},
+ {"F_NOCACHE", Const, 0},
+ {"F_NODIRECT", Const, 0},
+ {"F_NOTIFY", Const, 0},
+ {"F_OGETLK", Const, 0},
+ {"F_OK", Const, 0},
+ {"F_OSETLK", Const, 0},
+ {"F_OSETLKW", Const, 0},
+ {"F_PARAM_MASK", Const, 1},
+ {"F_PARAM_MAX", Const, 1},
+ {"F_PATHPKG_CHECK", Const, 0},
+ {"F_PEOFPOSMODE", Const, 0},
+ {"F_PREALLOCATE", Const, 0},
+ {"F_RDADVISE", Const, 0},
+ {"F_RDAHEAD", Const, 0},
+ {"F_RDLCK", Const, 0},
+ {"F_READAHEAD", Const, 0},
+ {"F_READBOOTSTRAP", Const, 0},
+ {"F_SETBACKINGSTORE", Const, 0},
+ {"F_SETFD", Const, 0},
+ {"F_SETFL", Const, 0},
+ {"F_SETLEASE", Const, 0},
+ {"F_SETLK", Const, 0},
+ {"F_SETLK64", Const, 0},
+ {"F_SETLKW", Const, 0},
+ {"F_SETLKW64", Const, 0},
+ {"F_SETLKWTIMEOUT", Const, 16},
+ {"F_SETLK_REMOTE", Const, 0},
+ {"F_SETNOSIGPIPE", Const, 0},
+ {"F_SETOWN", Const, 0},
+ {"F_SETOWN_EX", Const, 0},
+ {"F_SETPIPE_SZ", Const, 0},
+ {"F_SETPROTECTIONCLASS", Const, 0},
+ {"F_SETSIG", Const, 0},
+ {"F_SETSIZE", Const, 0},
+ {"F_SHLCK", Const, 0},
+ {"F_SINGLE_WRITER", Const, 16},
+ {"F_TEST", Const, 0},
+ {"F_THAW_FS", Const, 0},
+ {"F_TLOCK", Const, 0},
+ {"F_TRANSCODEKEY", Const, 16},
+ {"F_ULOCK", Const, 0},
+ {"F_UNLCK", Const, 0},
+ {"F_UNLCKSYS", Const, 0},
+ {"F_VOLPOSMODE", Const, 0},
+ {"F_WRITEBOOTSTRAP", Const, 0},
+ {"F_WRLCK", Const, 0},
+ {"Faccessat", Func, 0},
+ {"Fallocate", Func, 0},
+ {"Fbootstraptransfer_t", Type, 0},
+ {"Fbootstraptransfer_t.Buffer", Field, 0},
+ {"Fbootstraptransfer_t.Length", Field, 0},
+ {"Fbootstraptransfer_t.Offset", Field, 0},
+ {"Fchdir", Func, 0},
+ {"Fchflags", Func, 0},
+ {"Fchmod", Func, 0},
+ {"Fchmodat", Func, 0},
+ {"Fchown", Func, 0},
+ {"Fchownat", Func, 0},
+ {"FcntlFlock", Func, 3},
+ {"FdSet", Type, 0},
+ {"FdSet.Bits", Field, 0},
+ {"FdSet.X__fds_bits", Field, 0},
+ {"Fdatasync", Func, 0},
+ {"FileNotifyInformation", Type, 0},
+ {"FileNotifyInformation.Action", Field, 0},
+ {"FileNotifyInformation.FileName", Field, 0},
+ {"FileNotifyInformation.FileNameLength", Field, 0},
+ {"FileNotifyInformation.NextEntryOffset", Field, 0},
+ {"Filetime", Type, 0},
+ {"Filetime.HighDateTime", Field, 0},
+ {"Filetime.LowDateTime", Field, 0},
+ {"FindClose", Func, 0},
+ {"FindFirstFile", Func, 0},
+ {"FindNextFile", Func, 0},
+ {"Flock", Func, 0},
+ {"Flock_t", Type, 0},
+ {"Flock_t.Len", Field, 0},
+ {"Flock_t.Pad_cgo_0", Field, 0},
+ {"Flock_t.Pad_cgo_1", Field, 3},
+ {"Flock_t.Pid", Field, 0},
+ {"Flock_t.Start", Field, 0},
+ {"Flock_t.Sysid", Field, 0},
+ {"Flock_t.Type", Field, 0},
+ {"Flock_t.Whence", Field, 0},
+ {"FlushBpf", Func, 0},
+ {"FlushFileBuffers", Func, 0},
+ {"FlushViewOfFile", Func, 0},
+ {"ForkExec", Func, 0},
+ {"ForkLock", Var, 0},
+ {"FormatMessage", Func, 0},
+ {"Fpathconf", Func, 0},
+ {"FreeAddrInfoW", Func, 1},
+ {"FreeEnvironmentStrings", Func, 0},
+ {"FreeLibrary", Func, 0},
+ {"Fsid", Type, 0},
+ {"Fsid.Val", Field, 0},
+ {"Fsid.X__fsid_val", Field, 2},
+ {"Fsid.X__val", Field, 0},
+ {"Fstat", Func, 0},
+ {"Fstatat", Func, 12},
+ {"Fstatfs", Func, 0},
+ {"Fstore_t", Type, 0},
+ {"Fstore_t.Bytesalloc", Field, 0},
+ {"Fstore_t.Flags", Field, 0},
+ {"Fstore_t.Length", Field, 0},
+ {"Fstore_t.Offset", Field, 0},
+ {"Fstore_t.Posmode", Field, 0},
+ {"Fsync", Func, 0},
+ {"Ftruncate", Func, 0},
+ {"FullPath", Func, 4},
+ {"Futimes", Func, 0},
+ {"Futimesat", Func, 0},
+ {"GENERIC_ALL", Const, 0},
+ {"GENERIC_EXECUTE", Const, 0},
+ {"GENERIC_READ", Const, 0},
+ {"GENERIC_WRITE", Const, 0},
+ {"GUID", Type, 1},
+ {"GUID.Data1", Field, 1},
+ {"GUID.Data2", Field, 1},
+ {"GUID.Data3", Field, 1},
+ {"GUID.Data4", Field, 1},
+ {"GetAcceptExSockaddrs", Func, 0},
+ {"GetAdaptersInfo", Func, 0},
+ {"GetAddrInfoW", Func, 1},
+ {"GetCommandLine", Func, 0},
+ {"GetComputerName", Func, 0},
+ {"GetConsoleMode", Func, 1},
+ {"GetCurrentDirectory", Func, 0},
+ {"GetCurrentProcess", Func, 0},
+ {"GetEnvironmentStrings", Func, 0},
+ {"GetEnvironmentVariable", Func, 0},
+ {"GetExitCodeProcess", Func, 0},
+ {"GetFileAttributes", Func, 0},
+ {"GetFileAttributesEx", Func, 0},
+ {"GetFileExInfoStandard", Const, 0},
+ {"GetFileExMaxInfoLevel", Const, 0},
+ {"GetFileInformationByHandle", Func, 0},
+ {"GetFileType", Func, 0},
+ {"GetFullPathName", Func, 0},
+ {"GetHostByName", Func, 0},
+ {"GetIfEntry", Func, 0},
+ {"GetLastError", Func, 0},
+ {"GetLengthSid", Func, 0},
+ {"GetLongPathName", Func, 0},
+ {"GetProcAddress", Func, 0},
+ {"GetProcessTimes", Func, 0},
+ {"GetProtoByName", Func, 0},
+ {"GetQueuedCompletionStatus", Func, 0},
+ {"GetServByName", Func, 0},
+ {"GetShortPathName", Func, 0},
+ {"GetStartupInfo", Func, 0},
+ {"GetStdHandle", Func, 0},
+ {"GetSystemTimeAsFileTime", Func, 0},
+ {"GetTempPath", Func, 0},
+ {"GetTimeZoneInformation", Func, 0},
+ {"GetTokenInformation", Func, 0},
+ {"GetUserNameEx", Func, 0},
+ {"GetUserProfileDirectory", Func, 0},
+ {"GetVersion", Func, 0},
+ {"Getcwd", Func, 0},
+ {"Getdents", Func, 0},
+ {"Getdirentries", Func, 0},
+ {"Getdtablesize", Func, 0},
+ {"Getegid", Func, 0},
+ {"Getenv", Func, 0},
+ {"Geteuid", Func, 0},
+ {"Getfsstat", Func, 0},
+ {"Getgid", Func, 0},
+ {"Getgroups", Func, 0},
+ {"Getpagesize", Func, 0},
+ {"Getpeername", Func, 0},
+ {"Getpgid", Func, 0},
+ {"Getpgrp", Func, 0},
+ {"Getpid", Func, 0},
+ {"Getppid", Func, 0},
+ {"Getpriority", Func, 0},
+ {"Getrlimit", Func, 0},
+ {"Getrusage", Func, 0},
+ {"Getsid", Func, 0},
+ {"Getsockname", Func, 0},
+ {"Getsockopt", Func, 1},
+ {"GetsockoptByte", Func, 0},
+ {"GetsockoptICMPv6Filter", Func, 2},
+ {"GetsockoptIPMreq", Func, 0},
+ {"GetsockoptIPMreqn", Func, 0},
+ {"GetsockoptIPv6MTUInfo", Func, 2},
+ {"GetsockoptIPv6Mreq", Func, 0},
+ {"GetsockoptInet4Addr", Func, 0},
+ {"GetsockoptInt", Func, 0},
+ {"GetsockoptUcred", Func, 1},
+ {"Gettid", Func, 0},
+ {"Gettimeofday", Func, 0},
+ {"Getuid", Func, 0},
+ {"Getwd", Func, 0},
+ {"Getxattr", Func, 1},
+ {"HANDLE_FLAG_INHERIT", Const, 0},
+ {"HKEY_CLASSES_ROOT", Const, 0},
+ {"HKEY_CURRENT_CONFIG", Const, 0},
+ {"HKEY_CURRENT_USER", Const, 0},
+ {"HKEY_DYN_DATA", Const, 0},
+ {"HKEY_LOCAL_MACHINE", Const, 0},
+ {"HKEY_PERFORMANCE_DATA", Const, 0},
+ {"HKEY_USERS", Const, 0},
+ {"HUPCL", Const, 0},
+ {"Handle", Type, 0},
+ {"Hostent", Type, 0},
+ {"Hostent.AddrList", Field, 0},
+ {"Hostent.AddrType", Field, 0},
+ {"Hostent.Aliases", Field, 0},
+ {"Hostent.Length", Field, 0},
+ {"Hostent.Name", Field, 0},
+ {"ICANON", Const, 0},
+ {"ICMP6_FILTER", Const, 2},
+ {"ICMPV6_FILTER", Const, 2},
+ {"ICMPv6Filter", Type, 2},
+ {"ICMPv6Filter.Data", Field, 2},
+ {"ICMPv6Filter.Filt", Field, 2},
+ {"ICRNL", Const, 0},
+ {"IEXTEN", Const, 0},
+ {"IFAN_ARRIVAL", Const, 1},
+ {"IFAN_DEPARTURE", Const, 1},
+ {"IFA_ADDRESS", Const, 0},
+ {"IFA_ANYCAST", Const, 0},
+ {"IFA_BROADCAST", Const, 0},
+ {"IFA_CACHEINFO", Const, 0},
+ {"IFA_F_DADFAILED", Const, 0},
+ {"IFA_F_DEPRECATED", Const, 0},
+ {"IFA_F_HOMEADDRESS", Const, 0},
+ {"IFA_F_NODAD", Const, 0},
+ {"IFA_F_OPTIMISTIC", Const, 0},
+ {"IFA_F_PERMANENT", Const, 0},
+ {"IFA_F_SECONDARY", Const, 0},
+ {"IFA_F_TEMPORARY", Const, 0},
+ {"IFA_F_TENTATIVE", Const, 0},
+ {"IFA_LABEL", Const, 0},
+ {"IFA_LOCAL", Const, 0},
+ {"IFA_MAX", Const, 0},
+ {"IFA_MULTICAST", Const, 0},
+ {"IFA_ROUTE", Const, 1},
+ {"IFA_UNSPEC", Const, 0},
+ {"IFF_ALLMULTI", Const, 0},
+ {"IFF_ALTPHYS", Const, 0},
+ {"IFF_AUTOMEDIA", Const, 0},
+ {"IFF_BROADCAST", Const, 0},
+ {"IFF_CANTCHANGE", Const, 0},
+ {"IFF_CANTCONFIG", Const, 1},
+ {"IFF_DEBUG", Const, 0},
+ {"IFF_DRV_OACTIVE", Const, 0},
+ {"IFF_DRV_RUNNING", Const, 0},
+ {"IFF_DYING", Const, 0},
+ {"IFF_DYNAMIC", Const, 0},
+ {"IFF_LINK0", Const, 0},
+ {"IFF_LINK1", Const, 0},
+ {"IFF_LINK2", Const, 0},
+ {"IFF_LOOPBACK", Const, 0},
+ {"IFF_MASTER", Const, 0},
+ {"IFF_MONITOR", Const, 0},
+ {"IFF_MULTICAST", Const, 0},
+ {"IFF_NOARP", Const, 0},
+ {"IFF_NOTRAILERS", Const, 0},
+ {"IFF_NO_PI", Const, 0},
+ {"IFF_OACTIVE", Const, 0},
+ {"IFF_ONE_QUEUE", Const, 0},
+ {"IFF_POINTOPOINT", Const, 0},
+ {"IFF_POINTTOPOINT", Const, 0},
+ {"IFF_PORTSEL", Const, 0},
+ {"IFF_PPROMISC", Const, 0},
+ {"IFF_PROMISC", Const, 0},
+ {"IFF_RENAMING", Const, 0},
+ {"IFF_RUNNING", Const, 0},
+ {"IFF_SIMPLEX", Const, 0},
+ {"IFF_SLAVE", Const, 0},
+ {"IFF_SMART", Const, 0},
+ {"IFF_STATICARP", Const, 0},
+ {"IFF_TAP", Const, 0},
+ {"IFF_TUN", Const, 0},
+ {"IFF_TUN_EXCL", Const, 0},
+ {"IFF_UP", Const, 0},
+ {"IFF_VNET_HDR", Const, 0},
+ {"IFLA_ADDRESS", Const, 0},
+ {"IFLA_BROADCAST", Const, 0},
+ {"IFLA_COST", Const, 0},
+ {"IFLA_IFALIAS", Const, 0},
+ {"IFLA_IFNAME", Const, 0},
+ {"IFLA_LINK", Const, 0},
+ {"IFLA_LINKINFO", Const, 0},
+ {"IFLA_LINKMODE", Const, 0},
+ {"IFLA_MAP", Const, 0},
+ {"IFLA_MASTER", Const, 0},
+ {"IFLA_MAX", Const, 0},
+ {"IFLA_MTU", Const, 0},
+ {"IFLA_NET_NS_PID", Const, 0},
+ {"IFLA_OPERSTATE", Const, 0},
+ {"IFLA_PRIORITY", Const, 0},
+ {"IFLA_PROTINFO", Const, 0},
+ {"IFLA_QDISC", Const, 0},
+ {"IFLA_STATS", Const, 0},
+ {"IFLA_TXQLEN", Const, 0},
+ {"IFLA_UNSPEC", Const, 0},
+ {"IFLA_WEIGHT", Const, 0},
+ {"IFLA_WIRELESS", Const, 0},
+ {"IFNAMSIZ", Const, 0},
+ {"IFT_1822", Const, 0},
+ {"IFT_A12MPPSWITCH", Const, 0},
+ {"IFT_AAL2", Const, 0},
+ {"IFT_AAL5", Const, 0},
+ {"IFT_ADSL", Const, 0},
+ {"IFT_AFLANE8023", Const, 0},
+ {"IFT_AFLANE8025", Const, 0},
+ {"IFT_ARAP", Const, 0},
+ {"IFT_ARCNET", Const, 0},
+ {"IFT_ARCNETPLUS", Const, 0},
+ {"IFT_ASYNC", Const, 0},
+ {"IFT_ATM", Const, 0},
+ {"IFT_ATMDXI", Const, 0},
+ {"IFT_ATMFUNI", Const, 0},
+ {"IFT_ATMIMA", Const, 0},
+ {"IFT_ATMLOGICAL", Const, 0},
+ {"IFT_ATMRADIO", Const, 0},
+ {"IFT_ATMSUBINTERFACE", Const, 0},
+ {"IFT_ATMVCIENDPT", Const, 0},
+ {"IFT_ATMVIRTUAL", Const, 0},
+ {"IFT_BGPPOLICYACCOUNTING", Const, 0},
+ {"IFT_BLUETOOTH", Const, 1},
+ {"IFT_BRIDGE", Const, 0},
+ {"IFT_BSC", Const, 0},
+ {"IFT_CARP", Const, 0},
+ {"IFT_CCTEMUL", Const, 0},
+ {"IFT_CELLULAR", Const, 0},
+ {"IFT_CEPT", Const, 0},
+ {"IFT_CES", Const, 0},
+ {"IFT_CHANNEL", Const, 0},
+ {"IFT_CNR", Const, 0},
+ {"IFT_COFFEE", Const, 0},
+ {"IFT_COMPOSITELINK", Const, 0},
+ {"IFT_DCN", Const, 0},
+ {"IFT_DIGITALPOWERLINE", Const, 0},
+ {"IFT_DIGITALWRAPPEROVERHEADCHANNEL", Const, 0},
+ {"IFT_DLSW", Const, 0},
+ {"IFT_DOCSCABLEDOWNSTREAM", Const, 0},
+ {"IFT_DOCSCABLEMACLAYER", Const, 0},
+ {"IFT_DOCSCABLEUPSTREAM", Const, 0},
+ {"IFT_DOCSCABLEUPSTREAMCHANNEL", Const, 1},
+ {"IFT_DS0", Const, 0},
+ {"IFT_DS0BUNDLE", Const, 0},
+ {"IFT_DS1FDL", Const, 0},
+ {"IFT_DS3", Const, 0},
+ {"IFT_DTM", Const, 0},
+ {"IFT_DUMMY", Const, 1},
+ {"IFT_DVBASILN", Const, 0},
+ {"IFT_DVBASIOUT", Const, 0},
+ {"IFT_DVBRCCDOWNSTREAM", Const, 0},
+ {"IFT_DVBRCCMACLAYER", Const, 0},
+ {"IFT_DVBRCCUPSTREAM", Const, 0},
+ {"IFT_ECONET", Const, 1},
+ {"IFT_ENC", Const, 0},
+ {"IFT_EON", Const, 0},
+ {"IFT_EPLRS", Const, 0},
+ {"IFT_ESCON", Const, 0},
+ {"IFT_ETHER", Const, 0},
+ {"IFT_FAITH", Const, 0},
+ {"IFT_FAST", Const, 0},
+ {"IFT_FASTETHER", Const, 0},
+ {"IFT_FASTETHERFX", Const, 0},
+ {"IFT_FDDI", Const, 0},
+ {"IFT_FIBRECHANNEL", Const, 0},
+ {"IFT_FRAMERELAYINTERCONNECT", Const, 0},
+ {"IFT_FRAMERELAYMPI", Const, 0},
+ {"IFT_FRDLCIENDPT", Const, 0},
+ {"IFT_FRELAY", Const, 0},
+ {"IFT_FRELAYDCE", Const, 0},
+ {"IFT_FRF16MFRBUNDLE", Const, 0},
+ {"IFT_FRFORWARD", Const, 0},
+ {"IFT_G703AT2MB", Const, 0},
+ {"IFT_G703AT64K", Const, 0},
+ {"IFT_GIF", Const, 0},
+ {"IFT_GIGABITETHERNET", Const, 0},
+ {"IFT_GR303IDT", Const, 0},
+ {"IFT_GR303RDT", Const, 0},
+ {"IFT_H323GATEKEEPER", Const, 0},
+ {"IFT_H323PROXY", Const, 0},
+ {"IFT_HDH1822", Const, 0},
+ {"IFT_HDLC", Const, 0},
+ {"IFT_HDSL2", Const, 0},
+ {"IFT_HIPERLAN2", Const, 0},
+ {"IFT_HIPPI", Const, 0},
+ {"IFT_HIPPIINTERFACE", Const, 0},
+ {"IFT_HOSTPAD", Const, 0},
+ {"IFT_HSSI", Const, 0},
+ {"IFT_HY", Const, 0},
+ {"IFT_IBM370PARCHAN", Const, 0},
+ {"IFT_IDSL", Const, 0},
+ {"IFT_IEEE1394", Const, 0},
+ {"IFT_IEEE80211", Const, 0},
+ {"IFT_IEEE80212", Const, 0},
+ {"IFT_IEEE8023ADLAG", Const, 0},
+ {"IFT_IFGSN", Const, 0},
+ {"IFT_IMT", Const, 0},
+ {"IFT_INFINIBAND", Const, 1},
+ {"IFT_INTERLEAVE", Const, 0},
+ {"IFT_IP", Const, 0},
+ {"IFT_IPFORWARD", Const, 0},
+ {"IFT_IPOVERATM", Const, 0},
+ {"IFT_IPOVERCDLC", Const, 0},
+ {"IFT_IPOVERCLAW", Const, 0},
+ {"IFT_IPSWITCH", Const, 0},
+ {"IFT_IPXIP", Const, 0},
+ {"IFT_ISDN", Const, 0},
+ {"IFT_ISDNBASIC", Const, 0},
+ {"IFT_ISDNPRIMARY", Const, 0},
+ {"IFT_ISDNS", Const, 0},
+ {"IFT_ISDNU", Const, 0},
+ {"IFT_ISO88022LLC", Const, 0},
+ {"IFT_ISO88023", Const, 0},
+ {"IFT_ISO88024", Const, 0},
+ {"IFT_ISO88025", Const, 0},
+ {"IFT_ISO88025CRFPINT", Const, 0},
+ {"IFT_ISO88025DTR", Const, 0},
+ {"IFT_ISO88025FIBER", Const, 0},
+ {"IFT_ISO88026", Const, 0},
+ {"IFT_ISUP", Const, 0},
+ {"IFT_L2VLAN", Const, 0},
+ {"IFT_L3IPVLAN", Const, 0},
+ {"IFT_L3IPXVLAN", Const, 0},
+ {"IFT_LAPB", Const, 0},
+ {"IFT_LAPD", Const, 0},
+ {"IFT_LAPF", Const, 0},
+ {"IFT_LINEGROUP", Const, 1},
+ {"IFT_LOCALTALK", Const, 0},
+ {"IFT_LOOP", Const, 0},
+ {"IFT_MEDIAMAILOVERIP", Const, 0},
+ {"IFT_MFSIGLINK", Const, 0},
+ {"IFT_MIOX25", Const, 0},
+ {"IFT_MODEM", Const, 0},
+ {"IFT_MPC", Const, 0},
+ {"IFT_MPLS", Const, 0},
+ {"IFT_MPLSTUNNEL", Const, 0},
+ {"IFT_MSDSL", Const, 0},
+ {"IFT_MVL", Const, 0},
+ {"IFT_MYRINET", Const, 0},
+ {"IFT_NFAS", Const, 0},
+ {"IFT_NSIP", Const, 0},
+ {"IFT_OPTICALCHANNEL", Const, 0},
+ {"IFT_OPTICALTRANSPORT", Const, 0},
+ {"IFT_OTHER", Const, 0},
+ {"IFT_P10", Const, 0},
+ {"IFT_P80", Const, 0},
+ {"IFT_PARA", Const, 0},
+ {"IFT_PDP", Const, 0},
+ {"IFT_PFLOG", Const, 0},
+ {"IFT_PFLOW", Const, 1},
+ {"IFT_PFSYNC", Const, 0},
+ {"IFT_PLC", Const, 0},
+ {"IFT_PON155", Const, 1},
+ {"IFT_PON622", Const, 1},
+ {"IFT_POS", Const, 0},
+ {"IFT_PPP", Const, 0},
+ {"IFT_PPPMULTILINKBUNDLE", Const, 0},
+ {"IFT_PROPATM", Const, 1},
+ {"IFT_PROPBWAP2MP", Const, 0},
+ {"IFT_PROPCNLS", Const, 0},
+ {"IFT_PROPDOCSWIRELESSDOWNSTREAM", Const, 0},
+ {"IFT_PROPDOCSWIRELESSMACLAYER", Const, 0},
+ {"IFT_PROPDOCSWIRELESSUPSTREAM", Const, 0},
+ {"IFT_PROPMUX", Const, 0},
+ {"IFT_PROPVIRTUAL", Const, 0},
+ {"IFT_PROPWIRELESSP2P", Const, 0},
+ {"IFT_PTPSERIAL", Const, 0},
+ {"IFT_PVC", Const, 0},
+ {"IFT_Q2931", Const, 1},
+ {"IFT_QLLC", Const, 0},
+ {"IFT_RADIOMAC", Const, 0},
+ {"IFT_RADSL", Const, 0},
+ {"IFT_REACHDSL", Const, 0},
+ {"IFT_RFC1483", Const, 0},
+ {"IFT_RS232", Const, 0},
+ {"IFT_RSRB", Const, 0},
+ {"IFT_SDLC", Const, 0},
+ {"IFT_SDSL", Const, 0},
+ {"IFT_SHDSL", Const, 0},
+ {"IFT_SIP", Const, 0},
+ {"IFT_SIPSIG", Const, 1},
+ {"IFT_SIPTG", Const, 1},
+ {"IFT_SLIP", Const, 0},
+ {"IFT_SMDSDXI", Const, 0},
+ {"IFT_SMDSICIP", Const, 0},
+ {"IFT_SONET", Const, 0},
+ {"IFT_SONETOVERHEADCHANNEL", Const, 0},
+ {"IFT_SONETPATH", Const, 0},
+ {"IFT_SONETVT", Const, 0},
+ {"IFT_SRP", Const, 0},
+ {"IFT_SS7SIGLINK", Const, 0},
+ {"IFT_STACKTOSTACK", Const, 0},
+ {"IFT_STARLAN", Const, 0},
+ {"IFT_STF", Const, 0},
+ {"IFT_T1", Const, 0},
+ {"IFT_TDLC", Const, 0},
+ {"IFT_TELINK", Const, 1},
+ {"IFT_TERMPAD", Const, 0},
+ {"IFT_TR008", Const, 0},
+ {"IFT_TRANSPHDLC", Const, 0},
+ {"IFT_TUNNEL", Const, 0},
+ {"IFT_ULTRA", Const, 0},
+ {"IFT_USB", Const, 0},
+ {"IFT_V11", Const, 0},
+ {"IFT_V35", Const, 0},
+ {"IFT_V36", Const, 0},
+ {"IFT_V37", Const, 0},
+ {"IFT_VDSL", Const, 0},
+ {"IFT_VIRTUALIPADDRESS", Const, 0},
+ {"IFT_VIRTUALTG", Const, 1},
+ {"IFT_VOICEDID", Const, 1},
+ {"IFT_VOICEEM", Const, 0},
+ {"IFT_VOICEEMFGD", Const, 1},
+ {"IFT_VOICEENCAP", Const, 0},
+ {"IFT_VOICEFGDEANA", Const, 1},
+ {"IFT_VOICEFXO", Const, 0},
+ {"IFT_VOICEFXS", Const, 0},
+ {"IFT_VOICEOVERATM", Const, 0},
+ {"IFT_VOICEOVERCABLE", Const, 1},
+ {"IFT_VOICEOVERFRAMERELAY", Const, 0},
+ {"IFT_VOICEOVERIP", Const, 0},
+ {"IFT_X213", Const, 0},
+ {"IFT_X25", Const, 0},
+ {"IFT_X25DDN", Const, 0},
+ {"IFT_X25HUNTGROUP", Const, 0},
+ {"IFT_X25MLP", Const, 0},
+ {"IFT_X25PLE", Const, 0},
+ {"IFT_XETHER", Const, 0},
+ {"IGNBRK", Const, 0},
+ {"IGNCR", Const, 0},
+ {"IGNORE", Const, 0},
+ {"IGNPAR", Const, 0},
+ {"IMAXBEL", Const, 0},
+ {"INFINITE", Const, 0},
+ {"INLCR", Const, 0},
+ {"INPCK", Const, 0},
+ {"INVALID_FILE_ATTRIBUTES", Const, 0},
+ {"IN_ACCESS", Const, 0},
+ {"IN_ALL_EVENTS", Const, 0},
+ {"IN_ATTRIB", Const, 0},
+ {"IN_CLASSA_HOST", Const, 0},
+ {"IN_CLASSA_MAX", Const, 0},
+ {"IN_CLASSA_NET", Const, 0},
+ {"IN_CLASSA_NSHIFT", Const, 0},
+ {"IN_CLASSB_HOST", Const, 0},
+ {"IN_CLASSB_MAX", Const, 0},
+ {"IN_CLASSB_NET", Const, 0},
+ {"IN_CLASSB_NSHIFT", Const, 0},
+ {"IN_CLASSC_HOST", Const, 0},
+ {"IN_CLASSC_NET", Const, 0},
+ {"IN_CLASSC_NSHIFT", Const, 0},
+ {"IN_CLASSD_HOST", Const, 0},
+ {"IN_CLASSD_NET", Const, 0},
+ {"IN_CLASSD_NSHIFT", Const, 0},
+ {"IN_CLOEXEC", Const, 0},
+ {"IN_CLOSE", Const, 0},
+ {"IN_CLOSE_NOWRITE", Const, 0},
+ {"IN_CLOSE_WRITE", Const, 0},
+ {"IN_CREATE", Const, 0},
+ {"IN_DELETE", Const, 0},
+ {"IN_DELETE_SELF", Const, 0},
+ {"IN_DONT_FOLLOW", Const, 0},
+ {"IN_EXCL_UNLINK", Const, 0},
+ {"IN_IGNORED", Const, 0},
+ {"IN_ISDIR", Const, 0},
+ {"IN_LINKLOCALNETNUM", Const, 0},
+ {"IN_LOOPBACKNET", Const, 0},
+ {"IN_MASK_ADD", Const, 0},
+ {"IN_MODIFY", Const, 0},
+ {"IN_MOVE", Const, 0},
+ {"IN_MOVED_FROM", Const, 0},
+ {"IN_MOVED_TO", Const, 0},
+ {"IN_MOVE_SELF", Const, 0},
+ {"IN_NONBLOCK", Const, 0},
+ {"IN_ONESHOT", Const, 0},
+ {"IN_ONLYDIR", Const, 0},
+ {"IN_OPEN", Const, 0},
+ {"IN_Q_OVERFLOW", Const, 0},
+ {"IN_RFC3021_HOST", Const, 1},
+ {"IN_RFC3021_MASK", Const, 1},
+ {"IN_RFC3021_NET", Const, 1},
+ {"IN_RFC3021_NSHIFT", Const, 1},
+ {"IN_UNMOUNT", Const, 0},
+ {"IOC_IN", Const, 1},
+ {"IOC_INOUT", Const, 1},
+ {"IOC_OUT", Const, 1},
+ {"IOC_VENDOR", Const, 3},
+ {"IOC_WS2", Const, 1},
+ {"IO_REPARSE_TAG_SYMLINK", Const, 4},
+ {"IPMreq", Type, 0},
+ {"IPMreq.Interface", Field, 0},
+ {"IPMreq.Multiaddr", Field, 0},
+ {"IPMreqn", Type, 0},
+ {"IPMreqn.Address", Field, 0},
+ {"IPMreqn.Ifindex", Field, 0},
+ {"IPMreqn.Multiaddr", Field, 0},
+ {"IPPROTO_3PC", Const, 0},
+ {"IPPROTO_ADFS", Const, 0},
+ {"IPPROTO_AH", Const, 0},
+ {"IPPROTO_AHIP", Const, 0},
+ {"IPPROTO_APES", Const, 0},
+ {"IPPROTO_ARGUS", Const, 0},
+ {"IPPROTO_AX25", Const, 0},
+ {"IPPROTO_BHA", Const, 0},
+ {"IPPROTO_BLT", Const, 0},
+ {"IPPROTO_BRSATMON", Const, 0},
+ {"IPPROTO_CARP", Const, 0},
+ {"IPPROTO_CFTP", Const, 0},
+ {"IPPROTO_CHAOS", Const, 0},
+ {"IPPROTO_CMTP", Const, 0},
+ {"IPPROTO_COMP", Const, 0},
+ {"IPPROTO_CPHB", Const, 0},
+ {"IPPROTO_CPNX", Const, 0},
+ {"IPPROTO_DCCP", Const, 0},
+ {"IPPROTO_DDP", Const, 0},
+ {"IPPROTO_DGP", Const, 0},
+ {"IPPROTO_DIVERT", Const, 0},
+ {"IPPROTO_DIVERT_INIT", Const, 3},
+ {"IPPROTO_DIVERT_RESP", Const, 3},
+ {"IPPROTO_DONE", Const, 0},
+ {"IPPROTO_DSTOPTS", Const, 0},
+ {"IPPROTO_EGP", Const, 0},
+ {"IPPROTO_EMCON", Const, 0},
+ {"IPPROTO_ENCAP", Const, 0},
+ {"IPPROTO_EON", Const, 0},
+ {"IPPROTO_ESP", Const, 0},
+ {"IPPROTO_ETHERIP", Const, 0},
+ {"IPPROTO_FRAGMENT", Const, 0},
+ {"IPPROTO_GGP", Const, 0},
+ {"IPPROTO_GMTP", Const, 0},
+ {"IPPROTO_GRE", Const, 0},
+ {"IPPROTO_HELLO", Const, 0},
+ {"IPPROTO_HMP", Const, 0},
+ {"IPPROTO_HOPOPTS", Const, 0},
+ {"IPPROTO_ICMP", Const, 0},
+ {"IPPROTO_ICMPV6", Const, 0},
+ {"IPPROTO_IDP", Const, 0},
+ {"IPPROTO_IDPR", Const, 0},
+ {"IPPROTO_IDRP", Const, 0},
+ {"IPPROTO_IGMP", Const, 0},
+ {"IPPROTO_IGP", Const, 0},
+ {"IPPROTO_IGRP", Const, 0},
+ {"IPPROTO_IL", Const, 0},
+ {"IPPROTO_INLSP", Const, 0},
+ {"IPPROTO_INP", Const, 0},
+ {"IPPROTO_IP", Const, 0},
+ {"IPPROTO_IPCOMP", Const, 0},
+ {"IPPROTO_IPCV", Const, 0},
+ {"IPPROTO_IPEIP", Const, 0},
+ {"IPPROTO_IPIP", Const, 0},
+ {"IPPROTO_IPPC", Const, 0},
+ {"IPPROTO_IPV4", Const, 0},
+ {"IPPROTO_IPV6", Const, 0},
+ {"IPPROTO_IPV6_ICMP", Const, 1},
+ {"IPPROTO_IRTP", Const, 0},
+ {"IPPROTO_KRYPTOLAN", Const, 0},
+ {"IPPROTO_LARP", Const, 0},
+ {"IPPROTO_LEAF1", Const, 0},
+ {"IPPROTO_LEAF2", Const, 0},
+ {"IPPROTO_MAX", Const, 0},
+ {"IPPROTO_MAXID", Const, 0},
+ {"IPPROTO_MEAS", Const, 0},
+ {"IPPROTO_MH", Const, 1},
+ {"IPPROTO_MHRP", Const, 0},
+ {"IPPROTO_MICP", Const, 0},
+ {"IPPROTO_MOBILE", Const, 0},
+ {"IPPROTO_MPLS", Const, 1},
+ {"IPPROTO_MTP", Const, 0},
+ {"IPPROTO_MUX", Const, 0},
+ {"IPPROTO_ND", Const, 0},
+ {"IPPROTO_NHRP", Const, 0},
+ {"IPPROTO_NONE", Const, 0},
+ {"IPPROTO_NSP", Const, 0},
+ {"IPPROTO_NVPII", Const, 0},
+ {"IPPROTO_OLD_DIVERT", Const, 0},
+ {"IPPROTO_OSPFIGP", Const, 0},
+ {"IPPROTO_PFSYNC", Const, 0},
+ {"IPPROTO_PGM", Const, 0},
+ {"IPPROTO_PIGP", Const, 0},
+ {"IPPROTO_PIM", Const, 0},
+ {"IPPROTO_PRM", Const, 0},
+ {"IPPROTO_PUP", Const, 0},
+ {"IPPROTO_PVP", Const, 0},
+ {"IPPROTO_RAW", Const, 0},
+ {"IPPROTO_RCCMON", Const, 0},
+ {"IPPROTO_RDP", Const, 0},
+ {"IPPROTO_ROUTING", Const, 0},
+ {"IPPROTO_RSVP", Const, 0},
+ {"IPPROTO_RVD", Const, 0},
+ {"IPPROTO_SATEXPAK", Const, 0},
+ {"IPPROTO_SATMON", Const, 0},
+ {"IPPROTO_SCCSP", Const, 0},
+ {"IPPROTO_SCTP", Const, 0},
+ {"IPPROTO_SDRP", Const, 0},
+ {"IPPROTO_SEND", Const, 1},
+ {"IPPROTO_SEP", Const, 0},
+ {"IPPROTO_SKIP", Const, 0},
+ {"IPPROTO_SPACER", Const, 0},
+ {"IPPROTO_SRPC", Const, 0},
+ {"IPPROTO_ST", Const, 0},
+ {"IPPROTO_SVMTP", Const, 0},
+ {"IPPROTO_SWIPE", Const, 0},
+ {"IPPROTO_TCF", Const, 0},
+ {"IPPROTO_TCP", Const, 0},
+ {"IPPROTO_TLSP", Const, 0},
+ {"IPPROTO_TP", Const, 0},
+ {"IPPROTO_TPXX", Const, 0},
+ {"IPPROTO_TRUNK1", Const, 0},
+ {"IPPROTO_TRUNK2", Const, 0},
+ {"IPPROTO_TTP", Const, 0},
+ {"IPPROTO_UDP", Const, 0},
+ {"IPPROTO_UDPLITE", Const, 0},
+ {"IPPROTO_VINES", Const, 0},
+ {"IPPROTO_VISA", Const, 0},
+ {"IPPROTO_VMTP", Const, 0},
+ {"IPPROTO_VRRP", Const, 1},
+ {"IPPROTO_WBEXPAK", Const, 0},
+ {"IPPROTO_WBMON", Const, 0},
+ {"IPPROTO_WSN", Const, 0},
+ {"IPPROTO_XNET", Const, 0},
+ {"IPPROTO_XTP", Const, 0},
+ {"IPV6_2292DSTOPTS", Const, 0},
+ {"IPV6_2292HOPLIMIT", Const, 0},
+ {"IPV6_2292HOPOPTS", Const, 0},
+ {"IPV6_2292NEXTHOP", Const, 0},
+ {"IPV6_2292PKTINFO", Const, 0},
+ {"IPV6_2292PKTOPTIONS", Const, 0},
+ {"IPV6_2292RTHDR", Const, 0},
+ {"IPV6_ADDRFORM", Const, 0},
+ {"IPV6_ADD_MEMBERSHIP", Const, 0},
+ {"IPV6_AUTHHDR", Const, 0},
+ {"IPV6_AUTH_LEVEL", Const, 1},
+ {"IPV6_AUTOFLOWLABEL", Const, 0},
+ {"IPV6_BINDANY", Const, 0},
+ {"IPV6_BINDV6ONLY", Const, 0},
+ {"IPV6_BOUND_IF", Const, 0},
+ {"IPV6_CHECKSUM", Const, 0},
+ {"IPV6_DEFAULT_MULTICAST_HOPS", Const, 0},
+ {"IPV6_DEFAULT_MULTICAST_LOOP", Const, 0},
+ {"IPV6_DEFHLIM", Const, 0},
+ {"IPV6_DONTFRAG", Const, 0},
+ {"IPV6_DROP_MEMBERSHIP", Const, 0},
+ {"IPV6_DSTOPTS", Const, 0},
+ {"IPV6_ESP_NETWORK_LEVEL", Const, 1},
+ {"IPV6_ESP_TRANS_LEVEL", Const, 1},
+ {"IPV6_FAITH", Const, 0},
+ {"IPV6_FLOWINFO_MASK", Const, 0},
+ {"IPV6_FLOWLABEL_MASK", Const, 0},
+ {"IPV6_FRAGTTL", Const, 0},
+ {"IPV6_FW_ADD", Const, 0},
+ {"IPV6_FW_DEL", Const, 0},
+ {"IPV6_FW_FLUSH", Const, 0},
+ {"IPV6_FW_GET", Const, 0},
+ {"IPV6_FW_ZERO", Const, 0},
+ {"IPV6_HLIMDEC", Const, 0},
+ {"IPV6_HOPLIMIT", Const, 0},
+ {"IPV6_HOPOPTS", Const, 0},
+ {"IPV6_IPCOMP_LEVEL", Const, 1},
+ {"IPV6_IPSEC_POLICY", Const, 0},
+ {"IPV6_JOIN_ANYCAST", Const, 0},
+ {"IPV6_JOIN_GROUP", Const, 0},
+ {"IPV6_LEAVE_ANYCAST", Const, 0},
+ {"IPV6_LEAVE_GROUP", Const, 0},
+ {"IPV6_MAXHLIM", Const, 0},
+ {"IPV6_MAXOPTHDR", Const, 0},
+ {"IPV6_MAXPACKET", Const, 0},
+ {"IPV6_MAX_GROUP_SRC_FILTER", Const, 0},
+ {"IPV6_MAX_MEMBERSHIPS", Const, 0},
+ {"IPV6_MAX_SOCK_SRC_FILTER", Const, 0},
+ {"IPV6_MIN_MEMBERSHIPS", Const, 0},
+ {"IPV6_MMTU", Const, 0},
+ {"IPV6_MSFILTER", Const, 0},
+ {"IPV6_MTU", Const, 0},
+ {"IPV6_MTU_DISCOVER", Const, 0},
+ {"IPV6_MULTICAST_HOPS", Const, 0},
+ {"IPV6_MULTICAST_IF", Const, 0},
+ {"IPV6_MULTICAST_LOOP", Const, 0},
+ {"IPV6_NEXTHOP", Const, 0},
+ {"IPV6_OPTIONS", Const, 1},
+ {"IPV6_PATHMTU", Const, 0},
+ {"IPV6_PIPEX", Const, 1},
+ {"IPV6_PKTINFO", Const, 0},
+ {"IPV6_PMTUDISC_DO", Const, 0},
+ {"IPV6_PMTUDISC_DONT", Const, 0},
+ {"IPV6_PMTUDISC_PROBE", Const, 0},
+ {"IPV6_PMTUDISC_WANT", Const, 0},
+ {"IPV6_PORTRANGE", Const, 0},
+ {"IPV6_PORTRANGE_DEFAULT", Const, 0},
+ {"IPV6_PORTRANGE_HIGH", Const, 0},
+ {"IPV6_PORTRANGE_LOW", Const, 0},
+ {"IPV6_PREFER_TEMPADDR", Const, 0},
+ {"IPV6_RECVDSTOPTS", Const, 0},
+ {"IPV6_RECVDSTPORT", Const, 3},
+ {"IPV6_RECVERR", Const, 0},
+ {"IPV6_RECVHOPLIMIT", Const, 0},
+ {"IPV6_RECVHOPOPTS", Const, 0},
+ {"IPV6_RECVPATHMTU", Const, 0},
+ {"IPV6_RECVPKTINFO", Const, 0},
+ {"IPV6_RECVRTHDR", Const, 0},
+ {"IPV6_RECVTCLASS", Const, 0},
+ {"IPV6_ROUTER_ALERT", Const, 0},
+ {"IPV6_RTABLE", Const, 1},
+ {"IPV6_RTHDR", Const, 0},
+ {"IPV6_RTHDRDSTOPTS", Const, 0},
+ {"IPV6_RTHDR_LOOSE", Const, 0},
+ {"IPV6_RTHDR_STRICT", Const, 0},
+ {"IPV6_RTHDR_TYPE_0", Const, 0},
+ {"IPV6_RXDSTOPTS", Const, 0},
+ {"IPV6_RXHOPOPTS", Const, 0},
+ {"IPV6_SOCKOPT_RESERVED1", Const, 0},
+ {"IPV6_TCLASS", Const, 0},
+ {"IPV6_UNICAST_HOPS", Const, 0},
+ {"IPV6_USE_MIN_MTU", Const, 0},
+ {"IPV6_V6ONLY", Const, 0},
+ {"IPV6_VERSION", Const, 0},
+ {"IPV6_VERSION_MASK", Const, 0},
+ {"IPV6_XFRM_POLICY", Const, 0},
+ {"IP_ADD_MEMBERSHIP", Const, 0},
+ {"IP_ADD_SOURCE_MEMBERSHIP", Const, 0},
+ {"IP_AUTH_LEVEL", Const, 1},
+ {"IP_BINDANY", Const, 0},
+ {"IP_BLOCK_SOURCE", Const, 0},
+ {"IP_BOUND_IF", Const, 0},
+ {"IP_DEFAULT_MULTICAST_LOOP", Const, 0},
+ {"IP_DEFAULT_MULTICAST_TTL", Const, 0},
+ {"IP_DF", Const, 0},
+ {"IP_DIVERTFL", Const, 3},
+ {"IP_DONTFRAG", Const, 0},
+ {"IP_DROP_MEMBERSHIP", Const, 0},
+ {"IP_DROP_SOURCE_MEMBERSHIP", Const, 0},
+ {"IP_DUMMYNET3", Const, 0},
+ {"IP_DUMMYNET_CONFIGURE", Const, 0},
+ {"IP_DUMMYNET_DEL", Const, 0},
+ {"IP_DUMMYNET_FLUSH", Const, 0},
+ {"IP_DUMMYNET_GET", Const, 0},
+ {"IP_EF", Const, 1},
+ {"IP_ERRORMTU", Const, 1},
+ {"IP_ESP_NETWORK_LEVEL", Const, 1},
+ {"IP_ESP_TRANS_LEVEL", Const, 1},
+ {"IP_FAITH", Const, 0},
+ {"IP_FREEBIND", Const, 0},
+ {"IP_FW3", Const, 0},
+ {"IP_FW_ADD", Const, 0},
+ {"IP_FW_DEL", Const, 0},
+ {"IP_FW_FLUSH", Const, 0},
+ {"IP_FW_GET", Const, 0},
+ {"IP_FW_NAT_CFG", Const, 0},
+ {"IP_FW_NAT_DEL", Const, 0},
+ {"IP_FW_NAT_GET_CONFIG", Const, 0},
+ {"IP_FW_NAT_GET_LOG", Const, 0},
+ {"IP_FW_RESETLOG", Const, 0},
+ {"IP_FW_TABLE_ADD", Const, 0},
+ {"IP_FW_TABLE_DEL", Const, 0},
+ {"IP_FW_TABLE_FLUSH", Const, 0},
+ {"IP_FW_TABLE_GETSIZE", Const, 0},
+ {"IP_FW_TABLE_LIST", Const, 0},
+ {"IP_FW_ZERO", Const, 0},
+ {"IP_HDRINCL", Const, 0},
+ {"IP_IPCOMP_LEVEL", Const, 1},
+ {"IP_IPSECFLOWINFO", Const, 1},
+ {"IP_IPSEC_LOCAL_AUTH", Const, 1},
+ {"IP_IPSEC_LOCAL_CRED", Const, 1},
+ {"IP_IPSEC_LOCAL_ID", Const, 1},
+ {"IP_IPSEC_POLICY", Const, 0},
+ {"IP_IPSEC_REMOTE_AUTH", Const, 1},
+ {"IP_IPSEC_REMOTE_CRED", Const, 1},
+ {"IP_IPSEC_REMOTE_ID", Const, 1},
+ {"IP_MAXPACKET", Const, 0},
+ {"IP_MAX_GROUP_SRC_FILTER", Const, 0},
+ {"IP_MAX_MEMBERSHIPS", Const, 0},
+ {"IP_MAX_SOCK_MUTE_FILTER", Const, 0},
+ {"IP_MAX_SOCK_SRC_FILTER", Const, 0},
+ {"IP_MAX_SOURCE_FILTER", Const, 0},
+ {"IP_MF", Const, 0},
+ {"IP_MINFRAGSIZE", Const, 1},
+ {"IP_MINTTL", Const, 0},
+ {"IP_MIN_MEMBERSHIPS", Const, 0},
+ {"IP_MSFILTER", Const, 0},
+ {"IP_MSS", Const, 0},
+ {"IP_MTU", Const, 0},
+ {"IP_MTU_DISCOVER", Const, 0},
+ {"IP_MULTICAST_IF", Const, 0},
+ {"IP_MULTICAST_IFINDEX", Const, 0},
+ {"IP_MULTICAST_LOOP", Const, 0},
+ {"IP_MULTICAST_TTL", Const, 0},
+ {"IP_MULTICAST_VIF", Const, 0},
+ {"IP_NAT__XXX", Const, 0},
+ {"IP_OFFMASK", Const, 0},
+ {"IP_OLD_FW_ADD", Const, 0},
+ {"IP_OLD_FW_DEL", Const, 0},
+ {"IP_OLD_FW_FLUSH", Const, 0},
+ {"IP_OLD_FW_GET", Const, 0},
+ {"IP_OLD_FW_RESETLOG", Const, 0},
+ {"IP_OLD_FW_ZERO", Const, 0},
+ {"IP_ONESBCAST", Const, 0},
+ {"IP_OPTIONS", Const, 0},
+ {"IP_ORIGDSTADDR", Const, 0},
+ {"IP_PASSSEC", Const, 0},
+ {"IP_PIPEX", Const, 1},
+ {"IP_PKTINFO", Const, 0},
+ {"IP_PKTOPTIONS", Const, 0},
+ {"IP_PMTUDISC", Const, 0},
+ {"IP_PMTUDISC_DO", Const, 0},
+ {"IP_PMTUDISC_DONT", Const, 0},
+ {"IP_PMTUDISC_PROBE", Const, 0},
+ {"IP_PMTUDISC_WANT", Const, 0},
+ {"IP_PORTRANGE", Const, 0},
+ {"IP_PORTRANGE_DEFAULT", Const, 0},
+ {"IP_PORTRANGE_HIGH", Const, 0},
+ {"IP_PORTRANGE_LOW", Const, 0},
+ {"IP_RECVDSTADDR", Const, 0},
+ {"IP_RECVDSTPORT", Const, 1},
+ {"IP_RECVERR", Const, 0},
+ {"IP_RECVIF", Const, 0},
+ {"IP_RECVOPTS", Const, 0},
+ {"IP_RECVORIGDSTADDR", Const, 0},
+ {"IP_RECVPKTINFO", Const, 0},
+ {"IP_RECVRETOPTS", Const, 0},
+ {"IP_RECVRTABLE", Const, 1},
+ {"IP_RECVTOS", Const, 0},
+ {"IP_RECVTTL", Const, 0},
+ {"IP_RETOPTS", Const, 0},
+ {"IP_RF", Const, 0},
+ {"IP_ROUTER_ALERT", Const, 0},
+ {"IP_RSVP_OFF", Const, 0},
+ {"IP_RSVP_ON", Const, 0},
+ {"IP_RSVP_VIF_OFF", Const, 0},
+ {"IP_RSVP_VIF_ON", Const, 0},
+ {"IP_RTABLE", Const, 1},
+ {"IP_SENDSRCADDR", Const, 0},
+ {"IP_STRIPHDR", Const, 0},
+ {"IP_TOS", Const, 0},
+ {"IP_TRAFFIC_MGT_BACKGROUND", Const, 0},
+ {"IP_TRANSPARENT", Const, 0},
+ {"IP_TTL", Const, 0},
+ {"IP_UNBLOCK_SOURCE", Const, 0},
+ {"IP_XFRM_POLICY", Const, 0},
+ {"IPv6MTUInfo", Type, 2},
+ {"IPv6MTUInfo.Addr", Field, 2},
+ {"IPv6MTUInfo.Mtu", Field, 2},
+ {"IPv6Mreq", Type, 0},
+ {"IPv6Mreq.Interface", Field, 0},
+ {"IPv6Mreq.Multiaddr", Field, 0},
+ {"ISIG", Const, 0},
+ {"ISTRIP", Const, 0},
+ {"IUCLC", Const, 0},
+ {"IUTF8", Const, 0},
+ {"IXANY", Const, 0},
+ {"IXOFF", Const, 0},
+ {"IXON", Const, 0},
+ {"IfAddrmsg", Type, 0},
+ {"IfAddrmsg.Family", Field, 0},
+ {"IfAddrmsg.Flags", Field, 0},
+ {"IfAddrmsg.Index", Field, 0},
+ {"IfAddrmsg.Prefixlen", Field, 0},
+ {"IfAddrmsg.Scope", Field, 0},
+ {"IfAnnounceMsghdr", Type, 1},
+ {"IfAnnounceMsghdr.Hdrlen", Field, 2},
+ {"IfAnnounceMsghdr.Index", Field, 1},
+ {"IfAnnounceMsghdr.Msglen", Field, 1},
+ {"IfAnnounceMsghdr.Name", Field, 1},
+ {"IfAnnounceMsghdr.Type", Field, 1},
+ {"IfAnnounceMsghdr.Version", Field, 1},
+ {"IfAnnounceMsghdr.What", Field, 1},
+ {"IfData", Type, 0},
+ {"IfData.Addrlen", Field, 0},
+ {"IfData.Baudrate", Field, 0},
+ {"IfData.Capabilities", Field, 2},
+ {"IfData.Collisions", Field, 0},
+ {"IfData.Datalen", Field, 0},
+ {"IfData.Epoch", Field, 0},
+ {"IfData.Hdrlen", Field, 0},
+ {"IfData.Hwassist", Field, 0},
+ {"IfData.Ibytes", Field, 0},
+ {"IfData.Ierrors", Field, 0},
+ {"IfData.Imcasts", Field, 0},
+ {"IfData.Ipackets", Field, 0},
+ {"IfData.Iqdrops", Field, 0},
+ {"IfData.Lastchange", Field, 0},
+ {"IfData.Link_state", Field, 0},
+ {"IfData.Mclpool", Field, 2},
+ {"IfData.Metric", Field, 0},
+ {"IfData.Mtu", Field, 0},
+ {"IfData.Noproto", Field, 0},
+ {"IfData.Obytes", Field, 0},
+ {"IfData.Oerrors", Field, 0},
+ {"IfData.Omcasts", Field, 0},
+ {"IfData.Opackets", Field, 0},
+ {"IfData.Pad", Field, 2},
+ {"IfData.Pad_cgo_0", Field, 2},
+ {"IfData.Pad_cgo_1", Field, 2},
+ {"IfData.Physical", Field, 0},
+ {"IfData.Recvquota", Field, 0},
+ {"IfData.Recvtiming", Field, 0},
+ {"IfData.Reserved1", Field, 0},
+ {"IfData.Reserved2", Field, 0},
+ {"IfData.Spare_char1", Field, 0},
+ {"IfData.Spare_char2", Field, 0},
+ {"IfData.Type", Field, 0},
+ {"IfData.Typelen", Field, 0},
+ {"IfData.Unused1", Field, 0},
+ {"IfData.Unused2", Field, 0},
+ {"IfData.Xmitquota", Field, 0},
+ {"IfData.Xmittiming", Field, 0},
+ {"IfInfomsg", Type, 0},
+ {"IfInfomsg.Change", Field, 0},
+ {"IfInfomsg.Family", Field, 0},
+ {"IfInfomsg.Flags", Field, 0},
+ {"IfInfomsg.Index", Field, 0},
+ {"IfInfomsg.Type", Field, 0},
+ {"IfInfomsg.X__ifi_pad", Field, 0},
+ {"IfMsghdr", Type, 0},
+ {"IfMsghdr.Addrs", Field, 0},
+ {"IfMsghdr.Data", Field, 0},
+ {"IfMsghdr.Flags", Field, 0},
+ {"IfMsghdr.Hdrlen", Field, 2},
+ {"IfMsghdr.Index", Field, 0},
+ {"IfMsghdr.Msglen", Field, 0},
+ {"IfMsghdr.Pad1", Field, 2},
+ {"IfMsghdr.Pad2", Field, 2},
+ {"IfMsghdr.Pad_cgo_0", Field, 0},
+ {"IfMsghdr.Pad_cgo_1", Field, 2},
+ {"IfMsghdr.Tableid", Field, 2},
+ {"IfMsghdr.Type", Field, 0},
+ {"IfMsghdr.Version", Field, 0},
+ {"IfMsghdr.Xflags", Field, 2},
+ {"IfaMsghdr", Type, 0},
+ {"IfaMsghdr.Addrs", Field, 0},
+ {"IfaMsghdr.Flags", Field, 0},
+ {"IfaMsghdr.Hdrlen", Field, 2},
+ {"IfaMsghdr.Index", Field, 0},
+ {"IfaMsghdr.Metric", Field, 0},
+ {"IfaMsghdr.Msglen", Field, 0},
+ {"IfaMsghdr.Pad1", Field, 2},
+ {"IfaMsghdr.Pad2", Field, 2},
+ {"IfaMsghdr.Pad_cgo_0", Field, 0},
+ {"IfaMsghdr.Tableid", Field, 2},
+ {"IfaMsghdr.Type", Field, 0},
+ {"IfaMsghdr.Version", Field, 0},
+ {"IfmaMsghdr", Type, 0},
+ {"IfmaMsghdr.Addrs", Field, 0},
+ {"IfmaMsghdr.Flags", Field, 0},
+ {"IfmaMsghdr.Index", Field, 0},
+ {"IfmaMsghdr.Msglen", Field, 0},
+ {"IfmaMsghdr.Pad_cgo_0", Field, 0},
+ {"IfmaMsghdr.Type", Field, 0},
+ {"IfmaMsghdr.Version", Field, 0},
+ {"IfmaMsghdr2", Type, 0},
+ {"IfmaMsghdr2.Addrs", Field, 0},
+ {"IfmaMsghdr2.Flags", Field, 0},
+ {"IfmaMsghdr2.Index", Field, 0},
+ {"IfmaMsghdr2.Msglen", Field, 0},
+ {"IfmaMsghdr2.Pad_cgo_0", Field, 0},
+ {"IfmaMsghdr2.Refcount", Field, 0},
+ {"IfmaMsghdr2.Type", Field, 0},
+ {"IfmaMsghdr2.Version", Field, 0},
+ {"ImplementsGetwd", Const, 0},
+ {"Inet4Pktinfo", Type, 0},
+ {"Inet4Pktinfo.Addr", Field, 0},
+ {"Inet4Pktinfo.Ifindex", Field, 0},
+ {"Inet4Pktinfo.Spec_dst", Field, 0},
+ {"Inet6Pktinfo", Type, 0},
+ {"Inet6Pktinfo.Addr", Field, 0},
+ {"Inet6Pktinfo.Ifindex", Field, 0},
+ {"InotifyAddWatch", Func, 0},
+ {"InotifyEvent", Type, 0},
+ {"InotifyEvent.Cookie", Field, 0},
+ {"InotifyEvent.Len", Field, 0},
+ {"InotifyEvent.Mask", Field, 0},
+ {"InotifyEvent.Name", Field, 0},
+ {"InotifyEvent.Wd", Field, 0},
+ {"InotifyInit", Func, 0},
+ {"InotifyInit1", Func, 0},
+ {"InotifyRmWatch", Func, 0},
+ {"InterfaceAddrMessage", Type, 0},
+ {"InterfaceAddrMessage.Data", Field, 0},
+ {"InterfaceAddrMessage.Header", Field, 0},
+ {"InterfaceAnnounceMessage", Type, 1},
+ {"InterfaceAnnounceMessage.Header", Field, 1},
+ {"InterfaceInfo", Type, 0},
+ {"InterfaceInfo.Address", Field, 0},
+ {"InterfaceInfo.BroadcastAddress", Field, 0},
+ {"InterfaceInfo.Flags", Field, 0},
+ {"InterfaceInfo.Netmask", Field, 0},
+ {"InterfaceMessage", Type, 0},
+ {"InterfaceMessage.Data", Field, 0},
+ {"InterfaceMessage.Header", Field, 0},
+ {"InterfaceMulticastAddrMessage", Type, 0},
+ {"InterfaceMulticastAddrMessage.Data", Field, 0},
+ {"InterfaceMulticastAddrMessage.Header", Field, 0},
+ {"InvalidHandle", Const, 0},
+ {"Ioperm", Func, 0},
+ {"Iopl", Func, 0},
+ {"Iovec", Type, 0},
+ {"Iovec.Base", Field, 0},
+ {"Iovec.Len", Field, 0},
+ {"IpAdapterInfo", Type, 0},
+ {"IpAdapterInfo.AdapterName", Field, 0},
+ {"IpAdapterInfo.Address", Field, 0},
+ {"IpAdapterInfo.AddressLength", Field, 0},
+ {"IpAdapterInfo.ComboIndex", Field, 0},
+ {"IpAdapterInfo.CurrentIpAddress", Field, 0},
+ {"IpAdapterInfo.Description", Field, 0},
+ {"IpAdapterInfo.DhcpEnabled", Field, 0},
+ {"IpAdapterInfo.DhcpServer", Field, 0},
+ {"IpAdapterInfo.GatewayList", Field, 0},
+ {"IpAdapterInfo.HaveWins", Field, 0},
+ {"IpAdapterInfo.Index", Field, 0},
+ {"IpAdapterInfo.IpAddressList", Field, 0},
+ {"IpAdapterInfo.LeaseExpires", Field, 0},
+ {"IpAdapterInfo.LeaseObtained", Field, 0},
+ {"IpAdapterInfo.Next", Field, 0},
+ {"IpAdapterInfo.PrimaryWinsServer", Field, 0},
+ {"IpAdapterInfo.SecondaryWinsServer", Field, 0},
+ {"IpAdapterInfo.Type", Field, 0},
+ {"IpAddrString", Type, 0},
+ {"IpAddrString.Context", Field, 0},
+ {"IpAddrString.IpAddress", Field, 0},
+ {"IpAddrString.IpMask", Field, 0},
+ {"IpAddrString.Next", Field, 0},
+ {"IpAddressString", Type, 0},
+ {"IpAddressString.String", Field, 0},
+ {"IpMaskString", Type, 0},
+ {"IpMaskString.String", Field, 2},
+ {"Issetugid", Func, 0},
+ {"KEY_ALL_ACCESS", Const, 0},
+ {"KEY_CREATE_LINK", Const, 0},
+ {"KEY_CREATE_SUB_KEY", Const, 0},
+ {"KEY_ENUMERATE_SUB_KEYS", Const, 0},
+ {"KEY_EXECUTE", Const, 0},
+ {"KEY_NOTIFY", Const, 0},
+ {"KEY_QUERY_VALUE", Const, 0},
+ {"KEY_READ", Const, 0},
+ {"KEY_SET_VALUE", Const, 0},
+ {"KEY_WOW64_32KEY", Const, 0},
+ {"KEY_WOW64_64KEY", Const, 0},
+ {"KEY_WRITE", Const, 0},
+ {"Kevent", Func, 0},
+ {"Kevent_t", Type, 0},
+ {"Kevent_t.Data", Field, 0},
+ {"Kevent_t.Fflags", Field, 0},
+ {"Kevent_t.Filter", Field, 0},
+ {"Kevent_t.Flags", Field, 0},
+ {"Kevent_t.Ident", Field, 0},
+ {"Kevent_t.Pad_cgo_0", Field, 2},
+ {"Kevent_t.Udata", Field, 0},
+ {"Kill", Func, 0},
+ {"Klogctl", Func, 0},
+ {"Kqueue", Func, 0},
+ {"LANG_ENGLISH", Const, 0},
+ {"LAYERED_PROTOCOL", Const, 2},
+ {"LCNT_OVERLOAD_FLUSH", Const, 1},
+ {"LINUX_REBOOT_CMD_CAD_OFF", Const, 0},
+ {"LINUX_REBOOT_CMD_CAD_ON", Const, 0},
+ {"LINUX_REBOOT_CMD_HALT", Const, 0},
+ {"LINUX_REBOOT_CMD_KEXEC", Const, 0},
+ {"LINUX_REBOOT_CMD_POWER_OFF", Const, 0},
+ {"LINUX_REBOOT_CMD_RESTART", Const, 0},
+ {"LINUX_REBOOT_CMD_RESTART2", Const, 0},
+ {"LINUX_REBOOT_CMD_SW_SUSPEND", Const, 0},
+ {"LINUX_REBOOT_MAGIC1", Const, 0},
+ {"LINUX_REBOOT_MAGIC2", Const, 0},
+ {"LOCK_EX", Const, 0},
+ {"LOCK_NB", Const, 0},
+ {"LOCK_SH", Const, 0},
+ {"LOCK_UN", Const, 0},
+ {"LazyDLL", Type, 0},
+ {"LazyDLL.Name", Field, 0},
+ {"LazyProc", Type, 0},
+ {"LazyProc.Name", Field, 0},
+ {"Lchown", Func, 0},
+ {"Linger", Type, 0},
+ {"Linger.Linger", Field, 0},
+ {"Linger.Onoff", Field, 0},
+ {"Link", Func, 0},
+ {"Listen", Func, 0},
+ {"Listxattr", Func, 1},
+ {"LoadCancelIoEx", Func, 1},
+ {"LoadConnectEx", Func, 1},
+ {"LoadCreateSymbolicLink", Func, 4},
+ {"LoadDLL", Func, 0},
+ {"LoadGetAddrInfo", Func, 1},
+ {"LoadLibrary", Func, 0},
+ {"LoadSetFileCompletionNotificationModes", Func, 2},
+ {"LocalFree", Func, 0},
+ {"Log2phys_t", Type, 0},
+ {"Log2phys_t.Contigbytes", Field, 0},
+ {"Log2phys_t.Devoffset", Field, 0},
+ {"Log2phys_t.Flags", Field, 0},
+ {"LookupAccountName", Func, 0},
+ {"LookupAccountSid", Func, 0},
+ {"LookupSID", Func, 0},
+ {"LsfJump", Func, 0},
+ {"LsfSocket", Func, 0},
+ {"LsfStmt", Func, 0},
+ {"Lstat", Func, 0},
+ {"MADV_AUTOSYNC", Const, 1},
+ {"MADV_CAN_REUSE", Const, 0},
+ {"MADV_CORE", Const, 1},
+ {"MADV_DOFORK", Const, 0},
+ {"MADV_DONTFORK", Const, 0},
+ {"MADV_DONTNEED", Const, 0},
+ {"MADV_FREE", Const, 0},
+ {"MADV_FREE_REUSABLE", Const, 0},
+ {"MADV_FREE_REUSE", Const, 0},
+ {"MADV_HUGEPAGE", Const, 0},
+ {"MADV_HWPOISON", Const, 0},
+ {"MADV_MERGEABLE", Const, 0},
+ {"MADV_NOCORE", Const, 1},
+ {"MADV_NOHUGEPAGE", Const, 0},
+ {"MADV_NORMAL", Const, 0},
+ {"MADV_NOSYNC", Const, 1},
+ {"MADV_PROTECT", Const, 1},
+ {"MADV_RANDOM", Const, 0},
+ {"MADV_REMOVE", Const, 0},
+ {"MADV_SEQUENTIAL", Const, 0},
+ {"MADV_SPACEAVAIL", Const, 3},
+ {"MADV_UNMERGEABLE", Const, 0},
+ {"MADV_WILLNEED", Const, 0},
+ {"MADV_ZERO_WIRED_PAGES", Const, 0},
+ {"MAP_32BIT", Const, 0},
+ {"MAP_ALIGNED_SUPER", Const, 3},
+ {"MAP_ALIGNMENT_16MB", Const, 3},
+ {"MAP_ALIGNMENT_1TB", Const, 3},
+ {"MAP_ALIGNMENT_256TB", Const, 3},
+ {"MAP_ALIGNMENT_4GB", Const, 3},
+ {"MAP_ALIGNMENT_64KB", Const, 3},
+ {"MAP_ALIGNMENT_64PB", Const, 3},
+ {"MAP_ALIGNMENT_MASK", Const, 3},
+ {"MAP_ALIGNMENT_SHIFT", Const, 3},
+ {"MAP_ANON", Const, 0},
+ {"MAP_ANONYMOUS", Const, 0},
+ {"MAP_COPY", Const, 0},
+ {"MAP_DENYWRITE", Const, 0},
+ {"MAP_EXECUTABLE", Const, 0},
+ {"MAP_FILE", Const, 0},
+ {"MAP_FIXED", Const, 0},
+ {"MAP_FLAGMASK", Const, 3},
+ {"MAP_GROWSDOWN", Const, 0},
+ {"MAP_HASSEMAPHORE", Const, 0},
+ {"MAP_HUGETLB", Const, 0},
+ {"MAP_INHERIT", Const, 3},
+ {"MAP_INHERIT_COPY", Const, 3},
+ {"MAP_INHERIT_DEFAULT", Const, 3},
+ {"MAP_INHERIT_DONATE_COPY", Const, 3},
+ {"MAP_INHERIT_NONE", Const, 3},
+ {"MAP_INHERIT_SHARE", Const, 3},
+ {"MAP_JIT", Const, 0},
+ {"MAP_LOCKED", Const, 0},
+ {"MAP_NOCACHE", Const, 0},
+ {"MAP_NOCORE", Const, 1},
+ {"MAP_NOEXTEND", Const, 0},
+ {"MAP_NONBLOCK", Const, 0},
+ {"MAP_NORESERVE", Const, 0},
+ {"MAP_NOSYNC", Const, 1},
+ {"MAP_POPULATE", Const, 0},
+ {"MAP_PREFAULT_READ", Const, 1},
+ {"MAP_PRIVATE", Const, 0},
+ {"MAP_RENAME", Const, 0},
+ {"MAP_RESERVED0080", Const, 0},
+ {"MAP_RESERVED0100", Const, 1},
+ {"MAP_SHARED", Const, 0},
+ {"MAP_STACK", Const, 0},
+ {"MAP_TRYFIXED", Const, 3},
+ {"MAP_TYPE", Const, 0},
+ {"MAP_WIRED", Const, 3},
+ {"MAXIMUM_REPARSE_DATA_BUFFER_SIZE", Const, 4},
+ {"MAXLEN_IFDESCR", Const, 0},
+ {"MAXLEN_PHYSADDR", Const, 0},
+ {"MAX_ADAPTER_ADDRESS_LENGTH", Const, 0},
+ {"MAX_ADAPTER_DESCRIPTION_LENGTH", Const, 0},
+ {"MAX_ADAPTER_NAME_LENGTH", Const, 0},
+ {"MAX_COMPUTERNAME_LENGTH", Const, 0},
+ {"MAX_INTERFACE_NAME_LEN", Const, 0},
+ {"MAX_LONG_PATH", Const, 0},
+ {"MAX_PATH", Const, 0},
+ {"MAX_PROTOCOL_CHAIN", Const, 2},
+ {"MCL_CURRENT", Const, 0},
+ {"MCL_FUTURE", Const, 0},
+ {"MNT_DETACH", Const, 0},
+ {"MNT_EXPIRE", Const, 0},
+ {"MNT_FORCE", Const, 0},
+ {"MSG_BCAST", Const, 1},
+ {"MSG_CMSG_CLOEXEC", Const, 0},
+ {"MSG_COMPAT", Const, 0},
+ {"MSG_CONFIRM", Const, 0},
+ {"MSG_CONTROLMBUF", Const, 1},
+ {"MSG_CTRUNC", Const, 0},
+ {"MSG_DONTROUTE", Const, 0},
+ {"MSG_DONTWAIT", Const, 0},
+ {"MSG_EOF", Const, 0},
+ {"MSG_EOR", Const, 0},
+ {"MSG_ERRQUEUE", Const, 0},
+ {"MSG_FASTOPEN", Const, 1},
+ {"MSG_FIN", Const, 0},
+ {"MSG_FLUSH", Const, 0},
+ {"MSG_HAVEMORE", Const, 0},
+ {"MSG_HOLD", Const, 0},
+ {"MSG_IOVUSRSPACE", Const, 1},
+ {"MSG_LENUSRSPACE", Const, 1},
+ {"MSG_MCAST", Const, 1},
+ {"MSG_MORE", Const, 0},
+ {"MSG_NAMEMBUF", Const, 1},
+ {"MSG_NBIO", Const, 0},
+ {"MSG_NEEDSA", Const, 0},
+ {"MSG_NOSIGNAL", Const, 0},
+ {"MSG_NOTIFICATION", Const, 0},
+ {"MSG_OOB", Const, 0},
+ {"MSG_PEEK", Const, 0},
+ {"MSG_PROXY", Const, 0},
+ {"MSG_RCVMORE", Const, 0},
+ {"MSG_RST", Const, 0},
+ {"MSG_SEND", Const, 0},
+ {"MSG_SYN", Const, 0},
+ {"MSG_TRUNC", Const, 0},
+ {"MSG_TRYHARD", Const, 0},
+ {"MSG_USERFLAGS", Const, 1},
+ {"MSG_WAITALL", Const, 0},
+ {"MSG_WAITFORONE", Const, 0},
+ {"MSG_WAITSTREAM", Const, 0},
+ {"MS_ACTIVE", Const, 0},
+ {"MS_ASYNC", Const, 0},
+ {"MS_BIND", Const, 0},
+ {"MS_DEACTIVATE", Const, 0},
+ {"MS_DIRSYNC", Const, 0},
+ {"MS_INVALIDATE", Const, 0},
+ {"MS_I_VERSION", Const, 0},
+ {"MS_KERNMOUNT", Const, 0},
+ {"MS_KILLPAGES", Const, 0},
+ {"MS_MANDLOCK", Const, 0},
+ {"MS_MGC_MSK", Const, 0},
+ {"MS_MGC_VAL", Const, 0},
+ {"MS_MOVE", Const, 0},
+ {"MS_NOATIME", Const, 0},
+ {"MS_NODEV", Const, 0},
+ {"MS_NODIRATIME", Const, 0},
+ {"MS_NOEXEC", Const, 0},
+ {"MS_NOSUID", Const, 0},
+ {"MS_NOUSER", Const, 0},
+ {"MS_POSIXACL", Const, 0},
+ {"MS_PRIVATE", Const, 0},
+ {"MS_RDONLY", Const, 0},
+ {"MS_REC", Const, 0},
+ {"MS_RELATIME", Const, 0},
+ {"MS_REMOUNT", Const, 0},
+ {"MS_RMT_MASK", Const, 0},
+ {"MS_SHARED", Const, 0},
+ {"MS_SILENT", Const, 0},
+ {"MS_SLAVE", Const, 0},
+ {"MS_STRICTATIME", Const, 0},
+ {"MS_SYNC", Const, 0},
+ {"MS_SYNCHRONOUS", Const, 0},
+ {"MS_UNBINDABLE", Const, 0},
+ {"Madvise", Func, 0},
+ {"MapViewOfFile", Func, 0},
+ {"MaxTokenInfoClass", Const, 0},
+ {"Mclpool", Type, 2},
+ {"Mclpool.Alive", Field, 2},
+ {"Mclpool.Cwm", Field, 2},
+ {"Mclpool.Grown", Field, 2},
+ {"Mclpool.Hwm", Field, 2},
+ {"Mclpool.Lwm", Field, 2},
+ {"MibIfRow", Type, 0},
+ {"MibIfRow.AdminStatus", Field, 0},
+ {"MibIfRow.Descr", Field, 0},
+ {"MibIfRow.DescrLen", Field, 0},
+ {"MibIfRow.InDiscards", Field, 0},
+ {"MibIfRow.InErrors", Field, 0},
+ {"MibIfRow.InNUcastPkts", Field, 0},
+ {"MibIfRow.InOctets", Field, 0},
+ {"MibIfRow.InUcastPkts", Field, 0},
+ {"MibIfRow.InUnknownProtos", Field, 0},
+ {"MibIfRow.Index", Field, 0},
+ {"MibIfRow.LastChange", Field, 0},
+ {"MibIfRow.Mtu", Field, 0},
+ {"MibIfRow.Name", Field, 0},
+ {"MibIfRow.OperStatus", Field, 0},
+ {"MibIfRow.OutDiscards", Field, 0},
+ {"MibIfRow.OutErrors", Field, 0},
+ {"MibIfRow.OutNUcastPkts", Field, 0},
+ {"MibIfRow.OutOctets", Field, 0},
+ {"MibIfRow.OutQLen", Field, 0},
+ {"MibIfRow.OutUcastPkts", Field, 0},
+ {"MibIfRow.PhysAddr", Field, 0},
+ {"MibIfRow.PhysAddrLen", Field, 0},
+ {"MibIfRow.Speed", Field, 0},
+ {"MibIfRow.Type", Field, 0},
+ {"Mkdir", Func, 0},
+ {"Mkdirat", Func, 0},
+ {"Mkfifo", Func, 0},
+ {"Mknod", Func, 0},
+ {"Mknodat", Func, 0},
+ {"Mlock", Func, 0},
+ {"Mlockall", Func, 0},
+ {"Mmap", Func, 0},
+ {"Mount", Func, 0},
+ {"MoveFile", Func, 0},
+ {"Mprotect", Func, 0},
+ {"Msghdr", Type, 0},
+ {"Msghdr.Control", Field, 0},
+ {"Msghdr.Controllen", Field, 0},
+ {"Msghdr.Flags", Field, 0},
+ {"Msghdr.Iov", Field, 0},
+ {"Msghdr.Iovlen", Field, 0},
+ {"Msghdr.Name", Field, 0},
+ {"Msghdr.Namelen", Field, 0},
+ {"Msghdr.Pad_cgo_0", Field, 0},
+ {"Msghdr.Pad_cgo_1", Field, 0},
+ {"Munlock", Func, 0},
+ {"Munlockall", Func, 0},
+ {"Munmap", Func, 0},
+ {"MustLoadDLL", Func, 0},
+ {"NAME_MAX", Const, 0},
+ {"NETLINK_ADD_MEMBERSHIP", Const, 0},
+ {"NETLINK_AUDIT", Const, 0},
+ {"NETLINK_BROADCAST_ERROR", Const, 0},
+ {"NETLINK_CONNECTOR", Const, 0},
+ {"NETLINK_DNRTMSG", Const, 0},
+ {"NETLINK_DROP_MEMBERSHIP", Const, 0},
+ {"NETLINK_ECRYPTFS", Const, 0},
+ {"NETLINK_FIB_LOOKUP", Const, 0},
+ {"NETLINK_FIREWALL", Const, 0},
+ {"NETLINK_GENERIC", Const, 0},
+ {"NETLINK_INET_DIAG", Const, 0},
+ {"NETLINK_IP6_FW", Const, 0},
+ {"NETLINK_ISCSI", Const, 0},
+ {"NETLINK_KOBJECT_UEVENT", Const, 0},
+ {"NETLINK_NETFILTER", Const, 0},
+ {"NETLINK_NFLOG", Const, 0},
+ {"NETLINK_NO_ENOBUFS", Const, 0},
+ {"NETLINK_PKTINFO", Const, 0},
+ {"NETLINK_RDMA", Const, 0},
+ {"NETLINK_ROUTE", Const, 0},
+ {"NETLINK_SCSITRANSPORT", Const, 0},
+ {"NETLINK_SELINUX", Const, 0},
+ {"NETLINK_UNUSED", Const, 0},
+ {"NETLINK_USERSOCK", Const, 0},
+ {"NETLINK_XFRM", Const, 0},
+ {"NET_RT_DUMP", Const, 0},
+ {"NET_RT_DUMP2", Const, 0},
+ {"NET_RT_FLAGS", Const, 0},
+ {"NET_RT_IFLIST", Const, 0},
+ {"NET_RT_IFLIST2", Const, 0},
+ {"NET_RT_IFLISTL", Const, 1},
+ {"NET_RT_IFMALIST", Const, 0},
+ {"NET_RT_MAXID", Const, 0},
+ {"NET_RT_OIFLIST", Const, 1},
+ {"NET_RT_OOIFLIST", Const, 1},
+ {"NET_RT_STAT", Const, 0},
+ {"NET_RT_STATS", Const, 1},
+ {"NET_RT_TABLE", Const, 1},
+ {"NET_RT_TRASH", Const, 0},
+ {"NLA_ALIGNTO", Const, 0},
+ {"NLA_F_NESTED", Const, 0},
+ {"NLA_F_NET_BYTEORDER", Const, 0},
+ {"NLA_HDRLEN", Const, 0},
+ {"NLMSG_ALIGNTO", Const, 0},
+ {"NLMSG_DONE", Const, 0},
+ {"NLMSG_ERROR", Const, 0},
+ {"NLMSG_HDRLEN", Const, 0},
+ {"NLMSG_MIN_TYPE", Const, 0},
+ {"NLMSG_NOOP", Const, 0},
+ {"NLMSG_OVERRUN", Const, 0},
+ {"NLM_F_ACK", Const, 0},
+ {"NLM_F_APPEND", Const, 0},
+ {"NLM_F_ATOMIC", Const, 0},
+ {"NLM_F_CREATE", Const, 0},
+ {"NLM_F_DUMP", Const, 0},
+ {"NLM_F_ECHO", Const, 0},
+ {"NLM_F_EXCL", Const, 0},
+ {"NLM_F_MATCH", Const, 0},
+ {"NLM_F_MULTI", Const, 0},
+ {"NLM_F_REPLACE", Const, 0},
+ {"NLM_F_REQUEST", Const, 0},
+ {"NLM_F_ROOT", Const, 0},
+ {"NOFLSH", Const, 0},
+ {"NOTE_ABSOLUTE", Const, 0},
+ {"NOTE_ATTRIB", Const, 0},
+ {"NOTE_BACKGROUND", Const, 16},
+ {"NOTE_CHILD", Const, 0},
+ {"NOTE_CRITICAL", Const, 16},
+ {"NOTE_DELETE", Const, 0},
+ {"NOTE_EOF", Const, 1},
+ {"NOTE_EXEC", Const, 0},
+ {"NOTE_EXIT", Const, 0},
+ {"NOTE_EXITSTATUS", Const, 0},
+ {"NOTE_EXIT_CSERROR", Const, 16},
+ {"NOTE_EXIT_DECRYPTFAIL", Const, 16},
+ {"NOTE_EXIT_DETAIL", Const, 16},
+ {"NOTE_EXIT_DETAIL_MASK", Const, 16},
+ {"NOTE_EXIT_MEMORY", Const, 16},
+ {"NOTE_EXIT_REPARENTED", Const, 16},
+ {"NOTE_EXTEND", Const, 0},
+ {"NOTE_FFAND", Const, 0},
+ {"NOTE_FFCOPY", Const, 0},
+ {"NOTE_FFCTRLMASK", Const, 0},
+ {"NOTE_FFLAGSMASK", Const, 0},
+ {"NOTE_FFNOP", Const, 0},
+ {"NOTE_FFOR", Const, 0},
+ {"NOTE_FORK", Const, 0},
+ {"NOTE_LEEWAY", Const, 16},
+ {"NOTE_LINK", Const, 0},
+ {"NOTE_LOWAT", Const, 0},
+ {"NOTE_NONE", Const, 0},
+ {"NOTE_NSECONDS", Const, 0},
+ {"NOTE_PCTRLMASK", Const, 0},
+ {"NOTE_PDATAMASK", Const, 0},
+ {"NOTE_REAP", Const, 0},
+ {"NOTE_RENAME", Const, 0},
+ {"NOTE_RESOURCEEND", Const, 0},
+ {"NOTE_REVOKE", Const, 0},
+ {"NOTE_SECONDS", Const, 0},
+ {"NOTE_SIGNAL", Const, 0},
+ {"NOTE_TRACK", Const, 0},
+ {"NOTE_TRACKERR", Const, 0},
+ {"NOTE_TRIGGER", Const, 0},
+ {"NOTE_TRUNCATE", Const, 1},
+ {"NOTE_USECONDS", Const, 0},
+ {"NOTE_VM_ERROR", Const, 0},
+ {"NOTE_VM_PRESSURE", Const, 0},
+ {"NOTE_VM_PRESSURE_SUDDEN_TERMINATE", Const, 0},
+ {"NOTE_VM_PRESSURE_TERMINATE", Const, 0},
+ {"NOTE_WRITE", Const, 0},
+ {"NameCanonical", Const, 0},
+ {"NameCanonicalEx", Const, 0},
+ {"NameDisplay", Const, 0},
+ {"NameDnsDomain", Const, 0},
+ {"NameFullyQualifiedDN", Const, 0},
+ {"NameSamCompatible", Const, 0},
+ {"NameServicePrincipal", Const, 0},
+ {"NameUniqueId", Const, 0},
+ {"NameUnknown", Const, 0},
+ {"NameUserPrincipal", Const, 0},
+ {"Nanosleep", Func, 0},
+ {"NetApiBufferFree", Func, 0},
+ {"NetGetJoinInformation", Func, 2},
+ {"NetSetupDomainName", Const, 2},
+ {"NetSetupUnjoined", Const, 2},
+ {"NetSetupUnknownStatus", Const, 2},
+ {"NetSetupWorkgroupName", Const, 2},
+ {"NetUserGetInfo", Func, 0},
+ {"NetlinkMessage", Type, 0},
+ {"NetlinkMessage.Data", Field, 0},
+ {"NetlinkMessage.Header", Field, 0},
+ {"NetlinkRIB", Func, 0},
+ {"NetlinkRouteAttr", Type, 0},
+ {"NetlinkRouteAttr.Attr", Field, 0},
+ {"NetlinkRouteAttr.Value", Field, 0},
+ {"NetlinkRouteRequest", Type, 0},
+ {"NetlinkRouteRequest.Data", Field, 0},
+ {"NetlinkRouteRequest.Header", Field, 0},
+ {"NewCallback", Func, 0},
+ {"NewCallbackCDecl", Func, 3},
+ {"NewLazyDLL", Func, 0},
+ {"NlAttr", Type, 0},
+ {"NlAttr.Len", Field, 0},
+ {"NlAttr.Type", Field, 0},
+ {"NlMsgerr", Type, 0},
+ {"NlMsgerr.Error", Field, 0},
+ {"NlMsgerr.Msg", Field, 0},
+ {"NlMsghdr", Type, 0},
+ {"NlMsghdr.Flags", Field, 0},
+ {"NlMsghdr.Len", Field, 0},
+ {"NlMsghdr.Pid", Field, 0},
+ {"NlMsghdr.Seq", Field, 0},
+ {"NlMsghdr.Type", Field, 0},
+ {"NsecToFiletime", Func, 0},
+ {"NsecToTimespec", Func, 0},
+ {"NsecToTimeval", Func, 0},
+ {"Ntohs", Func, 0},
+ {"OCRNL", Const, 0},
+ {"OFDEL", Const, 0},
+ {"OFILL", Const, 0},
+ {"OFIOGETBMAP", Const, 1},
+ {"OID_PKIX_KP_SERVER_AUTH", Var, 0},
+ {"OID_SERVER_GATED_CRYPTO", Var, 0},
+ {"OID_SGC_NETSCAPE", Var, 0},
+ {"OLCUC", Const, 0},
+ {"ONLCR", Const, 0},
+ {"ONLRET", Const, 0},
+ {"ONOCR", Const, 0},
+ {"ONOEOT", Const, 1},
+ {"OPEN_ALWAYS", Const, 0},
+ {"OPEN_EXISTING", Const, 0},
+ {"OPOST", Const, 0},
+ {"O_ACCMODE", Const, 0},
+ {"O_ALERT", Const, 0},
+ {"O_ALT_IO", Const, 1},
+ {"O_APPEND", Const, 0},
+ {"O_ASYNC", Const, 0},
+ {"O_CLOEXEC", Const, 0},
+ {"O_CREAT", Const, 0},
+ {"O_DIRECT", Const, 0},
+ {"O_DIRECTORY", Const, 0},
+ {"O_DP_GETRAWENCRYPTED", Const, 16},
+ {"O_DSYNC", Const, 0},
+ {"O_EVTONLY", Const, 0},
+ {"O_EXCL", Const, 0},
+ {"O_EXEC", Const, 0},
+ {"O_EXLOCK", Const, 0},
+ {"O_FSYNC", Const, 0},
+ {"O_LARGEFILE", Const, 0},
+ {"O_NDELAY", Const, 0},
+ {"O_NOATIME", Const, 0},
+ {"O_NOCTTY", Const, 0},
+ {"O_NOFOLLOW", Const, 0},
+ {"O_NONBLOCK", Const, 0},
+ {"O_NOSIGPIPE", Const, 1},
+ {"O_POPUP", Const, 0},
+ {"O_RDONLY", Const, 0},
+ {"O_RDWR", Const, 0},
+ {"O_RSYNC", Const, 0},
+ {"O_SHLOCK", Const, 0},
+ {"O_SYMLINK", Const, 0},
+ {"O_SYNC", Const, 0},
+ {"O_TRUNC", Const, 0},
+ {"O_TTY_INIT", Const, 0},
+ {"O_WRONLY", Const, 0},
+ {"Open", Func, 0},
+ {"OpenCurrentProcessToken", Func, 0},
+ {"OpenProcess", Func, 0},
+ {"OpenProcessToken", Func, 0},
+ {"Openat", Func, 0},
+ {"Overlapped", Type, 0},
+ {"Overlapped.HEvent", Field, 0},
+ {"Overlapped.Internal", Field, 0},
+ {"Overlapped.InternalHigh", Field, 0},
+ {"Overlapped.Offset", Field, 0},
+ {"Overlapped.OffsetHigh", Field, 0},
+ {"PACKET_ADD_MEMBERSHIP", Const, 0},
+ {"PACKET_BROADCAST", Const, 0},
+ {"PACKET_DROP_MEMBERSHIP", Const, 0},
+ {"PACKET_FASTROUTE", Const, 0},
+ {"PACKET_HOST", Const, 0},
+ {"PACKET_LOOPBACK", Const, 0},
+ {"PACKET_MR_ALLMULTI", Const, 0},
+ {"PACKET_MR_MULTICAST", Const, 0},
+ {"PACKET_MR_PROMISC", Const, 0},
+ {"PACKET_MULTICAST", Const, 0},
+ {"PACKET_OTHERHOST", Const, 0},
+ {"PACKET_OUTGOING", Const, 0},
+ {"PACKET_RECV_OUTPUT", Const, 0},
+ {"PACKET_RX_RING", Const, 0},
+ {"PACKET_STATISTICS", Const, 0},
+ {"PAGE_EXECUTE_READ", Const, 0},
+ {"PAGE_EXECUTE_READWRITE", Const, 0},
+ {"PAGE_EXECUTE_WRITECOPY", Const, 0},
+ {"PAGE_READONLY", Const, 0},
+ {"PAGE_READWRITE", Const, 0},
+ {"PAGE_WRITECOPY", Const, 0},
+ {"PARENB", Const, 0},
+ {"PARMRK", Const, 0},
+ {"PARODD", Const, 0},
+ {"PENDIN", Const, 0},
+ {"PFL_HIDDEN", Const, 2},
+ {"PFL_MATCHES_PROTOCOL_ZERO", Const, 2},
+ {"PFL_MULTIPLE_PROTO_ENTRIES", Const, 2},
+ {"PFL_NETWORKDIRECT_PROVIDER", Const, 2},
+ {"PFL_RECOMMENDED_PROTO_ENTRY", Const, 2},
+ {"PF_FLUSH", Const, 1},
+ {"PKCS_7_ASN_ENCODING", Const, 0},
+ {"PMC5_PIPELINE_FLUSH", Const, 1},
+ {"PRIO_PGRP", Const, 2},
+ {"PRIO_PROCESS", Const, 2},
+ {"PRIO_USER", Const, 2},
+ {"PRI_IOFLUSH", Const, 1},
+ {"PROCESS_QUERY_INFORMATION", Const, 0},
+ {"PROCESS_TERMINATE", Const, 2},
+ {"PROT_EXEC", Const, 0},
+ {"PROT_GROWSDOWN", Const, 0},
+ {"PROT_GROWSUP", Const, 0},
+ {"PROT_NONE", Const, 0},
+ {"PROT_READ", Const, 0},
+ {"PROT_WRITE", Const, 0},
+ {"PROV_DH_SCHANNEL", Const, 0},
+ {"PROV_DSS", Const, 0},
+ {"PROV_DSS_DH", Const, 0},
+ {"PROV_EC_ECDSA_FULL", Const, 0},
+ {"PROV_EC_ECDSA_SIG", Const, 0},
+ {"PROV_EC_ECNRA_FULL", Const, 0},
+ {"PROV_EC_ECNRA_SIG", Const, 0},
+ {"PROV_FORTEZZA", Const, 0},
+ {"PROV_INTEL_SEC", Const, 0},
+ {"PROV_MS_EXCHANGE", Const, 0},
+ {"PROV_REPLACE_OWF", Const, 0},
+ {"PROV_RNG", Const, 0},
+ {"PROV_RSA_AES", Const, 0},
+ {"PROV_RSA_FULL", Const, 0},
+ {"PROV_RSA_SCHANNEL", Const, 0},
+ {"PROV_RSA_SIG", Const, 0},
+ {"PROV_SPYRUS_LYNKS", Const, 0},
+ {"PROV_SSL", Const, 0},
+ {"PR_CAPBSET_DROP", Const, 0},
+ {"PR_CAPBSET_READ", Const, 0},
+ {"PR_CLEAR_SECCOMP_FILTER", Const, 0},
+ {"PR_ENDIAN_BIG", Const, 0},
+ {"PR_ENDIAN_LITTLE", Const, 0},
+ {"PR_ENDIAN_PPC_LITTLE", Const, 0},
+ {"PR_FPEMU_NOPRINT", Const, 0},
+ {"PR_FPEMU_SIGFPE", Const, 0},
+ {"PR_FP_EXC_ASYNC", Const, 0},
+ {"PR_FP_EXC_DISABLED", Const, 0},
+ {"PR_FP_EXC_DIV", Const, 0},
+ {"PR_FP_EXC_INV", Const, 0},
+ {"PR_FP_EXC_NONRECOV", Const, 0},
+ {"PR_FP_EXC_OVF", Const, 0},
+ {"PR_FP_EXC_PRECISE", Const, 0},
+ {"PR_FP_EXC_RES", Const, 0},
+ {"PR_FP_EXC_SW_ENABLE", Const, 0},
+ {"PR_FP_EXC_UND", Const, 0},
+ {"PR_GET_DUMPABLE", Const, 0},
+ {"PR_GET_ENDIAN", Const, 0},
+ {"PR_GET_FPEMU", Const, 0},
+ {"PR_GET_FPEXC", Const, 0},
+ {"PR_GET_KEEPCAPS", Const, 0},
+ {"PR_GET_NAME", Const, 0},
+ {"PR_GET_PDEATHSIG", Const, 0},
+ {"PR_GET_SECCOMP", Const, 0},
+ {"PR_GET_SECCOMP_FILTER", Const, 0},
+ {"PR_GET_SECUREBITS", Const, 0},
+ {"PR_GET_TIMERSLACK", Const, 0},
+ {"PR_GET_TIMING", Const, 0},
+ {"PR_GET_TSC", Const, 0},
+ {"PR_GET_UNALIGN", Const, 0},
+ {"PR_MCE_KILL", Const, 0},
+ {"PR_MCE_KILL_CLEAR", Const, 0},
+ {"PR_MCE_KILL_DEFAULT", Const, 0},
+ {"PR_MCE_KILL_EARLY", Const, 0},
+ {"PR_MCE_KILL_GET", Const, 0},
+ {"PR_MCE_KILL_LATE", Const, 0},
+ {"PR_MCE_KILL_SET", Const, 0},
+ {"PR_SECCOMP_FILTER_EVENT", Const, 0},
+ {"PR_SECCOMP_FILTER_SYSCALL", Const, 0},
+ {"PR_SET_DUMPABLE", Const, 0},
+ {"PR_SET_ENDIAN", Const, 0},
+ {"PR_SET_FPEMU", Const, 0},
+ {"PR_SET_FPEXC", Const, 0},
+ {"PR_SET_KEEPCAPS", Const, 0},
+ {"PR_SET_NAME", Const, 0},
+ {"PR_SET_PDEATHSIG", Const, 0},
+ {"PR_SET_PTRACER", Const, 0},
+ {"PR_SET_SECCOMP", Const, 0},
+ {"PR_SET_SECCOMP_FILTER", Const, 0},
+ {"PR_SET_SECUREBITS", Const, 0},
+ {"PR_SET_TIMERSLACK", Const, 0},
+ {"PR_SET_TIMING", Const, 0},
+ {"PR_SET_TSC", Const, 0},
+ {"PR_SET_UNALIGN", Const, 0},
+ {"PR_TASK_PERF_EVENTS_DISABLE", Const, 0},
+ {"PR_TASK_PERF_EVENTS_ENABLE", Const, 0},
+ {"PR_TIMING_STATISTICAL", Const, 0},
+ {"PR_TIMING_TIMESTAMP", Const, 0},
+ {"PR_TSC_ENABLE", Const, 0},
+ {"PR_TSC_SIGSEGV", Const, 0},
+ {"PR_UNALIGN_NOPRINT", Const, 0},
+ {"PR_UNALIGN_SIGBUS", Const, 0},
+ {"PTRACE_ARCH_PRCTL", Const, 0},
+ {"PTRACE_ATTACH", Const, 0},
+ {"PTRACE_CONT", Const, 0},
+ {"PTRACE_DETACH", Const, 0},
+ {"PTRACE_EVENT_CLONE", Const, 0},
+ {"PTRACE_EVENT_EXEC", Const, 0},
+ {"PTRACE_EVENT_EXIT", Const, 0},
+ {"PTRACE_EVENT_FORK", Const, 0},
+ {"PTRACE_EVENT_VFORK", Const, 0},
+ {"PTRACE_EVENT_VFORK_DONE", Const, 0},
+ {"PTRACE_GETCRUNCHREGS", Const, 0},
+ {"PTRACE_GETEVENTMSG", Const, 0},
+ {"PTRACE_GETFPREGS", Const, 0},
+ {"PTRACE_GETFPXREGS", Const, 0},
+ {"PTRACE_GETHBPREGS", Const, 0},
+ {"PTRACE_GETREGS", Const, 0},
+ {"PTRACE_GETREGSET", Const, 0},
+ {"PTRACE_GETSIGINFO", Const, 0},
+ {"PTRACE_GETVFPREGS", Const, 0},
+ {"PTRACE_GETWMMXREGS", Const, 0},
+ {"PTRACE_GET_THREAD_AREA", Const, 0},
+ {"PTRACE_KILL", Const, 0},
+ {"PTRACE_OLDSETOPTIONS", Const, 0},
+ {"PTRACE_O_MASK", Const, 0},
+ {"PTRACE_O_TRACECLONE", Const, 0},
+ {"PTRACE_O_TRACEEXEC", Const, 0},
+ {"PTRACE_O_TRACEEXIT", Const, 0},
+ {"PTRACE_O_TRACEFORK", Const, 0},
+ {"PTRACE_O_TRACESYSGOOD", Const, 0},
+ {"PTRACE_O_TRACEVFORK", Const, 0},
+ {"PTRACE_O_TRACEVFORKDONE", Const, 0},
+ {"PTRACE_PEEKDATA", Const, 0},
+ {"PTRACE_PEEKTEXT", Const, 0},
+ {"PTRACE_PEEKUSR", Const, 0},
+ {"PTRACE_POKEDATA", Const, 0},
+ {"PTRACE_POKETEXT", Const, 0},
+ {"PTRACE_POKEUSR", Const, 0},
+ {"PTRACE_SETCRUNCHREGS", Const, 0},
+ {"PTRACE_SETFPREGS", Const, 0},
+ {"PTRACE_SETFPXREGS", Const, 0},
+ {"PTRACE_SETHBPREGS", Const, 0},
+ {"PTRACE_SETOPTIONS", Const, 0},
+ {"PTRACE_SETREGS", Const, 0},
+ {"PTRACE_SETREGSET", Const, 0},
+ {"PTRACE_SETSIGINFO", Const, 0},
+ {"PTRACE_SETVFPREGS", Const, 0},
+ {"PTRACE_SETWMMXREGS", Const, 0},
+ {"PTRACE_SET_SYSCALL", Const, 0},
+ {"PTRACE_SET_THREAD_AREA", Const, 0},
+ {"PTRACE_SINGLEBLOCK", Const, 0},
+ {"PTRACE_SINGLESTEP", Const, 0},
+ {"PTRACE_SYSCALL", Const, 0},
+ {"PTRACE_SYSEMU", Const, 0},
+ {"PTRACE_SYSEMU_SINGLESTEP", Const, 0},
+ {"PTRACE_TRACEME", Const, 0},
+ {"PT_ATTACH", Const, 0},
+ {"PT_ATTACHEXC", Const, 0},
+ {"PT_CONTINUE", Const, 0},
+ {"PT_DATA_ADDR", Const, 0},
+ {"PT_DENY_ATTACH", Const, 0},
+ {"PT_DETACH", Const, 0},
+ {"PT_FIRSTMACH", Const, 0},
+ {"PT_FORCEQUOTA", Const, 0},
+ {"PT_KILL", Const, 0},
+ {"PT_MASK", Const, 1},
+ {"PT_READ_D", Const, 0},
+ {"PT_READ_I", Const, 0},
+ {"PT_READ_U", Const, 0},
+ {"PT_SIGEXC", Const, 0},
+ {"PT_STEP", Const, 0},
+ {"PT_TEXT_ADDR", Const, 0},
+ {"PT_TEXT_END_ADDR", Const, 0},
+ {"PT_THUPDATE", Const, 0},
+ {"PT_TRACE_ME", Const, 0},
+ {"PT_WRITE_D", Const, 0},
+ {"PT_WRITE_I", Const, 0},
+ {"PT_WRITE_U", Const, 0},
+ {"ParseDirent", Func, 0},
+ {"ParseNetlinkMessage", Func, 0},
+ {"ParseNetlinkRouteAttr", Func, 0},
+ {"ParseRoutingMessage", Func, 0},
+ {"ParseRoutingSockaddr", Func, 0},
+ {"ParseSocketControlMessage", Func, 0},
+ {"ParseUnixCredentials", Func, 0},
+ {"ParseUnixRights", Func, 0},
+ {"PathMax", Const, 0},
+ {"Pathconf", Func, 0},
+ {"Pause", Func, 0},
+ {"Pipe", Func, 0},
+ {"Pipe2", Func, 1},
+ {"PivotRoot", Func, 0},
+ {"Pointer", Type, 11},
+ {"PostQueuedCompletionStatus", Func, 0},
+ {"Pread", Func, 0},
+ {"Proc", Type, 0},
+ {"Proc.Dll", Field, 0},
+ {"Proc.Name", Field, 0},
+ {"ProcAttr", Type, 0},
+ {"ProcAttr.Dir", Field, 0},
+ {"ProcAttr.Env", Field, 0},
+ {"ProcAttr.Files", Field, 0},
+ {"ProcAttr.Sys", Field, 0},
+ {"Process32First", Func, 4},
+ {"Process32Next", Func, 4},
+ {"ProcessEntry32", Type, 4},
+ {"ProcessEntry32.DefaultHeapID", Field, 4},
+ {"ProcessEntry32.ExeFile", Field, 4},
+ {"ProcessEntry32.Flags", Field, 4},
+ {"ProcessEntry32.ModuleID", Field, 4},
+ {"ProcessEntry32.ParentProcessID", Field, 4},
+ {"ProcessEntry32.PriClassBase", Field, 4},
+ {"ProcessEntry32.ProcessID", Field, 4},
+ {"ProcessEntry32.Size", Field, 4},
+ {"ProcessEntry32.Threads", Field, 4},
+ {"ProcessEntry32.Usage", Field, 4},
+ {"ProcessInformation", Type, 0},
+ {"ProcessInformation.Process", Field, 0},
+ {"ProcessInformation.ProcessId", Field, 0},
+ {"ProcessInformation.Thread", Field, 0},
+ {"ProcessInformation.ThreadId", Field, 0},
+ {"Protoent", Type, 0},
+ {"Protoent.Aliases", Field, 0},
+ {"Protoent.Name", Field, 0},
+ {"Protoent.Proto", Field, 0},
+ {"PtraceAttach", Func, 0},
+ {"PtraceCont", Func, 0},
+ {"PtraceDetach", Func, 0},
+ {"PtraceGetEventMsg", Func, 0},
+ {"PtraceGetRegs", Func, 0},
+ {"PtracePeekData", Func, 0},
+ {"PtracePeekText", Func, 0},
+ {"PtracePokeData", Func, 0},
+ {"PtracePokeText", Func, 0},
+ {"PtraceRegs", Type, 0},
+ {"PtraceRegs.Cs", Field, 0},
+ {"PtraceRegs.Ds", Field, 0},
+ {"PtraceRegs.Eax", Field, 0},
+ {"PtraceRegs.Ebp", Field, 0},
+ {"PtraceRegs.Ebx", Field, 0},
+ {"PtraceRegs.Ecx", Field, 0},
+ {"PtraceRegs.Edi", Field, 0},
+ {"PtraceRegs.Edx", Field, 0},
+ {"PtraceRegs.Eflags", Field, 0},
+ {"PtraceRegs.Eip", Field, 0},
+ {"PtraceRegs.Es", Field, 0},
+ {"PtraceRegs.Esi", Field, 0},
+ {"PtraceRegs.Esp", Field, 0},
+ {"PtraceRegs.Fs", Field, 0},
+ {"PtraceRegs.Fs_base", Field, 0},
+ {"PtraceRegs.Gs", Field, 0},
+ {"PtraceRegs.Gs_base", Field, 0},
+ {"PtraceRegs.Orig_eax", Field, 0},
+ {"PtraceRegs.Orig_rax", Field, 0},
+ {"PtraceRegs.R10", Field, 0},
+ {"PtraceRegs.R11", Field, 0},
+ {"PtraceRegs.R12", Field, 0},
+ {"PtraceRegs.R13", Field, 0},
+ {"PtraceRegs.R14", Field, 0},
+ {"PtraceRegs.R15", Field, 0},
+ {"PtraceRegs.R8", Field, 0},
+ {"PtraceRegs.R9", Field, 0},
+ {"PtraceRegs.Rax", Field, 0},
+ {"PtraceRegs.Rbp", Field, 0},
+ {"PtraceRegs.Rbx", Field, 0},
+ {"PtraceRegs.Rcx", Field, 0},
+ {"PtraceRegs.Rdi", Field, 0},
+ {"PtraceRegs.Rdx", Field, 0},
+ {"PtraceRegs.Rip", Field, 0},
+ {"PtraceRegs.Rsi", Field, 0},
+ {"PtraceRegs.Rsp", Field, 0},
+ {"PtraceRegs.Ss", Field, 0},
+ {"PtraceRegs.Uregs", Field, 0},
+ {"PtraceRegs.Xcs", Field, 0},
+ {"PtraceRegs.Xds", Field, 0},
+ {"PtraceRegs.Xes", Field, 0},
+ {"PtraceRegs.Xfs", Field, 0},
+ {"PtraceRegs.Xgs", Field, 0},
+ {"PtraceRegs.Xss", Field, 0},
+ {"PtraceSetOptions", Func, 0},
+ {"PtraceSetRegs", Func, 0},
+ {"PtraceSingleStep", Func, 0},
+ {"PtraceSyscall", Func, 1},
+ {"Pwrite", Func, 0},
+ {"REG_BINARY", Const, 0},
+ {"REG_DWORD", Const, 0},
+ {"REG_DWORD_BIG_ENDIAN", Const, 0},
+ {"REG_DWORD_LITTLE_ENDIAN", Const, 0},
+ {"REG_EXPAND_SZ", Const, 0},
+ {"REG_FULL_RESOURCE_DESCRIPTOR", Const, 0},
+ {"REG_LINK", Const, 0},
+ {"REG_MULTI_SZ", Const, 0},
+ {"REG_NONE", Const, 0},
+ {"REG_QWORD", Const, 0},
+ {"REG_QWORD_LITTLE_ENDIAN", Const, 0},
+ {"REG_RESOURCE_LIST", Const, 0},
+ {"REG_RESOURCE_REQUIREMENTS_LIST", Const, 0},
+ {"REG_SZ", Const, 0},
+ {"RLIMIT_AS", Const, 0},
+ {"RLIMIT_CORE", Const, 0},
+ {"RLIMIT_CPU", Const, 0},
+ {"RLIMIT_CPU_USAGE_MONITOR", Const, 16},
+ {"RLIMIT_DATA", Const, 0},
+ {"RLIMIT_FSIZE", Const, 0},
+ {"RLIMIT_NOFILE", Const, 0},
+ {"RLIMIT_STACK", Const, 0},
+ {"RLIM_INFINITY", Const, 0},
+ {"RTAX_ADVMSS", Const, 0},
+ {"RTAX_AUTHOR", Const, 0},
+ {"RTAX_BRD", Const, 0},
+ {"RTAX_CWND", Const, 0},
+ {"RTAX_DST", Const, 0},
+ {"RTAX_FEATURES", Const, 0},
+ {"RTAX_FEATURE_ALLFRAG", Const, 0},
+ {"RTAX_FEATURE_ECN", Const, 0},
+ {"RTAX_FEATURE_SACK", Const, 0},
+ {"RTAX_FEATURE_TIMESTAMP", Const, 0},
+ {"RTAX_GATEWAY", Const, 0},
+ {"RTAX_GENMASK", Const, 0},
+ {"RTAX_HOPLIMIT", Const, 0},
+ {"RTAX_IFA", Const, 0},
+ {"RTAX_IFP", Const, 0},
+ {"RTAX_INITCWND", Const, 0},
+ {"RTAX_INITRWND", Const, 0},
+ {"RTAX_LABEL", Const, 1},
+ {"RTAX_LOCK", Const, 0},
+ {"RTAX_MAX", Const, 0},
+ {"RTAX_MTU", Const, 0},
+ {"RTAX_NETMASK", Const, 0},
+ {"RTAX_REORDERING", Const, 0},
+ {"RTAX_RTO_MIN", Const, 0},
+ {"RTAX_RTT", Const, 0},
+ {"RTAX_RTTVAR", Const, 0},
+ {"RTAX_SRC", Const, 1},
+ {"RTAX_SRCMASK", Const, 1},
+ {"RTAX_SSTHRESH", Const, 0},
+ {"RTAX_TAG", Const, 1},
+ {"RTAX_UNSPEC", Const, 0},
+ {"RTAX_WINDOW", Const, 0},
+ {"RTA_ALIGNTO", Const, 0},
+ {"RTA_AUTHOR", Const, 0},
+ {"RTA_BRD", Const, 0},
+ {"RTA_CACHEINFO", Const, 0},
+ {"RTA_DST", Const, 0},
+ {"RTA_FLOW", Const, 0},
+ {"RTA_GATEWAY", Const, 0},
+ {"RTA_GENMASK", Const, 0},
+ {"RTA_IFA", Const, 0},
+ {"RTA_IFP", Const, 0},
+ {"RTA_IIF", Const, 0},
+ {"RTA_LABEL", Const, 1},
+ {"RTA_MAX", Const, 0},
+ {"RTA_METRICS", Const, 0},
+ {"RTA_MULTIPATH", Const, 0},
+ {"RTA_NETMASK", Const, 0},
+ {"RTA_OIF", Const, 0},
+ {"RTA_PREFSRC", Const, 0},
+ {"RTA_PRIORITY", Const, 0},
+ {"RTA_SRC", Const, 0},
+ {"RTA_SRCMASK", Const, 1},
+ {"RTA_TABLE", Const, 0},
+ {"RTA_TAG", Const, 1},
+ {"RTA_UNSPEC", Const, 0},
+ {"RTCF_DIRECTSRC", Const, 0},
+ {"RTCF_DOREDIRECT", Const, 0},
+ {"RTCF_LOG", Const, 0},
+ {"RTCF_MASQ", Const, 0},
+ {"RTCF_NAT", Const, 0},
+ {"RTCF_VALVE", Const, 0},
+ {"RTF_ADDRCLASSMASK", Const, 0},
+ {"RTF_ADDRCONF", Const, 0},
+ {"RTF_ALLONLINK", Const, 0},
+ {"RTF_ANNOUNCE", Const, 1},
+ {"RTF_BLACKHOLE", Const, 0},
+ {"RTF_BROADCAST", Const, 0},
+ {"RTF_CACHE", Const, 0},
+ {"RTF_CLONED", Const, 1},
+ {"RTF_CLONING", Const, 0},
+ {"RTF_CONDEMNED", Const, 0},
+ {"RTF_DEFAULT", Const, 0},
+ {"RTF_DELCLONE", Const, 0},
+ {"RTF_DONE", Const, 0},
+ {"RTF_DYNAMIC", Const, 0},
+ {"RTF_FLOW", Const, 0},
+ {"RTF_FMASK", Const, 0},
+ {"RTF_GATEWAY", Const, 0},
+ {"RTF_GWFLAG_COMPAT", Const, 3},
+ {"RTF_HOST", Const, 0},
+ {"RTF_IFREF", Const, 0},
+ {"RTF_IFSCOPE", Const, 0},
+ {"RTF_INTERFACE", Const, 0},
+ {"RTF_IRTT", Const, 0},
+ {"RTF_LINKRT", Const, 0},
+ {"RTF_LLDATA", Const, 0},
+ {"RTF_LLINFO", Const, 0},
+ {"RTF_LOCAL", Const, 0},
+ {"RTF_MASK", Const, 1},
+ {"RTF_MODIFIED", Const, 0},
+ {"RTF_MPATH", Const, 1},
+ {"RTF_MPLS", Const, 1},
+ {"RTF_MSS", Const, 0},
+ {"RTF_MTU", Const, 0},
+ {"RTF_MULTICAST", Const, 0},
+ {"RTF_NAT", Const, 0},
+ {"RTF_NOFORWARD", Const, 0},
+ {"RTF_NONEXTHOP", Const, 0},
+ {"RTF_NOPMTUDISC", Const, 0},
+ {"RTF_PERMANENT_ARP", Const, 1},
+ {"RTF_PINNED", Const, 0},
+ {"RTF_POLICY", Const, 0},
+ {"RTF_PRCLONING", Const, 0},
+ {"RTF_PROTO1", Const, 0},
+ {"RTF_PROTO2", Const, 0},
+ {"RTF_PROTO3", Const, 0},
+ {"RTF_PROXY", Const, 16},
+ {"RTF_REINSTATE", Const, 0},
+ {"RTF_REJECT", Const, 0},
+ {"RTF_RNH_LOCKED", Const, 0},
+ {"RTF_ROUTER", Const, 16},
+ {"RTF_SOURCE", Const, 1},
+ {"RTF_SRC", Const, 1},
+ {"RTF_STATIC", Const, 0},
+ {"RTF_STICKY", Const, 0},
+ {"RTF_THROW", Const, 0},
+ {"RTF_TUNNEL", Const, 1},
+ {"RTF_UP", Const, 0},
+ {"RTF_USETRAILERS", Const, 1},
+ {"RTF_WASCLONED", Const, 0},
+ {"RTF_WINDOW", Const, 0},
+ {"RTF_XRESOLVE", Const, 0},
+ {"RTM_ADD", Const, 0},
+ {"RTM_BASE", Const, 0},
+ {"RTM_CHANGE", Const, 0},
+ {"RTM_CHGADDR", Const, 1},
+ {"RTM_DELACTION", Const, 0},
+ {"RTM_DELADDR", Const, 0},
+ {"RTM_DELADDRLABEL", Const, 0},
+ {"RTM_DELETE", Const, 0},
+ {"RTM_DELLINK", Const, 0},
+ {"RTM_DELMADDR", Const, 0},
+ {"RTM_DELNEIGH", Const, 0},
+ {"RTM_DELQDISC", Const, 0},
+ {"RTM_DELROUTE", Const, 0},
+ {"RTM_DELRULE", Const, 0},
+ {"RTM_DELTCLASS", Const, 0},
+ {"RTM_DELTFILTER", Const, 0},
+ {"RTM_DESYNC", Const, 1},
+ {"RTM_F_CLONED", Const, 0},
+ {"RTM_F_EQUALIZE", Const, 0},
+ {"RTM_F_NOTIFY", Const, 0},
+ {"RTM_F_PREFIX", Const, 0},
+ {"RTM_GET", Const, 0},
+ {"RTM_GET2", Const, 0},
+ {"RTM_GETACTION", Const, 0},
+ {"RTM_GETADDR", Const, 0},
+ {"RTM_GETADDRLABEL", Const, 0},
+ {"RTM_GETANYCAST", Const, 0},
+ {"RTM_GETDCB", Const, 0},
+ {"RTM_GETLINK", Const, 0},
+ {"RTM_GETMULTICAST", Const, 0},
+ {"RTM_GETNEIGH", Const, 0},
+ {"RTM_GETNEIGHTBL", Const, 0},
+ {"RTM_GETQDISC", Const, 0},
+ {"RTM_GETROUTE", Const, 0},
+ {"RTM_GETRULE", Const, 0},
+ {"RTM_GETTCLASS", Const, 0},
+ {"RTM_GETTFILTER", Const, 0},
+ {"RTM_IEEE80211", Const, 0},
+ {"RTM_IFANNOUNCE", Const, 0},
+ {"RTM_IFINFO", Const, 0},
+ {"RTM_IFINFO2", Const, 0},
+ {"RTM_LLINFO_UPD", Const, 1},
+ {"RTM_LOCK", Const, 0},
+ {"RTM_LOSING", Const, 0},
+ {"RTM_MAX", Const, 0},
+ {"RTM_MAXSIZE", Const, 1},
+ {"RTM_MISS", Const, 0},
+ {"RTM_NEWACTION", Const, 0},
+ {"RTM_NEWADDR", Const, 0},
+ {"RTM_NEWADDRLABEL", Const, 0},
+ {"RTM_NEWLINK", Const, 0},
+ {"RTM_NEWMADDR", Const, 0},
+ {"RTM_NEWMADDR2", Const, 0},
+ {"RTM_NEWNDUSEROPT", Const, 0},
+ {"RTM_NEWNEIGH", Const, 0},
+ {"RTM_NEWNEIGHTBL", Const, 0},
+ {"RTM_NEWPREFIX", Const, 0},
+ {"RTM_NEWQDISC", Const, 0},
+ {"RTM_NEWROUTE", Const, 0},
+ {"RTM_NEWRULE", Const, 0},
+ {"RTM_NEWTCLASS", Const, 0},
+ {"RTM_NEWTFILTER", Const, 0},
+ {"RTM_NR_FAMILIES", Const, 0},
+ {"RTM_NR_MSGTYPES", Const, 0},
+ {"RTM_OIFINFO", Const, 1},
+ {"RTM_OLDADD", Const, 0},
+ {"RTM_OLDDEL", Const, 0},
+ {"RTM_OOIFINFO", Const, 1},
+ {"RTM_REDIRECT", Const, 0},
+ {"RTM_RESOLVE", Const, 0},
+ {"RTM_RTTUNIT", Const, 0},
+ {"RTM_SETDCB", Const, 0},
+ {"RTM_SETGATE", Const, 1},
+ {"RTM_SETLINK", Const, 0},
+ {"RTM_SETNEIGHTBL", Const, 0},
+ {"RTM_VERSION", Const, 0},
+ {"RTNH_ALIGNTO", Const, 0},
+ {"RTNH_F_DEAD", Const, 0},
+ {"RTNH_F_ONLINK", Const, 0},
+ {"RTNH_F_PERVASIVE", Const, 0},
+ {"RTNLGRP_IPV4_IFADDR", Const, 1},
+ {"RTNLGRP_IPV4_MROUTE", Const, 1},
+ {"RTNLGRP_IPV4_ROUTE", Const, 1},
+ {"RTNLGRP_IPV4_RULE", Const, 1},
+ {"RTNLGRP_IPV6_IFADDR", Const, 1},
+ {"RTNLGRP_IPV6_IFINFO", Const, 1},
+ {"RTNLGRP_IPV6_MROUTE", Const, 1},
+ {"RTNLGRP_IPV6_PREFIX", Const, 1},
+ {"RTNLGRP_IPV6_ROUTE", Const, 1},
+ {"RTNLGRP_IPV6_RULE", Const, 1},
+ {"RTNLGRP_LINK", Const, 1},
+ {"RTNLGRP_ND_USEROPT", Const, 1},
+ {"RTNLGRP_NEIGH", Const, 1},
+ {"RTNLGRP_NONE", Const, 1},
+ {"RTNLGRP_NOTIFY", Const, 1},
+ {"RTNLGRP_TC", Const, 1},
+ {"RTN_ANYCAST", Const, 0},
+ {"RTN_BLACKHOLE", Const, 0},
+ {"RTN_BROADCAST", Const, 0},
+ {"RTN_LOCAL", Const, 0},
+ {"RTN_MAX", Const, 0},
+ {"RTN_MULTICAST", Const, 0},
+ {"RTN_NAT", Const, 0},
+ {"RTN_PROHIBIT", Const, 0},
+ {"RTN_THROW", Const, 0},
+ {"RTN_UNICAST", Const, 0},
+ {"RTN_UNREACHABLE", Const, 0},
+ {"RTN_UNSPEC", Const, 0},
+ {"RTN_XRESOLVE", Const, 0},
+ {"RTPROT_BIRD", Const, 0},
+ {"RTPROT_BOOT", Const, 0},
+ {"RTPROT_DHCP", Const, 0},
+ {"RTPROT_DNROUTED", Const, 0},
+ {"RTPROT_GATED", Const, 0},
+ {"RTPROT_KERNEL", Const, 0},
+ {"RTPROT_MRT", Const, 0},
+ {"RTPROT_NTK", Const, 0},
+ {"RTPROT_RA", Const, 0},
+ {"RTPROT_REDIRECT", Const, 0},
+ {"RTPROT_STATIC", Const, 0},
+ {"RTPROT_UNSPEC", Const, 0},
+ {"RTPROT_XORP", Const, 0},
+ {"RTPROT_ZEBRA", Const, 0},
+ {"RTV_EXPIRE", Const, 0},
+ {"RTV_HOPCOUNT", Const, 0},
+ {"RTV_MTU", Const, 0},
+ {"RTV_RPIPE", Const, 0},
+ {"RTV_RTT", Const, 0},
+ {"RTV_RTTVAR", Const, 0},
+ {"RTV_SPIPE", Const, 0},
+ {"RTV_SSTHRESH", Const, 0},
+ {"RTV_WEIGHT", Const, 0},
+ {"RT_CACHING_CONTEXT", Const, 1},
+ {"RT_CLASS_DEFAULT", Const, 0},
+ {"RT_CLASS_LOCAL", Const, 0},
+ {"RT_CLASS_MAIN", Const, 0},
+ {"RT_CLASS_MAX", Const, 0},
+ {"RT_CLASS_UNSPEC", Const, 0},
+ {"RT_DEFAULT_FIB", Const, 1},
+ {"RT_NORTREF", Const, 1},
+ {"RT_SCOPE_HOST", Const, 0},
+ {"RT_SCOPE_LINK", Const, 0},
+ {"RT_SCOPE_NOWHERE", Const, 0},
+ {"RT_SCOPE_SITE", Const, 0},
+ {"RT_SCOPE_UNIVERSE", Const, 0},
+ {"RT_TABLEID_MAX", Const, 1},
+ {"RT_TABLE_COMPAT", Const, 0},
+ {"RT_TABLE_DEFAULT", Const, 0},
+ {"RT_TABLE_LOCAL", Const, 0},
+ {"RT_TABLE_MAIN", Const, 0},
+ {"RT_TABLE_MAX", Const, 0},
+ {"RT_TABLE_UNSPEC", Const, 0},
+ {"RUSAGE_CHILDREN", Const, 0},
+ {"RUSAGE_SELF", Const, 0},
+ {"RUSAGE_THREAD", Const, 0},
+ {"Radvisory_t", Type, 0},
+ {"Radvisory_t.Count", Field, 0},
+ {"Radvisory_t.Offset", Field, 0},
+ {"Radvisory_t.Pad_cgo_0", Field, 0},
+ {"RawConn", Type, 9},
+ {"RawSockaddr", Type, 0},
+ {"RawSockaddr.Data", Field, 0},
+ {"RawSockaddr.Family", Field, 0},
+ {"RawSockaddr.Len", Field, 0},
+ {"RawSockaddrAny", Type, 0},
+ {"RawSockaddrAny.Addr", Field, 0},
+ {"RawSockaddrAny.Pad", Field, 0},
+ {"RawSockaddrDatalink", Type, 0},
+ {"RawSockaddrDatalink.Alen", Field, 0},
+ {"RawSockaddrDatalink.Data", Field, 0},
+ {"RawSockaddrDatalink.Family", Field, 0},
+ {"RawSockaddrDatalink.Index", Field, 0},
+ {"RawSockaddrDatalink.Len", Field, 0},
+ {"RawSockaddrDatalink.Nlen", Field, 0},
+ {"RawSockaddrDatalink.Pad_cgo_0", Field, 2},
+ {"RawSockaddrDatalink.Slen", Field, 0},
+ {"RawSockaddrDatalink.Type", Field, 0},
+ {"RawSockaddrInet4", Type, 0},
+ {"RawSockaddrInet4.Addr", Field, 0},
+ {"RawSockaddrInet4.Family", Field, 0},
+ {"RawSockaddrInet4.Len", Field, 0},
+ {"RawSockaddrInet4.Port", Field, 0},
+ {"RawSockaddrInet4.Zero", Field, 0},
+ {"RawSockaddrInet6", Type, 0},
+ {"RawSockaddrInet6.Addr", Field, 0},
+ {"RawSockaddrInet6.Family", Field, 0},
+ {"RawSockaddrInet6.Flowinfo", Field, 0},
+ {"RawSockaddrInet6.Len", Field, 0},
+ {"RawSockaddrInet6.Port", Field, 0},
+ {"RawSockaddrInet6.Scope_id", Field, 0},
+ {"RawSockaddrLinklayer", Type, 0},
+ {"RawSockaddrLinklayer.Addr", Field, 0},
+ {"RawSockaddrLinklayer.Family", Field, 0},
+ {"RawSockaddrLinklayer.Halen", Field, 0},
+ {"RawSockaddrLinklayer.Hatype", Field, 0},
+ {"RawSockaddrLinklayer.Ifindex", Field, 0},
+ {"RawSockaddrLinklayer.Pkttype", Field, 0},
+ {"RawSockaddrLinklayer.Protocol", Field, 0},
+ {"RawSockaddrNetlink", Type, 0},
+ {"RawSockaddrNetlink.Family", Field, 0},
+ {"RawSockaddrNetlink.Groups", Field, 0},
+ {"RawSockaddrNetlink.Pad", Field, 0},
+ {"RawSockaddrNetlink.Pid", Field, 0},
+ {"RawSockaddrUnix", Type, 0},
+ {"RawSockaddrUnix.Family", Field, 0},
+ {"RawSockaddrUnix.Len", Field, 0},
+ {"RawSockaddrUnix.Pad_cgo_0", Field, 2},
+ {"RawSockaddrUnix.Path", Field, 0},
+ {"RawSyscall", Func, 0},
+ {"RawSyscall6", Func, 0},
+ {"Read", Func, 0},
+ {"ReadConsole", Func, 1},
+ {"ReadDirectoryChanges", Func, 0},
+ {"ReadDirent", Func, 0},
+ {"ReadFile", Func, 0},
+ {"Readlink", Func, 0},
+ {"Reboot", Func, 0},
+ {"Recvfrom", Func, 0},
+ {"Recvmsg", Func, 0},
+ {"RegCloseKey", Func, 0},
+ {"RegEnumKeyEx", Func, 0},
+ {"RegOpenKeyEx", Func, 0},
+ {"RegQueryInfoKey", Func, 0},
+ {"RegQueryValueEx", Func, 0},
+ {"RemoveDirectory", Func, 0},
+ {"Removexattr", Func, 1},
+ {"Rename", Func, 0},
+ {"Renameat", Func, 0},
+ {"Revoke", Func, 0},
+ {"Rlimit", Type, 0},
+ {"Rlimit.Cur", Field, 0},
+ {"Rlimit.Max", Field, 0},
+ {"Rmdir", Func, 0},
+ {"RouteMessage", Type, 0},
+ {"RouteMessage.Data", Field, 0},
+ {"RouteMessage.Header", Field, 0},
+ {"RouteRIB", Func, 0},
+ {"RoutingMessage", Type, 0},
+ {"RtAttr", Type, 0},
+ {"RtAttr.Len", Field, 0},
+ {"RtAttr.Type", Field, 0},
+ {"RtGenmsg", Type, 0},
+ {"RtGenmsg.Family", Field, 0},
+ {"RtMetrics", Type, 0},
+ {"RtMetrics.Expire", Field, 0},
+ {"RtMetrics.Filler", Field, 0},
+ {"RtMetrics.Hopcount", Field, 0},
+ {"RtMetrics.Locks", Field, 0},
+ {"RtMetrics.Mtu", Field, 0},
+ {"RtMetrics.Pad", Field, 3},
+ {"RtMetrics.Pksent", Field, 0},
+ {"RtMetrics.Recvpipe", Field, 0},
+ {"RtMetrics.Refcnt", Field, 2},
+ {"RtMetrics.Rtt", Field, 0},
+ {"RtMetrics.Rttvar", Field, 0},
+ {"RtMetrics.Sendpipe", Field, 0},
+ {"RtMetrics.Ssthresh", Field, 0},
+ {"RtMetrics.Weight", Field, 0},
+ {"RtMsg", Type, 0},
+ {"RtMsg.Dst_len", Field, 0},
+ {"RtMsg.Family", Field, 0},
+ {"RtMsg.Flags", Field, 0},
+ {"RtMsg.Protocol", Field, 0},
+ {"RtMsg.Scope", Field, 0},
+ {"RtMsg.Src_len", Field, 0},
+ {"RtMsg.Table", Field, 0},
+ {"RtMsg.Tos", Field, 0},
+ {"RtMsg.Type", Field, 0},
+ {"RtMsghdr", Type, 0},
+ {"RtMsghdr.Addrs", Field, 0},
+ {"RtMsghdr.Errno", Field, 0},
+ {"RtMsghdr.Flags", Field, 0},
+ {"RtMsghdr.Fmask", Field, 0},
+ {"RtMsghdr.Hdrlen", Field, 2},
+ {"RtMsghdr.Index", Field, 0},
+ {"RtMsghdr.Inits", Field, 0},
+ {"RtMsghdr.Mpls", Field, 2},
+ {"RtMsghdr.Msglen", Field, 0},
+ {"RtMsghdr.Pad_cgo_0", Field, 0},
+ {"RtMsghdr.Pad_cgo_1", Field, 2},
+ {"RtMsghdr.Pid", Field, 0},
+ {"RtMsghdr.Priority", Field, 2},
+ {"RtMsghdr.Rmx", Field, 0},
+ {"RtMsghdr.Seq", Field, 0},
+ {"RtMsghdr.Tableid", Field, 2},
+ {"RtMsghdr.Type", Field, 0},
+ {"RtMsghdr.Use", Field, 0},
+ {"RtMsghdr.Version", Field, 0},
+ {"RtNexthop", Type, 0},
+ {"RtNexthop.Flags", Field, 0},
+ {"RtNexthop.Hops", Field, 0},
+ {"RtNexthop.Ifindex", Field, 0},
+ {"RtNexthop.Len", Field, 0},
+ {"Rusage", Type, 0},
+ {"Rusage.CreationTime", Field, 0},
+ {"Rusage.ExitTime", Field, 0},
+ {"Rusage.Idrss", Field, 0},
+ {"Rusage.Inblock", Field, 0},
+ {"Rusage.Isrss", Field, 0},
+ {"Rusage.Ixrss", Field, 0},
+ {"Rusage.KernelTime", Field, 0},
+ {"Rusage.Majflt", Field, 0},
+ {"Rusage.Maxrss", Field, 0},
+ {"Rusage.Minflt", Field, 0},
+ {"Rusage.Msgrcv", Field, 0},
+ {"Rusage.Msgsnd", Field, 0},
+ {"Rusage.Nivcsw", Field, 0},
+ {"Rusage.Nsignals", Field, 0},
+ {"Rusage.Nswap", Field, 0},
+ {"Rusage.Nvcsw", Field, 0},
+ {"Rusage.Oublock", Field, 0},
+ {"Rusage.Stime", Field, 0},
+ {"Rusage.UserTime", Field, 0},
+ {"Rusage.Utime", Field, 0},
+ {"SCM_BINTIME", Const, 0},
+ {"SCM_CREDENTIALS", Const, 0},
+ {"SCM_CREDS", Const, 0},
+ {"SCM_RIGHTS", Const, 0},
+ {"SCM_TIMESTAMP", Const, 0},
+ {"SCM_TIMESTAMPING", Const, 0},
+ {"SCM_TIMESTAMPNS", Const, 0},
+ {"SCM_TIMESTAMP_MONOTONIC", Const, 0},
+ {"SHUT_RD", Const, 0},
+ {"SHUT_RDWR", Const, 0},
+ {"SHUT_WR", Const, 0},
+ {"SID", Type, 0},
+ {"SIDAndAttributes", Type, 0},
+ {"SIDAndAttributes.Attributes", Field, 0},
+ {"SIDAndAttributes.Sid", Field, 0},
+ {"SIGABRT", Const, 0},
+ {"SIGALRM", Const, 0},
+ {"SIGBUS", Const, 0},
+ {"SIGCHLD", Const, 0},
+ {"SIGCLD", Const, 0},
+ {"SIGCONT", Const, 0},
+ {"SIGEMT", Const, 0},
+ {"SIGFPE", Const, 0},
+ {"SIGHUP", Const, 0},
+ {"SIGILL", Const, 0},
+ {"SIGINFO", Const, 0},
+ {"SIGINT", Const, 0},
+ {"SIGIO", Const, 0},
+ {"SIGIOT", Const, 0},
+ {"SIGKILL", Const, 0},
+ {"SIGLIBRT", Const, 1},
+ {"SIGLWP", Const, 0},
+ {"SIGPIPE", Const, 0},
+ {"SIGPOLL", Const, 0},
+ {"SIGPROF", Const, 0},
+ {"SIGPWR", Const, 0},
+ {"SIGQUIT", Const, 0},
+ {"SIGSEGV", Const, 0},
+ {"SIGSTKFLT", Const, 0},
+ {"SIGSTOP", Const, 0},
+ {"SIGSYS", Const, 0},
+ {"SIGTERM", Const, 0},
+ {"SIGTHR", Const, 0},
+ {"SIGTRAP", Const, 0},
+ {"SIGTSTP", Const, 0},
+ {"SIGTTIN", Const, 0},
+ {"SIGTTOU", Const, 0},
+ {"SIGUNUSED", Const, 0},
+ {"SIGURG", Const, 0},
+ {"SIGUSR1", Const, 0},
+ {"SIGUSR2", Const, 0},
+ {"SIGVTALRM", Const, 0},
+ {"SIGWINCH", Const, 0},
+ {"SIGXCPU", Const, 0},
+ {"SIGXFSZ", Const, 0},
+ {"SIOCADDDLCI", Const, 0},
+ {"SIOCADDMULTI", Const, 0},
+ {"SIOCADDRT", Const, 0},
+ {"SIOCAIFADDR", Const, 0},
+ {"SIOCAIFGROUP", Const, 0},
+ {"SIOCALIFADDR", Const, 0},
+ {"SIOCARPIPLL", Const, 0},
+ {"SIOCATMARK", Const, 0},
+ {"SIOCAUTOADDR", Const, 0},
+ {"SIOCAUTONETMASK", Const, 0},
+ {"SIOCBRDGADD", Const, 1},
+ {"SIOCBRDGADDS", Const, 1},
+ {"SIOCBRDGARL", Const, 1},
+ {"SIOCBRDGDADDR", Const, 1},
+ {"SIOCBRDGDEL", Const, 1},
+ {"SIOCBRDGDELS", Const, 1},
+ {"SIOCBRDGFLUSH", Const, 1},
+ {"SIOCBRDGFRL", Const, 1},
+ {"SIOCBRDGGCACHE", Const, 1},
+ {"SIOCBRDGGFD", Const, 1},
+ {"SIOCBRDGGHT", Const, 1},
+ {"SIOCBRDGGIFFLGS", Const, 1},
+ {"SIOCBRDGGMA", Const, 1},
+ {"SIOCBRDGGPARAM", Const, 1},
+ {"SIOCBRDGGPRI", Const, 1},
+ {"SIOCBRDGGRL", Const, 1},
+ {"SIOCBRDGGSIFS", Const, 1},
+ {"SIOCBRDGGTO", Const, 1},
+ {"SIOCBRDGIFS", Const, 1},
+ {"SIOCBRDGRTS", Const, 1},
+ {"SIOCBRDGSADDR", Const, 1},
+ {"SIOCBRDGSCACHE", Const, 1},
+ {"SIOCBRDGSFD", Const, 1},
+ {"SIOCBRDGSHT", Const, 1},
+ {"SIOCBRDGSIFCOST", Const, 1},
+ {"SIOCBRDGSIFFLGS", Const, 1},
+ {"SIOCBRDGSIFPRIO", Const, 1},
+ {"SIOCBRDGSMA", Const, 1},
+ {"SIOCBRDGSPRI", Const, 1},
+ {"SIOCBRDGSPROTO", Const, 1},
+ {"SIOCBRDGSTO", Const, 1},
+ {"SIOCBRDGSTXHC", Const, 1},
+ {"SIOCDARP", Const, 0},
+ {"SIOCDELDLCI", Const, 0},
+ {"SIOCDELMULTI", Const, 0},
+ {"SIOCDELRT", Const, 0},
+ {"SIOCDEVPRIVATE", Const, 0},
+ {"SIOCDIFADDR", Const, 0},
+ {"SIOCDIFGROUP", Const, 0},
+ {"SIOCDIFPHYADDR", Const, 0},
+ {"SIOCDLIFADDR", Const, 0},
+ {"SIOCDRARP", Const, 0},
+ {"SIOCGARP", Const, 0},
+ {"SIOCGDRVSPEC", Const, 0},
+ {"SIOCGETKALIVE", Const, 1},
+ {"SIOCGETLABEL", Const, 1},
+ {"SIOCGETPFLOW", Const, 1},
+ {"SIOCGETPFSYNC", Const, 1},
+ {"SIOCGETSGCNT", Const, 0},
+ {"SIOCGETVIFCNT", Const, 0},
+ {"SIOCGETVLAN", Const, 0},
+ {"SIOCGHIWAT", Const, 0},
+ {"SIOCGIFADDR", Const, 0},
+ {"SIOCGIFADDRPREF", Const, 1},
+ {"SIOCGIFALIAS", Const, 1},
+ {"SIOCGIFALTMTU", Const, 0},
+ {"SIOCGIFASYNCMAP", Const, 0},
+ {"SIOCGIFBOND", Const, 0},
+ {"SIOCGIFBR", Const, 0},
+ {"SIOCGIFBRDADDR", Const, 0},
+ {"SIOCGIFCAP", Const, 0},
+ {"SIOCGIFCONF", Const, 0},
+ {"SIOCGIFCOUNT", Const, 0},
+ {"SIOCGIFDATA", Const, 1},
+ {"SIOCGIFDESCR", Const, 0},
+ {"SIOCGIFDEVMTU", Const, 0},
+ {"SIOCGIFDLT", Const, 1},
+ {"SIOCGIFDSTADDR", Const, 0},
+ {"SIOCGIFENCAP", Const, 0},
+ {"SIOCGIFFIB", Const, 1},
+ {"SIOCGIFFLAGS", Const, 0},
+ {"SIOCGIFGATTR", Const, 1},
+ {"SIOCGIFGENERIC", Const, 0},
+ {"SIOCGIFGMEMB", Const, 0},
+ {"SIOCGIFGROUP", Const, 0},
+ {"SIOCGIFHARDMTU", Const, 3},
+ {"SIOCGIFHWADDR", Const, 0},
+ {"SIOCGIFINDEX", Const, 0},
+ {"SIOCGIFKPI", Const, 0},
+ {"SIOCGIFMAC", Const, 0},
+ {"SIOCGIFMAP", Const, 0},
+ {"SIOCGIFMEDIA", Const, 0},
+ {"SIOCGIFMEM", Const, 0},
+ {"SIOCGIFMETRIC", Const, 0},
+ {"SIOCGIFMTU", Const, 0},
+ {"SIOCGIFNAME", Const, 0},
+ {"SIOCGIFNETMASK", Const, 0},
+ {"SIOCGIFPDSTADDR", Const, 0},
+ {"SIOCGIFPFLAGS", Const, 0},
+ {"SIOCGIFPHYS", Const, 0},
+ {"SIOCGIFPRIORITY", Const, 1},
+ {"SIOCGIFPSRCADDR", Const, 0},
+ {"SIOCGIFRDOMAIN", Const, 1},
+ {"SIOCGIFRTLABEL", Const, 1},
+ {"SIOCGIFSLAVE", Const, 0},
+ {"SIOCGIFSTATUS", Const, 0},
+ {"SIOCGIFTIMESLOT", Const, 1},
+ {"SIOCGIFTXQLEN", Const, 0},
+ {"SIOCGIFVLAN", Const, 0},
+ {"SIOCGIFWAKEFLAGS", Const, 0},
+ {"SIOCGIFXFLAGS", Const, 1},
+ {"SIOCGLIFADDR", Const, 0},
+ {"SIOCGLIFPHYADDR", Const, 0},
+ {"SIOCGLIFPHYRTABLE", Const, 1},
+ {"SIOCGLIFPHYTTL", Const, 3},
+ {"SIOCGLINKSTR", Const, 1},
+ {"SIOCGLOWAT", Const, 0},
+ {"SIOCGPGRP", Const, 0},
+ {"SIOCGPRIVATE_0", Const, 0},
+ {"SIOCGPRIVATE_1", Const, 0},
+ {"SIOCGRARP", Const, 0},
+ {"SIOCGSPPPPARAMS", Const, 3},
+ {"SIOCGSTAMP", Const, 0},
+ {"SIOCGSTAMPNS", Const, 0},
+ {"SIOCGVH", Const, 1},
+ {"SIOCGVNETID", Const, 3},
+ {"SIOCIFCREATE", Const, 0},
+ {"SIOCIFCREATE2", Const, 0},
+ {"SIOCIFDESTROY", Const, 0},
+ {"SIOCIFGCLONERS", Const, 0},
+ {"SIOCINITIFADDR", Const, 1},
+ {"SIOCPROTOPRIVATE", Const, 0},
+ {"SIOCRSLVMULTI", Const, 0},
+ {"SIOCRTMSG", Const, 0},
+ {"SIOCSARP", Const, 0},
+ {"SIOCSDRVSPEC", Const, 0},
+ {"SIOCSETKALIVE", Const, 1},
+ {"SIOCSETLABEL", Const, 1},
+ {"SIOCSETPFLOW", Const, 1},
+ {"SIOCSETPFSYNC", Const, 1},
+ {"SIOCSETVLAN", Const, 0},
+ {"SIOCSHIWAT", Const, 0},
+ {"SIOCSIFADDR", Const, 0},
+ {"SIOCSIFADDRPREF", Const, 1},
+ {"SIOCSIFALTMTU", Const, 0},
+ {"SIOCSIFASYNCMAP", Const, 0},
+ {"SIOCSIFBOND", Const, 0},
+ {"SIOCSIFBR", Const, 0},
+ {"SIOCSIFBRDADDR", Const, 0},
+ {"SIOCSIFCAP", Const, 0},
+ {"SIOCSIFDESCR", Const, 0},
+ {"SIOCSIFDSTADDR", Const, 0},
+ {"SIOCSIFENCAP", Const, 0},
+ {"SIOCSIFFIB", Const, 1},
+ {"SIOCSIFFLAGS", Const, 0},
+ {"SIOCSIFGATTR", Const, 1},
+ {"SIOCSIFGENERIC", Const, 0},
+ {"SIOCSIFHWADDR", Const, 0},
+ {"SIOCSIFHWBROADCAST", Const, 0},
+ {"SIOCSIFKPI", Const, 0},
+ {"SIOCSIFLINK", Const, 0},
+ {"SIOCSIFLLADDR", Const, 0},
+ {"SIOCSIFMAC", Const, 0},
+ {"SIOCSIFMAP", Const, 0},
+ {"SIOCSIFMEDIA", Const, 0},
+ {"SIOCSIFMEM", Const, 0},
+ {"SIOCSIFMETRIC", Const, 0},
+ {"SIOCSIFMTU", Const, 0},
+ {"SIOCSIFNAME", Const, 0},
+ {"SIOCSIFNETMASK", Const, 0},
+ {"SIOCSIFPFLAGS", Const, 0},
+ {"SIOCSIFPHYADDR", Const, 0},
+ {"SIOCSIFPHYS", Const, 0},
+ {"SIOCSIFPRIORITY", Const, 1},
+ {"SIOCSIFRDOMAIN", Const, 1},
+ {"SIOCSIFRTLABEL", Const, 1},
+ {"SIOCSIFRVNET", Const, 0},
+ {"SIOCSIFSLAVE", Const, 0},
+ {"SIOCSIFTIMESLOT", Const, 1},
+ {"SIOCSIFTXQLEN", Const, 0},
+ {"SIOCSIFVLAN", Const, 0},
+ {"SIOCSIFVNET", Const, 0},
+ {"SIOCSIFXFLAGS", Const, 1},
+ {"SIOCSLIFPHYADDR", Const, 0},
+ {"SIOCSLIFPHYRTABLE", Const, 1},
+ {"SIOCSLIFPHYTTL", Const, 3},
+ {"SIOCSLINKSTR", Const, 1},
+ {"SIOCSLOWAT", Const, 0},
+ {"SIOCSPGRP", Const, 0},
+ {"SIOCSRARP", Const, 0},
+ {"SIOCSSPPPPARAMS", Const, 3},
+ {"SIOCSVH", Const, 1},
+ {"SIOCSVNETID", Const, 3},
+ {"SIOCZIFDATA", Const, 1},
+ {"SIO_GET_EXTENSION_FUNCTION_POINTER", Const, 1},
+ {"SIO_GET_INTERFACE_LIST", Const, 0},
+ {"SIO_KEEPALIVE_VALS", Const, 3},
+ {"SIO_UDP_CONNRESET", Const, 4},
+ {"SOCK_CLOEXEC", Const, 0},
+ {"SOCK_DCCP", Const, 0},
+ {"SOCK_DGRAM", Const, 0},
+ {"SOCK_FLAGS_MASK", Const, 1},
+ {"SOCK_MAXADDRLEN", Const, 0},
+ {"SOCK_NONBLOCK", Const, 0},
+ {"SOCK_NOSIGPIPE", Const, 1},
+ {"SOCK_PACKET", Const, 0},
+ {"SOCK_RAW", Const, 0},
+ {"SOCK_RDM", Const, 0},
+ {"SOCK_SEQPACKET", Const, 0},
+ {"SOCK_STREAM", Const, 0},
+ {"SOL_AAL", Const, 0},
+ {"SOL_ATM", Const, 0},
+ {"SOL_DECNET", Const, 0},
+ {"SOL_ICMPV6", Const, 0},
+ {"SOL_IP", Const, 0},
+ {"SOL_IPV6", Const, 0},
+ {"SOL_IRDA", Const, 0},
+ {"SOL_PACKET", Const, 0},
+ {"SOL_RAW", Const, 0},
+ {"SOL_SOCKET", Const, 0},
+ {"SOL_TCP", Const, 0},
+ {"SOL_X25", Const, 0},
+ {"SOMAXCONN", Const, 0},
+ {"SO_ACCEPTCONN", Const, 0},
+ {"SO_ACCEPTFILTER", Const, 0},
+ {"SO_ATTACH_FILTER", Const, 0},
+ {"SO_BINDANY", Const, 1},
+ {"SO_BINDTODEVICE", Const, 0},
+ {"SO_BINTIME", Const, 0},
+ {"SO_BROADCAST", Const, 0},
+ {"SO_BSDCOMPAT", Const, 0},
+ {"SO_DEBUG", Const, 0},
+ {"SO_DETACH_FILTER", Const, 0},
+ {"SO_DOMAIN", Const, 0},
+ {"SO_DONTROUTE", Const, 0},
+ {"SO_DONTTRUNC", Const, 0},
+ {"SO_ERROR", Const, 0},
+ {"SO_KEEPALIVE", Const, 0},
+ {"SO_LABEL", Const, 0},
+ {"SO_LINGER", Const, 0},
+ {"SO_LINGER_SEC", Const, 0},
+ {"SO_LISTENINCQLEN", Const, 0},
+ {"SO_LISTENQLEN", Const, 0},
+ {"SO_LISTENQLIMIT", Const, 0},
+ {"SO_MARK", Const, 0},
+ {"SO_NETPROC", Const, 1},
+ {"SO_NKE", Const, 0},
+ {"SO_NOADDRERR", Const, 0},
+ {"SO_NOHEADER", Const, 1},
+ {"SO_NOSIGPIPE", Const, 0},
+ {"SO_NOTIFYCONFLICT", Const, 0},
+ {"SO_NO_CHECK", Const, 0},
+ {"SO_NO_DDP", Const, 0},
+ {"SO_NO_OFFLOAD", Const, 0},
+ {"SO_NP_EXTENSIONS", Const, 0},
+ {"SO_NREAD", Const, 0},
+ {"SO_NUMRCVPKT", Const, 16},
+ {"SO_NWRITE", Const, 0},
+ {"SO_OOBINLINE", Const, 0},
+ {"SO_OVERFLOWED", Const, 1},
+ {"SO_PASSCRED", Const, 0},
+ {"SO_PASSSEC", Const, 0},
+ {"SO_PEERCRED", Const, 0},
+ {"SO_PEERLABEL", Const, 0},
+ {"SO_PEERNAME", Const, 0},
+ {"SO_PEERSEC", Const, 0},
+ {"SO_PRIORITY", Const, 0},
+ {"SO_PROTOCOL", Const, 0},
+ {"SO_PROTOTYPE", Const, 1},
+ {"SO_RANDOMPORT", Const, 0},
+ {"SO_RCVBUF", Const, 0},
+ {"SO_RCVBUFFORCE", Const, 0},
+ {"SO_RCVLOWAT", Const, 0},
+ {"SO_RCVTIMEO", Const, 0},
+ {"SO_RESTRICTIONS", Const, 0},
+ {"SO_RESTRICT_DENYIN", Const, 0},
+ {"SO_RESTRICT_DENYOUT", Const, 0},
+ {"SO_RESTRICT_DENYSET", Const, 0},
+ {"SO_REUSEADDR", Const, 0},
+ {"SO_REUSEPORT", Const, 0},
+ {"SO_REUSESHAREUID", Const, 0},
+ {"SO_RTABLE", Const, 1},
+ {"SO_RXQ_OVFL", Const, 0},
+ {"SO_SECURITY_AUTHENTICATION", Const, 0},
+ {"SO_SECURITY_ENCRYPTION_NETWORK", Const, 0},
+ {"SO_SECURITY_ENCRYPTION_TRANSPORT", Const, 0},
+ {"SO_SETFIB", Const, 0},
+ {"SO_SNDBUF", Const, 0},
+ {"SO_SNDBUFFORCE", Const, 0},
+ {"SO_SNDLOWAT", Const, 0},
+ {"SO_SNDTIMEO", Const, 0},
+ {"SO_SPLICE", Const, 1},
+ {"SO_TIMESTAMP", Const, 0},
+ {"SO_TIMESTAMPING", Const, 0},
+ {"SO_TIMESTAMPNS", Const, 0},
+ {"SO_TIMESTAMP_MONOTONIC", Const, 0},
+ {"SO_TYPE", Const, 0},
+ {"SO_UPCALLCLOSEWAIT", Const, 0},
+ {"SO_UPDATE_ACCEPT_CONTEXT", Const, 0},
+ {"SO_UPDATE_CONNECT_CONTEXT", Const, 1},
+ {"SO_USELOOPBACK", Const, 0},
+ {"SO_USER_COOKIE", Const, 1},
+ {"SO_VENDOR", Const, 3},
+ {"SO_WANTMORE", Const, 0},
+ {"SO_WANTOOBFLAG", Const, 0},
+ {"SSLExtraCertChainPolicyPara", Type, 0},
+ {"SSLExtraCertChainPolicyPara.AuthType", Field, 0},
+ {"SSLExtraCertChainPolicyPara.Checks", Field, 0},
+ {"SSLExtraCertChainPolicyPara.ServerName", Field, 0},
+ {"SSLExtraCertChainPolicyPara.Size", Field, 0},
+ {"STANDARD_RIGHTS_ALL", Const, 0},
+ {"STANDARD_RIGHTS_EXECUTE", Const, 0},
+ {"STANDARD_RIGHTS_READ", Const, 0},
+ {"STANDARD_RIGHTS_REQUIRED", Const, 0},
+ {"STANDARD_RIGHTS_WRITE", Const, 0},
+ {"STARTF_USESHOWWINDOW", Const, 0},
+ {"STARTF_USESTDHANDLES", Const, 0},
+ {"STD_ERROR_HANDLE", Const, 0},
+ {"STD_INPUT_HANDLE", Const, 0},
+ {"STD_OUTPUT_HANDLE", Const, 0},
+ {"SUBLANG_ENGLISH_US", Const, 0},
+ {"SW_FORCEMINIMIZE", Const, 0},
+ {"SW_HIDE", Const, 0},
+ {"SW_MAXIMIZE", Const, 0},
+ {"SW_MINIMIZE", Const, 0},
+ {"SW_NORMAL", Const, 0},
+ {"SW_RESTORE", Const, 0},
+ {"SW_SHOW", Const, 0},
+ {"SW_SHOWDEFAULT", Const, 0},
+ {"SW_SHOWMAXIMIZED", Const, 0},
+ {"SW_SHOWMINIMIZED", Const, 0},
+ {"SW_SHOWMINNOACTIVE", Const, 0},
+ {"SW_SHOWNA", Const, 0},
+ {"SW_SHOWNOACTIVATE", Const, 0},
+ {"SW_SHOWNORMAL", Const, 0},
+ {"SYMBOLIC_LINK_FLAG_DIRECTORY", Const, 4},
+ {"SYNCHRONIZE", Const, 0},
+ {"SYSCTL_VERSION", Const, 1},
+ {"SYSCTL_VERS_0", Const, 1},
+ {"SYSCTL_VERS_1", Const, 1},
+ {"SYSCTL_VERS_MASK", Const, 1},
+ {"SYS_ABORT2", Const, 0},
+ {"SYS_ACCEPT", Const, 0},
+ {"SYS_ACCEPT4", Const, 0},
+ {"SYS_ACCEPT_NOCANCEL", Const, 0},
+ {"SYS_ACCESS", Const, 0},
+ {"SYS_ACCESS_EXTENDED", Const, 0},
+ {"SYS_ACCT", Const, 0},
+ {"SYS_ADD_KEY", Const, 0},
+ {"SYS_ADD_PROFIL", Const, 0},
+ {"SYS_ADJFREQ", Const, 1},
+ {"SYS_ADJTIME", Const, 0},
+ {"SYS_ADJTIMEX", Const, 0},
+ {"SYS_AFS_SYSCALL", Const, 0},
+ {"SYS_AIO_CANCEL", Const, 0},
+ {"SYS_AIO_ERROR", Const, 0},
+ {"SYS_AIO_FSYNC", Const, 0},
+ {"SYS_AIO_MLOCK", Const, 14},
+ {"SYS_AIO_READ", Const, 0},
+ {"SYS_AIO_RETURN", Const, 0},
+ {"SYS_AIO_SUSPEND", Const, 0},
+ {"SYS_AIO_SUSPEND_NOCANCEL", Const, 0},
+ {"SYS_AIO_WAITCOMPLETE", Const, 14},
+ {"SYS_AIO_WRITE", Const, 0},
+ {"SYS_ALARM", Const, 0},
+ {"SYS_ARCH_PRCTL", Const, 0},
+ {"SYS_ARM_FADVISE64_64", Const, 0},
+ {"SYS_ARM_SYNC_FILE_RANGE", Const, 0},
+ {"SYS_ATGETMSG", Const, 0},
+ {"SYS_ATPGETREQ", Const, 0},
+ {"SYS_ATPGETRSP", Const, 0},
+ {"SYS_ATPSNDREQ", Const, 0},
+ {"SYS_ATPSNDRSP", Const, 0},
+ {"SYS_ATPUTMSG", Const, 0},
+ {"SYS_ATSOCKET", Const, 0},
+ {"SYS_AUDIT", Const, 0},
+ {"SYS_AUDITCTL", Const, 0},
+ {"SYS_AUDITON", Const, 0},
+ {"SYS_AUDIT_SESSION_JOIN", Const, 0},
+ {"SYS_AUDIT_SESSION_PORT", Const, 0},
+ {"SYS_AUDIT_SESSION_SELF", Const, 0},
+ {"SYS_BDFLUSH", Const, 0},
+ {"SYS_BIND", Const, 0},
+ {"SYS_BINDAT", Const, 3},
+ {"SYS_BREAK", Const, 0},
+ {"SYS_BRK", Const, 0},
+ {"SYS_BSDTHREAD_CREATE", Const, 0},
+ {"SYS_BSDTHREAD_REGISTER", Const, 0},
+ {"SYS_BSDTHREAD_TERMINATE", Const, 0},
+ {"SYS_CAPGET", Const, 0},
+ {"SYS_CAPSET", Const, 0},
+ {"SYS_CAP_ENTER", Const, 0},
+ {"SYS_CAP_FCNTLS_GET", Const, 1},
+ {"SYS_CAP_FCNTLS_LIMIT", Const, 1},
+ {"SYS_CAP_GETMODE", Const, 0},
+ {"SYS_CAP_GETRIGHTS", Const, 0},
+ {"SYS_CAP_IOCTLS_GET", Const, 1},
+ {"SYS_CAP_IOCTLS_LIMIT", Const, 1},
+ {"SYS_CAP_NEW", Const, 0},
+ {"SYS_CAP_RIGHTS_GET", Const, 1},
+ {"SYS_CAP_RIGHTS_LIMIT", Const, 1},
+ {"SYS_CHDIR", Const, 0},
+ {"SYS_CHFLAGS", Const, 0},
+ {"SYS_CHFLAGSAT", Const, 3},
+ {"SYS_CHMOD", Const, 0},
+ {"SYS_CHMOD_EXTENDED", Const, 0},
+ {"SYS_CHOWN", Const, 0},
+ {"SYS_CHOWN32", Const, 0},
+ {"SYS_CHROOT", Const, 0},
+ {"SYS_CHUD", Const, 0},
+ {"SYS_CLOCK_ADJTIME", Const, 0},
+ {"SYS_CLOCK_GETCPUCLOCKID2", Const, 1},
+ {"SYS_CLOCK_GETRES", Const, 0},
+ {"SYS_CLOCK_GETTIME", Const, 0},
+ {"SYS_CLOCK_NANOSLEEP", Const, 0},
+ {"SYS_CLOCK_SETTIME", Const, 0},
+ {"SYS_CLONE", Const, 0},
+ {"SYS_CLOSE", Const, 0},
+ {"SYS_CLOSEFROM", Const, 0},
+ {"SYS_CLOSE_NOCANCEL", Const, 0},
+ {"SYS_CONNECT", Const, 0},
+ {"SYS_CONNECTAT", Const, 3},
+ {"SYS_CONNECT_NOCANCEL", Const, 0},
+ {"SYS_COPYFILE", Const, 0},
+ {"SYS_CPUSET", Const, 0},
+ {"SYS_CPUSET_GETAFFINITY", Const, 0},
+ {"SYS_CPUSET_GETID", Const, 0},
+ {"SYS_CPUSET_SETAFFINITY", Const, 0},
+ {"SYS_CPUSET_SETID", Const, 0},
+ {"SYS_CREAT", Const, 0},
+ {"SYS_CREATE_MODULE", Const, 0},
+ {"SYS_CSOPS", Const, 0},
+ {"SYS_CSOPS_AUDITTOKEN", Const, 16},
+ {"SYS_DELETE", Const, 0},
+ {"SYS_DELETE_MODULE", Const, 0},
+ {"SYS_DUP", Const, 0},
+ {"SYS_DUP2", Const, 0},
+ {"SYS_DUP3", Const, 0},
+ {"SYS_EACCESS", Const, 0},
+ {"SYS_EPOLL_CREATE", Const, 0},
+ {"SYS_EPOLL_CREATE1", Const, 0},
+ {"SYS_EPOLL_CTL", Const, 0},
+ {"SYS_EPOLL_CTL_OLD", Const, 0},
+ {"SYS_EPOLL_PWAIT", Const, 0},
+ {"SYS_EPOLL_WAIT", Const, 0},
+ {"SYS_EPOLL_WAIT_OLD", Const, 0},
+ {"SYS_EVENTFD", Const, 0},
+ {"SYS_EVENTFD2", Const, 0},
+ {"SYS_EXCHANGEDATA", Const, 0},
+ {"SYS_EXECVE", Const, 0},
+ {"SYS_EXIT", Const, 0},
+ {"SYS_EXIT_GROUP", Const, 0},
+ {"SYS_EXTATTRCTL", Const, 0},
+ {"SYS_EXTATTR_DELETE_FD", Const, 0},
+ {"SYS_EXTATTR_DELETE_FILE", Const, 0},
+ {"SYS_EXTATTR_DELETE_LINK", Const, 0},
+ {"SYS_EXTATTR_GET_FD", Const, 0},
+ {"SYS_EXTATTR_GET_FILE", Const, 0},
+ {"SYS_EXTATTR_GET_LINK", Const, 0},
+ {"SYS_EXTATTR_LIST_FD", Const, 0},
+ {"SYS_EXTATTR_LIST_FILE", Const, 0},
+ {"SYS_EXTATTR_LIST_LINK", Const, 0},
+ {"SYS_EXTATTR_SET_FD", Const, 0},
+ {"SYS_EXTATTR_SET_FILE", Const, 0},
+ {"SYS_EXTATTR_SET_LINK", Const, 0},
+ {"SYS_FACCESSAT", Const, 0},
+ {"SYS_FADVISE64", Const, 0},
+ {"SYS_FADVISE64_64", Const, 0},
+ {"SYS_FALLOCATE", Const, 0},
+ {"SYS_FANOTIFY_INIT", Const, 0},
+ {"SYS_FANOTIFY_MARK", Const, 0},
+ {"SYS_FCHDIR", Const, 0},
+ {"SYS_FCHFLAGS", Const, 0},
+ {"SYS_FCHMOD", Const, 0},
+ {"SYS_FCHMODAT", Const, 0},
+ {"SYS_FCHMOD_EXTENDED", Const, 0},
+ {"SYS_FCHOWN", Const, 0},
+ {"SYS_FCHOWN32", Const, 0},
+ {"SYS_FCHOWNAT", Const, 0},
+ {"SYS_FCHROOT", Const, 1},
+ {"SYS_FCNTL", Const, 0},
+ {"SYS_FCNTL64", Const, 0},
+ {"SYS_FCNTL_NOCANCEL", Const, 0},
+ {"SYS_FDATASYNC", Const, 0},
+ {"SYS_FEXECVE", Const, 0},
+ {"SYS_FFCLOCK_GETCOUNTER", Const, 0},
+ {"SYS_FFCLOCK_GETESTIMATE", Const, 0},
+ {"SYS_FFCLOCK_SETESTIMATE", Const, 0},
+ {"SYS_FFSCTL", Const, 0},
+ {"SYS_FGETATTRLIST", Const, 0},
+ {"SYS_FGETXATTR", Const, 0},
+ {"SYS_FHOPEN", Const, 0},
+ {"SYS_FHSTAT", Const, 0},
+ {"SYS_FHSTATFS", Const, 0},
+ {"SYS_FILEPORT_MAKEFD", Const, 0},
+ {"SYS_FILEPORT_MAKEPORT", Const, 0},
+ {"SYS_FKTRACE", Const, 1},
+ {"SYS_FLISTXATTR", Const, 0},
+ {"SYS_FLOCK", Const, 0},
+ {"SYS_FORK", Const, 0},
+ {"SYS_FPATHCONF", Const, 0},
+ {"SYS_FREEBSD6_FTRUNCATE", Const, 0},
+ {"SYS_FREEBSD6_LSEEK", Const, 0},
+ {"SYS_FREEBSD6_MMAP", Const, 0},
+ {"SYS_FREEBSD6_PREAD", Const, 0},
+ {"SYS_FREEBSD6_PWRITE", Const, 0},
+ {"SYS_FREEBSD6_TRUNCATE", Const, 0},
+ {"SYS_FREMOVEXATTR", Const, 0},
+ {"SYS_FSCTL", Const, 0},
+ {"SYS_FSETATTRLIST", Const, 0},
+ {"SYS_FSETXATTR", Const, 0},
+ {"SYS_FSGETPATH", Const, 0},
+ {"SYS_FSTAT", Const, 0},
+ {"SYS_FSTAT64", Const, 0},
+ {"SYS_FSTAT64_EXTENDED", Const, 0},
+ {"SYS_FSTATAT", Const, 0},
+ {"SYS_FSTATAT64", Const, 0},
+ {"SYS_FSTATFS", Const, 0},
+ {"SYS_FSTATFS64", Const, 0},
+ {"SYS_FSTATV", Const, 0},
+ {"SYS_FSTATVFS1", Const, 1},
+ {"SYS_FSTAT_EXTENDED", Const, 0},
+ {"SYS_FSYNC", Const, 0},
+ {"SYS_FSYNC_NOCANCEL", Const, 0},
+ {"SYS_FSYNC_RANGE", Const, 1},
+ {"SYS_FTIME", Const, 0},
+ {"SYS_FTRUNCATE", Const, 0},
+ {"SYS_FTRUNCATE64", Const, 0},
+ {"SYS_FUTEX", Const, 0},
+ {"SYS_FUTIMENS", Const, 1},
+ {"SYS_FUTIMES", Const, 0},
+ {"SYS_FUTIMESAT", Const, 0},
+ {"SYS_GETATTRLIST", Const, 0},
+ {"SYS_GETAUDIT", Const, 0},
+ {"SYS_GETAUDIT_ADDR", Const, 0},
+ {"SYS_GETAUID", Const, 0},
+ {"SYS_GETCONTEXT", Const, 0},
+ {"SYS_GETCPU", Const, 0},
+ {"SYS_GETCWD", Const, 0},
+ {"SYS_GETDENTS", Const, 0},
+ {"SYS_GETDENTS64", Const, 0},
+ {"SYS_GETDIRENTRIES", Const, 0},
+ {"SYS_GETDIRENTRIES64", Const, 0},
+ {"SYS_GETDIRENTRIESATTR", Const, 0},
+ {"SYS_GETDTABLECOUNT", Const, 1},
+ {"SYS_GETDTABLESIZE", Const, 0},
+ {"SYS_GETEGID", Const, 0},
+ {"SYS_GETEGID32", Const, 0},
+ {"SYS_GETEUID", Const, 0},
+ {"SYS_GETEUID32", Const, 0},
+ {"SYS_GETFH", Const, 0},
+ {"SYS_GETFSSTAT", Const, 0},
+ {"SYS_GETFSSTAT64", Const, 0},
+ {"SYS_GETGID", Const, 0},
+ {"SYS_GETGID32", Const, 0},
+ {"SYS_GETGROUPS", Const, 0},
+ {"SYS_GETGROUPS32", Const, 0},
+ {"SYS_GETHOSTUUID", Const, 0},
+ {"SYS_GETITIMER", Const, 0},
+ {"SYS_GETLCID", Const, 0},
+ {"SYS_GETLOGIN", Const, 0},
+ {"SYS_GETLOGINCLASS", Const, 0},
+ {"SYS_GETPEERNAME", Const, 0},
+ {"SYS_GETPGID", Const, 0},
+ {"SYS_GETPGRP", Const, 0},
+ {"SYS_GETPID", Const, 0},
+ {"SYS_GETPMSG", Const, 0},
+ {"SYS_GETPPID", Const, 0},
+ {"SYS_GETPRIORITY", Const, 0},
+ {"SYS_GETRESGID", Const, 0},
+ {"SYS_GETRESGID32", Const, 0},
+ {"SYS_GETRESUID", Const, 0},
+ {"SYS_GETRESUID32", Const, 0},
+ {"SYS_GETRLIMIT", Const, 0},
+ {"SYS_GETRTABLE", Const, 1},
+ {"SYS_GETRUSAGE", Const, 0},
+ {"SYS_GETSGROUPS", Const, 0},
+ {"SYS_GETSID", Const, 0},
+ {"SYS_GETSOCKNAME", Const, 0},
+ {"SYS_GETSOCKOPT", Const, 0},
+ {"SYS_GETTHRID", Const, 1},
+ {"SYS_GETTID", Const, 0},
+ {"SYS_GETTIMEOFDAY", Const, 0},
+ {"SYS_GETUID", Const, 0},
+ {"SYS_GETUID32", Const, 0},
+ {"SYS_GETVFSSTAT", Const, 1},
+ {"SYS_GETWGROUPS", Const, 0},
+ {"SYS_GETXATTR", Const, 0},
+ {"SYS_GET_KERNEL_SYMS", Const, 0},
+ {"SYS_GET_MEMPOLICY", Const, 0},
+ {"SYS_GET_ROBUST_LIST", Const, 0},
+ {"SYS_GET_THREAD_AREA", Const, 0},
+ {"SYS_GSSD_SYSCALL", Const, 14},
+ {"SYS_GTTY", Const, 0},
+ {"SYS_IDENTITYSVC", Const, 0},
+ {"SYS_IDLE", Const, 0},
+ {"SYS_INITGROUPS", Const, 0},
+ {"SYS_INIT_MODULE", Const, 0},
+ {"SYS_INOTIFY_ADD_WATCH", Const, 0},
+ {"SYS_INOTIFY_INIT", Const, 0},
+ {"SYS_INOTIFY_INIT1", Const, 0},
+ {"SYS_INOTIFY_RM_WATCH", Const, 0},
+ {"SYS_IOCTL", Const, 0},
+ {"SYS_IOPERM", Const, 0},
+ {"SYS_IOPL", Const, 0},
+ {"SYS_IOPOLICYSYS", Const, 0},
+ {"SYS_IOPRIO_GET", Const, 0},
+ {"SYS_IOPRIO_SET", Const, 0},
+ {"SYS_IO_CANCEL", Const, 0},
+ {"SYS_IO_DESTROY", Const, 0},
+ {"SYS_IO_GETEVENTS", Const, 0},
+ {"SYS_IO_SETUP", Const, 0},
+ {"SYS_IO_SUBMIT", Const, 0},
+ {"SYS_IPC", Const, 0},
+ {"SYS_ISSETUGID", Const, 0},
+ {"SYS_JAIL", Const, 0},
+ {"SYS_JAIL_ATTACH", Const, 0},
+ {"SYS_JAIL_GET", Const, 0},
+ {"SYS_JAIL_REMOVE", Const, 0},
+ {"SYS_JAIL_SET", Const, 0},
+ {"SYS_KAS_INFO", Const, 16},
+ {"SYS_KDEBUG_TRACE", Const, 0},
+ {"SYS_KENV", Const, 0},
+ {"SYS_KEVENT", Const, 0},
+ {"SYS_KEVENT64", Const, 0},
+ {"SYS_KEXEC_LOAD", Const, 0},
+ {"SYS_KEYCTL", Const, 0},
+ {"SYS_KILL", Const, 0},
+ {"SYS_KLDFIND", Const, 0},
+ {"SYS_KLDFIRSTMOD", Const, 0},
+ {"SYS_KLDLOAD", Const, 0},
+ {"SYS_KLDNEXT", Const, 0},
+ {"SYS_KLDSTAT", Const, 0},
+ {"SYS_KLDSYM", Const, 0},
+ {"SYS_KLDUNLOAD", Const, 0},
+ {"SYS_KLDUNLOADF", Const, 0},
+ {"SYS_KMQ_NOTIFY", Const, 14},
+ {"SYS_KMQ_OPEN", Const, 14},
+ {"SYS_KMQ_SETATTR", Const, 14},
+ {"SYS_KMQ_TIMEDRECEIVE", Const, 14},
+ {"SYS_KMQ_TIMEDSEND", Const, 14},
+ {"SYS_KMQ_UNLINK", Const, 14},
+ {"SYS_KQUEUE", Const, 0},
+ {"SYS_KQUEUE1", Const, 1},
+ {"SYS_KSEM_CLOSE", Const, 14},
+ {"SYS_KSEM_DESTROY", Const, 14},
+ {"SYS_KSEM_GETVALUE", Const, 14},
+ {"SYS_KSEM_INIT", Const, 14},
+ {"SYS_KSEM_OPEN", Const, 14},
+ {"SYS_KSEM_POST", Const, 14},
+ {"SYS_KSEM_TIMEDWAIT", Const, 14},
+ {"SYS_KSEM_TRYWAIT", Const, 14},
+ {"SYS_KSEM_UNLINK", Const, 14},
+ {"SYS_KSEM_WAIT", Const, 14},
+ {"SYS_KTIMER_CREATE", Const, 0},
+ {"SYS_KTIMER_DELETE", Const, 0},
+ {"SYS_KTIMER_GETOVERRUN", Const, 0},
+ {"SYS_KTIMER_GETTIME", Const, 0},
+ {"SYS_KTIMER_SETTIME", Const, 0},
+ {"SYS_KTRACE", Const, 0},
+ {"SYS_LCHFLAGS", Const, 0},
+ {"SYS_LCHMOD", Const, 0},
+ {"SYS_LCHOWN", Const, 0},
+ {"SYS_LCHOWN32", Const, 0},
+ {"SYS_LEDGER", Const, 16},
+ {"SYS_LGETFH", Const, 0},
+ {"SYS_LGETXATTR", Const, 0},
+ {"SYS_LINK", Const, 0},
+ {"SYS_LINKAT", Const, 0},
+ {"SYS_LIO_LISTIO", Const, 0},
+ {"SYS_LISTEN", Const, 0},
+ {"SYS_LISTXATTR", Const, 0},
+ {"SYS_LLISTXATTR", Const, 0},
+ {"SYS_LOCK", Const, 0},
+ {"SYS_LOOKUP_DCOOKIE", Const, 0},
+ {"SYS_LPATHCONF", Const, 0},
+ {"SYS_LREMOVEXATTR", Const, 0},
+ {"SYS_LSEEK", Const, 0},
+ {"SYS_LSETXATTR", Const, 0},
+ {"SYS_LSTAT", Const, 0},
+ {"SYS_LSTAT64", Const, 0},
+ {"SYS_LSTAT64_EXTENDED", Const, 0},
+ {"SYS_LSTATV", Const, 0},
+ {"SYS_LSTAT_EXTENDED", Const, 0},
+ {"SYS_LUTIMES", Const, 0},
+ {"SYS_MAC_SYSCALL", Const, 0},
+ {"SYS_MADVISE", Const, 0},
+ {"SYS_MADVISE1", Const, 0},
+ {"SYS_MAXSYSCALL", Const, 0},
+ {"SYS_MBIND", Const, 0},
+ {"SYS_MIGRATE_PAGES", Const, 0},
+ {"SYS_MINCORE", Const, 0},
+ {"SYS_MINHERIT", Const, 0},
+ {"SYS_MKCOMPLEX", Const, 0},
+ {"SYS_MKDIR", Const, 0},
+ {"SYS_MKDIRAT", Const, 0},
+ {"SYS_MKDIR_EXTENDED", Const, 0},
+ {"SYS_MKFIFO", Const, 0},
+ {"SYS_MKFIFOAT", Const, 0},
+ {"SYS_MKFIFO_EXTENDED", Const, 0},
+ {"SYS_MKNOD", Const, 0},
+ {"SYS_MKNODAT", Const, 0},
+ {"SYS_MLOCK", Const, 0},
+ {"SYS_MLOCKALL", Const, 0},
+ {"SYS_MMAP", Const, 0},
+ {"SYS_MMAP2", Const, 0},
+ {"SYS_MODCTL", Const, 1},
+ {"SYS_MODFIND", Const, 0},
+ {"SYS_MODFNEXT", Const, 0},
+ {"SYS_MODIFY_LDT", Const, 0},
+ {"SYS_MODNEXT", Const, 0},
+ {"SYS_MODSTAT", Const, 0},
+ {"SYS_MODWATCH", Const, 0},
+ {"SYS_MOUNT", Const, 0},
+ {"SYS_MOVE_PAGES", Const, 0},
+ {"SYS_MPROTECT", Const, 0},
+ {"SYS_MPX", Const, 0},
+ {"SYS_MQUERY", Const, 1},
+ {"SYS_MQ_GETSETATTR", Const, 0},
+ {"SYS_MQ_NOTIFY", Const, 0},
+ {"SYS_MQ_OPEN", Const, 0},
+ {"SYS_MQ_TIMEDRECEIVE", Const, 0},
+ {"SYS_MQ_TIMEDSEND", Const, 0},
+ {"SYS_MQ_UNLINK", Const, 0},
+ {"SYS_MREMAP", Const, 0},
+ {"SYS_MSGCTL", Const, 0},
+ {"SYS_MSGGET", Const, 0},
+ {"SYS_MSGRCV", Const, 0},
+ {"SYS_MSGRCV_NOCANCEL", Const, 0},
+ {"SYS_MSGSND", Const, 0},
+ {"SYS_MSGSND_NOCANCEL", Const, 0},
+ {"SYS_MSGSYS", Const, 0},
+ {"SYS_MSYNC", Const, 0},
+ {"SYS_MSYNC_NOCANCEL", Const, 0},
+ {"SYS_MUNLOCK", Const, 0},
+ {"SYS_MUNLOCKALL", Const, 0},
+ {"SYS_MUNMAP", Const, 0},
+ {"SYS_NAME_TO_HANDLE_AT", Const, 0},
+ {"SYS_NANOSLEEP", Const, 0},
+ {"SYS_NEWFSTATAT", Const, 0},
+ {"SYS_NFSCLNT", Const, 0},
+ {"SYS_NFSSERVCTL", Const, 0},
+ {"SYS_NFSSVC", Const, 0},
+ {"SYS_NFSTAT", Const, 0},
+ {"SYS_NICE", Const, 0},
+ {"SYS_NLM_SYSCALL", Const, 14},
+ {"SYS_NLSTAT", Const, 0},
+ {"SYS_NMOUNT", Const, 0},
+ {"SYS_NSTAT", Const, 0},
+ {"SYS_NTP_ADJTIME", Const, 0},
+ {"SYS_NTP_GETTIME", Const, 0},
+ {"SYS_NUMA_GETAFFINITY", Const, 14},
+ {"SYS_NUMA_SETAFFINITY", Const, 14},
+ {"SYS_OABI_SYSCALL_BASE", Const, 0},
+ {"SYS_OBREAK", Const, 0},
+ {"SYS_OLDFSTAT", Const, 0},
+ {"SYS_OLDLSTAT", Const, 0},
+ {"SYS_OLDOLDUNAME", Const, 0},
+ {"SYS_OLDSTAT", Const, 0},
+ {"SYS_OLDUNAME", Const, 0},
+ {"SYS_OPEN", Const, 0},
+ {"SYS_OPENAT", Const, 0},
+ {"SYS_OPENBSD_POLL", Const, 0},
+ {"SYS_OPEN_BY_HANDLE_AT", Const, 0},
+ {"SYS_OPEN_DPROTECTED_NP", Const, 16},
+ {"SYS_OPEN_EXTENDED", Const, 0},
+ {"SYS_OPEN_NOCANCEL", Const, 0},
+ {"SYS_OVADVISE", Const, 0},
+ {"SYS_PACCEPT", Const, 1},
+ {"SYS_PATHCONF", Const, 0},
+ {"SYS_PAUSE", Const, 0},
+ {"SYS_PCICONFIG_IOBASE", Const, 0},
+ {"SYS_PCICONFIG_READ", Const, 0},
+ {"SYS_PCICONFIG_WRITE", Const, 0},
+ {"SYS_PDFORK", Const, 0},
+ {"SYS_PDGETPID", Const, 0},
+ {"SYS_PDKILL", Const, 0},
+ {"SYS_PERF_EVENT_OPEN", Const, 0},
+ {"SYS_PERSONALITY", Const, 0},
+ {"SYS_PID_HIBERNATE", Const, 0},
+ {"SYS_PID_RESUME", Const, 0},
+ {"SYS_PID_SHUTDOWN_SOCKETS", Const, 0},
+ {"SYS_PID_SUSPEND", Const, 0},
+ {"SYS_PIPE", Const, 0},
+ {"SYS_PIPE2", Const, 0},
+ {"SYS_PIVOT_ROOT", Const, 0},
+ {"SYS_PMC_CONTROL", Const, 1},
+ {"SYS_PMC_GET_INFO", Const, 1},
+ {"SYS_POLL", Const, 0},
+ {"SYS_POLLTS", Const, 1},
+ {"SYS_POLL_NOCANCEL", Const, 0},
+ {"SYS_POSIX_FADVISE", Const, 0},
+ {"SYS_POSIX_FALLOCATE", Const, 0},
+ {"SYS_POSIX_OPENPT", Const, 0},
+ {"SYS_POSIX_SPAWN", Const, 0},
+ {"SYS_PPOLL", Const, 0},
+ {"SYS_PRCTL", Const, 0},
+ {"SYS_PREAD", Const, 0},
+ {"SYS_PREAD64", Const, 0},
+ {"SYS_PREADV", Const, 0},
+ {"SYS_PREAD_NOCANCEL", Const, 0},
+ {"SYS_PRLIMIT64", Const, 0},
+ {"SYS_PROCCTL", Const, 3},
+ {"SYS_PROCESS_POLICY", Const, 0},
+ {"SYS_PROCESS_VM_READV", Const, 0},
+ {"SYS_PROCESS_VM_WRITEV", Const, 0},
+ {"SYS_PROC_INFO", Const, 0},
+ {"SYS_PROF", Const, 0},
+ {"SYS_PROFIL", Const, 0},
+ {"SYS_PSELECT", Const, 0},
+ {"SYS_PSELECT6", Const, 0},
+ {"SYS_PSET_ASSIGN", Const, 1},
+ {"SYS_PSET_CREATE", Const, 1},
+ {"SYS_PSET_DESTROY", Const, 1},
+ {"SYS_PSYNCH_CVBROAD", Const, 0},
+ {"SYS_PSYNCH_CVCLRPREPOST", Const, 0},
+ {"SYS_PSYNCH_CVSIGNAL", Const, 0},
+ {"SYS_PSYNCH_CVWAIT", Const, 0},
+ {"SYS_PSYNCH_MUTEXDROP", Const, 0},
+ {"SYS_PSYNCH_MUTEXWAIT", Const, 0},
+ {"SYS_PSYNCH_RW_DOWNGRADE", Const, 0},
+ {"SYS_PSYNCH_RW_LONGRDLOCK", Const, 0},
+ {"SYS_PSYNCH_RW_RDLOCK", Const, 0},
+ {"SYS_PSYNCH_RW_UNLOCK", Const, 0},
+ {"SYS_PSYNCH_RW_UNLOCK2", Const, 0},
+ {"SYS_PSYNCH_RW_UPGRADE", Const, 0},
+ {"SYS_PSYNCH_RW_WRLOCK", Const, 0},
+ {"SYS_PSYNCH_RW_YIELDWRLOCK", Const, 0},
+ {"SYS_PTRACE", Const, 0},
+ {"SYS_PUTPMSG", Const, 0},
+ {"SYS_PWRITE", Const, 0},
+ {"SYS_PWRITE64", Const, 0},
+ {"SYS_PWRITEV", Const, 0},
+ {"SYS_PWRITE_NOCANCEL", Const, 0},
+ {"SYS_QUERY_MODULE", Const, 0},
+ {"SYS_QUOTACTL", Const, 0},
+ {"SYS_RASCTL", Const, 1},
+ {"SYS_RCTL_ADD_RULE", Const, 0},
+ {"SYS_RCTL_GET_LIMITS", Const, 0},
+ {"SYS_RCTL_GET_RACCT", Const, 0},
+ {"SYS_RCTL_GET_RULES", Const, 0},
+ {"SYS_RCTL_REMOVE_RULE", Const, 0},
+ {"SYS_READ", Const, 0},
+ {"SYS_READAHEAD", Const, 0},
+ {"SYS_READDIR", Const, 0},
+ {"SYS_READLINK", Const, 0},
+ {"SYS_READLINKAT", Const, 0},
+ {"SYS_READV", Const, 0},
+ {"SYS_READV_NOCANCEL", Const, 0},
+ {"SYS_READ_NOCANCEL", Const, 0},
+ {"SYS_REBOOT", Const, 0},
+ {"SYS_RECV", Const, 0},
+ {"SYS_RECVFROM", Const, 0},
+ {"SYS_RECVFROM_NOCANCEL", Const, 0},
+ {"SYS_RECVMMSG", Const, 0},
+ {"SYS_RECVMSG", Const, 0},
+ {"SYS_RECVMSG_NOCANCEL", Const, 0},
+ {"SYS_REMAP_FILE_PAGES", Const, 0},
+ {"SYS_REMOVEXATTR", Const, 0},
+ {"SYS_RENAME", Const, 0},
+ {"SYS_RENAMEAT", Const, 0},
+ {"SYS_REQUEST_KEY", Const, 0},
+ {"SYS_RESTART_SYSCALL", Const, 0},
+ {"SYS_REVOKE", Const, 0},
+ {"SYS_RFORK", Const, 0},
+ {"SYS_RMDIR", Const, 0},
+ {"SYS_RTPRIO", Const, 0},
+ {"SYS_RTPRIO_THREAD", Const, 0},
+ {"SYS_RT_SIGACTION", Const, 0},
+ {"SYS_RT_SIGPENDING", Const, 0},
+ {"SYS_RT_SIGPROCMASK", Const, 0},
+ {"SYS_RT_SIGQUEUEINFO", Const, 0},
+ {"SYS_RT_SIGRETURN", Const, 0},
+ {"SYS_RT_SIGSUSPEND", Const, 0},
+ {"SYS_RT_SIGTIMEDWAIT", Const, 0},
+ {"SYS_RT_TGSIGQUEUEINFO", Const, 0},
+ {"SYS_SBRK", Const, 0},
+ {"SYS_SCHED_GETAFFINITY", Const, 0},
+ {"SYS_SCHED_GETPARAM", Const, 0},
+ {"SYS_SCHED_GETSCHEDULER", Const, 0},
+ {"SYS_SCHED_GET_PRIORITY_MAX", Const, 0},
+ {"SYS_SCHED_GET_PRIORITY_MIN", Const, 0},
+ {"SYS_SCHED_RR_GET_INTERVAL", Const, 0},
+ {"SYS_SCHED_SETAFFINITY", Const, 0},
+ {"SYS_SCHED_SETPARAM", Const, 0},
+ {"SYS_SCHED_SETSCHEDULER", Const, 0},
+ {"SYS_SCHED_YIELD", Const, 0},
+ {"SYS_SCTP_GENERIC_RECVMSG", Const, 0},
+ {"SYS_SCTP_GENERIC_SENDMSG", Const, 0},
+ {"SYS_SCTP_GENERIC_SENDMSG_IOV", Const, 0},
+ {"SYS_SCTP_PEELOFF", Const, 0},
+ {"SYS_SEARCHFS", Const, 0},
+ {"SYS_SECURITY", Const, 0},
+ {"SYS_SELECT", Const, 0},
+ {"SYS_SELECT_NOCANCEL", Const, 0},
+ {"SYS_SEMCONFIG", Const, 1},
+ {"SYS_SEMCTL", Const, 0},
+ {"SYS_SEMGET", Const, 0},
+ {"SYS_SEMOP", Const, 0},
+ {"SYS_SEMSYS", Const, 0},
+ {"SYS_SEMTIMEDOP", Const, 0},
+ {"SYS_SEM_CLOSE", Const, 0},
+ {"SYS_SEM_DESTROY", Const, 0},
+ {"SYS_SEM_GETVALUE", Const, 0},
+ {"SYS_SEM_INIT", Const, 0},
+ {"SYS_SEM_OPEN", Const, 0},
+ {"SYS_SEM_POST", Const, 0},
+ {"SYS_SEM_TRYWAIT", Const, 0},
+ {"SYS_SEM_UNLINK", Const, 0},
+ {"SYS_SEM_WAIT", Const, 0},
+ {"SYS_SEM_WAIT_NOCANCEL", Const, 0},
+ {"SYS_SEND", Const, 0},
+ {"SYS_SENDFILE", Const, 0},
+ {"SYS_SENDFILE64", Const, 0},
+ {"SYS_SENDMMSG", Const, 0},
+ {"SYS_SENDMSG", Const, 0},
+ {"SYS_SENDMSG_NOCANCEL", Const, 0},
+ {"SYS_SENDTO", Const, 0},
+ {"SYS_SENDTO_NOCANCEL", Const, 0},
+ {"SYS_SETATTRLIST", Const, 0},
+ {"SYS_SETAUDIT", Const, 0},
+ {"SYS_SETAUDIT_ADDR", Const, 0},
+ {"SYS_SETAUID", Const, 0},
+ {"SYS_SETCONTEXT", Const, 0},
+ {"SYS_SETDOMAINNAME", Const, 0},
+ {"SYS_SETEGID", Const, 0},
+ {"SYS_SETEUID", Const, 0},
+ {"SYS_SETFIB", Const, 0},
+ {"SYS_SETFSGID", Const, 0},
+ {"SYS_SETFSGID32", Const, 0},
+ {"SYS_SETFSUID", Const, 0},
+ {"SYS_SETFSUID32", Const, 0},
+ {"SYS_SETGID", Const, 0},
+ {"SYS_SETGID32", Const, 0},
+ {"SYS_SETGROUPS", Const, 0},
+ {"SYS_SETGROUPS32", Const, 0},
+ {"SYS_SETHOSTNAME", Const, 0},
+ {"SYS_SETITIMER", Const, 0},
+ {"SYS_SETLCID", Const, 0},
+ {"SYS_SETLOGIN", Const, 0},
+ {"SYS_SETLOGINCLASS", Const, 0},
+ {"SYS_SETNS", Const, 0},
+ {"SYS_SETPGID", Const, 0},
+ {"SYS_SETPRIORITY", Const, 0},
+ {"SYS_SETPRIVEXEC", Const, 0},
+ {"SYS_SETREGID", Const, 0},
+ {"SYS_SETREGID32", Const, 0},
+ {"SYS_SETRESGID", Const, 0},
+ {"SYS_SETRESGID32", Const, 0},
+ {"SYS_SETRESUID", Const, 0},
+ {"SYS_SETRESUID32", Const, 0},
+ {"SYS_SETREUID", Const, 0},
+ {"SYS_SETREUID32", Const, 0},
+ {"SYS_SETRLIMIT", Const, 0},
+ {"SYS_SETRTABLE", Const, 1},
+ {"SYS_SETSGROUPS", Const, 0},
+ {"SYS_SETSID", Const, 0},
+ {"SYS_SETSOCKOPT", Const, 0},
+ {"SYS_SETTID", Const, 0},
+ {"SYS_SETTID_WITH_PID", Const, 0},
+ {"SYS_SETTIMEOFDAY", Const, 0},
+ {"SYS_SETUID", Const, 0},
+ {"SYS_SETUID32", Const, 0},
+ {"SYS_SETWGROUPS", Const, 0},
+ {"SYS_SETXATTR", Const, 0},
+ {"SYS_SET_MEMPOLICY", Const, 0},
+ {"SYS_SET_ROBUST_LIST", Const, 0},
+ {"SYS_SET_THREAD_AREA", Const, 0},
+ {"SYS_SET_TID_ADDRESS", Const, 0},
+ {"SYS_SGETMASK", Const, 0},
+ {"SYS_SHARED_REGION_CHECK_NP", Const, 0},
+ {"SYS_SHARED_REGION_MAP_AND_SLIDE_NP", Const, 0},
+ {"SYS_SHMAT", Const, 0},
+ {"SYS_SHMCTL", Const, 0},
+ {"SYS_SHMDT", Const, 0},
+ {"SYS_SHMGET", Const, 0},
+ {"SYS_SHMSYS", Const, 0},
+ {"SYS_SHM_OPEN", Const, 0},
+ {"SYS_SHM_UNLINK", Const, 0},
+ {"SYS_SHUTDOWN", Const, 0},
+ {"SYS_SIGACTION", Const, 0},
+ {"SYS_SIGALTSTACK", Const, 0},
+ {"SYS_SIGNAL", Const, 0},
+ {"SYS_SIGNALFD", Const, 0},
+ {"SYS_SIGNALFD4", Const, 0},
+ {"SYS_SIGPENDING", Const, 0},
+ {"SYS_SIGPROCMASK", Const, 0},
+ {"SYS_SIGQUEUE", Const, 0},
+ {"SYS_SIGQUEUEINFO", Const, 1},
+ {"SYS_SIGRETURN", Const, 0},
+ {"SYS_SIGSUSPEND", Const, 0},
+ {"SYS_SIGSUSPEND_NOCANCEL", Const, 0},
+ {"SYS_SIGTIMEDWAIT", Const, 0},
+ {"SYS_SIGWAIT", Const, 0},
+ {"SYS_SIGWAITINFO", Const, 0},
+ {"SYS_SOCKET", Const, 0},
+ {"SYS_SOCKETCALL", Const, 0},
+ {"SYS_SOCKETPAIR", Const, 0},
+ {"SYS_SPLICE", Const, 0},
+ {"SYS_SSETMASK", Const, 0},
+ {"SYS_SSTK", Const, 0},
+ {"SYS_STACK_SNAPSHOT", Const, 0},
+ {"SYS_STAT", Const, 0},
+ {"SYS_STAT64", Const, 0},
+ {"SYS_STAT64_EXTENDED", Const, 0},
+ {"SYS_STATFS", Const, 0},
+ {"SYS_STATFS64", Const, 0},
+ {"SYS_STATV", Const, 0},
+ {"SYS_STATVFS1", Const, 1},
+ {"SYS_STAT_EXTENDED", Const, 0},
+ {"SYS_STIME", Const, 0},
+ {"SYS_STTY", Const, 0},
+ {"SYS_SWAPCONTEXT", Const, 0},
+ {"SYS_SWAPCTL", Const, 1},
+ {"SYS_SWAPOFF", Const, 0},
+ {"SYS_SWAPON", Const, 0},
+ {"SYS_SYMLINK", Const, 0},
+ {"SYS_SYMLINKAT", Const, 0},
+ {"SYS_SYNC", Const, 0},
+ {"SYS_SYNCFS", Const, 0},
+ {"SYS_SYNC_FILE_RANGE", Const, 0},
+ {"SYS_SYSARCH", Const, 0},
+ {"SYS_SYSCALL", Const, 0},
+ {"SYS_SYSCALL_BASE", Const, 0},
+ {"SYS_SYSFS", Const, 0},
+ {"SYS_SYSINFO", Const, 0},
+ {"SYS_SYSLOG", Const, 0},
+ {"SYS_TEE", Const, 0},
+ {"SYS_TGKILL", Const, 0},
+ {"SYS_THREAD_SELFID", Const, 0},
+ {"SYS_THR_CREATE", Const, 0},
+ {"SYS_THR_EXIT", Const, 0},
+ {"SYS_THR_KILL", Const, 0},
+ {"SYS_THR_KILL2", Const, 0},
+ {"SYS_THR_NEW", Const, 0},
+ {"SYS_THR_SELF", Const, 0},
+ {"SYS_THR_SET_NAME", Const, 0},
+ {"SYS_THR_SUSPEND", Const, 0},
+ {"SYS_THR_WAKE", Const, 0},
+ {"SYS_TIME", Const, 0},
+ {"SYS_TIMERFD_CREATE", Const, 0},
+ {"SYS_TIMERFD_GETTIME", Const, 0},
+ {"SYS_TIMERFD_SETTIME", Const, 0},
+ {"SYS_TIMER_CREATE", Const, 0},
+ {"SYS_TIMER_DELETE", Const, 0},
+ {"SYS_TIMER_GETOVERRUN", Const, 0},
+ {"SYS_TIMER_GETTIME", Const, 0},
+ {"SYS_TIMER_SETTIME", Const, 0},
+ {"SYS_TIMES", Const, 0},
+ {"SYS_TKILL", Const, 0},
+ {"SYS_TRUNCATE", Const, 0},
+ {"SYS_TRUNCATE64", Const, 0},
+ {"SYS_TUXCALL", Const, 0},
+ {"SYS_UGETRLIMIT", Const, 0},
+ {"SYS_ULIMIT", Const, 0},
+ {"SYS_UMASK", Const, 0},
+ {"SYS_UMASK_EXTENDED", Const, 0},
+ {"SYS_UMOUNT", Const, 0},
+ {"SYS_UMOUNT2", Const, 0},
+ {"SYS_UNAME", Const, 0},
+ {"SYS_UNDELETE", Const, 0},
+ {"SYS_UNLINK", Const, 0},
+ {"SYS_UNLINKAT", Const, 0},
+ {"SYS_UNMOUNT", Const, 0},
+ {"SYS_UNSHARE", Const, 0},
+ {"SYS_USELIB", Const, 0},
+ {"SYS_USTAT", Const, 0},
+ {"SYS_UTIME", Const, 0},
+ {"SYS_UTIMENSAT", Const, 0},
+ {"SYS_UTIMES", Const, 0},
+ {"SYS_UTRACE", Const, 0},
+ {"SYS_UUIDGEN", Const, 0},
+ {"SYS_VADVISE", Const, 1},
+ {"SYS_VFORK", Const, 0},
+ {"SYS_VHANGUP", Const, 0},
+ {"SYS_VM86", Const, 0},
+ {"SYS_VM86OLD", Const, 0},
+ {"SYS_VMSPLICE", Const, 0},
+ {"SYS_VM_PRESSURE_MONITOR", Const, 0},
+ {"SYS_VSERVER", Const, 0},
+ {"SYS_WAIT4", Const, 0},
+ {"SYS_WAIT4_NOCANCEL", Const, 0},
+ {"SYS_WAIT6", Const, 1},
+ {"SYS_WAITEVENT", Const, 0},
+ {"SYS_WAITID", Const, 0},
+ {"SYS_WAITID_NOCANCEL", Const, 0},
+ {"SYS_WAITPID", Const, 0},
+ {"SYS_WATCHEVENT", Const, 0},
+ {"SYS_WORKQ_KERNRETURN", Const, 0},
+ {"SYS_WORKQ_OPEN", Const, 0},
+ {"SYS_WRITE", Const, 0},
+ {"SYS_WRITEV", Const, 0},
+ {"SYS_WRITEV_NOCANCEL", Const, 0},
+ {"SYS_WRITE_NOCANCEL", Const, 0},
+ {"SYS_YIELD", Const, 0},
+ {"SYS__LLSEEK", Const, 0},
+ {"SYS__LWP_CONTINUE", Const, 1},
+ {"SYS__LWP_CREATE", Const, 1},
+ {"SYS__LWP_CTL", Const, 1},
+ {"SYS__LWP_DETACH", Const, 1},
+ {"SYS__LWP_EXIT", Const, 1},
+ {"SYS__LWP_GETNAME", Const, 1},
+ {"SYS__LWP_GETPRIVATE", Const, 1},
+ {"SYS__LWP_KILL", Const, 1},
+ {"SYS__LWP_PARK", Const, 1},
+ {"SYS__LWP_SELF", Const, 1},
+ {"SYS__LWP_SETNAME", Const, 1},
+ {"SYS__LWP_SETPRIVATE", Const, 1},
+ {"SYS__LWP_SUSPEND", Const, 1},
+ {"SYS__LWP_UNPARK", Const, 1},
+ {"SYS__LWP_UNPARK_ALL", Const, 1},
+ {"SYS__LWP_WAIT", Const, 1},
+ {"SYS__LWP_WAKEUP", Const, 1},
+ {"SYS__NEWSELECT", Const, 0},
+ {"SYS__PSET_BIND", Const, 1},
+ {"SYS__SCHED_GETAFFINITY", Const, 1},
+ {"SYS__SCHED_GETPARAM", Const, 1},
+ {"SYS__SCHED_SETAFFINITY", Const, 1},
+ {"SYS__SCHED_SETPARAM", Const, 1},
+ {"SYS__SYSCTL", Const, 0},
+ {"SYS__UMTX_LOCK", Const, 0},
+ {"SYS__UMTX_OP", Const, 0},
+ {"SYS__UMTX_UNLOCK", Const, 0},
+ {"SYS___ACL_ACLCHECK_FD", Const, 0},
+ {"SYS___ACL_ACLCHECK_FILE", Const, 0},
+ {"SYS___ACL_ACLCHECK_LINK", Const, 0},
+ {"SYS___ACL_DELETE_FD", Const, 0},
+ {"SYS___ACL_DELETE_FILE", Const, 0},
+ {"SYS___ACL_DELETE_LINK", Const, 0},
+ {"SYS___ACL_GET_FD", Const, 0},
+ {"SYS___ACL_GET_FILE", Const, 0},
+ {"SYS___ACL_GET_LINK", Const, 0},
+ {"SYS___ACL_SET_FD", Const, 0},
+ {"SYS___ACL_SET_FILE", Const, 0},
+ {"SYS___ACL_SET_LINK", Const, 0},
+ {"SYS___CAP_RIGHTS_GET", Const, 14},
+ {"SYS___CLONE", Const, 1},
+ {"SYS___DISABLE_THREADSIGNAL", Const, 0},
+ {"SYS___GETCWD", Const, 0},
+ {"SYS___GETLOGIN", Const, 1},
+ {"SYS___GET_TCB", Const, 1},
+ {"SYS___MAC_EXECVE", Const, 0},
+ {"SYS___MAC_GETFSSTAT", Const, 0},
+ {"SYS___MAC_GET_FD", Const, 0},
+ {"SYS___MAC_GET_FILE", Const, 0},
+ {"SYS___MAC_GET_LCID", Const, 0},
+ {"SYS___MAC_GET_LCTX", Const, 0},
+ {"SYS___MAC_GET_LINK", Const, 0},
+ {"SYS___MAC_GET_MOUNT", Const, 0},
+ {"SYS___MAC_GET_PID", Const, 0},
+ {"SYS___MAC_GET_PROC", Const, 0},
+ {"SYS___MAC_MOUNT", Const, 0},
+ {"SYS___MAC_SET_FD", Const, 0},
+ {"SYS___MAC_SET_FILE", Const, 0},
+ {"SYS___MAC_SET_LCTX", Const, 0},
+ {"SYS___MAC_SET_LINK", Const, 0},
+ {"SYS___MAC_SET_PROC", Const, 0},
+ {"SYS___MAC_SYSCALL", Const, 0},
+ {"SYS___OLD_SEMWAIT_SIGNAL", Const, 0},
+ {"SYS___OLD_SEMWAIT_SIGNAL_NOCANCEL", Const, 0},
+ {"SYS___POSIX_CHOWN", Const, 1},
+ {"SYS___POSIX_FCHOWN", Const, 1},
+ {"SYS___POSIX_LCHOWN", Const, 1},
+ {"SYS___POSIX_RENAME", Const, 1},
+ {"SYS___PTHREAD_CANCELED", Const, 0},
+ {"SYS___PTHREAD_CHDIR", Const, 0},
+ {"SYS___PTHREAD_FCHDIR", Const, 0},
+ {"SYS___PTHREAD_KILL", Const, 0},
+ {"SYS___PTHREAD_MARKCANCEL", Const, 0},
+ {"SYS___PTHREAD_SIGMASK", Const, 0},
+ {"SYS___QUOTACTL", Const, 1},
+ {"SYS___SEMCTL", Const, 1},
+ {"SYS___SEMWAIT_SIGNAL", Const, 0},
+ {"SYS___SEMWAIT_SIGNAL_NOCANCEL", Const, 0},
+ {"SYS___SETLOGIN", Const, 1},
+ {"SYS___SETUGID", Const, 0},
+ {"SYS___SET_TCB", Const, 1},
+ {"SYS___SIGACTION_SIGTRAMP", Const, 1},
+ {"SYS___SIGTIMEDWAIT", Const, 1},
+ {"SYS___SIGWAIT", Const, 0},
+ {"SYS___SIGWAIT_NOCANCEL", Const, 0},
+ {"SYS___SYSCTL", Const, 0},
+ {"SYS___TFORK", Const, 1},
+ {"SYS___THREXIT", Const, 1},
+ {"SYS___THRSIGDIVERT", Const, 1},
+ {"SYS___THRSLEEP", Const, 1},
+ {"SYS___THRWAKEUP", Const, 1},
+ {"S_ARCH1", Const, 1},
+ {"S_ARCH2", Const, 1},
+ {"S_BLKSIZE", Const, 0},
+ {"S_IEXEC", Const, 0},
+ {"S_IFBLK", Const, 0},
+ {"S_IFCHR", Const, 0},
+ {"S_IFDIR", Const, 0},
+ {"S_IFIFO", Const, 0},
+ {"S_IFLNK", Const, 0},
+ {"S_IFMT", Const, 0},
+ {"S_IFREG", Const, 0},
+ {"S_IFSOCK", Const, 0},
+ {"S_IFWHT", Const, 0},
+ {"S_IREAD", Const, 0},
+ {"S_IRGRP", Const, 0},
+ {"S_IROTH", Const, 0},
+ {"S_IRUSR", Const, 0},
+ {"S_IRWXG", Const, 0},
+ {"S_IRWXO", Const, 0},
+ {"S_IRWXU", Const, 0},
+ {"S_ISGID", Const, 0},
+ {"S_ISTXT", Const, 0},
+ {"S_ISUID", Const, 0},
+ {"S_ISVTX", Const, 0},
+ {"S_IWGRP", Const, 0},
+ {"S_IWOTH", Const, 0},
+ {"S_IWRITE", Const, 0},
+ {"S_IWUSR", Const, 0},
+ {"S_IXGRP", Const, 0},
+ {"S_IXOTH", Const, 0},
+ {"S_IXUSR", Const, 0},
+ {"S_LOGIN_SET", Const, 1},
+ {"SecurityAttributes", Type, 0},
+ {"SecurityAttributes.InheritHandle", Field, 0},
+ {"SecurityAttributes.Length", Field, 0},
+ {"SecurityAttributes.SecurityDescriptor", Field, 0},
+ {"Seek", Func, 0},
+ {"Select", Func, 0},
+ {"Sendfile", Func, 0},
+ {"Sendmsg", Func, 0},
+ {"SendmsgN", Func, 3},
+ {"Sendto", Func, 0},
+ {"Servent", Type, 0},
+ {"Servent.Aliases", Field, 0},
+ {"Servent.Name", Field, 0},
+ {"Servent.Port", Field, 0},
+ {"Servent.Proto", Field, 0},
+ {"SetBpf", Func, 0},
+ {"SetBpfBuflen", Func, 0},
+ {"SetBpfDatalink", Func, 0},
+ {"SetBpfHeadercmpl", Func, 0},
+ {"SetBpfImmediate", Func, 0},
+ {"SetBpfInterface", Func, 0},
+ {"SetBpfPromisc", Func, 0},
+ {"SetBpfTimeout", Func, 0},
+ {"SetCurrentDirectory", Func, 0},
+ {"SetEndOfFile", Func, 0},
+ {"SetEnvironmentVariable", Func, 0},
+ {"SetFileAttributes", Func, 0},
+ {"SetFileCompletionNotificationModes", Func, 2},
+ {"SetFilePointer", Func, 0},
+ {"SetFileTime", Func, 0},
+ {"SetHandleInformation", Func, 0},
+ {"SetKevent", Func, 0},
+ {"SetLsfPromisc", Func, 0},
+ {"SetNonblock", Func, 0},
+ {"Setdomainname", Func, 0},
+ {"Setegid", Func, 0},
+ {"Setenv", Func, 0},
+ {"Seteuid", Func, 0},
+ {"Setfsgid", Func, 0},
+ {"Setfsuid", Func, 0},
+ {"Setgid", Func, 0},
+ {"Setgroups", Func, 0},
+ {"Sethostname", Func, 0},
+ {"Setlogin", Func, 0},
+ {"Setpgid", Func, 0},
+ {"Setpriority", Func, 0},
+ {"Setprivexec", Func, 0},
+ {"Setregid", Func, 0},
+ {"Setresgid", Func, 0},
+ {"Setresuid", Func, 0},
+ {"Setreuid", Func, 0},
+ {"Setrlimit", Func, 0},
+ {"Setsid", Func, 0},
+ {"Setsockopt", Func, 0},
+ {"SetsockoptByte", Func, 0},
+ {"SetsockoptICMPv6Filter", Func, 2},
+ {"SetsockoptIPMreq", Func, 0},
+ {"SetsockoptIPMreqn", Func, 0},
+ {"SetsockoptIPv6Mreq", Func, 0},
+ {"SetsockoptInet4Addr", Func, 0},
+ {"SetsockoptInt", Func, 0},
+ {"SetsockoptLinger", Func, 0},
+ {"SetsockoptString", Func, 0},
+ {"SetsockoptTimeval", Func, 0},
+ {"Settimeofday", Func, 0},
+ {"Setuid", Func, 0},
+ {"Setxattr", Func, 1},
+ {"Shutdown", Func, 0},
+ {"SidTypeAlias", Const, 0},
+ {"SidTypeComputer", Const, 0},
+ {"SidTypeDeletedAccount", Const, 0},
+ {"SidTypeDomain", Const, 0},
+ {"SidTypeGroup", Const, 0},
+ {"SidTypeInvalid", Const, 0},
+ {"SidTypeLabel", Const, 0},
+ {"SidTypeUnknown", Const, 0},
+ {"SidTypeUser", Const, 0},
+ {"SidTypeWellKnownGroup", Const, 0},
+ {"Signal", Type, 0},
+ {"SizeofBpfHdr", Const, 0},
+ {"SizeofBpfInsn", Const, 0},
+ {"SizeofBpfProgram", Const, 0},
+ {"SizeofBpfStat", Const, 0},
+ {"SizeofBpfVersion", Const, 0},
+ {"SizeofBpfZbuf", Const, 0},
+ {"SizeofBpfZbufHeader", Const, 0},
+ {"SizeofCmsghdr", Const, 0},
+ {"SizeofICMPv6Filter", Const, 2},
+ {"SizeofIPMreq", Const, 0},
+ {"SizeofIPMreqn", Const, 0},
+ {"SizeofIPv6MTUInfo", Const, 2},
+ {"SizeofIPv6Mreq", Const, 0},
+ {"SizeofIfAddrmsg", Const, 0},
+ {"SizeofIfAnnounceMsghdr", Const, 1},
+ {"SizeofIfData", Const, 0},
+ {"SizeofIfInfomsg", Const, 0},
+ {"SizeofIfMsghdr", Const, 0},
+ {"SizeofIfaMsghdr", Const, 0},
+ {"SizeofIfmaMsghdr", Const, 0},
+ {"SizeofIfmaMsghdr2", Const, 0},
+ {"SizeofInet4Pktinfo", Const, 0},
+ {"SizeofInet6Pktinfo", Const, 0},
+ {"SizeofInotifyEvent", Const, 0},
+ {"SizeofLinger", Const, 0},
+ {"SizeofMsghdr", Const, 0},
+ {"SizeofNlAttr", Const, 0},
+ {"SizeofNlMsgerr", Const, 0},
+ {"SizeofNlMsghdr", Const, 0},
+ {"SizeofRtAttr", Const, 0},
+ {"SizeofRtGenmsg", Const, 0},
+ {"SizeofRtMetrics", Const, 0},
+ {"SizeofRtMsg", Const, 0},
+ {"SizeofRtMsghdr", Const, 0},
+ {"SizeofRtNexthop", Const, 0},
+ {"SizeofSockFilter", Const, 0},
+ {"SizeofSockFprog", Const, 0},
+ {"SizeofSockaddrAny", Const, 0},
+ {"SizeofSockaddrDatalink", Const, 0},
+ {"SizeofSockaddrInet4", Const, 0},
+ {"SizeofSockaddrInet6", Const, 0},
+ {"SizeofSockaddrLinklayer", Const, 0},
+ {"SizeofSockaddrNetlink", Const, 0},
+ {"SizeofSockaddrUnix", Const, 0},
+ {"SizeofTCPInfo", Const, 1},
+ {"SizeofUcred", Const, 0},
+ {"SlicePtrFromStrings", Func, 1},
+ {"SockFilter", Type, 0},
+ {"SockFilter.Code", Field, 0},
+ {"SockFilter.Jf", Field, 0},
+ {"SockFilter.Jt", Field, 0},
+ {"SockFilter.K", Field, 0},
+ {"SockFprog", Type, 0},
+ {"SockFprog.Filter", Field, 0},
+ {"SockFprog.Len", Field, 0},
+ {"SockFprog.Pad_cgo_0", Field, 0},
+ {"Sockaddr", Type, 0},
+ {"SockaddrDatalink", Type, 0},
+ {"SockaddrDatalink.Alen", Field, 0},
+ {"SockaddrDatalink.Data", Field, 0},
+ {"SockaddrDatalink.Family", Field, 0},
+ {"SockaddrDatalink.Index", Field, 0},
+ {"SockaddrDatalink.Len", Field, 0},
+ {"SockaddrDatalink.Nlen", Field, 0},
+ {"SockaddrDatalink.Slen", Field, 0},
+ {"SockaddrDatalink.Type", Field, 0},
+ {"SockaddrGen", Type, 0},
+ {"SockaddrInet4", Type, 0},
+ {"SockaddrInet4.Addr", Field, 0},
+ {"SockaddrInet4.Port", Field, 0},
+ {"SockaddrInet6", Type, 0},
+ {"SockaddrInet6.Addr", Field, 0},
+ {"SockaddrInet6.Port", Field, 0},
+ {"SockaddrInet6.ZoneId", Field, 0},
+ {"SockaddrLinklayer", Type, 0},
+ {"SockaddrLinklayer.Addr", Field, 0},
+ {"SockaddrLinklayer.Halen", Field, 0},
+ {"SockaddrLinklayer.Hatype", Field, 0},
+ {"SockaddrLinklayer.Ifindex", Field, 0},
+ {"SockaddrLinklayer.Pkttype", Field, 0},
+ {"SockaddrLinklayer.Protocol", Field, 0},
+ {"SockaddrNetlink", Type, 0},
+ {"SockaddrNetlink.Family", Field, 0},
+ {"SockaddrNetlink.Groups", Field, 0},
+ {"SockaddrNetlink.Pad", Field, 0},
+ {"SockaddrNetlink.Pid", Field, 0},
+ {"SockaddrUnix", Type, 0},
+ {"SockaddrUnix.Name", Field, 0},
+ {"Socket", Func, 0},
+ {"SocketControlMessage", Type, 0},
+ {"SocketControlMessage.Data", Field, 0},
+ {"SocketControlMessage.Header", Field, 0},
+ {"SocketDisableIPv6", Var, 0},
+ {"Socketpair", Func, 0},
+ {"Splice", Func, 0},
+ {"StartProcess", Func, 0},
+ {"StartupInfo", Type, 0},
+ {"StartupInfo.Cb", Field, 0},
+ {"StartupInfo.Desktop", Field, 0},
+ {"StartupInfo.FillAttribute", Field, 0},
+ {"StartupInfo.Flags", Field, 0},
+ {"StartupInfo.ShowWindow", Field, 0},
+ {"StartupInfo.StdErr", Field, 0},
+ {"StartupInfo.StdInput", Field, 0},
+ {"StartupInfo.StdOutput", Field, 0},
+ {"StartupInfo.Title", Field, 0},
+ {"StartupInfo.X", Field, 0},
+ {"StartupInfo.XCountChars", Field, 0},
+ {"StartupInfo.XSize", Field, 0},
+ {"StartupInfo.Y", Field, 0},
+ {"StartupInfo.YCountChars", Field, 0},
+ {"StartupInfo.YSize", Field, 0},
+ {"Stat", Func, 0},
+ {"Stat_t", Type, 0},
+ {"Stat_t.Atim", Field, 0},
+ {"Stat_t.Atim_ext", Field, 12},
+ {"Stat_t.Atimespec", Field, 0},
+ {"Stat_t.Birthtimespec", Field, 0},
+ {"Stat_t.Blksize", Field, 0},
+ {"Stat_t.Blocks", Field, 0},
+ {"Stat_t.Btim_ext", Field, 12},
+ {"Stat_t.Ctim", Field, 0},
+ {"Stat_t.Ctim_ext", Field, 12},
+ {"Stat_t.Ctimespec", Field, 0},
+ {"Stat_t.Dev", Field, 0},
+ {"Stat_t.Flags", Field, 0},
+ {"Stat_t.Gen", Field, 0},
+ {"Stat_t.Gid", Field, 0},
+ {"Stat_t.Ino", Field, 0},
+ {"Stat_t.Lspare", Field, 0},
+ {"Stat_t.Lspare0", Field, 2},
+ {"Stat_t.Lspare1", Field, 2},
+ {"Stat_t.Mode", Field, 0},
+ {"Stat_t.Mtim", Field, 0},
+ {"Stat_t.Mtim_ext", Field, 12},
+ {"Stat_t.Mtimespec", Field, 0},
+ {"Stat_t.Nlink", Field, 0},
+ {"Stat_t.Pad_cgo_0", Field, 0},
+ {"Stat_t.Pad_cgo_1", Field, 0},
+ {"Stat_t.Pad_cgo_2", Field, 0},
+ {"Stat_t.Padding0", Field, 12},
+ {"Stat_t.Padding1", Field, 12},
+ {"Stat_t.Qspare", Field, 0},
+ {"Stat_t.Rdev", Field, 0},
+ {"Stat_t.Size", Field, 0},
+ {"Stat_t.Spare", Field, 2},
+ {"Stat_t.Uid", Field, 0},
+ {"Stat_t.X__pad0", Field, 0},
+ {"Stat_t.X__pad1", Field, 0},
+ {"Stat_t.X__pad2", Field, 0},
+ {"Stat_t.X__st_birthtim", Field, 2},
+ {"Stat_t.X__st_ino", Field, 0},
+ {"Stat_t.X__unused", Field, 0},
+ {"Statfs", Func, 0},
+ {"Statfs_t", Type, 0},
+ {"Statfs_t.Asyncreads", Field, 0},
+ {"Statfs_t.Asyncwrites", Field, 0},
+ {"Statfs_t.Bavail", Field, 0},
+ {"Statfs_t.Bfree", Field, 0},
+ {"Statfs_t.Blocks", Field, 0},
+ {"Statfs_t.Bsize", Field, 0},
+ {"Statfs_t.Charspare", Field, 0},
+ {"Statfs_t.F_asyncreads", Field, 2},
+ {"Statfs_t.F_asyncwrites", Field, 2},
+ {"Statfs_t.F_bavail", Field, 2},
+ {"Statfs_t.F_bfree", Field, 2},
+ {"Statfs_t.F_blocks", Field, 2},
+ {"Statfs_t.F_bsize", Field, 2},
+ {"Statfs_t.F_ctime", Field, 2},
+ {"Statfs_t.F_favail", Field, 2},
+ {"Statfs_t.F_ffree", Field, 2},
+ {"Statfs_t.F_files", Field, 2},
+ {"Statfs_t.F_flags", Field, 2},
+ {"Statfs_t.F_fsid", Field, 2},
+ {"Statfs_t.F_fstypename", Field, 2},
+ {"Statfs_t.F_iosize", Field, 2},
+ {"Statfs_t.F_mntfromname", Field, 2},
+ {"Statfs_t.F_mntfromspec", Field, 3},
+ {"Statfs_t.F_mntonname", Field, 2},
+ {"Statfs_t.F_namemax", Field, 2},
+ {"Statfs_t.F_owner", Field, 2},
+ {"Statfs_t.F_spare", Field, 2},
+ {"Statfs_t.F_syncreads", Field, 2},
+ {"Statfs_t.F_syncwrites", Field, 2},
+ {"Statfs_t.Ffree", Field, 0},
+ {"Statfs_t.Files", Field, 0},
+ {"Statfs_t.Flags", Field, 0},
+ {"Statfs_t.Frsize", Field, 0},
+ {"Statfs_t.Fsid", Field, 0},
+ {"Statfs_t.Fssubtype", Field, 0},
+ {"Statfs_t.Fstypename", Field, 0},
+ {"Statfs_t.Iosize", Field, 0},
+ {"Statfs_t.Mntfromname", Field, 0},
+ {"Statfs_t.Mntonname", Field, 0},
+ {"Statfs_t.Mount_info", Field, 2},
+ {"Statfs_t.Namelen", Field, 0},
+ {"Statfs_t.Namemax", Field, 0},
+ {"Statfs_t.Owner", Field, 0},
+ {"Statfs_t.Pad_cgo_0", Field, 0},
+ {"Statfs_t.Pad_cgo_1", Field, 2},
+ {"Statfs_t.Reserved", Field, 0},
+ {"Statfs_t.Spare", Field, 0},
+ {"Statfs_t.Syncreads", Field, 0},
+ {"Statfs_t.Syncwrites", Field, 0},
+ {"Statfs_t.Type", Field, 0},
+ {"Statfs_t.Version", Field, 0},
+ {"Stderr", Var, 0},
+ {"Stdin", Var, 0},
+ {"Stdout", Var, 0},
+ {"StringBytePtr", Func, 0},
+ {"StringByteSlice", Func, 0},
+ {"StringSlicePtr", Func, 0},
+ {"StringToSid", Func, 0},
+ {"StringToUTF16", Func, 0},
+ {"StringToUTF16Ptr", Func, 0},
+ {"Symlink", Func, 0},
+ {"Sync", Func, 0},
+ {"SyncFileRange", Func, 0},
+ {"SysProcAttr", Type, 0},
+ {"SysProcAttr.AdditionalInheritedHandles", Field, 17},
+ {"SysProcAttr.AmbientCaps", Field, 9},
+ {"SysProcAttr.CgroupFD", Field, 20},
+ {"SysProcAttr.Chroot", Field, 0},
+ {"SysProcAttr.Cloneflags", Field, 2},
+ {"SysProcAttr.CmdLine", Field, 0},
+ {"SysProcAttr.CreationFlags", Field, 1},
+ {"SysProcAttr.Credential", Field, 0},
+ {"SysProcAttr.Ctty", Field, 1},
+ {"SysProcAttr.Foreground", Field, 5},
+ {"SysProcAttr.GidMappings", Field, 4},
+ {"SysProcAttr.GidMappingsEnableSetgroups", Field, 5},
+ {"SysProcAttr.HideWindow", Field, 0},
+ {"SysProcAttr.Jail", Field, 21},
+ {"SysProcAttr.NoInheritHandles", Field, 16},
+ {"SysProcAttr.Noctty", Field, 0},
+ {"SysProcAttr.ParentProcess", Field, 17},
+ {"SysProcAttr.Pdeathsig", Field, 0},
+ {"SysProcAttr.Pgid", Field, 5},
+ {"SysProcAttr.PidFD", Field, 22},
+ {"SysProcAttr.ProcessAttributes", Field, 13},
+ {"SysProcAttr.Ptrace", Field, 0},
+ {"SysProcAttr.Setctty", Field, 0},
+ {"SysProcAttr.Setpgid", Field, 0},
+ {"SysProcAttr.Setsid", Field, 0},
+ {"SysProcAttr.ThreadAttributes", Field, 13},
+ {"SysProcAttr.Token", Field, 10},
+ {"SysProcAttr.UidMappings", Field, 4},
+ {"SysProcAttr.Unshareflags", Field, 7},
+ {"SysProcAttr.UseCgroupFD", Field, 20},
+ {"SysProcIDMap", Type, 4},
+ {"SysProcIDMap.ContainerID", Field, 4},
+ {"SysProcIDMap.HostID", Field, 4},
+ {"SysProcIDMap.Size", Field, 4},
+ {"Syscall", Func, 0},
+ {"Syscall12", Func, 0},
+ {"Syscall15", Func, 0},
+ {"Syscall18", Func, 12},
+ {"Syscall6", Func, 0},
+ {"Syscall9", Func, 0},
+ {"SyscallN", Func, 18},
+ {"Sysctl", Func, 0},
+ {"SysctlUint32", Func, 0},
+ {"Sysctlnode", Type, 2},
+ {"Sysctlnode.Flags", Field, 2},
+ {"Sysctlnode.Name", Field, 2},
+ {"Sysctlnode.Num", Field, 2},
+ {"Sysctlnode.Un", Field, 2},
+ {"Sysctlnode.Ver", Field, 2},
+ {"Sysctlnode.X__rsvd", Field, 2},
+ {"Sysctlnode.X_sysctl_desc", Field, 2},
+ {"Sysctlnode.X_sysctl_func", Field, 2},
+ {"Sysctlnode.X_sysctl_parent", Field, 2},
+ {"Sysctlnode.X_sysctl_size", Field, 2},
+ {"Sysinfo", Func, 0},
+ {"Sysinfo_t", Type, 0},
+ {"Sysinfo_t.Bufferram", Field, 0},
+ {"Sysinfo_t.Freehigh", Field, 0},
+ {"Sysinfo_t.Freeram", Field, 0},
+ {"Sysinfo_t.Freeswap", Field, 0},
+ {"Sysinfo_t.Loads", Field, 0},
+ {"Sysinfo_t.Pad", Field, 0},
+ {"Sysinfo_t.Pad_cgo_0", Field, 0},
+ {"Sysinfo_t.Pad_cgo_1", Field, 0},
+ {"Sysinfo_t.Procs", Field, 0},
+ {"Sysinfo_t.Sharedram", Field, 0},
+ {"Sysinfo_t.Totalhigh", Field, 0},
+ {"Sysinfo_t.Totalram", Field, 0},
+ {"Sysinfo_t.Totalswap", Field, 0},
+ {"Sysinfo_t.Unit", Field, 0},
+ {"Sysinfo_t.Uptime", Field, 0},
+ {"Sysinfo_t.X_f", Field, 0},
+ {"Systemtime", Type, 0},
+ {"Systemtime.Day", Field, 0},
+ {"Systemtime.DayOfWeek", Field, 0},
+ {"Systemtime.Hour", Field, 0},
+ {"Systemtime.Milliseconds", Field, 0},
+ {"Systemtime.Minute", Field, 0},
+ {"Systemtime.Month", Field, 0},
+ {"Systemtime.Second", Field, 0},
+ {"Systemtime.Year", Field, 0},
+ {"TCGETS", Const, 0},
+ {"TCIFLUSH", Const, 1},
+ {"TCIOFLUSH", Const, 1},
+ {"TCOFLUSH", Const, 1},
+ {"TCPInfo", Type, 1},
+ {"TCPInfo.Advmss", Field, 1},
+ {"TCPInfo.Ato", Field, 1},
+ {"TCPInfo.Backoff", Field, 1},
+ {"TCPInfo.Ca_state", Field, 1},
+ {"TCPInfo.Fackets", Field, 1},
+ {"TCPInfo.Last_ack_recv", Field, 1},
+ {"TCPInfo.Last_ack_sent", Field, 1},
+ {"TCPInfo.Last_data_recv", Field, 1},
+ {"TCPInfo.Last_data_sent", Field, 1},
+ {"TCPInfo.Lost", Field, 1},
+ {"TCPInfo.Options", Field, 1},
+ {"TCPInfo.Pad_cgo_0", Field, 1},
+ {"TCPInfo.Pmtu", Field, 1},
+ {"TCPInfo.Probes", Field, 1},
+ {"TCPInfo.Rcv_mss", Field, 1},
+ {"TCPInfo.Rcv_rtt", Field, 1},
+ {"TCPInfo.Rcv_space", Field, 1},
+ {"TCPInfo.Rcv_ssthresh", Field, 1},
+ {"TCPInfo.Reordering", Field, 1},
+ {"TCPInfo.Retrans", Field, 1},
+ {"TCPInfo.Retransmits", Field, 1},
+ {"TCPInfo.Rto", Field, 1},
+ {"TCPInfo.Rtt", Field, 1},
+ {"TCPInfo.Rttvar", Field, 1},
+ {"TCPInfo.Sacked", Field, 1},
+ {"TCPInfo.Snd_cwnd", Field, 1},
+ {"TCPInfo.Snd_mss", Field, 1},
+ {"TCPInfo.Snd_ssthresh", Field, 1},
+ {"TCPInfo.State", Field, 1},
+ {"TCPInfo.Total_retrans", Field, 1},
+ {"TCPInfo.Unacked", Field, 1},
+ {"TCPKeepalive", Type, 3},
+ {"TCPKeepalive.Interval", Field, 3},
+ {"TCPKeepalive.OnOff", Field, 3},
+ {"TCPKeepalive.Time", Field, 3},
+ {"TCP_CA_NAME_MAX", Const, 0},
+ {"TCP_CONGCTL", Const, 1},
+ {"TCP_CONGESTION", Const, 0},
+ {"TCP_CONNECTIONTIMEOUT", Const, 0},
+ {"TCP_CORK", Const, 0},
+ {"TCP_DEFER_ACCEPT", Const, 0},
+ {"TCP_ENABLE_ECN", Const, 16},
+ {"TCP_INFO", Const, 0},
+ {"TCP_KEEPALIVE", Const, 0},
+ {"TCP_KEEPCNT", Const, 0},
+ {"TCP_KEEPIDLE", Const, 0},
+ {"TCP_KEEPINIT", Const, 1},
+ {"TCP_KEEPINTVL", Const, 0},
+ {"TCP_LINGER2", Const, 0},
+ {"TCP_MAXBURST", Const, 0},
+ {"TCP_MAXHLEN", Const, 0},
+ {"TCP_MAXOLEN", Const, 0},
+ {"TCP_MAXSEG", Const, 0},
+ {"TCP_MAXWIN", Const, 0},
+ {"TCP_MAX_SACK", Const, 0},
+ {"TCP_MAX_WINSHIFT", Const, 0},
+ {"TCP_MD5SIG", Const, 0},
+ {"TCP_MD5SIG_MAXKEYLEN", Const, 0},
+ {"TCP_MINMSS", Const, 0},
+ {"TCP_MINMSSOVERLOAD", Const, 0},
+ {"TCP_MSS", Const, 0},
+ {"TCP_NODELAY", Const, 0},
+ {"TCP_NOOPT", Const, 0},
+ {"TCP_NOPUSH", Const, 0},
+ {"TCP_NOTSENT_LOWAT", Const, 16},
+ {"TCP_NSTATES", Const, 1},
+ {"TCP_QUICKACK", Const, 0},
+ {"TCP_RXT_CONNDROPTIME", Const, 0},
+ {"TCP_RXT_FINDROP", Const, 0},
+ {"TCP_SACK_ENABLE", Const, 1},
+ {"TCP_SENDMOREACKS", Const, 16},
+ {"TCP_SYNCNT", Const, 0},
+ {"TCP_VENDOR", Const, 3},
+ {"TCP_WINDOW_CLAMP", Const, 0},
+ {"TCSAFLUSH", Const, 1},
+ {"TCSETS", Const, 0},
+ {"TF_DISCONNECT", Const, 0},
+ {"TF_REUSE_SOCKET", Const, 0},
+ {"TF_USE_DEFAULT_WORKER", Const, 0},
+ {"TF_USE_KERNEL_APC", Const, 0},
+ {"TF_USE_SYSTEM_THREAD", Const, 0},
+ {"TF_WRITE_BEHIND", Const, 0},
+ {"TH32CS_INHERIT", Const, 4},
+ {"TH32CS_SNAPALL", Const, 4},
+ {"TH32CS_SNAPHEAPLIST", Const, 4},
+ {"TH32CS_SNAPMODULE", Const, 4},
+ {"TH32CS_SNAPMODULE32", Const, 4},
+ {"TH32CS_SNAPPROCESS", Const, 4},
+ {"TH32CS_SNAPTHREAD", Const, 4},
+ {"TIME_ZONE_ID_DAYLIGHT", Const, 0},
+ {"TIME_ZONE_ID_STANDARD", Const, 0},
+ {"TIME_ZONE_ID_UNKNOWN", Const, 0},
+ {"TIOCCBRK", Const, 0},
+ {"TIOCCDTR", Const, 0},
+ {"TIOCCONS", Const, 0},
+ {"TIOCDCDTIMESTAMP", Const, 0},
+ {"TIOCDRAIN", Const, 0},
+ {"TIOCDSIMICROCODE", Const, 0},
+ {"TIOCEXCL", Const, 0},
+ {"TIOCEXT", Const, 0},
+ {"TIOCFLAG_CDTRCTS", Const, 1},
+ {"TIOCFLAG_CLOCAL", Const, 1},
+ {"TIOCFLAG_CRTSCTS", Const, 1},
+ {"TIOCFLAG_MDMBUF", Const, 1},
+ {"TIOCFLAG_PPS", Const, 1},
+ {"TIOCFLAG_SOFTCAR", Const, 1},
+ {"TIOCFLUSH", Const, 0},
+ {"TIOCGDEV", Const, 0},
+ {"TIOCGDRAINWAIT", Const, 0},
+ {"TIOCGETA", Const, 0},
+ {"TIOCGETD", Const, 0},
+ {"TIOCGFLAGS", Const, 1},
+ {"TIOCGICOUNT", Const, 0},
+ {"TIOCGLCKTRMIOS", Const, 0},
+ {"TIOCGLINED", Const, 1},
+ {"TIOCGPGRP", Const, 0},
+ {"TIOCGPTN", Const, 0},
+ {"TIOCGQSIZE", Const, 1},
+ {"TIOCGRANTPT", Const, 1},
+ {"TIOCGRS485", Const, 0},
+ {"TIOCGSERIAL", Const, 0},
+ {"TIOCGSID", Const, 0},
+ {"TIOCGSIZE", Const, 1},
+ {"TIOCGSOFTCAR", Const, 0},
+ {"TIOCGTSTAMP", Const, 1},
+ {"TIOCGWINSZ", Const, 0},
+ {"TIOCINQ", Const, 0},
+ {"TIOCIXOFF", Const, 0},
+ {"TIOCIXON", Const, 0},
+ {"TIOCLINUX", Const, 0},
+ {"TIOCMBIC", Const, 0},
+ {"TIOCMBIS", Const, 0},
+ {"TIOCMGDTRWAIT", Const, 0},
+ {"TIOCMGET", Const, 0},
+ {"TIOCMIWAIT", Const, 0},
+ {"TIOCMODG", Const, 0},
+ {"TIOCMODS", Const, 0},
+ {"TIOCMSDTRWAIT", Const, 0},
+ {"TIOCMSET", Const, 0},
+ {"TIOCM_CAR", Const, 0},
+ {"TIOCM_CD", Const, 0},
+ {"TIOCM_CTS", Const, 0},
+ {"TIOCM_DCD", Const, 0},
+ {"TIOCM_DSR", Const, 0},
+ {"TIOCM_DTR", Const, 0},
+ {"TIOCM_LE", Const, 0},
+ {"TIOCM_RI", Const, 0},
+ {"TIOCM_RNG", Const, 0},
+ {"TIOCM_RTS", Const, 0},
+ {"TIOCM_SR", Const, 0},
+ {"TIOCM_ST", Const, 0},
+ {"TIOCNOTTY", Const, 0},
+ {"TIOCNXCL", Const, 0},
+ {"TIOCOUTQ", Const, 0},
+ {"TIOCPKT", Const, 0},
+ {"TIOCPKT_DATA", Const, 0},
+ {"TIOCPKT_DOSTOP", Const, 0},
+ {"TIOCPKT_FLUSHREAD", Const, 0},
+ {"TIOCPKT_FLUSHWRITE", Const, 0},
+ {"TIOCPKT_IOCTL", Const, 0},
+ {"TIOCPKT_NOSTOP", Const, 0},
+ {"TIOCPKT_START", Const, 0},
+ {"TIOCPKT_STOP", Const, 0},
+ {"TIOCPTMASTER", Const, 0},
+ {"TIOCPTMGET", Const, 1},
+ {"TIOCPTSNAME", Const, 1},
+ {"TIOCPTYGNAME", Const, 0},
+ {"TIOCPTYGRANT", Const, 0},
+ {"TIOCPTYUNLK", Const, 0},
+ {"TIOCRCVFRAME", Const, 1},
+ {"TIOCREMOTE", Const, 0},
+ {"TIOCSBRK", Const, 0},
+ {"TIOCSCONS", Const, 0},
+ {"TIOCSCTTY", Const, 0},
+ {"TIOCSDRAINWAIT", Const, 0},
+ {"TIOCSDTR", Const, 0},
+ {"TIOCSERCONFIG", Const, 0},
+ {"TIOCSERGETLSR", Const, 0},
+ {"TIOCSERGETMULTI", Const, 0},
+ {"TIOCSERGSTRUCT", Const, 0},
+ {"TIOCSERGWILD", Const, 0},
+ {"TIOCSERSETMULTI", Const, 0},
+ {"TIOCSERSWILD", Const, 0},
+ {"TIOCSER_TEMT", Const, 0},
+ {"TIOCSETA", Const, 0},
+ {"TIOCSETAF", Const, 0},
+ {"TIOCSETAW", Const, 0},
+ {"TIOCSETD", Const, 0},
+ {"TIOCSFLAGS", Const, 1},
+ {"TIOCSIG", Const, 0},
+ {"TIOCSLCKTRMIOS", Const, 0},
+ {"TIOCSLINED", Const, 1},
+ {"TIOCSPGRP", Const, 0},
+ {"TIOCSPTLCK", Const, 0},
+ {"TIOCSQSIZE", Const, 1},
+ {"TIOCSRS485", Const, 0},
+ {"TIOCSSERIAL", Const, 0},
+ {"TIOCSSIZE", Const, 1},
+ {"TIOCSSOFTCAR", Const, 0},
+ {"TIOCSTART", Const, 0},
+ {"TIOCSTAT", Const, 0},
+ {"TIOCSTI", Const, 0},
+ {"TIOCSTOP", Const, 0},
+ {"TIOCSTSTAMP", Const, 1},
+ {"TIOCSWINSZ", Const, 0},
+ {"TIOCTIMESTAMP", Const, 0},
+ {"TIOCUCNTL", Const, 0},
+ {"TIOCVHANGUP", Const, 0},
+ {"TIOCXMTFRAME", Const, 1},
+ {"TOKEN_ADJUST_DEFAULT", Const, 0},
+ {"TOKEN_ADJUST_GROUPS", Const, 0},
+ {"TOKEN_ADJUST_PRIVILEGES", Const, 0},
+ {"TOKEN_ADJUST_SESSIONID", Const, 11},
+ {"TOKEN_ALL_ACCESS", Const, 0},
+ {"TOKEN_ASSIGN_PRIMARY", Const, 0},
+ {"TOKEN_DUPLICATE", Const, 0},
+ {"TOKEN_EXECUTE", Const, 0},
+ {"TOKEN_IMPERSONATE", Const, 0},
+ {"TOKEN_QUERY", Const, 0},
+ {"TOKEN_QUERY_SOURCE", Const, 0},
+ {"TOKEN_READ", Const, 0},
+ {"TOKEN_WRITE", Const, 0},
+ {"TOSTOP", Const, 0},
+ {"TRUNCATE_EXISTING", Const, 0},
+ {"TUNATTACHFILTER", Const, 0},
+ {"TUNDETACHFILTER", Const, 0},
+ {"TUNGETFEATURES", Const, 0},
+ {"TUNGETIFF", Const, 0},
+ {"TUNGETSNDBUF", Const, 0},
+ {"TUNGETVNETHDRSZ", Const, 0},
+ {"TUNSETDEBUG", Const, 0},
+ {"TUNSETGROUP", Const, 0},
+ {"TUNSETIFF", Const, 0},
+ {"TUNSETLINK", Const, 0},
+ {"TUNSETNOCSUM", Const, 0},
+ {"TUNSETOFFLOAD", Const, 0},
+ {"TUNSETOWNER", Const, 0},
+ {"TUNSETPERSIST", Const, 0},
+ {"TUNSETSNDBUF", Const, 0},
+ {"TUNSETTXFILTER", Const, 0},
+ {"TUNSETVNETHDRSZ", Const, 0},
+ {"Tee", Func, 0},
+ {"TerminateProcess", Func, 0},
+ {"Termios", Type, 0},
+ {"Termios.Cc", Field, 0},
+ {"Termios.Cflag", Field, 0},
+ {"Termios.Iflag", Field, 0},
+ {"Termios.Ispeed", Field, 0},
+ {"Termios.Lflag", Field, 0},
+ {"Termios.Line", Field, 0},
+ {"Termios.Oflag", Field, 0},
+ {"Termios.Ospeed", Field, 0},
+ {"Termios.Pad_cgo_0", Field, 0},
+ {"Tgkill", Func, 0},
+ {"Time", Func, 0},
+ {"Time_t", Type, 0},
+ {"Times", Func, 0},
+ {"Timespec", Type, 0},
+ {"Timespec.Nsec", Field, 0},
+ {"Timespec.Pad_cgo_0", Field, 2},
+ {"Timespec.Sec", Field, 0},
+ {"TimespecToNsec", Func, 0},
+ {"Timeval", Type, 0},
+ {"Timeval.Pad_cgo_0", Field, 0},
+ {"Timeval.Sec", Field, 0},
+ {"Timeval.Usec", Field, 0},
+ {"Timeval32", Type, 0},
+ {"Timeval32.Sec", Field, 0},
+ {"Timeval32.Usec", Field, 0},
+ {"TimevalToNsec", Func, 0},
+ {"Timex", Type, 0},
+ {"Timex.Calcnt", Field, 0},
+ {"Timex.Constant", Field, 0},
+ {"Timex.Errcnt", Field, 0},
+ {"Timex.Esterror", Field, 0},
+ {"Timex.Freq", Field, 0},
+ {"Timex.Jitcnt", Field, 0},
+ {"Timex.Jitter", Field, 0},
+ {"Timex.Maxerror", Field, 0},
+ {"Timex.Modes", Field, 0},
+ {"Timex.Offset", Field, 0},
+ {"Timex.Pad_cgo_0", Field, 0},
+ {"Timex.Pad_cgo_1", Field, 0},
+ {"Timex.Pad_cgo_2", Field, 0},
+ {"Timex.Pad_cgo_3", Field, 0},
+ {"Timex.Ppsfreq", Field, 0},
+ {"Timex.Precision", Field, 0},
+ {"Timex.Shift", Field, 0},
+ {"Timex.Stabil", Field, 0},
+ {"Timex.Status", Field, 0},
+ {"Timex.Stbcnt", Field, 0},
+ {"Timex.Tai", Field, 0},
+ {"Timex.Tick", Field, 0},
+ {"Timex.Time", Field, 0},
+ {"Timex.Tolerance", Field, 0},
+ {"Timezoneinformation", Type, 0},
+ {"Timezoneinformation.Bias", Field, 0},
+ {"Timezoneinformation.DaylightBias", Field, 0},
+ {"Timezoneinformation.DaylightDate", Field, 0},
+ {"Timezoneinformation.DaylightName", Field, 0},
+ {"Timezoneinformation.StandardBias", Field, 0},
+ {"Timezoneinformation.StandardDate", Field, 0},
+ {"Timezoneinformation.StandardName", Field, 0},
+ {"Tms", Type, 0},
+ {"Tms.Cstime", Field, 0},
+ {"Tms.Cutime", Field, 0},
+ {"Tms.Stime", Field, 0},
+ {"Tms.Utime", Field, 0},
+ {"Token", Type, 0},
+ {"TokenAccessInformation", Const, 0},
+ {"TokenAuditPolicy", Const, 0},
+ {"TokenDefaultDacl", Const, 0},
+ {"TokenElevation", Const, 0},
+ {"TokenElevationType", Const, 0},
+ {"TokenGroups", Const, 0},
+ {"TokenGroupsAndPrivileges", Const, 0},
+ {"TokenHasRestrictions", Const, 0},
+ {"TokenImpersonationLevel", Const, 0},
+ {"TokenIntegrityLevel", Const, 0},
+ {"TokenLinkedToken", Const, 0},
+ {"TokenLogonSid", Const, 0},
+ {"TokenMandatoryPolicy", Const, 0},
+ {"TokenOrigin", Const, 0},
+ {"TokenOwner", Const, 0},
+ {"TokenPrimaryGroup", Const, 0},
+ {"TokenPrivileges", Const, 0},
+ {"TokenRestrictedSids", Const, 0},
+ {"TokenSandBoxInert", Const, 0},
+ {"TokenSessionId", Const, 0},
+ {"TokenSessionReference", Const, 0},
+ {"TokenSource", Const, 0},
+ {"TokenStatistics", Const, 0},
+ {"TokenType", Const, 0},
+ {"TokenUIAccess", Const, 0},
+ {"TokenUser", Const, 0},
+ {"TokenVirtualizationAllowed", Const, 0},
+ {"TokenVirtualizationEnabled", Const, 0},
+ {"Tokenprimarygroup", Type, 0},
+ {"Tokenprimarygroup.PrimaryGroup", Field, 0},
+ {"Tokenuser", Type, 0},
+ {"Tokenuser.User", Field, 0},
+ {"TranslateAccountName", Func, 0},
+ {"TranslateName", Func, 0},
+ {"TransmitFile", Func, 0},
+ {"TransmitFileBuffers", Type, 0},
+ {"TransmitFileBuffers.Head", Field, 0},
+ {"TransmitFileBuffers.HeadLength", Field, 0},
+ {"TransmitFileBuffers.Tail", Field, 0},
+ {"TransmitFileBuffers.TailLength", Field, 0},
+ {"Truncate", Func, 0},
+ {"UNIX_PATH_MAX", Const, 12},
+ {"USAGE_MATCH_TYPE_AND", Const, 0},
+ {"USAGE_MATCH_TYPE_OR", Const, 0},
+ {"UTF16FromString", Func, 1},
+ {"UTF16PtrFromString", Func, 1},
+ {"UTF16ToString", Func, 0},
+ {"Ucred", Type, 0},
+ {"Ucred.Gid", Field, 0},
+ {"Ucred.Pid", Field, 0},
+ {"Ucred.Uid", Field, 0},
+ {"Umask", Func, 0},
+ {"Uname", Func, 0},
+ {"Undelete", Func, 0},
+ {"UnixCredentials", Func, 0},
+ {"UnixRights", Func, 0},
+ {"Unlink", Func, 0},
+ {"Unlinkat", Func, 0},
+ {"UnmapViewOfFile", Func, 0},
+ {"Unmount", Func, 0},
+ {"Unsetenv", Func, 4},
+ {"Unshare", Func, 0},
+ {"UserInfo10", Type, 0},
+ {"UserInfo10.Comment", Field, 0},
+ {"UserInfo10.FullName", Field, 0},
+ {"UserInfo10.Name", Field, 0},
+ {"UserInfo10.UsrComment", Field, 0},
+ {"Ustat", Func, 0},
+ {"Ustat_t", Type, 0},
+ {"Ustat_t.Fname", Field, 0},
+ {"Ustat_t.Fpack", Field, 0},
+ {"Ustat_t.Pad_cgo_0", Field, 0},
+ {"Ustat_t.Pad_cgo_1", Field, 0},
+ {"Ustat_t.Tfree", Field, 0},
+ {"Ustat_t.Tinode", Field, 0},
+ {"Utimbuf", Type, 0},
+ {"Utimbuf.Actime", Field, 0},
+ {"Utimbuf.Modtime", Field, 0},
+ {"Utime", Func, 0},
+ {"Utimes", Func, 0},
+ {"UtimesNano", Func, 1},
+ {"Utsname", Type, 0},
+ {"Utsname.Domainname", Field, 0},
+ {"Utsname.Machine", Field, 0},
+ {"Utsname.Nodename", Field, 0},
+ {"Utsname.Release", Field, 0},
+ {"Utsname.Sysname", Field, 0},
+ {"Utsname.Version", Field, 0},
+ {"VDISCARD", Const, 0},
+ {"VDSUSP", Const, 1},
+ {"VEOF", Const, 0},
+ {"VEOL", Const, 0},
+ {"VEOL2", Const, 0},
+ {"VERASE", Const, 0},
+ {"VERASE2", Const, 1},
+ {"VINTR", Const, 0},
+ {"VKILL", Const, 0},
+ {"VLNEXT", Const, 0},
+ {"VMIN", Const, 0},
+ {"VQUIT", Const, 0},
+ {"VREPRINT", Const, 0},
+ {"VSTART", Const, 0},
+ {"VSTATUS", Const, 1},
+ {"VSTOP", Const, 0},
+ {"VSUSP", Const, 0},
+ {"VSWTC", Const, 0},
+ {"VT0", Const, 1},
+ {"VT1", Const, 1},
+ {"VTDLY", Const, 1},
+ {"VTIME", Const, 0},
+ {"VWERASE", Const, 0},
+ {"VirtualLock", Func, 0},
+ {"VirtualUnlock", Func, 0},
+ {"WAIT_ABANDONED", Const, 0},
+ {"WAIT_FAILED", Const, 0},
+ {"WAIT_OBJECT_0", Const, 0},
+ {"WAIT_TIMEOUT", Const, 0},
+ {"WALL", Const, 0},
+ {"WALLSIG", Const, 1},
+ {"WALTSIG", Const, 1},
+ {"WCLONE", Const, 0},
+ {"WCONTINUED", Const, 0},
+ {"WCOREFLAG", Const, 0},
+ {"WEXITED", Const, 0},
+ {"WLINUXCLONE", Const, 0},
+ {"WNOHANG", Const, 0},
+ {"WNOTHREAD", Const, 0},
+ {"WNOWAIT", Const, 0},
+ {"WNOZOMBIE", Const, 1},
+ {"WOPTSCHECKED", Const, 1},
+ {"WORDSIZE", Const, 0},
+ {"WSABuf", Type, 0},
+ {"WSABuf.Buf", Field, 0},
+ {"WSABuf.Len", Field, 0},
+ {"WSACleanup", Func, 0},
+ {"WSADESCRIPTION_LEN", Const, 0},
+ {"WSAData", Type, 0},
+ {"WSAData.Description", Field, 0},
+ {"WSAData.HighVersion", Field, 0},
+ {"WSAData.MaxSockets", Field, 0},
+ {"WSAData.MaxUdpDg", Field, 0},
+ {"WSAData.SystemStatus", Field, 0},
+ {"WSAData.VendorInfo", Field, 0},
+ {"WSAData.Version", Field, 0},
+ {"WSAEACCES", Const, 2},
+ {"WSAECONNABORTED", Const, 9},
+ {"WSAECONNRESET", Const, 3},
+ {"WSAENOPROTOOPT", Const, 23},
+ {"WSAEnumProtocols", Func, 2},
+ {"WSAID_CONNECTEX", Var, 1},
+ {"WSAIoctl", Func, 0},
+ {"WSAPROTOCOL_LEN", Const, 2},
+ {"WSAProtocolChain", Type, 2},
+ {"WSAProtocolChain.ChainEntries", Field, 2},
+ {"WSAProtocolChain.ChainLen", Field, 2},
+ {"WSAProtocolInfo", Type, 2},
+ {"WSAProtocolInfo.AddressFamily", Field, 2},
+ {"WSAProtocolInfo.CatalogEntryId", Field, 2},
+ {"WSAProtocolInfo.MaxSockAddr", Field, 2},
+ {"WSAProtocolInfo.MessageSize", Field, 2},
+ {"WSAProtocolInfo.MinSockAddr", Field, 2},
+ {"WSAProtocolInfo.NetworkByteOrder", Field, 2},
+ {"WSAProtocolInfo.Protocol", Field, 2},
+ {"WSAProtocolInfo.ProtocolChain", Field, 2},
+ {"WSAProtocolInfo.ProtocolMaxOffset", Field, 2},
+ {"WSAProtocolInfo.ProtocolName", Field, 2},
+ {"WSAProtocolInfo.ProviderFlags", Field, 2},
+ {"WSAProtocolInfo.ProviderId", Field, 2},
+ {"WSAProtocolInfo.ProviderReserved", Field, 2},
+ {"WSAProtocolInfo.SecurityScheme", Field, 2},
+ {"WSAProtocolInfo.ServiceFlags1", Field, 2},
+ {"WSAProtocolInfo.ServiceFlags2", Field, 2},
+ {"WSAProtocolInfo.ServiceFlags3", Field, 2},
+ {"WSAProtocolInfo.ServiceFlags4", Field, 2},
+ {"WSAProtocolInfo.SocketType", Field, 2},
+ {"WSAProtocolInfo.Version", Field, 2},
+ {"WSARecv", Func, 0},
+ {"WSARecvFrom", Func, 0},
+ {"WSASYS_STATUS_LEN", Const, 0},
+ {"WSASend", Func, 0},
+ {"WSASendTo", Func, 0},
+ {"WSASendto", Func, 0},
+ {"WSAStartup", Func, 0},
+ {"WSTOPPED", Const, 0},
+ {"WTRAPPED", Const, 1},
+ {"WUNTRACED", Const, 0},
+ {"Wait4", Func, 0},
+ {"WaitForSingleObject", Func, 0},
+ {"WaitStatus", Type, 0},
+ {"WaitStatus.ExitCode", Field, 0},
+ {"Win32FileAttributeData", Type, 0},
+ {"Win32FileAttributeData.CreationTime", Field, 0},
+ {"Win32FileAttributeData.FileAttributes", Field, 0},
+ {"Win32FileAttributeData.FileSizeHigh", Field, 0},
+ {"Win32FileAttributeData.FileSizeLow", Field, 0},
+ {"Win32FileAttributeData.LastAccessTime", Field, 0},
+ {"Win32FileAttributeData.LastWriteTime", Field, 0},
+ {"Win32finddata", Type, 0},
+ {"Win32finddata.AlternateFileName", Field, 0},
+ {"Win32finddata.CreationTime", Field, 0},
+ {"Win32finddata.FileAttributes", Field, 0},
+ {"Win32finddata.FileName", Field, 0},
+ {"Win32finddata.FileSizeHigh", Field, 0},
+ {"Win32finddata.FileSizeLow", Field, 0},
+ {"Win32finddata.LastAccessTime", Field, 0},
+ {"Win32finddata.LastWriteTime", Field, 0},
+ {"Win32finddata.Reserved0", Field, 0},
+ {"Win32finddata.Reserved1", Field, 0},
+ {"Write", Func, 0},
+ {"WriteConsole", Func, 1},
+ {"WriteFile", Func, 0},
+ {"X509_ASN_ENCODING", Const, 0},
+ {"XCASE", Const, 0},
+ {"XP1_CONNECTIONLESS", Const, 2},
+ {"XP1_CONNECT_DATA", Const, 2},
+ {"XP1_DISCONNECT_DATA", Const, 2},
+ {"XP1_EXPEDITED_DATA", Const, 2},
+ {"XP1_GRACEFUL_CLOSE", Const, 2},
+ {"XP1_GUARANTEED_DELIVERY", Const, 2},
+ {"XP1_GUARANTEED_ORDER", Const, 2},
+ {"XP1_IFS_HANDLES", Const, 2},
+ {"XP1_MESSAGE_ORIENTED", Const, 2},
+ {"XP1_MULTIPOINT_CONTROL_PLANE", Const, 2},
+ {"XP1_MULTIPOINT_DATA_PLANE", Const, 2},
+ {"XP1_PARTIAL_MESSAGE", Const, 2},
+ {"XP1_PSEUDO_STREAM", Const, 2},
+ {"XP1_QOS_SUPPORTED", Const, 2},
+ {"XP1_SAN_SUPPORT_SDP", Const, 2},
+ {"XP1_SUPPORT_BROADCAST", Const, 2},
+ {"XP1_SUPPORT_MULTIPOINT", Const, 2},
+ {"XP1_UNI_RECV", Const, 2},
+ {"XP1_UNI_SEND", Const, 2},
+ },
+ "syscall/js": {
+ {"CopyBytesToGo", Func, 0},
+ {"CopyBytesToJS", Func, 0},
+ {"Error", Type, 0},
+ {"Func", Type, 0},
+ {"FuncOf", Func, 0},
+ {"Global", Func, 0},
+ {"Null", Func, 0},
+ {"Type", Type, 0},
+ {"TypeBoolean", Const, 0},
+ {"TypeFunction", Const, 0},
+ {"TypeNull", Const, 0},
+ {"TypeNumber", Const, 0},
+ {"TypeObject", Const, 0},
+ {"TypeString", Const, 0},
+ {"TypeSymbol", Const, 0},
+ {"TypeUndefined", Const, 0},
+ {"Undefined", Func, 0},
+ {"Value", Type, 0},
+ {"ValueError", Type, 0},
+ {"ValueOf", Func, 0},
+ },
+ "testing": {
+ {"(*B).Cleanup", Method, 14},
+ {"(*B).Elapsed", Method, 20},
+ {"(*B).Error", Method, 0},
+ {"(*B).Errorf", Method, 0},
+ {"(*B).Fail", Method, 0},
+ {"(*B).FailNow", Method, 0},
+ {"(*B).Failed", Method, 0},
+ {"(*B).Fatal", Method, 0},
+ {"(*B).Fatalf", Method, 0},
+ {"(*B).Helper", Method, 9},
+ {"(*B).Log", Method, 0},
+ {"(*B).Logf", Method, 0},
+ {"(*B).Name", Method, 8},
+ {"(*B).ReportAllocs", Method, 1},
+ {"(*B).ReportMetric", Method, 13},
+ {"(*B).ResetTimer", Method, 0},
+ {"(*B).Run", Method, 7},
+ {"(*B).RunParallel", Method, 3},
+ {"(*B).SetBytes", Method, 0},
+ {"(*B).SetParallelism", Method, 3},
+ {"(*B).Setenv", Method, 17},
+ {"(*B).Skip", Method, 1},
+ {"(*B).SkipNow", Method, 1},
+ {"(*B).Skipf", Method, 1},
+ {"(*B).Skipped", Method, 1},
+ {"(*B).StartTimer", Method, 0},
+ {"(*B).StopTimer", Method, 0},
+ {"(*B).TempDir", Method, 15},
+ {"(*F).Add", Method, 18},
+ {"(*F).Cleanup", Method, 18},
+ {"(*F).Error", Method, 18},
+ {"(*F).Errorf", Method, 18},
+ {"(*F).Fail", Method, 18},
+ {"(*F).FailNow", Method, 18},
+ {"(*F).Failed", Method, 18},
+ {"(*F).Fatal", Method, 18},
+ {"(*F).Fatalf", Method, 18},
+ {"(*F).Fuzz", Method, 18},
+ {"(*F).Helper", Method, 18},
+ {"(*F).Log", Method, 18},
+ {"(*F).Logf", Method, 18},
+ {"(*F).Name", Method, 18},
+ {"(*F).Setenv", Method, 18},
+ {"(*F).Skip", Method, 18},
+ {"(*F).SkipNow", Method, 18},
+ {"(*F).Skipf", Method, 18},
+ {"(*F).Skipped", Method, 18},
+ {"(*F).TempDir", Method, 18},
+ {"(*M).Run", Method, 4},
+ {"(*PB).Next", Method, 3},
+ {"(*T).Cleanup", Method, 14},
+ {"(*T).Deadline", Method, 15},
+ {"(*T).Error", Method, 0},
+ {"(*T).Errorf", Method, 0},
+ {"(*T).Fail", Method, 0},
+ {"(*T).FailNow", Method, 0},
+ {"(*T).Failed", Method, 0},
+ {"(*T).Fatal", Method, 0},
+ {"(*T).Fatalf", Method, 0},
+ {"(*T).Helper", Method, 9},
+ {"(*T).Log", Method, 0},
+ {"(*T).Logf", Method, 0},
+ {"(*T).Name", Method, 8},
+ {"(*T).Parallel", Method, 0},
+ {"(*T).Run", Method, 7},
+ {"(*T).Setenv", Method, 17},
+ {"(*T).Skip", Method, 1},
+ {"(*T).SkipNow", Method, 1},
+ {"(*T).Skipf", Method, 1},
+ {"(*T).Skipped", Method, 1},
+ {"(*T).TempDir", Method, 15},
+ {"(BenchmarkResult).AllocedBytesPerOp", Method, 1},
+ {"(BenchmarkResult).AllocsPerOp", Method, 1},
+ {"(BenchmarkResult).MemString", Method, 1},
+ {"(BenchmarkResult).NsPerOp", Method, 0},
+ {"(BenchmarkResult).String", Method, 0},
+ {"AllocsPerRun", Func, 1},
+ {"B", Type, 0},
+ {"B.N", Field, 0},
+ {"Benchmark", Func, 0},
+ {"BenchmarkResult", Type, 0},
+ {"BenchmarkResult.Bytes", Field, 0},
+ {"BenchmarkResult.Extra", Field, 13},
+ {"BenchmarkResult.MemAllocs", Field, 1},
+ {"BenchmarkResult.MemBytes", Field, 1},
+ {"BenchmarkResult.N", Field, 0},
+ {"BenchmarkResult.T", Field, 0},
+ {"Cover", Type, 2},
+ {"Cover.Blocks", Field, 2},
+ {"Cover.Counters", Field, 2},
+ {"Cover.CoveredPackages", Field, 2},
+ {"Cover.Mode", Field, 2},
+ {"CoverBlock", Type, 2},
+ {"CoverBlock.Col0", Field, 2},
+ {"CoverBlock.Col1", Field, 2},
+ {"CoverBlock.Line0", Field, 2},
+ {"CoverBlock.Line1", Field, 2},
+ {"CoverBlock.Stmts", Field, 2},
+ {"CoverMode", Func, 8},
+ {"Coverage", Func, 4},
+ {"F", Type, 18},
+ {"Init", Func, 13},
+ {"InternalBenchmark", Type, 0},
+ {"InternalBenchmark.F", Field, 0},
+ {"InternalBenchmark.Name", Field, 0},
+ {"InternalExample", Type, 0},
+ {"InternalExample.F", Field, 0},
+ {"InternalExample.Name", Field, 0},
+ {"InternalExample.Output", Field, 0},
+ {"InternalExample.Unordered", Field, 7},
+ {"InternalFuzzTarget", Type, 18},
+ {"InternalFuzzTarget.Fn", Field, 18},
+ {"InternalFuzzTarget.Name", Field, 18},
+ {"InternalTest", Type, 0},
+ {"InternalTest.F", Field, 0},
+ {"InternalTest.Name", Field, 0},
+ {"M", Type, 4},
+ {"Main", Func, 0},
+ {"MainStart", Func, 4},
+ {"PB", Type, 3},
+ {"RegisterCover", Func, 2},
+ {"RunBenchmarks", Func, 0},
+ {"RunExamples", Func, 0},
+ {"RunTests", Func, 0},
+ {"Short", Func, 0},
+ {"T", Type, 0},
+ {"TB", Type, 2},
+ {"Testing", Func, 21},
+ {"Verbose", Func, 1},
+ },
+ "testing/fstest": {
+ {"(MapFS).Glob", Method, 16},
+ {"(MapFS).Open", Method, 16},
+ {"(MapFS).ReadDir", Method, 16},
+ {"(MapFS).ReadFile", Method, 16},
+ {"(MapFS).Stat", Method, 16},
+ {"(MapFS).Sub", Method, 16},
+ {"MapFS", Type, 16},
+ {"MapFile", Type, 16},
+ {"MapFile.Data", Field, 16},
+ {"MapFile.ModTime", Field, 16},
+ {"MapFile.Mode", Field, 16},
+ {"MapFile.Sys", Field, 16},
+ {"TestFS", Func, 16},
+ },
+ "testing/iotest": {
+ {"DataErrReader", Func, 0},
+ {"ErrReader", Func, 16},
+ {"ErrTimeout", Var, 0},
+ {"HalfReader", Func, 0},
+ {"NewReadLogger", Func, 0},
+ {"NewWriteLogger", Func, 0},
+ {"OneByteReader", Func, 0},
+ {"TestReader", Func, 16},
+ {"TimeoutReader", Func, 0},
+ {"TruncateWriter", Func, 0},
+ },
+ "testing/quick": {
+ {"(*CheckEqualError).Error", Method, 0},
+ {"(*CheckError).Error", Method, 0},
+ {"(SetupError).Error", Method, 0},
+ {"Check", Func, 0},
+ {"CheckEqual", Func, 0},
+ {"CheckEqualError", Type, 0},
+ {"CheckEqualError.CheckError", Field, 0},
+ {"CheckEqualError.Out1", Field, 0},
+ {"CheckEqualError.Out2", Field, 0},
+ {"CheckError", Type, 0},
+ {"CheckError.Count", Field, 0},
+ {"CheckError.In", Field, 0},
+ {"Config", Type, 0},
+ {"Config.MaxCount", Field, 0},
+ {"Config.MaxCountScale", Field, 0},
+ {"Config.Rand", Field, 0},
+ {"Config.Values", Field, 0},
+ {"Generator", Type, 0},
+ {"SetupError", Type, 0},
+ {"Value", Func, 0},
+ },
+ "testing/slogtest": {
+ {"Run", Func, 22},
+ {"TestHandler", Func, 21},
+ },
+ "text/scanner": {
+ {"(*Position).IsValid", Method, 0},
+ {"(*Scanner).Init", Method, 0},
+ {"(*Scanner).IsValid", Method, 0},
+ {"(*Scanner).Next", Method, 0},
+ {"(*Scanner).Peek", Method, 0},
+ {"(*Scanner).Pos", Method, 0},
+ {"(*Scanner).Scan", Method, 0},
+ {"(*Scanner).TokenText", Method, 0},
+ {"(Position).String", Method, 0},
+ {"(Scanner).String", Method, 0},
+ {"Char", Const, 0},
+ {"Comment", Const, 0},
+ {"EOF", Const, 0},
+ {"Float", Const, 0},
+ {"GoTokens", Const, 0},
+ {"GoWhitespace", Const, 0},
+ {"Ident", Const, 0},
+ {"Int", Const, 0},
+ {"Position", Type, 0},
+ {"Position.Column", Field, 0},
+ {"Position.Filename", Field, 0},
+ {"Position.Line", Field, 0},
+ {"Position.Offset", Field, 0},
+ {"RawString", Const, 0},
+ {"ScanChars", Const, 0},
+ {"ScanComments", Const, 0},
+ {"ScanFloats", Const, 0},
+ {"ScanIdents", Const, 0},
+ {"ScanInts", Const, 0},
+ {"ScanRawStrings", Const, 0},
+ {"ScanStrings", Const, 0},
+ {"Scanner", Type, 0},
+ {"Scanner.Error", Field, 0},
+ {"Scanner.ErrorCount", Field, 0},
+ {"Scanner.IsIdentRune", Field, 4},
+ {"Scanner.Mode", Field, 0},
+ {"Scanner.Position", Field, 0},
+ {"Scanner.Whitespace", Field, 0},
+ {"SkipComments", Const, 0},
+ {"String", Const, 0},
+ {"TokenString", Func, 0},
+ },
+ "text/tabwriter": {
+ {"(*Writer).Flush", Method, 0},
+ {"(*Writer).Init", Method, 0},
+ {"(*Writer).Write", Method, 0},
+ {"AlignRight", Const, 0},
+ {"Debug", Const, 0},
+ {"DiscardEmptyColumns", Const, 0},
+ {"Escape", Const, 0},
+ {"FilterHTML", Const, 0},
+ {"NewWriter", Func, 0},
+ {"StripEscape", Const, 0},
+ {"TabIndent", Const, 0},
+ {"Writer", Type, 0},
+ },
+ "text/template": {
+ {"(*Template).AddParseTree", Method, 0},
+ {"(*Template).Clone", Method, 0},
+ {"(*Template).DefinedTemplates", Method, 5},
+ {"(*Template).Delims", Method, 0},
+ {"(*Template).Execute", Method, 0},
+ {"(*Template).ExecuteTemplate", Method, 0},
+ {"(*Template).Funcs", Method, 0},
+ {"(*Template).Lookup", Method, 0},
+ {"(*Template).Name", Method, 0},
+ {"(*Template).New", Method, 0},
+ {"(*Template).Option", Method, 5},
+ {"(*Template).Parse", Method, 0},
+ {"(*Template).ParseFS", Method, 16},
+ {"(*Template).ParseFiles", Method, 0},
+ {"(*Template).ParseGlob", Method, 0},
+ {"(*Template).Templates", Method, 0},
+ {"(ExecError).Error", Method, 6},
+ {"(ExecError).Unwrap", Method, 13},
+ {"(Template).Copy", Method, 2},
+ {"(Template).ErrorContext", Method, 1},
+ {"ExecError", Type, 6},
+ {"ExecError.Err", Field, 6},
+ {"ExecError.Name", Field, 6},
+ {"FuncMap", Type, 0},
+ {"HTMLEscape", Func, 0},
+ {"HTMLEscapeString", Func, 0},
+ {"HTMLEscaper", Func, 0},
+ {"IsTrue", Func, 6},
+ {"JSEscape", Func, 0},
+ {"JSEscapeString", Func, 0},
+ {"JSEscaper", Func, 0},
+ {"Must", Func, 0},
+ {"New", Func, 0},
+ {"ParseFS", Func, 16},
+ {"ParseFiles", Func, 0},
+ {"ParseGlob", Func, 0},
+ {"Template", Type, 0},
+ {"Template.Tree", Field, 0},
+ {"URLQueryEscaper", Func, 0},
+ },
+ "text/template/parse": {
+ {"(*ActionNode).Copy", Method, 0},
+ {"(*ActionNode).String", Method, 0},
+ {"(*BoolNode).Copy", Method, 0},
+ {"(*BoolNode).String", Method, 0},
+ {"(*BranchNode).Copy", Method, 4},
+ {"(*BranchNode).String", Method, 0},
+ {"(*BreakNode).Copy", Method, 18},
+ {"(*BreakNode).String", Method, 18},
+ {"(*ChainNode).Add", Method, 1},
+ {"(*ChainNode).Copy", Method, 1},
+ {"(*ChainNode).String", Method, 1},
+ {"(*CommandNode).Copy", Method, 0},
+ {"(*CommandNode).String", Method, 0},
+ {"(*CommentNode).Copy", Method, 16},
+ {"(*CommentNode).String", Method, 16},
+ {"(*ContinueNode).Copy", Method, 18},
+ {"(*ContinueNode).String", Method, 18},
+ {"(*DotNode).Copy", Method, 0},
+ {"(*DotNode).String", Method, 0},
+ {"(*DotNode).Type", Method, 0},
+ {"(*FieldNode).Copy", Method, 0},
+ {"(*FieldNode).String", Method, 0},
+ {"(*IdentifierNode).Copy", Method, 0},
+ {"(*IdentifierNode).SetPos", Method, 1},
+ {"(*IdentifierNode).SetTree", Method, 4},
+ {"(*IdentifierNode).String", Method, 0},
+ {"(*IfNode).Copy", Method, 0},
+ {"(*IfNode).String", Method, 0},
+ {"(*ListNode).Copy", Method, 0},
+ {"(*ListNode).CopyList", Method, 0},
+ {"(*ListNode).String", Method, 0},
+ {"(*NilNode).Copy", Method, 1},
+ {"(*NilNode).String", Method, 1},
+ {"(*NilNode).Type", Method, 1},
+ {"(*NumberNode).Copy", Method, 0},
+ {"(*NumberNode).String", Method, 0},
+ {"(*PipeNode).Copy", Method, 0},
+ {"(*PipeNode).CopyPipe", Method, 0},
+ {"(*PipeNode).String", Method, 0},
+ {"(*RangeNode).Copy", Method, 0},
+ {"(*RangeNode).String", Method, 0},
+ {"(*StringNode).Copy", Method, 0},
+ {"(*StringNode).String", Method, 0},
+ {"(*TemplateNode).Copy", Method, 0},
+ {"(*TemplateNode).String", Method, 0},
+ {"(*TextNode).Copy", Method, 0},
+ {"(*TextNode).String", Method, 0},
+ {"(*Tree).Copy", Method, 2},
+ {"(*Tree).ErrorContext", Method, 1},
+ {"(*Tree).Parse", Method, 0},
+ {"(*VariableNode).Copy", Method, 0},
+ {"(*VariableNode).String", Method, 0},
+ {"(*WithNode).Copy", Method, 0},
+ {"(*WithNode).String", Method, 0},
+ {"(ActionNode).Position", Method, 1},
+ {"(ActionNode).Type", Method, 0},
+ {"(BoolNode).Position", Method, 1},
+ {"(BoolNode).Type", Method, 0},
+ {"(BranchNode).Position", Method, 1},
+ {"(BranchNode).Type", Method, 0},
+ {"(BreakNode).Position", Method, 18},
+ {"(BreakNode).Type", Method, 18},
+ {"(ChainNode).Position", Method, 1},
+ {"(ChainNode).Type", Method, 1},
+ {"(CommandNode).Position", Method, 1},
+ {"(CommandNode).Type", Method, 0},
+ {"(CommentNode).Position", Method, 16},
+ {"(CommentNode).Type", Method, 16},
+ {"(ContinueNode).Position", Method, 18},
+ {"(ContinueNode).Type", Method, 18},
+ {"(DotNode).Position", Method, 1},
+ {"(FieldNode).Position", Method, 1},
+ {"(FieldNode).Type", Method, 0},
+ {"(IdentifierNode).Position", Method, 1},
+ {"(IdentifierNode).Type", Method, 0},
+ {"(IfNode).Position", Method, 1},
+ {"(IfNode).Type", Method, 0},
+ {"(ListNode).Position", Method, 1},
+ {"(ListNode).Type", Method, 0},
+ {"(NilNode).Position", Method, 1},
+ {"(NodeType).Type", Method, 0},
+ {"(NumberNode).Position", Method, 1},
+ {"(NumberNode).Type", Method, 0},
+ {"(PipeNode).Position", Method, 1},
+ {"(PipeNode).Type", Method, 0},
+ {"(Pos).Position", Method, 1},
+ {"(RangeNode).Position", Method, 1},
+ {"(RangeNode).Type", Method, 0},
+ {"(StringNode).Position", Method, 1},
+ {"(StringNode).Type", Method, 0},
+ {"(TemplateNode).Position", Method, 1},
+ {"(TemplateNode).Type", Method, 0},
+ {"(TextNode).Position", Method, 1},
+ {"(TextNode).Type", Method, 0},
+ {"(VariableNode).Position", Method, 1},
+ {"(VariableNode).Type", Method, 0},
+ {"(WithNode).Position", Method, 1},
+ {"(WithNode).Type", Method, 0},
+ {"ActionNode", Type, 0},
+ {"ActionNode.Line", Field, 0},
+ {"ActionNode.NodeType", Field, 0},
+ {"ActionNode.Pipe", Field, 0},
+ {"ActionNode.Pos", Field, 1},
+ {"BoolNode", Type, 0},
+ {"BoolNode.NodeType", Field, 0},
+ {"BoolNode.Pos", Field, 1},
+ {"BoolNode.True", Field, 0},
+ {"BranchNode", Type, 0},
+ {"BranchNode.ElseList", Field, 0},
+ {"BranchNode.Line", Field, 0},
+ {"BranchNode.List", Field, 0},
+ {"BranchNode.NodeType", Field, 0},
+ {"BranchNode.Pipe", Field, 0},
+ {"BranchNode.Pos", Field, 1},
+ {"BreakNode", Type, 18},
+ {"BreakNode.Line", Field, 18},
+ {"BreakNode.NodeType", Field, 18},
+ {"BreakNode.Pos", Field, 18},
+ {"ChainNode", Type, 1},
+ {"ChainNode.Field", Field, 1},
+ {"ChainNode.Node", Field, 1},
+ {"ChainNode.NodeType", Field, 1},
+ {"ChainNode.Pos", Field, 1},
+ {"CommandNode", Type, 0},
+ {"CommandNode.Args", Field, 0},
+ {"CommandNode.NodeType", Field, 0},
+ {"CommandNode.Pos", Field, 1},
+ {"CommentNode", Type, 16},
+ {"CommentNode.NodeType", Field, 16},
+ {"CommentNode.Pos", Field, 16},
+ {"CommentNode.Text", Field, 16},
+ {"ContinueNode", Type, 18},
+ {"ContinueNode.Line", Field, 18},
+ {"ContinueNode.NodeType", Field, 18},
+ {"ContinueNode.Pos", Field, 18},
+ {"DotNode", Type, 0},
+ {"DotNode.NodeType", Field, 4},
+ {"DotNode.Pos", Field, 1},
+ {"FieldNode", Type, 0},
+ {"FieldNode.Ident", Field, 0},
+ {"FieldNode.NodeType", Field, 0},
+ {"FieldNode.Pos", Field, 1},
+ {"IdentifierNode", Type, 0},
+ {"IdentifierNode.Ident", Field, 0},
+ {"IdentifierNode.NodeType", Field, 0},
+ {"IdentifierNode.Pos", Field, 1},
+ {"IfNode", Type, 0},
+ {"IfNode.BranchNode", Field, 0},
+ {"IsEmptyTree", Func, 0},
+ {"ListNode", Type, 0},
+ {"ListNode.NodeType", Field, 0},
+ {"ListNode.Nodes", Field, 0},
+ {"ListNode.Pos", Field, 1},
+ {"Mode", Type, 16},
+ {"New", Func, 0},
+ {"NewIdentifier", Func, 0},
+ {"NilNode", Type, 1},
+ {"NilNode.NodeType", Field, 4},
+ {"NilNode.Pos", Field, 1},
+ {"Node", Type, 0},
+ {"NodeAction", Const, 0},
+ {"NodeBool", Const, 0},
+ {"NodeBreak", Const, 18},
+ {"NodeChain", Const, 1},
+ {"NodeCommand", Const, 0},
+ {"NodeComment", Const, 16},
+ {"NodeContinue", Const, 18},
+ {"NodeDot", Const, 0},
+ {"NodeField", Const, 0},
+ {"NodeIdentifier", Const, 0},
+ {"NodeIf", Const, 0},
+ {"NodeList", Const, 0},
+ {"NodeNil", Const, 1},
+ {"NodeNumber", Const, 0},
+ {"NodePipe", Const, 0},
+ {"NodeRange", Const, 0},
+ {"NodeString", Const, 0},
+ {"NodeTemplate", Const, 0},
+ {"NodeText", Const, 0},
+ {"NodeType", Type, 0},
+ {"NodeVariable", Const, 0},
+ {"NodeWith", Const, 0},
+ {"NumberNode", Type, 0},
+ {"NumberNode.Complex128", Field, 0},
+ {"NumberNode.Float64", Field, 0},
+ {"NumberNode.Int64", Field, 0},
+ {"NumberNode.IsComplex", Field, 0},
+ {"NumberNode.IsFloat", Field, 0},
+ {"NumberNode.IsInt", Field, 0},
+ {"NumberNode.IsUint", Field, 0},
+ {"NumberNode.NodeType", Field, 0},
+ {"NumberNode.Pos", Field, 1},
+ {"NumberNode.Text", Field, 0},
+ {"NumberNode.Uint64", Field, 0},
+ {"Parse", Func, 0},
+ {"ParseComments", Const, 16},
+ {"PipeNode", Type, 0},
+ {"PipeNode.Cmds", Field, 0},
+ {"PipeNode.Decl", Field, 0},
+ {"PipeNode.IsAssign", Field, 11},
+ {"PipeNode.Line", Field, 0},
+ {"PipeNode.NodeType", Field, 0},
+ {"PipeNode.Pos", Field, 1},
+ {"Pos", Type, 1},
+ {"RangeNode", Type, 0},
+ {"RangeNode.BranchNode", Field, 0},
+ {"SkipFuncCheck", Const, 17},
+ {"StringNode", Type, 0},
+ {"StringNode.NodeType", Field, 0},
+ {"StringNode.Pos", Field, 1},
+ {"StringNode.Quoted", Field, 0},
+ {"StringNode.Text", Field, 0},
+ {"TemplateNode", Type, 0},
+ {"TemplateNode.Line", Field, 0},
+ {"TemplateNode.Name", Field, 0},
+ {"TemplateNode.NodeType", Field, 0},
+ {"TemplateNode.Pipe", Field, 0},
+ {"TemplateNode.Pos", Field, 1},
+ {"TextNode", Type, 0},
+ {"TextNode.NodeType", Field, 0},
+ {"TextNode.Pos", Field, 1},
+ {"TextNode.Text", Field, 0},
+ {"Tree", Type, 0},
+ {"Tree.Mode", Field, 16},
+ {"Tree.Name", Field, 0},
+ {"Tree.ParseName", Field, 1},
+ {"Tree.Root", Field, 0},
+ {"VariableNode", Type, 0},
+ {"VariableNode.Ident", Field, 0},
+ {"VariableNode.NodeType", Field, 0},
+ {"VariableNode.Pos", Field, 1},
+ {"WithNode", Type, 0},
+ {"WithNode.BranchNode", Field, 0},
+ },
+ "time": {
+ {"(*Location).String", Method, 0},
+ {"(*ParseError).Error", Method, 0},
+ {"(*Ticker).Reset", Method, 15},
+ {"(*Ticker).Stop", Method, 0},
+ {"(*Time).GobDecode", Method, 0},
+ {"(*Time).UnmarshalBinary", Method, 2},
+ {"(*Time).UnmarshalJSON", Method, 0},
+ {"(*Time).UnmarshalText", Method, 2},
+ {"(*Timer).Reset", Method, 1},
+ {"(*Timer).Stop", Method, 0},
+ {"(Duration).Abs", Method, 19},
+ {"(Duration).Hours", Method, 0},
+ {"(Duration).Microseconds", Method, 13},
+ {"(Duration).Milliseconds", Method, 13},
+ {"(Duration).Minutes", Method, 0},
+ {"(Duration).Nanoseconds", Method, 0},
+ {"(Duration).Round", Method, 9},
+ {"(Duration).Seconds", Method, 0},
+ {"(Duration).String", Method, 0},
+ {"(Duration).Truncate", Method, 9},
+ {"(Month).String", Method, 0},
+ {"(Time).Add", Method, 0},
+ {"(Time).AddDate", Method, 0},
+ {"(Time).After", Method, 0},
+ {"(Time).AppendFormat", Method, 5},
+ {"(Time).Before", Method, 0},
+ {"(Time).Clock", Method, 0},
+ {"(Time).Compare", Method, 20},
+ {"(Time).Date", Method, 0},
+ {"(Time).Day", Method, 0},
+ {"(Time).Equal", Method, 0},
+ {"(Time).Format", Method, 0},
+ {"(Time).GoString", Method, 17},
+ {"(Time).GobEncode", Method, 0},
+ {"(Time).Hour", Method, 0},
+ {"(Time).ISOWeek", Method, 0},
+ {"(Time).In", Method, 0},
+ {"(Time).IsDST", Method, 17},
+ {"(Time).IsZero", Method, 0},
+ {"(Time).Local", Method, 0},
+ {"(Time).Location", Method, 0},
+ {"(Time).MarshalBinary", Method, 2},
+ {"(Time).MarshalJSON", Method, 0},
+ {"(Time).MarshalText", Method, 2},
+ {"(Time).Minute", Method, 0},
+ {"(Time).Month", Method, 0},
+ {"(Time).Nanosecond", Method, 0},
+ {"(Time).Round", Method, 1},
+ {"(Time).Second", Method, 0},
+ {"(Time).String", Method, 0},
+ {"(Time).Sub", Method, 0},
+ {"(Time).Truncate", Method, 1},
+ {"(Time).UTC", Method, 0},
+ {"(Time).Unix", Method, 0},
+ {"(Time).UnixMicro", Method, 17},
+ {"(Time).UnixMilli", Method, 17},
+ {"(Time).UnixNano", Method, 0},
+ {"(Time).Weekday", Method, 0},
+ {"(Time).Year", Method, 0},
+ {"(Time).YearDay", Method, 1},
+ {"(Time).Zone", Method, 0},
+ {"(Time).ZoneBounds", Method, 19},
+ {"(Weekday).String", Method, 0},
+ {"ANSIC", Const, 0},
+ {"After", Func, 0},
+ {"AfterFunc", Func, 0},
+ {"April", Const, 0},
+ {"August", Const, 0},
+ {"Date", Func, 0},
+ {"DateOnly", Const, 20},
+ {"DateTime", Const, 20},
+ {"December", Const, 0},
+ {"Duration", Type, 0},
+ {"February", Const, 0},
+ {"FixedZone", Func, 0},
+ {"Friday", Const, 0},
+ {"Hour", Const, 0},
+ {"January", Const, 0},
+ {"July", Const, 0},
+ {"June", Const, 0},
+ {"Kitchen", Const, 0},
+ {"Layout", Const, 17},
+ {"LoadLocation", Func, 0},
+ {"LoadLocationFromTZData", Func, 10},
+ {"Local", Var, 0},
+ {"Location", Type, 0},
+ {"March", Const, 0},
+ {"May", Const, 0},
+ {"Microsecond", Const, 0},
+ {"Millisecond", Const, 0},
+ {"Minute", Const, 0},
+ {"Monday", Const, 0},
+ {"Month", Type, 0},
+ {"Nanosecond", Const, 0},
+ {"NewTicker", Func, 0},
+ {"NewTimer", Func, 0},
+ {"November", Const, 0},
+ {"Now", Func, 0},
+ {"October", Const, 0},
+ {"Parse", Func, 0},
+ {"ParseDuration", Func, 0},
+ {"ParseError", Type, 0},
+ {"ParseError.Layout", Field, 0},
+ {"ParseError.LayoutElem", Field, 0},
+ {"ParseError.Message", Field, 0},
+ {"ParseError.Value", Field, 0},
+ {"ParseError.ValueElem", Field, 0},
+ {"ParseInLocation", Func, 1},
+ {"RFC1123", Const, 0},
+ {"RFC1123Z", Const, 0},
+ {"RFC3339", Const, 0},
+ {"RFC3339Nano", Const, 0},
+ {"RFC822", Const, 0},
+ {"RFC822Z", Const, 0},
+ {"RFC850", Const, 0},
+ {"RubyDate", Const, 0},
+ {"Saturday", Const, 0},
+ {"Second", Const, 0},
+ {"September", Const, 0},
+ {"Since", Func, 0},
+ {"Sleep", Func, 0},
+ {"Stamp", Const, 0},
+ {"StampMicro", Const, 0},
+ {"StampMilli", Const, 0},
+ {"StampNano", Const, 0},
+ {"Sunday", Const, 0},
+ {"Thursday", Const, 0},
+ {"Tick", Func, 0},
+ {"Ticker", Type, 0},
+ {"Ticker.C", Field, 0},
+ {"Time", Type, 0},
+ {"TimeOnly", Const, 20},
+ {"Timer", Type, 0},
+ {"Timer.C", Field, 0},
+ {"Tuesday", Const, 0},
+ {"UTC", Var, 0},
+ {"Unix", Func, 0},
+ {"UnixDate", Const, 0},
+ {"UnixMicro", Func, 17},
+ {"UnixMilli", Func, 17},
+ {"Until", Func, 8},
+ {"Wednesday", Const, 0},
+ {"Weekday", Type, 0},
+ },
+ "unicode": {
+ {"(SpecialCase).ToLower", Method, 0},
+ {"(SpecialCase).ToTitle", Method, 0},
+ {"(SpecialCase).ToUpper", Method, 0},
+ {"ASCII_Hex_Digit", Var, 0},
+ {"Adlam", Var, 7},
+ {"Ahom", Var, 5},
+ {"Anatolian_Hieroglyphs", Var, 5},
+ {"Arabic", Var, 0},
+ {"Armenian", Var, 0},
+ {"Avestan", Var, 0},
+ {"AzeriCase", Var, 0},
+ {"Balinese", Var, 0},
+ {"Bamum", Var, 0},
+ {"Bassa_Vah", Var, 4},
+ {"Batak", Var, 0},
+ {"Bengali", Var, 0},
+ {"Bhaiksuki", Var, 7},
+ {"Bidi_Control", Var, 0},
+ {"Bopomofo", Var, 0},
+ {"Brahmi", Var, 0},
+ {"Braille", Var, 0},
+ {"Buginese", Var, 0},
+ {"Buhid", Var, 0},
+ {"C", Var, 0},
+ {"Canadian_Aboriginal", Var, 0},
+ {"Carian", Var, 0},
+ {"CaseRange", Type, 0},
+ {"CaseRange.Delta", Field, 0},
+ {"CaseRange.Hi", Field, 0},
+ {"CaseRange.Lo", Field, 0},
+ {"CaseRanges", Var, 0},
+ {"Categories", Var, 0},
+ {"Caucasian_Albanian", Var, 4},
+ {"Cc", Var, 0},
+ {"Cf", Var, 0},
+ {"Chakma", Var, 1},
+ {"Cham", Var, 0},
+ {"Cherokee", Var, 0},
+ {"Chorasmian", Var, 16},
+ {"Co", Var, 0},
+ {"Common", Var, 0},
+ {"Coptic", Var, 0},
+ {"Cs", Var, 0},
+ {"Cuneiform", Var, 0},
+ {"Cypriot", Var, 0},
+ {"Cypro_Minoan", Var, 21},
+ {"Cyrillic", Var, 0},
+ {"Dash", Var, 0},
+ {"Deprecated", Var, 0},
+ {"Deseret", Var, 0},
+ {"Devanagari", Var, 0},
+ {"Diacritic", Var, 0},
+ {"Digit", Var, 0},
+ {"Dives_Akuru", Var, 16},
+ {"Dogra", Var, 13},
+ {"Duployan", Var, 4},
+ {"Egyptian_Hieroglyphs", Var, 0},
+ {"Elbasan", Var, 4},
+ {"Elymaic", Var, 14},
+ {"Ethiopic", Var, 0},
+ {"Extender", Var, 0},
+ {"FoldCategory", Var, 0},
+ {"FoldScript", Var, 0},
+ {"Georgian", Var, 0},
+ {"Glagolitic", Var, 0},
+ {"Gothic", Var, 0},
+ {"Grantha", Var, 4},
+ {"GraphicRanges", Var, 0},
+ {"Greek", Var, 0},
+ {"Gujarati", Var, 0},
+ {"Gunjala_Gondi", Var, 13},
+ {"Gurmukhi", Var, 0},
+ {"Han", Var, 0},
+ {"Hangul", Var, 0},
+ {"Hanifi_Rohingya", Var, 13},
+ {"Hanunoo", Var, 0},
+ {"Hatran", Var, 5},
+ {"Hebrew", Var, 0},
+ {"Hex_Digit", Var, 0},
+ {"Hiragana", Var, 0},
+ {"Hyphen", Var, 0},
+ {"IDS_Binary_Operator", Var, 0},
+ {"IDS_Trinary_Operator", Var, 0},
+ {"Ideographic", Var, 0},
+ {"Imperial_Aramaic", Var, 0},
+ {"In", Func, 2},
+ {"Inherited", Var, 0},
+ {"Inscriptional_Pahlavi", Var, 0},
+ {"Inscriptional_Parthian", Var, 0},
+ {"Is", Func, 0},
+ {"IsControl", Func, 0},
+ {"IsDigit", Func, 0},
+ {"IsGraphic", Func, 0},
+ {"IsLetter", Func, 0},
+ {"IsLower", Func, 0},
+ {"IsMark", Func, 0},
+ {"IsNumber", Func, 0},
+ {"IsOneOf", Func, 0},
+ {"IsPrint", Func, 0},
+ {"IsPunct", Func, 0},
+ {"IsSpace", Func, 0},
+ {"IsSymbol", Func, 0},
+ {"IsTitle", Func, 0},
+ {"IsUpper", Func, 0},
+ {"Javanese", Var, 0},
+ {"Join_Control", Var, 0},
+ {"Kaithi", Var, 0},
+ {"Kannada", Var, 0},
+ {"Katakana", Var, 0},
+ {"Kawi", Var, 21},
+ {"Kayah_Li", Var, 0},
+ {"Kharoshthi", Var, 0},
+ {"Khitan_Small_Script", Var, 16},
+ {"Khmer", Var, 0},
+ {"Khojki", Var, 4},
+ {"Khudawadi", Var, 4},
+ {"L", Var, 0},
+ {"Lao", Var, 0},
+ {"Latin", Var, 0},
+ {"Lepcha", Var, 0},
+ {"Letter", Var, 0},
+ {"Limbu", Var, 0},
+ {"Linear_A", Var, 4},
+ {"Linear_B", Var, 0},
+ {"Lisu", Var, 0},
+ {"Ll", Var, 0},
+ {"Lm", Var, 0},
+ {"Lo", Var, 0},
+ {"Logical_Order_Exception", Var, 0},
+ {"Lower", Var, 0},
+ {"LowerCase", Const, 0},
+ {"Lt", Var, 0},
+ {"Lu", Var, 0},
+ {"Lycian", Var, 0},
+ {"Lydian", Var, 0},
+ {"M", Var, 0},
+ {"Mahajani", Var, 4},
+ {"Makasar", Var, 13},
+ {"Malayalam", Var, 0},
+ {"Mandaic", Var, 0},
+ {"Manichaean", Var, 4},
+ {"Marchen", Var, 7},
+ {"Mark", Var, 0},
+ {"Masaram_Gondi", Var, 10},
+ {"MaxASCII", Const, 0},
+ {"MaxCase", Const, 0},
+ {"MaxLatin1", Const, 0},
+ {"MaxRune", Const, 0},
+ {"Mc", Var, 0},
+ {"Me", Var, 0},
+ {"Medefaidrin", Var, 13},
+ {"Meetei_Mayek", Var, 0},
+ {"Mende_Kikakui", Var, 4},
+ {"Meroitic_Cursive", Var, 1},
+ {"Meroitic_Hieroglyphs", Var, 1},
+ {"Miao", Var, 1},
+ {"Mn", Var, 0},
+ {"Modi", Var, 4},
+ {"Mongolian", Var, 0},
+ {"Mro", Var, 4},
+ {"Multani", Var, 5},
+ {"Myanmar", Var, 0},
+ {"N", Var, 0},
+ {"Nabataean", Var, 4},
+ {"Nag_Mundari", Var, 21},
+ {"Nandinagari", Var, 14},
+ {"Nd", Var, 0},
+ {"New_Tai_Lue", Var, 0},
+ {"Newa", Var, 7},
+ {"Nko", Var, 0},
+ {"Nl", Var, 0},
+ {"No", Var, 0},
+ {"Noncharacter_Code_Point", Var, 0},
+ {"Number", Var, 0},
+ {"Nushu", Var, 10},
+ {"Nyiakeng_Puachue_Hmong", Var, 14},
+ {"Ogham", Var, 0},
+ {"Ol_Chiki", Var, 0},
+ {"Old_Hungarian", Var, 5},
+ {"Old_Italic", Var, 0},
+ {"Old_North_Arabian", Var, 4},
+ {"Old_Permic", Var, 4},
+ {"Old_Persian", Var, 0},
+ {"Old_Sogdian", Var, 13},
+ {"Old_South_Arabian", Var, 0},
+ {"Old_Turkic", Var, 0},
+ {"Old_Uyghur", Var, 21},
+ {"Oriya", Var, 0},
+ {"Osage", Var, 7},
+ {"Osmanya", Var, 0},
+ {"Other", Var, 0},
+ {"Other_Alphabetic", Var, 0},
+ {"Other_Default_Ignorable_Code_Point", Var, 0},
+ {"Other_Grapheme_Extend", Var, 0},
+ {"Other_ID_Continue", Var, 0},
+ {"Other_ID_Start", Var, 0},
+ {"Other_Lowercase", Var, 0},
+ {"Other_Math", Var, 0},
+ {"Other_Uppercase", Var, 0},
+ {"P", Var, 0},
+ {"Pahawh_Hmong", Var, 4},
+ {"Palmyrene", Var, 4},
+ {"Pattern_Syntax", Var, 0},
+ {"Pattern_White_Space", Var, 0},
+ {"Pau_Cin_Hau", Var, 4},
+ {"Pc", Var, 0},
+ {"Pd", Var, 0},
+ {"Pe", Var, 0},
+ {"Pf", Var, 0},
+ {"Phags_Pa", Var, 0},
+ {"Phoenician", Var, 0},
+ {"Pi", Var, 0},
+ {"Po", Var, 0},
+ {"Prepended_Concatenation_Mark", Var, 7},
+ {"PrintRanges", Var, 0},
+ {"Properties", Var, 0},
+ {"Ps", Var, 0},
+ {"Psalter_Pahlavi", Var, 4},
+ {"Punct", Var, 0},
+ {"Quotation_Mark", Var, 0},
+ {"Radical", Var, 0},
+ {"Range16", Type, 0},
+ {"Range16.Hi", Field, 0},
+ {"Range16.Lo", Field, 0},
+ {"Range16.Stride", Field, 0},
+ {"Range32", Type, 0},
+ {"Range32.Hi", Field, 0},
+ {"Range32.Lo", Field, 0},
+ {"Range32.Stride", Field, 0},
+ {"RangeTable", Type, 0},
+ {"RangeTable.LatinOffset", Field, 1},
+ {"RangeTable.R16", Field, 0},
+ {"RangeTable.R32", Field, 0},
+ {"Regional_Indicator", Var, 10},
+ {"Rejang", Var, 0},
+ {"ReplacementChar", Const, 0},
+ {"Runic", Var, 0},
+ {"S", Var, 0},
+ {"STerm", Var, 0},
+ {"Samaritan", Var, 0},
+ {"Saurashtra", Var, 0},
+ {"Sc", Var, 0},
+ {"Scripts", Var, 0},
+ {"Sentence_Terminal", Var, 7},
+ {"Sharada", Var, 1},
+ {"Shavian", Var, 0},
+ {"Siddham", Var, 4},
+ {"SignWriting", Var, 5},
+ {"SimpleFold", Func, 0},
+ {"Sinhala", Var, 0},
+ {"Sk", Var, 0},
+ {"Sm", Var, 0},
+ {"So", Var, 0},
+ {"Soft_Dotted", Var, 0},
+ {"Sogdian", Var, 13},
+ {"Sora_Sompeng", Var, 1},
+ {"Soyombo", Var, 10},
+ {"Space", Var, 0},
+ {"SpecialCase", Type, 0},
+ {"Sundanese", Var, 0},
+ {"Syloti_Nagri", Var, 0},
+ {"Symbol", Var, 0},
+ {"Syriac", Var, 0},
+ {"Tagalog", Var, 0},
+ {"Tagbanwa", Var, 0},
+ {"Tai_Le", Var, 0},
+ {"Tai_Tham", Var, 0},
+ {"Tai_Viet", Var, 0},
+ {"Takri", Var, 1},
+ {"Tamil", Var, 0},
+ {"Tangsa", Var, 21},
+ {"Tangut", Var, 7},
+ {"Telugu", Var, 0},
+ {"Terminal_Punctuation", Var, 0},
+ {"Thaana", Var, 0},
+ {"Thai", Var, 0},
+ {"Tibetan", Var, 0},
+ {"Tifinagh", Var, 0},
+ {"Tirhuta", Var, 4},
+ {"Title", Var, 0},
+ {"TitleCase", Const, 0},
+ {"To", Func, 0},
+ {"ToLower", Func, 0},
+ {"ToTitle", Func, 0},
+ {"ToUpper", Func, 0},
+ {"Toto", Var, 21},
+ {"TurkishCase", Var, 0},
+ {"Ugaritic", Var, 0},
+ {"Unified_Ideograph", Var, 0},
+ {"Upper", Var, 0},
+ {"UpperCase", Const, 0},
+ {"UpperLower", Const, 0},
+ {"Vai", Var, 0},
+ {"Variation_Selector", Var, 0},
+ {"Version", Const, 0},
+ {"Vithkuqi", Var, 21},
+ {"Wancho", Var, 14},
+ {"Warang_Citi", Var, 4},
+ {"White_Space", Var, 0},
+ {"Yezidi", Var, 16},
+ {"Yi", Var, 0},
+ {"Z", Var, 0},
+ {"Zanabazar_Square", Var, 10},
+ {"Zl", Var, 0},
+ {"Zp", Var, 0},
+ {"Zs", Var, 0},
+ },
+ "unicode/utf16": {
+ {"AppendRune", Func, 20},
+ {"Decode", Func, 0},
+ {"DecodeRune", Func, 0},
+ {"Encode", Func, 0},
+ {"EncodeRune", Func, 0},
+ {"IsSurrogate", Func, 0},
+ {"RuneLen", Func, 23},
+ },
+ "unicode/utf8": {
+ {"AppendRune", Func, 18},
+ {"DecodeLastRune", Func, 0},
+ {"DecodeLastRuneInString", Func, 0},
+ {"DecodeRune", Func, 0},
+ {"DecodeRuneInString", Func, 0},
+ {"EncodeRune", Func, 0},
+ {"FullRune", Func, 0},
+ {"FullRuneInString", Func, 0},
+ {"MaxRune", Const, 0},
+ {"RuneCount", Func, 0},
+ {"RuneCountInString", Func, 0},
+ {"RuneError", Const, 0},
+ {"RuneLen", Func, 0},
+ {"RuneSelf", Const, 0},
+ {"RuneStart", Func, 0},
+ {"UTFMax", Const, 0},
+ {"Valid", Func, 0},
+ {"ValidRune", Func, 1},
+ {"ValidString", Func, 0},
+ },
+ "unique": {
+ {"(Handle).Value", Method, 23},
+ {"Handle", Type, 23},
+ {"Make", Func, 23},
+ },
+ "unsafe": {
+ {"Add", Func, 0},
+ {"Alignof", Func, 0},
+ {"Offsetof", Func, 0},
+ {"Pointer", Type, 0},
+ {"Sizeof", Func, 0},
+ {"Slice", Func, 0},
+ {"SliceData", Func, 0},
+ {"String", Func, 0},
+ {"StringData", Func, 0},
+ },
+}
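The table closed above (PackageSymbols) maps each standard-library import path to its exported symbols; the third field of every entry is the minor Go version that first shipped the symbol, with 0 meaning go1.0 or earlier. The Symbol, Kind, and Version types it relies on are defined in stdlib.go, added in the next file of this diff. As a rough illustration only — the package lives under internal/, so it can be imported only from within the golang.org/x/tools module — a lookup against the table might read:

package main

import (
	"fmt"

	"golang.org/x/tools/internal/stdlib"
)

func main() {
	// Find when time.DateOnly first appeared; the manifest entry is
	// {"DateOnly", Const, 20}, i.e. a constant introduced in go1.20.
	for _, sym := range stdlib.PackageSymbols["time"] {
		if sym.Name == "DateOnly" {
			fmt.Printf("%s %s: since %s\n", sym.Kind, sym.Name, sym.Version)
			// Output: const DateOnly: since go1.20
		}
	}
}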
diff --git a/vendor/golang.org/x/tools/internal/stdlib/stdlib.go b/vendor/golang.org/x/tools/internal/stdlib/stdlib.go
new file mode 100644
index 0000000..9890401
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/stdlib/stdlib.go
@@ -0,0 +1,97 @@
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:generate go run generate.go
+
+// Package stdlib provides a table of all exported symbols in the
+// standard library, along with the version at which they first
+// appeared.
+package stdlib
+
+import (
+ "fmt"
+ "strings"
+)
+
+type Symbol struct {
+ Name string
+ Kind Kind
+ Version Version // Go version that first included the symbol
+}
+
+// A Kind indicates the kind of a symbol:
+// function, variable, constant, type, and so on.
+type Kind int8
+
+const (
+ Invalid Kind = iota // Example name:
+ Type // "Buffer"
+ Func // "Println"
+ Var // "EOF"
+ Const // "Pi"
+ Field // "Point.X"
+ Method // "(*Buffer).Grow"
+)
+
+func (kind Kind) String() string {
+ return [...]string{
+ Invalid: "invalid",
+ Type: "type",
+ Func: "func",
+ Var: "var",
+ Const: "const",
+ Field: "field",
+ Method: "method",
+ }[kind]
+}
+
+// A Version represents a version of Go of the form "go1.%d".
+type Version int8
+
+// String returns a version string of the form "go1.23", without allocating.
+func (v Version) String() string { return versions[v] }
+
+var versions [30]string // (increase constant as needed)
+
+func init() {
+ for i := range versions {
+ versions[i] = fmt.Sprintf("go1.%d", i)
+ }
+}
+
+// HasPackage reports whether the specified package path is part of
+// the standard library's public API.
+func HasPackage(path string) bool {
+ _, ok := PackageSymbols[path]
+ return ok
+}
+
+// SplitField splits the field symbol name into type and field
+// components. It must be called only on Field symbols.
+//
+// Example: "File.Package" -> ("File", "Package")
+func (sym *Symbol) SplitField() (typename, name string) {
+ if sym.Kind != Field {
+ panic("not a field")
+ }
+ typename, name, _ = strings.Cut(sym.Name, ".")
+ return
+}
+
+// SplitMethod splits the method symbol name into pointer, receiver,
+// and method components. It must be called only on Method symbols.
+//
+// Example: "(*Buffer).Grow" -> (true, "Buffer", "Grow")
+func (sym *Symbol) SplitMethod() (ptr bool, recv, name string) {
+ if sym.Kind != Method {
+ panic("not a method")
+ }
+ recv, name, _ = strings.Cut(sym.Name, ".")
+ recv = recv[len("(") : len(recv)-len(")")]
+ ptr = recv[0] == '*'
+ if ptr {
+ recv = recv[len("*"):]
+ }
+ return
+}
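The SplitField and SplitMethod helpers above simply cut the dotted symbol name apart. A minimal sketch of their behaviour, reusing the hypothetical names from the doc comments rather than real manifest entries:

package main

import (
	"fmt"

	"golang.org/x/tools/internal/stdlib"
)

func main() {
	// Mirrors the doc-comment examples above; the names are illustrative.
	m := stdlib.Symbol{Name: "(*Buffer).Grow", Kind: stdlib.Method}
	ptr, recv, name := m.SplitMethod()
	fmt.Println(ptr, recv, name) // true Buffer Grow

	f := stdlib.Symbol{Name: "File.Package", Kind: stdlib.Field}
	typename, field := f.SplitField()
	fmt.Println(typename, field) // File Package
}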
diff --git a/vendor/golang.org/x/tools/internal/tokeninternal/tokeninternal.go b/vendor/golang.org/x/tools/internal/tokeninternal/tokeninternal.go
new file mode 100644
index 0000000..ff9437a
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/tokeninternal/tokeninternal.go
@@ -0,0 +1,137 @@
+// Copyright 2023 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package tokeninternal provides access to some internal features of the token
+// package.
+package tokeninternal
+
+import (
+ "fmt"
+ "go/token"
+ "sort"
+ "sync"
+ "unsafe"
+)
+
+// GetLines returns the table of line-start offsets from a token.File.
+func GetLines(file *token.File) []int {
+ // token.File has a Lines method on Go 1.21 and later.
+ if file, ok := (interface{})(file).(interface{ Lines() []int }); ok {
+ return file.Lines()
+ }
+
+ // This declaration must match that of token.File.
+ // This creates a risk of dependency skew.
+ // For now we check that the size of the two
+ // declarations is the same, on the (fragile) assumption
+ // that future changes would add fields.
+ type tokenFile119 struct {
+ _ string
+ _ int
+ _ int
+ mu sync.Mutex // we're not complete monsters
+ lines []int
+ _ []struct{}
+ }
+
+ if unsafe.Sizeof(*file) != unsafe.Sizeof(tokenFile119{}) {
+ panic("unexpected token.File size")
+ }
+ var ptr *tokenFile119
+ type uP = unsafe.Pointer
+ *(*uP)(uP(&ptr)) = uP(file)
+ ptr.mu.Lock()
+ defer ptr.mu.Unlock()
+ return ptr.lines
+}
+
+// AddExistingFiles adds the specified files to the FileSet if they
+// are not already present. It panics if any pair of files in the
+// resulting FileSet would overlap.
+func AddExistingFiles(fset *token.FileSet, files []*token.File) {
+ // Punch through the FileSet encapsulation.
+ type tokenFileSet struct {
+ // This type remained essentially consistent from go1.16 to go1.21.
+ mutex sync.RWMutex
+ base int
+ files []*token.File
+ _ *token.File // changed to atomic.Pointer[token.File] in go1.19
+ }
+
+ // If the size of token.FileSet changes, this will fail to compile.
+ const delta = int64(unsafe.Sizeof(tokenFileSet{})) - int64(unsafe.Sizeof(token.FileSet{}))
+ var _ [-delta * delta]int
+
+ type uP = unsafe.Pointer
+ var ptr *tokenFileSet
+ *(*uP)(uP(&ptr)) = uP(fset)
+ ptr.mutex.Lock()
+ defer ptr.mutex.Unlock()
+
+ // Merge and sort.
+ newFiles := append(ptr.files, files...)
+ sort.Slice(newFiles, func(i, j int) bool {
+ return newFiles[i].Base() < newFiles[j].Base()
+ })
+
+ // Reject overlapping files.
+ // Discard adjacent identical files.
+ out := newFiles[:0]
+ for i, file := range newFiles {
+ if i > 0 {
+ prev := newFiles[i-1]
+ if file == prev {
+ continue
+ }
+ if prev.Base()+prev.Size()+1 > file.Base() {
+ panic(fmt.Sprintf("file %s (%d-%d) overlaps with file %s (%d-%d)",
+ prev.Name(), prev.Base(), prev.Base()+prev.Size(),
+ file.Name(), file.Base(), file.Base()+file.Size()))
+ }
+ }
+ out = append(out, file)
+ }
+ newFiles = out
+
+ ptr.files = newFiles
+
+ // Advance FileSet.Base().
+ if len(newFiles) > 0 {
+ last := newFiles[len(newFiles)-1]
+ newBase := last.Base() + last.Size() + 1
+ if ptr.base < newBase {
+ ptr.base = newBase
+ }
+ }
+}
+
+// FileSetFor returns a new FileSet containing a sequence of new Files with
+// the same base, size, and line as the input files, for use in APIs that
+// require a FileSet.
+//
+// Precondition: the input files must be non-overlapping, and sorted in order
+// of their Base.
+func FileSetFor(files ...*token.File) *token.FileSet {
+ fset := token.NewFileSet()
+ for _, f := range files {
+ f2 := fset.AddFile(f.Name(), f.Base(), f.Size())
+ lines := GetLines(f)
+ f2.SetLines(lines)
+ }
+ return fset
+}
+
+// CloneFileSet creates a new FileSet holding all files in fset. It does not
+// create copies of the token.Files in fset: they are added to the resulting
+// FileSet unmodified.
+func CloneFileSet(fset *token.FileSet) *token.FileSet {
+ var files []*token.File
+ fset.Iterate(func(f *token.File) bool {
+ files = append(files, f)
+ return true
+ })
+ newFileSet := token.NewFileSet()
+ AddExistingFiles(newFileSet, files)
+ return newFileSet
+}
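
Aside (not part of the patch): a short sketch of the line-table helpers above, again purely illustrative since the package is internal.

package main

import (
	"fmt"
	"go/token"

	"golang.org/x/tools/internal/tokeninternal"
)

func main() {
	fset := token.NewFileSet()
	f := fset.AddFile("example.go", fset.Base(), 30)
	f.SetLines([]int{0, 10, 20}) // three lines starting at offsets 0, 10, 20

	// GetLines recovers the line-start table (via the Lines method on Go 1.21+).
	fmt.Println(tokeninternal.GetLines(f)) // [0 10 20]

	// FileSetFor rebuilds a FileSet containing a file with the same name, base,
	// size, and line table; CloneFileSet does the same for every file in fset.
	fset2 := tokeninternal.FileSetFor(f)
	clone := tokeninternal.CloneFileSet(fset)
	_, _ = fset2, clone
}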
diff --git a/vendor/golang.org/x/tools/internal/typeparams/common.go b/vendor/golang.org/x/tools/internal/typeparams/common.go
new file mode 100644
index 0000000..89bd256
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/typeparams/common.go
@@ -0,0 +1,142 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package typeparams contains common utilities for writing tools that
+// interact with generic Go code, as introduced with Go 1.18. It
+// supplements the standard library APIs. Notably, the StructuralTerms
+// API computes a minimal representation of the structural
+// restrictions on a type parameter.
+//
+// An external version of these APIs is available in the
+// golang.org/x/exp/typeparams module.
+package typeparams
+
+import (
+ "go/ast"
+ "go/token"
+ "go/types"
+
+ "golang.org/x/tools/internal/aliases"
+)
+
+// UnpackIndexExpr extracts data from AST nodes that represent index
+// expressions.
+//
+// For an ast.IndexExpr, the resulting indices slice will contain exactly one
+// index expression. For an ast.IndexListExpr (go1.18+), it may have a variable
+// number of index expressions.
+//
+// For nodes that don't represent index expressions, the first return value of
+// UnpackIndexExpr will be nil.
+func UnpackIndexExpr(n ast.Node) (x ast.Expr, lbrack token.Pos, indices []ast.Expr, rbrack token.Pos) {
+ switch e := n.(type) {
+ case *ast.IndexExpr:
+ return e.X, e.Lbrack, []ast.Expr{e.Index}, e.Rbrack
+ case *ast.IndexListExpr:
+ return e.X, e.Lbrack, e.Indices, e.Rbrack
+ }
+ return nil, token.NoPos, nil, token.NoPos
+}
+
+// PackIndexExpr returns an *ast.IndexExpr or *ast.IndexListExpr, depending on
+// the cardinality of indices. Calling PackIndexExpr with len(indices) == 0
+// will panic.
+func PackIndexExpr(x ast.Expr, lbrack token.Pos, indices []ast.Expr, rbrack token.Pos) ast.Expr {
+ switch len(indices) {
+ case 0:
+ panic("empty indices")
+ case 1:
+ return &ast.IndexExpr{
+ X: x,
+ Lbrack: lbrack,
+ Index: indices[0],
+ Rbrack: rbrack,
+ }
+ default:
+ return &ast.IndexListExpr{
+ X: x,
+ Lbrack: lbrack,
+ Indices: indices,
+ Rbrack: rbrack,
+ }
+ }
+}
+
+// IsTypeParam reports whether t is a type parameter (or an alias of one).
+func IsTypeParam(t types.Type) bool {
+ _, ok := aliases.Unalias(t).(*types.TypeParam)
+ return ok
+}
+
+// GenericAssignableTo is a generalization of types.AssignableTo that
+// implements the following rule for uninstantiated generic types:
+//
+// If V and T are generic named types, then V is considered assignable to T if,
+// for every possible instantiation of V[A_1, ..., A_N], the instantiation
+// T[A_1, ..., A_N] is valid and V[A_1, ..., A_N] implements T[A_1, ..., A_N].
+//
+// If T has structural constraints, they must be satisfied by V.
+//
+// For example, consider the following type declarations:
+//
+// type Interface[T any] interface {
+// Accept(T)
+// }
+//
+// type Container[T any] struct {
+// Element T
+// }
+//
+// func (c Container[T]) Accept(t T) { c.Element = t }
+//
+// In this case, GenericAssignableTo reports that instantiations of Container
+// are assignable to the corresponding instantiation of Interface.
+func GenericAssignableTo(ctxt *types.Context, V, T types.Type) bool {
+ V = aliases.Unalias(V)
+ T = aliases.Unalias(T)
+
+ // If V and T are not both named, or do not have matching non-empty type
+ // parameter lists, fall back on types.AssignableTo.
+
+ VN, Vnamed := V.(*types.Named)
+ TN, Tnamed := T.(*types.Named)
+ if !Vnamed || !Tnamed {
+ return types.AssignableTo(V, T)
+ }
+
+ vtparams := VN.TypeParams()
+ ttparams := TN.TypeParams()
+ if vtparams.Len() == 0 || vtparams.Len() != ttparams.Len() || VN.TypeArgs().Len() != 0 || TN.TypeArgs().Len() != 0 {
+ return types.AssignableTo(V, T)
+ }
+
+ // V and T have the same (non-zero) number of type params. Instantiate both
+ // with the type parameters of V. This must always succeed for V, and will
+ // succeed for T if and only if the type set of each type parameter of V is a
+ // subset of the type set of the corresponding type parameter of T, meaning
+ // that every instantiation of V corresponds to a valid instantiation of T.
+
+ // Minor optimization: ensure we share a context across the two
+ // instantiations below.
+ if ctxt == nil {
+ ctxt = types.NewContext()
+ }
+
+ var targs []types.Type
+ for i := 0; i < vtparams.Len(); i++ {
+ targs = append(targs, vtparams.At(i))
+ }
+
+ vinst, err := types.Instantiate(ctxt, V, targs, true)
+ if err != nil {
+ panic("type parameters should satisfy their own constraints")
+ }
+
+ tinst, err := types.Instantiate(ctxt, T, targs, true)
+ if err != nil {
+ return false
+ }
+
+ return types.AssignableTo(vinst, tinst)
+}
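
Aside (not part of the patch): a sketch that type-checks the example from the GenericAssignableTo doc comment and then asks the question directly. The internal import path is shown only for illustration.

package main

import (
	"fmt"
	"go/ast"
	"go/parser"
	"go/token"
	"go/types"

	"golang.org/x/tools/internal/typeparams"
)

const src = `package p

type Interface[T any] interface{ Accept(T) }

type Container[T any] struct{ Element T }

func (c Container[T]) Accept(t T) { c.Element = t }
`

func main() {
	fset := token.NewFileSet()
	file, err := parser.ParseFile(fset, "p.go", src, 0)
	if err != nil {
		panic(err)
	}
	// No imports in src, so a zero types.Config suffices.
	pkg, err := (&types.Config{}).Check("p", fset, []*ast.File{file}, nil)
	if err != nil {
		panic(err)
	}
	V := pkg.Scope().Lookup("Container").Type() // generic, uninstantiated
	T := pkg.Scope().Lookup("Interface").Type()
	fmt.Println(typeparams.GenericAssignableTo(nil, V, T)) // true
}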
diff --git a/vendor/golang.org/x/tools/internal/typeparams/coretype.go b/vendor/golang.org/x/tools/internal/typeparams/coretype.go
new file mode 100644
index 0000000..6e83c6f
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/typeparams/coretype.go
@@ -0,0 +1,150 @@
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package typeparams
+
+import (
+ "fmt"
+ "go/types"
+)
+
+// CoreType returns the core type of T or nil if T does not have a core type.
+//
+// See https://go.dev/ref/spec#Core_types for the definition of a core type.
+func CoreType(T types.Type) types.Type {
+ U := T.Underlying()
+ if _, ok := U.(*types.Interface); !ok {
+ return U // for non-interface types, the core type is the underlying type.
+ }
+
+ terms, err := NormalTerms(U)
+ if len(terms) == 0 || err != nil {
+ // len(terms) == 0 => the interface has an empty type set.
+ // err != nil => U is invalid, exceeds complexity bounds, or has an empty type set.
+ return nil // no core type.
+ }
+
+ U = terms[0].Type().Underlying()
+ var identical int // i in [0,identical) => Identical(U, terms[i].Type().Underlying())
+ for identical = 1; identical < len(terms); identical++ {
+ if !types.Identical(U, terms[identical].Type().Underlying()) {
+ break
+ }
+ }
+
+ if identical == len(terms) {
+ // https://go.dev/ref/spec#Core_types
+ // "There is a single type U which is the underlying type of all types in the type set of T"
+ return U
+ }
+ ch, ok := U.(*types.Chan)
+ if !ok {
+ return nil // no core type as identical < len(terms) and U is not a channel.
+ }
+ // https://go.dev/ref/spec#Core_types
+ // "the type chan E if T contains only bidirectional channels, or the type chan<- E or
+ // <-chan E depending on the direction of the directional channels present."
+ for chans := identical; chans < len(terms); chans++ {
+ curr, ok := terms[chans].Type().Underlying().(*types.Chan)
+ if !ok {
+ return nil
+ }
+ if !types.Identical(ch.Elem(), curr.Elem()) {
+ return nil // channel elements are not identical.
+ }
+ if ch.Dir() == types.SendRecv {
+ // ch is bidirectional. We can safely always use curr's direction.
+ ch = curr
+ } else if curr.Dir() != types.SendRecv && ch.Dir() != curr.Dir() {
+ // ch and curr are not bidirectional and not the same direction.
+ return nil
+ }
+ }
+ return ch
+}
+
+// NormalTerms returns a slice of terms representing the normalized structural
+// type restrictions of a type, if any.
+//
+// For all types other than *types.TypeParam, *types.Interface, and
+// *types.Union, this is just a single term with Tilde() == false and
+// Type() == typ. For *types.TypeParam, *types.Interface, and *types.Union, see
+// below.
+//
+// Structural type restrictions of a type parameter are created via
+// non-interface types embedded in its constraint interface (directly, or via a
+// chain of interface embeddings). For example, in the declaration type
+// T[P interface{~int; m()}] int the structural restriction of the type
+// parameter P is ~int.
+//
+// With interface embedding and unions, the specification of structural type
+// restrictions may be arbitrarily complex. For example, consider the
+// following:
+//
+// type A interface{ ~string|~[]byte }
+//
+// type B interface{ int|string }
+//
+// type C interface { ~string|~int }
+//
+// type T[P interface{ A|B; C }] int
+//
+// In this example, the structural type restriction of P is ~string|int: A|B
+// expands to ~string|~[]byte|int|string, which reduces to ~string|~[]byte|int,
+// which when intersected with C (~string|~int) yields ~string|int.
+//
+// NormalTerms computes these expansions and reductions, producing a
+// "normalized" form of the embeddings. A structural restriction is normalized
+// if it is a single union containing no interface terms, and is minimal in the
+// sense that removing any term changes the set of types satisfying the
+// constraint. It is left as a proof for the reader that, modulo sorting, there
+// is exactly one such normalized form.
+//
+// Because the minimal representation always takes this form, NormalTerms
+// returns a slice of tilde terms corresponding to the terms of the union in
+// the normalized structural restriction. An error is returned if the type is
+// invalid, exceeds complexity bounds, or has an empty type set. In the latter
+// case, NormalTerms returns ErrEmptyTypeSet.
+//
+// NormalTerms makes no guarantees about the order of terms, except that it
+// is deterministic.
+func NormalTerms(typ types.Type) ([]*types.Term, error) {
+ switch typ := typ.Underlying().(type) {
+ case *types.TypeParam:
+ return StructuralTerms(typ)
+ case *types.Union:
+ return UnionTermSet(typ)
+ case *types.Interface:
+ return InterfaceTermSet(typ)
+ default:
+ return []*types.Term{types.NewTerm(false, typ)}, nil
+ }
+}
+
+// Deref returns the type of the variable pointed to by t,
+// if t's core type is a pointer; otherwise it returns t.
+//
+// Do not assume that Deref(T)==T implies T is not a pointer:
+// consider "type T *T", for example.
+//
+// TODO(adonovan): ideally this would live in typesinternal, but that
+// creates an import cycle. Move there when we melt this package down.
+func Deref(t types.Type) types.Type {
+ if ptr, ok := CoreType(t).(*types.Pointer); ok {
+ return ptr.Elem()
+ }
+ return t
+}
+
+// MustDeref returns the type of the variable pointed to by t.
+// It panics if t's core type is not a pointer.
+//
+// TODO(adonovan): ideally this would live in typesinternal, but that
+// creates an import cycle. Move there when we melt this package down.
+func MustDeref(t types.Type) types.Type {
+ if ptr, ok := CoreType(t).(*types.Pointer); ok {
+ return ptr.Elem()
+ }
+ panic(fmt.Sprintf("%v is not a pointer", t))
+}
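
Aside (not part of the patch): a rough sketch of CoreType and MustDeref on hand-built go/types values; the types are constructed manually here only to keep the example self-contained.

package main

import (
	"fmt"
	"go/token"
	"go/types"

	"golang.org/x/tools/internal/typeparams"
)

func main() {
	// For a non-interface type, the core type is simply the underlying type.
	slice := types.NewSlice(types.Typ[types.Int])
	obj := types.NewTypeName(token.NoPos, nil, "IntSlice", nil)
	named := types.NewNamed(obj, slice, nil)
	fmt.Println(typeparams.CoreType(named)) // []int

	// For a constraint interface like interface{ ~[]int }, the core type is []int.
	union := types.NewUnion([]*types.Term{types.NewTerm(true, slice)})
	iface := types.NewInterfaceType(nil, []types.Type{union})
	iface.Complete()
	fmt.Println(typeparams.CoreType(iface)) // []int

	// MustDeref peels one pointer off the core type.
	fmt.Println(typeparams.MustDeref(types.NewPointer(named))) // IntSlice
}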
diff --git a/vendor/golang.org/x/tools/internal/typeparams/free.go b/vendor/golang.org/x/tools/internal/typeparams/free.go
new file mode 100644
index 0000000..a1d1382
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/typeparams/free.go
@@ -0,0 +1,120 @@
+// Copyright 2024 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package typeparams
+
+import (
+ "go/types"
+
+ "golang.org/x/tools/internal/aliases"
+)
+
+// Free is a memoization of the set of free type parameters within a
+// type. It makes a sequence of calls to [Free.Has] for overlapping
+// types more efficient. The zero value is ready for use.
+//
+// NOTE: Adapted from go/types/infer.go. If it is later exported, factor.
+type Free struct {
+ seen map[types.Type]bool
+}
+
+// Has reports whether the specified type has a free type parameter.
+func (w *Free) Has(typ types.Type) (res bool) {
+ // detect cycles
+ if x, ok := w.seen[typ]; ok {
+ return x
+ }
+ if w.seen == nil {
+ w.seen = make(map[types.Type]bool)
+ }
+ w.seen[typ] = false
+ defer func() {
+ w.seen[typ] = res
+ }()
+
+ switch t := typ.(type) {
+ case nil, *types.Basic: // TODO(gri) should nil be handled here?
+ break
+
+ case *aliases.Alias:
+ return w.Has(aliases.Unalias(t))
+
+ case *types.Array:
+ return w.Has(t.Elem())
+
+ case *types.Slice:
+ return w.Has(t.Elem())
+
+ case *types.Struct:
+ for i, n := 0, t.NumFields(); i < n; i++ {
+ if w.Has(t.Field(i).Type()) {
+ return true
+ }
+ }
+
+ case *types.Pointer:
+ return w.Has(t.Elem())
+
+ case *types.Tuple:
+ n := t.Len()
+ for i := 0; i < n; i++ {
+ if w.Has(t.At(i).Type()) {
+ return true
+ }
+ }
+
+ case *types.Signature:
+ // t.tparams may not be nil if we are looking at a signature
+ // of a generic function type (or an interface method) that is
+ // part of the type we're testing. We don't care about these type
+ // parameters.
+ // Similarly, the receiver of a method may declare (rather than
+ // use) type parameters, we don't care about those either.
+ // Thus, we only need to look at the input and result parameters.
+ return w.Has(t.Params()) || w.Has(t.Results())
+
+ case *types.Interface:
+ for i, n := 0, t.NumMethods(); i < n; i++ {
+ if w.Has(t.Method(i).Type()) {
+ return true
+ }
+ }
+ terms, err := InterfaceTermSet(t)
+ if err != nil {
+ return false // ill typed
+ }
+ for _, term := range terms {
+ if w.Has(term.Type()) {
+ return true
+ }
+ }
+
+ case *types.Map:
+ return w.Has(t.Key()) || w.Has(t.Elem())
+
+ case *types.Chan:
+ return w.Has(t.Elem())
+
+ case *types.Named:
+ args := t.TypeArgs()
+ // TODO(taking): this does not match go/types/infer.go. Check with rfindley.
+ if params := t.TypeParams(); params.Len() > args.Len() {
+ return true
+ }
+ for i, n := 0, args.Len(); i < n; i++ {
+ if w.Has(args.At(i)) {
+ return true
+ }
+ }
+ return w.Has(t.Underlying()) // recurse for types local to parameterized functions
+
+ case *types.TypeParam:
+ return true
+
+ default:
+ panic(t) // unreachable
+ }
+
+ return false
+}
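
Aside (not part of the patch): a small sketch of Free.Has, using a manually constructed type parameter with an empty constraint.

package main

import (
	"fmt"
	"go/token"
	"go/types"

	"golang.org/x/tools/internal/typeparams"
)

func main() {
	// A type parameter T with an empty (interface{}) constraint.
	obj := types.NewTypeName(token.NoPos, nil, "T", nil)
	tparam := types.NewTypeParam(obj, types.NewInterfaceType(nil, nil))

	var free typeparams.Free // zero value is ready for use
	fmt.Println(free.Has(types.NewSlice(tparam)))                      // true: []T mentions T
	fmt.Println(free.Has(types.NewSlice(types.Typ[types.Int])))       // false: []int is ground
	fmt.Println(free.Has(types.NewMap(tparam, types.Typ[types.Bool]))) // true: map[T]bool mentions T
}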
diff --git a/vendor/golang.org/x/tools/internal/typeparams/normalize.go b/vendor/golang.org/x/tools/internal/typeparams/normalize.go
new file mode 100644
index 0000000..93c80fd
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/typeparams/normalize.go
@@ -0,0 +1,218 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package typeparams
+
+import (
+ "errors"
+ "fmt"
+ "go/types"
+ "os"
+ "strings"
+)
+
+//go:generate go run copytermlist.go
+
+const debug = false
+
+var ErrEmptyTypeSet = errors.New("empty type set")
+
+// StructuralTerms returns a slice of terms representing the normalized
+// structural type restrictions of a type parameter, if any.
+//
+// Structural type restrictions of a type parameter are created via
+// non-interface types embedded in its constraint interface (directly, or via a
+// chain of interface embeddings). For example, in the declaration
+//
+// type T[P interface{~int; m()}] int
+//
+// the structural restriction of the type parameter P is ~int.
+//
+// With interface embedding and unions, the specification of structural type
+// restrictions may be arbitrarily complex. For example, consider the
+// following:
+//
+// type A interface{ ~string|~[]byte }
+//
+// type B interface{ int|string }
+//
+// type C interface { ~string|~int }
+//
+// type T[P interface{ A|B; C }] int
+//
+// In this example, the structural type restriction of P is ~string|int: A|B
+// expands to ~string|~[]byte|int|string, which reduces to ~string|~[]byte|int,
+// which when intersected with C (~string|~int) yields ~string|int.
+//
+// StructuralTerms computes these expansions and reductions, producing a
+// "normalized" form of the embeddings. A structural restriction is normalized
+// if it is a single union containing no interface terms, and is minimal in the
+// sense that removing any term changes the set of types satisfying the
+// constraint. It is left as a proof for the reader that, modulo sorting, there
+// is exactly one such normalized form.
+//
+// Because the minimal representation always takes this form, StructuralTerms
+// returns a slice of tilde terms corresponding to the terms of the union in
+// the normalized structural restriction. An error is returned if the
+// constraint interface is invalid, exceeds complexity bounds, or has an empty
+// type set. In the latter case, StructuralTerms returns ErrEmptyTypeSet.
+//
+// StructuralTerms makes no guarantees about the order of terms, except that it
+// is deterministic.
+func StructuralTerms(tparam *types.TypeParam) ([]*types.Term, error) {
+ constraint := tparam.Constraint()
+ if constraint == nil {
+ return nil, fmt.Errorf("%s has nil constraint", tparam)
+ }
+ iface, _ := constraint.Underlying().(*types.Interface)
+ if iface == nil {
+ return nil, fmt.Errorf("constraint is %T, not *types.Interface", constraint.Underlying())
+ }
+ return InterfaceTermSet(iface)
+}
+
+// InterfaceTermSet computes the normalized terms for a constraint interface,
+// returning an error if the term set cannot be computed or is empty. In the
+// latter case, the error will be ErrEmptyTypeSet.
+//
+// See the documentation of StructuralTerms for more information on
+// normalization.
+func InterfaceTermSet(iface *types.Interface) ([]*types.Term, error) {
+ return computeTermSet(iface)
+}
+
+// UnionTermSet computes the normalized terms for a union, returning an error
+// if the term set cannot be computed or is empty. In the latter case, the
+// error will be ErrEmptyTypeSet.
+//
+// See the documentation of StructuralTerms for more information on
+// normalization.
+func UnionTermSet(union *types.Union) ([]*types.Term, error) {
+ return computeTermSet(union)
+}
+
+func computeTermSet(typ types.Type) ([]*types.Term, error) {
+ tset, err := computeTermSetInternal(typ, make(map[types.Type]*termSet), 0)
+ if err != nil {
+ return nil, err
+ }
+ if tset.terms.isEmpty() {
+ return nil, ErrEmptyTypeSet
+ }
+ if tset.terms.isAll() {
+ return nil, nil
+ }
+ var terms []*types.Term
+ for _, term := range tset.terms {
+ terms = append(terms, types.NewTerm(term.tilde, term.typ))
+ }
+ return terms, nil
+}
+
+// A termSet holds the normalized set of terms for a given type.
+//
+// The name termSet is intentionally distinct from 'type set': a type set is
+// all types that implement a type (and includes method restrictions), whereas
+// a term set just represents the structural restrictions on a type.
+type termSet struct {
+ complete bool
+ terms termlist
+}
+
+func indentf(depth int, format string, args ...interface{}) {
+ fmt.Fprintf(os.Stderr, strings.Repeat(".", depth)+format+"\n", args...)
+}
+
+func computeTermSetInternal(t types.Type, seen map[types.Type]*termSet, depth int) (res *termSet, err error) {
+ if t == nil {
+ panic("nil type")
+ }
+
+ if debug {
+ indentf(depth, "%s", t.String())
+ defer func() {
+ if err != nil {
+ indentf(depth, "=> %s", err)
+ } else {
+ indentf(depth, "=> %s", res.terms.String())
+ }
+ }()
+ }
+
+ const maxTermCount = 100
+ if tset, ok := seen[t]; ok {
+ if !tset.complete {
+ return nil, fmt.Errorf("cycle detected in the declaration of %s", t)
+ }
+ return tset, nil
+ }
+
+ // Mark the current type as seen to avoid infinite recursion.
+ tset := new(termSet)
+ defer func() {
+ tset.complete = true
+ }()
+ seen[t] = tset
+
+ switch u := t.Underlying().(type) {
+ case *types.Interface:
+ // The term set of an interface is the intersection of the term sets of its
+ // embedded types.
+ tset.terms = allTermlist
+ for i := 0; i < u.NumEmbeddeds(); i++ {
+ embedded := u.EmbeddedType(i)
+ if _, ok := embedded.Underlying().(*types.TypeParam); ok {
+ return nil, fmt.Errorf("invalid embedded type %T", embedded)
+ }
+ tset2, err := computeTermSetInternal(embedded, seen, depth+1)
+ if err != nil {
+ return nil, err
+ }
+ tset.terms = tset.terms.intersect(tset2.terms)
+ }
+ case *types.Union:
+ // The term set of a union is the union of term sets of its terms.
+ tset.terms = nil
+ for i := 0; i < u.Len(); i++ {
+ t := u.Term(i)
+ var terms termlist
+ switch t.Type().Underlying().(type) {
+ case *types.Interface:
+ tset2, err := computeTermSetInternal(t.Type(), seen, depth+1)
+ if err != nil {
+ return nil, err
+ }
+ terms = tset2.terms
+ case *types.TypeParam, *types.Union:
+ // A stand-alone type parameter or union is not permitted as union
+ // term.
+ return nil, fmt.Errorf("invalid union term %T", t)
+ default:
+ if t.Type() == types.Typ[types.Invalid] {
+ continue
+ }
+ terms = termlist{{t.Tilde(), t.Type()}}
+ }
+ tset.terms = tset.terms.union(terms)
+ if len(tset.terms) > maxTermCount {
+ return nil, fmt.Errorf("exceeded max term count %d", maxTermCount)
+ }
+ }
+ case *types.TypeParam:
+ panic("unreachable")
+ default:
+ // For all other types, the term set is just a single non-tilde term
+ // holding the type itself.
+ if u != types.Typ[types.Invalid] {
+ tset.terms = termlist{{false, t}}
+ }
+ }
+ return tset, nil
+}
+
+// under is a facade for the go/types internal function of the same name. It is
+// used by typeterm.go.
+func under(t types.Type) types.Type {
+ return t.Underlying()
+}
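
Aside (not part of the patch): a sketch of StructuralTerms on a hand-built type parameter constrained by interface{ ~int | string }.

package main

import (
	"fmt"
	"go/token"
	"go/types"

	"golang.org/x/tools/internal/typeparams"
)

func main() {
	union := types.NewUnion([]*types.Term{
		types.NewTerm(true, types.Typ[types.Int]),     // ~int
		types.NewTerm(false, types.Typ[types.String]), // string
	})
	constraint := types.NewInterfaceType(nil, []types.Type{union})
	constraint.Complete()

	obj := types.NewTypeName(token.NoPos, nil, "P", nil)
	tparam := types.NewTypeParam(obj, constraint)

	terms, err := typeparams.StructuralTerms(tparam)
	if err != nil {
		panic(err)
	}
	for _, t := range terms {
		fmt.Println(t.Tilde(), t.Type()) // ~int and string, in unspecified (but deterministic) order
	}
}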
diff --git a/vendor/golang.org/x/tools/internal/typeparams/termlist.go b/vendor/golang.org/x/tools/internal/typeparams/termlist.go
new file mode 100644
index 0000000..cbd12f8
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/typeparams/termlist.go
@@ -0,0 +1,163 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Code generated by copytermlist.go DO NOT EDIT.
+
+package typeparams
+
+import (
+ "bytes"
+ "go/types"
+)
+
+// A termlist represents the type set represented by the union
+// t1 ∪ t2 ∪ ... tn of the type sets of the terms t1 to tn.
+// A termlist is in normal form if all terms are disjoint.
+// termlist operations don't require the operands to be in
+// normal form.
+type termlist []*term
+
+// allTermlist represents the set of all types.
+// It is in normal form.
+var allTermlist = termlist{new(term)}
+
+// String prints the termlist exactly (without normalization).
+func (xl termlist) String() string {
+ if len(xl) == 0 {
+ return "∅"
+ }
+ var buf bytes.Buffer
+ for i, x := range xl {
+ if i > 0 {
+ buf.WriteString(" | ")
+ }
+ buf.WriteString(x.String())
+ }
+ return buf.String()
+}
+
+// isEmpty reports whether the termlist xl represents the empty set of types.
+func (xl termlist) isEmpty() bool {
+ // If there's a non-nil term, the entire list is not empty.
+ // If the termlist is in normal form, this requires at most
+ // one iteration.
+ for _, x := range xl {
+ if x != nil {
+ return false
+ }
+ }
+ return true
+}
+
+// isAll reports whether the termlist xl represents the set of all types.
+func (xl termlist) isAll() bool {
+ // If there's a 𝓤 term, the entire list is 𝓤.
+ // If the termlist is in normal form, this requires at most
+ // one iteration.
+ for _, x := range xl {
+ if x != nil && x.typ == nil {
+ return true
+ }
+ }
+ return false
+}
+
+// norm returns the normal form of xl.
+func (xl termlist) norm() termlist {
+ // Quadratic algorithm, but good enough for now.
+ // TODO(gri) fix asymptotic performance
+ used := make([]bool, len(xl))
+ var rl termlist
+ for i, xi := range xl {
+ if xi == nil || used[i] {
+ continue
+ }
+ for j := i + 1; j < len(xl); j++ {
+ xj := xl[j]
+ if xj == nil || used[j] {
+ continue
+ }
+ if u1, u2 := xi.union(xj); u2 == nil {
+ // If we encounter a 𝓤 term, the entire list is 𝓤.
+ // Exit early.
+ // (Note that this is not just an optimization;
+ // if we continue, we may end up with a 𝓤 term
+ // and other terms and the result would not be
+ // in normal form.)
+ if u1.typ == nil {
+ return allTermlist
+ }
+ xi = u1
+ used[j] = true // xj is now unioned into xi - ignore it in future iterations
+ }
+ }
+ rl = append(rl, xi)
+ }
+ return rl
+}
+
+// union returns the union xl ∪ yl.
+func (xl termlist) union(yl termlist) termlist {
+ return append(xl, yl...).norm()
+}
+
+// intersect returns the intersection xl ∩ yl.
+func (xl termlist) intersect(yl termlist) termlist {
+ if xl.isEmpty() || yl.isEmpty() {
+ return nil
+ }
+
+ // Quadratic algorithm, but good enough for now.
+ // TODO(gri) fix asymptotic performance
+ var rl termlist
+ for _, x := range xl {
+ for _, y := range yl {
+ if r := x.intersect(y); r != nil {
+ rl = append(rl, r)
+ }
+ }
+ }
+ return rl.norm()
+}
+
+// equal reports whether xl and yl represent the same type set.
+func (xl termlist) equal(yl termlist) bool {
+ // TODO(gri) this should be more efficient
+ return xl.subsetOf(yl) && yl.subsetOf(xl)
+}
+
+// includes reports whether t ∈ xl.
+func (xl termlist) includes(t types.Type) bool {
+ for _, x := range xl {
+ if x.includes(t) {
+ return true
+ }
+ }
+ return false
+}
+
+// supersetOf reports whether y ⊆ xl.
+func (xl termlist) supersetOf(y *term) bool {
+ for _, x := range xl {
+ if y.subsetOf(x) {
+ return true
+ }
+ }
+ return false
+}
+
+// subsetOf reports whether xl ⊆ yl.
+func (xl termlist) subsetOf(yl termlist) bool {
+ if yl.isEmpty() {
+ return xl.isEmpty()
+ }
+
+ // each term x of xl must be a subset of yl
+ for _, x := range xl {
+ if !yl.supersetOf(x) {
+ return false // x is not a subset of yl
+ }
+ }
+ return true
+}
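
Aside (not part of the patch): termlist and term are unexported, so a sketch like this would have to live in a _test.go file inside the package. Assuming that context, union, intersection, and subset behave as follows.

// inside package typeparams, e.g. in a _test.go file
xl := termlist{{false, types.Typ[types.Int]}, {true, types.Typ[types.String]}} // int | ~string
yl := termlist{{false, types.Typ[types.Int]}}                                  // int

fmt.Println(xl.union(yl).String())     // int | ~string (the duplicate int terms merge)
fmt.Println(xl.intersect(yl).String()) // int
fmt.Println(xl.subsetOf(yl))           // false: ~string is not covered by yl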
diff --git a/vendor/golang.org/x/tools/internal/typeparams/typeterm.go b/vendor/golang.org/x/tools/internal/typeparams/typeterm.go
new file mode 100644
index 0000000..7350bb7
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/typeparams/typeterm.go
@@ -0,0 +1,169 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Code generated by copytermlist.go DO NOT EDIT.
+
+package typeparams
+
+import "go/types"
+
+// A term describes elementary type sets:
+//
+// ∅: (*term)(nil) == ∅ // set of no types (empty set)
+// 𝓤: &term{} == 𝓤 // set of all types (𝓤niverse)
+// T: &term{false, T} == {T} // set of type T
+// ~t: &term{true, t} == {t' | under(t') == t} // set of types with underlying type t
+type term struct {
+ tilde bool // valid if typ != nil
+ typ types.Type
+}
+
+func (x *term) String() string {
+ switch {
+ case x == nil:
+ return "∅"
+ case x.typ == nil:
+ return "𝓤"
+ case x.tilde:
+ return "~" + x.typ.String()
+ default:
+ return x.typ.String()
+ }
+}
+
+// equal reports whether x and y represent the same type set.
+func (x *term) equal(y *term) bool {
+ // easy cases
+ switch {
+ case x == nil || y == nil:
+ return x == y
+ case x.typ == nil || y.typ == nil:
+ return x.typ == y.typ
+ }
+ // ∅ ⊂ x, y ⊂ 𝓤
+
+ return x.tilde == y.tilde && types.Identical(x.typ, y.typ)
+}
+
+// union returns the union x ∪ y: zero, one, or two non-nil terms.
+func (x *term) union(y *term) (_, _ *term) {
+ // easy cases
+ switch {
+ case x == nil && y == nil:
+ return nil, nil // ∅ ∪ ∅ == ∅
+ case x == nil:
+ return y, nil // ∅ ∪ y == y
+ case y == nil:
+ return x, nil // x ∪ ∅ == x
+ case x.typ == nil:
+ return x, nil // 𝓤 ∪ y == 𝓤
+ case y.typ == nil:
+ return y, nil // x ∪ 𝓤 == 𝓤
+ }
+ // ∅ ⊂ x, y ⊂ 𝓤
+
+ if x.disjoint(y) {
+ return x, y // x ∪ y == (x, y) if x ∩ y == ∅
+ }
+ // x.typ == y.typ
+
+ // ~t ∪ ~t == ~t
+ // ~t ∪ T == ~t
+ // T ∪ ~t == ~t
+ // T ∪ T == T
+ if x.tilde || !y.tilde {
+ return x, nil
+ }
+ return y, nil
+}
+
+// intersect returns the intersection x ∩ y.
+func (x *term) intersect(y *term) *term {
+ // easy cases
+ switch {
+ case x == nil || y == nil:
+ return nil // ∅ ∩ y == ∅ and x ∩ ∅ == ∅
+ case x.typ == nil:
+ return y // 𝓤 ∩ y == y
+ case y.typ == nil:
+ return x // x ∩ 𝓤 == x
+ }
+ // ∅ ⊂ x, y ⊂ 𝓤
+
+ if x.disjoint(y) {
+ return nil // x ∩ y == ∅ if x ∩ y == ∅
+ }
+ // x.typ == y.typ
+
+ // ~t ∩ ~t == ~t
+ // ~t ∩ T == T
+ // T ∩ ~t == T
+ // T ∩ T == T
+ if !x.tilde || y.tilde {
+ return x
+ }
+ return y
+}
+
+// includes reports whether t ∈ x.
+func (x *term) includes(t types.Type) bool {
+ // easy cases
+ switch {
+ case x == nil:
+ return false // t ∈ ∅ == false
+ case x.typ == nil:
+ return true // t ∈ 𝓤 == true
+ }
+ // ∅ ⊂ x ⊂ 𝓤
+
+ u := t
+ if x.tilde {
+ u = under(u)
+ }
+ return types.Identical(x.typ, u)
+}
+
+// subsetOf reports whether x ⊆ y.
+func (x *term) subsetOf(y *term) bool {
+ // easy cases
+ switch {
+ case x == nil:
+ return true // ∅ ⊆ y == true
+ case y == nil:
+ return false // x ⊆ ∅ == false since x != ∅
+ case y.typ == nil:
+ return true // x ⊆ 𝓤 == true
+ case x.typ == nil:
+ return false // 𝓤 ⊆ y == false since y != 𝓤
+ }
+ // ∅ ⊂ x, y ⊂ 𝓤
+
+ if x.disjoint(y) {
+ return false // x ⊆ y == false if x ∩ y == ∅
+ }
+ // x.typ == y.typ
+
+ // ~t ⊆ ~t == true
+ // ~t ⊆ T == false
+ // T ⊆ ~t == true
+ // T ⊆ T == true
+ return !x.tilde || y.tilde
+}
+
+// disjoint reports whether x ∩ y == ∅.
+// x.typ and y.typ must not be nil.
+func (x *term) disjoint(y *term) bool {
+ if debug && (x.typ == nil || y.typ == nil) {
+ panic("invalid argument(s)")
+ }
+ ux := x.typ
+ if y.tilde {
+ ux = under(ux)
+ }
+ uy := y.typ
+ if x.tilde {
+ uy = under(uy)
+ }
+ return !types.Identical(ux, uy)
+}
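
Aside (not part of the patch): term is also unexported, so this sketch of the elementary term algebra would likewise belong in an in-package _test.go file.

// inside package typeparams, e.g. in a _test.go file
x := &term{false, types.Typ[types.Int]}    // the set {int}
y := &term{true, types.Typ[types.Int]}     // the set ~int
z := &term{true, types.Typ[types.String]}  // the set ~string

u1, u2 := x.union(y)
fmt.Println(u1, u2)         // ~int ∅   (T ∪ ~t == ~t)
fmt.Println(x.intersect(y)) // int      (T ∩ ~t == T)
fmt.Println(x.subsetOf(y))  // true     (T ⊆ ~t)
fmt.Println(x.disjoint(z))  // true     (int and ~string share no types)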
diff --git a/vendor/golang.org/x/tools/internal/typesinternal/errorcode.go b/vendor/golang.org/x/tools/internal/typesinternal/errorcode.go
new file mode 100644
index 0000000..834e053
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/typesinternal/errorcode.go
@@ -0,0 +1,1560 @@
+// Copyright 2020 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package typesinternal
+
+//go:generate stringer -type=ErrorCode
+
+type ErrorCode int
+
+// This file defines the error codes that can be produced during type-checking.
+// Collectively, these codes provide an identifier that may be used to
+// implement special handling for certain types of errors.
+//
+// Error codes should be fine-grained enough that the exact nature of the error
+// can be easily determined, but coarse enough that they are not an
+// implementation detail of the type checking algorithm. As a rule-of-thumb,
+// errors should be considered equivalent if there is a theoretical refactoring
+// of the type checker in which they are emitted in exactly one place. For
+// example, the type checker emits different error messages for "too many
+// arguments" and "too few arguments", but one can imagine an alternative type
+// checker where this check instead just emits a single "wrong number of
+// arguments", so these errors should have the same code.
+//
+// Error code names should be as brief as possible while retaining accuracy and
+// distinctiveness. In most cases names should start with an adjective
+// describing the nature of the error (e.g. "invalid", "unused", "misplaced"),
+// and end with a noun identifying the relevant language object. For example,
+// "DuplicateDecl" or "InvalidSliceExpr". For brevity, naming follows the
+// convention that "bad" implies a problem with syntax, and "invalid" implies a
+// problem with types.
+
+const (
+ // InvalidSyntaxTree occurs if an invalid syntax tree is provided
+ // to the type checker. It should never happen.
+ InvalidSyntaxTree ErrorCode = -1
+)
+
+const (
+ _ ErrorCode = iota
+
+ // Test is reserved for errors that only apply while in self-test mode.
+ Test
+
+ /* package names */
+
+ // BlankPkgName occurs when a package name is the blank identifier "_".
+ //
+ // Per the spec:
+ // "The PackageName must not be the blank identifier."
+ BlankPkgName
+
+ // MismatchedPkgName occurs when a file's package name doesn't match the
+ // package name already established by other files.
+ MismatchedPkgName
+
+ // InvalidPkgUse occurs when a package identifier is used outside of a
+ // selector expression.
+ //
+ // Example:
+ // import "fmt"
+ //
+ // var _ = fmt
+ InvalidPkgUse
+
+ /* imports */
+
+ // BadImportPath occurs when an import path is not valid.
+ BadImportPath
+
+ // BrokenImport occurs when importing a package fails.
+ //
+ // Example:
+ // import "amissingpackage"
+ BrokenImport
+
+ // ImportCRenamed occurs when the special import "C" is renamed. "C" is a
+ // pseudo-package, and must not be renamed.
+ //
+ // Example:
+ // import _ "C"
+ ImportCRenamed
+
+ // UnusedImport occurs when an import is unused.
+ //
+ // Example:
+ // import "fmt"
+ //
+ // func main() {}
+ UnusedImport
+
+ /* initialization */
+
+ // InvalidInitCycle occurs when an invalid cycle is detected within the
+ // initialization graph.
+ //
+ // Example:
+ // var x int = f()
+ //
+ // func f() int { return x }
+ InvalidInitCycle
+
+ /* decls */
+
+ // DuplicateDecl occurs when an identifier is declared multiple times.
+ //
+ // Example:
+ // var x = 1
+ // var x = 2
+ DuplicateDecl
+
+ // InvalidDeclCycle occurs when a declaration cycle is not valid.
+ //
+ // Example:
+ // import "unsafe"
+ //
+ // type T struct {
+ // a [n]int
+ // }
+ //
+ // var n = unsafe.Sizeof(T{})
+ InvalidDeclCycle
+
+ // InvalidTypeCycle occurs when a cycle in type definitions results in a
+ // type that is not well-defined.
+ //
+ // Example:
+ // import "unsafe"
+ //
+ // type T [unsafe.Sizeof(T{})]int
+ InvalidTypeCycle
+
+ /* decls > const */
+
+ // InvalidConstInit occurs when a const declaration has a non-constant
+ // initializer.
+ //
+ // Example:
+ // var x int
+ // const _ = x
+ InvalidConstInit
+
+ // InvalidConstVal occurs when a const value cannot be converted to its
+ // target type.
+ //
+ // TODO(findleyr): this error code and example are not very clear. Consider
+ // removing it.
+ //
+ // Example:
+ // const _ = 1 << "hello"
+ InvalidConstVal
+
+ // InvalidConstType occurs when the underlying type in a const declaration
+ // is not a valid constant type.
+ //
+ // Example:
+ // const c *int = 4
+ InvalidConstType
+
+ /* decls > var (+ other variable assignment codes) */
+
+ // UntypedNilUse occurs when the predeclared (untyped) value nil is used to
+ // initialize a variable declared without an explicit type.
+ //
+ // Example:
+ // var x = nil
+ UntypedNilUse
+
+ // WrongAssignCount occurs when the number of values on the right-hand side
+ // of an assignment or initialization expression does not match the number
+ // of variables on the left-hand side.
+ //
+ // Example:
+ // var x = 1, 2
+ WrongAssignCount
+
+ // UnassignableOperand occurs when the left-hand side of an assignment is
+ // not assignable.
+ //
+ // Example:
+ // func f() {
+ // const c = 1
+ // c = 2
+ // }
+ UnassignableOperand
+
+ // NoNewVar occurs when a short variable declaration (':=') does not declare
+ // new variables.
+ //
+ // Example:
+ // func f() {
+ // x := 1
+ // x := 2
+ // }
+ NoNewVar
+
+ // MultiValAssignOp occurs when an assignment operation (+=, *=, etc) does
+ // not have single-valued left-hand or right-hand side.
+ //
+ // Per the spec:
+ // "In assignment operations, both the left- and right-hand expression lists
+ // must contain exactly one single-valued expression"
+ //
+ // Example:
+ // func f() int {
+ // x, y := 1, 2
+ // x, y += 1
+ // return x + y
+ // }
+ MultiValAssignOp
+
+ // InvalidIfaceAssign occurs when a value of type T is used as an
+ // interface, but T does not implement a method of the expected interface.
+ //
+ // Example:
+ // type I interface {
+ // f()
+ // }
+ //
+ // type T int
+ //
+ // var x I = T(1)
+ InvalidIfaceAssign
+
+ // InvalidChanAssign occurs when a chan assignment is invalid.
+ //
+ // Per the spec, a value x is assignable to a channel type T if:
+ // "x is a bidirectional channel value, T is a channel type, x's type V and
+ // T have identical element types, and at least one of V or T is not a
+ // defined type."
+ //
+ // Example:
+ // type T1 chan int
+ // type T2 chan int
+ //
+ // var x T1
+ // // Invalid assignment because both types are named
+ // var _ T2 = x
+ InvalidChanAssign
+
+ // IncompatibleAssign occurs when the type of the right-hand side expression
+ // in an assignment cannot be assigned to the type of the variable being
+ // assigned.
+ //
+ // Example:
+ // var x []int
+ // var _ int = x
+ IncompatibleAssign
+
+ // UnaddressableFieldAssign occurs when trying to assign to a struct field
+ // in a map value.
+ //
+ // Example:
+ // func f() {
+ // m := make(map[string]struct{i int})
+ // m["foo"].i = 42
+ // }
+ UnaddressableFieldAssign
+
+ /* decls > type (+ other type expression codes) */
+
+ // NotAType occurs when the identifier used as the underlying type in a type
+ // declaration or the right-hand side of a type alias does not denote a type.
+ //
+ // Example:
+ // var S = 2
+ //
+ // type T S
+ NotAType
+
+ // InvalidArrayLen occurs when an array length is not a constant value.
+ //
+ // Example:
+ // var n = 3
+ // var _ = [n]int{}
+ InvalidArrayLen
+
+ // BlankIfaceMethod occurs when a method name is '_'.
+ //
+ // Per the spec:
+ // "The name of each explicitly specified method must be unique and not
+ // blank."
+ //
+ // Example:
+ // type T interface {
+ // _(int)
+ // }
+ BlankIfaceMethod
+
+ // IncomparableMapKey occurs when a map key type does not support the == and
+ // != operators.
+ //
+ // Per the spec:
+ // "The comparison operators == and != must be fully defined for operands of
+ // the key type; thus the key type must not be a function, map, or slice."
+ //
+ // Example:
+ // var x map[T]int
+ //
+ // type T []int
+ IncomparableMapKey
+
+ // InvalidIfaceEmbed occurs when a non-interface type is embedded in an
+ // interface.
+ //
+ // Example:
+ // type T struct {}
+ //
+ // func (T) m()
+ //
+ // type I interface {
+ // T
+ // }
+ InvalidIfaceEmbed
+
+ // InvalidPtrEmbed occurs when an embedded field is of the pointer form *T,
+ // and T is itself a pointer, an unsafe.Pointer, or an interface.
+ //
+ // Per the spec:
+ // "An embedded field must be specified as a type name T or as a pointer to
+ // a non-interface type name *T, and T itself may not be a pointer type."
+ //
+ // Example:
+ // type T *int
+ //
+ // type S struct {
+ // *T
+ // }
+ InvalidPtrEmbed
+
+ /* decls > func and method */
+
+ // BadRecv occurs when a method declaration does not have exactly one
+ // receiver parameter.
+ //
+ // Example:
+ // func () _() {}
+ BadRecv
+
+ // InvalidRecv occurs when a receiver type expression is not of the form T
+ // or *T, or T is a pointer type.
+ //
+ // Example:
+ // type T struct {}
+ //
+ // func (**T) m() {}
+ InvalidRecv
+
+ // DuplicateFieldAndMethod occurs when an identifier appears as both a field
+ // and method name.
+ //
+ // Example:
+ // type T struct {
+ // m int
+ // }
+ //
+ // func (T) m() {}
+ DuplicateFieldAndMethod
+
+ // DuplicateMethod occurs when two methods on the same receiver type have
+ // the same name.
+ //
+ // Example:
+ // type T struct {}
+ // func (T) m() {}
+ // func (T) m(i int) int { return i }
+ DuplicateMethod
+
+ /* decls > special */
+
+ // InvalidBlank occurs when a blank identifier is used as a value or type.
+ //
+ // Per the spec:
+ // "The blank identifier may appear as an operand only on the left-hand side
+ // of an assignment."
+ //
+ // Example:
+ // var x = _
+ InvalidBlank
+
+ // InvalidIota occurs when the predeclared identifier iota is used outside
+ // of a constant declaration.
+ //
+ // Example:
+ // var x = iota
+ InvalidIota
+
+ // MissingInitBody occurs when an init function is missing its body.
+ //
+ // Example:
+ // func init()
+ MissingInitBody
+
+ // InvalidInitSig occurs when an init function declares parameters or
+ // results.
+ //
+ // Example:
+ // func init() int { return 1 }
+ InvalidInitSig
+
+ // InvalidInitDecl occurs when init is declared as anything other than a
+ // function.
+ //
+ // Example:
+ // var init = 1
+ InvalidInitDecl
+
+ // InvalidMainDecl occurs when main is declared as anything other than a
+ // function, in a main package.
+ InvalidMainDecl
+
+ /* exprs */
+
+ // TooManyValues occurs when a function returns too many values for the
+ // expression context in which it is used.
+ //
+ // Example:
+ // func ReturnTwo() (int, int) {
+ // return 1, 2
+ // }
+ //
+ // var x = ReturnTwo()
+ TooManyValues
+
+ // NotAnExpr occurs when a type expression is used where a value expression
+ // is expected.
+ //
+ // Example:
+ // type T struct {}
+ //
+ // func f() {
+ // T
+ // }
+ NotAnExpr
+
+ /* exprs > const */
+
+ // TruncatedFloat occurs when a float constant is truncated to an integer
+ // value.
+ //
+ // Example:
+ // var _ int = 98.6
+ TruncatedFloat
+
+ // NumericOverflow occurs when a numeric constant overflows its target type.
+ //
+ // Example:
+ // var x int8 = 1000
+ NumericOverflow
+
+ /* exprs > operation */
+
+ // UndefinedOp occurs when an operator is not defined for the type(s) used
+ // in an operation.
+ //
+ // Example:
+ // var c = "a" - "b"
+ UndefinedOp
+
+ // MismatchedTypes occurs when operand types are incompatible in a binary
+ // operation.
+ //
+ // Example:
+ // var a = "hello"
+ // var b = 1
+ // var c = a - b
+ MismatchedTypes
+
+ // DivByZero occurs when a division operation is provable at compile
+ // time to be a division by zero.
+ //
+ // Example:
+ // const divisor = 0
+ // var x int = 1/divisor
+ DivByZero
+
+ // NonNumericIncDec occurs when an increment or decrement operator is
+ // applied to a non-numeric value.
+ //
+ // Example:
+ // func f() {
+ // var c = "c"
+ // c++
+ // }
+ NonNumericIncDec
+
+ /* exprs > ptr */
+
+ // UnaddressableOperand occurs when the & operator is applied to an
+ // unaddressable expression.
+ //
+ // Example:
+ // var x = &1
+ UnaddressableOperand
+
+ // InvalidIndirection occurs when a non-pointer value is indirected via the
+ // '*' operator.
+ //
+ // Example:
+ // var x int
+ // var y = *x
+ InvalidIndirection
+
+ /* exprs > [] */
+
+ // NonIndexableOperand occurs when an index operation is applied to a value
+ // that cannot be indexed.
+ //
+ // Example:
+ // var x = 1
+ // var y = x[1]
+ NonIndexableOperand
+
+ // InvalidIndex occurs when an index argument is not of integer type,
+ // is negative, or is out of bounds.
+ //
+ // Example:
+ // var s = [...]int{1,2,3}
+ // var x = s[5]
+ //
+ // Example:
+ // var s = []int{1,2,3}
+ // var _ = s[-1]
+ //
+ // Example:
+ // var s = []int{1,2,3}
+ // var i string
+ // var _ = s[i]
+ InvalidIndex
+
+ // SwappedSliceIndices occurs when constant indices in a slice expression
+ // are decreasing in value.
+ //
+ // Example:
+ // var _ = []int{1,2,3}[2:1]
+ SwappedSliceIndices
+
+ /* operators > slice */
+
+ // NonSliceableOperand occurs when a slice operation is applied to a value
+ // whose type is not sliceable, or is unaddressable.
+ //
+ // Example:
+ // var x = [...]int{1, 2, 3}[:1]
+ //
+ // Example:
+ // var x = 1
+ // var y = 1[:1]
+ NonSliceableOperand
+
+ // InvalidSliceExpr occurs when a three-index slice expression (a[x:y:z]) is
+ // applied to a string.
+ //
+ // Example:
+ // var s = "hello"
+ // var x = s[1:2:3]
+ InvalidSliceExpr
+
+ /* exprs > shift */
+
+ // InvalidShiftCount occurs when the right-hand side of a shift operation is
+ // either non-integer, negative, or too large.
+ //
+ // Example:
+ // var (
+ // x string
+ // y int = 1 << x
+ // )
+ InvalidShiftCount
+
+ // InvalidShiftOperand occurs when the shifted operand is not an integer.
+ //
+ // Example:
+ // var s = "hello"
+ // var x = s << 2
+ InvalidShiftOperand
+
+ /* exprs > chan */
+
+ // InvalidReceive occurs when there is a channel receive from a value that
+ // is either not a channel, or is a send-only channel.
+ //
+ // Example:
+ // func f() {
+ // var x = 1
+ // <-x
+ // }
+ InvalidReceive
+
+ // InvalidSend occurs when there is a channel send to a value that is not a
+ // channel, or is a receive-only channel.
+ //
+ // Example:
+ // func f() {
+ // var x = 1
+ // x <- "hello!"
+ // }
+ InvalidSend
+
+ /* exprs > literal */
+
+ // DuplicateLitKey occurs when an index is duplicated in a slice, array, or
+ // map literal.
+ //
+ // Example:
+ // var _ = []int{0:1, 0:2}
+ //
+ // Example:
+ // var _ = map[string]int{"a": 1, "a": 2}
+ DuplicateLitKey
+
+ // MissingLitKey occurs when a map literal is missing a key expression.
+ //
+ // Example:
+ // var _ = map[string]int{1}
+ MissingLitKey
+
+ // InvalidLitIndex occurs when the key in a key-value element of a slice or
+ // array literal is not an integer constant.
+ //
+ // Example:
+ // var i = 0
+ // var x = []string{i: "world"}
+ InvalidLitIndex
+
+ // OversizeArrayLit occurs when an array literal exceeds its length.
+ //
+ // Example:
+ // var _ = [2]int{1,2,3}
+ OversizeArrayLit
+
+ // MixedStructLit occurs when a struct literal contains a mix of positional
+ // and named elements.
+ //
+ // Example:
+ // var _ = struct{i, j int}{i: 1, 2}
+ MixedStructLit
+
+ // InvalidStructLit occurs when a positional struct literal has an incorrect
+ // number of values.
+ //
+ // Example:
+ // var _ = struct{i, j int}{1,2,3}
+ InvalidStructLit
+
+ // MissingLitField occurs when a struct literal refers to a field that does
+ // not exist on the struct type.
+ //
+ // Example:
+ // var _ = struct{i int}{j: 2}
+ MissingLitField
+
+ // DuplicateLitField occurs when a struct literal contains duplicated
+ // fields.
+ //
+ // Example:
+ // var _ = struct{i int}{i: 1, i: 2}
+ DuplicateLitField
+
+ // UnexportedLitField occurs when a positional struct literal implicitly
+ // assigns an unexported field of an imported type.
+ UnexportedLitField
+
+ // InvalidLitField occurs when a field name is not a valid identifier.
+ //
+ // Example:
+ // var _ = struct{i int}{1: 1}
+ InvalidLitField
+
+ // UntypedLit occurs when a composite literal omits a required type
+ // identifier.
+ //
+ // Example:
+ // type outer struct{
+ // inner struct { i int }
+ // }
+ //
+ // var _ = outer{inner: {1}}
+ UntypedLit
+
+ // InvalidLit occurs when a composite literal expression does not match its
+ // type.
+ //
+ // Example:
+ // type P *struct{
+ // x int
+ // }
+ // var _ = P {}
+ InvalidLit
+
+ /* exprs > selector */
+
+ // AmbiguousSelector occurs when a selector is ambiguous.
+ //
+ // Example:
+ // type E1 struct { i int }
+ // type E2 struct { i int }
+ // type T struct { E1; E2 }
+ //
+ // var x T
+ // var _ = x.i
+ AmbiguousSelector
+
+ // UndeclaredImportedName occurs when a package-qualified identifier is
+ // undeclared by the imported package.
+ //
+ // Example:
+ // import "go/types"
+ //
+ // var _ = types.NotAnActualIdentifier
+ UndeclaredImportedName
+
+ // UnexportedName occurs when a selector refers to an unexported identifier
+ // of an imported package.
+ //
+ // Example:
+ // import "reflect"
+ //
+ // type _ reflect.flag
+ UnexportedName
+
+ // UndeclaredName occurs when an identifier is not declared in the current
+ // scope.
+ //
+ // Example:
+ // var x T
+ UndeclaredName
+
+ // MissingFieldOrMethod occurs when a selector references a field or method
+ // that does not exist.
+ //
+ // Example:
+ // type T struct {}
+ //
+ // var x = T{}.f
+ MissingFieldOrMethod
+
+ /* exprs > ... */
+
+ // BadDotDotDotSyntax occurs when a "..." occurs in a context where it is
+ // not valid.
+ //
+ // Example:
+ // var _ = map[int][...]int{0: {}}
+ BadDotDotDotSyntax
+
+ // NonVariadicDotDotDot occurs when a "..." is used on the final argument to
+ // a non-variadic function.
+ //
+ // Example:
+ // func printArgs(s []string) {
+ // for _, a := range s {
+ // println(a)
+ // }
+ // }
+ //
+ // func f() {
+ // s := []string{"a", "b", "c"}
+ // printArgs(s...)
+ // }
+ NonVariadicDotDotDot
+
+ // MisplacedDotDotDot occurs when a "..." is used somewhere other than the
+ // final argument to a function call.
+ //
+ // Example:
+ // func printArgs(args ...int) {
+ // for _, a := range args {
+ // println(a)
+ // }
+ // }
+ //
+ // func f() {
+ // a := []int{1,2,3}
+ // printArgs(0, a...)
+ // }
+ MisplacedDotDotDot
+
+ // InvalidDotDotDotOperand occurs when a "..." operator is applied to a
+ // single-valued operand.
+ //
+ // Example:
+ // func printArgs(args ...int) {
+ // for _, a := range args {
+ // println(a)
+ // }
+ // }
+ //
+ // func f() {
+ // a := 1
+ // printArgs(a...)
+ // }
+ //
+ // Example:
+ // func args() (int, int) {
+ // return 1, 2
+ // }
+ //
+ // func printArgs(args ...int) {
+ // for _, a := range args {
+ // println(a)
+ // }
+ // }
+ //
+ // func g() {
+ // printArgs(args()...)
+ // }
+ InvalidDotDotDotOperand
+
+ // InvalidDotDotDot occurs when a "..." is used in a non-variadic built-in
+ // function.
+ //
+ // Example:
+ // var s = []int{1, 2, 3}
+ // var l = len(s...)
+ InvalidDotDotDot
+
+ /* exprs > built-in */
+
+ // UncalledBuiltin occurs when a built-in function is used as a
+ // function-valued expression, instead of being called.
+ //
+ // Per the spec:
+ // "The built-in functions do not have standard Go types, so they can only
+ // appear in call expressions; they cannot be used as function values."
+ //
+ // Example:
+ // var _ = copy
+ UncalledBuiltin
+
+ // InvalidAppend occurs when append is called with a first argument that is
+ // not a slice.
+ //
+ // Example:
+ // var _ = append(1, 2)
+ InvalidAppend
+
+ // InvalidCap occurs when an argument to the cap built-in function is not of
+ // supported type.
+ //
+ // See https://golang.org/ref/spec#Length_and_capacity for information on
+ // which underlying types are supported as arguments to cap and len.
+ //
+ // Example:
+ // var s = 2
+ // var x = cap(s)
+ InvalidCap
+
+ // InvalidClose occurs when close(...) is called with an argument that is
+ // not of channel type, or that is a receive-only channel.
+ //
+ // Example:
+ // func f() {
+ // var x int
+ // close(x)
+ // }
+ InvalidClose
+
+ // InvalidCopy occurs when the arguments are not of slice type or do not
+ // have compatible type.
+ //
+ // See https://golang.org/ref/spec#Appending_and_copying_slices for more
+ // information on the type requirements for the copy built-in.
+ //
+ // Example:
+ // func f() {
+ // var x []int
+ // y := []int64{1,2,3}
+ // copy(x, y)
+ // }
+ InvalidCopy
+
+ // InvalidComplex occurs when the complex built-in function is called with
+ // arguments with incompatible types.
+ //
+ // Example:
+ // var _ = complex(float32(1), float64(2))
+ InvalidComplex
+
+ // InvalidDelete occurs when the delete built-in function is called with a
+ // first argument that is not a map.
+ //
+ // Example:
+ // func f() {
+ // m := "hello"
+ // delete(m, "e")
+ // }
+ InvalidDelete
+
+ // InvalidImag occurs when the imag built-in function is called with an
+ // argument that does not have complex type.
+ //
+ // Example:
+ // var _ = imag(int(1))
+ InvalidImag
+
+ // InvalidLen occurs when an argument to the len built-in function is not of
+ // supported type.
+ //
+ // See https://golang.org/ref/spec#Length_and_capacity for information on
+ // which underlying types are supported as arguments to cap and len.
+ //
+ // Example:
+ // var s = 2
+ // var x = len(s)
+ InvalidLen
+
+ // SwappedMakeArgs occurs when make is called with three arguments, and its
+ // length argument is larger than its capacity argument.
+ //
+ // Example:
+ // var x = make([]int, 3, 2)
+ SwappedMakeArgs
+
+ // InvalidMake occurs when make is called with an unsupported type argument.
+ //
+ // See https://golang.org/ref/spec#Making_slices_maps_and_channels for
+ // information on the types that may be created using make.
+ //
+ // Example:
+ // var x = make(int)
+ InvalidMake
+
+ // InvalidReal occurs when the real built-in function is called with an
+ // argument that does not have complex type.
+ //
+ // Example:
+ // var _ = real(int(1))
+ InvalidReal
+
+ /* exprs > assertion */
+
+ // InvalidAssert occurs when a type assertion is applied to a
+ // value that is not of interface type.
+ //
+ // Example:
+ // var x = 1
+ // var _ = x.(float64)
+ InvalidAssert
+
+ // ImpossibleAssert occurs for a type assertion x.(T) when the value x of
+ // interface cannot have dynamic type T, due to a missing or mismatching
+ // method on T.
+ //
+ // Example:
+ // type T int
+ //
+ // func (t *T) m() int { return int(*t) }
+ //
+ // type I interface { m() int }
+ //
+ // var x I
+ // var _ = x.(T)
+ ImpossibleAssert
+
+ /* exprs > conversion */
+
+ // InvalidConversion occurs when the argument type cannot be converted to the
+ // target.
+ //
+ // See https://golang.org/ref/spec#Conversions for the rules of
+ // convertibility.
+ //
+ // Example:
+ // var x float64
+ // var _ = string(x)
+ InvalidConversion
+
+ // InvalidUntypedConversion occurs when there is no valid implicit
+ // conversion from an untyped value satisfying the type constraints of the
+ // context in which it is used.
+ //
+ // Example:
+ // var _ = 1 + ""
+ InvalidUntypedConversion
+
+ /* offsetof */
+
+ // BadOffsetofSyntax occurs when unsafe.Offsetof is called with an argument
+ // that is not a selector expression.
+ //
+ // Example:
+ // import "unsafe"
+ //
+ // var x int
+ // var _ = unsafe.Offsetof(x)
+ BadOffsetofSyntax
+
+ // InvalidOffsetof occurs when unsafe.Offsetof is called with a method
+ // selector, rather than a field selector, or when the field is embedded via
+ // a pointer.
+ //
+ // Per the spec:
+ //
+ // "If f is an embedded field, it must be reachable without pointer
+ // indirections through fields of the struct."
+ //
+ // Example:
+ // import "unsafe"
+ //
+ // type T struct { f int }
+ // type S struct { *T }
+ // var s S
+ // var _ = unsafe.Offsetof(s.f)
+ //
+ // Example:
+ // import "unsafe"
+ //
+ // type S struct{}
+ //
+ // func (S) m() {}
+ //
+ // var s S
+ // var _ = unsafe.Offsetof(s.m)
+ InvalidOffsetof
+
+ /* control flow > scope */
+
+ // UnusedExpr occurs when a side-effect free expression is used as a
+ // statement. Such a statement has no effect.
+ //
+ // Example:
+ // func f(i int) {
+ // i*i
+ // }
+ UnusedExpr
+
+ // UnusedVar occurs when a variable is declared but unused.
+ //
+ // Example:
+ // func f() {
+ // x := 1
+ // }
+ UnusedVar
+
+ // MissingReturn occurs when a function with results is missing a return
+ // statement.
+ //
+ // Example:
+ // func f() int {}
+ MissingReturn
+
+ // WrongResultCount occurs when a return statement returns an incorrect
+ // number of values.
+ //
+ // Example:
+ // func ReturnOne() int {
+ // return 1, 2
+ // }
+ WrongResultCount
+
+ // OutOfScopeResult occurs when the name of a value implicitly returned by
+ // an empty return statement is shadowed in a nested scope.
+ //
+ // Example:
+ // func factor(n int) (i int) {
+ // for i := 2; i < n; i++ {
+ // if n%i == 0 {
+ // return
+ // }
+ // }
+ // return 0
+ // }
+ OutOfScopeResult
+
+ /* control flow > if */
+
+ // InvalidCond occurs when an if condition is not a boolean expression.
+ //
+ // Example:
+ // func checkReturn(i int) {
+ // if i {
+ // panic("non-zero return")
+ // }
+ // }
+ InvalidCond
+
+ /* control flow > for */
+
+ // InvalidPostDecl occurs when there is a declaration in a for-loop post
+ // statement.
+ //
+ // Example:
+ // func f() {
+ // for i := 0; i < 10; j := 0 {}
+ // }
+ InvalidPostDecl
+
+ // InvalidChanRange occurs when a send-only channel is used in a range
+ // expression.
+ //
+ // Example:
+ // func sum(c chan<- int) {
+ // s := 0
+ // for i := range c {
+ // s += i
+ // }
+ // }
+ InvalidChanRange
+
+ // InvalidIterVar occurs when two iteration variables are used while ranging
+ // over a channel.
+ //
+ // Example:
+ // func f(c chan int) {
+ // for k, v := range c {
+ // println(k, v)
+ // }
+ // }
+ InvalidIterVar
+
+ // InvalidRangeExpr occurs when the type of a range expression is not array,
+ // slice, string, map, or channel.
+ //
+ // Example:
+ // func f(i int) {
+ // for j := range i {
+ // println(j)
+ // }
+ // }
+ InvalidRangeExpr
+
+ /* control flow > switch */
+
+ // MisplacedBreak occurs when a break statement is not within a for, switch,
+ // or select statement of the innermost function definition.
+ //
+ // Example:
+ // func f() {
+ // break
+ // }
+ MisplacedBreak
+
+ // MisplacedContinue occurs when a continue statement is not within a for
+ // loop of the innermost function definition.
+ //
+ // Example:
+ // func sumeven(n int) int {
+ // proceed := func() {
+ // continue
+ // }
+ // sum := 0
+ // for i := 1; i <= n; i++ {
+ // if i % 2 != 0 {
+ // proceed()
+ // }
+ // sum += i
+ // }
+ // return sum
+ // }
+ MisplacedContinue
+
+ // MisplacedFallthrough occurs when a fallthrough statement is not within an
+ // expression switch.
+ //
+ // Example:
+ // func typename(i interface{}) string {
+ // switch i.(type) {
+ // case int64:
+ // fallthrough
+ // case int:
+ // return "int"
+ // }
+ // return "unsupported"
+ // }
+ MisplacedFallthrough
+
+ // DuplicateCase occurs when a type or expression switch has duplicate
+ // cases.
+ //
+ // Example:
+ // func printInt(i int) {
+ // switch i {
+ // case 1:
+ // println("one")
+ // case 1:
+ // println("One")
+ // }
+ // }
+ DuplicateCase
+
+ // DuplicateDefault occurs when a type or expression switch has multiple
+ // default clauses.
+ //
+ // Example:
+ // func printInt(i int) {
+ // switch i {
+ // case 1:
+ // println("one")
+ // default:
+ // println("One")
+ // default:
+ // println("1")
+ // }
+ // }
+ DuplicateDefault
+
+ // BadTypeKeyword occurs when a .(type) expression is used anywhere other
+ // than a type switch.
+ //
+ // Example:
+ // type I interface {
+ // m()
+ // }
+ // var t I
+ // var _ = t.(type)
+ BadTypeKeyword
+
+ // InvalidTypeSwitch occurs when .(type) is used on an expression that is
+ // not of interface type.
+ //
+ // Example:
+ // func f(i int) {
+ // switch x := i.(type) {}
+ // }
+ InvalidTypeSwitch
+
+ // InvalidExprSwitch occurs when a switch expression is not comparable.
+ //
+ // Example:
+ // func _() {
+ // var a struct{ _ func() }
+ // switch a /* ERROR cannot switch on a */ {
+ // }
+ // }
+ InvalidExprSwitch
+
+ /* control flow > select */
+
+ // InvalidSelectCase occurs when a select case is not a channel send or
+ // receive.
+ //
+ // Example:
+ // func checkChan(c <-chan int) bool {
+ // select {
+ // case c:
+ // return true
+ // default:
+ // return false
+ // }
+ // }
+ InvalidSelectCase
+
+ /* control flow > labels and jumps */
+
+ // UndeclaredLabel occurs when an undeclared label is jumped to.
+ //
+ // Example:
+ // func f() {
+ // goto L
+ // }
+ UndeclaredLabel
+
+ // DuplicateLabel occurs when a label is declared more than once.
+ //
+ // Example:
+ // func f() int {
+ // L:
+ // L:
+ // return 1
+ // }
+ DuplicateLabel
+
+ // MisplacedLabel occurs when a break or continue label is not on a for,
+ // switch, or select statement.
+ //
+ // Example:
+ // func f() {
+ // L:
+ // a := []int{1,2,3}
+ // for _, e := range a {
+ // if e > 10 {
+ // break L
+ // }
+ // println(a)
+ // }
+ // }
+ MisplacedLabel
+
+ // UnusedLabel occurs when a label is declared but not used.
+ //
+ // Example:
+ // func f() {
+ // L:
+ // }
+ UnusedLabel
+
+ // JumpOverDecl occurs when a label jumps over a variable declaration.
+ //
+ // Example:
+ // func f() int {
+ // goto L
+ // x := 2
+ // L:
+ // x++
+ // return x
+ // }
+ JumpOverDecl
+
+ // JumpIntoBlock occurs when a forward jump goes to a label inside a nested
+ // block.
+ //
+ // Example:
+ // func f(x int) {
+ // goto L
+ // if x > 0 {
+ // L:
+ // print("inside block")
+ // }
+ // }
+ JumpIntoBlock
+
+ /* control flow > calls */
+
+ // InvalidMethodExpr occurs when a pointer method is called but the argument
+ // is not addressable.
+ //
+ // Example:
+ // type T struct {}
+ //
+ // func (*T) m() int { return 1 }
+ //
+ // var _ = T.m(T{})
+ InvalidMethodExpr
+
+ // WrongArgCount occurs when too few or too many arguments are passed in a
+ // function call.
+ //
+ // Example:
+ // func f(i int) {}
+ // var x = f()
+ WrongArgCount
+
+ // InvalidCall occurs when an expression is called that is not of function
+ // type.
+ //
+ // Example:
+ // var x = "x"
+ // var y = x()
+ InvalidCall
+
+ /* control flow > suspended */
+
+ // UnusedResults occurs when a restricted expression-only built-in function
+ // is suspended via go or defer. Such a suspension discards the results of
+ // these side-effect free built-in functions, and therefore is ineffectual.
+ //
+ // Example:
+ // func f(a []int) int {
+ // defer len(a)
+ //  return 0
+ // }
+ UnusedResults
+
+ // InvalidDefer occurs when a deferred expression is not a function call,
+ // for example if the expression is a type conversion.
+ //
+ // Example:
+ // func f(i int) int {
+ // defer int32(i)
+ // return i
+ // }
+ InvalidDefer
+
+ // InvalidGo occurs when a go expression is not a function call, for example
+ // if the expression is a type conversion.
+ //
+ // Example:
+ // func f(i int) int {
+ // go int32(i)
+ // return i
+ // }
+ InvalidGo
+
+ // All codes below were added in Go 1.17.
+
+ /* decl */
+
+ // BadDecl occurs when a declaration has invalid syntax.
+ BadDecl
+
+ // RepeatedDecl occurs when an identifier occurs more than once on the left
+ // hand side of a short variable declaration.
+ //
+ // Example:
+ // func _() {
+ // x, y, y := 1, 2, 3
+ // }
+ RepeatedDecl
+
+ /* unsafe */
+
+ // InvalidUnsafeAdd occurs when unsafe.Add is called with a
+ // length argument that is not of integer type.
+ //
+ // Example:
+ // import "unsafe"
+ //
+ // var p unsafe.Pointer
+ // var _ = unsafe.Add(p, float64(1))
+ InvalidUnsafeAdd
+
+ // InvalidUnsafeSlice occurs when unsafe.Slice is called with a
+ // pointer argument that is not of pointer type or a length argument
+ // that is not of integer type, negative, or out of bounds.
+ //
+ // Example:
+ // import "unsafe"
+ //
+ // var x int
+ // var _ = unsafe.Slice(x, 1)
+ //
+ // Example:
+ // import "unsafe"
+ //
+ // var x int
+ // var _ = unsafe.Slice(&x, float64(1))
+ //
+ // Example:
+ // import "unsafe"
+ //
+ // var x int
+ // var _ = unsafe.Slice(&x, -1)
+ //
+ // Example:
+ // import "unsafe"
+ //
+ // var x int
+ // var _ = unsafe.Slice(&x, uint64(1) << 63)
+ InvalidUnsafeSlice
+
+ // All codes below were added in Go 1.18.
+
+ /* features */
+
+ // UnsupportedFeature occurs when a language feature is used that is not
+ // supported at this Go version.
+ UnsupportedFeature
+
+ /* type params */
+
+ // NotAGenericType occurs when a non-generic type is used where a generic
+ // type is expected: in type or function instantiation.
+ //
+ // Example:
+ // type T int
+ //
+ // var _ T[int]
+ NotAGenericType
+
+ // WrongTypeArgCount occurs when a type or function is instantiated with an
+ // incorrect number of type arguments, including when a generic type or
+ // function is used without instantiation.
+ //
+ // Errors involving failed type inference are assigned other error codes.
+ //
+ // Example:
+ // type T[p any] int
+ //
+ // var _ T[int, string]
+ //
+ // Example:
+ // func f[T any]() {}
+ //
+ // var x = f
+ WrongTypeArgCount
+
+ // CannotInferTypeArgs occurs when type or function type argument inference
+ // fails to infer all type arguments.
+ //
+ // Example:
+ // func f[T any]() {}
+ //
+ // func _() {
+ // f()
+ // }
+ //
+ // Example:
+ // type N[P, Q any] struct{}
+ //
+ // var _ N[int]
+ CannotInferTypeArgs
+
+ // InvalidTypeArg occurs when a type argument does not satisfy its
+ // corresponding type parameter constraints.
+ //
+ // Example:
+ // type T[P ~int] struct{}
+ //
+ // var _ T[string]
+ InvalidTypeArg // arguments? InferenceFailed
+
+ // InvalidInstanceCycle occurs when an invalid cycle is detected
+ // within the instantiation graph.
+ //
+ // Example:
+ // func f[T any]() { f[*T]() }
+ InvalidInstanceCycle
+
+ // InvalidUnion occurs when an embedded union or approximation element is
+ // not valid.
+ //
+ // Example:
+ // type _ interface {
+ // ~int | interface{ m() }
+ // }
+ InvalidUnion
+
+ // MisplacedConstraintIface occurs when a constraint-type interface is used
+ // outside of constraint position.
+ //
+ // Example:
+ // type I interface { ~int }
+ //
+ // var _ I
+ MisplacedConstraintIface
+
+ // InvalidMethodTypeParams occurs when methods have type parameters.
+ //
+ // It cannot be encountered with an AST parsed using go/parser.
+ InvalidMethodTypeParams
+
+ // MisplacedTypeParam occurs when a type parameter is used in a place where
+ // it is not permitted.
+ //
+ // Example:
+ // type T[P any] P
+ //
+ // Example:
+ // type T[P any] struct{ *P }
+ MisplacedTypeParam
+
+ // InvalidUnsafeSliceData occurs when unsafe.SliceData is called with
+ // an argument that is not of slice type. It also occurs if it is used
+ // in a package compiled for a language version before go1.20.
+ //
+ // Example:
+ // import "unsafe"
+ //
+ // var x int
+ // var _ = unsafe.SliceData(x)
+ InvalidUnsafeSliceData
+
+ // InvalidUnsafeString occurs when unsafe.String is called with
+ // a length argument that is not of integer type, negative, or
+ // out of bounds. It also occurs if it is used in a package
+ // compiled for a language version before go1.20.
+ //
+ // Example:
+ // import "unsafe"
+ //
+ // var b [10]byte
+ // var _ = unsafe.String(&b[0], -1)
+ InvalidUnsafeString
+
+ // InvalidUnsafeStringData occurs if it is used in a package
+ // compiled for a language version before go1.20.
+ _ // not used anymore
+
+)
diff --git a/vendor/golang.org/x/tools/internal/typesinternal/errorcode_string.go b/vendor/golang.org/x/tools/internal/typesinternal/errorcode_string.go
new file mode 100644
index 0000000..15ecf7c
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/typesinternal/errorcode_string.go
@@ -0,0 +1,179 @@
+// Code generated by "stringer -type=ErrorCode"; DO NOT EDIT.
+
+package typesinternal
+
+import "strconv"
+
+func _() {
+ // An "invalid array index" compiler error signifies that the constant values have changed.
+ // Re-run the stringer command to generate them again.
+ var x [1]struct{}
+ _ = x[InvalidSyntaxTree - -1]
+ _ = x[Test-1]
+ _ = x[BlankPkgName-2]
+ _ = x[MismatchedPkgName-3]
+ _ = x[InvalidPkgUse-4]
+ _ = x[BadImportPath-5]
+ _ = x[BrokenImport-6]
+ _ = x[ImportCRenamed-7]
+ _ = x[UnusedImport-8]
+ _ = x[InvalidInitCycle-9]
+ _ = x[DuplicateDecl-10]
+ _ = x[InvalidDeclCycle-11]
+ _ = x[InvalidTypeCycle-12]
+ _ = x[InvalidConstInit-13]
+ _ = x[InvalidConstVal-14]
+ _ = x[InvalidConstType-15]
+ _ = x[UntypedNilUse-16]
+ _ = x[WrongAssignCount-17]
+ _ = x[UnassignableOperand-18]
+ _ = x[NoNewVar-19]
+ _ = x[MultiValAssignOp-20]
+ _ = x[InvalidIfaceAssign-21]
+ _ = x[InvalidChanAssign-22]
+ _ = x[IncompatibleAssign-23]
+ _ = x[UnaddressableFieldAssign-24]
+ _ = x[NotAType-25]
+ _ = x[InvalidArrayLen-26]
+ _ = x[BlankIfaceMethod-27]
+ _ = x[IncomparableMapKey-28]
+ _ = x[InvalidIfaceEmbed-29]
+ _ = x[InvalidPtrEmbed-30]
+ _ = x[BadRecv-31]
+ _ = x[InvalidRecv-32]
+ _ = x[DuplicateFieldAndMethod-33]
+ _ = x[DuplicateMethod-34]
+ _ = x[InvalidBlank-35]
+ _ = x[InvalidIota-36]
+ _ = x[MissingInitBody-37]
+ _ = x[InvalidInitSig-38]
+ _ = x[InvalidInitDecl-39]
+ _ = x[InvalidMainDecl-40]
+ _ = x[TooManyValues-41]
+ _ = x[NotAnExpr-42]
+ _ = x[TruncatedFloat-43]
+ _ = x[NumericOverflow-44]
+ _ = x[UndefinedOp-45]
+ _ = x[MismatchedTypes-46]
+ _ = x[DivByZero-47]
+ _ = x[NonNumericIncDec-48]
+ _ = x[UnaddressableOperand-49]
+ _ = x[InvalidIndirection-50]
+ _ = x[NonIndexableOperand-51]
+ _ = x[InvalidIndex-52]
+ _ = x[SwappedSliceIndices-53]
+ _ = x[NonSliceableOperand-54]
+ _ = x[InvalidSliceExpr-55]
+ _ = x[InvalidShiftCount-56]
+ _ = x[InvalidShiftOperand-57]
+ _ = x[InvalidReceive-58]
+ _ = x[InvalidSend-59]
+ _ = x[DuplicateLitKey-60]
+ _ = x[MissingLitKey-61]
+ _ = x[InvalidLitIndex-62]
+ _ = x[OversizeArrayLit-63]
+ _ = x[MixedStructLit-64]
+ _ = x[InvalidStructLit-65]
+ _ = x[MissingLitField-66]
+ _ = x[DuplicateLitField-67]
+ _ = x[UnexportedLitField-68]
+ _ = x[InvalidLitField-69]
+ _ = x[UntypedLit-70]
+ _ = x[InvalidLit-71]
+ _ = x[AmbiguousSelector-72]
+ _ = x[UndeclaredImportedName-73]
+ _ = x[UnexportedName-74]
+ _ = x[UndeclaredName-75]
+ _ = x[MissingFieldOrMethod-76]
+ _ = x[BadDotDotDotSyntax-77]
+ _ = x[NonVariadicDotDotDot-78]
+ _ = x[MisplacedDotDotDot-79]
+ _ = x[InvalidDotDotDotOperand-80]
+ _ = x[InvalidDotDotDot-81]
+ _ = x[UncalledBuiltin-82]
+ _ = x[InvalidAppend-83]
+ _ = x[InvalidCap-84]
+ _ = x[InvalidClose-85]
+ _ = x[InvalidCopy-86]
+ _ = x[InvalidComplex-87]
+ _ = x[InvalidDelete-88]
+ _ = x[InvalidImag-89]
+ _ = x[InvalidLen-90]
+ _ = x[SwappedMakeArgs-91]
+ _ = x[InvalidMake-92]
+ _ = x[InvalidReal-93]
+ _ = x[InvalidAssert-94]
+ _ = x[ImpossibleAssert-95]
+ _ = x[InvalidConversion-96]
+ _ = x[InvalidUntypedConversion-97]
+ _ = x[BadOffsetofSyntax-98]
+ _ = x[InvalidOffsetof-99]
+ _ = x[UnusedExpr-100]
+ _ = x[UnusedVar-101]
+ _ = x[MissingReturn-102]
+ _ = x[WrongResultCount-103]
+ _ = x[OutOfScopeResult-104]
+ _ = x[InvalidCond-105]
+ _ = x[InvalidPostDecl-106]
+ _ = x[InvalidChanRange-107]
+ _ = x[InvalidIterVar-108]
+ _ = x[InvalidRangeExpr-109]
+ _ = x[MisplacedBreak-110]
+ _ = x[MisplacedContinue-111]
+ _ = x[MisplacedFallthrough-112]
+ _ = x[DuplicateCase-113]
+ _ = x[DuplicateDefault-114]
+ _ = x[BadTypeKeyword-115]
+ _ = x[InvalidTypeSwitch-116]
+ _ = x[InvalidExprSwitch-117]
+ _ = x[InvalidSelectCase-118]
+ _ = x[UndeclaredLabel-119]
+ _ = x[DuplicateLabel-120]
+ _ = x[MisplacedLabel-121]
+ _ = x[UnusedLabel-122]
+ _ = x[JumpOverDecl-123]
+ _ = x[JumpIntoBlock-124]
+ _ = x[InvalidMethodExpr-125]
+ _ = x[WrongArgCount-126]
+ _ = x[InvalidCall-127]
+ _ = x[UnusedResults-128]
+ _ = x[InvalidDefer-129]
+ _ = x[InvalidGo-130]
+ _ = x[BadDecl-131]
+ _ = x[RepeatedDecl-132]
+ _ = x[InvalidUnsafeAdd-133]
+ _ = x[InvalidUnsafeSlice-134]
+ _ = x[UnsupportedFeature-135]
+ _ = x[NotAGenericType-136]
+ _ = x[WrongTypeArgCount-137]
+ _ = x[CannotInferTypeArgs-138]
+ _ = x[InvalidTypeArg-139]
+ _ = x[InvalidInstanceCycle-140]
+ _ = x[InvalidUnion-141]
+ _ = x[MisplacedConstraintIface-142]
+ _ = x[InvalidMethodTypeParams-143]
+ _ = x[MisplacedTypeParam-144]
+ _ = x[InvalidUnsafeSliceData-145]
+ _ = x[InvalidUnsafeString-146]
+}
+
+const (
+ _ErrorCode_name_0 = "InvalidSyntaxTree"
+ _ErrorCode_name_1 = "TestBlankPkgNameMismatchedPkgNameInvalidPkgUseBadImportPathBrokenImportImportCRenamedUnusedImportInvalidInitCycleDuplicateDeclInvalidDeclCycleInvalidTypeCycleInvalidConstInitInvalidConstValInvalidConstTypeUntypedNilUseWrongAssignCountUnassignableOperandNoNewVarMultiValAssignOpInvalidIfaceAssignInvalidChanAssignIncompatibleAssignUnaddressableFieldAssignNotATypeInvalidArrayLenBlankIfaceMethodIncomparableMapKeyInvalidIfaceEmbedInvalidPtrEmbedBadRecvInvalidRecvDuplicateFieldAndMethodDuplicateMethodInvalidBlankInvalidIotaMissingInitBodyInvalidInitSigInvalidInitDeclInvalidMainDeclTooManyValuesNotAnExprTruncatedFloatNumericOverflowUndefinedOpMismatchedTypesDivByZeroNonNumericIncDecUnaddressableOperandInvalidIndirectionNonIndexableOperandInvalidIndexSwappedSliceIndicesNonSliceableOperandInvalidSliceExprInvalidShiftCountInvalidShiftOperandInvalidReceiveInvalidSendDuplicateLitKeyMissingLitKeyInvalidLitIndexOversizeArrayLitMixedStructLitInvalidStructLitMissingLitFieldDuplicateLitFieldUnexportedLitFieldInvalidLitFieldUntypedLitInvalidLitAmbiguousSelectorUndeclaredImportedNameUnexportedNameUndeclaredNameMissingFieldOrMethodBadDotDotDotSyntaxNonVariadicDotDotDotMisplacedDotDotDotInvalidDotDotDotOperandInvalidDotDotDotUncalledBuiltinInvalidAppendInvalidCapInvalidCloseInvalidCopyInvalidComplexInvalidDeleteInvalidImagInvalidLenSwappedMakeArgsInvalidMakeInvalidRealInvalidAssertImpossibleAssertInvalidConversionInvalidUntypedConversionBadOffsetofSyntaxInvalidOffsetofUnusedExprUnusedVarMissingReturnWrongResultCountOutOfScopeResultInvalidCondInvalidPostDeclInvalidChanRangeInvalidIterVarInvalidRangeExprMisplacedBreakMisplacedContinueMisplacedFallthroughDuplicateCaseDuplicateDefaultBadTypeKeywordInvalidTypeSwitchInvalidExprSwitchInvalidSelectCaseUndeclaredLabelDuplicateLabelMisplacedLabelUnusedLabelJumpOverDeclJumpIntoBlockInvalidMethodExprWrongArgCountInvalidCallUnusedResultsInvalidDeferInvalidGoBadDeclRepeatedDeclInvalidUnsafeAddInvalidUnsafeSliceUnsupportedFeatureNotAGenericTypeWrongTypeArgCountCannotInferTypeArgsInvalidTypeArgInvalidInstanceCycleInvalidUnionMisplacedConstraintIfaceInvalidMethodTypeParamsMisplacedTypeParamInvalidUnsafeSliceDataInvalidUnsafeString"
+)
+
+var (
+ _ErrorCode_index_1 = [...]uint16{0, 4, 16, 33, 46, 59, 71, 85, 97, 113, 126, 142, 158, 174, 189, 205, 218, 234, 253, 261, 277, 295, 312, 330, 354, 362, 377, 393, 411, 428, 443, 450, 461, 484, 499, 511, 522, 537, 551, 566, 581, 594, 603, 617, 632, 643, 658, 667, 683, 703, 721, 740, 752, 771, 790, 806, 823, 842, 856, 867, 882, 895, 910, 926, 940, 956, 971, 988, 1006, 1021, 1031, 1041, 1058, 1080, 1094, 1108, 1128, 1146, 1166, 1184, 1207, 1223, 1238, 1251, 1261, 1273, 1284, 1298, 1311, 1322, 1332, 1347, 1358, 1369, 1382, 1398, 1415, 1439, 1456, 1471, 1481, 1490, 1503, 1519, 1535, 1546, 1561, 1577, 1591, 1607, 1621, 1638, 1658, 1671, 1687, 1701, 1718, 1735, 1752, 1767, 1781, 1795, 1806, 1818, 1831, 1848, 1861, 1872, 1885, 1897, 1906, 1913, 1925, 1941, 1959, 1977, 1992, 2009, 2028, 2042, 2062, 2074, 2098, 2121, 2139, 2161, 2180}
+)
+
+func (i ErrorCode) String() string {
+ switch {
+ case i == -1:
+ return _ErrorCode_name_0
+ case 1 <= i && i <= 146:
+ i -= 1
+ return _ErrorCode_name_1[_ErrorCode_index_1[i]:_ErrorCode_index_1[i+1]]
+ default:
+ return "ErrorCode(" + strconv.FormatInt(int64(i), 10) + ")"
+ }
+}
diff --git a/vendor/golang.org/x/tools/internal/typesinternal/recv.go b/vendor/golang.org/x/tools/internal/typesinternal/recv.go
new file mode 100644
index 0000000..fea7c8b
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/typesinternal/recv.go
@@ -0,0 +1,43 @@
+// Copyright 2024 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package typesinternal
+
+import (
+ "go/types"
+
+ "golang.org/x/tools/internal/aliases"
+)
+
+// ReceiverNamed returns the named type (if any) associated with the
+// type of recv, which may be of the form N or *N, or aliases thereof.
+// It also reports whether a Pointer was present.
+func ReceiverNamed(recv *types.Var) (isPtr bool, named *types.Named) {
+ t := recv.Type()
+ if ptr, ok := aliases.Unalias(t).(*types.Pointer); ok {
+ isPtr = true
+ t = ptr.Elem()
+ }
+ named, _ = aliases.Unalias(t).(*types.Named)
+ return
+}
+
+// Unpointer returns T given *T or an alias thereof.
+// For all other types it is the identity function.
+// It does not look at underlying types.
+// The result may be an alias.
+//
+// Use this function to strip off the optional pointer on a receiver
+// in a field or method selection, without losing the named type
+// (which is needed to compute the method set).
+//
+// See also [typeparams.MustDeref], which removes one level of
+// indirection from the type, regardless of named types (analogous to
+// a LOAD instruction).
+func Unpointer(t types.Type) types.Type {
+ if ptr, ok := aliases.Unalias(t).(*types.Pointer); ok {
+ return ptr.Elem()
+ }
+ return t
+}
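+
+// Illustrative usage sketch, not part of the upstream file. It assumes sig
+// is a *types.Signature for some method whose receiver may be N or *N:
+//
+//	isPtr, named := ReceiverNamed(sig.Recv())
+//	if named != nil && isPtr {
+//		// the receiver had the form *N (possibly through aliases)
+//	}
+//	elem := Unpointer(sig.Recv().Type()) // yields N given *N; identity otherwise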
diff --git a/vendor/golang.org/x/tools/internal/typesinternal/toonew.go b/vendor/golang.org/x/tools/internal/typesinternal/toonew.go
new file mode 100644
index 0000000..cc86487
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/typesinternal/toonew.go
@@ -0,0 +1,89 @@
+// Copyright 2024 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package typesinternal
+
+import (
+ "go/types"
+
+ "golang.org/x/tools/internal/stdlib"
+ "golang.org/x/tools/internal/versions"
+)
+
+// TooNewStdSymbols computes the set of package-level symbols
+// exported by pkg that are not available at the specified version.
+// The result maps each symbol to its minimum version.
+//
+// The pkg is allowed to contain type errors.
+func TooNewStdSymbols(pkg *types.Package, version string) map[types.Object]string {
+ disallowed := make(map[types.Object]string)
+
+ // Pass 1: package-level symbols.
+ symbols := stdlib.PackageSymbols[pkg.Path()]
+ for _, sym := range symbols {
+ symver := sym.Version.String()
+ if versions.Before(version, symver) {
+ switch sym.Kind {
+ case stdlib.Func, stdlib.Var, stdlib.Const, stdlib.Type:
+ disallowed[pkg.Scope().Lookup(sym.Name)] = symver
+ }
+ }
+ }
+
+ // Pass 2: fields and methods.
+ //
+ // We allow fields and methods if their associated type is
+ // disallowed, as otherwise we would report false positives
+ // for compatibility shims. Consider:
+ //
+ // //go:build go1.22
+ // type T struct { F std.Real } // correct new API
+ //
+ // //go:build !go1.22
+ // type T struct { F fake } // shim
+ // type fake struct { ... }
+ // func (fake) M () {}
+ //
+ // These alternative declarations of T use either the std.Real
+ // type, introduced in go1.22, or a fake type, for the field
+ // F. (The fakery could be arbitrarily deep, involving more
+ // nested fields and methods than are shown here.) Clients
+ // that use the compatibility shim T will compile with any
+ // version of go, whether older or newer than go1.22, but only
+ // the newer version will use the std.Real implementation.
+ //
+ // Now consider a reference to method M in new(T).F.M() in a
+ // module that requires a minimum of go1.21. The analysis may
+ // occur using a version of Go higher than 1.21, selecting the
+ // first version of T, so the method M is Real.M. This would
+ // spuriously cause the analyzer to report a reference to a
+ // too-new symbol even though this expression compiles just
+ // fine (with the fake implementation) using go1.21.
+ for _, sym := range symbols {
+ symVersion := sym.Version.String()
+ if !versions.Before(version, symVersion) {
+ continue // allowed
+ }
+
+ var obj types.Object
+ switch sym.Kind {
+ case stdlib.Field:
+ typename, name := sym.SplitField()
+ if t := pkg.Scope().Lookup(typename); t != nil && disallowed[t] == "" {
+ obj, _, _ = types.LookupFieldOrMethod(t.Type(), false, pkg, name)
+ }
+
+ case stdlib.Method:
+ ptr, recvname, name := sym.SplitMethod()
+ if t := pkg.Scope().Lookup(recvname); t != nil && disallowed[t] == "" {
+ obj, _, _ = types.LookupFieldOrMethod(t.Type(), ptr, pkg, name)
+ }
+ }
+ if obj != nil {
+ disallowed[obj] = symVersion
+ }
+ }
+
+ return disallowed
+}
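+
+// Illustrative usage sketch, not part of the upstream file. It assumes pkg is
+// a type-checked *types.Package and "go1.21" is the module's Go version:
+//
+//	for obj, minVersion := range TooNewStdSymbols(pkg, "go1.21") {
+//		fmt.Printf("%s requires %s or newer\n", obj.Name(), minVersion)
+//	}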
diff --git a/vendor/golang.org/x/tools/internal/typesinternal/types.go b/vendor/golang.org/x/tools/internal/typesinternal/types.go
new file mode 100644
index 0000000..8392328
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/typesinternal/types.go
@@ -0,0 +1,65 @@
+// Copyright 2020 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package typesinternal provides access to internal go/types APIs that are not
+// yet exported.
+package typesinternal
+
+import (
+ "go/token"
+ "go/types"
+ "reflect"
+ "unsafe"
+)
+
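+// SetUsesCgo sets the unexported go115UsesCgo field (or, failing that, a
+// UsesCgo field) of conf to true via reflect and unsafe, and reports whether
+// a matching field was found.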
+func SetUsesCgo(conf *types.Config) bool {
+ v := reflect.ValueOf(conf).Elem()
+
+ f := v.FieldByName("go115UsesCgo")
+ if !f.IsValid() {
+ f = v.FieldByName("UsesCgo")
+ if !f.IsValid() {
+ return false
+ }
+ }
+
+ addr := unsafe.Pointer(f.UnsafeAddr())
+ *(*bool)(addr) = true
+
+ return true
+}
+
+// ReadGo116ErrorData extracts additional information from types.Error values
+// generated by Go version 1.16 and later: the error code, start position, and
+// end position. If all positions are valid, start <= err.Pos <= end.
+//
+// If the data could not be read, the final result parameter will be false.
+func ReadGo116ErrorData(err types.Error) (code ErrorCode, start, end token.Pos, ok bool) {
+ var data [3]int
+ // By coincidence all of these fields are ints, which simplifies things.
+ v := reflect.ValueOf(err)
+ for i, name := range []string{"go116code", "go116start", "go116end"} {
+ f := v.FieldByName(name)
+ if !f.IsValid() {
+ return 0, 0, 0, false
+ }
+ data[i] = int(f.Int())
+ }
+ return ErrorCode(data[0]), token.Pos(data[1]), token.Pos(data[2]), true
+}
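+
+// Illustrative usage sketch, not part of the upstream file. It assumes err is
+// a types.Error produced by the type checker:
+//
+//	if code, start, end, ok := ReadGo116ErrorData(err); ok {
+//		_ = code          // e.g. UndeclaredName
+//		_, _ = start, end // start <= err.Pos <= end when all positions are valid
+//	}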
+
+// NameRelativeTo returns a types.Qualifier that qualifies members of
+// all packages other than pkg, using only the package name.
+// (By contrast, [types.RelativeTo] uses the complete package path,
+// which is often excessive.)
+//
+// If pkg is nil, it is equivalent to [*types.Package.Name].
+func NameRelativeTo(pkg *types.Package) types.Qualifier {
+ return func(other *types.Package) string {
+ if pkg != nil && pkg == other {
+ return "" // same package; unqualified
+ }
+ return other.Name()
+ }
+}
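+
+// Illustrative usage sketch, not part of the upstream file. With pkg set to
+// the bytes package and t the bytes.Buffer type, the qualifier yields
+// "Buffer" inside bytes and "bytes.Buffer" from any other package:
+//
+//	s := types.TypeString(t, NameRelativeTo(pkg))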
diff --git a/vendor/golang.org/x/tools/internal/versions/constraint.go b/vendor/golang.org/x/tools/internal/versions/constraint.go
new file mode 100644
index 0000000..179063d
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/versions/constraint.go
@@ -0,0 +1,13 @@
+// Copyright 2024 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package versions
+
+import "go/build/constraint"
+
+// ConstraintGoVersion is constraint.GoVersion (if built with go1.21+).
+// Otherwise nil.
+//
+// Deprecate once x/tools is after go1.21.
+var ConstraintGoVersion func(x constraint.Expr) string
diff --git a/vendor/golang.org/x/tools/internal/versions/constraint_go121.go b/vendor/golang.org/x/tools/internal/versions/constraint_go121.go
new file mode 100644
index 0000000..3801140
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/versions/constraint_go121.go
@@ -0,0 +1,14 @@
+// Copyright 2024 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build go1.21
+// +build go1.21
+
+package versions
+
+import "go/build/constraint"
+
+func init() {
+ ConstraintGoVersion = constraint.GoVersion
+}
diff --git a/vendor/golang.org/x/tools/internal/versions/features.go b/vendor/golang.org/x/tools/internal/versions/features.go
new file mode 100644
index 0000000..b53f178
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/versions/features.go
@@ -0,0 +1,43 @@
+// Copyright 2023 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package versions
+
+// This file contains predicates for working with file versions to
+// decide when a tool should consider a language feature enabled.
+
+// GoVersions that features in x/tools can be gated to.
+const (
+ Go1_18 = "go1.18"
+ Go1_19 = "go1.19"
+ Go1_20 = "go1.20"
+ Go1_21 = "go1.21"
+ Go1_22 = "go1.22"
+)
+
+// Future is an invalid unknown Go version sometime in the future.
+// Do not use directly with Compare.
+const Future = ""
+
+// AtLeast reports whether the file version v comes after a Go release.
+//
+// Use this predicate to enable a behavior once a certain Go release
+// has happened (and stays enabled in the future).
+func AtLeast(v, release string) bool {
+ if v == Future {
+ return true // an unknown future version is always after any release.
+ }
+ return Compare(Lang(v), Lang(release)) >= 0
+}
+
+// Before reports whether the file version v is strictly before a Go release.
+//
+// Use this predicate to disable a behavior once a certain Go release
+// has happened (and stays disabled in the future).
+func Before(v, release string) bool {
+ if v == Future {
+ return false // an unknown future version comes after any release.
+ }
+ return Compare(Lang(v), Lang(release)) < 0
+}
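+
+// Illustrative results, not part of the upstream file:
+//
+//	AtLeast("go1.22.3", Go1_22) == true
+//	AtLeast(Future, Go1_22)     == true  // unknown future versions pass every gate
+//	Before("go1.19", Go1_21)    == true
+//	Before(Future, Go1_21)      == false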
diff --git a/vendor/golang.org/x/tools/internal/versions/gover.go b/vendor/golang.org/x/tools/internal/versions/gover.go
new file mode 100644
index 0000000..bbabcd2
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/versions/gover.go
@@ -0,0 +1,172 @@
+// Copyright 2023 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// This is a fork of internal/gover for use by x/tools until
+// go1.21 and earlier are no longer supported by x/tools.
+
+package versions
+
+import "strings"
+
+// A gover is a parsed Go version: major[.minor[.patch]][kind[pre]]
+// The numbers are the original decimal strings to avoid integer overflows
+// and since there is very little actual math. (Probably overflow doesn't matter in practice,
+// but at the time this code was written, there was an existing test that used
+// go1.99999999999, which does not fit in an int on 32-bit platforms.
+// The "big decimal" representation avoids the problem entirely.)
+type gover struct {
+ major string // decimal
+ minor string // decimal or ""
+ patch string // decimal or ""
+ kind string // "", "alpha", "beta", "rc"
+ pre string // decimal or ""
+}
+
+// compare returns -1, 0, or +1 depending on whether
+// x < y, x == y, or x > y, interpreted as toolchain versions.
+// The versions x and y must not begin with a "go" prefix: just "1.21" not "go1.21".
+// Malformed versions compare less than well-formed versions and equal to each other.
+// The language version "1.21" compares less than the release candidate and eventual releases "1.21rc1" and "1.21.0".
+func compare(x, y string) int {
+ vx := parse(x)
+ vy := parse(y)
+
+ if c := cmpInt(vx.major, vy.major); c != 0 {
+ return c
+ }
+ if c := cmpInt(vx.minor, vy.minor); c != 0 {
+ return c
+ }
+ if c := cmpInt(vx.patch, vy.patch); c != 0 {
+ return c
+ }
+ if c := strings.Compare(vx.kind, vy.kind); c != 0 { // "" < alpha < beta < rc
+ return c
+ }
+ if c := cmpInt(vx.pre, vy.pre); c != 0 {
+ return c
+ }
+ return 0
+}
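+
+// Illustrative comparisons, not part of the upstream file:
+//
+//	compare("1.21", "1.21rc1") < 0   // the language version precedes its prereleases
+//	compare("1.21rc1", "1.21.0") < 0
+//	compare("bad", "1.0") < 0        // malformed versions compare less than valid ones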
+
+// lang returns the Go language version. For example, lang("1.2.3") == "1.2".
+func lang(x string) string {
+ v := parse(x)
+ if v.minor == "" || v.major == "1" && v.minor == "0" {
+ return v.major
+ }
+ return v.major + "." + v.minor
+}
+
+// isValid reports whether the version x is valid.
+func isValid(x string) bool {
+ return parse(x) != gover{}
+}
+
+// parse parses the Go version string x into a version.
+// It returns the zero version if x is malformed.
+func parse(x string) gover {
+ var v gover
+
+ // Parse major version.
+ var ok bool
+ v.major, x, ok = cutInt(x)
+ if !ok {
+ return gover{}
+ }
+ if x == "" {
+ // Interpret "1" as "1.0.0".
+ v.minor = "0"
+ v.patch = "0"
+ return v
+ }
+
+ // Parse . before minor version.
+ if x[0] != '.' {
+ return gover{}
+ }
+
+ // Parse minor version.
+ v.minor, x, ok = cutInt(x[1:])
+ if !ok {
+ return gover{}
+ }
+ if x == "" {
+ // Patch missing is same as "0" for older versions.
+ // Starting in Go 1.21, patch missing is different from explicit .0.
+ if cmpInt(v.minor, "21") < 0 {
+ v.patch = "0"
+ }
+ return v
+ }
+
+ // Parse patch if present.
+ if x[0] == '.' {
+ v.patch, x, ok = cutInt(x[1:])
+ if !ok || x != "" {
+ // Note that we are disallowing prereleases (alpha, beta, rc) for patch releases here (x != "").
+ // Allowing them would be a bit confusing because we already have:
+ // 1.21 < 1.21rc1
+ // But a prerelease of a patch would have the opposite effect:
+ // 1.21.3rc1 < 1.21.3
+ // We've never needed them before, so let's not start now.
+ return gover{}
+ }
+ return v
+ }
+
+ // Parse prerelease.
+ i := 0
+ for i < len(x) && (x[i] < '0' || '9' < x[i]) {
+ if x[i] < 'a' || 'z' < x[i] {
+ return gover{}
+ }
+ i++
+ }
+ if i == 0 {
+ return gover{}
+ }
+ v.kind, x = x[:i], x[i:]
+ if x == "" {
+ return v
+ }
+ v.pre, x, ok = cutInt(x)
+ if !ok || x != "" {
+ return gover{}
+ }
+
+ return v
+}
+
+// cutInt scans the leading decimal number at the start of x to an integer
+// and returns that value and the rest of the string.
+func cutInt(x string) (n, rest string, ok bool) {
+ i := 0
+ for i < len(x) && '0' <= x[i] && x[i] <= '9' {
+ i++
+ }
+ if i == 0 || x[0] == '0' && i != 1 { // no digits or unnecessary leading zero
+ return "", "", false
+ }
+ return x[:i], x[i:], true
+}
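+
+// Illustrative behaviour, not part of the upstream file:
+//
+//	cutInt("21rc2") // yields "21", "rc2", true
+//	cutInt("rc2")   // yields "", "", false (no leading digits)
+//	cutInt("012")   // yields "", "", false (unnecessary leading zero)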
+
+// cmpInt returns cmp.Compare(x, y) interpreting x and y as decimal numbers.
+// (Copied from golang.org/x/mod/semver's compareInt.)
+func cmpInt(x, y string) int {
+ if x == y {
+ return 0
+ }
+ if len(x) < len(y) {
+ return -1
+ }
+ if len(x) > len(y) {
+ return +1
+ }
+ if x < y {
+ return -1
+ } else {
+ return +1
+ }
+}
diff --git a/vendor/golang.org/x/tools/internal/versions/toolchain.go b/vendor/golang.org/x/tools/internal/versions/toolchain.go
new file mode 100644
index 0000000..377bf7a
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/versions/toolchain.go
@@ -0,0 +1,14 @@
+// Copyright 2024 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package versions
+
+// toolchain is the maximum version (<1.22) that the go toolchain used
+// to build the current tool is known to support.
+//
+// When a tool is built with >=1.22, the value of toolchain is unused.
+//
+// x/tools does not support building with go <1.18. So we take this
+// as the minimum possible maximum.
+var toolchain string = Go1_18
diff --git a/vendor/golang.org/x/tools/internal/versions/toolchain_go119.go b/vendor/golang.org/x/tools/internal/versions/toolchain_go119.go
new file mode 100644
index 0000000..f65beed
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/versions/toolchain_go119.go
@@ -0,0 +1,14 @@
+// Copyright 2024 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build go1.19
+// +build go1.19
+
+package versions
+
+func init() {
+ if Compare(toolchain, Go1_19) < 0 {
+ toolchain = Go1_19
+ }
+}
diff --git a/vendor/golang.org/x/tools/internal/versions/toolchain_go120.go b/vendor/golang.org/x/tools/internal/versions/toolchain_go120.go
new file mode 100644
index 0000000..1a9efa1
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/versions/toolchain_go120.go
@@ -0,0 +1,14 @@
+// Copyright 2024 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build go1.20
+// +build go1.20
+
+package versions
+
+func init() {
+ if Compare(toolchain, Go1_20) < 0 {
+ toolchain = Go1_20
+ }
+}
diff --git a/vendor/golang.org/x/tools/internal/versions/toolchain_go121.go b/vendor/golang.org/x/tools/internal/versions/toolchain_go121.go
new file mode 100644
index 0000000..b7ef216
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/versions/toolchain_go121.go
@@ -0,0 +1,14 @@
+// Copyright 2024 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build go1.21
+// +build go1.21
+
+package versions
+
+func init() {
+ if Compare(toolchain, Go1_21) < 0 {
+ toolchain = Go1_21
+ }
+}
diff --git a/vendor/golang.org/x/tools/internal/versions/types.go b/vendor/golang.org/x/tools/internal/versions/types.go
new file mode 100644
index 0000000..562eef2
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/versions/types.go
@@ -0,0 +1,19 @@
+// Copyright 2023 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package versions
+
+import (
+ "go/types"
+)
+
+// GoVersion returns the Go version of the type package.
+// It returns zero if no version can be determined.
+func GoVersion(pkg *types.Package) string {
+ // TODO(taking): x/tools can call GoVersion() [from 1.21] after 1.25.
+ if pkg, ok := any(pkg).(interface{ GoVersion() string }); ok {
+ return pkg.GoVersion()
+ }
+ return ""
+}
diff --git a/vendor/golang.org/x/tools/internal/versions/types_go121.go b/vendor/golang.org/x/tools/internal/versions/types_go121.go
new file mode 100644
index 0000000..b4345d3
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/versions/types_go121.go
@@ -0,0 +1,30 @@
+// Copyright 2023 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build !go1.22
+// +build !go1.22
+
+package versions
+
+import (
+ "go/ast"
+ "go/types"
+)
+
+// FileVersion returns a language version (<=1.21) derived from runtime.Version()
+// or an unknown future version.
+func FileVersion(info *types.Info, file *ast.File) string {
+ // In x/tools built with Go <= 1.21, we do not have Info.FileVersions
+ // available. We use a go version derived from the toolchain used to
+ // compile the tool by default.
+ // This will be <= go1.21. We take this as the maximum version that
+ // this tool can support.
+ //
+ // There are no features currently in x/tools that need to tell fine grained
+ // differences for versions <1.22.
+ return toolchain
+}
+
+// InitFileVersions is a noop when compiled with this Go version.
+func InitFileVersions(*types.Info) {}
diff --git a/vendor/golang.org/x/tools/internal/versions/types_go122.go b/vendor/golang.org/x/tools/internal/versions/types_go122.go
new file mode 100644
index 0000000..aac5db6
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/versions/types_go122.go
@@ -0,0 +1,41 @@
+// Copyright 2023 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build go1.22
+// +build go1.22
+
+package versions
+
+import (
+ "go/ast"
+ "go/types"
+)
+
+// FileVersion returns a file's Go version.
+// The reported version is an unknown Future version if a
+// version cannot be determined.
+func FileVersion(info *types.Info, file *ast.File) string {
+ // In tools built with Go >= 1.22, the Go version of a file
+ // follows a cascade of sources:
+ // 1) types.Info.FileVersion, which follows the cascade:
+ // 1.a) file version (ast.File.GoVersion),
+ // 1.b) the package version (types.Config.GoVersion), or
+ // 2) is some unknown Future version.
+ //
+ // File versions require a valid package version to be provided to types
+ // in Config.GoVersion. Config.GoVersion is either from the package's module
+ // or the toolchain (go run). This value should be provided by go/packages
+ // or unitchecker.Config.GoVersion.
+ if v := info.FileVersions[file]; IsValid(v) {
+ return v
+ }
+ // Note: we could instead return runtime.Version() [if valid].
+ // This would act as a max version on what a tool can support.
+ return Future
+}
+
+// InitFileVersions initializes info to record Go versions for Go files.
+func InitFileVersions(info *types.Info) {
+ info.FileVersions = make(map[*ast.File]string)
+}
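+
+// Illustrative usage sketch, not part of the upstream file. It assumes fset,
+// files, and conf are the usual go/token and go/types inputs:
+//
+//	info := &types.Info{}
+//	InitFileVersions(info) // allocate info.FileVersions before type checking
+//	// pkg, err := conf.Check("p", fset, files, info)
+//	v := FileVersion(info, files[0]) // e.g. "go1.22", or Future if unknown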
diff --git a/vendor/golang.org/x/tools/internal/versions/versions.go b/vendor/golang.org/x/tools/internal/versions/versions.go
new file mode 100644
index 0000000..8d1f745
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/versions/versions.go
@@ -0,0 +1,57 @@
+// Copyright 2023 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package versions
+
+import (
+ "strings"
+)
+
+// Note: If we use build tags to use go/versions when go >=1.22,
+// we run into go.dev/issue/53737. Under some operations users would see an
+// import of "go/versions" even if they would not compile the file.
+// For example, during `go get -u ./...` (go.dev/issue/64490) we do not try to include
+// For this reason, this library is just a clone of go/versions for the moment.
+
+// Lang returns the Go language version for version x.
+// If x is not a valid version, Lang returns the empty string.
+// For example:
+//
+// Lang("go1.21rc2") = "go1.21"
+// Lang("go1.21.2") = "go1.21"
+// Lang("go1.21") = "go1.21"
+// Lang("go1") = "go1"
+// Lang("bad") = ""
+// Lang("1.21") = ""
+func Lang(x string) string {
+ v := lang(stripGo(x))
+ if v == "" {
+ return ""
+ }
+ return x[:2+len(v)] // "go"+v without allocation
+}
+
+// Compare returns -1, 0, or +1 depending on whether
+// x < y, x == y, or x > y, interpreted as Go versions.
+// The versions x and y must begin with a "go" prefix: "go1.21" not "1.21".
+// Invalid versions, including the empty string, compare less than
+// valid versions and equal to each other.
+// The language version "go1.21" compares less than the
+// release candidate and eventual releases "go1.21rc1" and "go1.21.0".
+// Custom toolchain suffixes are ignored during comparison:
+// "go1.21.0" and "go1.21.0-bigcorp" are equal.
+func Compare(x, y string) int { return compare(stripGo(x), stripGo(y)) }
+
+// IsValid reports whether the version x is valid.
+func IsValid(x string) bool { return isValid(stripGo(x)) }
+
+// stripGo converts from a "go1.21" version to a "1.21" version.
+// If v does not start with "go", stripGo returns the empty string (a known invalid version).
+func stripGo(v string) string {
+ v, _, _ = strings.Cut(v, "-") // strip -bigcorp suffix.
+ if len(v) < 2 || v[:2] != "go" {
+ return ""
+ }
+ return v[2:]
+}
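+
+// Illustrative results, not part of the upstream file:
+//
+//	Lang("go1.21rc2")                       == "go1.21"
+//	Compare("go1.21.0-bigcorp", "go1.21.0") == 0     // toolchain suffix ignored
+//	IsValid("1.21")                         == false // missing "go" prefix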
diff --git a/vendor/modules.txt b/vendor/modules.txt
index 9df46d2..d499303 100644
--- a/vendor/modules.txt
+++ b/vendor/modules.txt
@@ -1,8 +1,9 @@
-# github.com/a-h/templ v0.2.747
-## explicit; go 1.21
-github.com/a-h/templ
-github.com/a-h/templ/runtime
-github.com/a-h/templ/safehtml
+# golang.org/x/mod v0.20.0
+## explicit; go 1.18
+golang.org/x/mod/semver
+# golang.org/x/sync v0.8.0
+## explicit; go 1.18
+golang.org/x/sync/errgroup
# golang.org/x/text v0.17.0
## explicit; go 1.18
golang.org/x/text/collate
@@ -11,6 +12,7 @@ golang.org/x/text/internal
golang.org/x/text/internal/catmsg
golang.org/x/text/internal/colltab
golang.org/x/text/internal/format
+golang.org/x/text/internal/gen
golang.org/x/text/internal/language
golang.org/x/text/internal/language/compact
golang.org/x/text/internal/number
@@ -19,5 +21,36 @@ golang.org/x/text/internal/tag
golang.org/x/text/language
golang.org/x/text/message
golang.org/x/text/message/catalog
+golang.org/x/text/message/pipeline
+golang.org/x/text/runes
golang.org/x/text/transform
+golang.org/x/text/unicode/cldr
golang.org/x/text/unicode/norm
+# golang.org/x/tools v0.24.0
+## explicit; go 1.19
+golang.org/x/tools/go/ast/astutil
+golang.org/x/tools/go/buildutil
+golang.org/x/tools/go/callgraph
+golang.org/x/tools/go/callgraph/cha
+golang.org/x/tools/go/gcexportdata
+golang.org/x/tools/go/internal/cgo
+golang.org/x/tools/go/loader
+golang.org/x/tools/go/packages
+golang.org/x/tools/go/ssa
+golang.org/x/tools/go/ssa/ssautil
+golang.org/x/tools/go/types/objectpath
+golang.org/x/tools/go/types/typeutil
+golang.org/x/tools/internal/aliases
+golang.org/x/tools/internal/event
+golang.org/x/tools/internal/event/core
+golang.org/x/tools/internal/event/keys
+golang.org/x/tools/internal/event/label
+golang.org/x/tools/internal/gcimporter
+golang.org/x/tools/internal/gocommand
+golang.org/x/tools/internal/packagesinternal
+golang.org/x/tools/internal/pkgbits
+golang.org/x/tools/internal/stdlib
+golang.org/x/tools/internal/tokeninternal
+golang.org/x/tools/internal/typeparams
+golang.org/x/tools/internal/typesinternal
+golang.org/x/tools/internal/versions